repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
shravan-shandilya/web3.py | tests/utilities/test_construct_data_filter_regex.py | Python | mit | 2,233 | 0.000448 | import pytest
from web3.utils.filters import (
construct_data_filter_regex,
)
def hex_and_pad(*i):
return '0x' + ''.join(
hex(v).rstrip('L')[2:].zfill(64)
for v in i
)
SINGLE_VALUE = [
[hex_and_pad(1)],
]
TWO_VALUES = [
[hex_and_pad(12345), hex_and_pad(54321)],
]
TWO_VALUES_WITH_WILDCARD = [
[hex_and_pad(12345), None],
]
THREE_VALUES_WITH_WILDCARD = [
[hex_and_pad(12345), None, hex_and_pad(54321)],
]
MULTI_VALUE = [
[hex_and_pad(1)],
[hex_and_pad(2)],
]
MULTI_NESTED_VALUE = [
[hex_and_pad(1, 2)],
[hex_and_pad(2, 1)],
]
MULTI_NESTED_VALUE_WITH_WILDCARD = [
[hex_and_pad(1), None],
[ | hex_and_pad(2, 1)],
]
@pytest.mark.parametrize(
"filter_set,data_value,should_match",
(
(SINGLE_VALUE, hex_and_pad(1), True),
(SINGLE_VALUE, hex_and_pad(0), False),
(SINGLE_VALUE, hex_and_pad(2), False),
(TWO_VALUES, hex_and_pad(12345, 54321), True),
(TWO_VALUES_WITH_WILDCARD, hex_and_pad(12345, 54321), True),
(TWO_VALUES_WITH_WILDCARD, hex_and_pad(12345, 1), True),
(TWO_VALUES | _WITH_WILDCARD, hex_and_pad(1, 1), False),
(THREE_VALUES_WITH_WILDCARD, hex_and_pad(12345, 0, 54321), True),
(THREE_VALUES_WITH_WILDCARD, hex_and_pad(12345, 1, 54321), True),
(THREE_VALUES_WITH_WILDCARD, hex_and_pad(12345, 2**256 - 1, 54321), True),
(THREE_VALUES_WITH_WILDCARD, hex_and_pad(12344, 0, 54321), False),
(MULTI_VALUE, hex_and_pad(1), True),
(MULTI_VALUE, hex_and_pad(2), True),
(MULTI_VALUE, hex_and_pad(3), False),
(MULTI_VALUE, hex_and_pad(1, 2), True),
(MULTI_VALUE, hex_and_pad(2, 1), False),
(MULTI_NESTED_VALUE_WITH_WILDCARD, hex_and_pad(1, 1), True),
(MULTI_NESTED_VALUE_WITH_WILDCARD, hex_and_pad(1, 12345), True),
(MULTI_NESTED_VALUE_WITH_WILDCARD, hex_and_pad(2, 1), True),
(MULTI_NESTED_VALUE_WITH_WILDCARD, hex_and_pad(2, 12345), False),
)
)
def test_construct_data_filter_regex(filter_set, data_value, should_match):
data_filter_regex = construct_data_filter_regex(filter_set)
is_match = bool(data_filter_regex.match(data_value))
assert is_match is should_match, data_filter_regex
|
evernym/plenum | plenum/test/ledger/conftest.py | Python | apache-2.0 | 1,170 | 0 | import pytest
from plenum.common.constants import DOM | AIN_LEDGER_ID
from plenum.common.txn_util import reqToTxn
from plenum.test.helper import sdk_signed_random_requests
NUM_BATCHES = 3
TXNS_IN_BATCH = 5
def create_txns(looper, | sdk_wallet_client, count=TXNS_IN_BATCH):
reqs = sdk_signed_random_requests(looper, sdk_wallet_client, count)
return [reqToTxn(req) for req in reqs]
@pytest.fixture(scope='module')
def created_txns(ledger, looper, sdk_wallet_client):
txns = []
for i in range(NUM_BATCHES):
txns.append(create_txns(looper, sdk_wallet_client, TXNS_IN_BATCH))
return txns
@pytest.fixture(scope='module')
def ledger(txnPoolNodeSet):
return txnPoolNodeSet[0].getLedger(DOMAIN_LEDGER_ID)
@pytest.fixture(scope='module')
def inital_size(ledger):
return ledger.size
@pytest.fixture(scope='module')
def inital_root_hash(ledger):
return ledger.tree.root_hash
@pytest.fixture(scope='module')
def ledger_with_batches_appended(ledger,
created_txns):
for txn_batch in created_txns:
ledger.append_txns_metadata(txn_batch)
ledger.appendTxns(txn_batch)
return ledger
|
ingvar1995/filebot | bot.py | Python | gpl-3.0 | 20,494 | 0.002781 | from tox import Tox
import os
from settings import *
from toxcore_enums_and_consts import *
from ctypes import *
from util import Singleton, folder_size
from file_transfers import *
from collections import defaultdict
class Bot(Singleton):
def __init__(self, tox):
"""
:param tox: tox instance
"""
super(Bot, self).__init__()
self._tox = tox
self._file_transfers = {} # dict of file transfers. key - tuple (friend_number, file_number)
self._downloads = defaultdict(int) # defaultdict of downloads count
# -----------------------------------------------------------------------------------------------------------------
# Edit current user's data
# -----------------------------------------------------------------------------------------------------------------
def set_name(self, value):
self._tox.self_set_name(value.encode('utf-8'))
def set_status_message(self, value):
self._tox.self_set_status_message(value.encode('utf-8'))
# -----------------------------------------------------------------------------------------------------------------
# Private messages
# -----------------------------------------------------------------------------------------------------------------
def send_message(self, number, message, message_type=TOX_MESSAGE_TYPE['NORMAL']):
"""
Send message with message splitting
:param number: friend's number
:param message: message text
:param message_type: type of message
"""
while len(message) > TOX_MAX_MESSAGE_LENGTH:
size = TOX_MAX_MESSAGE_LENGTH * 4 / 5
last_part = message[size:TOX_MAX_MESSAGE_LENGTH]
if ' ' in last_part:
index = last_part.index(' ')
elif ',' in last_part:
index = last_part.index(',')
elif '.' in last_part:
index = last_part.index('.')
else:
index = TOX_MAX_MESSAGE_LENGTH - size - 1
index += size + 1
self._tox.friend_send_message(number, message_type, message[:index])
message = message[index:]
self._tox.friend_send_message(number, message_type, message)
def new_message(self, friend_num, message):
"""
New message
:param friend_num: number of friend who sent message
:param message: text of message
"""
id = self._tox.friend_get_public_key(friend_num) # public key of user
settings = Settings.get_instance()
message = message.strip()
# message parsing
if message == 'files': # get file list
if id in settings['read']:
s = ''
for f in os.listdir(settings['folder']):
f = unicode(f)
if os.path.isfile(os.path.join(settings['folder'], f)):
s += u'{} ({} bytes)\n'.format(f, os.path.getsize(os.path.join(settings['folder'], f)))
if not s:
s = 'Nothing found'
self.send_message(friend_num, s.encode('utf-8'), TOX_MESSAGE_TYPE['NORMAL'])
else:
self.send_message(friend_num, 'Not enough rights'.encode('utf-8'))
elif message.startswith('get '): # download file or all files
if id in settings['read']:
if '--all' not in message:
path = settings['folder'] + '/' + unicode(message[4:])
if os.path.exists(unicode(path)):
self.send_file(unicode(path), friend_num)
else:
self.send_message(friend_num, 'Wrong file name'.encode('utf-8'))
else:
for f in os.listdir(settings['folder']):
if os.path.is | file(os.path.join(settings['folder'], f)):
self.send_file(unicode(os.path.join(settings['folder'], f)), friend_num)
else:
self.send_message(friend_num, 'Not enough rights'.encode('utf-8'))
elif me | ssage == 'help': # help
self.send_message(friend_num, """
help - list of commands\n
rights - get access rights\n
files - show list of files (get access)\n
id - get bot's id (get access)\n
share <ToxID> <file_name> - send file to friend (get access)\n
share --all <file_name> - send file to all friends (get access)\n
size <file_name> - get size of file (get access)\n
get <file_name> - get file with specified filename (get access)\n
get --all - get all files (get access)\n
stats - show statistics (write access)\n
del <file_name> - remove file with specified filename (delete access)\n
rename <file_name> --new <new_file_name> - rename file (delete access)\n
user <ToxID> <rights> - new rights (example: rwdm) for user (masters only)\n
status <new_status> - new status message (masters only)\n
name <new_name> - new name (masters only)\n
message <ToxID> <message_text> - send message to friend (masters only)\n
message --all <message_text> - send message to all friends (masters only)\n
stop - stop bot (masters only)\n
fsize <folder_size_in_MB> - set folder size in MB (masters only)\n
Users with write access can send files to bot.
""".encode('utf-8'))
elif message == 'rights': # get rights
self.send_message(friend_num, 'Read: {}\nWrite: {}\nDelete: {}\nMaster: {}'
.format('Yes' if id in settings['read'] else 'No, sorry',
'Yes' if id in settings['write'] else 'No',
'Yes' if id in settings['delete'] else 'No',
'Yes, sir!' if id in settings['master'] else 'No'))
elif message.startswith('del '): # delete file
if id in settings['delete']:
path = settings['folder'] + '/' + message[4:]
if os.path.exists(path):
os.remove(path)
self.send_message(friend_num, 'File was successfully deleted')
else:
self.send_message(friend_num, 'Wrong file name'.encode('utf-8'))
else:
self.send_message(friend_num, 'Not enough rights'.encode('utf-8'))
elif message.startswith('user '): # new rights for user
if id not in settings['master']:
self.send_message(friend_num, 'Not enough rights'.encode('utf-8'))
return
try:
rights = message.split(' ')[2]
except:
rights = ''
id = message.split(' ')[1][:TOX_PUBLIC_KEY_SIZE * 2]
if id in settings['read']:
settings['read'].remove(id)
if id in settings['write']:
settings['write'].remove(id)
if id in settings['delete']:
settings['delete'].remove(id)
if 'r' in rights:
settings['read'].append(id)
if 'w' in rights:
settings['write'].append(id)
if 'd' in rights:
settings['delete'].append(id)
if 'm' in rights:
settings['master'].append(id)
settings.save()
self.send_message(friend_num, 'Updated'.encode('utf-8'))
elif message.startswith('status '): # new status
if id not in settings['master']:
self.send_message(friend_num, 'Not enough rights'.encode('utf-8'))
else:
self.set_status_message(message[7:])
elif message.startswith('name '): # new name
if id not in settings['master']:
self.send_message(friend_num, 'Not enough rights'.encode('utf-8'))
else:
self.set_name(message[5:])
elif message.startswith('share '): # send file to friend |
joeryan/python-euler | test4.py | Python | mit | 816 | 0.006127 | # unittest module for project euler problem 4
# palindrome products
import unittest
import palprod
class TestPalinProd(unittest.TestCase):
def setUp(self):
self.palProd = palprod.PalProd()
def testInitial(self):
self.palProd.setDigits(2)
self.palProd.calcMaxProd()
| assert self.palProd.getMaxProd() == (9009), "incorrect answer %d" % self.palProd.getMaxProd()
def testFinal(self):
self.palProd.setDigits(3)
self.palProd.calcMaxProd()
assert self.palProd.getMaxProd() == (906609), "incorrect answer %d" % self.palProd.getMaxProd()
palProdTestSuite = uni | ttest.TestSuite()
palProdTestSuite.addTest(TestPalinProd("testInitial"))
palProdTestSuite.addTest(TestPalinProd("testFinal"))
runner = unittest.TextTestRunner()
runner.run(palProdTestSuite)
|
btenaglia/hpc-historias-clinicas | hpc-historias-clinicas/diagnosticos/migrations/0014_auto_20150506_0117.py | Python | bsd-3-clause | 802 | 0.002494 | # -*- coding: utf-8 -*-
from __future__ import | unicode | _literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('diagnosticos', '0013_auto_20150505_2053'),
]
operations = [
migrations.AlterField(
model_name='diagnosticos',
name='fecha',
field=models.DateField(default=datetime.datetime(2015, 5, 6, 1, 17, 17, 301473), help_text='Formato: dd/mm/yyyy'),
preserve_default=True,
),
migrations.AlterField(
model_name='diagnosticos',
name='hora',
field=models.TimeField(default=datetime.datetime(2015, 5, 6, 1, 17, 17, 301520), help_text='Formato: hh:mm'),
preserve_default=True,
),
]
|
agoose77/hivesystem | hiveguilib/PQt/AntennaFoldState.py | Python | bsd-2-clause | 6,278 | 0.003982 | from __future__ import print_function
import weakref
from functools import partial
from .. import PersistentIDManager
def _hide(widgets):
for w in widgets: w.hide()
def _show(widgets):
for w in widgets: w.show()
def control_visibility(widget):
label = widget.parent().layout().labelForField(widget)
widgets = [widget, label]
return partial(_show, widgets), partial(_hide, widgets)
class AntennaFoldState(object):
def __init__(self, parent):
self._parent = weakref.ref(parent)
self._widgets = {}
self._submodels = {}
self._values_to_set = []
self._variables_to_set = []
self._persisent_id_manager = PersistentIDManager()
def p_expand(self, | workerid, a):
if a not in self._widgets[workerid]: return
for on, off in self._widgets[workerid][a]:
on()
def gui_expands(self, persistent_id, a): |
workerid = self._persisent_id_manager.get_temporary_id(persistent_id)
self._parent().gui_expands(workerid, a)
def p_fold(self, workerid, a):
if a not in self._widgets[workerid]: return
for on, off in self._widgets[workerid][a]:
off()
def gui_folds(self, persistent_id, a):
workerid = self._persisent_id_manager.get_temporary_id(persistent_id)
self._parent().gui_folds(workerid, a)
def gui_sets_value(self, persistent_id, a, value):
workerid = self._persisent_id_manager.get_temporary_id(persistent_id)
self._parent().gui_sets_value(workerid, a, value)
def init_form(self, workerid, form):
p = self._parent()
state = p.states[workerid]
if state is None: return
for a in state:
antenna = state[a]
ele = getattr(form, a, None)
if ele is None: continue
name = getattr(ele, "name", a)
ele.add_button("Expand %s" % name, "before")
if antenna.foldable:
ele.add_button("Fold %s" % name, "before")
def init_widget(self, workerid, widget, controller):
from spyder.qtform.anyQt.QtGui import QFormLayout
p = self._parent()
state = p.states[workerid]
if state is None: return
view = controller._view()
model = controller._model()
self._widgets[workerid] = {}
self._submodels[workerid] = {}
persistent_id = self._persisent_id_manager.create_persistent_id(workerid)
for antenna_name in state:
antenna = state[antenna_name]
widgets = []
ele = getattr(view, antenna_name, None)
if ele is None: continue
if not hasattr(ele, "widget"):
raise Exception("Unfoldable: %s.%s has no associated widget in parameter tab" % (workerid, antenna_name))
e = ele.widget
classname = e.metaObject().className()
if classname == "QLineEdit":
show, hide = control_visibility(e)
on, off = show, hide
else:
on, off = e.show, e.hide
widgets.append((off, on)) # expand / fold
# KLUDGE: Qt XML generator is pretty borked up... this will put the buttons in a better place
newlayout = None
pw = ele.buttons[0].widget.parent()
if pw.objectName().startswith("_expw"):
newlayout = pw.parent().layout().children()[0]
if antenna.foldable:
b = ele.buttons[1] #Fold button
b.listen(partial(self.gui_folds, persistent_id, antenna_name))
if newlayout is not None: ##KLUDGE
newlayout.addWidget(b.widget)
on, off = b.widget.show, b.widget.hide
widgets.append((on, off)) #expand / fold
b = ele.buttons[0] #Expand button
b.listen(partial(self.gui_expands, persistent_id, antenna_name))
if newlayout is not None: ##KLUDGE
newlayout.addWidget(b.widget)
on, off = b.widget.show, b.widget.hide
widgets.append((off, on)) #expand / fold
self._widgets[workerid][antenna_name] = widgets
submodel = getattr(model, antenna_name, None)
self._submodels[workerid][antenna_name] = submodel
submodel._listen(partial(self.gui_sets_value, persistent_id, antenna_name))
currv = [v for v in self._values_to_set if v[0] == workerid]
self._values_to_set = [v for v in self._values_to_set if v[0] != workerid]
done = set()
for wid, member, value in currv:
done.add(member)
self._submodels[workerid][member]._set(value)
currv = [v for v in self._variables_to_set if v[0] == workerid]
self._variables_to_set = [v for v in self._variables_to_set if v[0] != workerid]
for wid, member in currv:
if member in done: continue
value = self._submodels[workerid][member]._get()
if value is not None:
self._parent().gui_sets_value(workerid, member, value)
def remove_worker(self, workerid):
self._widgets.pop(workerid)
self._submodels.pop(workerid)
self._persisent_id_manager.remove_with_temporary_id(workerid)
def rename_worker(self, workerid, newid):
w = self._widgets.pop(workerid)
self._widgets[newid] = w
s = self._submodels.pop(workerid)
self._submodels[newid] = s
self._persisent_id_manager.change_temporary_with_temporary_id(workerid, newid)
def p_set_value(self, workerid, member, value):
assert self._parent().states[workerid][member].is_folded == True, (workerid, member)
init = self._parent()._init_widget.get(workerid, False)
if value is None:
if init:
value = self._submodels[workerid][member]._get()
if value is not None:
self._parent().gui_sets_value(workerid, member, value)
else:
self._variables_to_set.append((workerid, member))
else:
if init:
self._submodels[workerid][member]._set(value)
else:
self._values_to_set.append((workerid, member, value)) |
DVS-P2P/bubblestorm | testbed/setup.py | Python | gpl-3.0 | 2,416 | 0.031056 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import subprocess
import os
import sys
import shutil
# svn:
#__version__ = subprocess.Popen(['svnversion','-n','.'],shell=False,stdout=subprocess.PIPE,stderr=subprocess.PIPE).communicate()[0].decode()
# git:
GIT_LOG_FORMAT_OPTIONS = ['%ad','%H']
GIT_LOG_FORMAT_STRING = '\t'.join(GIT_LOG_FORMAT_OPTIONS)
__version__ = subprocess.Popen(['git','log','-1','--name-status','--date=short','--format="{0}"'.format(GIT_LOG_FORMAT_STRING)],shell=False,stdout=subprocess.PIPE,stderr=subprocess.PIPE).communicate()[0] | .decode().split("\n")[0][1:-1]
(version_date,version_tag) = __version__.split('\t')
def getVersion():
return '{0}-{1}'.format(version_date,version_tag)
sdistMode = False
if len(sys.argv) > 1 and sys.argv[1] == "sdist":
sdistMode = True
if sdistMode:
sqlFiles = ['begin-transaction.sql', 'commit.sql', 'connection_type.sql', 'experiments.sql', 'lifetimes.sql', 'log_filters.sql', 'node_groups.sql', | 'node_sets.sql', 'schema-config.sql', 'schema-output.sql', 'workload.sql']
simulatorSqlFolder = '../simulator/sql/{0}'
testbedSqlFolder = 'src/testbed/sql/{0}'
def copyFile(fileName):
dstFile = testbedSqlFolder.format(fileName)
srcFile = simulatorSqlFolder.format(fileName)
if os.path.exists(dstFile):
print('"{0}"" exists, skip file'.format(dstFile))
return False
else:
print('copy "{0}"->"{1}"'.format(srcFile,dstFile))
shutil.copy2(srcFile, dstFile)
return True
deleteFiles = []
for f in sqlFiles:
if copyFile(f):
deleteFiles.append(f)
setup(
name = 'Testbed',
description = 'Prototype testbed environment for peer-to-peer applications. The prototypes are controlled by a central instance carrying out the experiment.',
version = getVersion(),
author = 'Marcel Blöcher',
url = 'https://www.dvs.tu-darmstadt.de',
author_email = 'bloecher@rbg.informatik.tu-darmstadt.de',
include_package_data=True,
packages = find_packages('src') ,
package_dir = {'testbed' : 'src/testbed'} ,
install_requires=[],
)
if sdistMode:
for f in deleteFiles:
filePath = testbedSqlFolder.format(f)
print('remove "{0}"'.format(filePath))
os.remove(filePath)
|
kaji-project/pynag | tests/test_command.py | Python | gpl-2.0 | 27,749 | 0.001189 | import os
import sys
# Make sure we import from working tree
pynagbase = os.path.dirname(os.path.realpath(__file__ + "/.."))
sys.path.insert(0, pynagbase)
from tests import tests_dir
import unittest2 as unittest
from mock import MagicMock, patch, __version__
try:
# open_mock comes with mock 1.0.1
from mock import mock_open
except ImportError:
def mock_open(mock=None, data=None):
file_spec = file
if mock is None:
mock = MagicMock(spec=file_spec)
handle = MagicMock(spec=file_spec)
handle.write.return_value = None
if data is None:
handle.__enter__.return_value = handle
else:
handle.__enter__.return_value = data
mock.return_value = handle
return mock
from pynag.Control import Command
def is_mock_to_old():
major, minor, patch = __version__.split('.')
if int(major) == 0 and int(minor) < 8:
return True
else:
return False
class testCommandsToCommandFile(unittest.TestCase):
def setUp(self):
self.command_file = '/tmp/cmdfile'
self.timestamp = 1368219495
self.testhost = 'hosttest.example.com'
self.testauthor = 'user@example.com'
self.test_svc_desc = 'Test Service'
self.test_svc_group = 'TestSVCGroup'
self.test_check_command = 'test_check_command'
self.test_event_handler_command = 'test_event_handler'
self.check_interval = 50
self.command = Command
# Setup patching for open()
self.command_open_mock = mock_open()
self.patcher1 = patch('pynag.Control.Command.open',
self.command_open_mock, create=True)
self.patcher1.start()
def tearDown(self):
self.patcher1.stop()
def test_add_host_comment(self):
persistent = 0
comment = 'Test Comment!'
self.command.add_host_comment(
host_name=self.testhost,
persistent=persistent,
author=self.testauthor,
comment=comment,
command_file=self.command_file, timestamp=self.timestamp
)
expected_nagios_command = '[%s] ADD_HOST_COMMENT;%s;%s;%s;%s' % (
self.timestamp, self.testhost, persistent,
self.testauthor, comment
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected_nagios_command + '\n')
def test_shutdown_program(self):
self.command.shutdown_program(
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] SHUTDOWN_PROGRAM;' % self.timestamp
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_disable_service_group_passive_svc_checks(self):
self.command.disable_servicegroup_passive_svc_checks(
servicegroup_name=self.test_svc_group,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] DISABLE_SERVICEGROUP_PASSIVE_SVC_CHECKS;%s' % (
self.timestamp, self.test_svc_group
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_enable_service_group_passive_host_checks(self):
self.command.enable_servicegroup_passive_host_checks(
servicegroup_name=self.test_svc_group,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] | ENABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS;%s' % (
self.timestamp, self.test_svc_group
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_disable_servicegroup_passive_host_checks(self):
| self.command.disable_servicegroup_passive_host_checks(
servicegroup_name=self.test_svc_group,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] DISABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS;%s' % (
self.timestamp, self.test_svc_group
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_change_global_host_event_handler(self):
self.command.change_global_host_event_handler(
event_handler_command=self.test_event_handler_command,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] CHANGE_GLOBAL_HOST_EVENT_HANDLER;%s' % (
self.timestamp, self.test_event_handler_command
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_change_global_svc_event_handler(self):
self.command.change_global_svc_event_handler(
event_handler_command=self.test_event_handler_command,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] CHANGE_GLOBAL_SVC_EVENT_HANDLER;%s' % (
self.timestamp, self.test_event_handler_command
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_change_host_event_handler(self):
self.command.change_host_event_handler(
host_name=self.testhost,
event_handler_command=self.test_event_handler_command,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] CHANGE_HOST_EVENT_HANDLER;%s;%s' % (
self.timestamp, self.testhost, self.test_event_handler_command
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_change_svc_event_handler(self):
self.command.change_svc_event_handler(
host_name=self.testhost,
service_description=self.test_svc_desc,
event_handler_command=self.test_event_handler_command,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] CHANGE_SVC_EVENT_HANDLER;%s;%s;%s' % (
self.timestamp, self.testhost, self.test_svc_desc,
self.test_event_handler_command
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_change_host_check_command(self):
self.command.change_host_check_command(
host_name=self.testhost,
check_command=self.test_check_command,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] CHANGE_HOST_CHECK_COMMAND;%s;%s' % (
self.timestamp, self.testhost, self.test_check_command
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.command_open_mock()
handle.write.assert_called_once_with(expected + '\n')
def test_change_svc_check_command(self):
self.command.change_svc_check_command(
host_name=self.testhost,
service_description=self.test_svc_desc,
check_command=self.test_check_command,
command_file=self.command_file, timestamp=self.timestamp
)
expected = '[%s] CHANGE_SVC_CHECK_COMMAND;%s;%s;%s' % (
self.timestamp, self.testhost, self.test_svc_desc,
self.test_check_command
)
self.command_open_mock.assert_called_with(self.command_file, 'a')
handle = self.comma |
dunkhong/grr | grr/server/grr_response_server/gui/selenium_tests/flow_archive_test.py | Python | apache-2.0 | 8,184 | 0.003299 | #!/usr/bin/env python
"""Test the flow archive."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from absl import app
import mock
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_server.flows.general import transfer as flows_transfer
from grr_response_server.gui import api_call_router_with_approval_checks
from grr_response_server.gui import archive_generator
from grr_response_server.gui import gui_test_lib
from grr_response_server.gui.api_plugins import flow as api_flow
from grr_response_server.output_plugins import csv_plugin
from grr_response_server.output_plugins import sqlite_plugin
from grr_response_server.output_plugins import yaml_plugin
from grr.test_lib import action_mocks
from grr.test_lib import flow_test_lib
from grr.test_lib import test_lib
class TestFlowArchive(gui_test_lib.GRRSeleniumTest):
def setUp(self):
super(TestFlowArchive, self).setUp()
self.client_id = self.SetupClient(0)
self.RequestAndGrantClientApproval(self.client_id)
self.action_mock = action_mocks.FileFinderClientMock()
def testDoesNotShowGenerateArchiveButtonForNonExportableRDFValues(self):
flow_test_lib.TestFlowHelper(
gui_test_lib.FlowWithOneNetworkConnectionResult.__name__,
self.action_mock,
client_id=self.client_id,
token=self.token)
self.Open("/#/clients/%s" % self.client_id)
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('FlowWithOneNetworkConnectionResult')")
self.Click("link=Results")
self.WaitUntil(self.IsTextPresent, "42")
self.WaitUntilNot(self.IsTextPresent,
"Files referenced in this collection can be downloaded")
def testDoesNotShowGenerateArchiveButtonWhenResultCollectionIsEmpty(self):
flow_test_lib.TestFlowHelper(
gui_test_lib.RecursiveTestFlow.__name__,
self.action_mock,
client_id=self.client_id,
token=self.token)
self.Open("/#/clients/%s" % self.client_id)
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('RecursiveTestFlow')")
self.Click("link=Results")
self.WaitUntil(self.IsTextPresent, "Value")
self.WaitUntilNot(self.IsTextPresent,
"Files referenced in this collection can be downloaded")
def testShowsGenerateArchiveButtonForGetFileFlow(self):
pathspec = rdf_paths.PathSpec(
path=os.path.join(self.base_path, "test.plist"),
pathtype=rdf_paths.PathSpec.PathType.OS)
flow_test_lib.TestFlowHelper(
flows_transfer.GetFile.__name__,
self.action_mock,
client_id=self.client_id,
pathspec=pathspec,
token=self.token)
self.Open("/#/clients/%s" % self.client_id)
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('GetFile')")
self.Click("link=Results")
self.WaitUntil(self.IsTextPresent,
"Files referenced in this collection can be downloaded")
def testGenerateArchiveButtonGetsDisabledAfterClick(self):
pathspec = rdf_paths.PathSpec(
path=os.path.join(self.base_path, "test.plist"),
pathtype=rdf_paths.PathSpec.PathType.OS)
flow_test_lib.TestFlowHelper(
flows_transfer.GetFile.__name__,
self.action_mock,
client_id=self.client_id,
pathspec=pathspec,
token=self.token)
self.Open("/#/clients/%s" % self.client_id)
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('GetFile')")
self.Click("link=Results")
self.Click("css=button.DownloadButton")
self.WaitUntil(self.IsElementPresent, "css=button.DownloadButton[disabled]")
self.WaitUntil(self.IsTextPresent, "Generation has started")
def testShowsErrorMessageIfArchiveStreamingFailsBeforeFirstChunkIsSent(self):
pathspec = rdf_paths.PathSpec(
path=os.path.join(self.base_path, "test.plist"),
pathtype=rdf_paths.PathSpec.PathType.OS)
flow_id = flow_test_lib.TestFlowHelper(
flows_transfer.GetFile.__name__,
self.action_mock,
client_id=self.client_id,
check_flow_errors=False,
pathspec=pathspec,
| token=self.token)
def RaisingStub(*unused_args, **unused_kwargs):
raise RuntimeError("somet | hing went wrong")
with utils.Stubber(archive_generator.CollectionArchiveGenerator, "Generate",
RaisingStub):
self.Open("/#/clients/%s" % self.client_id)
self.Click("css=a[grrtarget='client.flows']")
self.Click("css=td:contains('GetFile')")
self.Click("link=Results")
self.Click("css=button.DownloadButton")
self.WaitUntil(self.IsTextPresent,
"Can't generate archive: Unknown error")
self.WaitUntil(self.IsUserNotificationPresent,
"Archive generation failed for flow %s" % flow_id)
@mock.patch.object(
api_call_router_with_approval_checks.ApiCallRouterWithApprovalChecks,
"GetExportedFlowResults",
return_value=api_flow.ApiGetExportedFlowResultsHandler())
def testClickingOnDownloadAsCsvZipStartsDownload(self, mock_method):
self.checkClickingOnDownloadAsStartsDownloadForType(
mock_method, csv_plugin.CSVInstantOutputPlugin.plugin_name,
csv_plugin.CSVInstantOutputPlugin.friendly_name)
@mock.patch.object(
api_call_router_with_approval_checks.ApiCallRouterWithApprovalChecks,
"GetExportedFlowResults",
return_value=api_flow.ApiGetExportedFlowResultsHandler())
def testClickingOnDownloadAsYamlZipStartsDownload(self, mock_method):
self.checkClickingOnDownloadAsStartsDownloadForType(
mock_method,
yaml_plugin.YamlInstantOutputPluginWithExportConversion.plugin_name,
yaml_plugin.YamlInstantOutputPluginWithExportConversion.friendly_name)
@mock.patch.object(
api_call_router_with_approval_checks.ApiCallRouterWithApprovalChecks,
"GetExportedFlowResults",
return_value=api_flow.ApiGetExportedFlowResultsHandler())
def testClickingOnDownloadAsSqliteZipStartsDownload(self, mock_method):
self.checkClickingOnDownloadAsStartsDownloadForType(
mock_method, sqlite_plugin.SqliteInstantOutputPlugin.plugin_name,
sqlite_plugin.SqliteInstantOutputPlugin.friendly_name)
  def checkClickingOnDownloadAsStartsDownloadForType(self, mock_method, plugin,
                                                     plugin_display_name):
    """Shared helper: runs a flow, then clicks 'Download As' for a plugin.

    Runs a GetFile flow to produce results, opens the flow's Results tab in
    the UI, selects the given instant-output plugin and clicks the download
    button, then waits until the (mocked) export API was called with the
    expected arguments.
    """
    pathspec = rdf_paths.PathSpec(
        path=os.path.join(self.base_path, "test.plist"),
        pathtype=rdf_paths.PathSpec.PathType.OS)
    session_id = flow_test_lib.TestFlowHelper(
        flows_transfer.GetFile.__name__,
        pathspec=pathspec,
        client_mock=self.action_mock,
        client_id=self.client_id,
        token=self.token)

    self.Open("/#/clients/%s/flows/%s" % (self.client_id, session_id))
    self.Click("link=Results")
    self.Select("id=plugin-select", plugin_display_name)
    self.Click("css=grr-download-collection-as button[name='download-as']")

    def MockMethodIsCalled():
      # Polled predicate: the browser download happens asynchronously, so we
      # repeatedly check whether the export handler was invoked.
      try:
        # Mock should be called twice: once for HEAD (to check permissions)
        # and once for GET methods.
        mock_method.assert_called_with(
            api_flow.ApiGetExportedFlowResultsArgs(
                client_id=self.client_id,
                flow_id=session_id,
                plugin_name=plugin),
            token=mock.ANY)
        return True
      except AssertionError:
        return False

    self.WaitUntil(MockMethodIsCalled)
  def testDoesNotShowDownloadAsPanelIfCollectionIsEmpty(self):
    """The 'Download As' panel is hidden when a flow produced no results."""
    # RecursiveTestFlow produces no result collection entries, so the
    # download widget must not be rendered on the Results tab.
    session_id = flow_test_lib.TestFlowHelper(
        gui_test_lib.RecursiveTestFlow.__name__,
        client_mock=self.action_mock,
        client_id=self.client_id,
        token=self.token)

    self.Open("/#/clients/%s/flows/%s" % (self.client_id, session_id))
    self.Click("link=Results")

    self.WaitUntil(self.IsTextPresent, "Value")
    self.WaitUntilNot(self.IsElementPresent, "grr-download-collection-as")
if __name__ == "__main__":
app.run(test_lib.ma |
SRabbelier/Melange | app/soc/modules/gsoc/views/helper/request_data.py | Python | apache-2.0 | 14,575 | 0.00741 | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the RequestData object that will be created for each
request in the GSoC module.
"""
__authors__ = [
'"Daniel Hans" <daniel.m.hans@gmail.com>',
'"Sverre Rabbelier" <sverre@rabbelier.nl>',
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
import datetime
from google.appengine.api import users
from google.appengine.ext import db
from django.core.urlresolvers import reverse
from soc.models import role
from soc.logic.models.host import logic as host_logic
from soc.logic.models.site import logic as site_logic
from soc.logic.models.user import logic as user_logic
from soc.views.helper.access_checker import isSet
from soc.views.helper.request_data import RequestData
from soc.modules.gsoc.models import profile
from soc.modules.gsoc.logic.models.mentor import logic as mentor_logic
from soc.modules.gsoc.logic.models.organization import logic as org_logic
from soc.modules.gsoc.logic.models.org_admin import logic as org_admin_logic
from soc.modules.gsoc.logic.models.org_app_survey import logic as org_app_logic
from soc.modules.gsoc.logic.models.program import logic as program_logic
from soc.modules.gsoc.logic.models.student import logic as student_logic
def isBefore(date):
  """Returns True iff the current UTC time is strictly before date.

  Args:
    date: a datetime.datetime in UTC, or None when the deadline is unset.

  Returns:
    A bool: True when date is set and still lies in the future, False
    otherwise. The previous implementation returned None (not False) for an
    unset date, contradicting its own docstring; truthiness is unchanged, so
    existing callers are unaffected.
  """
  return date is not None and datetime.datetime.utcnow() < date
def isAfter(date):
  """Returns True iff the current UTC time is strictly after date.

  Args:
    date: a datetime.datetime in UTC, or None when the deadline is unset.

  Returns:
    A bool: True when date is set and already lies in the past, False
    otherwise. The previous implementation returned None (not False) for an
    unset date, contradicting its own docstring; truthiness is unchanged, so
    existing callers are unaffected.
  """
  return date is not None and date < datetime.datetime.utcnow()
def isBetween(start, end):
  """Returns True iff utcnow() is between start and end.

  Falsy when either bound is unset (None); see isAfter/isBefore for the
  individual bound checks.
  """
  return isAfter(start) and isBefore(end)
class TimelineHelper(object):
  """Helper class for the determination of the currently active period.

  Methods ending with "On", "Start", or "End" return a date.
  Methods ending with "Between" return a tuple with two dates.
  Methods ending with neither return a Boolean.
  """

  def __init__(self, timeline, org_app):
    # timeline: the program's GSoCTimeline entity
    # org_app: the organization application survey entity (may be unset)
    self.timeline = timeline
    self.org_app = org_app

  def currentPeriod(self):
    """Return where we are currently on the timeline.
    """
    if not self.programActive():
      return 'offseason'
    # Checked in order; the first matching predicate determines the period.
    period_checks = (
        (self.beforeOrgSignupStart, 'kickoff_period'),
        (self.studentsAnnounced, 'coding_period'),
        (self.afterStudentSignupStart, 'student_signup_period'),
        (self.afterOrgSignupStart, 'org_signup_period'),
    )
    for predicate, period in period_checks:
      if predicate():
        return period
    return 'offseason'

  def orgsAnnouncedOn(self):
    return self.timeline.accepted_organization_announced_deadline

  def programActiveBetween(self):
    return (self.timeline.program_start, self.timeline.program_end)

  def orgSignupStart(self):
    return self.org_app.survey_start

  def orgSignupBetween(self):
    return (self.org_app.survey_start, self.org_app.survey_end)

  def studentSignupStart(self):
    return self.timeline.student_signup_start

  def studentsSignupBetween(self):
    return (self.timeline.student_signup_start,
            self.timeline.student_signup_end)

  def studentsAnnouncedOn(self):
    return self.timeline.accepted_students_announced_deadline

  def programActive(self):
    return isBetween(*self.programActiveBetween())

  def beforeOrgSignupStart(self):
    return self.org_app and isBefore(self.orgSignupStart())

  def afterOrgSignupStart(self):
    return self.org_app and isAfter(self.orgSignupStart())

  def orgSignup(self):
    if not self.org_app:
      return False
    return isBetween(*self.orgSignupBetween())

  def orgsAnnounced(self):
    return isAfter(self.orgsAnnouncedOn())

  def afterStudentSignupStart(self):
    return isAfter(self.studentSignupStart())

  def studentSignup(self):
    return isBetween(*self.studentsSignupBetween())

  def studentsAnnounced(self):
    return isAfter(self.studentsAnnouncedOn())

  def mentorSignup(self):
    return self.programActiveBetween() and self.orgsAnnounced()
class RequestData(RequestData):
"""Object containing data we query for each request in the GSoC module.
The only view that will be exempt is the one that creates the program.
Fields:
site: The Site entity
user: The user entity (if logged in)
program: The GSoC program entity that the request is pointing to
program_timeline: The GSoCTimeline entity
timeline: A TimelineHelper entity
is_host: is the current user a host of the program
org_admin_for: the organizations the current user is an admin for
mentor_for: the organizations the current user is a mentor for
student_info: the StudentInfo for the current user and program
Raises:
out_of_band: 404 when the program does not exist
"""
  def __init__(self):
    """Constructs an empty RequestData object.

    All fields start out unset here; populate() fills in the program-wide
    fields and the user-specific fields are resolved later during request
    processing.
    """
    super(RequestData, self).__init__()
    # program wide fields
    self.program = None
    self.program_timeline = None
    self.org_app = None
    # user profile specific fields
    self.profile = None
    self.is_host = False
    # organizations the user mentors / administers (entity lists)
    self.mentor_for = []
    self.org_admin_for = []
    # per-request memoization caches used by appliedTo()
    self.applied_to = []
    self.not_applied_to = []
    self.student_info = None
def orgAdminFor(self, organization):
"""Returns true iff the user is admin for the specified organization.
Organization may either be a key or an organization instance.
"""
if isinstance(organization, db.Model):
organization = organization.key()
return organization in [i.key() for i in self.org_admin_for]
def mentorFor(self, organization):
"""Returns true iff the user is mentor for the specified organization.
Organization may either be a key or an organization instance.
"""
if isinstance(organization, db.Model):
organization = organization.key()
return organization in [i.key() for i in self.mentor_for]
  def appliedTo(self, organization):
    """Returns true iff the user has applied for the specified organization.

    Organization may either be a key or an organization instance.

    Results are memoized per request in self.applied_to /
    self.not_applied_to so repeated checks avoid extra datastore queries.
    """
    if isinstance(organization, db.Model):
      organization = organization.key()
    if organization in self.applied_to:
      return True
    if organization in self.not_applied_to:
      return False
    # Imported locally, presumably to avoid a circular import -- TODO confirm.
    from soc.models.request import Request
    # keys_only: existence of a matching Request is all we need.
    query = db.Query(Request, keys_only=True)
    query.filter('user = ', self.user)
    query.filter('group = ', organization)
    applied = bool(query.get())
    # Cache the outcome for subsequent calls within this request.
    if applied:
      self.applied_to.append(organization)
    else:
      self.not_applied_to.append(organization)
    return applied
  def isPossibleMentorForProposal(self):
    """Checks if the user is a possible mentor for the proposal in the data.

    Requires self.profile and self.proposal to have been set beforehand
    (self.proposal is not initialized in __init__; it is attached elsewhere
    during request processing -- hence the isSet() assertions).
    """
    assert isSet(self.profile)
    assert isSet(self.proposal)
    return self.profile.key() in self.proposal.possible_mentors
def populate(self, redirect, request, args, kwargs):
"""Populates the fields in the RequestData object.
Args:
request: Django HTTPRequest object.
args & kwargs: The args and kwargs django sends along.
"""
super(RequestData, self).populate(redirect, request, args, kwargs)
if kwargs.get('sponsor') and kwargs.get('program'):
program_keyfields = {'link_id': kwargs['program'],
'scope_path': kwargs['sponsor']}
self.program = program_logic.getFromKeyFieldsOr404(program_keyfields)
else:
self.program = self.site.active_program
self.program_timeline = self.program.timeline
org_app_fields = {'scope': self.program}
|
avmarchenko/exatomic | exatomic/interfaces/tens.py | Python | apache-2.0 | 2,737 | 0.008769 | ## -*- coding: utf-8 -*-
## Copyright (c) 2015-2018, Exa Analytics Development Team
## Distributed under the terms of the Apache License 2.0
#import six
##import numpy as np
#import pandas as pd
#from io import StringIO
##from exa import Series, TypedMeta
#from exa import TypedMeta
#from exatomic.core import Editor, Tensor
#
#
#class Meta(TypedMeta):
# tensor = Tensor
#
#
#class RTensor(six.with_metaclass(Meta, Editor)):
# """
# This is a simple script to read a rank-2 tensor file with frame, label and atom index
# labels. The format for such a file is,
#
# 0: f=** l=** a=**
# 1: xx xy xz
# 2: yx yy yz
# 3: zx zy zz
# 4:
# 5: Same as above for a second tensor
#
# """
### Must make this into a class that looks like the XYZ and Cube
## classes. Must have something like parse_tensor.
## Then on the Tensor class there should be something that can
## be activated to find the eigenvalues and eigenvectors of the
## matrix to plot the basis vectors.
## Look at untitled1.ipynb for more info.
# _to_universe = Editor.to_universe
#
# def to_universe(self):
# raise NotImplementedError("Tensor file format has no atom table")
#
# def parse_tensor(self):
# df = pd.read_csv(StringIO(str(self)), delim_whitespace=True, header=None,
# skip_blank_lines=False)
# #print(df)
# try:
# i=0
# data = ''
# while True:
# a = df.loc[[i*5],:].values[0]
# labels = []
# for lbl in a:
# d = lbl.split('=')
# labels.append(d[1])
# cols = ['xx','xy','xz','yx','yy','yz','zx','zy','zz']
# af = pd.DataFrame([df.loc[[i*5+1,i*5+2,i*5+3],:].unstack().values], \
# columns=cols)
# af['frame'] = | labels[0] if labels[0] != '' else 0
# af['label'] = labels[1] if labels[1] != '' else None
# af['atom'] = labels[2] if labels[0] != '' else 0
# if i >= 1:
# data = pd.concat([data,af],keys=[o for o in range(i+1)])
# | #data = data.append(af)
# print('tens.py--------')
# print(data)
# print('---------------')
# else:
# data = af
# i+=1
# except:
# print('tens.py--------')
# print("Reached EOF reading {} tensor".format(i))
# print(data)
# print('---------------')
# self.tensor = data
#
## @classmethod
## def from_universe(cls):
## pass
|
japaniel/CloudFerry | cloudferrylib/os/actions/networks_transporter.py | Python | apache-2.0 | 1,058 | 0 | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You | may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under | the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.
from cloudferrylib.base.action import transporter
from cloudferrylib.utils import utils as utl
class NetworkTransporter(transporter.Transporter):
    """Copies network configuration from the source cloud to the destination."""

    def run(self, **kwargs):
        """Read network info from the source cloud and deploy it on the
        destination cloud.

        Accepts an optional 'search_opts_tenant' dict in kwargs that is
        forwarded to the source resource's read_info(). Returns an empty
        dict (no task output).
        """
        opts = kwargs.get('search_opts_tenant', {})
        source = self.src_cloud.resources[utl.NETWORK_RESOURCE]
        destination = self.dst_cloud.resources[utl.NETWORK_RESOURCE]
        destination.deploy(source.read_info(**opts))
        return {}
|
mbakke/ganeti | lib/server/rapi.py | Python | bsd-2-clause | 11,971 | 0.008186 | #
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2012, 2013 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Ganeti Remote API master script.
"""
# pylint: disable=C0103
# C0103: Invalid name ganeti-watcher
from __future__ import print_function
import logging
import optparse
import sys
import os
import os.path
import errno
try:
from pyinotify import pyinotify # pylint: disable=E0611
except ImportError:
import pyinotify
from ganeti import asyncnotifier
from ganeti import constants
from ganeti import http
from ganeti import daemon
from ganeti import ssconf
import ganeti.rpc.errors as rpcerr
from ganeti import serializer
from ganeti import compat
from ganeti import utils
from ganeti import pathutils
from ganeti.rapi import connector
from ganeti.rapi import baserlib
import ganeti.http.auth # pylint: disable=W0611
import ganeti.http.server # pylint: disable=W0611
class RemoteApiRequestContext(object):
  """Data structure for Remote API requests.

  """
  def __init__(self):
    # All fields start unset; they are filled in lazily by the request
    # handler once the request has been mapped to a resource.
    for field in ("handler", "handler_fn", "handler_access", "body_data"):
      setattr(self, field, None)
class RemoteApiHandler(http.auth.HttpServerRequestAuthentication,
                       http.server.HttpServerHandler):
  """REST request handler for the Ganeti Remote API.

  Combines plain HTTP request handling with HTTP basic authentication;
  every request is mapped to a resource handler via the resource mapper.

  """
  AUTH_REALM = "Ganeti Remote API"

  def __init__(self, user_fn, reqauth, _client_cls=None):
    """Initializes this class.

    @type user_fn: callable
    @param user_fn: Function receiving username as string and returning
      L{http.auth.PasswordFileUser} or C{None} if user is not found
    @type reqauth: bool
    @param reqauth: Whether to require authentication

    """
    # pylint: disable=W0233
    # pylint doesn't see the second parent class here
    http.server.HttpServerHandler.__init__(self)
    http.auth.HttpServerRequestAuthentication.__init__(self)
    self._client_cls = _client_cls
    self._resmap = connector.Mapper()
    self._user_fn = user_fn
    self._reqauth = reqauth

  @staticmethod
  def FormatErrorMessage(values):
    """Formats the body of an error message.

    @type values: dict
    @param values: dictionary with keys C{code}, C{message} and C{explain}.
    @rtype: tuple; (string, string)
    @return: Content-type and response body

    """
    return (http.HTTP_APP_JSON, serializer.DumpJson(values))

  def _GetRequestContext(self, req):
    """Returns the context for a request.

    The context is built once per request and cached in C{req.private}.

    """
    if req.private is None:
      (handler_cls, items, args) = \
        self._resmap.getController(req.request_path)

      context = RemoteApiRequestContext()
      context.handler = handler_cls(items, args, req,
                                    _client_cls=self._client_cls)

      method = req.request_method.upper()
      try:
        context.handler_fn = getattr(context.handler, method)
      except AttributeError:
        raise http.HttpNotImplemented("Method %s is unsupported for path %s" %
                                      (method, req.request_path))

      context.handler_access = baserlib.GetHandlerAccess(context.handler,
                                                         method)
      # Every handler must declare its required permissions (usually done
      # in the base class)
      if context.handler_access is None:
        raise AssertionError("Permissions definition missing")

      # Filled in by HandleRequest once the body has been read
      context.body_data = None

      req.private = context

    # Sanity-check the (possibly cached) context before handing it out
    ctx = req.private
    assert ctx.handler
    assert ctx.handler_fn
    assert ctx.handler_access is not None

    return ctx

  def AuthenticationRequired(self, req):
    """Determine whether authentication is required.

    """
    if self._reqauth:
      return True
    return bool(self._GetRequestContext(req).handler_access)

  def Authenticate(self, req, username, password):
    """Checks whether a user can access a resource.

    """
    ctx = self._GetRequestContext(req)

    user = self._user_fn(username)
    if not user:
      # Unknown user
      return False
    if not self.VerifyBasicAuthPassword(req, username, password,
                                        user.password):
      # Wrong password
      return False

    if ctx.handler_access and \
       not set(user.options).intersection(ctx.handler_access):
      # The handler requires permissions this user doesn't have
      raise http.HttpForbidden()

    return True

  def HandleRequest(self, req):
    """Handles a request.

    """
    ctx = self._GetRequestContext(req)

    if not req.request_body:
      ctx.body_data = None
    else:
      # RFC2616, 7.2.1: Any HTTP/1.1 message containing an entity-body
      # SHOULD include a Content-Type header field defining the media type
      # of that body; if the media type remains unknown, the recipient
      # SHOULD treat it as type "application/octet-stream".
      content_type = req.request_headers.get(http.HTTP_CONTENT_TYPE,
                                             http.HTTP_APP_OCTET_STREAM)
      if content_type.lower() != http.HTTP_APP_JSON.lower():
        raise http.HttpUnsupportedMediaType()

      try:
        ctx.body_data = serializer.LoadJson(req.request_body)
      except Exception:
        raise http.HttpBadRequest(message="Unable to parse JSON data")

    try:
      result = ctx.handler_fn()
    except rpcerr.TimeoutError:
      raise http.HttpGatewayTimeout()
    except rpcerr.ProtocolError as err:
      raise http.HttpBadGateway(str(err))

    req.resp_headers[http.HTTP_CONTENT_TYPE] = http.HTTP_APP_JSON

    return serializer.DumpJson(result)
class RapiUsers(object):
  """In-memory store for the RAPI users file.

  """
  def __init__(self):
    """Initializes this class.

    """
    # Maps username to a user record; None until a users file has been
    # loaded successfully.
    self._users = None

  def Get(self, username):
    """Checks whether a user exists.

    """
    if not self._users:
      return None
    return self._users.get(username, None)

  def Load(self, filename):
    """Loads a file containing users and passwords.

    @type filename: string
    @param filename: Path to file

    """
    logging.info("Reading users file at %s", filename)
    try:
      try:
        contents = utils.ReadFile(filename)
      except EnvironmentError as err:
        self._users = None
        if err.errno == errno.ENOENT:
          logging.warning("No users file at %s", filename)
        else:
          logging.warning("Error while reading %s: %s", filename, err)
        return False

      self._users = http.auth.ParsePasswordFile(contents)
    except Exception as err:  # pylint: disable=W0703
      # Parse errors must not crash the daemon; we just refuse the file.
      logging.error("Error while parsing %s: %s", filename, err)
      return False

    return True
class FileEventHandler(asyncnotifier.FileEventHandlerBase):
def __init__(self, wm, path, cb):
"""Initializes this class.
@param wm: Inotify watch manager
@type path: string
@param path: File path
@type cb: callable
@param cb: Function called on file change
|
ddeepak6992/Algorithms | Binary-Tree/largest_BST_in_a_binary_tree.py | Python | gpl-2.0 | 953 | 0.006296 | # -*- coding: utf-8 -*-
"""
Created on Thu Nov 19 17:38:50 2015
@author: deep
"""
from binaryTree import BTree, generateRandomTree, inorder
def largestBST(root):
    """Return info about the largest BST contained in the tree rooted at root.

    Returns a tuple (is_bst, size, min_value, max_value) where:
      is_bst    -- True iff the whole subtree rooted at root is a BST
      size      -- size of that BST, or of the largest BST found anywhere
                   inside the subtree when the subtree itself is not a BST
      min_value -- smallest value in the subtree (None when not a BST)
      max_value -- largest value in the subtree (None when not a BST)

    Fixes two defects of the original implementation: for a node with only
    one child, the bound for the missing side was either never assigned
    (raising NameError) or set to the wrong infinity, which broke the
    maxL <= root.value <= minR range check.
    """
    if root.left is None and root.right is None:
        return True, 1, root.value, root.value
    if root.left is not None:
        isBSTL, sizeL, minL, maxL = largestBST(root.left)
    else:
        # Missing left child: empty BST bounded by root's own value so the
        # maxL <= root.value check below always passes.
        isBSTL, sizeL, minL, maxL = True, 0, root.value, root.value
    if root.right is not None:
        isBSTR, sizeR, minR, maxR = largestBST(root.right)
    else:
        # Missing right child: empty BST bounded so root.value <= minR holds.
        isBSTR, sizeR, minR, maxR = True, 0, root.value, root.value
    if isBSTL and isBSTR and maxL <= root.value <= minR:
        return (True, sizeL + sizeR + 1,
                min(minL, root.value), max(maxR, root.value))
    # Not a BST at this node: report the largest BST found in either subtree.
    return False, max(sizeL, sizeR), None, None
root1 = BTree()
root1.value = 0
root2 = BTree()
root2.value = 0
generateRandomTree(root2,2)
generateRandomTree(root1,2)
root1.lef | t.left.left = root2
inorder(root1)
print largestBST(root1)
|
yosukesuzuki/kay-template | project/kay/__init__.py | Python | mit | 4,770 | 0.01174 | # -*- coding: utf-8 -*-
"""
Kay framework.
:Copyright: (c) 2009 Accense Technology, Inc.
Takashi Matsuo <tmatsuo@candit.jp>,
Ian Lewis <IanMLewis@gmail.com>
All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import logging
import settings
__version__ = "3.0.0"
__version_info__ = (3, 0, 0, 'final', 0)
# Directory containing the Kay framework package itself.
KAY_DIR = os.path.abspath(os.path.dirname(__file__))
# Libraries bundled with the framework.
LIB_DIR = os.path.join(KAY_DIR, 'lib')
# The user's project root is wherever settings.py lives.
PROJECT_DIR = os.path.abspath(os.path.dirname(settings.__file__))
# Project-local third-party libraries.
PROJECT_LIB_DIR = os.path.join(PROJECT_DIR, 'lib')
def setup_env(manage_py_env=False):
  """Configures app engine environment for command-line apps.

  Makes the Google App Engine SDK importable: if it is not already on
  sys.path it is searched for in well-known locations (a project-local
  copy, /usr/local, entries of $PATH and the default Windows install
  directory). Both a plain SDK directory and a zipped SDK are supported.
  Finally the project's own import path is prepared via setup().

  Args:
    manage_py_env: when True, also print the running Kay version
      (used by manage.py invocations).
  """
  # Try to import the appengine code from the system path.
  try:
    from google.appengine.api import apiproxy_stub_map
  except ImportError, e:
    # Not on the system path. Build a list of alternative paths where it
    # may be. First look within the project for a local copy, then look for
    # where the Mac OS SDK installs it.
    paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
             '/usr/local/google_appengine']
    for path in os.environ.get('PATH', '').replace(';', ':').split(':'):
      path = path.rstrip(os.sep)
      if path.endswith('google_appengine'):
        paths.append(path)
    if os.name in ('nt', 'dos'):
      prefix = '%(PROGRAMFILES)s' % os.environ
      paths.append(prefix + r'\Google\google_appengine')
    # Loop through all possible paths and look for the SDK dir.
    SDK_PATH = None
    for sdk_path in paths:
      sdk_path = os.path.realpath(sdk_path)
      if os.path.exists(sdk_path):
        SDK_PATH = sdk_path
        break
    if SDK_PATH is None:
      # The SDK could not be found in any known location.
      sys.stderr.write('The Google App Engine SDK could not be found!\n'
                       'Please visit http://kay-docs.shehas.net/'
                       ' for installation instructions.\n')
      sys.exit(1)
    # Add the SDK and the libraries within it to the system path.
    SDK_PATH = os.path.realpath(SDK_PATH)
    # If SDK_PATH points to a file, it could be a zip file.
    if os.path.isfile(SDK_PATH):
      import zipfile
      gae_zip = zipfile.ZipFile(SDK_PATH)
      lib_prefix = os.path.join('google_appengine', 'lib')
      lib = os.path.join(SDK_PATH, lib_prefix)
      pkg_names = []
      # Add all packages archived under lib in the SDK_PATH zip.
      for filename in sorted(e.filename for e in gae_zip.filelist):
        # A package must contain an __init__.py.
        if (filename.startswith(lib_prefix) and
            filename.endswith('__init__.py')):
          pkg_path = filename.replace(os.sep+'__init__.py', '')
          # Only the shortest unique path is treated as a package root;
          # deeper __init__.py files belong to its subpackages.
          for pkg_name in pkg_names:
            if pkg_path.startswith(pkg_name):
              break
          else:
            pkg_names.append(pkg_path)
      # Insert the populated EXTRA_PATHS into sys.path.
      EXTRA_PATHS = ([os.path.dirname(os.path.join(SDK_PATH, pkg_name))
                      for pkg_name in pkg_names]
                     + [os.path.join(SDK_PATH, 'google_appengine')])
      sys.path = EXTRA_PATHS + sys.path
      # Tweak dev_appserver so that zipimport and templates work well.
      from google.appengine.tools import dev_appserver
      # Make the GAE SDK grant access to opening the library zip.
      dev_appserver.FakeFile.ALLOWED_FILES.add(SDK_PATH)
      template_dir = 'google_appengine/templates/'
      dev_appserver.ApplicationLoggingHandler.InitializeTemplates(
        gae_zip.read(template_dir+dev_appserver.HEADER_TEMPLATE),
        gae_zip.read(template_dir+dev_appserver.SCRIPT_TEMPLATE),
        gae_zip.read(template_dir+dev_appserver.MIDDLE_TEMPLATE),
        gae_zip.read(template_dir+dev_appserver.FOOTER_TEMPLATE))
    # ... else it could be a directory.
    else:
      sys.path = [SDK_PATH] + sys.path
      from appcfg import EXTRA_PATHS as appcfg_EXTRA_PATHS
      from appcfg import GOOGLE_SQL_EXTRA_PATHS as appcfg_SQL_EXTRA_PATHS
      sys.path = sys.path + appcfg_EXTRA_PATHS + appcfg_SQL_EXTRA_PATHS
    # A conflicting 'google' package may already be imported; drop it so
    # the SDK's google.* namespace wins on the re-import below.
    if sys.modules.has_key('google'):
      del sys.modules['google']
    from google.appengine.api import apiproxy_stub_map
  setup()
  if not manage_py_env:
    return
  print 'Running on Kay-%s' % __version__
def setup():
  # Single entry point for environment preparation; currently only the
  # import path needs adjusting.
  setup_syspath()
def setup_syspath():
  """Prepend the project, framework-lib and project-lib directories to
  sys.path, skipping any that are already present.

  Each directory is prepended in turn, so the effective lookup order after
  this runs is PROJECT_LIB_DIR, LIB_DIR, PROJECT_DIR.
  """
  for directory in (PROJECT_DIR, LIB_DIR, PROJECT_LIB_DIR):
    if directory not in sys.path:
      # Rebind (rather than mutate) sys.path, matching the original code.
      sys.path = [directory] + sys.path
|
pthevenet/tcp-ip | tp03/Lab3_246645_247680/Part5_246645_247680.py | Python | gpl-3.0 | 464 | 0.00431 | imp | ort websocket
import sys
args = sys.argv[1:]
if (len(args) < 1):
print("USAGE : python Part5_246645_247680.py command")
sys.exit()
cmd = args[0]
ws = websocket.create_connection("ws://tcpip.epfl.ch:5006")
# Sending cmd
print("SENDING : ", cmd)
ws.send(cmd)
# Listening
cnt = 0
try:
while True:
# Receiving
result = ws.recv().decode()
| cnt += 1
print(result)
except:
ws.close()
print(cnt, "calls to recv")
|
GeorgeTG/easycolor | examples/style.py | Python | bsd-3-clause | 780 | 0 | from easycolor.style import wrap as _w
from easycolor.style import make_style, RESET

# _w wraps a piece of text in the given style and appends a style-reset.
print(_w('yellow on red', fg='yellow', bg='red'))
print('normal text')

# NOTE: for complex formatting use the parser module.

# Pre-built styles: each value is an escape-sequence prefix that stays in
# effect until changed or reset.
black_on_white = make_style(fg='black', bg='white')
# The white background has to be reset explicitly here.
red_bold = make_style(fg='red', bg='reset', opt='bold')
# Options can only be cleared by a total reset, but a single option can be
# removed by negating it ('!option').
green_bold_underline = make_style(fg='green', opt=('!bold', 'underline'))

styled = (black_on_white + 'Black on white' + red_bold + 'Red bold' +
          green_bold_underline + 'Green not bold underline' + RESET)
print(styled)
# everything was reset above
print('normal text')
JuBra/GEMEditor | GEMEditor/rw/test/test_reference_rw.py | Python | gpl-3.0 | 5,903 | 0.001694 | import pytest
from GEMEditor.model.classes.cobra import Model
from GEMEditor.model.classes.reference import Reference, Author
from GEMEditor.model.classes.annotation import Annotation
from GEMEditor.rw import *
from GEMEditor.rw.annotation import annotate_element_from_xml
from GEMEditor.rw.reference import add_references, parse_references
from lxml.etree import Element
class TestAddReference:
    @pytest.fixture(autouse=True)
    def setup_items(self):
        """Build a fully populated Reference (two authors, all identifiers)
        and register it on a fresh model before every test."""
        self.first_author = Author(lastname="FirstLastname",
                                   firstname="FirstFirstname",
                                   initials="FirstInitials")
        self.second_author = Author(lastname="SecondLastname",
                                    firstname="SecondFirstname",
                                    initials="SecondInitials")
        self.authors = [self.first_author, self.second_author]
        self.test_pmid = "123456"
        self.test_pmc = "PMC12345"
        self.test_doi = "10.1016/j.chemosphere.2016.03.102"
        self.test_url = "http://google.com"
        self.test_year = "1999"
        self.test_title = "Test title"
        self.test_journal = "Test journal"
        self.test_ref_id = "Test_id"
        self.reference = Reference(id=self.test_ref_id,
                                   pmid=self.test_pmid,
                                   pmc=self.test_pmc,
                                   doi=self.test_doi,
                                   url=self.test_url,
                                   year=self.test_year,
                                   title=self.test_title,
                                   journal=self.test_journal,
                                   authors=self.authors)
        self.model = Model("Test")
        self.model.references[self.reference.id] = self.reference
    def test_setup(self):
        """Sanity-check that the fixture stored all fields on the reference."""
        assert self.reference.pmid == self.test_pmid
        assert self.reference.pmc == self.test_pmc
        assert self.reference.doi == self.test_doi
        assert self.reference.url == self.test_url
        assert self.reference.year == self.test_year
        assert self.reference.title == self.test_title
        assert self.reference.journal == self.test_journal
        assert isinstance(self.reference.authors, list)
        assert len(self.authors) == len(self.reference.authors)
        assert self.authors[0] == self.first_author
        assert self.authors[1] == self.second_author
        assert len(self.model.references) == 1
def test_add_references(self):
root = Element("root")
add_references(root, self.model)
references_list_node = root.find(ge_listOfReferences)
assert references_list_node is not None
assert len(references_list_node) == 1
reference_node = references_list_node.find(ge_reference)
assert reference_node is not None
assert len(reference_node) == 2
author_list_node = reference_node.find(ge | _listOfAuthors)
assert author_list_node is not None
assert len(author_list_node) == 2
first_author_node = author_list_node.find(ge_author)
assert first_author_node is not None
assert first_author_node.get("firstname") == self.first_author.firstname
assert first_author_node.get("lastname | ") == self.first_author.lastname
assert first_author_node.get("initials") == self.first_author.initials
second_author_node = first_author_node.getnext()
assert second_author_node is not None
assert second_author_node.get("firstname") == self.second_author.firstname
assert second_author_node.get("lastname") == self.second_author.lastname
assert second_author_node.get("initials") == self.second_author.initials
annotation = annotate_element_from_xml(reference_node)
assert len(annotation) == 3
assert Annotation("pubmed", self.test_pmid) in annotation
assert Annotation("pmc", self.test_pmc) in annotation
assert Annotation("doi", self.test_doi) in annotation
assert reference_node.attrib == {"url": self.test_url,
"year": self.test_year,
"title": self.test_title,
"journal": self.test_journal,
"id": self.test_ref_id}
    def test_addition_of_optional_attributes_only_when_set(self):
        """A default-constructed Reference must serialize without optional
        attributes, without an author list and without an annotation."""
        root = Element("root")
        reference = Reference()
        model = Model("Testmodel")
        model.references[reference.id] = reference
        add_references(root, model)
        reference_node = root.find("/".join([ge_listOfReferences, ge_reference]))
        assert reference_node is not None
        assert reference_node.get("url") is None
        assert reference_node.find(ge_listOfAuthors) is None
        assert reference_node.get("year") is None
        assert reference_node.get("title") is None
        assert reference_node.get("journal") is None
        assert reference_node.get("id") == reference.id
        assert reference_node.find(sbml3_annotation) is None
    def test_parsing_consistency(self):
        """Writing references to XML and parsing them back must round-trip."""
        root = Element("root")
        add_references(root, self.model)
        references = parse_references(root)
        assert len(references) == 1
        reference = references[0]
        # The parser creates a new object rather than returning the original.
        assert reference is not self.reference
        assert reference.doi == self.test_doi
        assert reference.pmid == self.test_pmid
        assert reference.pmc == self.test_pmc
        assert reference.journal == self.test_journal
        assert reference.title == self.test_title
        assert reference.id == self.reference.id
        assert reference.authors[0] == self.first_author
        assert reference.authors[1] == self.second_author
    def test_annotation_optional(self):
        # TODO: implement -- optional-annotation handling is still untested;
        # the placeholder assert keeps the test green until then.
        assert True
|
carlosalberto/carbon-fork | lib/carbon/writer.py | Python | apache-2.0 | 5,226 | 0.011864 | """Copyright 2009 Chris Davis
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
import os
import time
from os.path import join, exists, dirname, basename
from carbon import state
from carbon.cache import MetricCache
from carbon.storage import getFilesystemPath, loadStorageSchemas, loadAggregationSchemas
from carbon.persister import WhisperPersister
from carbon.conf import settings
from carbon import log, events, instrumentation
from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from twisted.application.service import Service
# Soft rate-limiting state for database creations (shared by the writer).
lastCreateInterval = 0
createCount = 0
# Storage and aggregation rules are loaded once at import time.
schemas = loadStorageSchemas()
agg_schemas = loadAggregationSchemas()
# Once the cache drains below this fraction of its maximum, signal that
# space is available again.
CACHE_SIZE_LOW_WATERMARK = settings.MAX_CACHE_SIZE * 0.95
def optimalWriteOrder():
"Generates metrics with the most cached values first and applies a soft rate limit on new metrics"
global lastCreateInterval
global createCount
metrics = MetricCache.counts()
t = time.time()
metrics.sort(key=lambda item: item[1], reverse=True) # by queue size, descending
log.msg("Sorted %d cache queues in %.6f seconds" % (len(metrics), time.time() - t))
for metric, queueSize in metrics:
if state.cacheTooFull and MetricCache.size < CACHE_SIZE_LOW_WATERMARK:
events.cacheSpaceAvailable()
# Let our persister do its own check, and ignore the metric if needed.
if not persister.pre_get_datapoints_check(metric):
continue
try: # metrics can momentarily disappear from the MetricCache due to the implementation of MetricCache.store()
datapoints = MetricCache.pop(metric)
except KeyError:
log.msg("MetricCache contention, skipping %s update for now" % metric)
continue # we simply move on to the next metric when this race condition occurs
dbInfo = persister.get_dbinfo(metric)
dbIdentifier = dbInfo[0]
dbExists = dbInfo[1]
yield (metric, datapoints, dbIdentifier, dbExists)
def writeCachedDataPoints():
"Write datapoints until the MetricCache is completely empty"
updates = 0
lastSecond = 0
while MetricCache:
dataWritten = False
#for (metric, datapoints, dbFilePath, dbFileExists) in optimalWriteOrder():
for (metric, datapoints, dbIdentifier, dbExists) in optimalWriteOrder():
dataWritten = True
if not dbExists:
persister.create_db(metric)
instrumentation.increment('creates')
try:
t1 = time.time()
persi | ster.update_many(metric, datapoints, dbIdentifier)
t2 = time.time()
updateTime = t2 - t1
except:
log.msg("Error writing to %s" % (dbIdentifier))
log.err()
instrumentation.increment('errors')
else:
pointCount = len(da | tapoints)
instrumentation.increment('committedPoints', pointCount)
instrumentation.append('updateTimes', updateTime)
if settings.LOG_UPDATES:
log.updates("wrote %d datapoints for %s in %.5f seconds" % (pointCount, metric, updateTime))
# Rate limit update operations
thisSecond = int(t2)
if thisSecond != lastSecond:
lastSecond = thisSecond
updates = 0
else:
updates += 1
if updates >= settings.MAX_UPDATES_PER_SECOND:
time.sleep( int(t2 + 1) - t2 )
# Let the persister know it can flush
# (depends on the implementation)
persister.flush()
# Avoid churning CPU when only new metrics are in the cache
if not dataWritten:
time.sleep(0.1)
#TODO - Later let us modify which persister to use from the config files.
from pgpersister import PostgresqlPersister
persister = PostgresqlPersister()
def writeForever():
while reactor.running:
try:
writeCachedDataPoints()
except:
log.err()
time.sleep(1) # The writer thread only sleeps when the cache is empty or an error occurs
def reloadStorageSchemas():
global schemas
try:
schemas = loadStorageSchemas()
except:
log.msg("Failed to reload storage schemas")
log.err()
def reloadAggregationSchemas():
global agg_schemas
try:
schemas = loadAggregationSchemas()
except:
log.msg("Failed to reload aggregation schemas")
log.err()
class WriterService(Service):
def __init__(self):
self.storage_reload_task = LoopingCall(reloadStorageSchemas)
self.aggregation_reload_task = LoopingCall(reloadAggregationSchemas)
def startService(self):
self.storage_reload_task.start(60, False)
self.aggregation_reload_task.start(60, False)
reactor.callInThread(writeForever)
Service.startService(self)
def stopService(self):
self.storage_reload_task.stop()
self.aggregation_reload_task.stop()
Service.stopService(self)
|
smmbllsm/aleph | aleph/archive/archive.py | Python | mit | 1,182 | 0 | import os
import six
from aleph.util import checksum
class Archive(object):
def _get_file_path(self, meta):
ch = meta.content_hash
if ch is None:
raise ValueError("No content hash available.")
path = os.path.join(ch[:2], ch[2:4], ch[4:6], ch)
file_name = 'data'
if meta.file_name is not None:
file_name = meta.file_name
else:
if meta.extension is not None:
file_name = '%s.%s' % (file_name, meta | .extension)
return os.path.join(six.text_type(path), six.text_type(file_name))
def _update_metadata(self, filename, meta):
meta.content_hash = checksum(filename)
return meta
def upgrade(self):
"""Run maintenance on the store."""
pass
def archive_file(self, filename, meta, move=False):
"""Import the g | iven file into the archive.
Return an updated metadata object. If ``move`` is given, the
original file will not exist afterwards.
"""
pass
def load_file(self, meta):
pass
def cleanup_file(self, meta):
pass
def generate_url(self, meta):
return
|
GISAElkartea/tresna-kutxa | tk/material/migrations/0005_auto_20190105_0017.py | Python | agpl-3.0 | 956 | 0.002092 | # Generated by Django 2.1.4 on 2019-01-04 23:17
import django.contrib.postgres.fields.ranges
from django.db import migrations
from psycopg2.extras import NumericRange
class Migration(migrations.Migration):
dependencies = [
('material', '0004_auto_20181227_2232'),
]
operations = [
migrations.AddField(
model_name='activity',
name='num_people',
field=django.contrib.postgres.fields.ranges.IntegerRangeField(default=NumericRange(2, 30), verbose_name='number of people'),
),
mi | grations.RunSQL('commit;'),
migrations.RunSQL('update material_activity set num_people = int4range(min_people, max_people);'),
migrations.RunSQL('commit;'),
migrations.RemoveField(
model_name | ='activity',
name='max_people',
),
migrations.RemoveField(
model_name='activity',
name='min_people',
),
]
|
leppa/home-assistant | homeassistant/util/aiohttp.py | Python | apache-2.0 | 1,424 | 0 | """Utilities to help with aiohttp."""
import json
from typing import Any, Dict, Optional
from urllib.parse import parse_qsl
from multidict import CIMultiDict, MultiDict
class MockRequest:
"""Mock an aiohttp request."""
def __init__(
self,
content: bytes,
method: str = "GET",
status: int = 200,
headers: Optional[Dict[str, str]] = None,
query_string: Optional[str] = None,
url: str = "",
) -> None:
"""Initialize a request."""
self.method = method
self.url = url
self.status = status
self.headers: CIMultiDict[str] = CIMultiDict(headers or {})
self.query_string = query_string or ""
self._content = content
@property
def query(self) -> "MultiDict[str]":
"""Return a dictionary with the query variables."""
return MultiDict(parse_qsl(self.query_string, k | eep_blank_values=True))
@property
def _text(self) -> str:
"""Return the body as text."""
return self._content. | decode("utf-8")
async def json(self) -> Any:
"""Return the body as JSON."""
return json.loads(self._text)
async def post(self) -> "MultiDict[str]":
"""Return POST parameters."""
return MultiDict(parse_qsl(self._text, keep_blank_values=True))
async def text(self) -> str:
"""Return the body as text."""
return self._text
|
jylaxp/django | tests/forms_tests/tests/test_formsets.py | Python | bsd-3-clause | 57,919 | 0.002262 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.forms import (
CharField, DateField, FileField, Form, IntegerField, SplitDateTimeField,
ValidationError, formsets,
)
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.utils import ErrorList
from django.test import SimpleTestCase
from django.utils.encoding import force_text
class Choice(Form):
choice = CharField()
votes = IntegerField()
# FormSet allows us to use multiple instance of the same form on 1 page. For now,
# the best way to create a FormSet is by using the formset_factory function.
ChoiceFormSet = formset_factory(Choice)
class FavoriteDrinkForm(Form):
name = CharField()
class BaseFavoriteDrinksFormSet(BaseFormSet):
def clean(self):
seen_drinks = []
for drink in self.cleaned_data:
if drink['name'] in seen_drinks:
raise ValidationError('You may only specify a drink once.')
seen_drinks.append(drink['name'])
class EmptyFsetWontValidate(BaseFormSet):
def clean(self):
raise ValidationError("Clean method called")
# Let's define a FormSet that takes a list of favorite drinks, but raises an
# error if there are any duplicates. Used in ``test_clean_hook``,
# ``test_regression_6926`` & ``test_regression_12878``.
FavoriteDrinksFormSet = formset_factory(FavoriteDrinkForm,
formset=BaseFavoriteDrinksFormSet, | extra=3)
# Used in ``test_formset_splitdatetimefield``.
class SplitDateTimeForm(Form):
when = SplitDateTimeField(initial=datetime.datetime.now)
SplitDateTimeFormSet = formset_factory(SplitDateTimeForm)
class CustomKwargForm(Form):
def __init__(self, *args, **kwargs):
self.custom_kwarg = kwargs.pop('custom_kwarg')
super(CustomKwargForm, self).__init__(*args | , **kwargs)
class FormsFormsetTestCase(SimpleTestCase):
def make_choiceformset(self, formset_data=None, formset_class=ChoiceFormSet,
total_forms=None, initial_forms=0, max_num_forms=0, min_num_forms=0, **kwargs):
"""
Make a ChoiceFormset from the given formset_data.
The data should be given as a list of (choice, votes) tuples.
"""
kwargs.setdefault('prefix', 'choices')
kwargs.setdefault('auto_id', False)
if formset_data is None:
return formset_class(**kwargs)
if total_forms is None:
total_forms = len(formset_data)
def prefixed(*args):
args = (kwargs['prefix'],) + args
return '-'.join(args)
data = {
prefixed('TOTAL_FORMS'): str(total_forms),
prefixed('INITIAL_FORMS'): str(initial_forms),
prefixed('MAX_NUM_FORMS'): str(max_num_forms),
prefixed('MIN_NUM_FORMS'): str(min_num_forms),
}
for i, (choice, votes) in enumerate(formset_data):
data[prefixed(str(i), 'choice')] = choice
data[prefixed(str(i), 'votes')] = votes
return formset_class(data, **kwargs)
def test_basic_formset(self):
# A FormSet constructor takes the same arguments as Form. Let's create a FormSet
# for adding data. By default, it displays 1 blank form. It can display more,
# but we'll look at how to do so later.
formset = self.make_choiceformset()
self.assertHTMLEqual(
str(formset),
"""<input type="hidden" name="choices-TOTAL_FORMS" value="1" />
<input type="hidden" name="choices-INITIAL_FORMS" value="0" />
<input type="hidden" name="choices-MIN_NUM_FORMS" value="0" />
<input type="hidden" name="choices-MAX_NUM_FORMS" value="1000" />
<tr><th>Choice:</th><td><input type="text" name="choices-0-choice" /></td></tr>
<tr><th>Votes:</th><td><input type="number" name="choices-0-votes" /></td></tr>"""
)
# We treat FormSet pretty much like we would treat a normal Form. FormSet has an
# is_valid method, and a cleaned_data or errors attribute depending on whether all
# the forms passed validation. However, unlike a Form instance, cleaned_data and
# errors will be a list of dicts rather than just a single dict.
formset = self.make_choiceformset([('Calexico', '100')])
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'choice': 'Calexico'}])
# If a FormSet was not passed any data, its is_valid and has_changed
# methods should return False.
formset = self.make_choiceformset()
self.assertFalse(formset.is_valid())
self.assertFalse(formset.has_changed())
def test_form_kwargs_formset(self):
"""
Test that custom kwargs set on the formset instance are passed to the
underlying forms.
"""
FormSet = formset_factory(CustomKwargForm, extra=2)
formset = FormSet(form_kwargs={'custom_kwarg': 1})
for form in formset:
self.assertTrue(hasattr(form, 'custom_kwarg'))
self.assertEqual(form.custom_kwarg, 1)
def test_form_kwargs_formset_dynamic(self):
"""
Test that form kwargs can be passed dynamically in a formset.
"""
class DynamicBaseFormSet(BaseFormSet):
def get_form_kwargs(self, index):
return {'custom_kwarg': index}
DynamicFormSet = formset_factory(CustomKwargForm, formset=DynamicBaseFormSet, extra=2)
formset = DynamicFormSet(form_kwargs={'custom_kwarg': 'ignored'})
for i, form in enumerate(formset):
self.assertTrue(hasattr(form, 'custom_kwarg'))
self.assertEqual(form.custom_kwarg, i)
def test_form_kwargs_empty_form(self):
FormSet = formset_factory(CustomKwargForm)
formset = FormSet(form_kwargs={'custom_kwarg': 1})
self.assertTrue(hasattr(formset.empty_form, 'custom_kwarg'))
self.assertEqual(formset.empty_form.custom_kwarg, 1)
def test_formset_validation(self):
# FormSet instances can also have an error attribute if validation failed for
# any of the forms.
formset = self.make_choiceformset([('Calexico', '')])
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'votes': ['This field is required.']}])
def test_formset_has_changed(self):
# FormSet instances has_changed method will be True if any data is
# passed to his forms, even if the formset didn't validate
blank_formset = self.make_choiceformset([('', '')])
self.assertFalse(blank_formset.has_changed())
# invalid formset test
invalid_formset = self.make_choiceformset([('Calexico', '')])
self.assertFalse(invalid_formset.is_valid())
self.assertTrue(invalid_formset.has_changed())
# valid formset test
valid_formset = self.make_choiceformset([('Calexico', '100')])
self.assertTrue(valid_formset.is_valid())
self.assertTrue(valid_formset.has_changed())
def test_formset_initial_data(self):
# We can also prefill a FormSet with existing data by providing an ``initial``
# argument to the constructor. ``initial`` should be a list of dicts. By default,
# an extra blank form is included.
initial = [{'choice': 'Calexico', 'votes': 100}]
formset = self.make_choiceformset(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual(
'\n'.join(form_output),
"""<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>"""
)
# Let's simulate what would happen if we submitted this form.
formset = self.make_choiceformset([('Calexico', '100'), ('', '')], initial_forms=1)
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.for |
OCA/purchase-workflow | purchase_order_line_description_picking/tests/__init__.py | Python | agpl-3.0 | 59 | 0 | from . import test_purchase_order_line_description_ | picki | ng
|
Aaron1992/flask | flask/json.py | Python | bsd-3-clause | 8,303 | 0.000241 | # -*- coding: utf-8 -*-
"""
flask.jsonimpl
~~~~~~~~~~~~~~
Implementation helpers for the JSON support in Flask.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import io
import uuid
from datetime import datetime
from .globals import current_app, request
from ._compat import text_type, PY2
from werkzeug.http import http_date
from jinja2 import Markup
# Use the same json implementation as itsdangerous on which we
# depend anyways.
try:
from itsdangerous import simplejson as _json
except ImportError:
from itsdangerous import json as _json
# Figure out if simplejson escapes slashes. This behavior was changed
# from one version to another without reason.
_slash_escape = '\\/' not in | _json.dumps('/')
__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsaf | e_dump',
'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder',
'jsonify']
def _wrap_reader_for_text(fp, encoding):
if isinstance(fp.read(0), bytes):
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding)
return fp
def _wrap_writer_for_text(fp, encoding):
try:
fp.write('')
except TypeError:
fp = io.TextIOWrapper(fp, encoding)
return fp
class JSONEncoder(_json.JSONEncoder):
"""The default Flask JSON encoder. This one extends the default simplejson
encoder by also supporting ``datetime`` objects, ``UUID`` as well as
``Markup`` objects which are serialized as RFC 822 datetime strings (same
as the HTTP date format). In order to support more data types override the
:meth:`default` method.
"""
def default(self, o):
"""Implement this method in a subclass such that it returns a
serializable object for ``o``, or calls the base implementation (to
raise a ``TypeError``).
For example, to support arbitrary iterators, you could implement
default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
if isinstance(o, datetime):
return http_date(o)
if isinstance(o, uuid.UUID):
return str(o)
if hasattr(o, '__html__'):
return text_type(o.__html__())
return _json.JSONEncoder.default(self, o)
class JSONDecoder(_json.JSONDecoder):
"""The default JSON decoder. This one does not change the behavior from
the default simplejson encoder. Consult the :mod:`json` documentation
for more information. This decoder is not only used for the load
functions of this module but also :attr:`~flask.Request`.
"""
def _dump_arg_defaults(kwargs):
"""Inject default arguments for dump functions."""
if current_app:
kwargs.setdefault('cls', current_app.json_encoder)
if not current_app.config['JSON_AS_ASCII']:
kwargs.setdefault('ensure_ascii', False)
kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS'])
else:
kwargs.setdefault('sort_keys', True)
kwargs.setdefault('cls', JSONEncoder)
def _load_arg_defaults(kwargs):
"""Inject default arguments for load functions."""
if current_app:
kwargs.setdefault('cls', current_app.json_decoder)
else:
kwargs.setdefault('cls', JSONDecoder)
def dumps(obj, **kwargs):
"""Serialize ``obj`` to a JSON formatted ``str`` by using the application's
configured encoder (:attr:`~flask.Flask.json_encoder`) if there is an
application on the stack.
This function can return ``unicode`` strings or ascii-only bytestrings by
default which coerce into unicode strings automatically. That behavior by
default is controlled by the ``JSON_AS_ASCII`` configuration variable
and can be overridden by the simplejson ``ensure_ascii`` parameter.
"""
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
rv = _json.dumps(obj, **kwargs)
if encoding is not None and isinstance(rv, text_type):
rv = rv.encode(encoding)
return rv
def dump(obj, fp, **kwargs):
"""Like :func:`dumps` but writes into a file object."""
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
if encoding is not None:
fp = _wrap_writer_for_text(fp, encoding)
_json.dump(obj, fp, **kwargs)
def loads(s, **kwargs):
"""Unserialize a JSON object from a string ``s`` by using the application's
configured decoder (:attr:`~flask.Flask.json_decoder`) if there is an
application on the stack.
"""
_load_arg_defaults(kwargs)
if isinstance(s, bytes):
s = s.decode(kwargs.pop('encoding', None) or 'utf-8')
return _json.loads(s, **kwargs)
def load(fp, **kwargs):
"""Like :func:`loads` but reads from a file object.
"""
_load_arg_defaults(kwargs)
if not PY2:
fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8')
return _json.load(fp, **kwargs)
def htmlsafe_dumps(obj, **kwargs):
"""Works exactly like :func:`dumps` but is safe for use in ``<script>``
tags. It accepts the same arguments and returns a JSON string. Note that
this is available in templates through the ``|tojson`` filter which will
also mark the result as safe. Due to how this function escapes certain
characters this is safe even if used outside of ``<script>`` tags.
The following characters are escaped in strings:
- ``<``
- ``>``
- ``&``
- ``'``
This makes it safe to embed such strings in any place in HTML with the
notable exception of double quoted attributes. In that case single
quote your attributes or HTML escape it in addition.
.. versionchanged:: 0.10
This function's return value is now always safe for HTML usage, even
if outside of script tags or if used in XHTML. This rule does not
hold true when using this function in HTML attributes that are double
quoted. Always single quote attributes if you use the ``|tojson``
filter. Alternatively use ``|tojson|forceescape``.
"""
rv = dumps(obj, **kwargs) \
.replace(u'<', u'\\u003c') \
.replace(u'>', u'\\u003e') \
.replace(u'&', u'\\u0026') \
.replace(u"'", u'\\u0027')
if not _slash_escape:
rv = rv.replace('\\/', '/')
return rv
def htmlsafe_dump(obj, fp, **kwargs):
"""Like :func:`htmlsafe_dumps` but writes into a file object."""
fp.write(unicode(htmlsafe_dumps(obj, **kwargs)))
def jsonify(*args, **kwargs):
"""Creates a :class:`~flask.Response` with the JSON representation of
the given arguments with an `application/json` mimetype. The arguments
to this function are the same as to the :class:`dict` constructor.
Example usage::
from flask import jsonify
@app.route('/_get_current_user')
def get_current_user():
return jsonify(username=g.user.username,
email=g.user.email,
id=g.user.id)
This will send a JSON response like this to the browser::
{
"username": "admin",
"email": "admin@localhost",
"id": 42
}
For security reasons only objects are supported toplevel. For more
information about this, have a look at :ref:`json-security`.
This function's response will be pretty printed if it was not requested
with ``X-Requested-With: XMLHttpRequest`` to simplify debugging unless
the ``JSONIFY_PRETTYPRINT_REGULAR`` config parameter is set to false.
Compressed (not pretty) formatting currently means no indents and no
spaces after separators.
.. versionadded:: 0.2
"""
indent = None
separators = (',', ':')
if current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] \
and not request.is_xhr:
indent = 2
separators = (', ', ': ')
return current_app.response_class(dumps(dict(*args, **kwargs),
indent=indent, separators=separators), |
perfectsearch/sandman | test/buildscripts/import_test.py | Python | mit | 1,871 | 0.003207 | #!/usr/bin/env python
#
# $Id: ImportTest.py 4183 2011-01-03 20:17:03Z dhh1969 $
#
# Proprietary and confidential.
# Copyright $Date:: 2011#$ Perfect Search Corporation.
# All rights reserved.
#
import unittest, os, tempfile, subprocess, sys
from testsupport import checkin, CODEROOT
CODE_FOLDER = CODEROOT + '/buildscripts'
@checkin
class ImportTest(unittest.TestCase):
def testImports(self):
temp_dir = tempfile.mkdtemp()
tempScript = os.path.join(temp_dir, 'import_test.py')
#print('Writing test import script %s' % tempScript)
f = open(tempScript, 'wt')
f.write('import sys, traceback\n')
f.write('oldRestOfPath | = sys.path[1:]\n')
f.write('newPath = [sys.path[0], "%s"]\n' % CODE_FOLDER)
f.write('for item in oldRestOfPath:\n\tnewPath.append(item)\n')
f.write('sys.path = newPath\n')
f.write('exitCode = 0\n')
items = [i for i in os.listdir(CODE_FOLDER) if i.endswith('.py')]
for item in items:
if ' ' in item or '-' in item:
f.write('print("%s cannot be imported because its name would cause a syntax error.")\n' % | item)
f.write('exitCode = 1\n')
else:
moduleName = item[0:-3]
f.write('try:\n\timport %s\nexcept:\n' % moduleName)
f.write('\ttraceback.print_exc()\n')
f.write('\texitCode = 1\n')
f.write('if exitCode: print("IMPORTS FAILED")\n')
f.write('sys.exit(exitCode)')
f.close()
p = subprocess.Popen('python "%s"' % tempScript, stdout=subprocess.PIPE, shell=True, stderr=subprocess.STDOUT)
out = p.stdout.read()
exitCode = p.wait()
os.remove(tempScript)
os.rmdir(temp_dir)
if exitCode != 0:
self.fail(out)
if __name__ == '__main__':
unittest.main()
|
lingdb/CoBL-public | ielex/lexicon/migrations/0145_fix_language_distributions.py | Python | bsd-2-clause | 3,447 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
from django.db import migrations
idDistributionMap = {
80: {'normalMean': 3450, 'normalStDev': 125},
133: {'normalMean': 3350, 'normalStDev': 75},
134: {'normalMean': 2350, 'normalStDev': 50},
135: {'normalMean': 3500, 'normalStDev': 100},
81: {'normalMean': 1375, 'normalStDev': 75},
82: {'normalMean': 1350, 'normalStDev': 75},
173: {'normalMean': 3350, 'normalStDev': 55},
110: {'normalMean': 2400, 'normalStDev': 32},
177: {'normalMean': 1900, 'normalStDev': 20},
188: {'normalMean': 40, 'normalStDev': 10},
129: {'normalMean': 1550, 'normalStDev': 25},
105: {'logNormalOffset': 3000, 'logNormalMean': 700,
'logNormalStDev': 0.8},
228: {'normalMean': 2750, 'normalStDev': 100},
227: {'normalMean': 2700, 'normalStDev': 200},
162: {'normalMean': 2450, 'normalStDev': 75},
128: {'normalMean': 3200, 'normalStDev': 300},
174: {'normalMean': 1500, 'normalStDev': 125},
136: {'normalMean': 1350, 'normalStDev': 150},
237: {'normalMean': 2450, 'normalStDev': 75},
131: {'normalMean': 2450, 'normalStDev': 75},
178: {'normalMean': 1600, 'normalStDev': 150},
179: {'normalMean': 2100, 'normalStDev': 150},
108: {'normalMean': 500, 'normalStDev': 50},
100: {'normalMean': 1000, 'normalStDev': 50},
252: {'normalMean': 351, 'normalStDev': 1},
251: {'normalMean': 405, 'normalStDev': 1},
250: {'normalMean': 330, 'normalStDev': 1},
245: {'normalMean': 795, 'normalStDev': 107},
238: {'normalMean': 600, 'normalStDev': 50},
176: {'normalMean': 320, 'normalStDev': 20},
243: {'normalMean': 656, 'normalStDev': 31},
107: {'normalMean': 1650, 'normalStDev': 25},
109: {'normalMean': 775, 'normalStDev': 75},
157: {'normalMean': 650, 'normalStDev': 75},
160: {'normalMean': 9999, 'normalStDev': 999},
126: {'normalMean': 1050, 'normalStDev': 50},
99: {'normalMean': 1050, 'normalStDev': 50},
112: {'normalMean': 2050, 'normalStDev': 75},
207: {'normalMean': 130, 'normalStDev': 15},
209: {'normalMean': 900, 'normalStDev': 150},
203: {'normalMean': 700, 'normalStDev': 50},
204: {'normalMean': 750, 'normalStDev': 75},
130: {'normalMean': 2200, 'normalStDev': 75},
132: {'normalMean': 2150, 'normalStDev': 75},
140: {'normalMean': 2050, 'normalStDev': 125},
148: {'normalMean': 1050, 'normalStDev': 75},
172: {'normalMean': 450, 'normalStDev': 50},
149: {'normalMean': 1050, 'normalStDev': 75},
101: {'normalMean': 700, 'normalStDev': 100},
231: {'normalMean': 700, 'normalStDev': 100},
230: {'normalMean': 300, 'normalStDev': 50},
150: {'normalMean': 1050, 'normalStDev': 75},
127: {'normalMean': 1250, 'normalStDev': 50},
147: {'normalMean': 200, 'normalStDev': 30}}
def forwards_func(apps, schema_editor):
Language = apps.get_model("lexicon", "Language")
languages = Language.objects.filter(
id__in=set(idDistributionMap.keys())).all()
for language in languages:
for k, v in idDistributionMap[language.id].items():
setattr(language, k, v)
language.save()
def reverse_func | (apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [('lexicon', '0144_clean_lexeme_romanised_2')]
operations = [
migrations.R | unPython(forwards_func, reverse_func),
]
|
ehabkost/virt-test | qemu/tests/9p.py | Python | gpl-2.0 | 1,818 | 0.00165 | import os,logging
from autotest.client.shared import error
from virttest import utils_test
def run_9p(test, params, env):
"""
Run an autotest test inside a guest.
@param test: kvm test object.
@param params: Dictionary with test parameters.
@param env: Dictionary with the test environment.
"""
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
timeout = int(params.get("login_timeout", 360))
session = vm.wait_for_login(timeout=timeout)
mount_dir = params.get("9p_mount_dir")
if mount_dir is None:
logging.info("User Variable for mount dir is not set")
else:
session.cmd("mkdir -p %s" % mount_dir)
mount_option = " trans=virtio"
p9_proto_version = params.get("9p_proto_version", "9p2000.L")
mount_option += ",version=" + p9_proto_version
guest_cache = params.get("9p_guest_cache")
if guest_cache == "yes":
mount_option += ",cache=loose"
posix_acl = params.get("9p_posix_acl")
if posix_acl == "yes":
mount_option += ",posixacl"
logging.info("Mounting 9p mount point with options %s" % mount_option)
cmd = "mount -t 9p -o %s autotest_tag %s" % (mount_option, mount_d | ir)
mount_status = session.get_command_status(cmd)
if (mount_status != 0):
logging.error("mount failed")
raise error.TestFail('mount failed.')
# Collect test parameters
timeout = | int(params.get("test_timeout", 14400))
control_path = os.path.join(test.virtdir, "autotest_control",
params.get("test_control_file"))
outputdir = test.outputdir
utils_test.run_autotest(vm, session, control_path,
timeout, outputdir, params)
|
Waino/LeBLEU | lebleu/__init__.py | Python | bsd-2-clause | 670 | 0.004478 | #!/usr/bin/env python
"""
LeBLEU - Letter-edit / Levenshtein BLEU
"""
import logging
#__all__ = | []
__version__ = '0.0.1'
__author__ = 'Stig-Arne Gronroos'
__author_email__ = "stig-arne.gronroos@aalto.fi"
_logger = logging.getLogger(__name__)
def get_version():
return __version__
# The public api imports need to be at the end of the file,
# so that the package global names are available to the mod | ules
# when they are imported.
from .lebleu import LeBLEU
# Convenience functions
def eval_single(*args, **kwargs):
lb = LeBLEU(**kwargs)
return lb.eval_single(*args)
def eval(*args, **kwargs):
lb = LeBLEU(**kwargs)
return lb.eval(*args)
|
torbjoernk/easybuild-framework | easybuild/framework/easyblock.py | Python | gpl-2.0 | 93,981 | 0.003458 | # #
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Generic EasyBuild support for building and installing software.
The EasyBlock class should serve as a base class for all easyblocks.
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
@author: Toon Willems (Ghent University)
@author: Ward Poelmans (Ghent University)
@author: Fotis Georgatos (Uni.Lu, NTUA)
"""
import copy
import glob
import inspect
import os
import shutil
import stat
import time
import traceback
from distutils.version import LooseVersion
from vsc.utils import fancylogger
from vsc.utils.missing import get_class_for
import easybuild.tools.environment as env
from easybuild.tools import config, filetools
from easybuild.framework.easyconfig import EASYCONFIGS_PKG_SUBDIR
from easybuild.framework.easyconfig.easyconfig import ITERATE_OPTIONS, EasyConfig, ActiveMNS
from easybuild.framework.easyconfig.easyconfig import get_easyblock_class, get_module_path, resolve_template
from easybuild.framework.easyconfig.parser import fetch_parameters_from_easyconfig
from easybuild.framework.easyconfig.tools import get_paths_for
from easybuild.framework.easyconfig.templates import TEMPLATE_NAMES_EASYBLOCK_RUN_STEP
from easybuild.tools.build_details import get_build_stats
from easybuild.tools.build_log import EasyBuildError, print_error, print_msg
from easybuild.tools.config import build_option, build_path, get_log_filename, get_repository, get_repositorypath
from easybuild.tools.config import install_path, log_path, package_path, source_paths
from easybuild.tools.environment import restore_env
from easybuild.tools.filetools import DEFAULT_CHECKSUM
from easybuild.tools.filetools import adjust_permissions, apply_patch, convert_name, download_file, encode_class_name
from easybuild.tools.filetools import extract_file, mkdir, move_logs, read_file, rmtree2
from easybuild.tools.filetools import write_file, compute_checksum, verify_checksum
from easybuild.tools.run import run_cmd
from easybuild.tools.jenkins import write_to_xml
from easybuild.tools.module_generator import ModuleGeneratorLua, ModuleGeneratorTcl, module_generator
from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version
from easybuild.tools.modules import ROOT_ENV_VAR_NAME_PREFIX, VERSION_ENV_VAR_NAME_PREFIX, DEVEL_ENV_VAR_NAME_PREFIX
from easybuild.tools.modules import get_software_root, modules_tool
from easybuild.tools.package.utilities import package
from easybuild.tools.repository.repository import init_repository
from easybuild.tools.toolchain import DUMMY_TOOLCHAIN_NAME
from easybuild.tools.systemtools import det_parallelism, use_group
from easybuild.tools.utilities import remove_unwanted_chars
from easybuild.tools.version import this_is_easybuild, VERBOSE_VERSION, VERSION
# Symbolic names for the individual steps of the build/install procedure.
BUILD_STEP = 'build'
CLEANUP_STEP = 'cleanup'
CONFIGURE_STEP = 'configure'
EXTENSIONS_STEP = 'extensions'
FETCH_STEP = 'fetch'
MODULE_STEP = 'module'
PACKAGE_STEP = 'package'
PATCH_STEP = 'patch'
PERMISSIONS_STEP = 'permissions'
POSTPROC_STEP = 'postproc'
PREPARE_STEP = 'prepare'
READY_STEP = 'ready'
SANITYCHECK_STEP = 'sanitycheck'
SOURCE_STEP = 'source'
TEST_STEP = 'test'
TESTCASES_STEP = 'testcases'
# Steps that are still run when only the module file is being (re)generated.
MODULE_ONLY_STEPS = [MODULE_STEP, PREPARE_STEP, READY_STEP, SANITYCHECK_STEP]
# Module-level logger, for use before an EasyBlock instance (with its own log) exists.
_log = fancylogger.getLogger('easyblock')
class EasyBlock(object):
"""Generic support for building and installing software, base class for actual easyblocks."""
# static class method for extra easyconfig parameter definitions
# this makes it easy to access the information without needing an instance
# subclasses of EasyBlock should call this method with a dictionary
@staticmethod
def extra_options(extra=None):
"""
Extra options method which will be passed to the EasyConfig constructor.
"""
if extra is None:
extra = {}
if not isinstance(extra, dict):
_log.nosupport("Obtained 'extra' value of type '%s' in extra_options, should be 'dict'" % type(extra), '2.0')
return extra
#
# INIT
#
def __init__(self, ec):
"""
Initialize the EasyBlock instance.
@param ec: a parsed easyconfig file (EasyConfig instance)
"""
# keep track of original working directory, so we can go back there
self.orig_workdir = os.getcwd()
# list of patch/source files, along with checksums
self.patches = []
self.src = []
self.checksums = []
# build/install directories
self.builddir = None
self.installdir = None
# extensions
self.exts = None
self.exts_all = None
self.ext_instances = []
self.skip = None
self.module_extra_extensions = '' # extra stuff for module file required by extensions
# modules interface with default MODULEPATH
self.modules_tool = modules_tool()
# module generator
self.module_generator = module_generator(self, fake=True)
# modules footer
self.modules_footer = None
modules_footer_path = build_option('modules_footer')
if modules_footer_path is not None:
self.modules_footer = read_file(modules_footer_path)
# easyconfig for this application
if isinstance(ec, EasyConfig):
self.cfg = ec
else:
raise EasyBuildError("Value of incorrect type passed to EasyBlock constructor: %s ('%s')", type(ec), ec)
# determine install subdirectory, based on module name
self.install_subdir = None
# indicates whether build should be performed in installation dir
self.build_in_installdir = self.cfg['buildininstalldir']
# logging
self.log = None
self.logfile = None
self.logdebug = build_option('debug')
self.postmsg = '' # allow a post message to be set, which can be shown as last output
# list of | loaded modules
self.loaded_modules = []
# iterate configure/build/options
self.iter_opts = {}
# sanity check fail error messages to report (if any)
self.sanity_check_fail_msgs = []
# robot path
self.robot_path = build_option('robot_pa | th')
# original module path
self.orig_modulepath = os.getenv('MODULEPATH')
# keep track of initial environment we start in, so we can restore it if needed
self.initial_environ = copy.deepcopy(os.environ)
# initialize logger
self._init_log()
# should we keep quiet?
self.silent = build_option('silent')
# try and use the specified group (if any)
group_name = build_option('group')
if self.cfg['group'] is not None:
self.log.warning("Group spec '%s' is overriding config group '%s'." % (self.cfg['group'], group_name))
group_name = self.cfg['group']
self.group = None
if group_name is not None:
self.group = use_group(group_name)
# generate |
quickresolve/accel.ai | flask-aws/lib/python2.7/site-packages/ebcli/controllers/create.py | Python | mit | 18,430 | 0.001302 | # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied | . See the License for the specific
# | language governing permissions and limitations under the License.
import re
import argparse
import os
from ..core import io, fileoperations, hooks
from ..core.abstractcontroller import AbstractBaseController
from ..lib import elasticbeanstalk, utils
from ..objects.exceptions import NotFoundError, AlreadyExistsError, \
InvalidOptionsError
from ..objects.requests import CreateEnvironmentRequest
from ..objects.tier import Tier
from ..operations import saved_configs, commonops, createops, composeops
from ..resources.strings import strings, prompts, flag_text
class CreateController(AbstractBaseController):
    class Meta:
        # cement controller metadata for `eb create`: command label, help
        # texts, and the complete argparse argument table.
        label = 'create'
        description = strings['create.info']
        epilog = strings['create.epilog']
        arguments = [
            (['environment_name'], dict(
                action='store', nargs='?', default=None,
                help=flag_text['create.name'])),
            (['-m', '--modules'], dict(nargs='*', help=flag_text['create.modules'])),
            (['-g', '--env-group-suffix'], dict(help=flag_text['create.group'])),
            (['-c', '--cname'], dict(help=flag_text['create.cname'])),
            (['-t', '--tier'], dict(help=flag_text['create.tier'])),
            (['-i', '--instance_type'], dict(
                help=flag_text['create.itype'])),
            (['-p', '--platform'], dict(help=flag_text['create.platform'])),
            (['-s', '--single'], dict(
                action='store_true', help=flag_text['create.single'])),
            (['--sample'], dict(
                action='store_true', help=flag_text['create.sample'])),
            (['-d', '--branch_default'], dict(
                action='store_true', help=flag_text['create.default'])),
            (['-ip', '--instance_profile'], dict(
                help=flag_text['create.iprofile'])),
            (['-sr', '--service-role'], dict(
                help=flag_text['create.servicerole'])),
            (['--version'], dict(help=flag_text['create.version'])),
            (['-k', '--keyname'], dict(help=flag_text['create.keyname'])),
            (['--scale'], dict(type=int, help=flag_text['create.scale'])),
            (['-nh', '--nohang'], dict(
                action='store_true', help=flag_text['create.nohang'])),
            (['--timeout'], dict(type=int, help=flag_text['general.timeout'])),
            (['--tags'], dict(help=flag_text['create.tags'])),
            (['--envvars'], dict(help=flag_text['create.envvars'])),
            (['--cfg'], dict(help=flag_text['create.config'])),
            (['--source'], dict(type=utils.check_source, help=flag_text['create.source'])),
            (['--elb-type'], dict(help=flag_text['create.elb_type'])),
            (['-db', '--database'], dict(
                action="store_true", help=flag_text['create.database'])),
            ## Additional database options, hidden from --help output
            (['-db.user', '--database.username'], dict(dest='db_user',
                                                       help=argparse.SUPPRESS)),
            (['-db.pass', '--database.password'],
                dict(dest='db_pass', help=argparse.SUPPRESS)),
            (['-db.i', '--database.instance'],
                dict(dest='db_instance', help=argparse.SUPPRESS)),
            (['-db.version', '--database.version'],
                dict(dest='db_version', help=argparse.SUPPRESS)),
            (['-db.size', '--database.size'],
                dict(type=int, dest='db_size', help=argparse.SUPPRESS)),
            (['-db.engine', '--database.engine'],
                dict(dest='db_engine', help=argparse.SUPPRESS)),
            ## VPC options, hidden from --help output
            (['--vpc'], dict(action='store_true',
                             help=flag_text['create.vpc'])),
            (['--vpc.id'], dict(dest='vpc_id', help=argparse.SUPPRESS)),
            (['--vpc.ec2subnets'], dict(
                dest='vpc_ec2subnets', help=argparse.SUPPRESS)),
            (['--vpc.elbsubnets'], dict(
                dest='vpc_elbsubnets', help=argparse.SUPPRESS)),
            (['--vpc.elbpublic'], dict(
                action='store_true', dest='vpc_elbpublic',
                help=argparse.SUPPRESS)),
            (['--vpc.publicip'], dict(
                action='store_true', dest='vpc_publicip',
                help=argparse.SUPPRESS)),
            (['--vpc.securitygroups'], dict(
                dest='vpc_securitygroups', help=argparse.SUPPRESS)),
            (['--vpc.dbsubnets'], dict(
                dest='vpc_dbsubnets', help=argparse.SUPPRESS)),
        ]
def do_command(self):
# save command line args
env_name = self.app.pargs.environment_name
modules = self.app.pargs.modules
if modules and len(modules) > 0:
self.compose_multiple_apps()
return
group = self.app.pargs.env_group_suffix
cname = self.app.pargs.cname
tier = self.app.pargs.tier
itype = self.app.pargs.instance_type
solution_string = self.app.pargs.platform
single = self.app.pargs.single
iprofile = self.app.pargs.instance_profile
service_role = self.app.pargs.service_role
label = self.app.pargs.version
branch_default = self.app.pargs.branch_default
key_name = self.app.pargs.keyname
sample = self.app.pargs.sample
nohang = self.app.pargs.nohang
tags = self.app.pargs.tags
envvars = self.app.pargs.envvars
scale = self.app.pargs.scale
timeout = self.app.pargs.timeout
cfg = self.app.pargs.cfg
elb_type = self.app.pargs.elb_type
source = self.app.pargs.source
interactive = False if env_name else True
provided_env_name = env_name is not None
if sample and label:
raise InvalidOptionsError(strings['create.sampleandlabel'])
if single and scale:
raise InvalidOptionsError(strings['create.singleandsize'])
app_name = self.get_app_name()
# get tags
tags = get_and_validate_tags(tags)
#load solution stack
if not solution_string:
solution_string = commonops.get_default_solution_stack()
# Test out sstack and tier before we ask any questions (Fast Fail)
if solution_string:
try:
solution = commonops.get_solution_stack(solution_string)
except NotFoundError:
raise NotFoundError('Solution stack ' + solution_string +
' does not appear to be valid')
if tier:
if 'worker' in tier.lower() and cname:
raise InvalidOptionsError(strings['worker.cname'])
try:
tier = Tier.parse_tier(tier)
except NotFoundError:
raise NotFoundError('Provided tier ' + tier + ' does not '
'appear to be valid')
if cname:
if not commonops.is_cname_available(cname):
raise AlreadyExistsError(strings['cname.unavailable'].
replace('{cname}', cname))
# If we still dont have what we need, ask for it
if not solution_string:
solution = commonops.prompt_for_solution_stack()
if solution.platform == 'Multi-container Docker' and iprofile is None:
io.log_warning(prompts['ecs.permissions'])
if not env_name:
# default is app-name plus '-dev'
default_name = app_name + '-dev'
current_environments = commonops.get_all_env_names()
unique_name = utils.get_unique_name(default_name,
current_environments)
|
michaelnt/doorstop | doorstop/cli/tests/test_utilities.py | Python | lgpl-3.0 | 7,856 | 0 | # SPDX-License-Identifier: LGPL-3.0-only
"""Unit tests for the doorstop.cli.utilities module."""
import sys
import unittest
from argparse import ArgumentTypeError
from unittest.mock import Mock, patch
from warnings import catch_warnings
from doorstop import common, settings
from doorstop.cli import utilities
from doorstop.cli.tests import SettingsTestCase
class TestCapture(unittest.TestCase):
    """Unit tests for the `capture` context manager."""

    def test_success(self):
        """Verify a success can be captured."""
        with utilities.capture() as outcome:
            pass  # body completes without raising
        self.assertTrue(outcome)

    def test_failure(self):
        """Verify a failure can be captured."""
        with utilities.capture() as outcome:
            raise common.DoorstopError
        self.assertFalse(outcome)

    def test_failure_uncaught(self):
        """Verify a failure can be left uncaught."""
        with self.assertRaises(common.DoorstopError):
            with utilities.capture(catch=False) as outcome:
                raise common.DoorstopError
        self.assertFalse(outcome)
class TestConfigureSettings(SettingsTestCase):
    """Unit tests for the `configure_settings` function."""
    def test_configure_settings(self):
        """Verify settings are parsed correctly."""
        # minimal argparse namespace: only the attributes the parser reads
        args = Mock()
        args.reorder = False
        args.beta = None
        # Act
        with catch_warnings(record=True) as warnings:
            utilities.configure_settings(args)
        # Assert: a bare Mock namespace should leave every flag disabled
        self.assertFalse(settings.REFORMAT)
        self.assertFalse(settings.REORDER)
        self.assertFalse(settings.CHECK_LEVELS)
        self.assertFalse(settings.CHECK_REF)
        self.assertFalse(settings.CHECK_CHILD_LINKS)
        self.assertFalse(settings.PUBLISH_CHILD_LINKS)
        self.assertFalse(settings.CHECK_SUSPECT_LINKS)
        self.assertFalse(settings.CHECK_REVIEW_STATUS)
        self.assertFalse(settings.PUBLISH_BODY_LEVELS)
        self.assertFalse(settings.WARN_ALL)
        self.assertFalse(settings.ERROR_ALL)
        self.assertTrue(settings.ENABLE_HEADERS)
        if sys.version_info[:2] == (3, 3):
            pass  # warnings appear to be shown inconsistently in Python 3.3
        else:
            # a deprecation-style warning about --no-body-levels is expected
            self.assertIn("--no-body-levels", str(warnings[-1].message))
class TestLiteralEval(unittest.TestCase):
    """Unit tests for the `literal_eval` function."""

    def test_literal_eval(self):
        """Verify a string can be evaluated as a Python literal."""
        self.assertEqual(utilities.literal_eval("42.0"), 42.0)

    def test_literal_eval_invalid_err(self):
        """Verify an invalid literal calls the error function."""
        on_error = Mock()
        utilities.literal_eval("1/", error=on_error)
        self.assertEqual(on_error.call_count, 1)

    @patch('doorstop.cli.utilities.log.critical')
    def test_literal_eval_invalid_log(self, mock_critical):
        """Verify an invalid literal logs an error."""
        utilities.literal_eval("1/")
        self.assertEqual(mock_critical.call_count, 1)
class TestGetExt(unittest.TestCase):
    """Unit tests for the `get_ext` function."""
    def test_get_ext_stdout_document(self):
        """Verify a default output extension can be selected."""
        # spec=[] means no format flags (e.g. args.html) are set
        args = Mock(spec=[])
        error = Mock()
        # Act
        ext = utilities.get_ext(args, error, '.out', '.file')
        # Assert
        self.assertEqual(0, error.call_count)
        self.assertEqual('.out', ext)
    def test_get_ext_stdout_document_override(self):
        """Verify a default output extension can be overridden."""
        # an explicit --html flag wins over the default extension
        args = Mock(spec=['html'])
        args.html = True
        error = Mock()
        # Act
        ext = utilities.get_ext(args, error, '.out', '.file')
        # Assert
        self.assertEqual(0, error.call_count)
        self.assertEqual('.html', ext)
    @patch('os.path.isdir', Mock(return_value=True))
    def test_get_ext_file_document_to_directory(self):
        """Verify a path is required for a single document."""
        args = Mock(spec=['path'])
        args.path = 'path/to/directory'
        error = Mock()
        # Act
        utilities.get_ext(args, error, '.out', '.file')
        # Assert: a directory target for a single document is an error
        self.assertNotEqual(0, error.call_count)
    def test_get_ext_file_document(self):
        """Verify a specified file extension can be selected."""
        args = Mock(spec=['path'])
        args.path = 'path/to/file.cust'
        error = Mock()
        # Act
        ext = utilities.get_ext(args, error, '.out', '.file')
        # Assert: the extension comes from the given path
        self.assertEqual(0, error.call_count)
        self.assertEqual('.cust', ext)
    def test_get_ext_file_tree(self):
        """Verify a specified file extension can be selected."""
        args = Mock(spec=['path'])
        args.path = 'path/to/directory'
        error = Mock()
        # Act
        ext = utilities.get_ext(args, error, '.out', '.file', whole_tree=True)
        # Assert: whole-tree output falls back to the default file extension
        self.assertEqual(0, error.call_count)
        self.assertEqual('.file', ext)
    def test_get_ext_file_document_no_extension(self):
        """Verify an extension is required on single file paths."""
        args = Mock(spec=['path'])
        args.path = 'path/to/file'
        error = Mock()
        # Act
        utilities.get_ext(args, error, '.out', '.file')
        # Assert
        self.assertNotEqual(0, error.call_count)
class TestAsk(unittest.TestCase):
    """Unit tests for the `ask` function."""

    def test_ask_yes(self):
        """Verify 'yes' maps to True."""
        with patch('builtins.input', Mock(return_value='yes')):
            self.assertTrue(utilities.ask("?"))

    def test_ask_no(self):
        """Verify 'no' maps to False."""
        with patch('builtins.input', Mock(return_value='no')):
            self.assertFalse(utilities.ask("?"))

    def test_ask_interrupt(self):
        """Verify a prompt can be interrupted."""
        with patch('builtins.input', Mock(side_effect=KeyboardInterrupt)):
            with self.assertRaises(KeyboardInterrupt):
                utilities.ask("?")

    def test_ask_bad(self):
        """Verify a bad response re-prompts."""
        # first answer is invalid, so the user is asked again
        with patch('builtins.input', Mock(side_effect=['maybe', 'yes'])):
            self.assertTrue(utilities.ask("?"))
class TestShow(unittest.TestCase):
    """Unit tests for the `show` function."""  # pylint: disable=R0201
    @patch('builtins.print')
    def test_show(self, mock_print):
        """Verify prints are enabled by default."""
        msg = "Hello, world!"
        utilities.show(msg)
        # show() delegates to print without forcing a flush
        mock_print.assert_called_once_with(msg, flush=False)
    @patch('builtins.print')
    @patch('doorstop.common.verbosity', common.PRINT_VERBOSITY - 1)
    def test_show_hidden(self, mock_print):
        """Verify prints are hidden when verbosity is quiet."""
        utilities.show("This won't be printed.")
        self.assertEqual(0, mock_print.call_count)
class TestPositiveInt(unittest.TestCase):
    """Unit tests for the `positive_int` function."""

    def test_positive_int(self):
        """Verify a positive integer can be parsed."""
        for value in ('1', 1):
            self.assertEqual(utilities.positive_int(value), 1)

    def test_non_positive_int(self):
        """Verify a non-positive integer is rejected."""
        for value in ('-1', -1, 0):
            with self.assertRaises(ArgumentTypeError):
                utilities.positive_int(value)

    def test_non_int(self):
        """Verify a non-integer is rejected."""
        with self.assertRaises(ArgumentTypeError):
            utilities.positive_int('abc')
|
fiorix/cyclone | appskel/foreman/modname/main.py | Python | apache-2.0 | 820 | 0.009756 | import web
import sys, os
from twisted.python import log
from twis | ted.internet import defer, reactor
|
def main(config_file):
    """Start the cyclone web application described by *config_file*.

    Blocks in the Twisted reactor loop until the process is stopped.
    """
    log.startLogging(sys.stdout)
    application = web.Application(config_file)
    # Honour the PORT environment variable (e.g. set by Foreman/Heroku),
    # falling back to 8888.
    port = os.environ.get("PORT", 8888)
    reactor.listenTCP(int(port), application)
    reactor.run()
if __name__ == "__main__":
    # Usage: main.py <config_file>
    if len(sys.argv) > 1:
        main(sys.argv[1])
    else:
        log.error("no config file given")
        sys.exit(-1)
|
PostCenter/botlang | botlang/environment/primitives/exceptions.py | Python | mit | 1,226 | 0 | from botlang.evaluation.values import NativeException
def is_exception(value):
    """Return True if *value* is a botlang NativeException instance."""
    return isinstance(value, NativeException)
def try_catch_complete(process, failure,
                       after_execution=lambda: None, prod=True):
    """Run *process*; on any error invoke *failure* with a NativeException.

    A NativeException *returned* by process() is treated the same as a
    Python exception *raised* by it.  *after_execution* always runs, like a
    finally clause.  With prod=True the exception carries no details;
    otherwise the original Python exception text is included.
    """
    exception = None
    try:
        value = process()
        if is_exception(value):
            exception = value
            # Deliberately raise to funnel the "returned an exception"
            # case into the same except-block as a raised exception.
            raise Exception()
    except Exception as python_exception:
        if exception is None:
            # process() itself raised: wrap the Python exception.
            if prod:
                exception = NativeException('system', 'system')
            else:
                exception = NativeException('system',
                                            r"""Exception:
                    {}""".format(python_exception))
        failure(exception)
    finally:
        after_execution()
def public_try_catch(process, failure, after_execution=lambda: None):
    # Production mode: error details are hidden from the end user.
    try_catch_complete(process, failure, after_execution, True)
def develop_try_catch(process, failure, after_execution=lambda: None):
    # Development mode: the original Python exception text is exposed.
    try_catch_complete(process, failure, after_execution, False)
# Mapping of botlang primitive names to their Python implementations.
EXCEPTION_PRIMITIVES = {
    'exception?': is_exception,
    'try-catch': public_try_catch,
    'try-catch-verbose': develop_try_catch
}
|
Unit4TechProjects/ChoiceOptimization | run.py | Python | gpl-2.0 | 3,621 | 0.020713 | import subprocess
import sys
import csv
import re
import os, os.path
from fnmatch import fnmatch
# Template for the scheduler configuration file; {} receives the CSV data file name.
CONFIG = "DATA_FILE={}\nALLOW_GREEDY=true\n"
# Matches dates such as 1/2/2014, 01-02-2014 or 1.2.2014 (the separator must repeat).
DATE_CHECK = re.compile("[0-9]{1,2}([/\\-\\.])[0-9]{1,2}\\1[0-9]{4}")
def print_command_err(command, ret_code):
    # Re-escape spaces in each argument so the failing command can be
    # copy/pasted back into a shell for debugging, then exit with the
    # child's return code.
    make_arg = lambda x: x.replace(" ", "\\ ")
    command = [make_arg(_) for _ in command]
    cmd_str = " ".join(command)
    print "ExecutionError: command {%s} returned error code %d when executing" % (cmd_str, ret_code)
    sys.exit(ret_code)
def call_command(command):
    """Run *command* via subprocess; report and exit on a non-zero status."""
    status = subprocess.call(command)
    if status != 0:
        print_command_err(command, status)
def write_config_file(filename):
    """Write scheduler.config in the current directory, pointing the
    scheduler at the CSV data file *filename*."""
    config_text = CONFIG.format(filename)
    with open("scheduler.config", "w+") as config_file:
        config_file.write(config_text)
def get_temp_file(original_file):
    """Return the companion .temp path for *original_file*, with spaces
    in the base path replaced by underscores."""
    base = os.path.splitext(original_file)[0]
    return base.replace(" ", "_") + ".temp"
def merge_prefs(pref_files):
    """ Merges the primary preference files into one map. NOTE: Assumes csv files are identically structured """
    ra_map = {}
    name_index = 0
    for ff in pref_files:
        with open(ff, "r") as rf:
            csv_reader = csv.reader(rf, lineterminator="\n")
            header = None
            for row in csv_reader:
                if not header:
                    # first row: map column title -> column index
                    header = {row[i]: i for i in xrange(len(row))}
                    name_index = header["Name"]
                else:
                    # keep only the date columns and the "Duties" column,
                    # keyed by the RA's name
                    ra_map[row[name_index]] = {i: row[header[i]]
                                               for i in header
                                               if (DATE_CHECK.match(i) or i == "Duties")}
    # returns (ra_map, relevant headings) -- header is whichever file was read last
    return ra_map, [_ for _ in header.keys()
                    if (DATE_CHECK.match(_) or _ == "Duties" or _ == "Name")]
def update_secondary_prefs(ra_map, primary_files):
    """Fold the primary scheduling results into *ra_map*.

    Each row of a results file is "<name>,<duty>,<duty>,...".  The RA's
    duty count is reset to "UNKNOWN" and every duty already assigned in
    the primary run gets preference "0".  Returns the updated map.
    """
    for results_file in primary_files:
        with open(results_file, "r") as handle:
            reader = csv.reader(handle, lineterminator="\n")
            for row in reader:
                prefs = ra_map[row[0]]
                prefs["Duties"] = "UNKNOWN"
                for assigned_duty in row[1:]:
                    prefs[assigned_duty] = "0"
    return ra_map
def write_new_prefs(new_prefs, headings):
    """Write the merged preference map to secondary.csv in the current
    directory, with columns ordered Name, dates (by day of month), Duties."""
    def sort_key(heading):
        # "Name" sorts first, "Duties" last (32 > any day of month);
        # date headings are ordered by their day-of-month component.
        if heading == "Name":
            return 0
        if heading == "Duties":
            return 32
        if DATE_CHECK.match(heading):
            return int(heading.split("/")[1])
    ordered = sorted(headings, key=sort_key)
    with open("secondary.csv", "w+") as out_file:
        writer = csv.writer(out_file, lineterminator="\n")
        writer.writerow(ordered)
        for name, prefs in new_prefs.items():
            writer.writerow([name] + [prefs[h] for h in ordered if h != "Name"])
def create_secondary_prefs(pref_files, primary_data_files):
    # Merge all primary preference CSVs, zero out the duties that were
    # already assigned by the primary scheduling run, and emit
    # secondary.csv for the secondary pass.
    primary_ra_map, headers = merge_prefs(pref_files)
    secondary_ra_map = update_secondary_prefs(primary_ra_map, primary_data_files)
    write_new_prefs(secondary_ra_map, headers)
if __name__ == "__main__":
    # Usage: run.py <excel_preferences_file>
    excel_file = sys.argv[1]
    # Convert the Excel preference workbook into per-sheet CSV files.
    call_command(["python2", "pref_parser.py", excel_file])
    call_command(["ant", "compile"])
    # Move the generated CSVs into the build directory and work from there.
    csv_files = [_ for _ in os.listdir(".") if fnmatch(_, "*.csv")]
    for ff in csv_files:
        os.rename(ff, "/".join(["build", ff]))
    os.chdir("build/")
    # Primary pass: one scheduler run per CSV, after manual duty counts.
    for filename in csv_files:
        write_config_file(filename)
        raw_input("Please fill in the number of primary duties for each RA in the file %s.\nPress Enter when finished" % (filename))
        call_command(["java", "duty_scheduler.Scheduler"])
    # Secondary pass: merge results and run the scheduler once more with -s.
    create_secondary_prefs(csv_files, [get_temp_file(filename) for filename in csv_files])
    raw_input("Please fill in the number of secondary duties for each RA in the file secondary.csv.\nPress Enter when finished")
    write_config_file("secondary.csv")
    call_command(["java", "duty_scheduler.Scheduler", "-s"])
DefaultUser/DontSleep | dontsleep.py | Python | gpl-3.0 | 10,120 | 0.006324 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#***************************************************************************
#* Copyright (C) 2014 by Sebastian Schmidt [schro.sb@gmail.com] *
#* *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU General Public License as published by *
#* the Free Software Foundation; either version 3 of the License, or *
#* (at your option) any later version. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU General Public License for more details. *
#* *
#* You should have received a copy of the GNU General Public License *
#* along with this program; if not, write to the *
#* Free Software Foundation, Inc., *
#* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
#***************************************************************************
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import sys, os, psutil, subprocess, codecs
def getScriptPath():
    """Return the absolute directory containing the running script."""
    script = os.path.realpath(sys.argv[0])
    return os.path.dirname(script)
def isScreenSaverEnabled():
    # Ask the X server for its DPMS/screen-saver settings; a timeout of 0
    # means the screen saver is disabled.
    xset = subprocess.Popen("xset dpms q | grep timeout", shell=True,\
        stdout=subprocess.PIPE)
    timeout = int(xset.stdout.read().split(" ")[2])
    if timeout == 0:
        return False
    else:
        return True
def getProcesses():
    # Load the list of process names that should keep the machine awake
    # from the per-user INI settings file.
    settings = QSettings(QSettings.IniFormat, QSettings.UserScope,
                         "dontsleep", "dontsleep")
    processes = settings.value("processes", []).toStringList()
    return processes
def appendProcess(process):
    # Persist *process* by appending it to the stored watch list.
    settings = QSettings(QSettings.IniFormat, QSettings.UserScope,
                         "dontsleep", "dontsleep")
    processes = settings.value("processes", []).toStringList()
    processes.append(process)
    settings.setValue("processes", processes)
def removeProcess(process):
    # Remove *process* from the stored watch list (no-op if absent).
    settings = QSettings(QSettings.IniFormat, QSettings.UserScope,
                         "dontsleep", "dontsleep")
    processes = settings.value("processes", []).toStringList()
    if process in processes:
        index = processes.indexOf(process)
        processes.removeAt(index)
        settings.setValue("processes", processes)
def getLicenseText():
    """Return the contents of the bundled LICENSE file.

    The file is read as UTF-8 from the directory the script lives in.
    """
    license_path = getScriptPath() + "/LICENSE"
    # codecs.open keeps Python 2 compatibility; the context manager closes
    # the file on exit, so the explicit close() the old code did inside the
    # `with` block was redundant (and would have broken any later reads).
    with codecs.open(license_path, "r", "Utf8") as license_file:
        return license_file.read()
# Tray icon images, resolved relative to the script's own directory.
sleep_Icon = getScriptPath() + "/sleep.xpm"
awake_Icon = getScriptPath() + "/awake.xpm"
class TrayIcon(QSystemTrayIcon):
    """System tray icon that monitors and toggles the X screen saver.

    The icon reflects the current state (sleep.xpm = screen saver active,
    awake.xpm = suppressed) and polls for watched processes on a timer.
    """
    def __init__(self, parent=None):
        # Pick the initial icon from the current screen saver state.
        if isScreenSaverEnabled():
            icon = QIcon(sleep_Icon)
        else:
            icon = QIcon(awake_Icon)
        super(TrayIcon, self).__init__(icon, parent)
        # overwrite status based on user input (left click toggles it)
        self.manual_PreventSleep = False
        self.configDialog = None
        self.activated.connect(self._icon_activated)
        # build the right-click context menu
        menu = QMenu()
        showConfigWindowAction = QAction("Toggle Config", menu)
        showConfigWindowAction.triggered.connect(self.toggleConfigWindow)
        aboutAction = QAction("About", menu)
        aboutAction.triggered.connect(self.showAboutDialog)
        aboutQtAction = QAction("About Qt", menu)
        aboutQtAction.triggered.connect(self.showAboutQt)
        exitAction = QAction("Exit", menu)
        exitAction.triggered.connect(self._exitApp)
        menu.addAction(showConfigWindowAction)
        menu.addAction(aboutAction)
        menu.addAction(aboutQtAction)
        menu.addAction(exitAction)
        self.setContextMenu(menu)
        self._getTimeout()
        # re-check the watched process list just under once a minute
        self.timer = QTimer(self)
        self.timer.setInterval(59000)
        self.timer.timeout.connect(self.onTimeout)
        self.timer.start()
    def showAboutDialog(self):
        """Show the About dialog (AboutDialog is defined elsewhere in this file)."""
        self.aboutDialog = AboutDialog()
        self.aboutDialog.show()
    def showAboutQt(self):
        """Show Qt's stock About-Qt dialog."""
        QMessageBox.aboutQt(None)
    def onTimeout(self):
        """Periodic check: keep the screen awake while a watched process runs."""
        if self.manual_PreventSleep:
            # user forced "stay awake"; don't let the poll override it
            return True
        if self.checkForProcesses():
            self.disableScreenSaver()
        else:
            self.enableScreenSaver()
    def checkForProcesses(self):
        """Return True if any configured process name is currently running."""
        processes = getProcesses()
        for proc in psutil.process_iter():
            if proc.name in processes:
                return True
        return False
    def _getTimeout(self):
        """
        Save the timeout of the screen saver function
        """
        xset = subprocess.Popen("xset dpms q | grep timeout", shell=True,\
            stdout=subprocess.PIPE)
        timeout = xset.stdout.read().split(" ")[2]
        # Screen Saver disabled at start - how much timeout?
        if timeout == "0":
            self.initial_timeout = "on"
        else:
            self.initial_timeout = timeout
    def _icon_activated(self, reason):
        # Left Click (QSystemTrayIcon.Trigger == 3) toggles the state manually
        if reason == 3:
            self.toggleScreenSaver(manual_overwrite=True)
    def _exitApp(self):
        # restore the screen saver before quitting
        self.enableScreenSaver()
        app = QApplication.instance()
        app.exit()
    def closeConfigDialog(self):
        """Close and forget the config dialog."""
        self.configDialog.close()
        self.configDialog = None
    def positionConfigDialog(self):
        """Place the config dialog adjacent to the tray icon."""
        geo = self.geometry()
        x = geo.x()
        # top panel: open the dialog below the icon
        if geo.y() < 10:
            y = geo.y() + geo.height()
        # bottom panel: open the dialog above the icon
        else:
            y = geo.y() - self.configDialog.size().height()
        self.configDialog.move(x, y)
    def toggleConfigWindow(self):
        """Open the config dialog if closed, close it if open."""
        if not self.configDialog:
            self.configDialog = DontSleepConfigWindow()
            self.configDialog.show()
            self.positionConfigDialog()
            self.configDialog.closeBtn.clicked.connect(self.closeConfigDialog)
        else:
            self.closeConfigDialog()
    def toggleScreenSaver(self, manual_overwrite=False):
        """Flip the screen saver state; optionally record it as a manual override."""
        if isScreenSaverEnabled():
            self.disableScreenSaver()
            # remember the manual "stay awake" request so onTimeout() skips it
            if manual_overwrite:
                self.manual_PreventSleep = True
        else:
            self.enableScreenSaver()
            if manual_overwrite:
                self.manual_PreventSleep = False
    def enableScreenSaver(self):
        """Re-enable the screen saver with the timeout saved at startup."""
        self.setIcon(QIcon(sleep_Icon))
        subprocess.Popen("xset dpms s "+ self.initial_timeout, shell=True)
        # Enable Energy saving features (monitor sleep)
        subprocess.Popen("xset +dpms", shell=True)
    def disableScreenSaver(self):
        """Turn the screen saver off so the display stays awake."""
        self.setIcon(QIcon(awake_Icon))
        subprocess.Popen("xset dpms s off", shell=True)
        # Disable Energy saving features (monitor sleep)
        subprocess.Popen("xset -dpms", shell=True)
class DontSleepConfigWindow(QDialog):
"""
Preferences Window
"""
def __init__(self, parent=None):
super(DontSleepConfigWindow, self).__init__(parent)
self.setWindowTitle("Don't SLEEP - Config")
self.setWindowFlags(Qt.FramelessWindowHint | Qt.Tool | Qt.WindowStaysOnTopHint)
self.applicationList = QListWidget(self)
addBtn = QPushButton("Add", self)
addBtn.clicked.connect(self.addToProcesses)
removeBtn = QPushButton("Remove", self)
removeBtn.clicked.connect(self.removeFromProcesses)
self.closeBtn = QPushButton("Close", self)
layout = QVBoxLayout(self)
layout.addWidget(self.applicationList)
hlayout = QHBoxLayout()
hlayout.addWidget(addBtn)
hlayout.add |
vincentpc/yagra | main.py | Python | apache-2.0 | 955 | 0 | #!/usr/bin/env python
# coding=utf-8
from webapp.web import Application
from handlers.in | dex import IndexHandler
from handlers.register import RegisterHandler
from handlers.user import UserHandler
from handlers.signin import SigninHandler
from handlers.signout import SignoutHandler
from handlers.upload import UploadHandler
from handlers.avatar import AvatarHandler
from handlers.error import ErrorHandler
from handlers.password import PasswordHandler
f | rom handlers.ftypeerror import FiletypeErrorHandler
# Routing table: URL pattern -> handler class name (resolved via globals()
# by the Application constructor).
URLS = (
    ("/", "IndexHandler"),
    ("/register?", "RegisterHandler"),
    ("/user", "UserHandler"),
    ("/signin", "SigninHandler"),
    ("/signout", "SignoutHandler"),
    ("/upload", "UploadHandler"),
    ("/avatar/(.*)", "AvatarHandler"),
    ("/error", "ErrorHandler"),
    ("/pwdchange", "PasswordHandler"),
    ("/ftypeerror", "FiletypeErrorHandler")
)
if __name__ == '__main__':
    # Build the WSGI-style application from the routing table and serve it.
    app = Application(globals(), URLS)
    app.run()
|
sidzan/netforce | netforce_document/netforce_document/models/document_categ.py | Python | mit | 2,734 | 0.00256 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial porti | ons of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARR | ANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields, get_model
class DocumentCateg(Model):
    """Hierarchical document category with a stored, slash-joined full name."""
    _name = "document.categ"
    _string = "Document Category"
    _name_field = "full_name"
    _fields = {
        "name": fields.Char("Category Name", required=True, search=True),
        "code": fields.Char("Document Code", search=True),
        "full_name": fields.Char("Category Name", function="get_full_name", search=True, store=True, size=256),
        "parent_id": fields.Many2One("document.categ", "Parent Category"),
        "description": fields.Text("Description", search=True),
        "comments": fields.One2Many("message", "related_id", "Comments"),
        "expire_after": fields.Char("Expire After"),
        "file_name": fields.Char("Filename Format"),
        "reminder_templates": fields.One2Many("reminder.template", "doc_categ_id", "Reminder Templates"),
    }
    _order = "full_name"
    _constraints = ["_check_cycle"]

    def create(self, vals, **kw):
        """Create a category and compute its stored full_name."""
        rec_id = super().create(vals, **kw)
        self.function_store([rec_id])
        return rec_id

    def write(self, ids, vals, **kw):
        """Update categories and refresh full_name on them and all descendants."""
        super().write(ids, vals, **kw)
        descendants = self.search(["id", "child_of", ids])
        self.function_store(descendants)

    def get_full_name(self, ids, context={}):
        """Return {id: ' / '-joined path from the root category down to the record}."""
        result = {}
        for obj in self.browse(ids):
            path = []
            node = obj
            # walk up the parent chain, building the path root-first
            while node:
                path.insert(0, node.name)
                node = node.parent_id
            result[obj.id] = " / ".join(path)
        return result

DocumentCateg.register()
|
dominikl/openmicroscopy | components/tools/OmeroPy/test/unit/clitest/test_import.py | Python | gpl-2.0 | 8,731 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test of the omero import control.
Copyright 2009 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import pytest
from path import path
import omero.clients
import uuid
from omero.cli import CLI, NonZeroReturnCode
# Workaround for a poorly named module
plugin = __import__('omero.plugins.import', globals(), locals(),
['ImportControl'], -1)
ImportControl = plugin.ImportControl
help_arguments = ("-h", "--javahelp", "--java-help", "--advanced-help")
class MockClient(omero.clients.BaseClient):
    """Client stub that keeps the session id in memory instead of talking to a server."""

    def setSessionId(self, uuid):
        """Remember *uuid* as the current session id."""
        self._uuid = uuid

    def getSessionId(self):
        """Return the session id previously stored via setSessionId()."""
        return self._uuid
class TestImport(object):
    def setup_method(self, method):
        # Fresh CLI with only the import control registered, so each test
        # builds its own argument list from scratch.
        self.cli = CLI()
        self.cli.register("import", ImportControl, "TEST")
        self.args = ["import"]
    def add_client_dir(self):
        # Point --clientdir at the dist/lib/client tree relative to this file.
        dist_dir = path(__file__) / ".." / ".." / ".." / ".." / ".." / ".." /\
            ".." / "dist"  # FIXME: should not be hard-coded
        dist_dir = dist_dir.abspath()
        client_dir = dist_dir / "lib" / "client"
        self.args += ["--clientdir", client_dir]
    def mkdir(self, parent, name, with_ds_store=False):
        # Create parent/name, optionally dropping an empty .DS_STORE file in it
        # (to mimic directories browsed on OS X); returns the new directory.
        child = parent / name
        child.mkdir()
        if with_ds_store:
            ds_store = child / ".DS_STORE"
            ds_store.write("")
        return child
    def mkfakescreen(self, screen_dir, nplates=2, nruns=2, nwells=2,
                     nfields=4, with_ds_store=False):
        # Build a Plate/Run/Well/Field directory tree of empty .fake files
        # under screen_dir; returns the flat list of field files created.
        fieldfiles = []
        for iplate in range(nplates):
            plate_dir = self.mkdir(
                screen_dir, "Plate00%s" % str(iplate),
                with_ds_store=with_ds_store)
            for irun in range(nruns):
                run_dir = self.mkdir(
                    plate_dir, "Run00%s" % str(irun),
                    with_ds_store=with_ds_store)
                for iwell in range(nwells):
                    well_dir = self.mkdir(
                        run_dir, "WellA00%s" % str(iwell),
                        with_ds_store=with_ds_store)
                    for ifield in range(nfields):
                        fieldfile = (well_dir / ("Field00%s.fake" %
                                     str(ifield)))
                        fieldfile.write('')
                        fieldfiles.append(fieldfile)
        return fieldfiles
    def mkfakepattern(self, tmpdir, nangles=7, ntimepoints=10):
        # Create SPIM/spim_TL<t>_Angle<a>.fake files plus a spim.pattern file
        # describing them; returns (patternfile, list of fake files).
        spim_dir = tmpdir.join("SPIM")
        spim_dir.mkdir()
        tiffiles = []
        for angle in range(1, nangles + 1):
            for timepoint in range(1, ntimepoints + 1):
                tiffile = (spim_dir / ("spim_TL%s_Angle%s.fake" %
                           (str(timepoint), str(angle))))
                tiffile.write('')
                print str(tiffile)
                tiffiles.append(tiffile)
        patternfile = spim_dir / "spim.pattern"
        patternfile.write("spim_TL<1-%s>_Angle<1-%s>.fake"
                          % (str(ntimepoints), str(nangles)))
        assert len(tiffiles) == nangles * ntimepoints
        return patternfile, tiffiles
    def testDropBoxArgs(self):
        # DropBox passes credentials plus ---file/---errs before the import
        # subcommand; verify they reach the importer unchanged.
        class MockImportControl(ImportControl):
            def importer(this, args):
                assert args.server == "localhost"
                assert args.port == "4064"
                assert args.key == "b0742975-03a1-4f6d-b0ac-639943f1a147"
                assert args.errs == "/tmp/dropbox.err"
                assert args.file == "/tmp/dropbox.out"
        self.cli.register("mock-import", MockImportControl, "HELP")
        self.args = ['-s', 'localhost', '-p', '4064', '-k',
                     'b0742975-03a1-4f6d-b0ac-639943f1a147']
        self.args += ['mock-import', '---errs=/tmp/dropbox.err']
        self.args += ['---file=/tmp/dropbox.out']
        self.args += ['--', '/OMERO/DropBox/root/tinyTest.d3d.dv']
        self.cli.invoke(self.args)
    @pytest.mark.parametrize('help_argument', help_arguments)
    def testHelp(self, help_argument):
        """Test that every help flag is accepted without error"""
        self.args += [help_argument]
        self.cli.invoke(self.args)
    @pytest.mark.parametrize('clientdir_exists', [True, False])
    def testImportNoClientDirFails(self, tmpdir, clientdir_exists):
        """Test that import fails without a usable client directory"""
        fakefile = tmpdir.join("test.fake")
        fakefile.write('')
        if clientdir_exists:
            # An existing but empty directory still lacks the client jars.
            self.args += ["--clientdir", str(tmpdir)]
        self.args += [str(fakefile)]
        with pytest.raises(NonZeroReturnCode):
            self.cli.invoke(self.args, strict=True)
    @pytest.mark.parametrize("data", (("1", False), ("3", True)))
    def testImportDepth(self, tmpdir, capfd, data):
        """Test import using depth argument"""
        # a/b/test.fake is two levels down: depth=1 must miss it, depth=3 find it.
        dir1 = tmpdir.join("a")
        dir1.mkdir()
        dir2 = dir1 / "b"
        dir2.mkdir()
        fakefile = dir2 / "test.fake"
        fakefile.write('')
        self.add_client_dir()
        self.args += ["-f", "--debug=ERROR"]
        self.args += [str(dir1)]
        depth, result = data
        self.cli.invoke(self.args + ["--depth=%s" % depth], strict=True)
        o, e = capfd.readouterr()
        if result:
            assert str(fakefile) in str(o)
        else:
            assert str(fakefile) not in str(o)
    def testImportFakeImage(self, tmpdir, capfd):
        """Test fake image import"""
        fakefile = tmpdir.join("test.fake")
        fakefile.write('')
        self.add_client_dir()
        self.args += ["-f", "--debug=ERROR"]
        self.args += [str(fakefile)]
        self.cli.invoke(self.args, strict=True)
        o, e = capfd.readouterr()
        outputlines = str(o).split('\n')
        reader = 'loci.formats.in.FakeReader'
        # Candidates output ends with the group header then the file itself.
        assert outputlines[-2] == str(fakefile)
        assert outputlines[-3] == \
            "# Group: %s SPW: false Reader: %s" % (str(fakefile), reader)
@pytest.mark.parametrize('with_ds_store', (True, False))
def testImportFakeScreen(self, tmpdir, capfd, with_ds_st | ore):
"""Test fake screen import"""
screen_dir = tmpdir.join("screen.fake")
screen_dir.mkdir()
fieldfiles = self.mkfakescreen(
screen_dir, with_ds_store=with_ds_store)
self.add_client_dir()
self.args += ["-f", "--debug=ERROR"]
self.args += [str(fieldfiles[0])]
sel | f.cli.invoke(self.args, strict=True)
o, e = capfd.readouterr()
outputlines = str(o).split('\n')
reader = 'loci.formats.in.FakeReader'
assert outputlines[-len(fieldfiles)-2] == \
"# Group: %s SPW: true Reader: %s" % (str(fieldfiles[0]), reader)
for i in range(len(fieldfiles)):
assert outputlines[-1-len(fieldfiles)+i] == str(fieldfiles[i])
    def testImportPattern(self, tmpdir, capfd):
        """Test pattern import"""
        # A .pattern file groups all matching .fake files into one fileset.
        patternfile, tiffiles = self.mkfakepattern(tmpdir)
        self.add_client_dir()
        self.args += ["-f", "--debug=ERROR"]
        self.args += [str(patternfile)]
        self.cli.invoke(self.args, strict=True)
        o, e = capfd.readouterr()
        outputlines = str(o).split('\n')
        reader = 'loci.formats.in.FilePatternReader'
        print o
        # Group header, then the pattern file, then every member tiff.
        assert outputlines[-len(tiffiles)-3] == \
            "# Group: %s SPW: false Reader: %s" % (str(patternfile), reader)
        assert outputlines[-len(tiffiles)-2] == str(patternfile)
        for i in range(len(tiffiles)):
            assert outputlines[-1-len(tiffiles)+i] == str(tiffiles[i])
@pytest.mark.parametrize('hostname', ['localhost', 'servername'])
@pytest.mark.parametrize('port', [None, 4064, 14064])
def testLoginArguments(self, monkeypatch, hostname, port, tmpdir):
self.args += ['test.fake']
control = self.cli.controls['import']
control.command_args = []
sessionid = str(uuid.uuid4())
def new_client(x):
if port:
c = MockClient(hostname, port)
else:
c = MockClient(hostname)
c.setSessionId(sessionid)
|
TuSimple/mxnet | python/mxnet/metric.py | Python | apache-2.0 | 43,258 | 0.000925 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=no-member, too-many-lines
"""Online evaluation metric module."""
from __future__ import absolute_import
import math
from collections import OrderedDict
import numpy
from .base import numeric_types, string_types
from . import ndarray
from . import registry
def check_label_shapes(labels, preds, wrap=False, shape=False):
    """Helper function for checking shape of label and prediction

    Parameters
    ----------
    labels : list of `NDArray`
        The labels of the data.
    preds : list of `NDArray`
        Predicted values.
    wrap : boolean
        If True, wrap labels/preds in a list if they are single NDArray
    shape : boolean
        If True, check the shape of labels and preds;
        Otherwise only check their length.
    """
    # Compare full shapes when requested, otherwise just the lengths.
    expected = labels.shape if shape else len(labels)
    actual = preds.shape if shape else len(preds)
    if expected != actual:
        raise ValueError("Shape of labels {} does not match shape of "
                         "predictions {}".format(expected, actual))
    if wrap:
        # Promote bare NDArrays to one-element lists for uniform handling.
        labels = [labels] if isinstance(labels, ndarray.ndarray.NDArray) else labels
        preds = [preds] if isinstance(preds, ndarray.ndarray.NDArray) else preds
    return labels, preds
class EvalMetric(object):
    """Base class for all evaluation metrics.

    .. note::

        This is a base class that provides common metric interfaces.
        One should not use this class directly, but instead create new metric
        classes that extend it.

    Parameters
    ----------
    name : str
        Name of this metric instance for display.
    output_names : list of str, or None
        Name of predictions that should be used when updating with update_dict.
        By default include all predictions.
    label_names : list of str, or None
        Name of labels that should be used when updating with update_dict.
        By default include all labels.
    """
    def __init__(self, name, output_names=None,
                 label_names=None, **kwargs):
        self.name = str(name)
        self.output_names = output_names
        self.label_names = label_names
        # Extra kwargs are preserved so get_config() can round-trip the metric.
        self._kwargs = kwargs
        self.reset()

    def __str__(self):
        return "EvalMetric: {}".format(dict(self.get_name_value()))

    def get_config(self):
        """Save configurations of metric. Can be recreated
        from configs with metric.create(**config)
        """
        config = self._kwargs.copy()
        config.update({
            'metric': self.__class__.__name__,
            'name': self.name,
            'output_names': self.output_names,
            'label_names': self.label_names})
        return config

    def update_dict(self, label, pred):
        """Update the internal evaluation with named label and pred

        Parameters
        ----------
        label : OrderedDict of str -> NDArray
            name to array mapping for labels.
        pred : OrderedDict of str -> NDArray
            name to array mapping of predicted outputs.
        """
        # Select only the configured outputs/labels; otherwise use all values
        # in mapping order.
        if self.output_names is not None:
            pred = [pred[name] for name in self.output_names]
        else:
            pred = list(pred.values())
        if self.label_names is not None:
            label = [label[name] for name in self.label_names]
        else:
            label = list(label.values())
        self.update(label, pred)

    def update(self, labels, preds):
        """Updates the internal evaluation result.

        Parameters
        ----------
        labels : list of `NDArray`
            The labels of the data.
        preds : list of `NDArray`
            Predicted values.
        """
        raise NotImplementedError()

    def reset(self):
        """Resets the internal evaluation result to initial state."""
        self.num_inst = 0
        self.sum_metric = 0.0

    def get(self):
        """Gets the current evaluation result.

        Returns
        -------
        names : list of str
            Name of the metrics.
        values : list of float
            Value of the evaluations.
        """
        if self.num_inst == 0:
            # NaN signals "no data seen yet" without raising.
            return (self.name, float('nan'))
        else:
            return (self.name, self.sum_metric / self.num_inst)

    def get_name_value(self):
        """Returns zipped name and value pairs.

        Returns
        -------
        list of tuples
            A (name, value) tuple list.
        """
        name, value = self.get()
        if not isinstance(name, list):
            name = [name]
        if not isinstance(value, list):
            value = [value]
        return list(zip(name, value))
# Module-level helpers bound from the generic registry: register new metric
# classes, declare aliases for them, and construct instances by name.
# pylint: disable=invalid-name
register = registry.get_register_func(EvalMetric, 'metric')
alias = registry.get_alias_func(EvalMetric, 'metric')
_create = registry.get_create_func(EvalMetric, 'metric')
# pylint: enable=invalid-name
def create(metric, *args, **kwargs):
    """Creates evaluation metric from metric names or instances of EvalMetric
    or a custom metric function.

    Parameters
    ----------
    metric : str or callable
        Specifies the metric to create.
        This argument must be one of the below:

        - Name of a metric.
        - An instance of `EvalMetric`.
        - A list, each element of which is a metric or a metric name.
        - An evaluation function that computes custom metric for a given batch of
          labels and predictions.
    *args : list
        Additional arguments to metric constructor.
        Only used when metric is str.
    **kwargs : dict
        Additional arguments to metric constructor.
        Only used when metric is str

    Examples
    --------
    >>> def custom_metric(label, pred):
    ...     return np.mean(np.abs(label - pred))
    ...
    >>> metric1 = mx.metric.create('acc')
    >>> metric2 = mx.metric.create(custom_metric)
    >>> metric3 = mx.metric.create([metric1, metric2, 'rmse'])
    """
    if callable(metric):
        # A bare evaluation function: wrap it so it follows the metric API.
        return CustomMetric(metric, *args, **kwargs)
    if isinstance(metric, list):
        # A list of specs: build one composite holding a child per entry.
        composite = CompositeEvalMetric()
        for spec in metric:
            composite.add(create(spec, *args, **kwargs))
        return composite
    # Names and EvalMetric instances are resolved through the registry.
    return _create(metric, *args, **kwargs)
@register
@alias('composite')
class CompositeEvalMetric(EvalMetric):
"""Manages multiple evaluation metrics.
Parameters
----------
metrics : list of EvalMetric
List of child metrics.
name : str
Name of this metric instance for display.
output_names : list of str, or None
Name of predictions that should be used when updating with update_dict.
By default include all predictions.
label_names : list of str, or None
Name of labels that should be used when updating with update_dict.
By default include all labels.
Examples
--------
>>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]
>>> labels = [mx.nd.array([0, 1, 1])]
>>> eval_metrics_1 = mx.metric.Accuracy()
>>> eval_metrics_2 = mx.metric.F1()
>>> eval_metrics = mx.metric.CompositeEvalMetric()
>>> for child_metric in [eval_metrics_1, eval_metrics_2]:
>>> eval_metrics.add(child_metric)
>>> eval_metrics.update(labels = labels, preds = predicts) |
Seynen/egfrd | samples/mapk/model3-conc.py | Python | gpl-2.0 | 4,745 | 0.004004 | #!/usr/bin/env python
from egfrd import *
from logger import *
import sys
import math
model='mapk3-c'
V_str = sys.argv[1]
D_ratio_str = sys.argv[2]
D_mode = sys.argv[3]
ti_str = sys.argv[4]
mode = sys.argv[5]
T_str = sys.argv[6]
V = float(V_str)
D_ratio = float(D_ratio_str)
ti = float(ti_str)
T = float(T_str)
# This runs the mapk3 model with fixed # of
# molecules regardless of the volume V.
# The # of molecules is calculated from
# given concentration (below) using The
# V_ref parameter, which does not change.
V_ref = 1e-15
if ti == 0:
ki = float('inf')
else:
| ki = math.log(2) / ti
D_ref = 1e-12
D_move = D_ref * D_ratio
if D_mode == 'normal':
D_react = D_move
elif D_mode == 'fixed':
D_react = D_ref
# V in liter, L in meter
L = math.pow(V * 1e-3, 1.0 / 3.0)
s = EGFRDSimulator()
s.set_world_size(L)
N = 180
matrix_size = min(max(3, int((3 * N) ** (1.0/3.0))), 60)
print 'matrix_size=', matrix_size
s.set_matrix_size(matrix_size)
box1 = Cuboida | lRegion([0,0,0],[L,L,L])
# not supported yet
#s.add_surface(box1)
radius = 2.5e-9
m = ParticleModel()
K = m.new_species_type('K', D_move, radius)
KK = m.new_species_type('KK', D_move, radius)
P = m.new_species_type('P', D_move, radius)
Kp = m.new_species_type('Kp', D_move, radius)
Kpp = m.new_species_type('Kpp', D_move, radius)
K_KK = m.new_species_type('K_KK', D_move, radius)
Kp_KK = m.new_species_type('Kp_KK', D_move, radius)
Kpp_P = m.new_species_type('Kpp_P', D_move, radius)
Kp_P = m.new_species_type('Kp_P', D_move, radius)
# inactive forms
KKi = m.new_species_type('KKi', D_move, radius)
Pi = m.new_species_type('Pi', D_move, radius)
# 1 2 K + KK <-> K_KK
# 3 K_KK -> Kp + KKi
# 4 5 Kp + KK <-> Kp_KK
# 6 Kp_KK -> Kpp + KKi
# 7 8 Kpp + P <-> Kpp_P
# 9 Kpp_P -> Kp + Pi
# 10 11 Kp + P <-> Kp_P
# 12 Kp_P -> K + Pi
# 13 KKi -> KK
# 14 Pi -> P
sigma = radius * 2
kD = k_D(D_react * 2, sigma)
N_K = C2N(200e-9, V_ref)
N_KK = C2N(50e-9, V_ref)
N_P = C2N(50e-9, V_ref)
s.throw_in_particles(K, N_K, box1)
s.throw_in_particles(KK, N_KK, box1)
s.throw_in_particles(P, N_P, box1)
# print kD
# print k_a(per_M_to_m3(0.02e9), kD)
# print k_a(per_M_to_m3(0.032e9), kD)
# sys.exit(0)
#end_time = 5
end_time = 0
while 1:
s.step()
next_time = s.scheduler.getTopTime()
if next_time > end_time:
s.stop(end_time)
break
s.reset()
k1 = k_a(per_M_to_m3(0.02e9), kD)
k2 = k_d(1.0, per_M_to_m3(0.02e9), kD)
k3 = 1.5
k4 = k_a(per_M_to_m3(0.032e9), kD)
k5 = k_d(1.0, per_M_to_m3(0.032e9), kD)
k6 = 15.0
r1 = create_binding_reaction_rule(K, KK, K_KK, k1)
m.network_rules.add_reaction_rule(r1)
r2 = create_unbinding_reaction_rule(K_KK, K, KK, k2)
m.network_rules.add_reaction_rule(r2)
r3 = create_unbinding_reaction_rule(K_KK, Kp, KKi, k3)
m.network_rules.add_reaction_rule(r3)
r4 = create_binding_reaction_rule(Kp, KK, Kp_KK, k4)
m.network_rules.add_reaction_rule(r4)
r5 = create_unbinding_reaction_rule(Kp_KK, Kp, KK, k5)
m.network_rules.add_reaction_rule(r5)
r6 = create_unbinding_reaction_rule(Kp_KK, Kpp, KKi, k6)
m.network_rules.add_reaction_rule(r6)
r7 = create_binding_reaction_rule(Kpp, P, Kpp_P, k1)
m.network_rules.add_reaction_rule(r7)
r8 = create_unbinding_reaction_rule(Kpp_P, Kpp, P, k2)
m.network_rules.add_reaction_rule(r8)
r9 = create_unbinding_reaction_rule(Kpp_P, Kp, Pi, k3)
m.network_rules.add_reaction_rule(r9)
r10 = create_binding_reaction_rule(Kp, P, Kp_P, k4)
m.network_rules.add_reaction_rule(r10)
r11 = create_unbinding_reaction_rule(Kp_P, Kp, P, k5)
m.network_rules.add_reaction_rule(r11)
r12 = create_unbinding_reaction_rule(Kp_P, K, Pi, k6)
m.network_rules.add_reaction_rule(r12)
r13 = create_unimolecular_reaction_rule(KKi, KK, ki)
m.network_rules.add_reaction_rule(r13)
r14 = create_unimolecular_reaction_rule(Pi, P, ki)
m.network_rules.add_reaction_rule(r14)
s.set_model(m);
logname = model + '_' + '_'.join(sys.argv[1:6]) + '_' +\
os.environ['SGE_TASK_ID']
l = Logger(logname = logname,
comment = '@ model=\'%s\'; D_move=%g; D_react=%g\n' %
(model, D_move, D_react) +
'#@ V=%s; N_K=%d; N_KK=%d; N_P=%d;\n' %
(V_str, N_K, N_KK, N_P) +
'#@ k1=%g; k2=%g; k3=%g; k4=%g; k5=%g; k6=%g;\n' %
(k1, k2, k3, k4, k5, k6) +
'#@ ti=%g; ki=%g;' %
(ti, ki))
rfile = open('data/' + logname + '_reactions.dat', 'w')
interrupter = FixedIntervalInterrupter(s, 1e-0, l)
l.start(s)
while s.t < T:
interrupter.step()
if s.last_reaction:
r = s.last_reaction
line = '(%18.18g,\t%s,\t%s)\n' % (s.t, r.reactants, r.products)
#print line
rfile.write(line)
rfile.flush()
l.log(s, s.t)
|
graphql-python/gql | tests/starwars/test_validation.py | Python | mit | 5,441 | 0.000368 | import pytest
from gql import Client, gql
from .schema import StarWarsIntrospection, StarWarsSchema
@pytest.fixture
def local_schema():
    # Client validating against the in-process GraphQLSchema object.
    return Client(schema=StarWarsSchema)
@pytest.fixture
def typedef_schema():
    # Client validating against the same schema declared as SDL text.
    return Client(
        schema="""
        schema {
          query: Query
        }
        interface Character {
          appearsIn: [Episode]
          friends: [Character]
          id: String!
          name: String
        }
        type Droid implements Character {
          appearsIn: [Episode]
          friends: [Character]
          id: String!
          name: String
          primaryFunction: String
        }
        enum Episode {
          EMPIRE
          JEDI
          NEWHOPE
        }
        type Human implements Character {
          appearsIn: [Episode]
          friends: [Character]
          homePlanet: String
          id: String!
          name: String
        }
        type Query {
          droid(id: String!): Droid
          hero(episode: Episode): Character
          human(id: String!): Human
        }"""
    )
@pytest.fixture
def introspection_schema():
    # Client rebuilding the schema from an introspection query result.
    return Client(introspection=StarWarsIntrospection)
@pytest.fixture
def introspection_schema_empty_directives():
    """Client built from an introspection result whose directives list is empty."""
    import copy
    # Work on a deep copy: mutating the shared StarWarsIntrospection dict in
    # place would leak into every other fixture/test using it (and repeated
    # deletion in the sibling fixture could raise KeyError).
    introspection = copy.deepcopy(StarWarsIntrospection)
    # Simulate an empty dictionary for directives
    introspection["__schema"]["directives"] = []
    return Client(introspection=introspection)


@pytest.fixture
def introspection_schema_no_directives():
    """Client built from an introspection result lacking the directives key."""
    import copy
    introspection = copy.deepcopy(StarWarsIntrospection)
    # Simulate no directives key
    del introspection["__schema"]["directives"]
    return Client(introspection=introspection)
# Parametrized meta-fixture: every validation test below runs once per
# schema-construction flavour.
@pytest.fixture(
    params=[
        "local_schema",
        "typedef_schema",
        "introspection_schema",
        "introspection_schema_empty_directives",
        "introspection_schema_no_directives",
    ]
)
def client(request):
    return request.getfixturevalue(request.param)
def validation_errors(client, query):
    # Returns True when the query fails schema validation, False when valid.
    query = gql(query)
    try:
        client.validate(query)
        return False
    except Exception:
        return True
def test_incompatible_request_gql(client):
    # Non-string input to gql() must be rejected.
    with pytest.raises(TypeError):
        gql(123)
    """
    The error generated depends on graphql-core version
    < 3.1.5: "body must be a string"
    >= 3.1.5: some variation of "object of type 'int' has no len()"
    depending on the python environment
    So we are not going to check the exact error message here anymore.
    """
    # Disabled assertion kept for reference (see note above):
    """
    assert ("body must be a string" in str(exc_info.value)) or (
        "object of type 'int' has no len()" in str(exc_info.value)
    )
    """
def test_nested_query_with_fragment(client):
    """A fragment spread at several nesting levels of the same query must validate."""
    query = """
      query NestedQueryWithFragment {
        hero {
          ...NameAndAppearances
          friends {
            ...NameAndAppearances
            friends {
              ...NameAndAppearances
            }
          }
        }
      }
      fragment NameAndAppearances on Character {
        name
        appearsIn
      }
    """
    assert not validation_errors(client, query)
def test_non_existent_fields(client):
    """Selecting a field that does not exist on the type must fail validation."""
    query = """
      query HeroSpaceshipQuery {
        hero {
          favoriteSpaceship
        }
      }
    """
    assert validation_errors(client, query)
def test_require_fields_on_object(client):
    """An object-typed field needs a selection set."""
    query = """
    query HeroNoFieldsQuery {
      hero
    }
    """
    assert validation_errors(client, query)
def test_disallows_fields_on_scalars(client):
    """Scalars cannot carry a nested selection set."""
    query = """
    query HeroFieldsOnScalarQuery {
      hero {
        name {
          firstCharacterOfName
        }
      }
    }
    """
    assert validation_errors(client, query)
def test_disallows_object_fields_on_interfaces(client):
    """A concrete type's field cannot be selected directly on the interface."""
    query = """
    query DroidFieldOnCharacter {
      hero {
        name
        primaryFunction
      }
    }
    """
    assert validation_errors(client, query)
def test_allows_object_fields_in_fragments(client):
    """Concrete-type fields are fine inside a fragment on that type."""
    query = """
    query DroidFieldInFragment {
      hero {
        name
        ...DroidFields
      }
    }
    fragment DroidFields on Droid {
      primaryFunction
    }
    """
    assert not validation_errors(client, query)
def test_allows_object_fields_in_inline_fragments(client):
    """Concrete-type fields are fine inside an inline fragment."""
    query = """
    query DroidFieldInFragment {
      hero {
        name
        ... on Droid {
          primaryFunction
        }
      }
    }
    """
    assert not validation_errors(client, query)
def test_include_directive(client):
    """The built-in @include directive must be accepted."""
    query = """
    query fetchHero($with_friends: Boolean!) {
      hero {
        name
        friends @include(if: $with_friends) {
          name
        }
      }
    }
    """
    assert not validation_errors(client, query)
def test_skip_directive(client):
    """The built-in @skip directive must be accepted."""
    query = """
    query fetchHero($without_friends: Boolean!) {
      hero {
        name
        friends @skip(if: $without_friends) {
          name
        }
      }
    }
    """
    assert not validation_errors(client, query)
def test_build_client_schema_invalid_introspection():
    """build_client_schema must reject input that is not an introspection dict."""
    from gql.utilities import build_client_schema
    with pytest.raises(TypeError) as exc_info:
        build_client_schema("blah")
    assert (
        "Invalid or incomplete introspection result. Ensure that you are passing the "
        "'data' attribute of an introspection response and no 'errors' were returned "
        "alongside: 'blah'."
    ) in str(exc_info.value)
arenaoftitans/arena-of-titans-api | aot/reload.py | Python | agpl-3.0 | 3,602 | 0.000555 | ################################################################################
# Copyright (C) 2015-2020 by Last Run Contributors.
#
# This file is part of Arena of Titans.
#
# Arena of Titans is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Arena of Titans is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Arena of Titans. If not, see <http://www.gnu.org/licenses/>.
################################################################################
import re
import signal
import sys
from subprocess import Popen # noqa: S404 (Popen can be a security risk)
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
from .run import cleanup
class AotEventHandler(FileSystemEventHandler):
    """Inspired by RegexMatchingEventHandler.

    https://github.com/gorakhargosh/watchdog/blob/d7ceb7ddd48037f6d04ab37297a63116655926d9/src/watchdog/events.py#L457.

    Restarts the API subprocess whenever a watched source file changes,
    ignoring test files and ``__pycache__`` artifacts.
    """

    IGNORE_REGEXES = [re.compile(r".*test.*"), re.compile(r".*__pycache__.*")]
    EVENT_TYPE_MOVED = "moved"
    EVENT_TYPE_DELETED = "deleted"
    EVENT_TYPE_CREATED = "created"
    EVENT_TYPE_MODIFIED = "modified"
    EVENT_TYPE_CLOSED = "closed"

    def __init__(self):
        super().__init__()
        self._loop = None

    def dispatch(self, event):
        # Collect every path involved (destination too, for move events).
        paths = []
        if hasattr(event, "dest_path"):
            paths.append(event.dest_path)
        if event.src_path:
            paths.append(event.src_path)
        # Skip events touching test files or bytecode caches entirely.
        if any(r.match(p) for r in self.IGNORE_REGEXES for p in paths):
            return
        else:
            self.on_any_event(event)
        _method_map = {
            self.EVENT_TYPE_MODIFIED: self.on_modified,
            self.EVENT_TYPE_MOVED: self.on_moved,
            self.EVENT_TYPE_CREATED: self.on_created,
            self.EVENT_TYPE_DELETED: self.on_deleted,
            self.EVENT_TYPE_CLOSED: self.on_closed,
        }
        event_type = event.event_type
        _method_map[event_type](event)

    def on_any_event(self, event):
        # Any relevant file change triggers a full restart of the API process.
        super().on_any_event(event)
        print("Reload: start", file=sys.stderr)  # noqa: T001
        self.stop_app()
        self.start_app()
        print("Reload: done", file=sys.stderr)  # noqa: T001

    def start_app(self):
        self.app = Popen(["python3", "-m", "aot", "--debug"])  # noqa: S603,S607 (Popen usage)

    def stop_app(self):
        self.app.terminate()
        cleanup(None, None)

    @property
    def loop(self):
        return self._loop

    @loop.setter
    def loop(self, loop):
        self._loop = loop
def run_reload():
    # Start the app once, then watch the "aot" package tree and let the
    # handler restart the app on every relevant file-system event.
    aot_event_handler = AotEventHandler()
    aot_event_handler.start_app()
    observer = Observer()
    observer.schedule(aot_event_handler, "aot", recursive=True)
    observer.start()
    # Make dependent thread shutdown on SIGTERM.
    # This is required for the container to stop with the 0 status code.
    signal.signal(signal.SIGTERM, lambda *args: observer.stop())
    try:
        observer.join()
    except KeyboardInterrupt:
        pass
    finally:
        # Always kill the child app and the watcher thread on the way out.
        aot_event_handler.stop_app()
        observer.stop()
        observer.join()
MengbinZhu/pfldp | ropp-7.0/grib_api-1.9.9/examples/python/clone.py | Python | gpl-3.0 | 910 | 0.01978 | import traceback
import sys
import random
from gribapi import *
INPUT='../../data/constant_field.grib1'
OUTPUT='out.grib'
VERBOSE=1 # verbose error reporting
def example():
    """Clone a constant-field GRIB message and write one randomised copy
    per 6-hour forecast step (0, 6, 12, 18) to OUTPUT."""
    fin = open(INPUT)
    fout = open(OUTPUT,'w')
    gid = grib_new_from_file(fin)
    # Grid dimensions determine how many data values each clone needs.
    nx = grib_get(gid,'Ni')
    ny = grib_get(gid,'Nj')
    for step in range(0,24,6):
        clone_id = grib_clone(gid)
        grib_set(clone_id,'step',step)
        # Fill the clone's data section with random values.
        values = [random.random() for i in range(nx*ny)]
        grib_set_values(clone_id,values)
        grib_write(clone_id,fout)
        grib_release(clone_id)
    grib_release(gid)
    fin.close()
    fout.close()
def main():
try:
e | xample()
except GribInternalError,err:
if VERBOSE:
traceback.print_exc(file=sys.stderr)
else:
print >>sys.stderr,err.msg
return 1
if __name__ == "__main__":
sys.exit(main())
|
linuxipho/mycroft-core | mycroft/skills/skill_data.py | Python | apache-2.0 | 5,706 | 0 | # Copyright 2018 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Module containing methods needed to load skill
data such as dialogs, intents and regular expressions.
"""
from os import walk
from os.path import splitext, join
import re
from mycroft.messagebus.message import Message
def load_vocab_from_file(path, vocab_type, bus):
    """Load Mycroft vocabulary from file

    The vocab is sent to the intent handler using the message bus.
    Each non-comment line has the form ``entity|alias1|alias2...``.

    Args:
        path: path to vocabulary file (*.voc)
        vocab_type: keyword name
        bus: Mycroft messagebus connection
    """
    if path.endswith('.voc'):
        with open(path, 'r') as voc_file:
            for line in voc_file:
                if line.startswith("#"):
                    continue  # comment line
                parts = line.strip().split("|")
                entity = parts[0]
                bus.emit(Message("register_vocab", {
                    'start': entity, 'end': vocab_type
                }))
                # Remaining fields are aliases of the first entity.
                for alias in parts[1:]:
                    bus.emit(Message("register_vocab", {
                        'start': alias, 'end': vocab_type, 'alias_of': entity
                    }))
def load_regex_from_file(path, bus, skill_id):
    """Load regex from file

    The regex is sent to the intent handler using the message bus

    Args:
        path: path to regex file (*.rx)
        bus: Mycroft messagebus connection
        skill_id (str): skill identifier used to namespace the match groups
    """
    if path.endswith('.rx'):
        with open(path, 'r') as reg_file:
            for line in reg_file:
                if line.startswith("#"):
                    continue  # comment line
                # Munge once; compiling first fails fast on invalid patterns
                # before they are sent to the intent service.
                munged = munge_regex(line.strip(), skill_id)
                re.compile(munged)
                bus.emit(Message("register_vocab", {'regex': munged}))
def load_vocabulary(basedir, bus, skill_id):
    """Load vocabulary from all files in the specified directory.

    Recursively scans *basedir* and registers every ``*.voc`` file with the
    intent service, namespacing the keyword by the skill id.

    Args:
        basedir (str): path of directory to load from (will recurse)
        bus (messagebus emitter): messagebus instance used to send the vocab to
            the intent service
        skill_id: skill the data belongs to
    """
    prefix = to_alnum(skill_id)
    for dirpath, _, filenames in walk(basedir):
        for filename in filenames:
            if not filename.endswith(".voc"):
                continue
            keyword = prefix + splitext(filename)[0]
            load_vocab_from_file(join(dirpath, filename), keyword, bus)
def load_regex(basedir, bus, skill_id):
    """Load regex from all files in the specified directory.

    Recursively scans *basedir* and registers every ``*.rx`` file with the
    intent service.

    Args:
        basedir (str): path of directory to load from
        bus (messagebus emitter): messagebus instance used to send the vocab to
            the intent service
        skill_id (str): skill identifier
    """
    for dirpath, _, filenames in walk(basedir):
        for filename in filenames:
            if not filename.endswith(".rx"):
                continue
            load_regex_from_file(join(dirpath, filename), bus, skill_id)
def to_alnum(skill_id):
    """Convert a skill id to only alphanumeric characters

    Non alpha-numeric characters are converted to "_"

    Args:
        skill_id (str): identifier to be converted

    Returns:
        (str) String of letters
    """
    chars = []
    for ch in str(skill_id):
        chars.append(ch if ch.isalnum() else '_')
    return ''.join(chars)
def munge_regex(regex, skill_id):
    """Insert skill id as letters into match groups.

    Every named group opener ``(?P<`` gets the skill-id letters appended, so
    group names from different skills cannot collide.

    Args:
        regex (str): regex string
        skill_id (str): skill identifier

    Returns:
        (str) munged regex
    """
    return regex.replace('(?P<', '(?P<' + to_alnum(skill_id))
def munge_intent_parser(intent_parser, name, skill_id):
    """Rename intent keywords to make them skill exclusive

    This gives the intent parser an exclusive name in the
    format <skill_id>:<name>. The keywords are given unique
    names in the format <Skill id as letters><Intent name>.

    The function will not munge instances that have already been
    munged.

    Args:
        intent_parser: (IntentParser) object to update
        name: (str) Skill name
        skill_id: skill identifier
    """
    # Munge parser name, unless it already carries the "<skill_id>:" prefix
    if str(skill_id) + ':' not in name:
        intent_parser.name = str(skill_id) + ':' + name
    else:
        intent_parser.name = name

    # Munge keywords: prefix each keyword name with the skill-id letters
    skill_id = to_alnum(skill_id)

    def _munge_keywords(keywords):
        # (name, name) pairs; skip entries that already contain the prefix.
        munged = []
        for kw in keywords:
            if skill_id not in kw[0]:
                munged.append((skill_id + kw[0], skill_id + kw[0]))
            else:
                munged.append(kw)
        return munged

    intent_parser.requires = _munge_keywords(intent_parser.requires)
    intent_parser.optional = _munge_keywords(intent_parser.optional)

    # at_least_one entries are tuples of alternatives; strip any existing
    # prefix before re-applying so the operation is idempotent.
    at_least_one = []
    for i in intent_parser.at_least_one:
        element = [skill_id + e.replace(skill_id, '') for e in i]
        at_least_one.append(tuple(element))
    intent_parser.at_least_one = at_least_one
|
icamgo/esp-idf | components/idf_test/integration_test/TestCaseScript/TCPStress/TCPConnection.py | Python | apache-2.0 | 12,186 | 0.001969 | import random
import re
import socket
import threading
import time
import TCPConnectionUtility
from NativeLog import NativeLog
from TCAction import PerformanceTCBase
# [min, max] bounds (milliseconds) for random delays used by the stress test
DELAY_RANGE = [10, 3000]
# field names describing one tracked TCP connection
CONNECTION_STRUCTURE = ("Connection handler", "PC socket", "Target socket id",
                        "Target port", "PC port", "PC state", "Target state")
# max fail count for one connection during test
MAX_FAIL_COUNT = 10
class CheckerBase(threading.Thread):
    """Daemon thread that repeatedly runs process() until exit() is called.

    Subclasses override process(); registered check items (condition/notifier
    pairs) are shared state guarded by sync_lock.
    """

    CHECK_ITEM = ("CONDITION", "NOTIFIER", "ID", "DATA")
    SLEEP_TIME = 0.1  # sleep 100ms between each check action

    def __init__(self):
        threading.Thread.__init__(self)
        self.setDaemon(True)
        self.exit_event = threading.Event()
        self.sync_lock = threading.Lock()
        self.check_item_list = []
        self.check_item_id = 0

    def run(self):
        # Poll until exit() is requested; subclasses do the work in process().
        while not self.exit_event.isSet():
            self.process()

    def process(self):
        """Hook for subclasses; the base implementation does nothing."""
        pass

    def add_check_item(self, condition, notifier):
        """Register a (condition, notifier) pair; returns the item's unique id."""
        with self.sync_lock:
            item_id = self.check_item_id
            self.check_item_id += 1
            self.check_item_list.append({
                "CONDITION": condition,
                "NOTIFIER": notifier,
                "ID": item_id,
                "DATA": "",
            })
        return item_id

    def remove_check_item(self, check_item_id):
        """Unregister an item; returns its accumulated DATA, or None if absent."""
        result = None
        with self.sync_lock:
            for item in self.check_item_list:
                if item["ID"] == check_item_id:
                    self.check_item_list.remove(item)
                    result = item["DATA"]
                    break
        return result

    def exit(self):
        """Signal run() to stop after the current iteration."""
        self.exit_event.set()
# check on serial port
class SerialPortChecker(CheckerBase):
    """Checker that scans data pulled from a serial-port reader callback."""

    def __init__(self, serial_reader):
        CheckerBase.__init__(self)
        self.serial_reader = serial_reader

    @staticmethod
    def do_check(check_item, data):
        # the check condition for serial is a compiled regular expression
        found = check_item["CONDITION"].search(data)
        if found is None:
            return -1
        matched_text = found.group()
        end_pos = data.find(matched_text) + len(matched_text)
        # notify user
        check_item["NOTIFIER"]("serial", found)
        return end_pos

    def process(self):
        with self.sync_lock:
            # pull whatever arrived since the last poll, then re-check
            # every registered item against its accumulated data
            fresh = self.serial_reader()
            for item in self.check_item_list:
                item["DATA"] += fresh
                self.do_check(item, item["DATA"])
        time.sleep(self.SLEEP_TIME)
# handle PC TCP server accept and notify user
class TCPServerChecker(CheckerBase):
    """Checker that accepts connections on a PC-side TCP server socket and
    hands each accepted socket to whoever registered for its source port."""

    def __init__(self, server_sock):
        CheckerBase.__init__(self)
        self.server_sock = server_sock
        # bounded accept(): time out once per polling period
        server_sock.settimeout(self.SLEEP_TIME)
        self.accepted_socket_list = []

    @staticmethod
    def do_check(check_item, data):
        # the check condition for an accepted socket is its TCP source port
        for pair in data:
            sock, addr = pair
            if addr[1] != check_item["CONDITION"]:
                continue
            # same port: this is the matching socket — notify its owner
            # and drop the pair from the pending list
            check_item["NOTIFIER"]("tcp", sock)
            data.remove(pair)

    def process(self):
        # try to accept one connection per polling period
        try:
            accepted = self.server_sock.accept()
            self.accepted_socket_list.append(accepted)
        except socket.error:
            pass
        with self.sync_lock:
            for item in self.check_item_list:
                self.do_check(item, self.accepted_socket_list)
# this thread handles one tcp connection.
class ConnectionHandler(threading.Thread):
CHECK_FREQ | = CheckerBase.SLEEP_TIME/2
def __init__(self, utility, serial_checker, tcp_checker, connect_method, disconnect_method, test_case):
threading.Thread.__init__(self)
self.setDaemon(True)
self.utility = utility
self.connect_method = connect_method
self.disconnect_method = disconnect_method
self.exit_event = threading.E | vent()
# following members are used in communication with checker threads
self.serial_checker = serial_checker
self.tcp_checker = tcp_checker
self.serial_notify_event = threading.Event()
self.tcp_notify_event = threading.Event()
self.serial_result = None
self.tcp_result = None
self.serial_check_item_id = None
self.tcp_check_item_id = None
self.data_cache = None
self.fail_count = 0
self.test_case = test_case
pass
def log_error(self):
self.fail_count += 1
if self.fail_count > MAX_FAIL_COUNT:
self.test_case.error_detected()
def new_connection_structure(self):
connection = dict.fromkeys(CONNECTION_STRUCTURE, None)
connection["Connection handler"] = self
return connection
def run(self):
while self.exit_event.isSet() is False:
connection = self.new_connection_structure()
# do connect
connect_method_choice = random.choice(self.connect_method)
if self.utility.execute_tcp_method(connect_method_choice, connection) is False:
self.log_error()
# check if established
if self.utility.is_established_state(connection) is True:
time.sleep(float(random.randint(DELAY_RANGE[0], DELAY_RANGE[1]))/1000)
# do disconnect if established
disconnect_method_choice = random.choice(self.disconnect_method)
if self.utility.execute_tcp_method(disconnect_method_choice, connection) is False:
self.log_error()
# make sure target socket closed
self.utility.close_connection(connection)
time.sleep(float(random.randint(DELAY_RANGE[0], DELAY_RANGE[1]))/1000)
pass
# serial_condition: re string
# tcp_condition: target local port
def add_checkers(self, serial_condition=None, tcp_condition=None):
# cleanup
self.serial_result = None
self.tcp_result = None
self.serial_notify_event.clear()
self.tcp_notify_event.clear()
# serial_checker
if serial_condition is not None:
pattern = re.compile(serial_condition)
self.serial_check_item_id = self.serial_checker.add_check_item(pattern, self.notifier)
else:
# set event so that serial check always pass
self.serial_notify_event.set()
if tcp_condition is not None:
self.tcp_check_item_id = self.tcp_checker.add_check_item(tcp_condition, self.notifier)
else:
# set event so that tcp check always pass
self.tcp_notify_event.set()
# NativeLog.add_trace_info("[Debug] add check item %s, connection is %s" % (self.serial_check_item_id, self))
pass
def get_checker_results(self, timeout=5):
time1 = time.time()
while time.time() - time1 < timeout:
# if one type of checker is not set, its event will be set in add_checkers
if self.serial_notify_event.isSet() is True and self.tcp_notify_event.isSet() is True:
break
time.sleep(self.CHECK_FREQ)
# do cleanup
# NativeLog.add_trace_info("[Debug] remove check item %s, connection is %s" % (self.serial_check_item_id, self))
self.data_cache = self.serial_checker.remove_check_item(self.serial_check_item_id)
self.tcp_checker.remove_check_item(self.tcp_check_item_id)
# self.serial_check_item_id = None
# self.tcp_check_item_id = None
return self.serial_result, self.tcp_result
def notifier(self, typ, result):
if ty |
qilicun/python | python2/PyMOTW-1.132/PyMOTW/threading/threading_daemon.py | Python | gpl-3.0 | 649 | 0.007704 | #!/usr/bi | n/env python
# encoding: utf-8
#
# Copyright (c) 2008 Doug Hellmann All rights reserved.
#
"""Daemon vs. non-daemon threads.
"""
#end_pymotw_header
import threading
import time
import logging
logging.basicConfig(level=logging.DEBUG,
format='(%(threadName)-10s) %(message)s',
)
def daemon():
    """Daemon-thread payload: log, linger 2s, log again on the way out.

    The sleep is intended to outlast the non-daemon work so the example
    shows the daemon thread being cut off at interpreter exit.
    """
    for message, pause in (('Starting', 2), ('Exiting', 0)):
        logging.debug(message)
        if pause:
            time.sleep(pause)
d = threading.Thread(name='daemon', target=daemon)
# Mark the thread as a daemon *before* start(); assigning the ``daemon``
# attribute is the supported spelling — Thread.setDaemon() is a
# deprecated alias for the same thing.
d.daemon = True
def non_daemon():
    """Non-daemon worker: the interpreter waits for this one to finish."""
    for message in ('Starting', 'Exiting'):
        logging.debug(message)
t = threading.Thread(name='non-daemon', target=non_daemon)
# Start both threads; the program may exit once only the daemon thread
# remains, so the still-sleeping daemon never gets to log 'Exiting'.
d.start()
t.start()
|
rtam2166/XBAS | task_share.py | Python | mit | 3,530 | 0.011331 | # -*- coding: utf-8 -*-
#
## @file task_share.py
# This file contains classes which allow tasks to share data without the risk
# of data corruption by interrupts.
#
# @copyright This program is copyright (c) JR Ridgely and released under the
# GNU Public License, version 3.0.
import array
import gc
import pyb
import micropython
## This is a system-wide list of all the queues and shared variables. It is
# used to create diagnostic printouts.
share_list = []
class Share:
    """ This class implements a shared data item which can be protected
    against data corruption by pre-emptive multithreading. Multithreading
    which can corrupt shared data includes the use of ordinary interrupts as
    well as the use of a Real-Time Operating System (RTOS). """

    ## A counter used to give serial numbers to shares for diagnostic use.
    ser_num = 0

    def __init__ (self, type_code, thread_protect = True, name = None):
        """ Allocate memory in which the shared data will be buffered. The
        data type code is given as for the Python 'array' type, which
        can be any of
        * b (signed char), B (unsigned char)
        * h (signed short), H (unsigned short)
        * i (signed int), I (unsigned int)
        * l (signed long), L (unsigned long)
        * q (signed long long), Q (unsigned long long)
        * f (float), or d (double-precision float)
        @param type_code The type of data items which the share can hold
        @param thread_protect True if mutual exclusion protection is used
        @param name A short name for the share, default @c ShareN where @c N
            is a serial number for the share """
        self._buffer = array.array (type_code, [0])
        self._thread_protect = thread_protect
        # PEP 8: compare against None with 'is not', never '!='
        self._name = str (name) if name is not None \
            else 'Share' + str (Share.ser_num)
        # Fix: advance the serial counter so each anonymous share gets a
        # unique default name, as documented (it was never incremented)
        Share.ser_num += 1
        # Add this share to the global share and queue list
        share_list.append (self)

    @micropython.native
    def put (self, data, in_ISR = False):
        """ Write an item of data into the share. Any old data is overwritten.
        This code disables interrupts during the writing so as to prevent
        data corrupting by an interrupt service routine which might access
        the same data.
        @param data The data to be put into this share
        @param in_ISR Set this to True if calling from within an ISR """
        # Disable interrupts before writing the data
        if self._thread_protect and not in_ISR:
            irq_state = pyb.disable_irq ()
        self._buffer[0] = data
        # Re-enable interrupts
        if self._thread_protect and not in_ISR:
            pyb.enable_irq (irq_state)

    @micropython.native
    def get (self, in_ISR = False):
        """ Read an item of data from the share. Interrupts are disabled as
        the data is read so as to prevent data corruption by changes in
        the data as it is being read.
        @param in_ISR Set this to True if calling from within an ISR """
        # Disable interrupts before reading the data
        if self._thread_protect and not in_ISR:
            irq_state = pyb.disable_irq ()
        to_return = self._buffer[0]
        # Re-enable interrupts
        if self._thread_protect and not in_ISR:
            pyb.enable_irq (irq_state)
        return (to_return)

    def __repr__ (self):
        """ This method puts diagnostic information about the share into a
        string. """
        return ('{:<12s} Share'.format (self._name))
gtoonstra/remap | examples/pagerank/pagerank.py | Python | mit | 1,106 | 0.028933 | import remap
# --- create file i/o objects to be used ----
def create_vertex_reader( filename ):
    # Read the vertex file as raw text lines (yieldkv=False: no key/value
    # splitting — prepare() below parses each line itself).
    return remap.TextFileReader( filename, yieldkv=False )
def create_vertex_partitioner( outputdir, partition, mapperid ):
    # Write this mapper's share of vertices as text into its partition
    # under outputdir.
    return remap.TextPartitioner( outputdir, partition, mapperid )
NUM_VERTICES = 10
# ---- pagerank vertex implementation ----
def prepare(line):
    """Parse one adjacency-list line: 'vertex_id neighbor ...'.

    Returns (vertex_id, (initial_rank, outgoing_edges)); blank lines
    yield (None, None). The initial rank is uniform: 1/NUM_VERTICES.
    """
    stripped = line.strip()
    if not stripped:
        return None, None
    tokens = stripped.split()
    outgoing = [tok for tok in tokens[1:] if tok]
    return tokens[0], (1.0 / NUM_VERTICES, outgoing)
def compute(send_fn, superstep, vertex, messages):
    """One Pregel-style PageRank superstep for a single vertex.

    vertex is (rank, outgoing_edges); messages are stringified rank
    contributions from in-neighbors. Returns (vertex, halted) — the
    vertex votes to halt after 30 supersteps.
    """
    rank, edges = vertex
    if superstep >= 1:
        # standard PageRank update with damping factor 0.85
        incoming = 0
        for data in messages:
            incoming = incoming + float(data)
        rank = 0.15 / NUM_VERTICES + 0.85 * incoming
        vertex = (rank, edges)
    if superstep >= 30:
        return vertex, True
    # distribute this vertex's rank equally over its outgoing edges
    for neighbour in edges:
        send_fn(neighbour, "%f" % (rank / len(edges)))
    return vertex, False
|
WoLpH/EventGhost | plugins/XBMCEventReceiver/__init__.py | Python | gpl-2.0 | 8,990 | 0.010791 | # -*- coding: utf-8 -*-
#
# plugins/XBMCEventReceiver/__init__.py
#
# This file is a plugin for EventGhost.
# Copyright © 2005-2016 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
#
# This plugin is based on the Broadcaster plugin that was originally provided
# by the good work from Kingtd. I have used it as a basis for true 2-way control
# too and from XBMC. The following improvements have been made to v2.0:
#
# - Enabled additional fields for configuration of HTTP.API destination in setup
# - Enabled the invoking of XBMC Host Broadcast function with in the script
# (so it is not necessary to do it manually)
# - Cleaned up some of the code.
# - Fixed error when trying to reconfigure plugin
#
# Future enhancements to make for future ver. as time permits such as:
#
# - Extending response functionality as it applies to XBMC (once it is implemented @ the XBMC Host).
# - Additional parsing of input from XBMC host
#
# If you have any additional comments or suggestions feel free to contact me at vortexrotor@vortexbb.com
import eg
eg.RegisterPlugin(
name = "XBMC Event Receiver",
author = "vortexrotor",
version = "2.0.5",
kind = "program",
guid = "{9872BD49-2022-4F1B-B362-85F1ED203B7E}",
description = (
"Receives events from XBMC Host UDP Broadcasts."
),
url = "http://www.eventghost.net/forum/viewtopic.php?f=10&t=2140",
icon = (
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsRAAALEQF/ZF+RAAAA"
"BGdBTUEAALGeYUxB9wAAACBjSFJNAAB6fAAAfosAAPoBAACHPAAAbUoAAPFDAAA2IAAAHlNX4WK7"
"AAACYElEQVR42tTTPW8ScQDH8d/9n44HESgFKQjFWku1rUYdTEAT0sGHSU10cHZz0piYjqYxbia6"
"+hB3Y0w3kzq0scaojdZqYiMp1dQWAlLg4A7ugbvzRXTy8wK+21dyXRe7QbBLuw6wG3ceJnJnLuTL"
"279DrutySBIhEnUopSbjtMsYVzkXLSFEU5Y9dY/XW5Nlr207DpRWE+zzp6VHxWLpstpWKaEA5RT9"
"XgCcC/jDDihjOpWI6vF4WkLweigULgdD4R/p4ZH30X1Dr6XhbK4i/OH43qSKVikJLhhGz26AEo61"
"+Qz0roWR8RDWixtIJKP4/mUVA5EgkvvjOHEy/1FKj+XnwpHMxdipIhJH29C2o0hMVmH1KJQyxWTw"
"FuKhKYCbaDUVOI4LwzKxOD8PAvkrMazOW1uSUH43ilCqgUYphvJyBitzKUyfLiCVBe7PPkVzp4l7"
"dx9g+lwB5T9bePPqJTIjB4v0uqmVi4cHbx67UkFteRjRAx30mgEcym9iZz2NpRcyfAM6Om0Nruui"
"sr2F8SNZuIQjEhl6Lj0LAY8Hcwtq6nwhStuQJB8sWOh3fTClBgIDOhj1wDAtcEFRq/5FW+shPRRF"
"diyTYJNe4Kr1bfaJHiv0qAtBKTgX4D6CAJXAbQIhaYhyG16iIxvpwEfW0BITM75YrsJm3Ah6SnfB"
"kCtzWmLikmabYLYAIRxO34Zp6nAs9VdX6xSVRn2lb7QWe2b3w9RxplwLy2AL8AOMIa5s3vb6gzUm"
"+5mh1XXL0Lq2pVRVQ2z66J6fpLdaMqu6KjwUXo8XnFH0+w6k/3+mfwMAzwT87LI0qNEAAAAASUVO"
"RK5CYII="
),
)
import wx
import os
import asyncore
import socket
import urllib
class Text:
    # Labels shown by the plugin's configuration dialog (EventGhost
    # looks these up via ``self.text`` on the plugin class).
    eventPrefix = "Event prefix:"
    xbmcip = "XBMC Host IP:"
    xbmchttpport = "XBMC HTTP Port:"
    zone = "XBMC Broadcast IP:"
    port = "UDP port:"
    selfXbmceventbroadcast = "Respond to Self Broadcast"
    delim = "Payload Delimiter"
class Xbmceventbroadcast:
    # NOTE(review): distinct from the eg.ActionWithStringParameter subclass
    # of the same name defined inside XbmceventbroadcastListener below; this
    # one appears unused apart from its name attribute — confirm before
    # removing.
    name="Xbmceventbroadcast"
class Server(asyncore.dispatcher):
    """asyncore UDP listener (Python 2 / EventGhost runtime) that turns
    XBMC broadcast datagrams into EventGhost events via TriggerEvent()."""
    def __init__(self, port, selfXbmceventbroadcast,payDelim, plugin):
        self.selfXbmceventbroadcast=selfXbmceventbroadcast
        self.plugin=plugin
        self.port=port
        self.payDelim=payDelim
        asyncore.dispatcher.__init__(self)
        # local IP, used to recognise (and optionally ignore) our own packets
        self.ipadd = socket.gethostbyname(socket.gethostname())
        self.create_socket(socket.AF_INET, socket.SOCK_DGRAM)
        eg.RestartAsyncore()
        self.bind(('', port))
    def handle_connect(self):
        print "XBMC Event Broadcast listener ENABLED on Local Host @ " + self.ipadd + ":" + str(self.port) + ". Response to XBMC is " + str(self.selfXbmceventbroadcast) + "."
        pass
    def handle_read(self):
        # one datagram per event; skip our own broadcasts unless the
        # "respond to self" option is enabled
        data, addr = self.recvfrom(1024)
        if (self.ipadd != addr[0]) or self.selfXbmceventbroadcast:
            data = unicode(data, 'UTF8')
            #print data
            # first delimiter-separated chunk is the event suffix, the
            # remaining chunk(s) become the event payload
            bits = data.split(str(self.payDelim))
            commandSize=len(bits)
            if commandSize==1:
                self.plugin.TriggerEvent(bits[0])
            if commandSize==2:
                self.plugin.TriggerEvent(bits[0],bits[1])
            if commandSize>2:
                self.plugin.TriggerEvent(bits[0],bits[1:])
    def writable(self):
        return False # we don't have anything to send !
class XbmceventbroadcastListener(eg.PluginClass):
canMultiLoad = True
text = Text
def __init__(self):
self.AddAction(self.Xbmceventbroadcast)
def __start__(self, prefix=None, xbmcip="None", xbmchttpport=8080, zone="255.255.255.255", port=8279, selfXbmceventbroadcast=False, payDelim="&&"):
self.info.eventPrefix = prefix
self.xbmcip = xbmcip
self.xbmchttpport = xbmchttpport
self.port = port
self.payDelim=payDelim
self.zone = zone
self.selfXbmceventbroadcast=selfXbmceventbroadcast
try:
self.server = Server(self.port, self.selfXbmceventbroadcast, self.payDelim, self)
except socket.error, exc:
raise self.Exception(exc[1])
def __stop__(self):
if self.server:
self.server.close()
self.server = None
def Configure(self, prefix="XBMC-Event", xbmcip="192.168.1.1", xbmchttpport=8080, zone="224.0.0.2", port=8278, selfXbmceventbroadcast=False, payDelim="<b></b>"):
panel = eg.ConfigPanel(self)
editCtrl = panel.TextCtrl(prefix)
xbmcipCtrl = panel.TextCtrl(xbmcip)
xbmchttpportCtrl = panel.SpinIntCtrl(xbmchttpport, min=1, max=65535)
zoneCtrl = panel.TextCtrl(zone)
portCtrl = panel.SpinIntCtrl(port, min=1, max=65535)
selfXbmceventbroadcastCtrl=panel.CheckBox(selfXbmceventbroadcast)
payDelimCtrl = panel.TextCtrl(payDelim)
panel.AddLine(self.text.eventPrefix, editCtrl)
panel.AddLine(self.text.xb | mcip, xbmcipCtrl)
panel.AddLine(self.text.xbmchttpport, xbmchttpportCtrl)
panel.AddLine(self.text.zone, zoneCtrl)
panel.AddLine(self.text.port, portCtrl)
panel.AddLine(self.text.selfXbmceventbroadcast,selfXbmceventbroadcastCtrl)
panel.AddLine("Payload Delimiter", payDelimCtrl)
while panel.Affirmed():
panel.SetResul | t(editCtrl.GetValue(),xbmcipCtrl.GetValue(),int(xbmchttpportCtrl.GetValue()),zoneCtrl.GetValue(),int(portCtrl.GetValue()),selfXbmceventbroadcastCtrl.GetValue(), payDelimCtrl.GetValue() )
v_header = urllib.quote("This is the Header")
v_message = urllib.quote("This is the Message")
host_xbmc = xbmcipCtrl.GetValue()
port_xbmc = int(xbmchttpportCtrl.GetValue())
udp_xbmc = int(portCtrl.GetValue())
url_xbmc = "http://" + str(host_xbmc) + ":" + str(port_xbmc) + "/xbmcCmds/xbmcHttp?command=SetBroadcast¶meter=2;" + str(udp_xbmc) + "(Notification(" + v_header + "," + v_message + "))"
print "str(url_xbmc)"
try:
urllib.urlopen(url_xbmc)
except IOError:
print 'Connection error'
class Xbmceventbroadcast(eg.ActionWithStringParameter):
def __call__(self, mesg, payload=""):
res = self.bcastSend(mesg, payload)
return res
def bcastSend(self, eventString, payload=""):
addr = (self.plugin.zone, self.plugin.port)
UDPSock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # Create socket
UDPSock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
if (payload==None):
UDPSock.sendto(eg.ParseString(eventString),addr)
else:
UDPSock.sen |
kensho-technologies/graphql-compiler | graphql_compiler/interpreter/expression_ops/operators.py | Python | apache-2.0 | 2,903 | 0.002067 | from typing import Any, Callable, Optional
# Define the various operators' behavior for values other than None.
# The behavior with respect to None is defined explicitly in the "apply_operator()" function.
_operator_definitions_for_non_null_values = {
    # comparison operators
    "=": lambda left, right: left == right,
    ">": lambda left, right: left > right,
    ">=": lambda left, right: left >= right,
    "<": lambda left, right: left < right,
    "<=": lambda left, right: left <= right,
    "!=": lambda left, right: left != right,
    # collection / substring membership (left is the container)
    "contains": lambda left, right: right in left,
    "not_contains": lambda left, right: right not in left,
    "has_substring": lambda left, right: right in left,
    "not_has_substring": lambda left, right: right not in left,
    "starts_with": lambda left, right: left.startswith(right),
    "ends_with": lambda left, right: left.endswith(right),
    # membership with the collection on the right
    "in_collection": lambda left, right: left in right,
    # boolean connectives
    "&&": lambda left, right: left and right,
    "||": lambda left, right: left or right,
}
def apply_operator(operator: str, left_value: Any, right_value: Any) -> Any:
    """Evaluate a binary filter operator with SQL-like None semantics.

    Comparisons involving None generally produce False, except:
    - None is equal to (and >=, <=) None;
    - None != None is False;
    - None is never greater/less than, contained in, or containing anything.
    """
    # TODO(predrag): Implement the "filtering with non-existing tag from
    # optional scope passes" rule. See matching TODO note in the handling
    # of Filter IR blocks.
    left_is_null = left_value is None
    right_is_null = right_value is None
    if left_is_null and right_is_null:
        # None = None (and the reflexive >=, <=) is True; every other
        # operator applied to a pair of Nones — including != — is False.
        return operator in {"=", ">=", "<="}
    if left_is_null or right_is_null:
        # Exactly one side is None: defined to be False for all operators.
        return False
    # Neither side is None: dispatch to the regular operator definition.
    handler: Optional[Callable[[Any, Any], Any]] = (
        _operator_definitions_for_non_null_values.get(operator)
    )
    if handler is None:
        raise NotImplementedError(f"Operator {operator} is not currently implemented.")
    return handler(left_value, right_value)
|
jakesyl/BitTornado | BitTornado/Client/Statistics.py | Python | mit | 6,510 | 0.000307 | import threading
class Statistics_Response:
    # Plain attribute bag; Statistics.update() populates it dynamically.
    pass # empty class
class Statistics:
    """Aggregates transfer/progress figures from the download subsystems
    into a single Statistics_Response snapshot (see update())."""
    def __init__(self, upmeasure, downmeasure, connecter, httpdl,
                 ratelimiter, rerequest_lastfailed, fdatflag):
        self.upmeasure = upmeasure
        self.downmeasure = downmeasure
        self.connecter = connecter
        self.httpdl = httpdl
        self.ratelimiter = ratelimiter
        # convenience aliases into objects hanging off the connecter
        self.downloader = connecter.downloader
        self.picker = connecter.downloader.picker
        self.storage = connecter.downloader.storage
        self.torrentmeasure = connecter.downloader.totalmeasure
        self.rerequest_lastfailed = rerequest_lastfailed
        self.fdatflag = fdatflag
        self.fdatactive = False
        # per-file tracking stays disabled until set_dirstats() is called
        self.piecescomplete = None
        self.placesopen = None
        self.storage_totalpieces = len(self.storage.hashes)
    def set_dirstats(self, files, piece_length):
        """Enable per-file progress tracking (multi-file torrents).
        ``files`` is a sequence of (name, length) pairs."""
        self.piecescomplete = 0
        self.placesopen = 0
        self.filelistupdated = threading.Event()
        self.filelistupdated.set()
        nfiles = len(files)
        frange = range(nfiles)
        # filepieces[i]: pieces of file i not yet seen as downloaded;
        # filepieces2[i]: all pieces overlapping file i (reference copy)
        self.filepieces = [[] for _ in frange]
        self.filepieces2 = [[] for _ in frange]
        self.fileamtdone = [0.0] * nfiles
        self.filecomplete = [False] * nfiles
        self.fileinplace = [False] * nfiles
        start = 0
        for i in frange:
            l = files[i][1]
            if l == 0:
                # zero-length file: trivially complete and in place
                self.fileamtdone[i] = 1.0
                self.filecomplete[i] = True
                self.fileinplace[i] = True
            else:
                fp = self.filepieces[i]
                fp2 = self.filepieces2[i]
                # every piece index overlapping byte range [start, start+l)
                for piece in range(int(start / piece_length),
                                   int((start + l - 1) / piece_length) + 1):
                    fp.append(piece)
                    fp2.append(piece)
            start += l
    def update(self):
        """Build and return a fresh Statistics_Response snapshot."""
        s = Statistics_Response()
        s.upTotal = self.upmeasure.get_total()
        s.downTotal = self.downmeasure.get_total()
        s.last_failed = self.rerequest_lastfailed()
        s.external_connection_made = self.connecter.external_connection_made
        # share rating = uploaded/downloaded; 0.0 when nothing transferred,
        # -1.0 when uploading without having downloaded anything
        if s.downTotal > 0:
            s.shareRating = float(s.upTotal) / s.downTotal
        elif s.upTotal == 0:
            s.shareRating = 0.0
        else:
            s.shareRating = -1.0
        s.torrentRate = self.torrentmeasure.get_rate()
        s.torrentTotal = self.torrentmeasure.get_total()
        s.numSeeds = self.picker.seeds_connected
        s.numOldSeeds = self.downloader.num_disconnected_seeds()
        s.numPeers = len(self.downloader.downloads) - s.numSeeds
        # fractional "distributed copies" figure from the piece picker's
        # availability cross-counts
        s.numCopies = 0.0
        for i in self.picker.crosscount:
            if i == 0:
                s.numCopies += 1
            else:
                s.numCopies += 1 - float(i) / self.picker.numpieces
                break
        if self.picker.done:
            s.numCopies2 = s.numCopies + 1
        else:
            s.numCopies2 = 0.0
            for i in self.picker.crosscount2:
                if i == 0:
                    s.numCopies2 += 1
                else:
                    s.numCopies2 += 1 - float(i) / self.picker.numpieces
                    break
        s.discarded = self.downloader.discarded
        # HTTP seeds count toward both current and historical seed totals
        s.numSeeds += self.httpdl.seedsfound
        s.numOldSeeds += self.httpdl.seedsfound
        if s.numPeers == 0 or self.picker.numpieces == 0:
            s.percentDone = 0.0
        else:
            s.percentDone = 100.0 * (float(self.picker.totalcount) /
                                     self.picker.numpieces) / s.numPeers
        s.backgroundallocating = self.storage.bgalloc_active
        s.storage_totalpieces = len(self.storage.hashes)
        s.storage_active = len(self.storage.stat_active)
        s.storage_new = len(self.storage.stat_new)
        s.storage_dirty = len(self.storage.dirty)
        numdownloaded = self.storage.stat_numdownloaded
        s.storage_justdownloaded = numdownloaded
        s.storage_numcomplete = self.storage.stat_numfound + numdownloaded
        s.storage_numflunked = self.storage.stat_numflunked
        s.storage_isendgame = self.downloader.endgamemode
        s.peers_kicked = self.downloader.kicked.items()
        s.peers_banned = self.downloader.banned.items()
        # clamp obviously bogus upload-rate settings to 0 (kB/s)
        try:
            s.upRate = int(self.ratelimiter.upload_rate / 1000)
            assert s.upRate < 5000
        except AssertionError:
            s.upRate = 0
        s.upSlots = self.ratelimiter.slots
        if self.piecescomplete is None: # not a multi-file torrent
            return s
        # ---- per-file bookkeeping (set_dirstats() was called) ----
        if self.fdatflag.is_set():
            if not self.fdatactive:
                self.fdatactive = True
        else:
            self.fdatactive = False
        if self.piecescomplete != self.picker.numgot:
            # pieces arrived since the last snapshot: refresh per-file
            # completion fractions and flags
            for i, complete in enumerate(self.filecomplete):
                if complete:
                    continue
                oldlist = self.filepieces[i]
                newlist = [piece for piece in oldlist
                           if not self.storage.have[piece]]
                if len(newlist) != len(oldlist):
                    self.filepieces[i] = newlist
                    self.fileamtdone[i] = (
                        (len(self.filepieces2[i]) - len(newlist)) /
                        float(len(self.filepieces2[i])))
                    if not newlist:
                        self.filecomplete[i] = True
                    self.filelistupdated.set()
            self.piecescomplete = self.picker.numgot
        if self.filelistupdated.is_set() or \
                self.placesopen != len(self.storage.places):
            # a complete file becomes "in place" once every one of its
            # pieces sits at its final storage location; it is then made
            # read-only
            for i, complete in enumerate(self.filecomplete):
                if not complete or self.fileinplace[i]:
                    continue
                while self.filepieces2[i]:
                    piece = self.filepieces2[i][-1]
                    if self.storage.places[piece] != piece:
                        break
                    del self.filepieces2[i][-1]
                if not self.filepieces2[i]:
                    self.fileinplace[i] = True
                    self.storage.set_file_readonly(i)
                    self.filelistupdated.set()
            self.placesopen = len(self.storage.places)
        s.fileamtdone = self.fileamtdone
        s.filecomplete = self.filecomplete
        s.fileinplace = self.fileinplace
        s.filelistupdated = self.filelistupdated
        return s
|
apeterson5813/nopower | tests/test_models.py | Python | mit | 294 | 0.006803 | from unitte | st import TestCase
from playhouse.test_utils import test_database
from peewee import *
from nopower.models import GeneratorList, Outage, GenerationResource
test_db = SqliteDatabase(':memory:')
class TestGeneratorListOutage(TestCase):
    def create_test_data(self):
        # placeholder: no fixtures are created yet for test_db
        pass
|
VHAINNOVATIONS/GE-Pressure-Ulcer | python_gui_decision_support_webportal/python/prevention_clinical_data.py | Python | apache-2.0 | 2,367 | 0.014787 | from sqlalchemy import Column, Integer, String, DateTime, Boolean
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class PreventionClinicalData(Base):
    """
    Definition of PreventionClinicalData object. It will be used by SQLAlchemy's ORM to map the object to
    the prevention_clinical_data table.
    Methods:
        setFromData(data) - sets all data fields
    """
    __tablename__ = 'prevention_clinical_data'
    id = Column('id',Integer, primary_key=True)
    patient_id = Column('patient_id',Integer)
    clinical_rounding_time = Column('clinical_rounding_time',DateTime)
    repositioning_flag = Column('repositioning_flag',Boolean)
    final_position = Column('final_position',String)
    repositioning_description = Column('repositioning_description',String)
    # NOTE(review): keyCol/editCols*/displayTableName appear to drive a
    # generic edit-grid UI elsewhere in the portal — confirm against callers.
    keyCol = 'id'
    editCols = ['clinical_rounding_time','repositioning_flag','final_position','repositioning_description']
    editColsLabels = ['Rounding Time','Repositioned?','Final Position','Notes']
    editColsTypes = ['date','boolean','string','string']
    displayTableName = 'Patient Prevention Clinical Data'
    def setFromData(self,data):
        """
        Sets all of the object fields
        Arguments:
            data - Dictionary containing the data
        """
        # self.id = data['id']
        self.patient_id = data['patient_id']
        self.clinical_rounding_time = data['clinical_rounding_time']
        self.repositioning_flag = data['repositioning_flag']
        self.final_position = data['final_position']
        self.repositioning_description = data['repositioning_description']
    def __json__(self, request):
        # JSON-serializable dict; datetime rendered as 'YYYY-MM-DD HH:MM:SS'
        # and the boolean flag as its string form
        return {'id':self.id, 'patient_id':self.patient_id, 'clinical_rounding_time':self.clinical_rounding_time.isoformat(' '),
                'repositioning_flag':str(self.repositioning_flag), 'final_position':self.final_position,
                'repositioning_description':self.repositioning_description }
    def __repr__(self):
        return "<PreventionClinicalData(id='%d', patient_id='%d', clinical_rounding_time='%s', repositioning_flag='%s', final_position='%s', repositioning_description='%s')>" % (
            self.id, self.patient_id, self.clinical_rounding_time, self.repositioning_flag,
            self.final_position, self.repositioning_description)
|
nirs/vdsm | tests/passwords_test.py | Python | gpl-2.0 | 6,754 | 0 | #
# Copyright 2015-2020 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from __future__ import division
import marshal
import pickle
from testlib import VdsmTestCase
from testlib import expandPermutations, permutations
from vdsm.common.compat import json
from vdsm.common.password import (
ProtectedPassword,
protect_passwords,
unprotect_passwords,
)
class ProtectedPasswordTests(VdsmTestCase):
def test_str(self):
p = ProtectedPassword("12345678")
self.assertNotIn("12345678", str(p))
def test_repr(self):
p = ProtectedPassword("12345678")
self.assertNotIn("12345678", repr(p))
def test_value(self):
p = ProtectedPassword("12345678")
self.assertEqual("12345678", p.value)
def test_eq(self):
p1 = ProtectedPassword("12345678")
p2 = ProtectedPassword("12345678")
self.assertEqual(p1, p2)
def test_ne(self):
p1 = ProtectedPassword("12345678")
p2 = ProtectedPassword("12345678")
self.assertFalse(p1 != p2)
def test_pickle_copy(self):
p1 = ProtectedPassword("12345678")
p2 = pickle.loads(pickle.dumps(p1))
self.assertEqual(p1, p2)
def test_no_marshal(self):
p1 = ProtectedPassword("12345678")
self.assertRaises(ValueError, marshal.dumps, p1)
def test_no_json(self):
p1 = ProtectedPassword("12345678")
self.assertRaises(TypeError, json.dumps, p1)
@expandPermutations
class ProtectTests(VdsmTestCase):
@permutations([[list()], [dict()], [tuple()]])
def test_protect_empty(self, params):
self.assertEqual(params, protect_passwords(params))
@permutations([[list()], [dict()], [tuple()]])
def test_unprotect_empty(self, result):
self.assertEqual(result, unprotect_passwords(result))
def test_protect_dict(self):
unprotected = dict_unprotected()
protected = dict_protected()
self.assertEqual(protected, protect_passwords(unprotected))
def test_unprotect_dict(self):
protected = dict_protected()
unprotected = dict_unprotected()
self.assertEqual(unprotect | ed, unprotect_passwords(protected))
def test_protect_nested_dicts(self):
unprotected = nested_dicts_unprotected()
protected = nested_dicts_protected()
self.assertEqual(protected, protect_passwords(unprotected))
def test_unprotect_nested_dicts(self):
protected = nested_dicts_protected()
unprotected = nested_dicts_unprotected()
self | .assertEqual(unprotected, unprotect_passwords(protected))
def test_protect_lists_of_dicts(self):
unprotected = lists_of_dicts_unprotected()
protected = lists_of_dicts_protected()
self.assertEqual(protected, protect_passwords(unprotected))
def test_unprotect_lists_of_dicts(self):
protected = lists_of_dicts_protected()
unprotected = lists_of_dicts_unprotected()
self.assertEqual(unprotected, unprotect_passwords(protected))
def test_protect_nested_lists_of_dicts(self):
unprotected = nested_lists_of_dicts_unprotected()
protected = nested_lists_of_dicts_protected()
self.assertEqual(protected, protect_passwords(unprotected))
def test_unprotect_nested_lists_of_dicts(self):
protected = nested_lists_of_dicts_protected()
unprotected = nested_lists_of_dicts_unprotected()
self.assertEqual(unprotected, unprotect_passwords(protected))
def dict_unprotected():
return {
"key": "value",
"_X_key": "secret",
"password": "12345678"
}
def dict_protected():
return {
"key": "value",
"_X_key": ProtectedPassword("secret"),
"password": ProtectedPassword("12345678")
}
def nested_dicts_unprotected():
return {
"key": "value",
"_X_key": "secret",
"nested": {
"password": "12345678",
"nested": {
"key": "value",
"_X_key": "secret",
"password": "87654321",
}
}
}
def nested_dicts_protected():
    """Expected protect_passwords() output for nested_dicts_unprotected()."""
    innermost = {
        "key": "value",
        "_X_key": ProtectedPassword("secret"),
        "password": ProtectedPassword("87654321"),
    }
    inner = {"password": ProtectedPassword("12345678"), "nested": innermost}
    return {"key": "value", "_X_key": ProtectedPassword("secret"), "nested": inner}
def lists_of_dicts_unprotected():
    """List of two dicts, each holding plaintext secrets."""
    return [
        {"key": "value", "_X_key": "secret", "password": pw}
        for pw in ("12345678", "87654321")
    ]
def lists_of_dicts_protected():
    """Expected protect_passwords() output for lists_of_dicts_unprotected()."""
    return [
        {
            "key": "value",
            "_X_key": ProtectedPassword("secret"),
            "password": ProtectedPassword(pw),
        }
        for pw in ("12345678", "87654321")
    ]
def nested_lists_of_dicts_unprotected():
    """Dict containing a list containing a dict containing a list of dicts."""
    leaf = {"key": "value", "_X_key": "secret", "password": "12345678"}
    return {"key": "value", "nested": [{"key": "value", "nested": [leaf]}]}
def nested_lists_of_dicts_protected():
    """Expected protect_passwords() output for nested_lists_of_dicts_unprotected()."""
    leaf = {
        "key": "value",
        "_X_key": ProtectedPassword("secret"),
        "password": ProtectedPassword("12345678"),
    }
    return {"key": "value", "nested": [{"key": "value", "nested": [leaf]}]}
|
openqt/algorithms | leetcode/python/ac/lc804-unique-morse-code-words.py | Python | gpl-3.0 | 2,281 | 0.003946 | # coding=utf-8
import unittest
"""804. Unique Morse Code Words
https://leetcode.com/problems/unique-morse-code-words/description/
International Morse Code defines a standard encoding where each letter is
mapped to a series of dots and dashes, as follows: `"a"` maps to `".-"`, `"b"`
maps to `"-..."`, `"c"` maps to `"-.-."`, and so on.
For convenience, the full table for the 26 letters of the English alphabet is
given below:
[".-","-...","-.-.","-..",".","..-.","--.","....","..",".---","-.-",".-..","--","-.","---",".--.","--.-",".-.","...","-","..-","...-",".--","-..-","-.--","--.."]
Now, given a list of words, each word can be written as a concatenation of the
Morse code of each letter. For example, "cab" can be written as "-.-.-....-",
(which is the concatenation "-.-." \+ "-..." \+ ".-"). We'll call such a
concatenation, the transformation of a word.
Return the number of different transformations among all words we have.
**Example:**
**Input:** words = ["gin", "zen", "gig", "msg"]
**Output:** 2
**Explanation:**
The transformation of each word is:
"gin" -> "--...-."
"zen" -> "--...-."
"gig" -> "--...--."
"msg" -> "--...--."
There are 2 different transformations, "--...-." and "--...--.".
**Note:**
* The length of `words` will be at most `100`.
* Each `words[i]` will have length in range `[1, 12]`.
* `words[i]` will only consist of lowercase letters.
Similar Questions:
"""
class Solution(object):
    """Count distinct Morse transformations of a word list (LeetCode 804)."""

    # Morse code for 'a'..'z'. A class-level constant: the original rebuilt
    # this list and assigned it to the instance on every call.
    CODE = [
        ".-", "-...", "-.-.", "-..", ".",
        "..-.", "--.", "....", "..", ".---",
        "-.-", ".-..", "--", "-.", "---",
        ".--.", "--.-", ".-.", "...", "-",
        "..-", "...-", ".--", "-..-", "-.--", "--.."]

    def uniqueMorseRepresentations(self, words):
        """Return the number of distinct Morse encodings of *words*.

        :type words: List[str]  (lowercase letters only, per the problem)
        :rtype: int
        """
        return len({self._trans(word) for word in words})

    def _trans(self, w):
        """Concatenate the Morse code of each lowercase letter in *w*."""
        return ''.join(self.CODE[ord(ch) - ord('a')] for ch in w)
class T(unittest.TestCase):
    # Smoke test using the example from the problem statement.
    def test(self):
        s = Solution()
        self.assertEqual(s.uniqueMorseRepresentations(["gin", "zen", "gig", "msg"]), 2)
if __name__ == "__main__":
    # Run the unit tests when this file is executed directly.
    unittest.main()
|
TehJoE/Cat-Herder | cat_herder.py | Python | mit | 5,029 | 0.010141 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Li-aung "Lewis" Yip
minecraft@penwatch.net
Requires:
plac (command line argument parsing)
unshortenit (adf.ly resolving)
"""
STARTUP_SCRIPT_TEMPLATE = """#!/bin/bash
java -Xmx2G -XX:MaxPermSize=256M -jar {fn} nogui"""
import os
import json
from operator import itemgetter
import re
import plac
from file_handling import mkdir, fetch_url
from atlauncher_import import atlauncher_to_catherder
from atlauncher_share_code import get_mod_pack_with_share_code
def safe_name(dirty):
    """Return *dirty* with everything except ASCII letters and digits removed."""
    return ''.join(re.findall('[A-Za-z0-9]', dirty))
def get_pack_json():
    """Download packs.json from the ATLauncher CDN and return the parsed list."""
    url = "http://download.nodecdn.net/containers/atl/launcher/json/packs.json"
    print("Grabbing new copy of packs.json from {u}".format(u=url))
    fetch_url(url, 'packs.json', None)
    with open('packs.json', 'r') as handle:
        return json.load(handle)
def list_packs(packs_json):
    """Print a table of pack names with their latest stable and dev versions.

    :param packs_json: list of pack dicts as returned by get_pack_json()
    """
    print(u"{:^38}| {:^19}| {:^19}\r\n{:-<80}".format('Pack Name', 'Latest Version', 'Latest Dev Version', ''))
    # Iterate a sorted copy: the original sorted in place, silently
    # reordering the caller's list as a side effect.
    for pack in sorted(packs_json, key=itemgetter('position')):
        n = pack['name']
        lv = ''
        if pack['versions']:
            lv = pack['versions'][0]['version']
        ldv = ''
        if pack['devVersions']:
            ldv = pack['devVersions'][0]['version']
        print(u"{name:<38}| {latest_version:<19}| {latest_dev_version:<19}".format(name=n, latest_version=lv,
                                                                                  latest_dev_version=ldv))
def get_latest_pack_version(packs_json, pack_name):
    """Return the newest stable version string for *pack_name*.

    Names are compared after sanitising with safe_name(), matching how the
    launcher refers to packs on the command line.

    :return: the latest version string, or None if the pack has no versions.
    :raises KeyError: if no pack in *packs_json* matches *pack_name*.
    """
    safe_pack_name = safe_name(pack_name)
    for pack in packs_json:
        if safe_name(pack['name']) == safe_pack_name:
            if pack['versions']:
                return pack['versions'][0]['version']
            return None
    # Bug fix: the original applied .format() to the KeyError instance
    # (an AttributeError at raise time) and filled the placeholder with
    # the `safe_name` function object instead of the sanitised name.
    raise KeyError("Pack name {P} ({S}) not found in packs.json.".format(
        P=pack_name, S=safe_pack_name))
@plac.annotations(
    operation=("Operation to perform", "positional", None, str, ['install', 'update', 'list_packs', 'install-from-share-code']),
    pack_name=("Name of pack, i.e. 'BevosTechPack' - try 'list_packs' for list of pack names", 'option', 'p'),
    pack_version=("Version of pack, i.e. 'BTP-11-Full' - defaults to latest available version", 'option', 'v'),
    install_folder=("Folder where server will be installed - defaults to './install/$pack_name/$pack_version'", 'option', 'i'),
    cache_folder=("Folder where downloaded files will be cached - defaults to './cache'", 'option', 'c'),
    share_code=("Share code - required for 'install-from-share-code'.", 'option', 's'),
    dry_run=("Perform a dry run. Lists what would be downloaded and installed, but doesn't actually download or install anything.", 'flag', 'd')
)
def main(operation, pack_name, pack_version, install_folder, cache_folder, share_code, dry_run):
    """A tool for installing and updating Minecraft servers based on ATLauncher mod packs.

    Example invocations:

    cat_herder.py list_packs

    Installing with share code:

    cat_herder.py install-from-share-code -s QtDNnlfZ

    As above, but manually specifying the download cache and server install folders:

    cat_herder.py install-from-share-code -s QtDNnlfZ -c /home/mc/cache -i /home/mc/install/

    Installing with manually specified pack name and pack version:

    cat_herder.py install -p BevosTechPack -v BTP-11-Full -c /home/mc/cache -i /home/mc/install/
    """
    # Resolve the target folders, falling back to defaults under the CWD.
    if install_folder:
        install_folder = os.path.realpath(install_folder)
    else:
        install_folder = os.path.realpath('./install/{pn}/{pv}'.format(pn=pack_name, pv=pack_version))
    if cache_folder:
        cache_folder = os.path.realpath(cache_folder)
    else:
        cache_folder = os.path.realpath('./cache/')
    mkdir(cache_folder)
    # All downloads land in the cache folder.
    os.chdir(cache_folder)
    packs_json = get_pack_json()
    if operation == 'list_packs':
        list_packs(packs_json)
    if operation == 'update':
        # Bug fix: this was a Python 2 print statement, a SyntaxError
        # under Python 3 (the rest of the file uses print()).
        print("Update not implemented yet.")
    if operation == 'install':
        if not pack_version:
            pack_version = get_latest_pack_version(packs_json, pack_name)
        mp = atlauncher_to_catherder(pack_name, pack_version, cache_folder, install_folder)
        if dry_run:
            mp.print_mod_files_list()
        else:
            mp.install_server()
    if operation == 'install-from-share-code':
        if not share_code:
            print("install-from-share-code option requires a share code to be specified using the -s option.")
            return  # Bug fix: previously fell through and crashed below.
        if not re.match("[A-Za-z0-9]{8}", share_code):
            print("install-from-share-code requires an 8-character alphanumeric share code.")
            return  # Bug fix: previously continued with the invalid code.
        mp = get_mod_pack_with_share_code(share_code, cache_folder, install_folder)
        if dry_run:
            mp.print_mod_files_list()
        else:
            mp.install_server()
if __name__ == '__main__':
    # plac builds the CLI from main's annotations and dispatches to it.
    plac.call(main)
|
MaestroGraph/sparse-hyper | experiments/gconv-simple.py | Python | mit | 17,048 | 0.003695 | import sys
import matplotlib as mpl
import torch.nn.functional as F
import torch.optim as optim
import torchvision
import torchvision.transforms as transforms
from tensorboardX import SummaryWriter
from torch import nn
from torch.autograd import Variable
from tqdm import trange
import gaussian
import util
from util import sparsemm
mpl.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from argparse import ArgumentParser
import networkx as nx
import math
import torch
from torch.nn.parameter import Parameter
from torch.nn.modules.module import Module
"""
Simple Graph convolution experiment. Given a set of random vectors, learn to express each as the sum of some of the
others
"""
def clean(axes=None):
    """Hide all spines, ticks and tick labels of *axes* (default: current axes)."""
    ax = plt.gca() if axes is None else axes
    for spine in ax.spines.values():
        spine.set_visible(False)
    ax.tick_params(top=False, bottom=False, left=False, right=False, labelbottom=False, labelleft=False)
def densities(points, means, sigmas):
    """
    Compute the unnormalized Gaussian densities of `points` under k diagonal
    MVNs (one sigma per dimension per component).

    :param points: (batchsize, n, rank) tensor of points
    :param means: (batchsize, k, rank) tensor of MVN means
    :param sigmas: (batchsize, k, rank) tensor of per-dimension variances
    :return: (batchsize, n, k) tensor of unnormalized densities
    """
    batchsize, n, rank = points.size()
    k = means.size(1)

    # Broadcast every point against every component: (b, n, k, rank).
    pts = points.unsqueeze(2).expand(batchsize, n, k, rank)
    mns = means.unsqueeze(1).expand_as(pts)
    sgs = sigmas.unsqueeze(1).expand_as(pts)

    # Whitening factor 1/sigma; EPSILON guards against division by zero.
    inv_sigma = torch.sqrt(1.0 / (gaussian.EPSILON + sgs))
    diffs = (pts - mns) * inv_sigma

    # Squared Mahalanobis distance via a batched dot product
    # (unroll batch/n, dot each rank-vector with itself, restore shape).
    diffs = diffs.view(-1, 1, rank)
    sq_dist = torch.bmm(diffs, diffs.transpose(1, 2)).view(batchsize, n, k)

    return torch.exp(-0.5 * sq_dist)
class MatrixHyperlayer(nn.Module):
"""
Constrained version of the matrix hyperlayer. Each output get exactly k inputs
"""
def duplicates(self, tuples):
    """
    Takes a list of tuples, and for each tuple that occurs multiple times
    marks all but one of the occurrences (in the mask that is returned).

    :param tuples: A size (batch, k, rank) tensor of integer tuples
    :return: A size (batch, k) uint8 mask indicating the duplicates
    """
    b, k, r = tuples.size()
    primes = self.primes[:r]
    primes = primes.unsqueeze(0).unsqueeze(0).expand(b, k, r)
    # Gödel-style encoding: (t+1)^p over distinct primes gives each integer
    # tuple a unique scalar id, so equal tuples (and only those) collide.
    unique = ((tuples+1) ** primes).prod(dim=2)  # unique identifier for each tuple
    sorted, sort_idx = torch.sort(unique, dim=1)
    _, unsort_idx = torch.sort(sort_idx, dim=1)
    # After sorting, duplicates are adjacent: compare each id to its left
    # neighbour; the first occurrence of each run stays unmarked.
    mask = sorted[:, 1:] == sorted[:, :-1]
    zs = torch.zeros(b, 1, dtype=torch.uint8, device='cuda' if self.use_cuda else 'cpu')
    mask = torch.cat([zs, mask], dim=1)
    # Undo the sort so the mask lines up with the input order.
    return torch.gather(mask, 1, unsort_idx)
def cuda(self, device_id=None):
    # Remember the move to GPU so the sampling code can allocate its
    # temporary tensors on the right device.
    self.use_cuda = True
    super().cuda(device_id)
def __init__(self, in_num, out_num, k, radditional=0, gadditional=0, region=(128,),
             sigma_scale=0.2, min_sigma=0.0, fix_value=False):
    """
    :param in_num: length of the input vector (columns of the implied matrix W)
    :param out_num: length of the output vector (rows of W)
    :param k: number of sparse connections per output element
    :param radditional: extra index samples drawn from a local region per point
    :param gadditional: extra index samples drawn globally per point
    :param region: size of the local sampling region
    :param sigma_scale: scale for the learned sigmas (applied elsewhere —
        presumably in the hyper network; not visible here)
    :param min_sigma: lower bound for the sigmas (applied elsewhere)
    :param fix_value: if True, connection values are fixed rather than learned
        (applied elsewhere)
    """
    super().__init__()
    self.min_sigma = min_sigma
    self.use_cuda = False
    self.in_num = in_num
    self.out_num = out_num
    self.k = k
    self.radditional = radditional
    self.region = region
    self.gadditional = gadditional
    self.sigma_scale = sigma_scale
    self.fix_value = fix_value
    self.weights_rank = 2  # implied rank of W
    # One learned (mean, sigma, value) triple per sparse connection.
    self.params = Parameter(torch.randn(k * out_num, 3))
    # Precomputed output-row indices paired with the sampled input indices:
    # during training each of the k connections yields
    # 2 + radditional + gadditional candidate indices.
    outs = torch.arange(out_num).unsqueeze(1).expand(out_num, k * (2 + radditional + gadditional)).contiguous().view(-1, 1)
    self.register_buffer('outs', outs.long())
    # Same, for inference (a single rounded index per connection).
    outs_inf = torch.arange(out_num).unsqueeze(1).expand(out_num, k).contiguous().view(-1, 1)
    self.register_buffer('outs_inf', outs_inf.long())
    # Primes used by duplicates() to fingerprint integer tuples.
    self.register_buffer('primes', torch.tensor(util.PRIMES))
def size(self):
    # Shape of the implied weight matrix W: (rows, cols) = (out, in).
    return (self.out_num, self.in_num)
def generate_integer_tuples(self, means,rng=None, use_cuda=False):
    """
    Expand each continuous index into a set of candidate integer indices:
    the two nearest integers, `radditional` samples from a local region
    around the index, and `gadditional` samples from the whole range.

    :param means: (c, k, 1) tensor of continuous (rank-1) index tuples
    :param rng: bounds of the index space (here just (in_num,))
    :param use_cuda: allocate the sample tensors on the GPU when True
    :return: (c, k * (2 + radditional + gadditional), 1) long tensor
    """
    dv = 'cuda' if use_cuda else 'cpu'
    c, k, rank = means.size()
    assert rank == 1
    # In the following, we cut the first dimension up into chunks of size self.k (for which the row index)
    # is the same. This then functions as a kind of 'batch' dimension, allowing us to use the code from
    # globalsampling without much adaptation
    """
    Sample the 2 nearest points
    """
    # Alternating floor/ceil mask so both integer neighbours of each
    # continuous index are generated.
    floor_mask = torch.tensor([1, 0], device=dv, dtype=torch.uint8)
    fm = floor_mask.unsqueeze(0).unsqueeze(2).expand(c, k, 2, 1)
    neighbor_ints = means.data.unsqueeze(2).expand(c, k, 2, 1).contiguous()
    neighbor_ints[fm] = neighbor_ints[fm].floor()
    neighbor_ints[~fm] = neighbor_ints[~fm].ceil()
    neighbor_ints = neighbor_ints.long()
    """
    Sample uniformly from a small range around the given index tuple
    """
    rr_ints = torch.cuda.FloatTensor(c, k, self.radditional, 1) if use_cuda else torch.FloatTensor(c, k, self.radditional, 1)
    rr_ints.uniform_()
    # Scale to [0, 1) so the floor below never reaches the upper bound.
    rr_ints *= (1.0 - gaussian.EPSILON)
    rng = torch.cuda.FloatTensor(rng) if use_cuda else torch.FloatTensor(rng)
    rngxp = rng.unsqueeze(0).unsqueeze(0).unsqueeze(0).expand_as(rr_ints)  # bounds of the tensor
    rrng = torch.cuda.FloatTensor(self.region) if use_cuda else torch.FloatTensor(self.region)  # bounds of the range from which to sample
    rrng = rrng.unsqueeze(0).unsqueeze(0).unsqueeze(0).expand_as(rr_ints)
    mns_expand = means.round().unsqueeze(2).expand_as(rr_ints)
    # upper and lower bounds
    lower = mns_expand - rrng * 0.5
    upper = mns_expand + rrng * 0.5
    # check for any ranges that are out of bounds: clamp the sampling
    # window so it stays inside [0, rng).
    idxs = lower < 0.0
    lower[idxs] = 0.0
    idxs = upper > rngxp
    lower[idxs] = rngxp[idxs] - rrng[idxs]
    rr_ints = (rr_ints * rrng + lower).long()
    """
    Sample uniformly from all index tuples
    """
    g_ints = torch.cuda.FloatTensor(c, k, self.gadditional, 1) if use_cuda else torch.FloatTensor(c, k, self.gadditional, 1)
    rngxp = rng.unsqueeze(0).unsqueeze(0).unsqueeze(0).expand_as(g_ints)  # bounds of the tensor
    g_ints.uniform_()
    g_ints *= (1.0 - gaussian.EPSILON) * rngxp
    g_ints = g_ints.long()
    # Concatenate all candidate sets and flatten the per-connection axis.
    ints = torch.cat([neighbor_ints, rr_ints, g_ints], dim=2)
    return ints.view(c, -1, rank)
def forward(self, input, train=True):
### Compute and unpack output of hypernetwork
means, sigmas, values = self.hyper(input)
nm = means.size(0)
c = nm // self.k
means = means.view(c, self.k, 1)
sigmas = sigmas.view(c, self.k, 1)
values = values.view(c, self.k)
rng = (self.in_num, )
assert input.size(0) == self.in_num
if train:
indices = self.generate_integer_tuples(means, rng=rng, use_cuda=self.use_cuda)
indfl = indices.float()
# Mask for duplicate indices
dups = self.duplicates(indices)
props = densities(indfl, means, sigmas).clone() # result has size (c, indices.size(1), means.size(1))
props[dups] = 0
props = props / props.sum(dim=1, keepdim=True)
values = values.unsqueeze(1).expand(c, indices.size(1), means.size(1))
values = props * values
values = values.sum(dim=2)
# unroll the batch dimension
indices = indices.view(-1, 1)
values = values.view(-1)
indices = torch.cat([self.outs, indices.long()], dim=1)
else:
indices = means.round().long().view(-1, 1)
values = values.squeeze().view(-1)
indices = torch.cat([self.outs_inf, indices.long()], dim=1)
if self.use_cuda:
| |
Karl-Marka/data-mining | scleroderma-prediction/getGeneDescLocal_Illumina.py | Python | gpl-3.0 | 1,180 | 0.016949 | import pandas as pd
def closeFunc():
    """Block until the user types 'quit', then exit the program."""
    # A loop instead of the original self-recursion, which would grow the
    # call stack on every wrong answer and eventually hit the recursion limit.
    while True:
        print('''Type 'quit' and press enter to exit program''')
        answer = input(': ')
        if answer == 'quit':
            quit()
def oligosList():
    """Prompt for a file of probe ids and return its lines, stripped.

    :return: list of stripped lines (empty lines are kept, as before).
    """
    oligosPath = input('Path to the file containing the list of probes: ')
    # Context manager closes the file; the original leaked the handle.
    with open(oligosPath) as handle:
        return [line.strip() for line in handle]
def main(oligosList, fullData=False):
    """Look up Illumina probe annotations and print or export them.

    :param oligosList: list of probe ids (values of the index column, col 11)
    :param fullData: when True, write Accession/Symbol/Definition to a TSV
        file; otherwise print the Definition column only.
    """
    db = pd.read_csv('probes_illumina.txt', sep='\t', header=0, low_memory=False, index_col=11)
    # .ix was removed from pandas; reindex keeps .ix's label-based
    # behaviour of yielding NaN rows for unknown probe ids.
    output = db.reindex(oligosList)
    if not fullData:
        output = output['Definition']
        print(output)
    else:
        output = output[['Accession', 'Symbol', 'Definition']]
        # File name repaired: the original line was corrupted in the source.
        output.to_csv('getGeneDescLocal_results.txt', sep='\t')
    closeFunc()
if __name__ == "__main__":
    # NOTE: the module-level name is rebound here from the function to its
    # result; main() then receives the list of probe ids.
    oligosList = oligosList()
    answer = input('Do you want full data? (yes/no) ')
    if answer == 'no':
        main(oligosList)
    elif answer == 'yes':
        main(oligosList, True)
    else:
        print('Wrong answer')
        closeFunc()
Chilledheart/chromium | tools/telemetry/third_party/gsutilz/third_party/boto/tests/unit/s3/test_website.py | Python | bsd-3-clause | 9,219 | 0.000434 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from tests.unit import unittest
import xml.dom.minidom
import xml.sax
from boto.s3.website import WebsiteConfiguration
from boto.s3.website import RedirectLocation
from boto.s3.website import RoutingRules
from boto.s3.website import Condition
from boto.s3.website import RoutingRules
from boto.s3.website import RoutingRule
from boto.s3.website import Redirect
from boto import handler
def pretty_print_xml(text):
    """Strip per-line whitespace from *text*, then return it pretty-printed.

    Normalizing first makes two differently-indented XML documents compare
    equal after pretty-printing.
    """
    collapsed = ''.join(line.strip() for line in text.splitlines())
    return xml.dom.minidom.parseString(collapsed).toprettyxml()
class TestS3WebsiteConfiguration(unittest.TestCase):
maxDiff = None
def setUp(self):
    # No shared fixtures are required; present for symmetry/extension.
    pass
def tearDown(self):
    # Nothing to clean up.
    pass
def test_suffix_only(self):
    # A configuration with only a suffix serializes just an IndexDocument.
    # (Repaired: 'suffix=' and the expected-XML literal were corrupted
    # mid-identifier in the source.)
    config = WebsiteConfiguration(suffix='index.html')
    xml = config.to_xml()
    self.assertIn(
        '<IndexDocument><Suffix>index.html</Suffix></IndexDocument>', xml)
def test_suffix_and_error(self):
    # Both an index suffix and an error key should be serialized.
    config = WebsiteConfiguration(suffix='index.html',
                                  error_key='error.html')
    xml = config.to_xml()
    self.assertIn(
        '<ErrorDocument><Key>error.html</Key></ErrorDocument>', xml)
def test_redirect_all_request_to_with_just_host(self):
    # A RedirectLocation with only a hostname emits no <Protocol> element.
    location = RedirectLocation(hostname='example.com')
    config = WebsiteConfiguration(redirect_all_requests_to=location)
    xml = config.to_xml()
    self.assertIn(
        ('<RedirectAllRequestsTo><HostName>'
         'example.com</HostName></RedirectAllRequestsTo>'), xml)
def test_redirect_all_requests_with_protocol(self):
    # Supplying a protocol adds a <Protocol> element after the host name.
    location = RedirectLocation(hostname='example.com', protocol='https')
    config = WebsiteConfiguration(redirect_all_requests_to=location)
    xml = config.to_xml()
    self.assertIn(
        ('<RedirectAllRequestsTo><HostName>'
         'example.com</HostName><Protocol>https</Protocol>'
         '</RedirectAllRequestsTo>'), xml)
def test_routing_rules_key_prefix(self):
    x = pretty_print_xml
    # This rule redirects requests for docs/* to documentation/*
    rules = RoutingRules()
    condition = Condition(key_prefix='docs/')
    redirect = Redirect(replace_key_prefix='documents/')
    rules.add_rule(RoutingRule(condition, redirect))
    config = WebsiteConfiguration(suffix='index.html', routing_rules=rules)
    xml = config.to_xml()
    # Compare via pretty_print_xml so whitespace differences don't matter.
    expected_xml = """<?xml version="1.0" encoding="UTF-8"?>
    <WebsiteConfiguration xmlns='http://s3.amazonaws.com/doc/2006-03-01/'>
      <IndexDocument>
        <Suffix>index.html</Suffix>
      </IndexDocument>
      <RoutingRules>
        <RoutingRule>
        <Condition>
          <KeyPrefixEquals>docs/</KeyPrefixEquals>
        </Condition>
        <Redirect>
          <ReplaceKeyPrefixWith>documents/</ReplaceKeyPrefixWith>
        </Redirect>
        </RoutingRule>
      </RoutingRules>
    </WebsiteConfiguration>
    """
    self.assertEqual(x(expected_xml), x(xml))
def test_routing_rules_to_host_on_404(self):
    x = pretty_print_xml
    # Another example from the docs:
    # Redirect requests to a specific host in the event of a 404.
    # Also, the redirect inserts a report-404/. For example,
    # if you request a page ExamplePage.html and it results
    # in a 404, the request is routed to a page report-404/ExamplePage.html
    rules = RoutingRules()
    condition = Condition(http_error_code=404)
    redirect = Redirect(hostname='example.com',
                        replace_key_prefix='report-404/')
    rules.add_rule(RoutingRule(condition, redirect))
    config = WebsiteConfiguration(suffix='index.html', routing_rules=rules)
    xml = config.to_xml()
    # Compare via pretty_print_xml so whitespace differences don't matter.
    expected_xml = """<?xml version="1.0" encoding="UTF-8"?>
    <WebsiteConfiguration xmlns='http://s3.amazonaws.com/doc/2006-03-01/'>
      <IndexDocument>
        <Suffix>index.html</Suffix>
      </IndexDocument>
      <RoutingRules>
        <RoutingRule>
        <Condition>
          <HttpErrorCodeReturnedEquals>404</HttpErrorCodeReturnedEquals>
        </Condition>
        <Redirect>
          <HostName>example.com</HostName>
          <ReplaceKeyPrefixWith>report-404/</ReplaceKeyPrefixWith>
        </Redirect>
        </RoutingRule>
      </RoutingRules>
    </WebsiteConfiguration>
    """
    self.assertEqual(x(expected_xml), x(xml))
def test_key_prefix(self):
    x = pretty_print_xml
    # Replace the whole key (not just a prefix) for requests under images/.
    rules = RoutingRules()
    condition = Condition(key_prefix="images/")
    redirect = Redirect(replace_key='folderdeleted.html')
    rules.add_rule(RoutingRule(condition, redirect))
    config = WebsiteConfiguration(suffix='index.html', routing_rules=rules)
    xml = config.to_xml()
    # Compare via pretty_print_xml so whitespace differences don't matter.
    expected_xml = """<?xml version="1.0" encoding="UTF-8"?>
    <WebsiteConfiguration xmlns='http://s3.amazonaws.com/doc/2006-03-01/'>
      <IndexDocument>
        <Suffix>index.html</Suffix>
      </IndexDocument>
      <RoutingRules>
        <RoutingRule>
        <Condition>
          <KeyPrefixEquals>images/</KeyPrefixEquals>
        </Condition>
        <Redirect>
          <ReplaceKeyWith>folderdeleted.html</ReplaceKeyWith>
        </Redirect>
        </RoutingRule>
      </RoutingRules>
    </WebsiteConfiguration>
    """
    self.assertEqual(x(expected_xml), x(xml))
def test_builders(self):
    x = pretty_print_xml
    # This is a more declarative way to create rules.
    # First the long way.
    rules = RoutingRules()
    condition = Condition(http_error_code=404)
    redirect = Redirect(hostname='example.com',
                        replace_key_prefix='report-404/')
    rules.add_rule(RoutingRule(condition, redirect))
    xml = rules.to_xml()
    # Then the more concise way.
    rules2 = RoutingRules().add_rule(
        RoutingRule.when(http_error_code=404).then_redirect(
            hostname='example.com', replace_key_prefix='report-404/'))
    xml2 = rules2.to_xml()
    # Both construction styles must serialize identically.
    self.assertEqual(x(xml), x(xml2))
def test_parse_xml(self):
x = pretty_print_xml
xml_in = """<?xml version="1.0" encoding="UTF-8"?>
<WebsiteConfiguration xmlns='http://s3.amazonaws.com/doc/2006-03-01/'>
<IndexDocument>
<Suffix>index.html</Suffix>
</IndexDocument>
<ErrorDocument>
<Key>error.html</Key>
</ErrorDocument>
<RoutingRules>
<RoutingRule>
<Condition>
<KeyPrefixEquals>docs/</Key |
uwekamper/matelight-scheduler | docs/conf.py | Python | bsd-3-clause | 7,794 | 0.002309 | # -*- coding: utf-8 -*-
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import inspect
__location__ = os.path.join(os.getcwd(), os.path.dirname(
inspect.getfile(inspect.currentframe())))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo',
'sphinx.ext.autosummary', 'sphinx.ext.viewcode', 'sphinx.ext.coverage',
'sphinx.ext.doctest', 'sphinx.ext.ifconfig', 'sphinx.ext.pngmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'matelight-scheduler'
copyright = u'2014, Uwe Kamper'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '' # Is set by calling `setup.py docs`
# The full version, including alpha/beta/rc tags.
release = '' # Is set by calling `setup.py docs`
# The language fo | r content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = ""
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'matelight_scheduler-doc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'user_guide.tex', u'matelight-scheduler Documentation',
u'Uwe Kamper', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = ""
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- External mapping ------------------------------------------------------------
python_version = '.'.join(map(str, sys.version_info[0:2]))
intersphinx_mapping = {
'sphinx': ('http://sphinx.pocoo.org', None),
'python': ('http://docs.python.org/' + python_version, None),
'matplotlib': ('http://matplotlib.sourceforge.net', None),
'numpy': ('http://docs.scipy.org/doc/numpy', None),
'sklearn': ('http://scikit-learn.org/stable', None),
'pandas': ('http://pandas.pydata.org/pandas-docs/stable', None),
'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
}
|
jidol/Examples | src/avl_tree.py | Python | gpl-2.0 | 5,481 | 0.000182 | from typing import Any
from binary_tree import BinaryTree, BinaryTreeNode
class AvlTree(BinaryTree):
def __init__(self, value):
    """Create an AVL tree rooted at a single node holding *value*."""
    super(AvlTree, self).__init__(value)
    # Height of a freshly created leaf node.
    self.height = 1
def _left_rotate(self, node: BinaryTreeNode) -> BinaryTreeNode:
    """
    Left-rotate *node* around its right child.

    :param node: Subtree root to rotate
    :return: New subtree root (the former right child)
    """
    pivot = node.right
    orphan = pivot.left

    # Re-hang the subtrees: node drops below its former right child.
    pivot.left = node
    node.right = orphan

    # Recompute heights bottom-up (node is now below pivot).
    node.height = max(self.get_height(node.left), self.get_height(node.right)) + 1
    pivot.height = max(self.get_height(pivot.left), self.get_height(pivot.right)) + 1

    return pivot
def _right_rotate(self, node: BinaryTreeNode) -> BinaryTreeNode:
    """
    Right-rotate *node* around its left child.

    :param node: Subtree root to rotate
    :return: New subtree root (the former left child)
    """
    pivot = node.left
    orphan = pivot.right

    # Re-hang the subtrees: node drops below its former left child.
    pivot.right = node
    node.left = orphan

    # Recompute heights bottom-up (node is now below pivot).
    node.height = max(self.get_height(node.left), self.get_height(node.right)) + 1
    pivot.height = max(self.get_height(pivot.left), self.get_height(pivot.right)) + 1

    return pivot
def _get_balance(self, node: BinaryTreeNode) -> int:
    """
    Balance factor of *node*: height(left) - height(right), or 0 for an
    empty subtree.
    """
    return 0 if not node else self.get_height(node.left) - self.get_height(node.right)
def _remove_node(self, tree: BinaryTreeNode, value: Any) -> BinaryTreeNode:
"""
Remove a node from the tree
:param tree: Tree to remove from within
:param value: Value to remove
:return: Updated tree
"""
if None is tree:
return tree
else:
if value < tree.value:
tree.left = self._remove_node(tree.left, value)
elif value > tree.value:
tree.right = self._remove_node(tree.right, value)
else:
if None is tree.right:
return tree.left
if None is tree.left:
return tree.right
# Get Min Right Value
temp_node = tree.right
the_min = temp_node.value
while temp_node.left:
temp_node = temp_node.left
the_min = temp_node.value
# Now remove it from the right tree
tree.right = self._remove_node(tree.right, the_min)
tree.value = the_min
if tree is None:
return tree
# Step 2 - Update the height of the
# ancestor node
tree.height = 1 + max(self.get_height(tree.left),
self.get_height(tree.right))
# Step 3 - Get the balance factor
balance = self._get_balance(tree)
# Step 4 - If the node is unbalanced,
# then try out the 4 cases
# Case 1 - Left Left
if balance > 1 and self._get_balance(tree.left) >= 0:
return self._right_rotate(tree)
# Case 2 - Right Right
if balance < -1 and self._get_balance(tree.right) <= 0:
return self._left_rotate(tree)
# Case 3 - Left Right
if balance > 1 and self.getBalance(tree.left) < 0:
tree.left = self._left_rotate(tree.left)
return self._right_rotate(tree)
# Case 4 - Right Left
if balance < -1 and self._get_balance(tree.right) > 0:
tree.right = self._right_rotate(tree.right)
return self._left_rotate(tree)
return tree
def _add_node(self, node: BinaryTreeNode, value: Any) -> BinaryTreeNode:
if None is node:
return BinaryTreeNode(value)
else:
if node.value < value:
node.right = self._add_node(node.right, value)
elif node.value > value:
node.left = self._add_node(node.left, value)
node.height = 1 + max(self.get_height(node.left),
self.get_height(node.right))
# Step 3 - Get the balance factor
balance = self._get_balance(node)
# Step 4 - If the node is unbalanced,
# then try out the 4 cases
# Case 1 - Left Left
if balance > 1 and value < node.left.value:
return self._right_rotate(node)
# Case 2 - Right Right
if balance < -1 and value > node.right.value:
| return self._left_rotate(node)
# Case 3 - Left Right
if balance > 1 and value > node.left.value:
node.left = self._left_rotate(node.left | )
return self._right_rotate(node)
# Case 4 - Right Left
if balance < -1 and value < node.right.value:
node.right = self._right_rotate(node.right)
return self._left_rotate(node)
return node
|
fevxie/odoo-infrastructure | infrastructure/models/docker_image.py | Python | agpl-3.0 | 2,128 | 0 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields
class infrastructure_docker_image(models.Model):
    """Catalog entry for a Docker image used by the infrastructure.

    Records how the image is pulled (``pull_name``), which service it
    provides (odoo / postgresql / other) and, for odoo images, which
    postgresql images they are known to be compatible with.
    """
    _name = 'infrastructure.docker_image'
    _description = 'Docker Image'

    name = fields.Char(
        'Name',
        required=True,
    )
    prefix = fields.Char(
        'Prefix',
    )
    pull_name = fields.Char(
        'Pull Name',
        required=True,
    )
    tag_ids = fields.One2many(
        'infrastructure.docker_image.tag',
        'docker_image_id',
        'Tags',
    )
    odoo_version_id = fields.Many2one(
        'infrastructure.odoo_version',
        'Odoo Version',
    )
    service = fields.Selection(
        [('odoo', 'Odoo'), ('postgresql', 'Postgresql'), ('other', 'Other')],
        string='Service',
        default='odoo',
        required=True,
    )
    # Both relations below share the same m2m table; they are the two
    # directions of the odoo <-> postgresql compatibility link.
    pg_image_ids = fields.Many2many(
        'infrastructure.docker_image',
        'infrastructure_odoo_pg_image_rel',
        'odoo_image_id', 'pg_image_id',
        string='Postgresql Images',
        domain=[('service', '=', 'postgresql')],
        help='Compatible Postgresql Images',
    )
    odoo_image_ids = fields.Many2many(
        'infrastructure.docker_image',
        'infrastructure_odoo_pg_image_rel',
        'pg_image_id', 'odoo_image_id',
        string='Odoo Images',
        domain=[('service', '=', 'odoo')],
        help='Compatible Odoo Images',
    )
class infrastructure_docker_image_tag(models.Model):
    """A tag (version label) of a Docker image, e.g. ``latest`` or ``8.0``."""
    _name = 'infrastructure.docker_image.tag'
    _description = 'Docker Image Tag'
    _order = 'sequence'

    name = fields.Char(
        'Name',
        required=True,
    )
    # The two labels below were copy-pasted as 'Name' in the original;
    # corrected to describe the fields they actually label.
    sequence = fields.Integer(
        'Sequence',
        default=10,
    )
    docker_image_id = fields.Many2one(
        'infrastructure.docker_image',
        'Docker Image',
        required=True,
        ondelete='cascade',
    )
|
julio73/scratchbook | code/work/alu/indenter.py | Python | mit | 1,460 | 0.026027 | # binary tag: refers to tags that have open and close el | ement
# eg: [a]some content[/a]
# standalone tag: refers t | o tags that are self contained
# eg: [b some content]
# Assuming that the text input is well formatted
# open input and output file
with open('textin.txt', 'r') as f1, open('textout.txt', 'w') as f2:
lines = f1.readlines()
# keep track of all binary tags (i.e. closeable tag)
closeables = set()
for line in lines:
if line.startswith('[/'): # closing tag
tag = line[2:].split(']')[0] #
closeables.add(tag) # add to set
# each line updates its position = prev position + directive
# and give a new directive to the next line (1 for indent)
# we start at 0, we use 3 blank spaces for indent
position = 0; indent = ' '; directive = 0
for line in lines:
if line.startswith('[/'): # we are closing
position -= 1 # since we are closing, unindent immediately
position = position + directive
directive = 0 # done
f2.write(indent*position+line)
elif line.startswith('['):
# get the tag
tag = line[1:].split(']',1)[0].split(' ',1)[0]
if tag in closeables: # tag is binary
position = position + directive
directive = 1 # indent next line
else: # tag is standalone
position = position + directive
directive = 0 # done
f2.write(indent*position+line)
else:
# we don't touch lines in between
f2.write(line)
|
XENON1T/pax | pax/plugins/DeleteLowLevelInfo.py | Python | bsd-3-clause | 2,081 | 0.002883 | from pax import plugin
import numpy as np
class DeleteLowLevelInfo(plugin.TransformPlugin):
    """This deletes low-level info from the datastructure, to make the output file smaller.

    By default, this is what gets removed:
      * hits for all but the main s1
      * pulses for all but the main s1
      * sum waveforms (but not the peak sum waveforms stored with each peak)
    """

    def transform_event(self, event):
        """Strip expensive low-level data from ``event`` in place; returns it.

        Behaviour is driven by three config keys:
          * ``shrink_data_threshold``: above this pulse count, per-peak
            arrays are zeroed except for the largest peaks (default: never).
          * ``delete_sum_waveforms`` (default True): drop event-level
            sum waveforms.
          * ``delete_hits_and_pulses``: None/'none', 'not_for_s1s'
            (default) or 'all'.
        """
        # For high energy events, zero the data in expensive fields, except
        # for the largest S1s and S2s in the TPC.
        # NOTE(review): s1s are capped at the 5 largest but s2s()[:] keeps
        # *all* S2s -- confirm whether [:5] was intended for S2s as well.
        if event.n_pulses > self.config.get('shrink_data_threshold', float('inf')):
            largest_indices = [event.peaks.index(x) for x in (event.s1s()[:5] + event.s2s()[:])]
            for i, p in enumerate(event.peaks):
                if i in largest_indices:
                    continue
                # Zero (rather than delete) so array shapes stay valid.
                p.sum_waveform *= 0
                p.sum_waveform_top *= 0
                p.area_per_channel *= 0
                p.hits_per_channel *= 0
                p.n_saturated_per_channel *= 0

        if self.config.get('delete_sum_waveforms', True):
            event.sum_waveforms = []

        delopt = self.config.get('delete_hits_and_pulses', 'not_for_s1s')
        if not delopt or delopt == 'none':
            pass
        elif delopt == 'not_for_s1s':
            # Keep only the pulses referenced by the hits of S1 peaks.
            pulses_to_keep = []
            for p in event.peaks:
                if p.type == 's1':
                    pulses_to_keep.extend(p.hits['found_in_pulse'].tolist())
                else:
                    p.hits = p.hits[:0]  # Set hits to an empty array
            pulses_to_keep = np.unique(pulses_to_keep)
            event.all_hits = event.all_hits[:0]
            event.pulses = [p for i, p in enumerate(event.pulses) if i in pulses_to_keep]
        elif delopt == 'all':
            event.all_hits = event.all_hits[:0]
            event.pulses = []
            for p in event.peaks:
                p.hits = p.hits[:0]
        else:
            raise ValueError("Illegal delete_hits_and_pulses value %s" % delopt)
        return event
|
bobman/magnemo | main.py | Python | gpl-3.0 | 4,681 | 0.012818 | from flask import Flask, render_template, request
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import Security, SQLAlchemyUserDatastore, UserMixin, RoleMixin, login_required
from flask.ext.login import current_user
import os
import json
from flask_mail import Mail
import base64
def load_json():
    """Read the application configuration from ``config.json``.

    When the file does not exist yet a default one is written first via
    :func:`create_json`.  Returns the parsed configuration dict.
    """
    if not os.path.isfile("config.json"):
        create_json()
    with open("config.json") as fp:
        return json.load(fp)
def create_json():
    """Write a default ``config.json`` so the app can start out of the box."""
    defaults = {
        "DEBUG": True,
        "SECRET_KEY": "super-secret",
        "SQLALCHEMY_DATABASE_URI": "sqlite:///magnemo.sqlite",
        "MAIL_SERVER": "mailserver",
        "MAIL_PORT": 587,
        "MAIL_USE_SSL": "True",
        "MAIL_USERNAME": "user@mailserver",
        "MAIL_PASSWORD": "secret",
    }
    with open("config.json", "w") as fp:
        json.dump(defaults, fp)
# Create app
app = Flask(__name__)
# Copy every setting from config.json into Flask's config mapping.
myConfig=load_json()
app.config['DEBUG'] = myConfig['DEBUG']
app.config['SECRET_KEY'] = myConfig['SECRET_KEY']
app.config['SQLALCHEMY_DATABASE_URI'] = myConfig['SQLALCHEMY_DATABASE_URI']
app.config['MAIL_SERVER'] = myConfig['MAIL_SERVER']
app.config['MAIL_PORT'] = myConfig['MAIL_PORT']
app.config['MAIL_USE_SSL'] = myConfig['MAIL_USE_SSL']
app.config['MAIL_USERNAME'] = myConfig['MAIL_USERNAME']
app.config['MAIL_PASSWORD'] = myConfig['MAIL_PASSWORD']
mail = Mail(app)
db = SQLAlchemy(app)
# Define models
# Association table for the many-to-many link between users and roles.
roles_users = db.Table('roles_users',
        db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
        db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
    """Flask-Security role, linked to users through the ``roles_users`` table."""
    id = db.Column(db.Integer(), primary_key=True)
    name = db.Column(db.String(80), unique=True)
    description = db.Column(db.String(255))
class User(db.Model, UserMixin):
    """Application user account as expected by Flask-Security."""
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(255), unique=True)
    password = db.Column(db.String(255))
    active = db.Column(db.Boolean())
    confirmed_at = db.Column(db.DateTime())
    roles = db.relationship('Role', secondary=roles_users,
                            backref=db.backref('users', lazy='dynamic'))
class Magnet(db.Model):
    """A draggable, resizable note ("fridge magnet") owned by one user.

    Position (``top``/``left``) and size (``height``/``width``) are stored
    in pixels as sent by the client.
    """
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(150))
    text = db.Column(db.Text)
    top = db.Column(db.Integer)
    left = db.Column(db.Integer)
    height = db.Column(db.Integer)
    width = db.Column(db.Integer)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    user = db.relationship('User', backref=db.backref('magnets', lazy='dynamic'))

    def __init__(self, title, text, user, top, left, height, width):
        self.title = title
        self.text = text
        self.user = user
        self.top = top
        self.left = left
        self.height = height
        self.width = width

    def get_dataDict(self):
        """Return the magnet's client-visible attributes as a plain dict."""
        magnetData = {
            "id": self.id,
            "title": self.title,
            "text": self.text,
            "top": self.top,
            "left": self.left,
            "height": self.height,
            "width": self.width
        }
        return magnetData
# Setup Flask-Security: wire the User/Role models into the auth layer.
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(app, user_datastore)
# Create a user to test with
# @app.before_first_request
# def create_user():
    #db.create_all()
    # TODO: User creation via config file or installer
    #user_datastore.create_user(email='admin@localhost', password='admin')
    #db.session.commit()
@app.route('/')
@login_required
def home():
    """Serve the single-page magnet board (login required)."""
    return render_template('index.html')
@app.route('/magnets')
@login_required
def magnets():
    """Return all of the current user's magnets as a JSON array of dicts."""
    # (removed leftover commented-out seeding/debug code)
    data = [magnet.get_dataDict() for magnet in current_user.magnets.all()]
    return json.dumps(data)
@app.route('/magnet/<id>')
@login_required
def show_magnet(id):
    """Return one magnet (by id, scoped to the current user) as JSON."""
    magnet = Magnet.query.filter(Magnet.user == current_user,Magnet.id == id).first()
    data=magnet.get_dataDict()
    return json.dumps(data)
@app.route('/magnet/<id>/edit', methods=['POST'])
@login_required
def update_magnet(id):
    """Persist a new position/size for one of the current user's magnets.

    Expects form fields ``top``, ``left``, ``height`` and ``width``.
    Returns a JSON "ok" on success.
    """
    magnet = Magnet.query.filter(Magnet.user == current_user, Magnet.id == id).first()
    magnet.top = request.form['top']
    magnet.left = request.form['left']
    magnet.height = request.form['height']
    magnet.width = request.form['width']
    db.session.commit()
    # (removed leftover debug print of request.form['id'])
    return json.dumps("ok")
if __name__ == '__main__':
app.run() |
kwilliams-mo/iris | lib/iris/analysis/interpolate.py | Python | gpl-3.0 | 39,449 | 0.002966 | # (C) British Crown Copyright 2010 - 2013, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Interpolation and re-gridding routines.
See also: :mod:`NumPy <numpy>`, and :ref:`SciPy <scipy:modindex>`.
"""
import collections
import warnings
import numpy as np
import numpy.ma as ma
import scipy
import scipy.spatial
from scipy.interpolate.interpolate import interp1d
import iris.cube
import iris.coord_systems
import iris.coords
import iris.exceptions
def _ll_to_cart(lon, lat):
# Based on cartopy.img_transform.ll_to_cart()
x = np.sin(np.deg2rad(90 - lat)) * np.cos(np.deg2rad(lon))
y = np.sin(np.deg2rad(90 - lat)) * np.sin(np.deg2rad(lon))
z = np.cos(np.deg2rad(90 - lat))
return (x, y, z)
def _cartesian_sample_points(sample_points, sample_point_coord_names):
# Replace geographic latlon with cartesian xyz.
# Generates coords suitable for nearest point calculations with scipy.spatial.cKDTree.
#
# Input:
# sample_points[coord][datum] : list of sample_positions for each datum, formatted for fast use of _ll_to_cart()
# sample_point_coord_names[coord] : list of n coord names
#
# Output:
# list of [x,y,z,t,etc] positions, formatted for kdtree
# Find lat and lon coord indices
i_lat = i_lon = None
i_non_latlon = range(len(sample_point_coord_names))
for i, name in enumerate(sample_point_coord_names):
if "latitude" in name:
i_lat = i
i_non_latlon.remove(i_lat)
if "longitude" in name:
i_lon = i
i_non_latlon.remove(i_lon)
if i_lat is None or i_lon is None:
return sample_points.transpose()
num_points = len(sample_points[0])
cartesian_points = [None] * num_points
# Get the point coordinates without the latlon
for p in range(num_points):
cartesian_points[p] = [sample_points[c][p] for c in i_non_latlon]
# Add cartesian xyz coordinates from latlon
x, y, z = _ll_to_cart(sample_points[i_lon], sample_points[i_lat])
for p in range(num_points):
cartesian_point = cartesian_points[p]
cartesian_point.append(x[p])
cartesian_point.append(y[p])
cartesian_point.append(z[p])
return cartesian_points
def nearest_neighbour_indices(cube, sample_points):
    """
    Returns the indices to select the data value(s) closest to the given coordinate point values.

    The sample_points mapping does not have to include coordinate values corresponding to all data
    dimensions. Any dimensions unspecified will default to a full slice.

    For example:

        >>> cube = iris.load_cube(iris.sample_data_path('ostia_monthly.nc'))
        >>> iris.analysis.interpolate.nearest_neighbour_indices(cube, [('latitude', 0), ('longitude', 10)])
        (slice(None, None, None), 9, 12)
        >>> iris.analysis.interpolate.nearest_neighbour_indices(cube, [('latitude', 0)])
        (slice(None, None, None), 9, slice(None, None, None))

    Args:

    * cube:
        An :class:`iris.cube.Cube`.
    * sample_points
        A list of tuple pairs mapping coordinate instances or unique coordinate names in the cube to point values.

    Returns:
        The tuple of indices which will select the point in the cube closest to the supplied coordinate values.

    """
    # Accept the deprecated dict form by converting it to (coord, value) pairs.
    if isinstance(sample_points, dict):
        warnings.warn('Providing a dictionary to specify points is deprecated. Please provide a list of (coordinate, values) pairs.')
        sample_points = sample_points.items()

    # Cheap sanity check that the first element really is a 2-tuple.
    if sample_points:
        try:
            coord, values = sample_points[0]
        except ValueError:
            raise ValueError('Sample points must be a list of (coordinate, value) pairs. Got %r.' % sample_points)

    # Resolve coordinate names / instances to the cube's own coord objects.
    points = []
    for coord, values in sample_points:
        if isinstance(coord, basestring):
            coord = cube.coord(coord)
        else:
            coord = cube.coord(coord=coord)
        points.append((coord, values))
    sample_points = points

    # Build up a list of indices to span the cube.
    indices = [slice(None, None)] * cube.ndim
    # Build up a dictionary which maps the cube's data dimensions to a list (which will later
    # be populated by coordinates in the sample points list)
    dim_to_coord_map = {}
    for i in range(cube.ndim):
        dim_to_coord_map[i] = []
    # Iterate over all of the specifications provided by sample_points
    for coord, point in sample_points:
        data_dim = cube.coord_dims(coord)
        # If no data dimension then we don't need to make any modifications to indices.
        if not data_dim:
            continue
        elif len(data_dim) > 1:
            raise iris.exceptions.CoordinateMultiDimError("Nearest neighbour interpolation of multidimensional "
                                                          "coordinates is not supported.")
        data_dim = data_dim[0]
        dim_to_coord_map[data_dim].append(coord)
        # calculate the nearest neighbour
        min_index = coord.nearest_neighbour_index(point)
        if getattr(coord, 'circular', False):
            warnings.warn("Nearest neighbour on a circular coordinate may not be picking the nearest point.", DeprecationWarning)
        # If the dimension has already been interpolated then assert that the index from this coordinate
        # agrees with the index already calculated, otherwise we have a contradicting specification
        if indices[data_dim] != slice(None, None) and min_index != indices[data_dim]:
            raise ValueError('The coordinates provided (%s) over specify dimension %s.' %
                             (', '.join([coord.name() for coord in dim_to_coord_map[data_dim]]), data_dim))
        indices[data_dim] = min_index
    return tuple(indices)
def _nearest_neighbour_indices_ndcoords(cube, sample_point, cache=None):
"""
See documentation for :func:`iris.analysis.interpolate.nearest_neighbour_indices`.
This function is adapted for points sampling a multi-dimensional coord,
and can currently only do nearest neighbour interpolation.
Bec | ause this function can be slow for multidimensional coordinates,
a 'cache' dictionary can be provided by the calling code.
"""
# Developer notes:
# A "sample space cube" is made which only has the coords and dims we are sampling on.
# We get the nearest neighbour using this sample space cube.
if isinstance(sample_point, dict):
warnings.warn('Providing a dictionary to specify points is deprecated. Please provide a list of (coordinate, values) pairs.')
sample_point = sample_point.items()
if sample_point:
try:
coord, value = sample_point[0]
except ValueError:
raise ValueError('Sample points must be a list of (coordinate, value) pairs. Got %r.' % sample_point)
# Convert names to coords in sample_point
point = []
ok_coord_ids = set(map(id, cube.dim_coords + cube.aux_coords))
for coord, value in sample_point:
if isinstance(coord, basestring):
coord = cube.coord(coord)
else:
coord = cube.coord(coord=coord)
if id(coord) not in ok_coord_ids:
msg = ('Invalid sample coordinate {!r}: derived coordinates are'
' not allowed.'.format(coord.name()))
raise ValueError(msg)
point.append((coord, value))
# Reformat sample_point for use in _cartesian_sample_points(), below.
sample_point = np.array([[value] for coord, value in point])
|
freelan-developers/chromalog | chromalog/mark/helpers.py | Python | mit | 5,224 | 0 | """
Automatically generate marking helpers functions.
"""
import sys
from .objects import Mark
class SimpleHelpers(object):
    """
    A class that is designed to act as a module and implement magic helper
    generation.
    """

    def __init__(self):
        self.__helpers = {}

    def make_helper(self, color_tag):
        """
        Make a simple helper.

        :param color_tag: The color tag to make a helper for.
        :returns: The helper function.
        """
        try:
            # Reuse a previously built helper for this tag, if any.
            return self.__helpers[color_tag]
        except KeyError:
            def helper(obj):
                return Mark(obj=obj, color_tag=color_tag)

            helper.__name__ = color_tag
            helper.__doc__ = """
            Mark an object for coloration.

            The color tag is set to {color_tag!r}.

            :param obj: The object to mark for coloration.
            :returns: A :class:`Mark<chromalog.mark.objects.Mark>` instance.

            >>> from chromalog.mark.helpers.simple import {color_tag}

            >>> {color_tag}(42).color_tag
            ['{color_tag}']
            """.format(color_tag=color_tag)

            self.__helpers[color_tag] = helper
            return helper

    def __getattr__(self, name):
        """
        Get a magic helper.

        :param name: The name of the helper to get.

        >>> SimpleHelpers().alpha(42).color_tag
        ['alpha']

        >>> getattr(SimpleHelpers(), '_incorrect', None)
        """
        if name.startswith('_'):
            raise AttributeError(name)

        return self.make_helper(color_tag=name)
class ConditionalHelpers(object):
    """
    A class that is designed to act as a module and implement magic helper
    generation.
    """

    def __init__(self):
        self.__helpers = {}

    def make_helper(self, color_tag_true, color_tag_false):
        """
        Make a conditional helper.

        :param color_tag_true: The color tag if the condition is met.
        :param color_tag_false: The color tag if the condition is not met.
        :returns: The helper function.
        """
        # Bug fix: the original stored helpers under ((true, false),) -- a
        # 1-tuple created by a stray trailing comma in the subscript -- but
        # looked them up under (true, false), so the cache never hit and a
        # fresh helper was rebuilt on every access.
        key = (color_tag_true, color_tag_false)
        helper = self.__helpers.get(key)

        if not helper:
            def helper(obj, condition=None):
                if condition is None:
                    condition = obj

                return Mark(
                    obj=obj,
                    color_tag=color_tag_true if condition else color_tag_false,
                )

            helper.__name__ = '_or_'.join((color_tag_true, color_tag_false))
            helper.__doc__ = """
            Convenience helper method that marks an object with the
            {color_tag_true!r} color tag if `condition` is truthy, and with the
            {color_tag_false!r} color tag otherwise.

            :param obj: The object to mark for coloration.
            :param condition: The condition to verify. If `condition` is
                :const:`None`, the `obj` is evaluated instead.
            :returns: A :class:`Mark<chromalog.mark.objects.Mark>` instance.

            >>> from chromalog.mark.helpers.conditional import {name}

            >>> {name}(42, True).color_tag
            ['{color_tag_true}']

            >>> {name}(42, False).color_tag
            ['{color_tag_false}']

            >>> {name}(42).color_tag
            ['{color_tag_true}']

            >>> {name}(0).color_tag
            ['{color_tag_false}']
            """.format(
                name=helper.__name__,
                color_tag_true=color_tag_true,
                color_tag_false=color_tag_false,
            )

            self.__helpers[key] = helper

        return helper

    def __getattr__(self, name):
        """
        Get a magic helper.

        :param name: The name of the helper to get. Must be of the form
            'a_or_b' where `a` and `b` are color tags.

        >>> ConditionalHelpers().alpha_or_beta(42, True).color_tag
        ['alpha']

        >>> ConditionalHelpers().alpha_or_beta(42, False).color_tag
        ['beta']

        >>> ConditionalHelpers().alpha_or_beta(42).color_tag
        ['alpha']

        >>> ConditionalHelpers().alpha_or_beta(0).color_tag
        ['beta']

        >>> getattr(ConditionalHelpers(), 'alpha_beta', None)

        >>> getattr(ConditionalHelpers(), '_incorrect', None)
        """
        if name.startswith('_'):
            raise AttributeError(name)

        try:
            color_tag_true, color_tag_false = name.split('_or_')
        except ValueError:
            raise AttributeError(name)

        return self.make_helper(
            color_tag_true=color_tag_true,
            color_tag_false=color_tag_false,
        )
simple = SimpleHelpers()
simple.__doc__ = """
Pseudo-module that generates simple helpers.
See :class:`SimpleHelpers<chromalog.mark.helpers.SimpleHelpers>`.
"""
conditional = ConditionalHelpers()
conditional.__doc__ = """
Pseudo-module that generates conditional helpers.
See :class:`ConditionalHelpers<chromalog.mark.helpers.ConditionalHelpers>`.
"""
sys.modules['.'.join([__name__, 'simple'])] = simple
sys.modules['.'.join([__name__, 'conditional'])] = conditional
|
bozzzzo/qpid-proton | tests/python/proton_tests/common.py | Python | apache-2.0 | 19,016 | 0.006679 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from unittest import TestCase
try:
from unittest import SkipTest
except:
try:
from unittest2 import SkipTest
except:
class SkipTest(Exception):
pass
from random import randint
from threading import Thread
from socket import socket, AF_INET, SOCK_STREAM
from subprocess import Popen,PIPE,STDOUT
import sys, os, string, subprocess
from proton import Connection, Transport, SASL, Endpoint, Delivery, SSL
from proton.reactor import Container
from proton.handlers import CHandshaker, CFlowController
from string import Template
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
# this is for compatibility, apparently the version of jython we
# use doesn't have the next() builtin.
# we should remove this when we upgrade to a python 2.6+ compatible version
# of jython
#_DEF = object() This causes the test loader to fail (why?)
class _dummy(): pass
_DEF = _dummy
def next(iter, default=_DEF):
try:
return iter.next()
except StopIteration:
if default is _DEF:
raise
else:
return default
# I may goto hell for this:
import __builtin__
__builtin__.__dict__['next'] = next
def free_tcp_ports(count=1):
    """Return a list of `count` TCP ports that are free to use (ie. unbound).

    Each successfully-bound candidate socket is held open until all ports
    have been found, so the same port cannot be handed out twice.
    """
    ports = []
    held_sockets = []
    failures = 0
    while len(ports) < count:
        candidate = randint(49152, 65535)
        s = socket(AF_INET, SOCK_STREAM)
        held_sockets.append(s)
        try:
            s.bind(("0.0.0.0", candidate))
        except:
            failures += 1
            assert failures != 100, "No free sockets available for test!"
        else:
            ports.append(candidate)
            failures = 0
    for s in held_sockets:
        s.close()
    return ports
def free_tcp_port():
    """Convenience wrapper: return a single currently-unbound TCP port."""
    return free_tcp_ports(1)[0]
def pump_uni(src, dst, buffer_size=1024):
    """Move at most ``buffer_size`` pending bytes from ``src`` to ``dst``.

    Negative pending/capacity values signal a closed end and trigger the
    matching close on the peer.  Returns False when no progress was
    possible (both sides idle, or both already closed), True otherwise.
    """
    pending = src.pending()
    capacity = dst.capacity()

    if capacity < 0:
        # Destination can take no more input.
        if pending < 0:
            return False  # both ends closed: nothing left to do
        src.close_head()
        return True

    if pending < 0:
        # Source is closed: propagate end-of-stream to the destination.
        dst.close_tail()
        return True

    if pending == 0 or capacity == 0:
        return False

    chunk = src.peek(min(capacity, buffer_size))
    dst.push(chunk)
    src.pop(len(chunk))
    return True
def pump(transport1, transport2, buffer_size=1024):
    """ Transfer all pending bytes between two Proton engines
    by repeatedly calling peek/pop and push.

    Asserts that each engine accepts some bytes every time
    (unless it's already closed).
    """
    # Mirrors the original short-circuit: drain transport1 -> transport2
    # first; only when that direction stalls try the reverse; stop once
    # neither direction makes progress.
    while True:
        if pump_uni(transport1, transport2, buffer_size):
            continue
        if not pump_uni(transport2, transport1, buffer_size):
            break
def isSSLPresent():
    """Return True when the proton build provides SSL support."""
    return SSL.present()
createdSASLDb = False
def _cyrusSetup(conf_dir):
    """Write out simple SASL config.

    Creates `conf_dir` from scratch with a sasldb (user "user", password
    "password", realm "proton") and a server config enabling the common
    mechanisms, then points PN_SASL_CONFIG_PATH at it.  Does nothing
    unless the SASLPASSWD env var names an existing saslpasswd2 binary.
    """
    saslpasswd = ""
    if 'SASLPASSWD' in os.environ:
        saslpasswd = os.environ['SASLPASSWD']
    if os.path.exists(saslpasswd):
        t = Template("""sasldb_path: ${db}
mech_list: EXTERNAL DIGEST-MD5 SCRAM-SHA-1 CRAM-MD5 PLAIN ANONYMOUS
""")
        abs_conf_dir = os.path.abspath(conf_dir)
        # Start from an empty config directory every time.
        subprocess.call(args=['rm','-rf',abs_conf_dir])
        os.mkdir(abs_conf_dir)
        db = os.path.join(abs_conf_dir,'proton.sasldb')
        conf = os.path.join(abs_conf_dir,'proton-server.conf')
        f = open(conf, 'w')
        f.write(t.substitute(db=db))
        f.close()
        cmd_template = Template("echo password | ${saslpasswd} -c -p -f ${db} -u proton user")
        cmd = cmd_template.substitute(db=db, saslpasswd=saslpasswd)
        subprocess.call(args=cmd, shell=True)
        os.environ['PN_SASL_CONFIG_PATH'] = abs_conf_dir
        # Record success so tests can tell a usable auth db exists.
        global createdSASLDb
        createdSASLDb = True
# Globally initialize Cyrus SASL configuration
if SASL.extended():
_cyrusSetup('sasl_conf')
def ensureCanTestExtendedSASL():
    """Skip the calling test unless extended SASL is available and configured."""
    if not SASL.extended():
        raise Skipped('Extended SASL not supported')
    if not createdSASLDb:
        raise Skipped("Can't Test Extended SASL: Couldn't create auth db")
class DefaultConfig:
    """Fallback configuration used when no -D defines were supplied."""
    defines = {}


class Test(TestCase):
    """Base test case supporting -D style configuration defines.

    ``default()`` resolves a setting by precedence: an explicit define,
    then a profile-specific default, then the hard-coded default.
    """

    config = DefaultConfig()

    def __init__(self, name):
        super(Test, self).__init__(name)
        self.name = name

    def configure(self, config):
        self.config = config

    def default(self, name, value, **profiles):
        profile = self.config.defines.get("profile")
        fallback = profiles.get(profile, value) if profile else value
        return self.config.defines.get(name, fallback)

    @property
    def delay(self):
        return float(self.default("delay", "1", fast="0.1"))

    @property
    def timeout(self):
        return float(self.default("timeout", "60", fast="10"))

    @property
    def verbose(self):
        return int(self.default("verbose", 0))
class Skipped(SkipTest):
    """Raised to skip a test; the `skipped` flag aids runners lacking SkipTest."""
    skipped = True
class TestServer(object):
    """ Base class for creating test-specific message servers.

    Runs a proton reactor on a daemon thread; subclasses override the
    on_* event callbacks as needed.
    """
    def __init__(self, **kwargs):
        self.args = kwargs
        self.reactor = Container(self)
        self.host = "127.0.0.1"
        self.port = 0
        if "host" in kwargs:
            self.host = kwargs["host"]
        if "port" in kwargs:
            self.port = kwargs["port"]
        self.handlers = [CFlowController(10), CHandshaker()]
        self.thread = Thread(name="server-thread", target=self.run)
        self.thread.daemon = True
        self.running = True
        self.conditions = []
    def start(self):
        self.reactor.start()
        # NOTE(review): when a port was supplied explicitly, retry stays 0,
        # the while loop is skipped and the assert below fires -- confirm
        # callers always rely on the random-port (port == 0) path.
        retry = 0
        if self.port == 0:
            self.port = str(randint(49152, 65535))
            retry = 10
        while retry > 0:
            try:
                self.acceptor = self.reactor.acceptor(self.host, self.port)
                break
            except IOError:
                self.port = str(randint(49152, 65535))
                retry -= 1
        assert retry > 0, "No free port for server to listen on!"
        self.thread.start()
    def stop(self):
        # Flag the reactor loop to exit, then wait for the thread to finish.
        self.running = False
        self.reactor.wakeup()
        self.thread.join()
    # Note: all following methods all run under the thread:
    def run(self):
        self.reactor.timeout = 3.14159265359
        while self.reactor.process():
            if not self.running:
                self.acceptor.close()
                self.reactor.stop()
                break
    def on_connection_bound(self, event):
        if "idle_timeout" in self.args:
            event.transport.idle_timeout = self.args["idle_timeout"]
    def on_connection_local_close(self, event):
        # Record close conditions so tests can assert on them afterwards.
        self.conditions.append(event.connection.condition)
    def on_delivery(self, event):
        event.delivery.settle()
#
# Classes that wrap the messenger applications msgr-send and msgr-recv.
# These applications reside in the tests/tools/apps directory
#
class MessengerApp(object):
""" Interface to control a MessengerApp """
    def __init__(self):
        """Initialise state shared by sender and receiver messenger apps."""
        self._cmdline = None
        # options common to Receivers and Senders:
        # (SSL material paths; presumably consumed by subclasses when
        # building the command line -- confirm against msgr-send/msgr-recv)
        self.ca_db = None
        self.certificate = None
        self.privatekey = None
        self.password = None
        self._output = None
def findfile(self, filename, searchpath):
"""Find filename in the searchpath
return absolute path to the file or None
"""
paths = string.split(searchpath, os.pathsep)
for path in paths:
if os.path.exists(os.path.join(path, filename)):
return os.path.abspath(os.path.join(path, filename))
return None
def start(self, verbose=False):
""" Begin executing the test """
cmd = self.cmdline()
self._verbose = verbose
if self._verbose:
print("COMMAND='%s'" % str(cmd))
#print("ENV='%s'" % str(os.environ.copy()))
try:
if os.name=="nt":
|
kawamon/hue | desktop/core/ext-py/SQLAlchemy-1.3.17/lib/sqlalchemy/events.py | Python | apache-2.0 | 53,052 | 0 | # sqlalchemy/events.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Core event interfaces."""
from . import event
from . import exc
from . import util
from .engine import Connectable
from .engine import Dialect
from .engine import Engine
from .pool import Pool
from .sql.base import SchemaEventTarget
class DDLEvents(event.Events):
"""
Define event listeners for schema objects,
that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget`
subclasses, including :class:`_schema.MetaData`, :class:`_schema.Table`,
:class:`_schema.Column`.
:class:`_schema.MetaData` and :class:`_schema.Table` support events
specifically regarding when CREATE and DROP
DDL is emitted to the database.
Attachment events are also provided to customize
behavior whenever a child schema element is associated
with a parent, such as, when a :class:`_schema.Column` is associated
with its :class:`_schema.Table`, when a
:class:`_schema.ForeignKeyConstraint`
is associated with a :class:`_schema.Table`, etc.
Example using the ``after_create`` event::
from sqlalchemy import event
from sqlalchemy import Table, Column, Metadata, Integer
m = MetaData()
some_table = Table('some_table', m, Column('data', Integer))
def after_create(target, connection, **kw):
connection.execute("ALTER TABLE %s SET name=foo_%s" %
(target.name, target.name))
event.listen(some_table, "after_create", after_create)
DDL events integrate closely with the
:class:`.DDL` class and the :class:`.DDLElement` hierarchy
of DDL clause constructs, which are themselves appropriate
as listener callables::
from sqlalchemy import DDL
event.listen(
some_table,
"after_create",
DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
)
The methods here define the name of an event as well
as the names of members that are passed to listener
functions.
For all :class:`.DDLEvent` events, the ``propagate=True`` keyword argument
will ensure that a given event handler is propagated to copies of the
object, which are made when using the :meth:`_schema.Table.tometadata`
method::
from sqlalchemy import DDL
event.listen(
some_table,
"after_create",
DDL("ALTER TABLE %(table)s SET name=foo_%(table)s"),
propagate=True
)
new_table = some_table.tometadata(new_metadata)
The above :class:`.DDL` object will also be associated with the
:class:`_schema.Table` object represented by ``new_table``.
.. seealso::
:ref:`event_toplevel`
:class:`.DDLElement`
:class:`.DDL`
:ref:`schema_ddl_sequences`
"""
_target_class_doc = "SomeSchemaClassOrObject"
_dispatch_target = SchemaEventTarget
def before_create(self, target, connection, **kw):
r"""Called before CREATE statements are emitted.
:param target: the :class:`_schema.MetaData` or :class:`_schema.Table`
object which is the target of the event.
:param connection: the :class:`_engine.Connection` where the
CREATE statement or statements will be emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
:func:`.event.listen` also accepts the ``propagate=True``
modifier for this event; when True, the listener function will
be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`_schema.Table.tometadata` is used.
"""
def after_create(self, target, connection, **kw):
r"""Called after CREATE statements are emitted.
:param target: the :class:`_schema.MetaData` or :class:`_schema.Table`
object which is the target of the event.
:param connection: the :class:`_engine.Connection` where the
CREATE statement or statements have been emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
:func:`.event.listen` also accepts the ``propagate=True``
modifier for this event; when True, the listener function will
be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`_schema.Table.tometadata` is used.
"""
def before_drop(self, target, connection, **kw):
r"""Called before DROP statements are emitted.
:param target: the :class:`_schema.MetaData` or :class:`_schema.Table`
object which is the target of the event.
:param connection: the :class:`_engine.Connection` where the
DROP statement or statements will be emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
:func:`.event.listen` also accepts the ``propagate=True``
modifier for this event; when True, the listener function will
be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`_schema.Table.tometadata` is used.
"""
def after_drop(self, target, connection, **kw):
r"""Called after DROP statements are emitted.
:param target: the :class:`_schema.MetaData` or :class:`_schema.Table`
object which is the target of the event.
:param connection: the :class:`_engine.Connection` where the
DROP statement or statements have been emitted.
:param \**kw: additional keyword arguments relevant
to the event. The contents of this dictionary
may vary across releases, and include the
list of tables being generated for a metadata-level
event, the checkfirst flag, and other
elements used by internal events.
:func:`.event.listen` also accepts the ``propagate=True``
modifier for this event; when True, the listener function will
be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`_schema.Table.tometadata` is used.
"""
def before_parent_attach(self, target, parent):
"""Called before a :class:`.SchemaItem` is associated with
a parent :class:`.SchemaItem`.
:param target: the target object
:param parent: the parent to which the target is being attached.
:func:`.event.listen` also accepts the ``propagate=True``
modifier for this event; when True, the listener function will
| be established for any copies made of the target object,
i.e. those copies that are generated when
:meth:`_schema.Table.tometadata` is used.
"""
def after_parent_attach(self, target, parent):
"""Called after a :class:`.SchemaItem` is associated with
a parent :class:`.SchemaItem`.
:param target: the target object
:param parent: the parent to which the target is being attached. |
:func:`.event.listen` also accepts the ``propagate=True``
modifier for this event; when True, the listener function will
be established for any copies made of the target object,
|
gencer/sentry | src/sentry/deletions/__init__.py | Python | bsd-3-clause | 4,094 | 0.00171 | """
The deletions subsystem managers bulk deletes as well as cascades. It attempts
to optimize around various patterns while using a standard approach to do so.
For example, let's say you want to delete an organization:
>>> from sentry import deletions
>>> task = deletions.get(model=Organization)
>>> work = True
>>> while work:
>>> work = task.chunk()
The system has a default task implementation to handle Organization which will
efficiently cascade deletes. This behavior varies based on the input object,
as the task can override the behavior for its children.
For example, when you delete a Group, it will cascade in a more traditional
manner. It will batch each child (such as Event). However, when you delete a
project, it won't actually cascade to the registered Group task. It will instead
take a more efficient approach of batch deleting its indirect descendants, such
as Event, so it can more efficiently bulk delete rows.
"""
from __future__ import absolute_import
from .base import BulkModelDeletionTask, ModelDeletionTask, ModelRelation # NOQA
from .manager import DeletionTaskManager
default_manager = DeletionTaskManager(default_task=ModelDeletionTask)
def load_defaults():
    """Register the deletion task for every Sentry model.

    Most child models can be removed with plain bulk deletes; models that
    need custom cascade behavior get a dedicated task from ``defaults``.
    """
    from sentry import models

    from . import defaults

    # (model, task) pairs, in the original registration order.
    registrations = (
        (models.Activity, BulkModelDeletionTask),
        (models.ApiApplication, defaults.ApiApplicationDeletionTask),
        (models.ApiKey, BulkModelDeletionTask),
        (models.ApiGrant, BulkModelDeletionTask),
        (models.ApiToken, BulkModelDeletionTask),
        (models.CommitAuthor, BulkModelDeletionTask),
        (models.CommitFileChange, BulkModelDeletionTask),
        (models.EnvironmentProject, BulkModelDeletionTask),
        (models.Event, defaults.EventDeletionTask),
        (models.EventMapping, BulkModelDeletionTask),
        (models.EventUser, BulkModelDeletionTask),
        (models.Group, defaults.GroupDeletionTask),
        (models.GroupAssignee, BulkModelDeletionTask),
        (models.GroupBookmark, BulkModelDeletionTask),
        (models.GroupCommitResolution, BulkModelDeletionTask),
        (models.GroupLink, BulkModelDeletionTask),
        (models.GroupEmailThread, BulkModelDeletionTask),
        (models.GroupHash, BulkModelDeletionTask),
        (models.GroupMeta, BulkModelDeletionTask),
        (models.GroupRedirect, BulkModelDeletionTask),
        (models.GroupRelease, BulkModelDeletionTask),
        (models.GroupResolution, BulkModelDeletionTask),
        (models.GroupRuleStatus, BulkModelDeletionTask),
        (models.GroupSeen, BulkModelDeletionTask),
        (models.GroupShare, BulkModelDeletionTask),
        (models.GroupSnooze, BulkModelDeletionTask),
        (models.GroupSubscription, BulkModelDeletionTask),
        (models.Organization, defaults.OrganizationDeletionTask),
        (models.OrganizationMemberTeam, BulkModelDeletionTask),
        (models.Project, defaults.ProjectDeletionTask),
        (models.ProjectBookmark, BulkModelDeletionTask),
        (models.ProjectKey, BulkModelDeletionTask),
        (models.Repository, defaults.RepositoryDeletionTask),
        (models.SavedSearch, BulkModelDeletionTask),
        (models.SavedSearchUserDefault, BulkModelDeletionTask),
        (models.Team, defaults.TeamDeletionTask),
        (models.UserReport, BulkModelDeletionTask),
    )
    for model, task in registrations:
        default_manager.register(model, task)
# Populate the registry at import time so deletions.get(...) works
# immediately, then re-export the manager's public entry points.
load_defaults()
get = default_manager.get
register = default_manager.register
|
suryakencana/niimanga | niimanga/views/main.py | Python | lgpl-3.0 | 4,364 | 0.000687 | """
# Copyright (c) 04 2015 | surya
# 21/04/15 nanang.ask@kubuskotak.com
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either | version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of | the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# main.py
"""
from niimanga.configs.view import ZHandler
from niimanga.libs.crawlable import CrawlAble
from niimanga.models.manga import Manga
from pyramid.view import view_config
class MainView(ZHandler):
    """Server-rendered pages for the manga reader: home/listing pages,
    series detail, chapter reader, search, genre, and the 404 fallback."""

    @view_config(route_name='home', renderer='layouts/home.html')
    @CrawlAble()
    def home_view(self):
        """Landing page."""
        return {'project': 'moori'}

    @view_config(route_name='url_popular', renderer='layouts/home.html')
    @CrawlAble()
    def popular_view(self):
        """Popular-manga listing."""
        return {'project': 'moori'}

    @view_config(route_name='url_latest', renderer='layouts/home.html')
    @CrawlAble()
    def latest_view(self):
        """Latest-updates listing."""
        return {'project': 'moori'}

    def _manga_meta(self, manga):
        """Template fields shared by the series and chapter pages for
        *manga*: thumbnail URL, credits, description, title, last chapter.

        Extracted from series_view/chapter_view, which previously
        duplicated this block line for line.
        """
        _ = self.R
        thumb = _.storage.url('/'.join([manga.id, manga.thumb]))
        last = Manga.last_chapter(manga.id)
        return dict(
            aka=manga.aka,
            thumb_url=thumb,
            artists=manga.get_artist(),
            authors=manga.get_authors(),
            description=manga.description,
            name=manga.title,
            last_chapter=' '.join([str(last.chapter), last.title]),
        )

    @view_config(route_name='url_series', renderer='layouts/series.html')
    @CrawlAble()
    def series_view(self):
        """Series detail page, looked up by slug.

        Falls back to the generic payload when the slug is unknown.
        (Stray debug print of the slug removed.)
        """
        _ = self.R
        slug = _.matchdict.get('seriesSlug', "No Title")
        manga = Manga.query.filter(Manga.slug == slug.strip()).first()
        if manga is not None:
            meta = self._manga_meta(manga)
            meta['url'] = '/manga/{slug}'.format(slug=slug)
            return meta
        return {'project': 'moori'}

    @view_config(route_name='url_chapter', renderer='layouts/chapter.html')
    @CrawlAble()
    def chapter_view(self):
        """Chapter reader page; extends the series metadata with the
        chapter-specific display name."""
        _ = self.R
        slug = _.matchdict.get('seriesSlug', "No Title")
        chap_slug = _.matchdict.get('chapterSlug', "No Title")
        manga = Manga.query.filter(Manga.slug == slug).first()
        if manga is not None:
            meta = self._manga_meta(manga)
            chapter = manga.get_chapter(manga, chap_slug)
            meta['url'] = '/chapter/{slug}/{chap}'.format(slug=slug, chap=chap_slug)
            # Chapter numbers render without a trailing ".0"
            # (e.g. 12.0 -> "Ch. 12").
            meta['name'] = ' '.join(
                [meta['name'], '|', 'Ch.',
                 str(chapter.chapter).replace('.0', ''), chapter.title])
            return meta
        return {'project': 'moori'}

    @view_config(route_name='url_search', renderer='layouts/home.html')
    @CrawlAble()
    def search_view(self):
        """Search results page."""
        return {'project': 'moori'}

    @view_config(route_name='url_genre', renderer='layouts/home.html')
    @CrawlAble()
    def genre_view(self):
        """Genre listing page."""
        return {'project': 'moori'}

    @view_config(context='pyramid.exceptions.NotFound', renderer='layouts/404.html')
    def not_found_view(self):
        """404 fallback page."""
        return {'project': 'moori'}
BrainTech/openbci | obci/logic/configs/config_multiple_8.py | Python | gpl-3.0 | 2,287 | 0.012243 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import config_speller_8
import config_robot_8
class Config(object):
    """Combined 8-decision BCI menu that embeds the speller and robot
    configurations into one flat state machine.

    For each state index i, self.state[i] lists the next state per
    decision, self.actions[i] the action string per decision, and
    self.letters[i] the labels shown for each decision.
    """
    def __init__(self):
        self.number_of_decisions = 8
        # Sub-configurations whose state machines are embedded below.
        speller = config_speller_8.Config()
        robot = config_robot_8.Config()
        self.state = []
        self.actions = []
        self.letters = []
        #MENU -- state 0 lets the user pick a sub-application.
        menu_state = 0
        self.letters.append([u"Speller", u"Robot",
                             "Switch", "SSVEP",  #u"High SSVEP",u"Low SSVEP"
                             u"", u"", u"", u""])
        self.actions.append([
            "",
            "start_robot_feedback()",
            "transform_scenario('switch')",  #restart_scenario('"+self._high_ssvep_scenario()+"')",
            "transform_scenario('ssvep')",  #restart_scenario('"+self._low_ssvep_scenario()+"')",
            "", "", "", ""])
        # Menu transitions are patched in at the end, once the speller and
        # robot blocks know their starting indices.
        self.state.append([0]*self.number_of_decisions)
        self._setup_menu()
        # zero_state tracks the index where the next embedded config starts.
        zero_state = 1
        #SPELLER -- embed speller states, shifting its transition targets.
        speller_state = zero_state
        for i, s in enumerate(speller.state):
            self.state.append([x+speller_state for x in s])
            self.actions.append(speller.actions[i])
            self.letters.append(speller.letters[i])
        # Last decision of the speller's entry state returns to the menu.
        self.state[zero_state][-1] = 0 #GOTO MENU
        self.actions[zero_state][-1] = "clear()"
        zero_state += len(speller.state)
        #ROBOT -- embed robot states, shifted likewise.
        robot_state = zero_state
        for i, s in enumerate(robot.state):
            self.state.append([x+robot_state for x in s])
            self.actions.append(robot.actions[i])
            self.letters.append(robot.letters[i])
        # Last decision of the robot's entry state returns to the menu.
        self.state[zero_state][-1] = 0 #GOTO MENU
        self.actions[zero_state][-1] = "stop_robot_feedback()"
        zero_state += len(robot.state)
        # Wire menu decisions 0/1 to the embedded sub-machines.
        self.state[menu_state][0] = speller_state
        self.state[menu_state][1] = robot_state
        self.number_of_states = zero_state
        self.states_configs = ['state', 'letters', 'actions', 'letters_solver', 'actions_solver']
        self.other_configs = []
        # Solver tables are unused in this config; filled with empty strings.
        self.letters_solver = self.number_of_states * [self.number_of_decisions * [""]]
        self.actions_solver = self.number_of_states * [self.number_of_decisions * [""]]

    def _setup_menu(self):
        # Hook for subclasses; the base menu needs no extra setup.
        pass
|
jimi-c/ansible | lib/ansible/modules/utilities/logic/wait_for.py | Python | gpl-3.0 | 24,145 | 0.003106 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Jeroen Hoekx <jeroen@hoekx.be>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Ansible-internal metadata: stable public interface, maintained by core.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: wait_for
short_description: Waits for a condition before continuing
description:
- You can wait for a set amount of time C(timeout), this is the default if nothing is specified or just C(timeout) is specified.
This does not produce an error.
- Waiting for a port to become available is useful for when services are not immediately available after their init scripts return
which is true of certain Java application servers. It is also useful when starting guests with the M(virt) module and
needing to pause until they are ready.
- This module can also be used to wait for a regex match a string to be present in a file.
- In 1.6 and later, this module can also be used to wait for a file to be available or
absent on the filesystem.
- In 1.8 and later, this module can also be used to wait for active connections to be closed before continuing, useful if a node
is being rotated out of a load balancer pool.
- For Windows targets, use the M(win_wait_for) module instead.
version_added: "0.7"
options:
host:
description:
- A resolvable hostname or IP address to wait for.
default: "127.0.0.1"
timeout:
description:
- Maximum number of seconds to wait for, when used with another condition it will force an error.
- When used without other conditions it is equivalent of just sleeping.
default: 300
connect_timeout:
description:
- Maximum number of seconds to wait for a connection to happen before closing and retrying.
default: 5
delay:
description:
- Number of seconds to wait before starting to poll.
default: 0
port:
description:
- Port number to poll.
active_connection_states:
description:
- The list of TCP connection states which are counted as active connections.
default: [ ESTABLISHED, FIN_WAIT1, FIN_WAIT2, SYN_RECV, SYN_SENT, TIME_WAIT ]
version_added: "2.3"
state:
description:
- Either C(present), C(started), or C(stopped), C(absent), or C(drained).
- When checking a port C(started) will ensure the port is open, C(stopped) will check that it is closed, C(drained) will check for active connections.
- When checking for a file or a search string C(present) or C(started) will ensure that the file or string is present before continuing,
C(absent) will check that file is absent or removed.
choices: [ absent, drained, present, started, stopped ]
default: started
path:
version_added: "1.4"
description:
- Path to a file on the filesystem that must exist before continuing.
search_regex:
version_added: "1.4"
description:
- Can be used to match a string in either a file or a socket connection.
- Defaults to a multiline regex.
exclude_hosts:
version_added: "1.8"
description:
- List of hosts or IPs to ignore when looking for active TCP connections for C(drained) state.
sleep:
version_added: "2.3"
default: 1
description:
- Number of seconds to sleep between checks, before 2.3 this was hardcoded to 1 second.
msg:
version_added: "2.4"
description:
- This overrides the normal error message from a failure to meet the required conditions.
notes:
- The ability to use search_regex with a port connection was added in 1.7.
    - Prior to 2.4, testing for the absence of a directory or UNIX socket did not work correctly.
- Prior to 2.4, testing for | the presence of a file did not work correctly if the remote user did not have read access to that file | .
- Under some circumstances when using mandatory access control, a path may always be treated as being absent even if it exists, but
can't be modified or created by the remote user either.
- When waiting for a path, symbolic links will be followed. Many other modules that manipulate files do not follow symbolic links,
so operations on the path using other modules may not work exactly as expected.
- This module is also supported for Windows targets.
- See also M(wait_for_connection)
author:
- Jeroen Hoekx (@jhoekx)
- John Jarvis (@jarv)
- Andrii Radyk (@AnderEnder)
'''
EXAMPLES = r'''
- name: sleep for 300 seconds and continue with play
wait_for: timeout=300
delegate_to: localhost
- name: Wait for port 8000 to become open on the host, don't start checking for 10 seconds
wait_for:
port: 8000
delay: 10
- name: Waits for port 8000 of any IP to close active connections, don't start checking for 10 seconds
wait_for:
host: 0.0.0.0
port: 8000
delay: 10
state: drained
- name: Wait for port 8000 of any IP to close active connections, ignoring connections for specified hosts
wait_for:
host: 0.0.0.0
port: 8000
state: drained
exclude_hosts: 10.2.1.2,10.2.1.3
- name: Wait until the file /tmp/foo is present before continuing
wait_for:
path: /tmp/foo
- name: Wait until the string "completed" is in the file /tmp/foo before continuing
wait_for:
path: /tmp/foo
search_regex: completed
- name: Wait until the lock file is removed
wait_for:
path: /var/lock/file.lock
state: absent
- name: Wait until the process is finished and pid was destroyed
wait_for:
path: /proc/3466/status
state: absent
- name: Output customized message when failed
wait_for:
path: /tmp/foo
state: present
msg: Timeout to find file /tmp/foo
# Don't assume the inventory_hostname is resolvable and delay 10 seconds at start
- name: Wait 300 seconds for port 22 to become open and contain "OpenSSH"
wait_for:
port: 22
host: '{{ (ansible_ssh_host|default(ansible_host))|default(inventory_hostname) }}'
search_regex: OpenSSH
delay: 10
connection: local
# Same as above but you normally have ansible_connection set in inventory, which overrides 'connection'
- name: Wait 300 seconds for port 22 to become open and contain "OpenSSH"
wait_for:
port: 22
host: '{{ (ansible_ssh_host|default(ansible_host))|default(inventory_hostname) }}'
search_regex: OpenSSH
delay: 10
vars:
ansible_connection: local
'''
import binascii
import datetime
import errno
import math
import os
import re
import select
import socket
import sys
import time
from ansible.module_utils.basic import AnsibleModule, load_platform_subclass
from ansible.module_utils._text import to_native
HAS_PSUTIL = False
try:
import psutil
HAS_PSUTIL = True
# just because we can import it on Linux doesn't mean we will use it
except ImportError:
pass
class TCPConnectionInfo(object):
"""
This is a generic TCP Connection Info strategy class that relies
on the psutil module, which is not ideal for targets, but necessary
for cross platform support.
A subclass may wish to override some or all of these methods.
- _get_exclude_ips()
- get_active_connections()
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
match_all_ips = {
socket.AF_INET: '0.0.0.0',
socket.AF_INET6: '::',
}
ipv4_mapped_ipv6_address = {
'prefix': '::ffff',
'match_all': '::ffff:0.0.0.0'
}
    def __new__(cls, *args, **kwargs):
        # Instantiate the platform-specific subclass registered for this
        # OS (if any) instead of the generic implementation.
        return load_platform_subclass(TCPConnectionInfo, args, kwargs)
    def __init__(self, module):
        # AnsibleModule instance: source of params and fail_json().
        self.module = module
        # One host may resolve to several addresses; keep them all.
        self.ips = _convert_host_to_ip(module.params['host'])
        self.port = int(self.module.params['port'])
        self.exclude_ips = self._get_exclude_ips()
        if not HAS_PSUTIL:
            # Counting active connections requires psutil; abort cleanly.
            module.fail_json(msg="psutil module required for wait_for")
def _get_exclude_ips(self):
exclude_hosts = self.module.para |
teamfx/openjfx-8u-dev-rt | modules/web/src/main/native/Source/JavaScriptCore/inspector/scripts/codegen/cpp_generator.py | Python | gpl-2.0 | 14,357 | 0.00209 | #!/usr/bin/env python
#
# Copyright (c) 2014, 2016 Apple Inc. All rights reserved.
# Copyright (c) 2014 University of Washington. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
import logging
import os.path
import re
from generator import ucfirst, Generator
from models import PrimitiveType, ObjectType, ArrayType, EnumType, AliasedType, Frameworks
log = logging.getLogger('global')
# Protocol primitive type name -> C++ type emitted in generated code.
_PRIMITIVE_TO_CPP_NAME_MAP = {
    'boolean': 'bool',
    'integer': 'int',
    'number': 'double',
    'string': 'String',
    'object': 'JSON::Object',
    'array': 'JSON::Array',
    'any': 'JSON::Value'
}
class CppGenerator(Generator):
    def __init__(self, *args, **kwargs):
        # Pass-through constructor; all state lives in the Generator base.
        Generator.__init__(self, *args, **kwargs)
def protocol_name(self):
return self.model().framework.setting('cpp_protocol_group', '')
def helpers_namespace(self):
return '%sHelpers' % self.protocol_name()
# Miscellaneous text manipulation routines.
@staticmethod
def cpp_getter_method_for_type(_type):
if isinstance(_type, ObjectType):
return 'getObject'
if isinstance(_type, ArrayType):
return 'getArray'
if isinstance(_type, PrimitiveType):
if _type.raw_name() is 'integer':
return 'getInteger'
elif _type.raw_name() is 'number':
return 'getDouble'
elif _type.raw_name() is 'any':
return 'getValue'
else:
return 'get' + ucfirst(_type.raw_name())
if isinstance(_type, AliasedType):
return CppGenerator.cpp_getter_method_for_type(_type.aliased_type)
if isinstance(_type, EnumType):
return CppGenerator.cpp_getter_method_for_type(_type.primitive_type)
@staticmethod
def cpp_setter_method_for_type(_type):
if isinstance(_type, ObjectType):
return 'setObject'
if isinstance(_type, ArrayType):
return 'setArray'
if isinstance(_type, PrimitiveType):
if _type.raw_name() is 'integer':
return 'setInteger'
elif _type.raw_name() is 'number':
return 'setDouble'
elif _type.raw_name() is 'any':
return 'setValue'
else:
return 'set' + ucfirst(_type.raw_name())
if isinstance(_type, AliasedType):
return CppGenerator.cpp_setter_method_for_type(_type.aliased_type)
if isinstance(_type, EnumType):
return CppGenerator.cpp_setter_method_for_type(_type.primitive_type)
# Generate type representations for various situations.
    @staticmethod
    def cpp_protocol_type_for_type(_type):
        """Return the C++ type name used in generated protocol code for the
        given model type.

        Check order matters: aliases unwrap first, member-less objects and
        anonymous arrays map to generic JSON types, then named object/enum/
        array types resolve to Inspector::Protocol::<Domain>::<Name>.
        """
        if isinstance(_type, AliasedType):
            _type = _type.aliased_type  # Fall through to enum or primitive.
        if isinstance(_type, ObjectType) and len(_type.members) == 0:
            # Objects with no declared members are plain JSON objects.
            return 'JSON::Object'
        if isinstance(_type, ArrayType):
            if _type.raw_name() is None:  # Otherwise, fall through and use typedef'd name.
                return 'JSON::ArrayOf<%s>' % CppGenerator.cpp_protocol_type_for_type(_type.element_type)
        if isinstance(_type, (ObjectType, EnumType, ArrayType)):
            return 'Inspector::Protocol::%s::%s' % (_type.type_domain().domain_name, _type.raw_name())
        if isinstance(_type, PrimitiveType):
            return CppGenerator.cpp_name_for_primitive_type(_type)
@staticmethod
def cpp_protocol_type_for_type_member(type_member, object_declaration):
if isinstance(type_member.type, EnumType) and type_member.type.is_anonymous:
return '::'.join([CppGenerator.cpp_protocol_type_for_type(object_declaration.type), ucfirst(type_member.member_name)])
else:
return CppGenerator.cpp_protocol_type_for_type(type_member.type)
@staticmethod
def cpp_type_for_unchecked_formal_in_parameter(parameter):
_type = parameter.type
if isinstance(_type, AliasedType):
_type = _type.aliased_type # Fall through to enum or primitive.
if isinstance(_type, EnumType):
_type = _type.primitive_type # Fall through to primitive.
# This handles the 'any' type and objects with defined properties.
if isinstance(_type, ObjectType) or _type.qualified_name() is 'object':
cpp_name = 'JSON::Object'
if parameter.is_optional:
return 'const %s*' % cpp_name
else:
return 'const %s&' % cpp_name
if isinstance(_type, ArrayType):
cpp_name = 'JSON::Array'
if parameter.is_optional:
return 'const %s*' % cpp_name
else:
return 'const %s&' % cpp_name
if isinstance(_type, PrimitiveType):
cpp_name = CppGenerator.cpp_name_for_primitive_type(_type)
if parameter.is_optional:
return 'const %s* const' % cpp_name
elif _type.raw_name() in ['string']:
return 'const %s&' % cpp_name
else:
return cpp_name
return "unknown_unchecked_formal_in_parameter_type"
    @staticmethod
    def cpp_type_for_checked_formal_event_parameter(parameter):
        # Event parameters reuse the member-type mapping, keyed by the
        # parameter's own name and optionality.
        return CppGenerator.cpp_type_for_type_with_name(parameter.type, parameter.parameter_name, parameter.is_optional)
    @staticmethod
    def cpp_type_for_type_member(member):
        # Type members are never optional at this level (is_optional=False).
        return CppGenerator.cpp_type_for_type_with_name(member.type, member.member_name, False)
@staticmethod
def cpp_type_for_type_with_name(_type, type_name, is_optional):
if isinstance(_typ | e, (ArrayType, ObjectType)):
| return 'RefPtr<%s>' % CppGenerator.cpp_protocol_type_for_type(_type)
if isinstance(_type, AliasedType):
builder_type = CppGenerator.cpp_protocol_type_for_type(_type)
if is_optional:
return 'const %s* const' % builder_type
elif _type.aliased_type.qualified_name() in ['integer', 'number']:
return CppGenerator.cpp_name_for_primitive_type(_type.aliased_type)
elif _type.aliased_type.qualified_name() in ['string']:
return 'const %s&' % builder_type
else:
return builder_type
if isinstance(_type, PrimitiveType):
cpp_name = CppGenerator.cpp_name_for_primitive_type(_type)
if _type.qualified_name() in ['object']:
return 'RefPtr<JSON::Object>'
elif _type.qualified_name() in ['any']:
return 'RefPtr<JSON::Value>'
elif is_optional:
return 'const %s* const' % cpp_name
elif _type.qualified_name() in ['string']:
return 'const %s&' % cpp_name
else:
return cpp_name
if isinstance(_type, EnumType):
if _type.is_anonymous:
enum_type_ |
ikben/bless | bless/ssh/certificate_authorities/rsa_certificate_authority.py | Python | apache-2.0 | 2,420 | 0.002066 | """
.. module: bless.ssh.certificate_authorities.rsa_certificate_authority
:copyright: (c) 2016 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
"""
from bless.ssh.certificate_authorities.ssh_certificate_authority import \
SSHCertificateAuthority
from bless.ssh.protocol.ssh_protocol import pack_ssh_mpint, pack_ssh_string
from bless.ssh.public_keys.ssh_public_key import SSHPublicKeyType
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.serialization import load_pem_private_key
class RSACertificateAuthority(SSHCertificateAuthority):
    # Signs SSH certificates with an RSA CA key; key and signature blobs
    # are packed per RFC 4253 section 6.6 (legacy "ssh-rsa" format).
    def __init__(self, pem_private_key, private_key_password=None):
        """
        RSA Certificate Authority used to sign certificates.
        :param pem_private_key: PEM formatted RSA Private Key.  It should be encrypted with a
        password, but that is not required.
        :param private_key_password: Password to decrypt the PEM RSA Private Key, if it is
        encrypted.  Which it should be.
        """
        # NOTE(review): super() is given SSHCertificateAuthority (the base
        # class) rather than RSACertificateAuthority, which *skips*
        # SSHCertificateAuthority.__init__ in the MRO -- confirm this is
        # intentional and not a typo.
        super(SSHCertificateAuthority, self).__init__()
        self.public_key_type = SSHPublicKeyType.RSA
        self.private_key = load_pem_private_key(pem_private_key,
                                                private_key_password,
                                                default_backend())
        # Cache the public numbers (e, n) needed to serialize the
        # signature key.
        ca_pub_numbers = self.private_key.public_key().public_numbers()
        self.e = ca_pub_numbers.e
        self.n = ca_pub_numbers.n

    def get_signature_key(self):
        """
        Get the SSH Public Key associated with this CA.
        Packed per RFC4253 section 6.6.
        :return: SSH Public Key.
        """
        key = pack_ssh_string(self.public_key_type)
        key += pack_ssh_mpint(self.e)
        key += pack_ssh_mpint(self.n)
        return key

    def sign(self, body):
        """
        Sign the certificate body with the RSA private key.  Signatures are computed and
        encoded per RFC4253 section 6.6
        :param body: All other fields of the SSH Certificate, from the initial string to the
        signature key.
        :return: SSH RSA Signature.
        """
        # NOTE(review): PKCS#1 v1.5 with SHA-1 matches the legacy "ssh-rsa"
        # algorithm; modern OpenSSH may require rsa-sha2-256/512 -- confirm
        # against the target servers.
        signature = self.private_key.sign(body, padding.PKCS1v15(), hashes.SHA1())
        return self._serialize_signature(signature)
|
mhbu50/frappe | frappe/core/doctype/version/test_version.py | Python | mit | 1,227 | 0.02119 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import copy
import unittest

import frappe
from frappe.core.doctype.version.version import get_diff
from frappe.test_runner import make_test_objects
class TestVersion(unittest.TestCase):
    def test_get_diff(self):
        """get_diff must report changed fields as [fieldname, old, new] rows."""
        frappe.set_user('Administrator')
        test_records = make_test_objects('Event', reset = True)
        old_doc = frappe.get_doc("Event", test_records[0])
        new_doc = copy.deepcopy(old_doc)
        old_doc.color = None
        new_doc.color = '#fafafa'
        diff = get_diff(old_doc, new_doc)['changed']
        self.assertEqual(get_fieldnames(diff)[0], 'color')
        self.assertTrue(get_old_values(diff)[0] is None)
        self.assertEqual(get_new_values(diff)[0], '#fafafa')
        # Datetime fields are reported in the user's display format.
        new_doc.starts_on = "2017-07-20"
        diff = get_diff(old_doc, new_doc)['changed']
        self.assertEqual(get_fieldnames(diff)[1], 'starts_on')
        self.assertEqual(get_old_values(diff)[1], '01-01-2014 00:00:00')
        self.assertEqual(get_new_values(diff)[1], '07-20-2017 00:00:00')
def get_fieldnames(change_array):
    """Return the fieldname (column 0) of each change row."""
    return [fieldname for fieldname, *_rest in change_array]
def get_old_values(change_array):
    """Return the old value (column 1) of each change row."""
    return [old for _fieldname, old, *_rest in change_array]
def get_new_values(change_array):
    """Return the new value (column 2) of each change row."""
    return [new for _fieldname, _old, new, *_rest in change_array]
|
ISeaTeL/ISeaTeL_Cup_Site | oj_judge/models.py | Python | mit | 619 | 0.008078 | from django.db import models
from datetime import datetime
class JudgeResult(models.Model):
    """One judge verdict for a single submitted solution."""
    sid = models.IntegerField()     # submission id
    pid = models.IntegerField()     # problem id
    username = models.CharField(max_length=50)
    result = models.CharField(max_length=50)    # verdict text
    time = models.IntegerField()
    memory = models.IntegerField()
    message = models.CharField(max_length=500)  # judge/compiler message
    status = models.IntegerField()
    # Fix: the original passed default=datetime.now() — evaluated once at
    # import time, freezing a single timestamp for every row.  Pass the
    # callable so it is evaluated per insert.  (auto_now_add=True already
    # stamps creation time, which also renders editable=True ineffective
    # for this field — kept for compatibility.)
    submit_time = models.DateTimeField(default=datetime.now, editable=True, auto_now_add=True)
    language = models.CharField(max_length=50)

    def __str__(self):
        return self.username + ': ' + self.result
|
laysakura/shellstreaming | test/ostream/test_localfile.py | Python | apache-2.0 | 1,074 | 0.002793 | # -*- coding: utf-8 -*-
import nose.tools as ns
import os
from os.path import join
from tempfile import gettempdir
from relshell.record import Record
from relshell.recorddef import RecordDef
from relshell.batch import Batch
from shellstreaming.core.batch_queue import BatchQueue
from shellstreaming.ostream.localfile import LocalFile
# Path of the temporary file the ostream under test writes to.
TEST_FILE = join(gettempdir(), 'shellstreaming_test_localfile.txt')
def teardown():
    """nose module-level teardown: remove the file the test wrote."""
    os.remove(TEST_FILE)
def test_localfile_usage():
    """LocalFile ostream must drain its queue into TEST_FILE as CSV."""
    # prepare input queue
    q = BatchQueue()
    for batch in _create_batches():
        q.push(batch)  # [fix] - Batch's output format has to be customized by user
    # None is the end-of-stream sentinel the ostream waits for.
    q.push(None)
    # run ostream
    ostream = LocalFile(TEST_FILE, output_format='csv', input_queue=q)
    ostream.join()
    # check contents
    with open(TEST_FILE) as f:
        ns.eq_(f.read(), '''"111"
"222"
"333"
''')
def _create_batches():
    # Single INT column; together the two batches hold records 111, 222, 333,
    # matching the CSV contents asserted by the test above.
    rdef = RecordDef([{'name': 'col0', 'type': 'INT'}])
    return (
        Batch(rdef, (Record(111), Record(222), )),
        Batch(rdef, (Record(333), )),
    )
|
magyarm/periphondemand-code | src/bin/core/generic.py | Python | lgpl-2.1 | 4,911 | 0.012014 | #! /usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Name: Generic.py
# Purpose:
# Author: Fabien Marteau <fabien.marteau@armadeus.com>
# Created: 21/05/2008
#-----------------------------------------------------------------------------
# Copyright (2008) Armadeus Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#-----------------------------------------------------------------------------
# Revision list :
#
# Date By Changes
#
#-----------------------------------------------------------------------------
__doc__ = ""
__version__ = "1.0.0"
__author__ = "Fabien Marteau <fabien.marteau@armadeus.com>"
import re
from periphondemand.bin.utils.wrapperxml import WrapperXml
from periphondemand.bin.utils.error import Error
# Allowed values for a generic's "destination" attribute.
DESTINATION = ["fpga","driver","both"]
# Allowed values for the "public" attribute (stored as strings, not bools).
PUBLIC = ["true","false"]
class Generic(WrapperXml):
    """ Manage generic instance value
    """
    def __init__(self,parent,**keys):
        """ init Generic,
            __init__(self,parent,node)
            __init__(self,parent,nodestring)
            __init__(self,parent,name)
        """
        self.parent=parent
        # Exactly one of node / nodestring / name selects the init path.
        if "node" in keys:
            self.__initnode(keys["node"])
        elif "nodestring" in keys:
            self.__initnodestring(keys["nodestring"])
        elif "name" in keys:
            self.__initname(keys["name"])
        else:
            raise Error("Keys unknown in Generic init()",0)
    def __initnode(self,node):
        # Wrap an existing XML node.
        WrapperXml.__init__(self,node=node)
    def __initnodestring(self,nodestring):
        # Parse a serialized XML fragment.
        WrapperXml.__init__(self,nodestring=nodestring)
    def __initname(self,name):
        # Create a fresh <generic> node with the given name.
        WrapperXml.__init__(self,nodename="generic")
        self.setName(name)
    def getOp(self):
        # Operator applied to compute the value (e.g. "realsizeof"), or None.
        return self.getAttributeValue("op")
    def setOp(self,op):
        self.setAttribute("op",op)
    def getTarget(self):
        # "interface.port" path the operator is applied to.
        return self.getAttributeValue("target")
    def setTarget(self,target):
        self.setAttribute("target",target)
    def isPublic(self):
        # Returns the strings "true"/"false", not booleans.
        if self.getAttributeValue("public")=="true":
            return "true"
        else:
            return "false"
    def setPublic(self,public):
        public = public.lower()
        if not public in PUBLIC:
            raise Error("Public value "+str(public)+" wrong")
        self.setAttribute("public",public)
    def getType(self):
        the_type = self.getAttributeValue("type")
        if the_type == None:
            raise Error("Generic "+self.getName()+\
                    " description malformed, type must be defined",0)
        else:
            return the_type
    def setType(self,type):
        self.setAttribute("type",type)
    def getMatch(self):
        # Regex a new value must match, or None when unconstrained.
        try:
            return self.getAttributeValue("match").encode("utf-8")
        except AttributeError:
            return None
    def setMatch(self,match):
        self.setAttribute("match",match)
    def getValue(self):
        """ return the generic value
        """
        component = self.getParent()
        if self.getOp() == None:
            return self.getAttributeValue("value")
        else:
            target = self.getTarget().split(".")
            if self.getOp() == "realsizeof":
                # return the number of connected pin
                return str(int(component.getInterface(target[0]).getPort(target[1]).getMaxPinNum())+1)
            else:
                raise Error("Operator unknown "+self.getOp(),1)
    def setValue(self,value):
        # Enforce the "match" regex (when present) before storing the value.
        if self.getMatch() == None:
            self.setAttribute("value",value)
        elif re.compile(self.getMatch()).match(value):
            self.setAttribute("value",value)
        else:
            raise Error("Value doesn't match for attribute "+str(value),0)
    def getDestination(self):
        """ return the generic destination (fpga,driver or both)
        """
        return self.getAttributeValue("destination")
    def setDestination(self,destination):
        destination = destination.lower()
        if not destination in DESTINATION:
            raise Error("Destination value "+str(destination)+\
                    " unknown")
        self.setAttribute("destination",destination)
|
kpanic/lymph | iris/core/trace.py | Python | apache-2.0 | 983 | 0.001017 | import gevent
import gevent.pool
import uuid
import logging
def get_trace(greenlet=None):
    """Return the trace dict of *greenlet* (default: the current greenlet),
    creating an empty one on first access."""
    target = greenlet or gevent.getcurrent()
    if not hasattr(target, '_iris_trace'):
        target._iris_trace = {}
    return target._iris_trace
def spawn(*args, **kwargs):
    """Start a greenlet that inherits a copy of the caller's trace context."""
    child = gevent.Greenlet(*args, **kwargs)
    child._iris_trace = dict(get_trace())
    child.start()
    return child
# Keep a module-level alias so Group.spawn can reach this function even
# though the method name shadows it.
_spawn = spawn
class Group(gevent.pool.Group):
    """gevent pool Group whose spawn() propagates the iris trace context."""

    def spawn(self, *args, **kwargs):
        # Delegate to the trace-aware module-level spawn (via its _spawn
        # alias), then track the greenlet in this group.
        g = _spawn(*args, **kwargs)
        self.add(g)
        return g
def trace(**kwargs):
    """Merge *kwargs* into the current greenlet's trace context."""
    context = get_trace()
    context.update(kwargs)
def set_id(trace_id=None):
    """Record *trace_id* in the trace context (generating one when absent)
    and return the id in effect."""
    if not trace_id:
        trace_id = uuid.uuid4().hex
    trace(iris_trace_id=trace_id)
    return trace_id
def get_id():
    """Return the current trace id, or None when none has been set."""
    context = get_trace()
    return context.get('iris_trace_id')
class TraceFormatter(logging.Formatter):
    # Formatter that exposes the current trace id to format strings
    # as %(trace_id)s.
    def format(self, record):
        record.trace_id = get_id()
        return super(TraceFormatter, self).format(record)
|
jawilson/home-assistant | homeassistant/components/template/cover.py | Python | apache-2.0 | 12,433 | 0.000563 | """Support for covers which integrate with other components."""
import logging
import voluptuous as vol
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
DEVICE_CLASSES_SCHEMA,
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT,
SUPPORT_OPEN,
SUPPORT_OPEN_TILT,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
SUPPORT_STOP_TILT,
CoverEntity,
)
from homeassistant.const import (
CONF_COVERS,
CONF_DEVICE_CLASS,
CONF_ENTITY_ID,
CONF_FRIENDLY_NAME,
CONF_OPTIMISTIC,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.script import Script
from .const import DOMAIN
from .template_entity import (
TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY,
TemplateEntity,
rewrite_common_legacy_to_modern_conf,
)
_LOGGER = logging.getLogger(__name__)

# States (plus template boolean results) accepted from the value template.
_VALID_STATES = [
    STATE_OPEN,
    STATE_OPENING,
    STATE_CLOSED,
    STATE_CLOSING,
    "true",
    "false",
]

CONF_POSITION_TEMPLATE = "position_template"
CONF_TILT_TEMPLATE = "tilt_template"

OPEN_ACTION = "open_cover"
CLOSE_ACTION = "close_cover"
STOP_ACTION = "stop_cover"
POSITION_ACTION = "set_cover_position"
TILT_ACTION = "set_cover_tilt_position"

CONF_TILT_OPTIMISTIC = "tilt_optimistic"
CONF_OPEN_AND_CLOSE = "open_or_close"

# Feature bitmask advertised when any tilt control is configured.
TILT_FEATURES = (
    SUPPORT_OPEN_TILT
    | SUPPORT_CLOSE_TILT
    | SUPPORT_STOP_TILT
    | SUPPORT_SET_TILT_POSITION
)

# Per-cover config schema: open/close must be given together, and at least
# one of open_cover / set_cover_position must be present.
COVER_SCHEMA = vol.All(
    cv.deprecated(CONF_ENTITY_ID),
    vol.Schema(
        {
            vol.Inclusive(OPEN_ACTION, CONF_OPEN_AND_CLOSE): cv.SCRIPT_SCHEMA,
            vol.Inclusive(CLOSE_ACTION, CONF_OPEN_AND_CLOSE): cv.SCRIPT_SCHEMA,
            vol.Optional(STOP_ACTION): cv.SCRIPT_SCHEMA,
            vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
            vol.Optional(CONF_POSITION_TEMPLATE): cv.template,
            vol.Optional(CONF_TILT_TEMPLATE): cv.template,
            vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
            vol.Optional(CONF_OPTIMISTIC): cv.boolean,
            vol.Optional(CONF_TILT_OPTIMISTIC): cv.boolean,
            vol.Optional(POSITION_ACTION): cv.SCRIPT_SCHEMA,
            vol.Optional(TILT_ACTION): cv.SCRIPT_SCHEMA,
            vol.Optional(CONF_FRIENDLY_NAME): cv.string,
            vol.Optional(CONF_ENTITY_ID): cv.entity_ids,
            vol.Optional(CONF_UNIQUE_ID): cv.string,
        }
    ).extend(TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY.schema),
    cv.has_at_least_one_key(OPEN_ACTION, POSITION_ACTION),
)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {vol.Required(CONF_COVERS): cv.schema_with_slug_keys(COVER_SCHEMA)}
)
async def _async_create_entities(hass, config):
    """Create the Template cover entities from the platform config."""
    covers = []
    for object_id, entity_config in config[CONF_COVERS].items():
        # Normalize legacy option names to the modern schema first.
        entity_config = rewrite_common_legacy_to_modern_conf(entity_config)
        unique_id = entity_config.get(CONF_UNIQUE_ID)
        covers.append(
            CoverTemplate(
                hass,
                object_id,
                entity_config,
                unique_id,
            )
        )
    return covers
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Template cover."""
    # discovery_info is part of HA's platform-setup contract but unused here.
    async_add_entities(await _async_create_entities(hass, config))
class CoverTemplate(TemplateEntity, CoverEntity):
    """Representation of a Template cover."""

    def __init__(
        self,
        hass,
        object_id,
        config,
        unique_id,
    ):
        """Initialize the Template cover."""
        super().__init__(config=config)
        self.entity_id = async_generate_entity_id(
            ENTITY_ID_FORMAT, object_id, hass=hass
        )
        self._name = friendly_name = config.get(CONF_FRIENDLY_NAME, object_id)
        # Templates driving state, position and tilt — any may be absent.
        self._template = config.get(CONF_VALUE_TEMPLATE)
        self._position_template = config.get(CONF_POSITION_TEMPLATE)
        self._tilt_template = config.get(CONF_TILT_TEMPLATE)
        self._device_class = config.get(CONF_DEVICE_CLASS)
        # Build a Script object for each configured action.
        self._open_script = None
        if (open_action := config.get(OPEN_ACTION)) is not None:
            self._open_script = Script(hass, open_action, friendly_name, DOMAIN)
        self._close_script = None
        if (close_action := config.get(CLOSE_ACTION)) is not None:
            self._close_script = Script(hass, close_action, friendly_name, DOMAIN)
        self._stop_script = None
        if (stop_action := config.get(STOP_ACTION)) is not None:
            self._stop_script = Script(hass, stop_action, friendly_name, DOMAIN)
        self._position_script = None
        if (position_action := config.get(POSITION_ACTION)) is not None:
            self._position_script = Script(hass, position_action, friendly_name, DOMAIN)
        self._tilt_script = None
        if (tilt_action := config.get(TILT_ACTION)) is not None:
            self._tilt_script = Script(hass, tilt_action, friendly_name, DOMAIN)
        # Without any state/position template we must assume optimistic mode.
        optimistic = config.get(CONF_OPTIMISTIC)
        self._optimistic = optimistic or (
            not self._template and not self._position_template
        )
        tilt_optimistic = config.get(CONF_TILT_OPTIMISTIC)
        self._tilt_optimistic = tilt_optimistic or not self._tilt_template
        self._position = None
        self._is_opening = False
        self._is_closing = False
        self._tilt_value = None
        self._unique_id = unique_id
    async def async_added_to_hass(self):
        """Register callbacks."""
        # Subscribe only to the templates that were configured; each result
        # is routed to its dedicated _update_* callback.
        if self._template:
            self.add_template_attribute(
                "_position", self._template, None, self._update_state
            )
        if self._position_template:
            self.add_template_attribute(
                "_position",
                self._position_template,
                None,
                self._update_position,
                none_on_template_error=True,
            )
        if self._tilt_template:
            self.add_template_attribute(
                "_tilt_value",
                self._tilt_template,
                None,
                self._update_tilt,
                none_on_template_error=True,
            )
        await super().async_added_to_hass()
    @callback
    def _update_state(self, result):
        """Handle a value-template result (open/closed/opening/closing)."""
        super()._update_state(result)
        if isinstance(result, TemplateError):
            self._position = None
            return
        state = str(result).lower()
        if state in _VALID_STATES:
            if not self._position_template:
                # No position template: map the binary state to fully
                # open (100) or fully closed (0).
                if state in ("true", STATE_OPEN):
                    self._position = 100
                else:
                    self._position = 0
            self._is_opening = state == STATE_OPENING
            self._is_closing = state == STATE_CLOSING
        else:
            _LOGGER.error(
                "Received invalid cover is_on state: %s. Expected: %s",
                state,
                ", ".join(_VALID_STATES),
            )
            if not self._position_template:
                self._position = None
    @callback
    def _update_position(self, result):
        """Handle a position-template result (expected to be 0..100)."""
        try:
            state = float(result)
        except ValueError as err:
            _LOGGER.error(err)
            self._position = None
            return
        # NOTE(review): float("nan") passes both comparisons below and would
        # be stored as the position — presumably unintended; confirm upstream.
        if state < 0 or state > 100:
            self._position = None
            _LOGGER.error(
                "Cover position value must be" " between 0 and 100." " Value was: %.2f",
                state,
            )
        else:
            self._position = state
@callback
def _update_tilt(self, result):
try:
state = float(result)
except ValueError as err:
_LOGGER.error(err)
self._tilt_value = None
return
if state < 0 or state > 100:
self._tilt_value = None
|
from django.contrib import admin

from charcoallog.investments.models import NewInvestment, NewInvestmentDetails
class NewInvestmentModelAdmin(admin.ModelAdmin):
    # Admin for plain NewInvestment rows (those without a details record).
    list_display = ('user_name', 'date', 'money', 'kind', 'tx_op', 'brokerage')
    readonly_fields = ('user_name',)
    search_fields = ('date',)
    date_hierarchy = 'date'

    def get_queryset(self, request):
        # Hide investments that already have a NewInvestmentDetails row;
        # those are shown by the details admin instead.
        qs = super().get_queryset(request)
        return qs.filter(newinvestmentdetails=None)
admin.site.register(NewInvestment, NewInvestmentModelAdmin)
class NewInvestmentDetailsModelAdmin(admin.ModelAdmin):
    # Admin for investments that carry target/segment/price details.
    list_display = ('user_name', 'date', 'money', 'kind', 'tx_op', 'brokerage',
                    'which_target', 'segment', 'tx_or_price', 'quant')
    readonly_fields = ('user_name',)
    search_fields = ('date',)
    date_hierarchy = 'date'
admin.site.register(NewInvestmentDetails, NewInvestmentDetailsModelAdmin)
|
gxxjjj/QuantEcon.py | quantecon/tests/test_lqcontrol.py | Python | bsd-3-clause | 2,299 | 0.00261 | """
Author: Chase Coleman
Filename: test_lqcontrol
Tests for lqcontrol.py file
"""
import sys
import os
import unittest
import numpy as np
from scipy.linalg import LinAlgError
from numpy.testing import assert_allclose
from quantecon.lqcontrol import LQ
class TestLQControl(unittest.TestCase):
    """Checks LQ.compute_sequence and LQ.stationary_values against
    hand-derived solutions."""

    def setUp(self):
        # Initial Values
        q = 1.
        r = 1.
        rf = 1.
        a = .95
        b = -1.
        c = .05
        beta = .95
        T = 1
        self.lq_scalar = LQ(q, r, a, b, C=c, beta=beta, T=T, Rf=rf)
        Q = np.array([[0., 0.], [0., 1]])
        R = np.array([[1., 0.], [0., 0]])
        RF = np.eye(2) * 100
        A = np.ones((2, 2)) * .95
        B = np.ones((2, 2)) * -1
        self.lq_mat = LQ(Q, R, A, B, beta=beta, T=T, Rf=RF)

    def tearDown(self):
        del self.lq_scalar
        del self.lq_mat

    def test_scalar_sequences(self):
        lq_scalar = self.lq_scalar
        x0 = 2
        x_seq, u_seq, w_seq = lq_scalar.compute_sequence(x0)
        # Solution found by hand
        u_0 = (-2*lq_scalar.A*lq_scalar.B*lq_scalar.beta*lq_scalar.Rf) / \
            (2*lq_scalar.Q+lq_scalar.beta*lq_scalar.Rf*2*lq_scalar.B**2) \
            * x0
        x_1 = lq_scalar.A * x0 + lq_scalar.B * u_0 + w_seq[0, -1]
        assert_allclose(u_0, u_seq, rtol=1e-4)
        assert_allclose(x_1, x_seq[0, -1], rtol=1e-4)

    def test_mat_sequences(self):
        lq_mat = self.lq_mat
        x0 = np.random.randn(2) * 25
        x_seq, u_seq, w_seq = lq_mat.compute_sequence(x0)
        assert_allclose(np.sum(u_seq), .95 * np.sum(x0), atol=1e-3)
        assert_allclose(x_seq[:, -1], np.zeros_like(x0), atol=1e-3)

    def test_stationary_mat(self):
        x0 = np.random.randn(2) * 25
        lq_mat = self.lq_mat
        P, F, d = lq_mat.stationary_values()
        # Known closed-form answers for this parameterization.
        f_answer = np.array([[-.95, -.95], [0., 0.]])
        p_answer = np.array([[1., 0], [0., 0.]])
        val_func_lq = np.dot(x0, P).dot(x0)
        val_func_answer = x0[0]**2
        assert_allclose(f_answer, F, atol=1e-3)
        assert_allclose(val_func_lq, val_func_answer, atol=1e-3)
# Run the suite with verbose output when executed directly.
if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(TestLQControl)
    unittest.TextTestRunner(verbosity=2, stream=sys.stderr).run(suite)
|
windj007/python-gremlin-rest | gremlin_rest/tests/all_tests.py | Python | apache-2.0 | 1,682 | 0.013674 | import unittest
from gremlin_rest import GremlinClient
class TestClient(unittest.TestCase):
def setUp(self):
self.client = GremlinClient('http://172.17.0.248:8182')
def tearDown(self):
for v in self.client.V().run():
self.client.delete_vertex(vertex_id = v.vertex_id)
def test_add_vertex(self):
init_cnt = len(self.client.V().run())
v = self.client.addVertex(label = "a", a = 123, b = 'qweq').first()
print repr(v)
vs | = self.client.V().run()
print repr(vs)
self.assertEqual(len(vs) - init_cnt, 1)
self.client.delete_vertex(vertex_id = v.vertex_id)
self.assertEqual(len(self.client.V().run()), init_cnt)
def test_query_vertices(self):
print 'before', repr(self.client.V().run())
v1 = self.client.addVertex(a = 123, b = 'qwe').first()
v2 = self.client.addVertex(a = 1234, b = 'qwe').first() |
print 'all', repr(self.client.V().run())
print '12', repr(self.client.V().has('a', 12).run())
print '123', repr(self.client.V().has('a', 123).run())
print '1234', repr(self.client.V().has('a', 1234).run())
print 'qwe', repr(self.client.V().has('b', 'qwe').run())
self.assertEqual(len(self.client.V().has('a', 12).run()), 0)
self.assertEqual(len(self.client.V().has('a', 123).run()), 1)
self.assertEqual(len(self.client.V().has('a', 1234).run()), 1)
self.assertEqual(len(self.client.V().has('b', 'qwe').run()), 2)
self.client.delete_vertex(vertex_id = v1.vertex_id)
self.client.delete_vertex(vertex_id = v2.vertex_id)
if __name__ == '__main__':
    unittest.main()
|
openafs-contrib/afspy | afs/model/DBServer.py | Python | bsd-2-clause | 971 | 0.014418 | """
Declares Model object of a database-server
"""
from afs.model.BaseModel import BaseModel
class DBServer(BaseModel):
    """
    Model object of a database-server
    """
    def __init__(self):
        """
        initialize an empty object.
        """
        # declare db-internal attributes
        BaseModel.__init__(self)
        ## for db index
        self.db_id = None
        ## list of DNS-hostnames
        self.servernames = None
        self.servernames_js = ""
        ## list of ipaddrs
        self.ipaddr = ""
        ## Flag if it is a clone or real db-server
        self.is_clone = True
        ## type of db : vldb or ptdb
        self.afsdb_type = ""
        ## local version of the DB
        self.local_afsdb_version = ""
        ## rxdebug version string
        self.version = ""
        self.build_date = ""
        ## list of attributes not to put into the DB
        self.unmapped_attributes_list = ['BNode', 'ExtServAttr']
|
asposebarcode/Aspose_BarCode_Cloud | Examples/Python/generating-saving/without-cloud-storage/generate-barcode-and-get-image-as-stream.py | Python | mit | 1,515 | 0.014521 | import asposebarcodecloud
from asposebarcodecloud.BarcodeApi import BarcodeApi
from asposebarcodecloud.BarcodeApi import ApiException
import ConfigParser
config = ConfigParser.ConfigParser()
config.readfp(open(r'../../data/config.properties'))
apiKey = config.get('AppConfig', 'api_key')
appSid = config.get('AppConfig', 'app_sid')
out_folder = config.get('AppConfig', 'out_folder')
data_folder = "../../data/" #resouece data folder
#ExStart:1
#Instantiate Aspose.Barcode Cloud SDK
api_client = asposebarcodecloud.ApiClient.ApiClient(apiKey, appSid, True)
barcodeApi = BarcodeApi(api_client);
#Set the barcode file name created on server
name = "sample-barcode"
#Set Text to encode inside barcode
text = "Aspose.BarCode"
#Set Ba | rcode Symbology
type = "datamatrix"
#Set Generated Barcode Image Format
format = "PNG"
try:
#invoke Aspose.BarCode Cloud SDK API to create barcode and get image stream
response = barcodeApi.GetBarcodeGenerate(text=text, type=type, format | =format)
if response.Status == "OK":
#download generated barcode from api response
outfilename = out_folder + name + "." + format
with open(outfilename, 'wb') as f:
for chunk in response.InputStream:
f.write(chunk)
except ApiException as ex:
print "ApiException:"
print "Code:" + str(ex.code)
print "Message:" + ex.message
#ExEnd:1 |
hujiajie/chromium-crosswalk | tools/telemetry/third_party/webpagereplay/third_party/dns/name.py | Python | bsd-3-clause | 21,899 | 0.002831 | # Copyright (C) 2001-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Names.
@var root: The DNS root name.
@type root: dns.name.Name object
@var empty: The empty DNS name.
@type empty: dns.name.Name object
"""
import cStringIO
import struct
import sys
if sys.hexversion >= 0x02030000:
import encodings.idna
import dns.exception
# Relations returned as the first element of Name.fullcompare().
NAMERELN_NONE = 0            # no relationship
NAMERELN_SUPERDOMAIN = 1     # self is a superdomain of other
NAMERELN_SUBDOMAIN = 2       # self is a subdomain of other
NAMERELN_EQUAL = 3           # names are equal (case-insensitively)
NAMERELN_COMMONANCESTOR = 4  # names only share a common ancestor
class EmptyLabel(dns.exception.SyntaxError):
    """Raised if a label is empty."""
    # Raised by _validate_labels() when the root (empty) label appears
    # anywhere other than the end of the label sequence.
    pass
class BadEscape(dns.exception.SyntaxError):
    """Raised if an escaped code in a text format name is invalid."""
    pass
class BadPointer(dns.exception.FormError):
    """Raised if a compression pointer points forward instead of backward."""
    pass
class BadLabelType(dns.exception.FormError):
    """Raised if the label type of a wire format name is unknown."""
    pass
class NeedAbsoluteNameOrOrigin(dns.exception.DNSException):
    """Raised if an attempt is made to convert a non-absolute name to
    wire when there is also a non-absolute (or missing) origin."""
    pass
class NameTooLong(dns.exception.FormError):
    """Raised if a name is > 255 octets long."""
    pass
class LabelTooLong(dns.exception.SyntaxError):
    """Raised if a label is > 63 octets long."""
    pass
class AbsoluteConcatenation(dns.exception.DNSException):
    """Raised if an attempt is made to append anything other than the
    empty name to an absolute name."""
    pass
class NoParent(dns.exception.DNSException):
    """Raised if an attempt is made to get the parent of the root name
    or the empty name."""
    pass
# Characters that must be backslash-escaped when rendering a name as text.
# Used purely as a membership set; the True values are never read.
_escaped = {
    '"' : True,
    '(' : True,
    ')' : True,
    '.' : True,
    ';' : True,
    '\\' : True,
    '@' : True,
    '$' : True
    }
def _escapify(label):
"""Escape the characters in label which need it.
@returns: the escaped string
@rtype: string"""
text = ''
for c in label:
if c in _escaped:
| text += '\\' + c
elif ord(c) > 0x20 and ord(c) < 0x7F:
text += c
else:
text += '\\%03d' % ord(c)
return text
def _validate_labels(labels):
"""Check for empty labels in the middle of a label sequence,
labels that are too long, and for too many labels.
@raises NameTooLong: the name as a whole is too long
@raises LabelTooLong: an individual label is too long
@raises EmptyLab | el: a label is empty (i.e. the root label) and appears
in a position other than the end of the label sequence"""
l = len(labels)
total = 0
i = -1
j = 0
for label in labels:
ll = len(label)
total += ll + 1
if ll > 63:
raise LabelTooLong
if i < 0 and label == '':
i = j
j += 1
if total > 255:
raise NameTooLong
if i >= 0 and i != l - 1:
raise EmptyLabel
class Name(object):
"""A DNS name.
The dns.name.Name class represents a DNS name as a tuple of labels.
Instances of the class are immutable.
@ivar labels: The tuple of labels in the name. Each label is a string of
up to 63 octets."""
__slots__ = ['labels']
    def __init__(self, labels):
        """Initialize a domain name from a list of labels.
        @param labels: the labels
        @type labels: any iterable whose values are strings
        """
        # Go through object's __setattr__ because our own raises TypeError
        # to keep instances immutable.
        super(Name, self).__setattr__('labels', tuple(labels))
        _validate_labels(self.labels)
    def __setattr__(self, name, value):
        # Name instances are immutable; reject any attribute mutation.
        raise TypeError("object doesn't support attribute assignment")
def is_absolute(self):
"""Is the most significant label of this name the root label?
@rtype: bool
"""
return len(self.labels) > 0 and self.labels[-1] == ''
def is_wild(self):
"""Is this name wild? (I.e. Is the least significant label '*'?)
@rtype: bool
"""
return len(self.labels) > 0 and self.labels[0] == '*'
    def __hash__(self):
        """Return a case-insensitive hash of the name.
        @rtype: int
        """
        # Shift-and-add rolling hash over the lowercased labels, reduced
        # mod sys.maxint so the long result fits a plain int (python 2).
        h = 0L
        for label in self.labels:
            for c in label:
                h += ( h << 3 ) + ord(c.lower())
        return int(h % sys.maxint)
    def fullcompare(self, other):
        """Compare two names, returning a 3-tuple (relation, order, nlabels).
        I{relation} describes the relation ship between the names,
        and is one of: dns.name.NAMERELN_NONE,
        dns.name.NAMERELN_SUPERDOMAIN, dns.name.NAMERELN_SUBDOMAIN,
        dns.name.NAMERELN_EQUAL, or dns.name.NAMERELN_COMMONANCESTOR
        I{order} is < 0 if self < other, > 0 if self > other, and ==
        0 if self == other.  A relative name is always less than an
        absolute name.  If both names have the same relativity, then
        the DNSSEC order relation is used to order them.
        I{nlabels} is the number of significant labels that the two names
        have in common.
        """
        sabs = self.is_absolute()
        oabs = other.is_absolute()
        if sabs != oabs:
            # Names of different relativity are unrelated; the absolute
            # one sorts after the relative one.
            if sabs:
                return (NAMERELN_NONE, 1, 0)
            else:
                return (NAMERELN_NONE, -1, 0)
        l1 = len(self.labels)
        l2 = len(other.labels)
        ldiff = l1 - l2
        if ldiff < 0:
            l = l1
        else:
            l = l2
        order = 0
        nlabels = 0
        namereln = NAMERELN_NONE
        # Walk the labels right-to-left (most significant first),
        # case-insensitively, until a pair differs.
        while l > 0:
            l -= 1
            l1 -= 1
            l2 -= 1
            label1 = self.labels[l1].lower()
            label2 = other.labels[l2].lower()
            if label1 < label2:
                order = -1
                if nlabels > 0:
                    namereln = NAMERELN_COMMONANCESTOR
                return (namereln, order, nlabels)
            elif label1 > label2:
                order = 1
                if nlabels > 0:
                    namereln = NAMERELN_COMMONANCESTOR
                return (namereln, order, nlabels)
            nlabels += 1
        # All compared labels matched: the longer name is the subdomain.
        order = ldiff
        if ldiff < 0:
            namereln = NAMERELN_SUPERDOMAIN
        elif ldiff > 0:
            namereln = NAMERELN_SUBDOMAIN
        else:
            namereln = NAMERELN_EQUAL
        return (namereln, order, nlabels)
def is_subdomain(self, other):
"""Is self a subdomain of other?
The notion of subdomain includes equality.
@rtype: bool
"""
(nr, o, nl) = self.fullcompare(other)
if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL:
return True
return False
def is_superdomain(self, other):
"""Is self a superdomain of other?
The notion of subdomain includes equality.
@rtype: bool
"""
(nr, o, nl) = self.fullcompare(other)
if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL:
return True
return False
def canonicalize(self):
"""Return a name which is equal to the current name, but is in
DNSSEC canonical form.
@rtype: dns.name.Name object
"""
return Name([x.lower() for x in self.labels])
def __eq__(self, other):
if isinstance(other, Name):
return self.fullcompare(other)[1] == 0
else:
return False
def __ne__(self, other):
if isinstance(other, Name):
return self.fullcompare(other)[1] != 0 |
ivmech/iviny-scope | lib/xlsxwriter/test/comparison/test_set_start_page01.py | Python | gpl-3.0 | 1,929 | 0.001037 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013, John McNamara, jmcnamara@cpan.org
#
import unittest
import os
from ...workbook import Workbook
from ..helperfunctions import _compare_xlsx_files
class TestCompareXLSXFiles(unittest.TestCase):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.maxDiff = None
        filename = 'set_start_page01.xlsx'
        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename
        # Printer settings are binary and environment-specific, so they are
        # excluded from the byte comparison.
        self.ignore_files = ['xl/printerSettings/printerSettings1.bin',
                             'xl/worksheets/_rels/sheet1.xml.rels']
        self.ignore_elements = {'[Content_Types].xml': ['<Default Extension="bin"'],
                                'xl/worksheets/sheet1.xml': ['<pageMargins']}

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file with printer settings."""
        filename = self.got_filename
        ####################################################
        workbook = Workbook(filename)
        worksheet = workbook.add_worksheet()
        worksheet.set_start_page(1)
        worksheet.set_paper(9)
        worksheet.write('A1', 'Foo')
        workbook.close()
        ####################################################
        got, exp = _compare_xlsx_files(self.got_filename,
                                       self.exp_filename,
                                       self.ignore_files,
                                       self.ignore_elements)
        self.assertEqual(got, exp)

    def tearDown(self):
        # Cleanup.
        if os.path.exists(self.got_filename):
            os.remove(self.got_filename)
if __name__ == '__main__':
    unittest.main()
|
googleapis/python-monitoring-dashboards | scripts/fixup_dashboard_v1_keywords.py | Python | apache-2.0 | 6,186 | 0.00097 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """Stably split *iterator* into (matching, non-matching) lists."""
    matching: List[Any] = []
    rest: List[Any] = []
    for item in iterator:
        bucket = matching if predicate(item) else rest
        bucket.append(item)
    # Returns trueList, falseList
    return matching, rest
class dashboardCallTransformer(cst.CSTTransformer):
    # Control parameters every generated API method accepts as keywords.
    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
    # Ordered request-field names for each API method being rewritten.
    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
        'create_dashboard': ('parent', 'dashboard', 'validate_only', ),
        'delete_dashboard': ('name', ),
        'get_dashboard': ('name', ),
        'list_dashboards': ('parent', 'page_size', 'page_token', ),
        'update_dashboard': ('dashboard', 'validate_only', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        # Rewrites client.method(a, b, kw=c, retry=...) into the new
        # client.method(request={...}, retry=...) calling convention.
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated
        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )
        # Any positional args beyond the request fields must be control args.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )
        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )
def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=dashboardCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    for root, _, filenames in os.walk(in_dir):
        for filename in filenames:
            if os.path.splitext(filename)[1] != ".py":
                continue
            src_path = pathlib.Path(os.path.join(root, filename))
            # Parse the source and apply the method-call fixes.
            module = cst.parse_module(src_path.read_text())
            updated_module = module.visit(transformer)
            # Mirror the directory structure of in_dir under out_dir.
            dest_path = out_dir.joinpath(src_path.relative_to(in_dir))
            dest_path.parent.mkdir(parents=True, exist_ok=True)
            # Emit the rewritten source at the corresponding path.
            dest_path.write_text(updated_module.code)
if __name__ == '__main__':
    # Command-line entry point: validate the two directory arguments,
    # then rewrite every .py file from input_dir into output_dir.
    # (Stray extraction artifacts in the argument definitions have been removed.)
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the dashboard client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    # Both directories must already exist; the output must also be empty.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)
    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)
    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)
    fix_files(input_dir, output_dir)
|
bat-serjo/vivisect | vivisect/impemu/platarch/h8.py | Python | apache-2.0 | 464 | 0.002155 | import envi.archs.h8.emu as h8_emu
import envi.archs.h8.regs as h8_regs
import vivisect.impemu.emulator as v_i_emulator


class H8WorkspaceEmulator(v_i_emulator.WorkspaceEmulator, h8_emu.H8Emulator):
    """Workspace emulator for the H8 architecture.

    Combines the generic vivisect workspace emulator with the H8 CPU
    emulator. (Stray '|' extraction artifacts in the import line and the
    __init__ call have been repaired.)
    """

    # Registers treated as tainted for argument/return-value tracking.
    taintregs = [h8_regs.REG_ER0, h8_regs.REG_ER1, h8_regs.REG_ER2]

    def __init__(self, vw, logwrite=False, logread=False):
        h8_emu.H8Emulator.__init__(self)
        v_i_emulator.WorkspaceEmulator.__init__(self, vw, logwrite=logwrite, logread=logread)
|
ods94065/opost | opost/settings.py | Python | mit | 6,192 | 0.00113 | # Django settings for opost project.
import os
import dj_database_url
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
('Opost Admin', 'ods94043@yahoo.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': dj_database_url.config()
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = 'staticfiles'
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
# NOTE: We use the application name because this is serving our whole front-end!
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): the secret key is hard-coded in source; consider loading it
# from an environment variable before publishing this settings module.
SECRET_KEY = 'omsgfq6n&se=325+rh0_w5s_7gl=-w+zk$jk+gnazpmf0d04ks'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'opost.urls'
# Python dotted path to the WSGI application used by Django's runserver.
# (Repaired a stray '|' extraction artifact that split the dotted path.)
WSGI_APPLICATION = 'opost.wsgi.application'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# Applications enabled for this project; contrib apps first, then local apps.
# (Repaired a stray '|' extraction artifact inside 'django.contrib.sites'.)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.markup',
    'django_extensions',
    'postapi',
    'postweb',
    'rest_framework',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins', 'console'],
'level': 'ERROR',
'propagate': True,
},
'postweb.services': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
'postweb.views': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
}
}
}
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
REST_FRAMEWORK = {
# Our API is generally hyperlinked, and most serializers
# therefore use this as a base.
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.HyperlinkedModelSerializer',
# By default, allow write access for currently logged in users,
# and read access otherwise. This still allows any logged-in
# user to edit anything, however, so be careful!
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticatedOrReadOnly'
]
}
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/web/login'
# Internal service endpoints used by the web front-end to reach the post API.
SERVICES = {
    'postapi': {
        'endpoint': 'http://localhost:5100/postapi/',
        # NOTE(review): credentials are hard-coded; move them to environment
        # variables or a local settings override before deployment.
        'user': 'udapost',
        'password': 'admin123'
    }
}
|
sysadmin75/ansible | test/lib/ansible_test/_internal/commands/integration/__init__.py | Python | gpl-3.0 | 36,116 | 0.003544 | """Ansible integration test infrastructure."""
from __future__ import annotations
| import contextlib
import datetime
import json
import os
import re
import shutil
import tempfile
import time
import typing as t
from ...encoding import (
| to_bytes,
)
from ...ansible_util import (
ansible_environment,
)
from ...executor import (
get_changes_filter,
AllTargetsSkipped,
Delegate,
ListTargets,
)
from ...python_requirements import (
install_requirements,
)
from ...ci import (
get_ci_provider,
)
from ...target import (
analyze_integration_target_dependencies,
walk_integration_targets,
IntegrationTarget,
walk_internal_targets,
TIntegrationTarget,
IntegrationTargetType,
)
from ...config import (
IntegrationConfig,
NetworkIntegrationConfig,
PosixIntegrationConfig,
WindowsIntegrationConfig,
TIntegrationConfig,
)
from ...io import (
make_dirs,
read_text_file,
)
from ...util import (
ApplicationError,
display,
SubprocessError,
remove_tree,
)
from ...util_common import (
named_temporary_file,
ResultType,
run_command,
write_json_test_results,
check_pyyaml,
)
from ...coverage_util import (
cover_python,
)
from ...cache import (
CommonCache,
)
from .cloud import (
CloudEnvironmentConfig,
cloud_filter,
cloud_init,
get_cloud_environment,
get_cloud_platforms,
)
from ...data import (
data_context,
)
from ...host_configs import (
OriginConfig,
)
from ...host_profiles import (
ControllerProfile,
HostProfile,
PosixProfile,
SshTargetHostProfile,
)
from ...provisioning import (
HostState,
prepare_profiles,
)
from ...pypi_proxy import (
configure_pypi_proxy,
)
from ...inventory import (
create_controller_inventory,
create_windows_inventory,
create_network_inventory,
create_posix_inventory,
)
from .filters import (
get_target_filter,
)
from .coverage import (
CoverageManager,
)
THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
def generate_dependency_map(integration_targets):  # type: (t.List[IntegrationTarget]) -> t.Dict[str, t.Set[IntegrationTarget]]
    """Analyze the given list of integration test targets and return a dictionary expressing target names and the targets on which they depend."""
    targets_by_name = {target.name: target for target in integration_targets}
    target_dependencies = analyze_integration_target_dependencies(integration_targets)
    dependency_map = {}  # type: t.Dict[str, t.Set[IntegrationTarget]]
    unknown_dependencies = set()
    for dependency, dependents in target_dependencies.items():
        resolved_target = targets_by_name.get(dependency)
        if not resolved_target:
            # Remember the bad name; report all of them together below.
            unknown_dependencies.add(dependency)
            continue
        for dependent in dependents:
            dependency_map.setdefault(dependent, set()).add(resolved_target)
    if unknown_dependencies:
        raise ApplicationError('Non-existent target dependencies: %s' % ', '.join(sorted(unknown_dependencies)))
    return dependency_map
def get_files_needed(target_dependencies):  # type: (t.List[IntegrationTarget]) -> t.List[str]
    """Return a list of files needed by the given list of target dependencies."""
    # Collect the union of all advertised files, de-duplicated and sorted.
    files_needed = sorted({path for dependency in target_dependencies for path in dependency.needs_file})
    # Every advertised file must actually exist on disk.
    missing_paths = [path for path in files_needed if not os.path.isfile(path)]
    if missing_paths:
        raise ApplicationError('Invalid "needs/file/*" aliases:\n%s' % '\n'.join(missing_paths))
    return files_needed
def check_inventory(args, inventory_path):  # type: (IntegrationConfig, str) -> None
    """Check the given inventory for issues."""
    # Nothing to check when running directly from the origin host.
    if isinstance(args.controller, OriginConfig):
        return
    if not os.path.exists(inventory_path):
        return
    inventory = read_text_file(inventory_path)
    if 'ansible_ssh_private_key_file' in inventory:
        display.warning('Use of "ansible_ssh_private_key_file" in inventory with the --docker or --remote option is unsupported and will likely fail.')
def get_inventory_relative_path(args):  # type: (IntegrationConfig) -> str
    """Return the inventory path used for the given integration configuration relative to the content root."""
    # Each command flavor keeps its own inventory file name so the POSIX,
    # Windows and network variants can coexist in one content tree.
    inventory_names = {
        PosixIntegrationConfig: 'inventory',
        WindowsIntegrationConfig: 'inventory.winrm',
        NetworkIntegrationConfig: 'inventory.networking',
    }  # type: t.Dict[t.Type[IntegrationConfig], str]
    # Lookup is keyed on the concrete config type; an unknown type raises KeyError.
    return os.path.join(data_context().content.integration_path, inventory_names[type(args)])
def delegate_inventory(args, inventory_path_src):  # type: (IntegrationConfig, str) -> None
    """Make the given inventory available during delegation."""
    # POSIX runs generate their own inventory; only Windows/network need this.
    if isinstance(args, PosixIntegrationConfig):
        return
    def inventory_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
        """
        Add the inventory file to the payload file list.
        This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
        """
        inventory_path = get_inventory_relative_path(args)
        inventory_tuple = inventory_path_src, inventory_path
        if os.path.isfile(inventory_path_src) and inventory_tuple not in files:
            # Any payload entry already destined for the inventory path is
            # replaced by the user-supplied source file.
            originals = [item for item in files if item[1] == inventory_path]
            if originals:
                for original in originals:
                    files.remove(original)
                display.warning('Overriding inventory file "%s" with "%s".' % (inventory_path, inventory_path_src))
            else:
                display.notice('Sourcing inventory file "%s" from "%s".' % (inventory_path, inventory_path_src))
            files.append(inventory_tuple)
    # The callback runs later, when the delegation payload is assembled.
    data_context().register_payload_callback(inventory_callback)
@contextlib.contextmanager
def integration_test_environment(
args, # type: IntegrationConfig
target, # type: IntegrationTarget
inventory_path_src, # type: str
): # type: (...) -> t.ContextManager[IntegrationEnvironment]
"""Context manager that prepares the integration test environment and cleans it up."""
ansible_config_src = args.get_ansible_config()
ansible_config_relative = os.path.join(data_context().content.integration_path, '%s.cfg' % args.command)
if args.no_temp_workdir or 'no/temp_workdir/' in target.aliases:
display.warning('Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
integration_dir = os.path.join(data_context().content.root, data_context().content.integration_path)
targets_dir = os.path.join(data_context().content.root, data_context().content.integration_targets_path)
inventory_path = inventory_path_src
ansible_config = ansible_config_src
vars_file = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file)
return
# When testing a collection, the temporary directory must reside within the collection.
# This is necessary to enable support for the default collection for non-collection content (playbooks and roles).
root_temp_dir = os.path.join(ResultType.TMP.path, 'integration')
prefix = '%s-' % target.name
suffix = u'-\u00c5\u00d1\u015a\u00cc\u03b2\u0141\u00c8'
if args.no_temp_unicode or 'no/temp_unicode/' in target.aliases:
display.warning('Disabling unicode in the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
suffix = '-ansible'
if args.explain:
temp_dir = os.path.join(root_temp_dir, '%stemp%s' % (prefix, suffix))
else:
make_dirs(root_temp_dir)
temp_dir = tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=root_temp_dir)
try:
display.info('Preparing temporary directory: %s' % temp_d |
shoopio/shoop | shuup/core/utils/line_unit_mixin.py | Python | agpl-3.0 | 921 | 0 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2017, Anders Innovations. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.core.exceptions import ObjectDoesNotExist
from shuup.core.models._units import PiecesSalesUnit, UnitInterface
class LineWithUnit(object):
    """Mixin providing a ``unit`` accessor for order/basket line objects."""
    @property
    def unit(self):
        """
        Unit of this line.
        :rtype: UnitInterface
        """
        # TODO: Store the sales unit and display unit to the line
        # Fall back to a "pieces" unit when the line has no product, the
        # product has no sales unit, or the line is not tied to a shop.
        if not self.product or not self.product.sales_unit or not self.shop:
            return UnitInterface(PiecesSalesUnit())
        try:
            shop_product = self.product.get_shop_instance(self.shop)
        except ObjectDoesNotExist:
            # No shop-specific product: use the product's own sales unit.
            return UnitInterface(self.product.sales_unit)
        return shop_product.unit
|
bedubs/hkstairs | stairdb/admin.py | Python | gpl-3.0 | 1,433 | 0.020237 | from django.contrib import admin
from leaflet.admin import LeafletGeoAdmin
from models import Stair, Photo
from django.conf import settings
class OverrideLeafletGeoAdmin(LeafletGeoAdmin):
    """LeafletGeoAdmin with per-admin tile-layer overrides.

    (Repaired a stray '|' extraction artifact inside the OSM layer options.)
    """
    # straight hint @ https://github.com/makinacorpus/django-leaflet/pull/28#issuecomment-23943492
    settings_overrides = {
        'TILES': [
            # base layers by preference
            ('City Map', 'http://stairculture.com/tiles/hk_clr1_2/{z}/{x}/{y}.png', {
                'maxZoom': 19
            }),
            ('OSM', 'https://api.mapbox.com/styles/v1/mapbox/outdoors-v10/tiles/256/{z}/{x}/{y}?access_token=' + settings.MAPBOX_API_KEY, {
                'maxZoom': 19,
                'attribution': '<a href="http://www.openstreetmap.org/copyright" target="_blank"> OpenStreetMap</a> contributors'
            }),
        ],
        'MINIMAP': False,  # instantiate this later in the admin-map.js file to set basemap
    }
class PhotoInline(admin.TabularInline):
    # Inline editor for a stair's photos on the Stair admin page.
    model = Photo
    extra = 0  # no blank photo rows by default
    exclude = ('thumbnail',)  # hidden from the form — presumably generated; confirm
class StairAdmin(OverrideLeafletGeoAdmin):
search_fields | = ['stairid','type','location']
inlines = [PhotoInline,]
ordering = ('stairid',)
class PhotoAdmin(OverrideLeafletGeoAdmin):
    # Show the rendered image preview (image_tag) above the editable fields.
    fields = ('image_tag','image','stairid','geom')
    readonly_fields = ('image_tag',)
    ordering = ('geom','stairid')
# Register both models with their customized admin classes.
admin.site.register(Stair,StairAdmin)
admin.site.register(Photo,PhotoAdmin)
|
yotchang4s/cafebabepy | src/main/python/idlelib/idle_test/test_debugger.py | Python | bsd-3-clause | 533 | 0.003752 | ''' Test idlelib.debugger.
Coverage: 19%
'''
from idlelib import debugger
from test.support import requires
requires('gui')
import unittest
from tkinter import Tk
class NameSpaceTest(unittest.TestCase):
    """Construct debugger.NamespaceViewer under a hidden Tk root.

    (Repaired a stray '|' extraction artifact that split the class name.)
    """

    @classmethod
    def setUpClass(cls):
        # A single withdrawn Tk root is shared by all tests in this class.
        cls.root = Tk()
        cls.root.withdraw()

    @classmethod
    def tearDownClass(cls):
        cls.root.destroy()
        del cls.root

    def test_init(self):
        # Smoke test: construction must not raise.
        debugger.NamespaceViewer(self.root, 'Test')
# Entry point when run directly (repaired a '|' artifact in __name__).
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
yankcrime/telegraf | scripts/build.py | Python | mit | 38,977 | 0.004413 | #!/usr/bin/python -u
import sys
import os
import subprocess
import time
from datetime import datetime
import shutil
import tempfile
import hashlib
import re
import logging
import argparse
################
#### Telegraf Variables
################
# Packaging variables
PACKAGE_NAME = "telegraf"
INSTALL_ROOT_DIR = "/usr/bin"
LOG_DIR = "/var/log/telegraf"
SCRIPT_DIR = "/usr/lib/telegraf/scripts"
CONFIG_DIR = "/etc/telegraf"
LOGROTATE_DIR = "/etc/logrotate.d"
INIT_SCRIPT = "scripts/init.sh"
SYSTEMD_SCRIPT = "scripts/telegraf.service"
LOGROTATE_SCRIPT = "etc/logrotate.d/telegraf"
DEFAULT_CONFIG = "etc/telegraf.conf"
DEFAULT_WINDOWS_CONFIG = "etc/telegraf_windows.conf"
POSTINST_SCRIPT = "scripts/post-install.sh"
PREINST_SCRIPT = "scripts/pre-install.sh"
POSTREMOVE_SCRIPT = "scripts/post-remove.sh"
PREREMOVE_SCRIPT = "scripts/pre-remove.sh"
# Default AWS S3 bucket for uploads
DEFAULT_BUCKET = "dl.influxdata.com/telegraf/artifacts"
CONFIGURATION_FILES = [
CONFIG_DIR + '/telegraf.conf',
LOGROTATE_DIR + '/telegraf',
]
# META-PACKAGE VARIABLES
PACKAGE_LICENSE = "MIT"
PACKAGE_URL = "https://github.com/influxdata/telegraf"
MAINTAINER = "support@influxdb.com"
VENDOR = "InfluxData"
DESCRIPTION = "Plugin-driven server agent for reporting metrics into InfluxDB."
# SCRIPT START
prereqs = [ 'git', 'go' ]
go_vet_command = "go tool vet -composites=true ./"
optional_prereqs = [ 'gvm', 'fpm', 'rpmbuild' ]
fpm_common_args = "-f -s dir --log error \
--vendor {} \
--url {} \
--license {} \
--maintainer {} \
--config-files {} \
--config-files {} \
--after-install {} \
--before-install {} \
--after-remove {} \
--before-remove {} \
--description \"{}\"".format(
VENDOR,
PACKAGE_URL,
PACKAGE_LICENSE,
MAINTAINER,
CONFIG_DIR + '/telegraf.conf',
LOGROTATE_DIR + '/telegraf',
POSTINST_SCRIPT,
PREINST_SCRIPT,
POSTREMOVE_SCRIPT,
PREREMOVE_SCRIPT,
DESCRIPTION)
targets = {
'telegraf' : './cmd/telegraf',
}
supported_builds = {
"darwin": [ "amd64" ],
"windows": [ "amd64" ],
"linux": [ "amd64", "i386", "armhf", "armel", "arm64", "static_amd64" ],
"freebsd": [ "amd64" ]
}
supported_packages = {
"darwin": [ "tar" ],
"linux": [ "deb", "rpm", "tar" ],
"windows": [ "zip" ],
"freebsd": [ "tar" ]
}
supported_tags = {
# "linux": {
# "amd64": ["sensors"]
# }
}
prereq_cmds = {
# "linux": "sudo apt-get install lm-sensors libsensors4-dev"
}
################
#### Telegraf Functions
################
def print_banner():
    """Log the Telegraf ASCII-art banner at the start of a build."""
    logging.info("""
 _____ _ __
 /__ \\___| | ___ __ _ _ __ __ _ / _|
 / /\\/ _ \\ |/ _ \\/ _` | '__/ _` | |_
 / / | __/ | __/ (_| | | | (_| | _|
 \\/ \\___|_|\\___|\\__, |_| \\__,_|_|
 |___/
 Build Script
 """)
def create_package_fs(build_root):
    """Create a filesystem structure to mimic the package filesystem."""
    logging.debug("Creating a filesystem hierarchy from directory: {}".format(build_root))
    # The leading '/' is stripped from each path ([1:]) because os.path.join
    # would otherwise discard build_root when handed an absolute component.
    for relative_dir in (INSTALL_ROOT_DIR[1:], LOG_DIR[1:], SCRIPT_DIR[1:], CONFIG_DIR[1:], LOGROTATE_DIR[1:]):
        target_dir = os.path.join(build_root, relative_dir)
        os.makedirs(target_dir)
        os.chmod(target_dir, 0o755)
def package_scripts(build_root, config_only=False, windows=False):
    """Copy the necessary scripts and configuration files to the package
    filesystem.

    The repeated copy+chmod pattern is factored into a local helper; all
    destinations and modes are unchanged from the original implementation.
    """
    def install_file(src, dst, mode=0o644):
        # Copy a file into the package tree and normalize its permissions.
        shutil.copyfile(src, dst)
        os.chmod(dst, mode)
    if config_only or windows:
        logging.info("Copying configuration to build directory")
        # Windows builds ship their own default configuration file.
        config_src = DEFAULT_WINDOWS_CONFIG if windows else DEFAULT_CONFIG
        install_file(config_src, os.path.join(build_root, "telegraf.conf"))
    else:
        logging.info("Copying scripts and configuration to build directory")
        # NOTE(review): scripts are installed 0o644 (not executable), matching
        # the original behavior — confirm this is intentional.
        install_file(INIT_SCRIPT, os.path.join(build_root, SCRIPT_DIR[1:], INIT_SCRIPT.split('/')[1]))
        install_file(SYSTEMD_SCRIPT, os.path.join(build_root, SCRIPT_DIR[1:], SYSTEMD_SCRIPT.split('/')[1]))
        install_file(LOGROTATE_SCRIPT, os.path.join(build_root, LOGROTATE_DIR[1:], "telegraf"))
        install_file(DEFAULT_CONFIG, os.path.join(build_root, CONFIG_DIR[1:], "telegraf.conf"))
def run_generate():
    """Code-generation hook required by the shared build script interface."""
    # NOOP for Telegraf
    return True
def go_get(branch, update=False, no_uncommitted=False):
    """Retrieve build dependencies or restore pinned dependencies.
    Returns False (and aborts) when no_uncommitted is set and the working
    tree has local changes; otherwise restores deps via gdm and returns True.
    """
    # NOTE(review): 'branch' and 'update' are accepted for interface
    # compatibility with the shared build script but are unused here.
    if local_changes() and no_uncommitted:
        logging.error("There are uncommitted changes in the current directory.")
        return False
    if not check_path_for("gdm"):
        logging.info("Downloading `gdm`...")
        get_command = "go get github.com/sparrc/gdm"
        run(get_command)
    logging.info("Retrieving dependencies with `gdm`...")
    # Restore the Windows-pinned dependency set first, then the main set.
    run("{}/bin/gdm restore -v -f Godeps_windows".format(os.environ.get("GOPATH")))
    run("{}/bin/gdm restore -v".format(os.environ.get("GOPATH")))
    return True
def run_tests(race, parallel, timeout, no_vet):
    """Test hook required by the shared build script interface."""
    # Currently a NOOP for Telegraf
    return True
################
#### All Telegraf-specific content above this line
################
def run(command, allow_failure=False, shell=False):
    """Run shell command (convenience wrapper around subprocess).
    Returns decoded, stripped combined stdout/stderr on success.  On failure:
    returns None when allow_failure is set, otherwise logs and exits(1).
    """
    out = None
    logging.debug("{}".format(command))
    try:
        if shell:
            out = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=shell)
        else:
            # Without a shell the command string is split on whitespace.
            out = subprocess.check_output(command.split(), stderr=subprocess.STDOUT)
        out = out.decode('utf-8').strip()
        # logging.debug("Command output: {}".format(out))
    except subprocess.CalledProcessError as e:
        if allow_failure:
            logging.warn("Command '{}' failed with error: {}".format(command, e.output))
            return None
        else:
            logging.error("Command '{}' failed with error: {}".format(command, e.output))
            sys.exit(1)
    except OSError as e:
        if allow_failure:
            logging.warn("Command '{}' failed with error: {}".format(command, e))
            # 'out' is still its pre-call value (None) here.
            return out
        else:
            logging.error("Command '{}' failed with error: {}".format(command, e))
            sys.exit(1)
    else:
        return out
def create_temp_dir(prefix = None):
    """ Create temporary directory with optional prefix.
    """
    # Fall back to a package-derived prefix only when none was supplied.
    chosen_prefix = "{}-build.".format(PACKAGE_NAME) if prefix is None else prefix
    return tempfile.mkdtemp(prefix=chosen_prefix)
def increment_minor_version(version):
    """Return the version with the minor version incremented and patch
    version set to zero.  Non three-part versions are returned unchanged.
    """
    parts = version.split('.')
    if len(parts) != 3:
        logging.warn("Could not determine how to increment version '{}', will just use provided version.".format(version))
        return version
    major, minor, _patch = parts
    bumped = '.'.join([major, str(int(minor) + 1), '0'])
    logging.debug("Incremented version from '{}' to '{}'.".format(version, bumped))
    return bumped
|
def get_current_version_tag():
    """Retrieve the raw git version tag.
    Uses the most recent tag reachable from HEAD (``--abbrev=0``), falling
    back to a commit hash via ``--always`` when no tag exists.
    """
    version = run("git describe --always --tags --abbrev=0")
    return version
def get_current_version():
"""Parse version information from git tag output.
"""
version_tag = get_current_version_tag()
# Remove leading 'v'
if version_tag[0] == 'v':
version_tag = version_tag[1:]
# Replace any '-'/'_' with '~'
if '-' in version_tag:
version_tag = version_tag.replace("-","~")
if '_' in version_tag:
version_tag = version_tag.replac |
bp-kelley/rdkit | Data/Pains/test_data/run_tests.py | Python | bsd-3-clause | 1,219 | 0.011485 | #
# Copyright (C) 2015 Greg Landrum
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
import unittest, os, csv
from rdkit import Chem, RDConfig
class TestCase(unittest.TestCase):
  """Verify that known molecules match the WEHI PAINS SMARTS filters.

  (Repaired stray '|' extraction artifacts in test1.)
  """

  def setUp(self):
    # Load the PAINS definitions and pre-build one matcher per SMARTS.
    self.basePath = os.path.join(RDConfig.RDDataDir, 'Pains')
    self.painsFile = os.path.join(self.basePath, 'wehi_pains.csv')
    with open(self.painsFile, 'r') as inf:
      self.painsDefs = [x for x in csv.reader(inf)]
    self.matchers = [Chem.MolFromSmarts(x[0], mergeHs=True) for x in self.painsDefs]

  def test1(self):
    " molecules that we know should match "
    with open(os.path.join(self.basePath, 'test_data', 'test_set3.txt'), 'r') as inf:
      testData = [x.strip().split() for x in inf if x[0] != '#']
    for line in testData:
      self.assertEqual(len(line), 5)
      id_ = int(line[0])
      m = Chem.MolFromSmiles(line[2])
      self.assertTrue(m is not None)
      # The molecule must match both with and without explicit hydrogens.
      self.assertTrue(m.HasSubstructMatch(self.matchers[id_]))
      self.assertTrue(Chem.AddHs(m).HasSubstructMatch(self.matchers[id_]))
if __name__ == '__main__':
unittest.main()
|
cloudcopy/seahub | seahub/views/wiki.py | Python | apache-2.0 | 8,863 | 0.001805 | # -*- coding: utf-8 -*-
"""
File related views, including view_file, edit_file, view_history_file,
view_trash_file, view_snapshot_file
"""
import os
import hashlib
import json
import stat
import tempfile
import urllib
import urllib2
import chardet
from django.contrib.sites.models import Site, RequestSite
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.http import HttpResponse, HttpResponseBadRequest, Http404, \
HttpResponseRedirect
from django.shortcuts import render_to_response, redirect
from django.template import Context, loader, RequestContext
from django.template.loader import render_to_string
from django.utils.http import urlquote
from django.utils.translation import ugettext as _
import seaserv
from seaserv import seafile_api
from pysearpc import SearpcError
from seahub.auth.decorators import login_required
from seahub.base.decorators import user_mods_check
from seahub.wiki.models import PersonalWiki, WikiDoesNotExist, WikiPageMissing
from seahub.wiki import get_personal_wiki_page, get_personal_wiki_repo, \
convert_wiki_link, get_wiki_pages
from seahub.wiki.forms import WikiCreateForm, WikiNewPageForm
from seahub.wiki.utils import clean_page_name
from seahub.utils import render_error
@login_required
@user_mods_check
def personal_wiki(request, page_name="home"):
    """Render a page of the user's personal wiki.

    Missing wiki: show the create-wiki page.  Missing page: create an empty
    page and redirect to it.  Otherwise render the page with its sidebar
    index.  (Repaired a stray '|' artifact in the "index_content" key.)
    """
    username = request.user.username
    wiki_exists = True
    try:
        content, repo, dirent = get_personal_wiki_page(username, page_name)
    except WikiDoesNotExist:
        # No wiki yet: offer the user's unencrypted libraries to create one.
        wiki_exists = False
        owned_repos = seafile_api.get_owned_repo_list(username)
        owned_repos = [r for r in owned_repos if not r.encrypted]
        return render_to_response("wiki/personal_wiki.html", {
            "wiki_exists": wiki_exists,
            "owned_repos": owned_repos,
            }, context_instance=RequestContext(request))
    except WikiPageMissing:
        # Wiki exists but this page does not: create an empty markdown file.
        repo = get_personal_wiki_repo(username)
        filename = clean_page_name(page_name) + '.md'
        if not seaserv.post_empty_file(repo.id, "/", filename, username):
            return render_error(request, _("Failed to create wiki page. Please retry later."))
        return HttpResponseRedirect(reverse('personal_wiki', args=[page_name]))
    else:
        url_prefix = reverse('personal_wiki', args=[])
        content = convert_wiki_link(content, url_prefix, repo.id, username)
        # fetch file modified time and modifier
        path = '/' + dirent.obj_name
        try:
            dirent = seafile_api.get_dirent_by_path(repo.id, path)
            latest_contributor, last_modified = dirent.modifier, dirent.mtime
        except SearpcError as e:
            latest_contributor, last_modified = None, 0
        # The optional "index" page is rendered as a sidebar when present.
        wiki_index_exists = True
        index_pagename = 'index'
        index_content = None
        try:
            index_content, index_repo, index_dirent = get_personal_wiki_page(username, index_pagename)
        except (WikiDoesNotExist, WikiPageMissing) as e:
            wiki_index_exists = False
        else:
            index_content = convert_wiki_link(index_content, url_prefix, index_repo.id, username)
        return render_to_response("wiki/personal_wiki.html", {
            "wiki_exists": wiki_exists,
            "content": content,
            "page": os.path.splitext(dirent.obj_name)[0],
            "last_modified": last_modified,
            "latest_contributor": latest_contributor or _("Unknown"),
            "path": path,
            "repo_id": repo.id,
            "search_repo_id": repo.id,
            "search_wiki": True,
            "wiki_index_exists": wiki_index_exists,
            "index_content": index_content,
            }, context_instance=RequestContext(request))
@login_required
@user_mods_check
def personal_wiki_pages(request):
    """
    List personal wiki pages.

    Renders the page listing for the logged-in user's wiki library, or an
    error page when the library is missing or the backend RPC fails.
    """
    username = request.user.username
    try:
        wiki_repo = get_personal_wiki_repo(username)
        wiki_pages = get_wiki_pages(wiki_repo)
    except SearpcError:
        return render_error(request, _('Internal Server Error'))
    except WikiDoesNotExist:
        return render_error(request, _('Wiki does not exists.'))

    template_context = {
        "pages": wiki_pages,
        "repo_id": wiki_repo.id,
        "search_repo_id": wiki_repo.id,
        "search_wiki": True,
    }
    return render_to_response("wiki/personal_wiki_pages.html",
                              template_context,
                              context_instance=RequestContext(request))
@login_required
def personal_wiki_create(request):
    """Create a new library to back the user's personal wiki (AJAX POST).

    On success responds with JSON ``{'href': <wiki url>}``; on failure
    responds with JSON ``{'error': <message>}`` and an HTTP error status.
    """
    if request.method != 'POST':
        raise Http404

    content_type = 'application/json; charset=utf-8'

    def json_error(err_msg, status=400):
        # Wrap an error message in the JSON envelope the client expects.
        result = {'error': err_msg}
        return HttpResponse(json.dumps(result), status=status,
                            content_type=content_type)

    if not request.user.permissions.can_add_repo():
        return json_error(_('You do not have permission to create wiki'), 403)

    form = WikiCreateForm(request.POST)
    if not form.is_valid():
        return json_error(str(form.errors.values()[0]))

    # create group repo in user context
    repo_name = form.cleaned_data['repo_name']
    repo_desc = form.cleaned_data['repo_desc']
    username = request.user.username
    passwd = None  # wiki libraries are always unencrypted

    repo_id = seaserv.create_repo(repo_name, repo_desc, username, passwd)
    if not repo_id:
        return json_error(_(u'Failed to create'), 500)

    PersonalWiki.objects.save_personal_wiki(username=username, repo_id=repo_id)

    # create home page
    page_name = "home.md"
    if not seaserv.post_empty_file(repo_id, "/", page_name, username):
        return json_error(_(u'Failed to create home page. Please retry later'), 500)

    # Renamed from 'next' to avoid shadowing the builtin next().
    next_url = reverse('personal_wiki', args=[])
    return HttpResponse(json.dumps({'href': next_url}), content_type=content_type)
@login_required
def personal_wiki_use_lib(request):
    """Point the personal wiki at an existing library (POST only).

    Saves the chosen repo as the user's wiki library, creates the home
    page if it is missing, and redirects back to the wiki view.
    """
    if request.method != 'POST':
        raise Http404

    username = request.user.username
    chosen_repo_id = request.POST.get('dst_repo', '')
    redirect_to = reverse('personal_wiki', args=[])

    if seafile_api.get_repo(chosen_repo_id) is None:
        messages.error(request, _('Failed to set wiki library.'))
        return HttpResponseRedirect(redirect_to)

    PersonalWiki.objects.save_personal_wiki(username=username,
                                            repo_id=chosen_repo_id)

    # create home page if not exist
    page_name = "home.md"
    if not seaserv.get_file_id_by_path(chosen_repo_id, "/" + page_name):
        if not seaserv.post_empty_file(chosen_repo_id, "/", page_name, username):
            messages.error(request, _('Failed to create home page. Please retry later'))

    return HttpResponseRedirect(redirect_to)
@login_required
def personal_wiki_page_new(request, page_name="home"):
    """Create a new empty wiki page, then redirect to the file editor.

    The page name comes from POST data when present, otherwise from the
    URL argument. Renders an error page when the wiki library is missing,
    the page already exists, or creation fails.
    """
    if request.method == 'POST':
        page_name = request.POST.get('page_name', '')
        if not page_name:
            # Nothing submitted: bounce back where the user came from.
            # HTTP_REFERER may be absent, so fall back to the wiki home
            # (redirecting to None was the previous, broken behavior).
            referer = request.META.get('HTTP_REFERER') or reverse('personal_wiki', args=[])
            return HttpResponseRedirect(referer)

        page_name = clean_page_name(page_name)

    try:
        repo = get_personal_wiki_repo(request.user.username)
    except WikiDoesNotExist:
        return render_error(request, _('Wiki is not found.'))

    filename = page_name + ".md"
    filepath = "/" + page_name + ".md"

    # check whether file exists
    if seaserv.get_file_id_by_path(repo.id, filepath):
        return render_error(request, _('Page "%s" already exists.') % filename)

    if not seaserv.post_empty_file(repo.id, "/", filename, request.user.username):
        return render_error(request, _('Failed to create wiki page. Please retry later.'))

    url = "%s?p=%s&from=personal_wiki_page_new" % (
        reverse('file_edit', args=[repo.id]),
        urlquote(filepath.encode('utf-8')))
    return HttpResponseRedirect(url)
@login_required
def personal_wiki_page_edit(request, page_name="home"):
try:
repo = get_personal_wiki_repo(request.user.username)
except WikiDoesNotExist:
return render_error(request, _('Wiki is not found.'))
filepath = "/" + page_name + ".md"
url = "%s?p=%s&from=personal_wiki_page_edit" % (
reverse('file_edit', args=[repo.id]),
urllib |
jmosky12/huxley | huxley/api/tests/test_user.py | Python | bsd-3-clause | 17,518 | 0.000228 | # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.accounts.models import User
from huxley.api.tests import (CreateAPITestCase, DestroyAPITestCase,
ListAPITestCase, PartialUpdateAPITestCase,
RetrieveAPITestCase)
from huxley.utils.test import TestSchools, TestUsers
class UserDetailGetTestCase(RetrieveAPITestCase):
    """GET tests for the user-detail endpoint (api:user_detail).

    Anonymous and unrelated users are rejected; superusers and the user
    himself receive the serialized user. Advisors get their school as a
    nested object, while superusers viewing another user get the bare
    school id.
    """
    url_name = 'api:user_detail'

    def test_anonymous_user(self):
        '''It should reject request from an anonymous user.'''
        user = TestUsers.new_user()
        response = self.get_response(user.id)
        self.assertNotAuthenticated(response)

    def test_other_user(self):
        '''It should reject request from another user.'''
        user1 = TestUsers.new_user(username='user1')
        user2 = TestUsers.new_user(username='user2', password='user2')
        self.client.login(username='user2', password='user2')
        response = self.get_response(user1.id)
        self.assertPermissionDenied(response)

    def test_superuser(self):
        '''It should return the correct fields for a superuser.'''
        user1 = TestUsers.new_user(username='user1')
        user2 = TestUsers.new_superuser(username='user2', password='user2')
        self.client.login(username='user2', password='user2')
        response = self.get_response(user1.id)
        # A superuser sees school/committee as bare ids, not nested objects.
        self.assertEqual(response.data, {
            'id': user1.id,
            'username': user1.username,
            'first_name': user1.first_name,
            'last_name': user1.last_name,
            'user_type': user1.user_type,
            'school': user1.school_id,
            'committee': user1.committee_id})

    def test_self(self):
        '''It should return the correct fields for a single user.'''
        school = TestSchools.new_school()
        user = school.advisor
        self.client.login(username=user.username, password='test')
        response = self.get_response(user.id)
        # An advisor viewing himself gets the full nested school object.
        self.assertEqual(response.data, {
            'id': user.id,
            'username': user.username,
            'first_name': user.first_name,
            'last_name': user.last_name,
            'user_type': user.user_type,
            'school': {
                'id': school.id,
                'registered': school.registered.isoformat(),
                'name': school.name,
                'address': school.address,
                'city': school.city,
                'state': school.state,
                'zip_code': school.zip_code,
                'country': school.country,
                'primary_name': school.primary_name,
                'primary_gender': school.primary_gender,
                'primary_email': school.primary_email,
                'primary_phone': school.primary_phone,
                'primary_type': school.primary_type,
                'secondary_name': school.secondary_name,
                'secondary_gender': school.secondary_gender,
                'secondary_email': school.secondary_email,
                'secondary_phone': school.secondary_phone,
                'secondary_type': school.secondary_type,
                'program_type': school.program_type,
                'times_attended': school.times_attended,
                'international': school.international,
                'waitlist': school.waitlist,
                'beginner_delegates': school.beginner_delegates,
                'intermediate_delegates': school.intermediate_delegates,
                'advanced_delegates': school.advanced_delegates,
                'spanish_speaking_delegates': school.spanish_speaking_delegates,
                'country_preferences': school.country_preference_ids,
                'prefers_bilingual': school.prefers_bilingual,
                'prefers_specialized_regional':
                    school.prefers_specialized_regional,
                'prefers_crisis': school.prefers_crisis,
                'prefers_alternative': school.prefers_alternative,
                'prefers_press_corps': school.prefers_press_corps,
                'registration_comments': school.registration_comments,
                # Fees are serialized as floats, not Decimals.
                'fees_owed': float(school.fees_owed),
                'fees_paid': float(school.fees_paid),
            },
            'committee': user.committee_id})

    def test_chair(self):
        '''It should have the correct fields for chairs.'''
        user = TestUsers.new_user(user_type=User.TYPE_CHAIR,
                                  committee_id=4)
        self.client.login(username='testuser', password='test')
        response = self.get_response(user.id)
        self.assertEqual(response.data, {
            'id': user.id,
            'username': user.username,
            'first_name': user.first_name,
            'last_name': user.last_name,
            'user_type': user.user_type,
            'school': user.school_id,
            'committee': user.committee_id})
class UserDetailDeleteTestCase(DestroyAPITestCase):
    """DELETE tests for the user-detail endpoint (api:user_detail).

    Only the user himself or a superuser may delete the account; rejected
    requests must leave the record in the database.
    """
    url_name = 'api:user_detail'

    def setUp(self):
        self.user = TestUsers.new_user(username='user1', password='user1')

    def _target_exists(self):
        # True while the user created in setUp is still in the database.
        return User.objects.filter(id=self.user.id).exists()

    def test_anonymous_user(self):
        '''It should reject the request from an anonymous user.'''
        response = self.get_response(self.user.id)
        self.assertNotAuthenticated(response)
        self.assertTrue(self._target_exists())

    def test_other_user(self):
        '''It should reject the request from another user.'''
        TestUsers.new_user(username='user2', password='user2')
        self.client.login(username='user2', password='user2')
        response = self.get_response(self.user.id)
        self.assertPermissionDenied(response)
        self.assertTrue(self._target_exists())

    def test_self(self):
        '''It should allow a user to delete themself.'''
        self.client.login(username='user1', password='user1')
        response = self.get_response(self.user.id)
        self.assertEqual(response.status_code, 204)
        self.assertFalse(self._target_exists())

    def test_superuser(self):
        '''It should allow a superuser to delete a user.'''
        TestUsers.new_superuser(username='user2', password='user2')
        self.client.login(username='user2', password='user2')
        response = self.get_response(self.user.id)
        self.assertEqual(response.status_code, 204)
        self.assertFalse(self._target_exists())
class UserDetailPatchTestCase(PartialUpdateAPITestCase):
url_name = 'api:user_detail'
params = {'first_name': 'first',
'last_name': 'last'}
def setUp(self):
self.user = TestUsers.new_user(username='user1', password='user1')
def test_anonymous_user(self):
'''An anonymous user should not be able to change information.'''
response = self.get_response(self.user.id, params=self.params)
self.assertNotAuthenticated(response)
user = User.objects.get(id=self.user.id)
self.assertEqual(user.first_name, 'Test')
self.assertEqual(user.last_name, 'User')
def test_other_user(self):
'''Another user should not be able to change information about any other user.'''
TestUsers.new_user(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.user.id, params=self.params)
self.assertPermissionDenied(response)
user = User.objects.get(id=self.user.id)
self.assertEqual(user.first_name, 'Test')
self.assertEqual(user.last_name, 'User')
def test_self(self):
'''A User should be allowed to change information about himself.'''
self.client.login(username='user1', password='user1')
response = self.get_response(self.user.id, params=self.params)
user = User.objects.get(id=self.user.id)
self.assertEqual(response.data['fir |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.