code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
import do
import setvar
import sys
import show
import getvalue
# Current line number of the script being interpreted; used in error
# messages (see typeDetermin below).
lines = 0
# Interpreter-level variables, referenced in scripts as $NAME.
# Values are filled in at runtime; USERAGENT ships with a default.
baseVariables = {
    "URL":None,
    "TMPCOOKIE":None,
    "COOKIE":None,
    "HTML":None,
    "POSTDATA":None,
    "USERAGENT":"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0",
    "STATUS":None,
    "VALUE":None,
    "LINKS":None,
    }
# Maps language keywords to their handler callables (one per module).
keywords = {
    "do":do.do,
    "set":setvar.setvar,
    "show":show.show,
    "getvalue":getvalue.getvalue
    }
def typeDetermin(data):
    """Resolve a script token to a value.

    A token beginning with "$" is looked up in baseVariables (without the
    dollar sign); an unknown variable name aborts the interpreter with an
    error naming the current line. Any other token is returned unchanged.
    """
    if not data.startswith("$"):
        return data
    name = data[1:]
    if name not in baseVariables:
        sys.exit("Error on line {0}, variable ${1} is not a language variable.".format(lines, name))
    return baseVariables[name]
| Danisan/HTTPLang | httplang/utils.py | Python | mit | 894 |
#!/usr/bin/python
import getopt
import sys
import pymqi, CMQC, CMQCFC
# Nagios plugin exit codes.
STATE_OK = 0
STATE_WARNING = 1
STATE_CRITICAL = 2
STATE_UNKNOWN = 3
def usage():
    """Print the one-line usage synopsis."""
    print """Usage: rbh_check_mq_channel_status -H <HostName> -g <QMGRName> -p <PortNumber> -a <ChannelName for connection> -t <ChannelName for test>"""
def show_help():
    """Print the usage line followed by the full option reference."""
    usage()
    print """
Checks MQ channel status
    -H, --host Host name
    -g, --qmgr Queue Manager Name
    -p, --port-number port number (default 1414)
    -a, --channel-name-conn channel name for connection
    -t, --channel-name channel name for test
example:
    rbh_check_mq_channel_status.py -H host1 -g QM1 -a SYSTEM.ADMIN.SVRCONN -t nameofthechannel
"""
def exit_with_state(exit_code):
    """Close the global queue-manager connection (best effort), then
    terminate the process with *exit_code*.

    Any failure while disconnecting -- including the connection never
    having been opened -- is ignored; the exit code always wins.
    """
    global qmgr
    try:
        qmgr.disconnect()
    finally:
        sys.exit(exit_code)
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "hH:g:p:a:t:", ["help", "host","qmgr=","port=","channel-name=","channel-name-conn="])
except getopt.GetoptError, err:
print str(err) # will print something like "option -a not recognized"
usage()
sys.exit(2)
hostName=None
qmgrName=None
portNumber=1414
channelNameConn=None
channelNameTest=None
for o, a in opts:
if o in ("-h", "--help"):
show_help()
sys.exit()
elif o in ("-H", "--host"):
hostName = a
elif o in ("-g", "--qmgr"):
qmgrName = a
elif o in ("-p", "--port"):
portNumber = int(a)
elif o in ("-a", "--channel-name-conn"):
channelNameConn = a
elif o in ("-t", "--channel-name"):
channelNameTest = a
else:
assert False, "unhandled option"
if not (hostName and portNumber and channelNameTest and qmgrName and channelNameConn):
usage()
exit_with_state(STATE_UNKNOWN)
# if len(channelNameConn) > MQ_CHANNEL_NAME_LENGTH:
# print "UNKNOWN - Channel name are too long."
conn_info="%s(%s)" % (hostName,portNumber)
global qmgr
try:
qmgr = pymqi.connect(qmgrName,channelNameConn,conn_info)
except pymqi.MQMIError, e:
print "UNKNOWN - unable to connect to Qmanager, reason: %s" % (e)
exit_with_state(STATE_UNKNOWN)
channel_name = ''
try:
pcf = pymqi.PCFExecute(qmgr)
channel_names = pcf.MQCMD_INQUIRE_CHANNEL({CMQCFC.MQCACH_CHANNEL_NAME: channelNameTest})
if channel_names[0]:
channel_name = channel_names[0][CMQCFC.MQCACH_CHANNEL_NAME].rstrip()
channel_type = channel_names[0][CMQCFC.MQIACH_CHANNEL_TYPE]
else:
print("CRITICAL - Channel %s does not exists." % (channelNameTest))
exit_with_state(STATE_UNKNOWN)
except pymqi.MQMIError,e :
print("UNKNOWN - Can not list MQ channels. reason: %s" % (e))
exit_with_state(STATE_UNKNOWN)
status_available = True
try:
attrs = "MQCACH_CHANNEL_NAME MQIACH_BYTES_RCVD MQIACH_BYTES_SENT"
pcf = pymqi.PCFExecute(qmgr)
channels = pcf.MQCMD_INQUIRE_CHANNEL_STATUS({CMQCFC.MQCACH_CHANNEL_NAME: channelNameTest})
except pymqi.MQMIError, e:
if e.comp == CMQC.MQCC_FAILED and e.reason == CMQCFC.MQRCCF_CHL_STATUS_NOT_FOUND:
status_available = False
pass
else:
print "UNKNOWN - Can not get status information, reason: %s" % (e)
exit_with_state(STATE_UNKNOWN)
infomsg = {CMQCFC.MQCHS_INACTIVE:"Channel is inactive",
CMQCFC.MQCHS_BINDING:"Channel is negotiating with the partner.",
CMQCFC.MQCHS_STARTING:"Channel is waiting to become active.",
CMQCFC.MQCHS_RUNNING:"Channel is transferring or waiting for messages.",
CMQCFC.MQCHS_PAUSED:"Channel is paused.",
CMQCFC.MQCHS_STOPPING:"Channel is in process of stopping.",
CMQCFC.MQCHS_RETRYING:"Channel is reattempting to establish connection.",
CMQCFC.MQCHS_STOPPED:"Channel is stopped.",
CMQCFC.MQCHS_REQUESTING:"Requester channel is requesting connection.",
CMQCFC.MQCHS_INITIALIZING:"Channel is initializing."}
if status_available:
status = channels[0][CMQCFC.MQIACH_CHANNEL_STATUS]
msg = "Channel: %s state is %s (%s)" % (channel_name,status,infomsg[status])
if (status == CMQCFC.MQCHS_RUNNING or
(status == CMQCFC.MQCHS_INACTIVE and not channel_type in (CMQC.MQCHT_REQUESTER,CMQC.MQCHT_CLUSSDR))):
print("OK - %s" % (msg))
exit_with_state(STATE_OK)
if status in (CMQCFC.MQCHS_PAUSED,CMQCFC.MQCHS_STOPPED):
print("CRITICAL - %s" % (msg))
exit_with_state(STATE_CRITICAL)
else:
print("WARNING - %s" % (msg))
exit_with_state(STATE_WARNING)
else:
if channel_type in (CMQC.MQCHT_REQUESTER,CMQC.MQCHT_CLUSSDR):
print("CRITICAL - Channel %s is defined, but status is not available. As this channel is defined as CLUSDR or REQUESTER type channel, therefore it should be running." % (channelNameTest))
exit_with_state(STATE_CRITICAL)
else:
print("OK - Channel %s is defined, but status is not available. This may indicate that the channel has not been used." % (channelNameTest))
exit_with_state(STATE_OK)
# Script entry point.
if __name__ == "__main__":
    main()
| klapper/nagios-plugins-mq | check_mq_channel.py | Python | mit | 5,483 |
from mock import MagicMock, mock_open, patch
from unittest import TestCase
from warnings import simplefilter
import pconf
from pconf.store.env import Env
# Separator-style environment fixtures and their expected grouped form.
TEST_ENV_BASE_VARS = {
    "env__var": "result",
    "env__var_2": "second_result",
}
TEST_ENV_MATCHED_VARS = {"matched_var": "match"}
TEST_ENV_WHITELIST_VARS = {"whitelisted_var": "whitelist"}
TEST_SEPARATED_VARS = {"env": {"var": "result", "var_2": "second_result"}}
# Compose the full fixture sets from the pieces above.
TEST_ENV_VARS = dict(TEST_ENV_WHITELIST_VARS, **TEST_ENV_MATCHED_VARS)
TEST_SEPARATED_VARS = dict(TEST_SEPARATED_VARS, **TEST_ENV_VARS)
TEST_ENV_VARS = dict(TEST_ENV_VARS, **TEST_ENV_BASE_VARS)
# Expected keys after underscore-to-dash conversion.
TEST_ENV_CONVERTED = {
    "env--var": "result",
    "env--var-2": "second_result",
    "matched-var": "match",
    "whitelisted-var": "whitelist",
}
TEST_ENV_CONVERTED_SEPARATED = {
    "env": {"var": "result", "var-2": "second_result"},
    "matched-var": "match",
    "whitelisted-var": "whitelist",
}
# Upper-case variants for the to_lower tests.
TEST_ENV_UPPERCASE = {
    "ENV__VAR": "result",
    "ENV__VAR_2": "second_result",
    "MATCHED_VAR": "match",
    "WHITELISTED_VAR": "whitelist",
}
# Raw string values and the typed values they should parse into when
# parse_values is enabled.
TEST_ENV_TYPED_VARS = {
    "key": "value",
    "int": "123",
    "float": "1.23",
    "complex": "1+2j",
    "list": "['list1', 'list2', {'dict_in_list': 'value'}]",
    "dict": "{'nested_dict': 'nested_value'}",
    "tuple": "(123, 'string')",
    "bool": "True",
    "boolstring": "false",
    "string_with_specials": "Test!@#$%^&*()-_=+[]{};:,<.>/?\\'\"`~",
}  # noqa: E501
TEST_ENV_TYPED_VARS_PARSED = {
    "key": "value",
    "int": 123,
    "float": 1.23,
    "complex": 1 + 2j,
    "list": ["list1", "list2", {"dict_in_list": "value"}],
    "dict": {"nested_dict": "nested_value"},
    "tuple": (123, "string"),
    "bool": True,
    "boolstring": False,
    "string_with_specials": "Test!@#$%^&*()-_=+[]{};:,<.>/?\\'\"`~",
}  # noqa: E501
# Docker-secret fixtures: *_FILE variables point at a secret file; the
# expected result maps the stripped name to the file's content.
TEST_ENV_DOCKER_SECRETS = {"MY_EXAMPLE_SECRET_FILE": "/run/secrets/my_example_secret"}
TEST_ENV_DOCKER_SECRETS_INVALID_POSTFIX = {
    "MY_EXAMPLE_SECRET": "/run/secrets/my_example_secret"
}
TEST_DOCKER_SECRET_CONTENT = "mysecret"
TEST_DOCKER_SECRETS_RESULT = {"MY_EXAMPLE_SECRET": TEST_DOCKER_SECRET_CONTENT}
# Constructor arguments handed to Env() in the tests below.
TEST_SEPARATOR = "__"
TEST_MATCH = r"^matched"
TEST_WHITELIST = ["whitelisted_var", "whitelist2"]
TEST_PARSE_VALUES = True
TEST_TO_LOWER = True
TEST_CONVERT_UNDERSCORES = True
TEST_DOCKER_SECRETS = list(TEST_ENV_DOCKER_SECRETS.keys())
TEST_DOCKER_SECRETS_INVALID_POSTFIX = ["MY_EXAMPLE_SECRET"]
# NOTE(review): despite the name, this is the secret *content*
# ("mysecret"), used as mock_open's read_data -- not a filesystem path.
TEST_DOCKER_SECRETS_PATH = str(list(TEST_DOCKER_SECRETS_RESULT.values())[0])
# Patch target for the builtin open() used when reading secret files.
MOCK_OPEN_FUNCTION = "builtins.open"
def throw_ioerror(*args, **kwargs):
    """mock side_effect helper: unconditionally raise IOError("test").

    Accepts and ignores whatever positional/keyword arguments the mocked
    callable receives.
    """
    raise IOError("test")
class TestEnv(TestCase):
    """Unit tests for pconf.store.env.Env: constructor defaults, key
    filtering/rewriting, value parsing and Docker-secret handling.

    Tests patch pconf.store.env.os with a MagicMock and assign fixture
    dicts to its ``environ`` attribute directly.
    """
    def test_default_params(self):
        """Every optional feature is disabled on a bare Env()."""
        env_store = Env()
        self.assertEqual(env_store.separator, None)
        self.assertEqual(env_store.match, None)
        self.assertEqual(env_store.whitelist, None)
        self.assertEqual(env_store.parse_values, False)
        self.assertEqual(env_store.to_lower, False)
        self.assertEqual(env_store.convert_underscores, False)
    def test_optional_params(self):
        """Constructor arguments are stored verbatim on the instance."""
        env_store = Env(
            separator=TEST_SEPARATOR,
            match=TEST_MATCH,
            whitelist=TEST_WHITELIST,
            parse_values=TEST_PARSE_VALUES,
            to_lower=TEST_TO_LOWER,
            convert_underscores=TEST_CONVERT_UNDERSCORES,
        )
        self.assertEqual(env_store.separator, TEST_SEPARATOR)
        self.assertEqual(env_store.match, TEST_MATCH)
        self.assertEqual(env_store.whitelist, TEST_WHITELIST)
        self.assertEqual(env_store.parse_values, TEST_PARSE_VALUES)
        self.assertEqual(env_store.to_lower, TEST_TO_LOWER)
        self.assertEqual(env_store.convert_underscores, TEST_CONVERT_UNDERSCORES)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_get_all_vars(self):
        """Without filters, get() returns the whole environment."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        env_store = Env()
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_VARS)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_get_idempotent(self):
        """get() keeps returning the snapshot taken on the first call,
        even if the environment changes afterwards."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        env_store = Env()
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_VARS)
        self.assertIsInstance(result, dict)
        pconf.store.env.os.environ = TEST_ENV_BASE_VARS
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_VARS)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_whitelist(self):
        """Only whitelisted variable names survive."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        env_store = Env(whitelist=TEST_WHITELIST)
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_WHITELIST_VARS)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_match(self):
        """Only names matching the regex survive."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        env_store = Env(match=TEST_MATCH)
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_MATCHED_VARS)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_whitelist_and_match(self):
        """whitelist and match combine as a union, not an intersection."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        env_store = Env(match=TEST_MATCH, whitelist=TEST_WHITELIST)
        result = env_store.get()
        self.assertEqual(result, dict(TEST_ENV_MATCHED_VARS, **TEST_ENV_WHITELIST_VARS))
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_separator(self):
        """Keys containing the separator are split into nested dicts."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        env_store = Env(separator=TEST_SEPARATOR)
        result = env_store.get()
        self.assertEqual(result, TEST_SEPARATED_VARS)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_parse_values(self):
        """String values are parsed into their Python-typed equivalents."""
        pconf.store.env.os.environ = TEST_ENV_TYPED_VARS
        env_store = Env(parse_values=TEST_PARSE_VALUES)
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_TYPED_VARS_PARSED)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_lowercase_conversion(self):
        """to_lower lower-cases all keys."""
        pconf.store.env.os.environ = TEST_ENV_UPPERCASE
        env_store = Env(to_lower=TEST_TO_LOWER)
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_VARS)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_lowercase_and_separator(self):
        """to_lower and separator compose correctly."""
        pconf.store.env.os.environ = TEST_ENV_UPPERCASE
        env_store = Env(separator=TEST_SEPARATOR, to_lower=TEST_TO_LOWER)
        result = env_store.get()
        self.assertEqual(result, TEST_SEPARATED_VARS)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_convert_underscore_replacement(self):
        """convert_underscores turns underscores into dashes."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        env_store = Env(convert_underscores=TEST_CONVERT_UNDERSCORES)
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_CONVERTED)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_convert_underscore_and_separator(self):
        """Separator splitting happens before underscore conversion, so
        separator keys ("env__var") still split correctly."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        env_store = Env(
            separator=TEST_SEPARATOR, convert_underscores=TEST_CONVERT_UNDERSCORES
        )
        result = env_store.get()
        self.assertEqual(result, TEST_ENV_CONVERTED_SEPARATED)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_parse_and_split_order(self):
        """Combining separator with parse_values must not raise (guards a
        past AttributeError regression)."""
        pconf.store.env.os.environ = TEST_ENV_VARS
        try:
            env_store = Env(separator=TEST_SEPARATOR, parse_values=TEST_PARSE_VALUES)
        except AttributeError:
            self.fail("Parsing environment variables raised AttributeError")
        result = env_store.get()
        self.assertEqual(result, TEST_SEPARATED_VARS)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    @patch(MOCK_OPEN_FUNCTION, mock_open(read_data=TEST_DOCKER_SECRETS_PATH))
    def test_docker_secrets(self):
        """A *_FILE variable is replaced by the secret file's content
        under the stripped name."""
        pconf.store.env.os.environ = TEST_ENV_DOCKER_SECRETS
        env_store = Env(docker_secrets=TEST_DOCKER_SECRETS)
        result = env_store.get()
        self.assertEqual(list(result.keys()), list(TEST_DOCKER_SECRETS_RESULT.keys()))
        self.assertEqual(result, TEST_DOCKER_SECRETS_RESULT)
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    def test_docker_secrets_invalid_postfix(self):
        """Names without the _FILE postfix are ignored entirely."""
        pconf.store.env.os.environ = TEST_ENV_DOCKER_SECRETS_INVALID_POSTFIX
        env_store = Env(docker_secrets=TEST_DOCKER_SECRETS_INVALID_POSTFIX)
        result = env_store.get()
        self.assertEqual(result, {})
        self.assertIsInstance(result, dict)
    @patch("pconf.store.env.os", new=MagicMock())
    @patch(MOCK_OPEN_FUNCTION, side_effect=throw_ioerror)
    def test_docker_secrets_nonexistent_file(self, mock_open):
        """An unreadable secret file yields an empty result (the warning
        Env emits is silenced via simplefilter)."""
        simplefilter("ignore")
        pconf.store.env.os.environ = TEST_ENV_DOCKER_SECRETS
        env_store = Env(docker_secrets=TEST_DOCKER_SECRETS)
        result = env_store.get()
        self.assertEqual(result, {})
        self.assertIsInstance(result, dict)
| andrasmaroy/pconf | tests/test_env.py | Python | mit | 9,384 |
from biokbase.workspace.client import Workspace
import requests
import json
import sys
from time import time
from fix_workspace_info import fix_all_workspace_info
from pprint import pprint
# Local mini-KB endpoints: workspace service and auth (testmode).
kb_port = 9999
mini_ws_url = f"http://localhost:{kb_port}/services/ws"
mini_auth_url = f"http://localhost:{kb_port}/services/auth/testmode"
mini_ws_admin = "wsadmin"
# Typespec files: current Narrative spec (from the repo root) and the
# legacy spec kept next to this script.
narrative_spec_file = '../../../narrative_object.spec'
old_narrative_spec_file = 'old_narrative_object.spec'
test_narrative_data = 'narrative_test_data.json'
test_user = "kbasetest"
####
# BEFORE YOU RUN THIS:
# 1. Spin up mini_kb with the workspace env pointed to my branch:
# that is, the "-env" line in the ws command points to
# "https://raw.githubusercontent.com/briehl/mini_kb/master/deployment/conf/workspace-minikb.ini"
#
# 2. When this starts up, the workspace will complain. Auth is in testmode, and there's no test user/token set up
# for the Shock configuration. Do the following:
# a. enter the mongo container
# > docker exec -it mini_kb_ci-mongo_1 /bin/bash
# b. start mongo (just "mongo" at the prompt)
# c. Run the following to use gridFS:
# > use workspace
# > db.settings.findAndModify({ query: {backend: "shock"}, update: { $set: {"backend": "gridFS"} } })
# d. Exit that container, and restart the workspace container
# > docker-compose restart ws
#
# With the setup done, this script should do the job of creating accounts, importing the Narrative type,
# loading test data, etc.
def create_user(user_id):
    """Create *user_id* in the testmode auth service and mint a Login
    token for it.

    A 400 from the user-creation endpoint is tolerated (the user already
    exists); any other failure raises via raise_for_status(). Returns
    the token string.
    """
    headers = {
        "Content-Type": "application/json"
    }
    user_payload = json.dumps({'user': user_id, 'display': "User {}".format(user_id)})
    resp = requests.post(mini_auth_url + '/api/V2/testmodeonly/user', headers=headers, data=user_payload)
    if resp.status_code not in (200, 400):
        print("Can't create dummy user!")
        resp.raise_for_status()
    token_payload = json.dumps({'user': user_id, 'type': 'Login'})
    resp = requests.post(mini_auth_url + '/api/V2/testmodeonly/token', headers=headers, data=token_payload)
    if resp.status_code != 200:
        print("Can't make dummy token!")
        resp.raise_for_status()
    return json.loads(resp.text)['token']
def load_narrative_type(ws):
    """
    Loads the KBaseNarrative.Narrative type info into mini kb.
    ws = Workspace client configured for admin

    Registers and releases the legacy spec first, then the current one,
    and returns {'old_ver': ..., 'new_ver': ...} with the Narrative type
    version released at each step.
    """
    # Claim module ownership and approve our own request (the ws client
    # has admin rights).
    ws.request_module_ownership("KBaseNarrative")
    ws.administer({
        'command': 'approveModRequest',
        'module': 'KBaseNarrative'
    })
    with open(old_narrative_spec_file, "r") as f:
        old_spec = f.read()
    ws.register_typespec({
        'spec': old_spec,
        'dryrun': 0,
        'new_types': [
            'Narrative',
            'Cell',
            'Worksheet',
            'Metadata'
        ]
    })
    ws.release_module('KBaseNarrative')
    # Record the version string of the just-released Narrative type
    # (type names look like "KBaseNarrative.Narrative-<ver>").
    for n in ws.get_module_info({'mod': 'KBaseNarrative'})['types'].keys():
        if '.Narrative' in n:
            old_ver = n.split('-')[-1]
    # Register and release the current spec on top of the legacy one.
    with open(narrative_spec_file, "r") as f:
        spec = f.read()
    ws.register_typespec({
        'spec': spec,
        'dryrun': 0,
        'new_types': []
    })
    ws.release_module('KBaseNarrative')
    for n in ws.get_module_info({'mod': 'KBaseNarrative'})['types'].keys():
        if '.Narrative' in n:
            new_ver = n.split('-')[-1]
    return {
        'old_ver': old_ver,
        'new_ver': new_ver
    }
def load_narrative_test_data(ws, vers):
    """Upload the narrative test-data file into mini-KB workspaces.

    The JSON fixture has "old" and "new" groups; each workspace in a
    group is loaded with the matching Narrative type version from
    *vers* ('old_ver' / 'new_ver'). Returns the list of per-workspace
    info dicts produced by _load_workspace_data (ws id, perms,
    correct/loaded metadata, narrative object info).
    """
    with open(test_narrative_data, 'r') as f:
        test_data = json.loads(f.read().strip())
    uploaded = []
    for group, ver_key in (("old", "old_ver"), ("new", "new_ver")):
        for ws_data in test_data[group]:
            uploaded.append(
                _load_workspace_data(ws, ws_data, len(uploaded), vers[ver_key])
            )
    return uploaded
def _load_workspace_data(ws, ws_data, idx, narrative_ver):
    """
    Loads up a single workspace with data and returns a dict about it.
    Dict contains:
    id = the workspace id
    perms = the workspace permissions
    correct_meta = the correct workspace metadata (for validation)

    Creates a uniquely-named workspace, saves each narrative from
    ws_data['narratives'] into it as a KBaseNarrative.Narrative of
    version *narrative_ver*, applies workspace metadata and (when
    present) the 'wsadmin' permission.
    """
    print(ws_data.keys())  # NOTE(review): looks like a debug leftover
    narratives = ws_data['narratives']
    ws_meta = ws_data['ws_meta']
    # Timestamp suffix keeps workspace names unique across runs.
    ws_info = ws.create_workspace({"workspace": "NarrativeWS-{}-{}".format(idx, int(time()*1000))})
    ws_id = ws_info[0]
    info = {
        "ws_id": ws_id,
        "ws_info": ws_info,
        "nar_info": [],
        "perms": ws_data["perms"],
        "correct_meta": ws_data["correct_meta"],
        "loaded_meta": ws_meta
    }
    if len(narratives):
        # NOTE(review): this loop variable shadows the idx parameter;
        # harmless today because the parameter was already consumed in
        # the workspace name above, but fragile.
        for idx, nar in enumerate(narratives):
            objects = ws.save_objects({
                'id': ws_id,
                'objects': [{
                    'type': 'KBaseNarrative.Narrative-{}'.format(narrative_ver),
                    'data': nar,
                    'name': 'Narrative-{}'.format(idx)
                }]
            })
            info['nar_info'].append(objects[0])
    if len(ws_meta):
        ws.alter_workspace_metadata({
            'wsi': {'id': ws_id},
            'new': ws_meta
        })
    perms = ws_data["perms"]
    if len(perms) > 1:
        # assumes a 'wsadmin' entry exists whenever there is more than
        # one permission -- TODO confirm against the fixture data
        admin_perm = perms['wsadmin']
        ws.set_permissions({
            'id': ws_id,
            'new_permission': admin_perm,
            'users': ['wsadmin']
        })
    return info
def main():
    """Set up mini-KB: create the admin and test users, register the
    Narrative types, load the narrative test data and print a summary.
    """
    admin_token = create_user(mini_ws_admin)
    admin_ws = Workspace(url=mini_ws_url, token=admin_token)
    versions = load_narrative_type(admin_ws)
    # NOTE(review): the versions returned above are immediately
    # overwritten with hard-coded values -- confirm this is intended
    # rather than leftover debugging.
    versions = {
        'old_ver': '1.0',
        'new_ver': '2.0'
    }
    user_token = create_user(test_user)
    user_ws = Workspace(url=mini_ws_url, token=user_token)
    loaded_info = load_narrative_test_data(user_ws, versions)
    pprint(loaded_info)
    # Disabled validation pass: run the metadata fixer and compare each
    # workspace's metadata against the expected values.
    # fix_all_workspace_info(mini_ws_url, mini_auth_url, admin_token, 100)
    # for ws_data in loaded_info:
    #     ws_id = ws_data['ws_id']
    #     ws_meta = user_ws.get_workspace_info({'id': ws_id})[8]
    #     try:
    #         assert(ws_meta == ws_data['correct_meta'])
    #     except:
    #         print("WS: {}".format(ws_id))
    #         pprint(ws_meta)
    #         print("doesn't match")
    #         pprint(ws_data['correct_meta'])
# Script entry point; main() returns None, so the exit status is 0.
if __name__ == '__main__':
    sys.exit(main())
| pranjan77/narrative | src/scripts/test_data_uploader/populate_mini_ws.py | Python | mit | 6,648 |
# Ant-FS
#
# Copyright (c) 2012, Gustav Tiger <gustav@tiger.name>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import array
from ant.fs.commandpipe import parse, CreateFile
def main():
    """Smoke-test CreateFile and the command-pipe response parser."""
    # Test create file: request creation of a 9-byte FIT (0x80) file
    # with the given identifier/identifier-mask triples.
    data = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09]
    request = CreateFile(len(data), 0x80, [0x04, 0x00, 0x00], [0x00, 0xff, 0xff])
    print request
    print request.get()
    # Test create file response: parse a canned response buffer and
    # verify every decoded field.
    response_data = array.array('B', [2, 0, 0, 0, 4, 0, 0, 0, 128, 4, 123, 0, 103, 0, 0, 0])
    response = parse(response_data)
    assert response.get_request_id() == 0x04
    assert response.get_response() == 0x00
    assert response.get_data_type() == 0x80 #FIT
    assert response.get_identifier() == array.array('B', [4, 123, 0])
    assert response.get_index() == 103
| ddboline/Garmin-Forerunner-610-Extractor_fork | ant/fs/test/commandpipe_test.py | Python | mit | 1,838 |
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
    """Extra per-user data, attached one-to-one to Django's User."""
    user = models.OneToOneField(User, related_name='profile')
    # Name presented at the venue's security check-in.
    display_name = models.CharField(
        max_length=200, verbose_name='Name for Security Check In')
    # Opt-in flag: include this profile in the public member list.
    show = models.BooleanField(
        default=False, verbose_name="Show my information in the member list")
@receiver(post_save, sender=User)
def create_profile(sender, instance, created, **kwargs):
    """Create a matching profile whenever a user object is created."""
    # get_or_create guards against a duplicate profile if the signal
    # ever fires more than once for the same user.
    if created:
        profile, new = UserProfile.objects.get_or_create(
            user=instance, display_name=instance.get_full_name())
| agfor/chipy.org | chipy_org/apps/profiles/models.py | Python | mit | 772 |
from __future__ import absolute_import
import random
from django.core.exceptions import ValidationError
from django.http.request import HttpRequest
from django.urls import reverse
from django.utils.text import slugify
from test_plus import TestCase
from ..context_processors import scoped
from ..models import Blog, Post, Section
ascii_lowercase = "abcdefghijklmnopqrstuvwxyz"


def randomword(length):
    """Return a pseudo-random word of *length* lowercase ASCII letters."""
    letters = []
    for _ in range(length):
        letters.append(random.choice(ascii_lowercase))
    return "".join(letters)
class TestBlog(TestCase):
    """Shared fixture base: a Blog with two Sections ("Apples" and
    "Oranges"), each holding one published Post by user "patrick"."""
    def setUp(self):
        """
        Create default Sections and Posts.
        """
        self.blog = Blog.objects.first()
        self.apples = Section.objects.create(name="Apples", slug="apples")
        self.oranges = Section.objects.create(name="Oranges", slug="oranges")
        self.user = self.make_user("patrick")
        self.markup = "markdown"
        # Create two published Posts, one in each section.
        # STATE_CHOICES[-1][0] is the last (published) state value.
        self.orange_title = "Orange You Wonderful"
        self.orange_slug = slugify(self.orange_title)
        self.orange_post = Post.objects.create(blog=self.blog,
                                               section=self.oranges,
                                               title=self.orange_title,
                                               slug=self.orange_slug,
                                               author=self.user,
                                               markup=self.markup,
                                               state=Post.STATE_CHOICES[-1][0])
        self.apple_title = "Apple of My Eye"
        self.apple_slug = slugify(self.apple_title)
        self.apple_post = Post.objects.create(blog=self.blog,
                                              section=self.apples,
                                              title=self.apple_title,
                                              slug=self.apple_slug,
                                              author=self.user,
                                              markup=self.markup,
                                              state=Post.STATE_CHOICES[-1][0])
class TestViewGetSection(TestBlog):
    """Routing for the section view with valid and invalid slugs."""
    def test_invalid_section_slug(self):
        """
        Ensure invalid section slugs do not cause site crash.
        """
        invalid_slug = "bananas"
        url = reverse("pinax_blog:blog_section", kwargs={"section": invalid_slug})
        try:
            response = self.get(url)
        except Section.DoesNotExist:
            self.fail("section '{}' does not exist".format(invalid_slug))
        self.assertEqual(response.status_code, 404)
    def test_valid_section_slug(self):
        """
        Verify that existing section slug works fine
        """
        valid_slug = "oranges"
        url = reverse("pinax_blog:blog_section", kwargs={"section": valid_slug})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
class TestViewGetPosts(TestBlog):
    """Post listings: per-section filtering and the all-posts view."""
    def test_section_posts(self):
        """
        Verify only the expected Post is in context for section "orange".
        """
        url = reverse("pinax_blog:blog_section", kwargs={"section": "oranges"})
        response = self.client.get(url)
        self.assertIn(self.orange_post, response.context_data["post_list"])
        self.assertNotIn(self.apple_post, response.context_data["post_list"])
    def test_all_posts(self):
        """
        Verify all Posts are in context for All.
        """
        url = reverse("pinax_blog:blog")
        response = self.client.get(url)
        self.assertEqual(response.context_data["post_list"].count(), 2)
        self.assertIn(self.orange_post, response.context_data["post_list"])
        self.assertIn(self.apple_post, response.context_data["post_list"])
class TestModelFieldValidation(TestBlog):
    """Model-level validation on Post fields."""
    def test_overlong_slug(self):
        """Saving a Post whose slug exceeds the field's max_length must
        raise ValidationError with the standard length message."""
        title_len = Post._meta.get_field("title").max_length
        title = randomword(title_len)
        # One character past the slug field's limit.
        slug_len = Post._meta.get_field("slug").max_length
        slug = randomword(slug_len + 1)
        slug_post = Post(blog=self.blog,
                         section=self.apples,
                         title=title,
                         slug=slug,
                         author=self.user,
                         state=Post.STATE_CHOICES[-1][0])
        with self.assertRaises(ValidationError) as context_manager:
            slug_post.save()
        the_exception = context_manager.exception
        self.assertIn(
            "Ensure this value has at most {} characters (it has {})."
            .format(slug_len, len(slug)),
            the_exception.messages
        )
class TestContextProcessors(TestBlog):
    """The scoped() context processor outside of URL resolution."""
    def test_no_resolver_match(self):
        """
        Ensure no problem when `request.resolver_match` is None
        """
        request = HttpRequest()
        self.assertEqual(request.resolver_match, None)
        result = scoped(request)
        self.assertEqual(result, {"scoper_lookup": ""})
class TestViews(TestBlog):
    """Staff-only post management views (create form and submission)."""
    def test_manage_post_create_get(self):
        """
        Ensure template with external URL references renders properly
        for user with proper credentials.
        """
        with self.login(self.user):
            # NOTE(review): a route *name* is passed to client.get(), so
            # this requests the literal path "pinax_blog:manage_post_create";
            # the 404 asserted below comes from that bogus path rather
            # than a permission check -- confirm reverse() wasn't intended.
            response = self.client.get("pinax_blog:manage_post_create")
            self.assertEqual(response.status_code, 404)
        self.user.is_staff = True
        self.user.save()
        with self.login(self.user):
            self.get("pinax_blog:manage_post_create")
            self.response_200()
            self.assertTemplateUsed("pinax/blog/manage_post_create")
            pinax_images_upload_url = reverse("pinax_images:imageset_new_upload")
            self.assertResponseContains(pinax_images_upload_url, html=False)
    def test_manage_post_create_post(self):
        """
        Ensure template with external URL references renders properly
        for user with proper credentials.
        """
        self.user.is_staff = True
        self.user.save()
        post_title = "You'll never believe what happened next!"
        post_data = dict(
            section=self.apples.pk,
            title=post_title,
            teaser="teaser",
            content="content",
            description="description",
            state=Post.STATE_CHOICES[-1][0],
        )
        with self.login(self.user):
            self.post("pinax_blog:manage_post_create", data=post_data)
            # Successful creation redirects back to the manage list.
            self.assertRedirects(self.last_response, reverse("pinax_blog:manage_post_list"))
            self.assertTrue(Post.objects.get(title=post_title))
| pinax/pinax-blog | pinax/blog/tests/tests.py | Python | mit | 6,575 |
import os
import sys
import argparse
import configparser
from utils.process import Process
# They are imported as all lowercase
# so it is case insensitive in the config file
from modules.tuebl import Tuebl as tuebl
from modules.itebooks import ItEbooks as itebooks
from modules.wallhaven import Wallhaven as wallhaven
# CLI: one optional positional argument naming the config file to load.
parser = argparse.ArgumentParser()
parser.add_argument('config', help='custom config file', nargs='?', default='./config.ini')
args = parser.parse_args()
# Populated from args.config in the __main__ block below.
config = configparser.ConfigParser()
def stop():
    """Ask every active scraper to persist its state, then exit cleanly.

    Iterates the global `scrape` mapping built in the __main__ block and
    calls stop() on each Process before terminating with status 0.
    """
    for scraper in scrape.values():
        print(scraper.log("Exiting..."))
        scraper.stop()
    sys.exit(0)
if __name__ == "__main__":
    # Read config file
    if not os.path.isfile(args.config):
        print("Invalid config file")
        sys.exit(0)
    config.read(args.config)
    # Parse config file: build a Process for every enabled site section
    # whose (lower-cased) name matches one of the imported scraper classes.
    scrape = {}
    for site in config.sections():
        if config[site]['enabled'].lower() == 'true':
            try: # skip section names that don't resolve to a scraper class
                site_class = getattr(sys.modules[__name__], site.lower())
            except AttributeError as e:
                print("\nThere is no module named " + site + "\n")
                continue
            dl_path = os.path.expanduser(config[site]['download_path'])
            num_files = int(config[site]['number_of_files'])
            threads = int(config[site]['threads'])
            scrape[site] = Process(site_class, dl_path, num_files, threads)
    # Run each configured scraper; on any error, fall through to stop()
    # so already-started scrapers can save their state.
    try:
        for site in scrape:
            print("#### Scrapeing: " + site)
            scrape[site].start()
    except Exception as e:
        print("Exception [main]: " + str(e))
stop() | xtream1101/web-scraper | main.py | Python | mit | 1,737 |
# -*- coding: utf-8 -*-
"""Unit tests for gridmap classed"""
# ----------------------------------
# Bjørn Ådlandsvik <bjorn@imr.no>
# Institute of Marine Research
# ----------------------------------
import sys
from math import pi
import unittest
import numpy as np
sys.path = ['..'] + sys.path # import from developing version
import gridmap
# ------------------------------------
class test_PolarStereographic0(unittest.TestCase):
    """Test some analytic properties of the polar stereographic map"""
    # Two maps with identical grid parameters: map0 on a sphere,
    # map1 on the WGS84 ellipsoid.
    xp, yp, dx, ylon = 418.25, 257.25, 10000.0, 58.0
    map0 = gridmap.PolarStereographic(xp, yp, dx, ylon)
    map1 = gridmap.PolarStereographic(xp, yp, dx, ylon,
                                      ellipsoid=gridmap.WGS84)
    # Move the first two tests to test_Interface.
    # TODO: test_scalar and test_vector are unimplemented stubs.
    def test_scalar(self):
        """Should return a scalar for scalar input"""
        pass
    def test_vector(self):
        """Return arrays of the same shape as the input"""
    def test_north_pole_forward(self):
        """The coordinates of the North Pole are xp, yp"""
        lon, lat = 17.2, 90.0
        # sphere
        x0, y0 = self.map0.ll2grid(lon, lat)
        self.assertEqual((x0, y0), (self.xp, self.yp))
        # WGS84
        x1, y1 = self.map1.ll2grid(lon, lat)
        self.assertEqual((x1, y1), (self.xp, self.yp))
    def test_north_pole_backward(self):
        """Longitude is not defined at the North Pole"""
        # Should raise an exception
        # NOTE(review): nothing is asserted below -- grid2ll is merely
        # called, so this test passes vacuously. Consider assertRaises.
        # sphere
        lon0, lat0 = self.map0.grid2ll(self.xp, self.yp)
        # WGS84
        lon1, lat1 = self.map1.grid2ll(self.xp, self.yp)
    def test_ylon(self):
        """lon = ylon <=> x = xp"""
        # lon = ylon => x = xp
        lon, lat = self.ylon, 72.3
        # sphere
        x0, y0 = self.map0.ll2grid(lon, lat)
        self.assertEqual(x0, self.xp)
        # WGS84
        x1, y1 = self.map1.ll2grid(lon, lat)
        self.assertEqual(x1, self.xp)
        # x = xp => y = ylon
        x, y = self.xp, 222.222
        # sphere
        lon0, lat0 = self.map0.grid2ll(x, y)
        self.assertAlmostEqual(lon0, self.ylon, places=13)
        # WGS84
        lon1, lat1 = self.map1.grid2ll(x, y)
        self.assertAlmostEqual(lon1, self.ylon, places=13)
        # x = xp, => angle = 0
        x, y = self.xp, 222.222
        # sphere
        angle0 = self.map0.angle(x, y)
        self.assertEqual(angle0, 0.0)
        # WGS84
        angle1 = self.map1.angle(x, y)
        self.assertEqual(angle1, 0.0)
    def test_inverse(self):
        """grid2ll and ll2grid are inverse"""
        lon, lat = 5.323333, 60.3925 # Bergen
        # sphere: ll -> xy -> ll
        x0, y0 = self.map0.ll2grid(lon, lat)
        lon0, lat0 = self.map0.grid2ll(x0, y0)
        self.assertAlmostEqual(lon0, lon, places=14)
        self.assertEqual(lat0, lat)
        # WGS84: ll -> zy -> ll
        x1, y1 = self.map1.ll2grid(lon, lat)
        lon1, lat1 = self.map1.grid2ll(x1, y1)
        self.assertAlmostEqual(lon1, lon, places=14)
        self.assertAlmostEqual(lat1, lat, places=10)
        x, y = 200.0, 133.12345 # "Arbitrary"
        # sphere xy -> ll -> xy
        lon0, lat0 = self.map0.grid2ll(x, y)
        x0, y0 = self.map0.ll2grid(lon0, lat0)
        self.assertAlmostEqual(x0, x, places=12)
        self.assertAlmostEqual(y0, y, places=12)
        # WGS84: xy -> ll -> xy
        lon1, lat1 = self.map1.grid2ll(x, y)
        x1, y1 = self.map1.ll2grid(lon1, lat1)
        self.assertAlmostEqual(x1, x, places=9)
        self.assertAlmostEqual(y1, y, places=9)
    def test_angle(self):
        """angle = ylon - lon [rad]"""
        lon, lat = 5.323333, 60.3925 # Bergen
        angle = (self.ylon - lon)*pi/180
        # sphere
        x0, y0 = self.map0.ll2grid(lon, lat)
        angle0 = self.map0.angle(x0, y0)
        self.assertAlmostEqual(angle0, angle, places=15)
        # WGS84
        x1, y1 = self.map1.ll2grid(lon, lat)
        angle1 = self.map1.angle(x1, y1)
        self.assertAlmostEqual(angle1, angle, places=15)
    def test_scale(self):
        """scale = 1 at 60 deg"""
        lon, lat = -10.0, 60.0
        # sphere
        x0, y0 = self.map0.ll2grid(lon, lat)
        scale0 = self.map0.map_scale(x0, y0)
        self.assertAlmostEqual(scale0, 1.0, places=15)
        # WGS84
        x1, y1 = self.map1.ll2grid(lon, lat)
        scale1 = self.map1.map_scale(x1, y1)
        self.assertAlmostEqual(scale1, 1.0, places=12)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| bjornaa/gridmap | test/test_gridmap0.py | Python | mit | 4,541 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pyramid_sendgrid_webhooks
----------------------------------
Tests for `pyramid_sendgrid_webhooks` module.
"""
from __future__ import unicode_literals
import unittest
import pyramid_sendgrid_webhooks as psw
from pyramid_sendgrid_webhooks import events, errors
class EventGrabber(object):
    """Callable subscriber that records every event it is dispatched."""
    def __init__(self):
        self.events = []  # all events seen, in dispatch order
        self.last = None  # most recent event (None until the first dispatch)
    def __call__(self, event):
        self.last = event
        self.events.append(event)
def simple_app(global_config, **settings):
    """Build a minimal WSGI app with the webhook routes plus a grabber.

    The grabber is stashed on the registry so tests can reach it later.
    """
    from pyramid.config import Configurator
    cfg = Configurator(settings=settings)
    cfg.include('pyramid_sendgrid_webhooks', WebhookTestBase._PREFIX)
    grabber = EventGrabber()
    cfg.registry.grabber = grabber
    cfg.add_subscriber(grabber, events.BaseWebhookEvent)
    return cfg.make_wsgi_app()
class WebhookTestBase(unittest.TestCase):
    """Shared pyramid fixtures and helpers for the webhook tests."""
    _PREFIX = '/webhook'
    _PATH = _PREFIX + '/receive'
    def setUp(self):
        from pyramid import testing
        self.request = testing.DummyRequest()
        self.config = testing.setUp(request=self.request)
    def tearDown(self):
        from pyramid import testing
        testing.tearDown()
    def _createGrabber(self, event_cls=events.BaseWebhookEvent):
        """Subscribe a fresh EventGrabber to event_cls and return it."""
        grabber = EventGrabber()
        self.config.add_subscriber(grabber, event_cls)
        return grabber
    def _createRequest(self, event_body):
        """Attach event_body (wrapped in a list if needed) as the JSON body."""
        bodies = event_body if isinstance(event_body, list) else [event_body]
        self.request.json_body = bodies
        return self.request
    def _createApp(self, event_cls=events.BaseWebhookEvent):
        """Return a WebTest app around simple_app with its grabber exposed."""
        from webtest.app import TestApp
        app = TestApp(simple_app({}))
        app.grabber = app.app.registry.grabber
        return app
class TestBaseEvents(WebhookTestBase):
    """Parsing/dispatch behaviour shared by all webhook event types."""
    def _makeOne(self, event_type='bounce', category='category'):
        # Minimal SendGrid payload; event type and category are overridable.
        return {
            'asm_group_id': 1,
            'category': category,
            'cert_error': '0',
            'email': 'email@example.com',
            'event': event_type,
            'ip': '127.0.0.1',
            'reason': '500 No Such User',
            'smtp-id': '<original-smtp-id@domain.com>',
            'status': '5.0.0',
            'timestamp': 1249948800,
            'tls': '1',
            'type': 'bounce',
            'unique_arg_key': 'unique_arg_value',
        }
    def _create_dt(self):
        # Datetime equivalent of the 1249948800 timestamp above.
        import datetime
        return datetime.datetime(2009, 8, 11, 0, 0)
    def test_event_parsed(self):
        grabber = self._createGrabber()
        request = self._createRequest(self._makeOne())
        psw.receive_events(request)
        self.assertEqual(len(grabber.events), 1)
    def test_event_parsed_from_request(self):
        # Full-stack variant: POST through the WSGI app instead of calling
        # receive_events directly.
        app = self._createApp()
        grabber = app.grabber
        app.post_json(self._PATH, [self._makeOne()])
        self.assertEqual(len(grabber.events), 1)
    def test_multiple_events_parsed_from_request(self, n=3):
        app = self._createApp()
        grabber = app.grabber
        app.post_json(self._PATH, [self._makeOne()] * n)
        self.assertEqual(len(grabber.events), n)
    def test_specific_event_caught(self):
        # Subscribing to the concrete subclass still receives bounce events.
        grabber = self._createGrabber(events.BounceEvent)
        request = self._createRequest(self._makeOne())
        psw.receive_events(request)
        self.assertEqual(len(grabber.events), 1)
    def test_unspecified_event_ignored(self):
        # A subscriber for a different event type must not fire.
        grabber = self._createGrabber(events.DeferredEvent)
        request = self._createRequest(self._makeOne())
        psw.receive_events(request)
        self.assertEqual(len(grabber.events), 0)
    def test_timestamp_parsed(self):
        grabber = self._createGrabber()
        request = self._createRequest(self._makeOne())
        psw.receive_events(request)
        self.assertEqual(grabber.last.dt, self._create_dt())
    def test_unique_arguments_extracted(self):
        # Keys that are not part of the standard schema become unique args.
        grabber = self._createGrabber()
        request = self._createRequest(self._makeOne())
        psw.receive_events(request)
        self.assertDictEqual(grabber.last.unique_arguments, {
            'unique_arg_key': 'unique_arg_value',
        })
    def test_correct_subclass(self):
        grabber = self._createGrabber()
        request = self._createRequest(self._makeOne())
        psw.receive_events(request)
        self.assertIsInstance(grabber.last, events.BounceEvent)
    def test_unknown_event_raises_exception(self):
        request = self._createRequest(self._makeOne(event_type='UNKNOWN'))
        self.assertRaises(
            errors.UnknownEventError, psw.receive_events, request)
    def test_single_category_is_list_wrapped(self):
        # A scalar category is normalized into a one-element categories list.
        grabber = self._createGrabber()
        request = self._createRequest(self._makeOne())
        psw.receive_events(request)
        self.assertEqual([grabber.last.category], grabber.last.categories)
    def test_multiple_categories_are_unchanged(self):
        grabber = self._createGrabber()
        request = self._createRequest(self._makeOne(category=['c1', 'c2']))
        psw.receive_events(request)
        self.assertEqual(grabber.last.category, grabber.last.categories)
    def test_empty_categories_is_empty_list(self):
        grabber = self._createGrabber()
        request = self._createRequest(self._makeOne(category=None))
        psw.receive_events(request)
        self.assertEqual(grabber.last.categories, [])
class TestDeliveryEvents(WebhookTestBase):
    def _makeOne(self):
        """Return a representative SendGrid delivery-event payload (a bounce)."""
        payload = dict(
            asm_group_id=1,
            category=['category1', 'category2'],
            cert_error='0',
            email='email@example.com',
            event='bounce',
            ip='127.0.0.1',
            reason='500 No Such User',
            status='5.0.0',
            timestamp=1249948800,
            tls='1',
            type='bounce',
            unique_arg_key='unique_arg_value',
        )
        # 'smtp-id' is not a valid identifier, so it cannot be a keyword arg.
        payload['smtp-id'] = '<original-smtp-id@domain.com>'
        return payload
class TestEngagementEvents(WebhookTestBase):
    def _makeOne(self):
        """Return a representative SendGrid engagement-event payload (a click)."""
        return dict(
            asm_group_id=1,
            category=['category1', 'category2'],
            email='email@example.com',
            event='click',
            ip='255.255.255.255',
            timestamp=1249948800,
            unique_arg_key='unique_arg_value',
            url='http://yourdomain.com/blog/news.html',
            useragent='Example Useragent',
        )
# Allow running the tests directly; the exit status reflects the result.
if __name__ == '__main__':
    import sys
    sys.exit(unittest.main())
| GoodRx/pyramid-sendgrid-webhooks | tests/test_pyramid_sendgrid_webhooks.py | Python | mit | 6,637 |
import sys
import threading
from netfilterqueue import NetfilterQueue
import dpkt
import socket
import config
logger = config.logger
class FlowControlQueue:
    ''''' The class creates a NetfilterQueue that blocks/releases IP packets of
    given types.

    Packets whose destination (ip, port, protocol) has been registered via
    block() are held without a verdict until release() is called for that
    endpoint; all other packets are accepted immediately.  The worker is
    informed exactly once per blocked endpoint, on the first captured packet.
    '''''
    def __init__(self, worker, queue_num=config.WorkerConfig.queue_num):
        self.worker = worker
        self.queue_num = queue_num
        self.nfqueue = None
        self.thread_recv_pkt = None
        self.lck = threading.Lock()
        self.block_table = {} # map "$dip:$dport.$proto" -> [blocked_pkts]
        self.worker_already_informed = {} # same key -> bool
        self._debug_flag = True # nfqueue is sensitive of performance
    @classmethod
    def _block_name(cls, dip, dport, proto):
        # Canonical dict key for one (dest ip, dest port, protocol) endpoint.
        return "{0}:{1}.{2}".format(dip, dport, proto)
    def block(self, dip, dport, proto):
        """Start holding packets addressed to (dip, dport, proto)."""
        bname = self._block_name(dip, dport, proto)
        with self.lck:
            if bname not in self.block_table:
                self.block_table[bname] = []
                self.worker_already_informed[bname] = False
        # Log outside the lock to keep the critical section short.
        self._debug("block {0}".format(bname))
    def release(self, dip, dport, proto):
        """Stop blocking (dip, dport, proto) and accept all held packets."""
        bname = self._block_name(dip, dport, proto)
        blocked_pkts = []
        with self.lck:
            if bname in self.block_table:
                blocked_pkts = self.block_table[bname]
                del self.block_table[bname]
                del self.worker_already_informed[bname]
        n = len(blocked_pkts)
        # Verdicts are issued outside the lock.
        for pkt in blocked_pkts:
            pkt.accept()
        self._debug("release {0} packets of {1}".format(n, bname))
    def _run_nfqueue(self):
        """Blocking loop: bind to the netfilter queue and process packets."""
        self.nfqueue = NetfilterQueue()
        self.nfqueue.bind(self.queue_num, self._process_pkt)
        try:
            self._debug("bind to queue #{0}".format(self.queue_num))
            self.nfqueue.run()
        except KeyboardInterrupt:
            # BUG FIX: _debug() takes a single message argument; the original
            # passed logging-style varargs here and raised TypeError.
            self._debug("No.{0} nfqueue stops.".format(self.queue_num))
        finally:
            # Always unbind, even if run() raised something unexpected.
            self.nfqueue.unbind()
            self.nfqueue = None
    def async_run_nfqueue(self):
        """Run the nfqueue loop on a daemon thread; raise if already running."""
        is_alive = self.thread_recv_pkt and self.thread_recv_pkt.is_alive()
        if is_alive or self.nfqueue:
            raise Exception("NetfilterQueue has started.")
        self.thread_recv_pkt = threading.Thread(target=self._run_nfqueue)
        self.thread_recv_pkt.daemon = True # kill thread if main exits
        self.thread_recv_pkt.start()
    def inform_worker(self, dip, dport, proto):
        """Schedule the worker's port_callback on the worker's event loop."""
        self.worker.loop.add_callback(
            lambda: self.worker.port_callback(dip, dport, proto)
        )
    def _process_pkt(self, pkt):
        """nfqueue callback: hold packets for blocked endpoints, accept the rest.

        pkt supports accept/drop/get_payload/get_payload_len/get_timestamp/...
        """
        data = pkt.get_payload()
        ip = dpkt.ip.IP(data)
        dip = socket.inet_ntop(socket.AF_INET, ip.dst)
        dport = ip.data.dport
        proto = "UNKNOWN"
        if ip.p == dpkt.ip.IP_PROTO_TCP:
            proto = "TCP"
        elif ip.p == dpkt.ip.IP_PROTO_UDP:
            proto = "UDP"
        bname = self._block_name(dip, dport, proto)
        blocked = False
        inform = False
        with self.lck:
            if bname in self.block_table:
                self.block_table[bname].append(pkt)
                blocked = True
                # BUG FIX: flip the informed flag while still holding the
                # lock; the original updated it after releasing the lock,
                # which could double-inform the worker under concurrency.
                if not self.worker_already_informed.get(bname, False):
                    self.worker_already_informed[bname] = True
                    inform = True
        if blocked:
            if inform:
                self.inform_worker(dip, dport, proto)
        else:
            pkt.accept()
        self._debug("_process_pkt {0}".format(bname))
    def _debug(self, msg):
        # Gated because nfqueue is sensitive to per-packet overhead.
        if self._debug_flag:
            logger.debug("[FlowControlQueue] "+msg)
| LeoLiangZhang/Picocenter | worker/netfilter.py | Python | mit | 4,059 |
#!/usr/bin/python2
# test.py
# nroberts 04/10/2017
# Instead of lighting up a bridge, we light up the terminal
from tennis_show import TennisShow
import current_bridge
from threading import Thread
import Queue
from colors import Colors
thread_continuing = True
class OutQueue:
    # Stand-in for the show's outbound queue: logs events instead of queueing
    # them (Python 2 print statement).
    def put(self, event):
        print "Put in outqueue: %s" % str(event)
def main(bridge):
    # Drive TennisShow from the keyboard for manual testing:
    #   "1"/"2"  -> swing for that player
    #   "1s/t/c/x" etc. -> pick a colour for that player
    #   "r"      -> reset the game
    global thread_continuing
    print "Usage: Press 1 for player 1 swing, 2 for player 2 swing (followed by Enter)"
    print "To quit, press Ctrl+C and then Enter"
    inqueue = Queue.Queue()
    outqueue = OutQueue()
    show = TennisShow(bridge(), inqueue=inqueue, outqueue=outqueue)
    def cause_problems():
        # Background reader: translate stdin lines into show events until the
        # main thread clears thread_continuing.
        global thread_continuing
        while thread_continuing:
            inp = raw_input()
            if inp == "r":
                inqueue.put(("game_reset", None))
                continue
            try:
                x = int(inp[0])
                if len(inp) > 1:
                    # Second character selects a colour choice event.
                    if inp[1] == "s":
                        inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.RED }))
                    elif inp[1] == "t":
                        inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.GREEN }))
                    elif inp[1] == "c":
                        inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.PURPLE }))
                    elif inp[1] == "x":
                        inqueue.put(("init_color_choice", { "player_num": x, "color": Colors.SKY_BLUE }))
                else:
                    inqueue.put(("game_swing", { "player_num": x, "hand": 1, "strength": 1.0 }))
            except:
                # Non-numeric / malformed input is ignored on purpose.
                pass
    # feed keyboard input onto the inqueue from a background thread
    thread = Thread(target = cause_problems)
    thread.start()
    # run the show
    try:
        show.run(framerate=40)
    finally:
        thread_continuing = False
# Manual test harness entry point: drive the show with the configured bridge.
if __name__ == "__main__":
    main(current_bridge.bridge)
| alexoneill/15-love | game/test.py | Python | mit | 1,989 |
# Core Django imports
from django.contrib import admin
# Third party app imports
from reversion import VersionAdmin
# Local app imports
from mantises.models import Mantis, Breed, Molt
class MantisAdmin(VersionAdmin):
    # Columns shown on the admin changelist.
    list_display = ('__str__',)
    def save_model(self, request, obj, form, change):
        # Stamp the object with the acting admin user before saving.
        obj.user = request.user
        obj.save()
class BreedAdmin(VersionAdmin):
    # Columns shown on the admin changelist.
    list_display = ('__str__', 'long_name')
    def save_model(self, request, obj, form, change):
        # Stamp the object with the acting admin user before saving.
        obj.user = request.user
        obj.save()
class MoltAdmin(VersionAdmin):
    # Columns shown on the admin changelist.
    list_display = ('__str__',)
    def save_model(self, request, obj, form, change):
        # Stamp the object with the acting admin user before saving.
        obj.user = request.user
        obj.save()
# Hook the models into the admin site with their reversion-aware ModelAdmins.
admin.site.register(Mantis, MantisAdmin)
admin.site.register(Breed, BreedAdmin)
admin.site.register(Molt, MoltAdmin) | archen/mantistrack | mantistrack/mantises/admin.py | Python | mit | 831 |
#!/usr/bin/env python
# encoding: utf-8
"""
models.py
Created by Darcy Liu on 2012-03-03.
Copyright (c) 2012 Close To U. All rights reserved.
"""
from django.db import models
from django.contrib.auth.models import User
# class Setting(models.Model):
# sid = models.AutoField(primary_key=True)
# option = models.CharField(unique=True,max_length=128,verbose_name='Option')
# value = models.CharField(max_length=256,verbose_name='Value')
class Minisite(models.Model):
    """A small site owned by a user; Pages hang off it via a ForeignKey."""
    key = models.AutoField(primary_key=True)
    name = models.CharField(max_length=256,verbose_name='name')
    # Unique URL fragment identifying the minisite.
    slug = models.CharField(unique=True,max_length=128,verbose_name='slug')
    meta = models.TextField(blank=True, verbose_name='meta')
    description = models.TextField(blank=True, verbose_name='description')
    author = models.ForeignKey(User,verbose_name='author')
    # Timestamps maintained automatically by Django.
    created = models.DateTimeField(auto_now_add=True,verbose_name='created')
    updated = models.DateTimeField(auto_now=True,verbose_name='updated')
    def __unicode__(self):
        # Python 2 string representation used throughout the admin.
        result = self.name
        return unicode(result)
class Page(models.Model):
    """A single page of a Minisite, stored as source text plus rendered HTML."""
    key = models.AutoField(primary_key=True)
    name = models.CharField(max_length=256,verbose_name='name')
    # Slug is only unique together with the minisite (see Meta below).
    slug = models.CharField(max_length=128,verbose_name='slug')
    #type=//insite standlone
    Mode_Choices = (
        ('0', 'insite'),
        ('1', 'standlone'),
    )
    mode = models.CharField(verbose_name='format',max_length=1,default=0,choices=Mode_Choices)
    #content-type served for standalone pages
    mime = models.CharField(max_length=64,default='text/html;charset=utf-8',verbose_name='mime')
    #format of the source text in `text`
    Format_Choices = (
        ('0', 'txt'),
        ('1', 'html'),
        ('2', 'markdown'),
        ('3', 'textile'),
    )
    format = models.CharField(verbose_name='format',max_length=1,default=0,choices=Format_Choices)
    text = models.TextField(blank=True, verbose_name='content')
    script = models.TextField(blank=True, verbose_name='script')
    style = models.TextField(blank=True, verbose_name='style')
    # Cached rendering of `text` according to `format`.
    text_html = models.TextField(blank=True, verbose_name='html')
    minisite = models.ForeignKey(Minisite,verbose_name='minisite')
    author = models.ForeignKey(User,verbose_name='author')
    created = models.DateTimeField(auto_now_add=True,verbose_name='created')
    updated = models.DateTimeField(auto_now=True,verbose_name='updated')
    def __unicode__(self):
        # Python 2 string representation used throughout the admin.
        result = self.name
        return unicode(result)
class Meta:
unique_together = (('slug', 'minisite'),) | darcyliu/storyboard | home/models.py | Python | mit | 2,582 |
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
    name='melody_scripter',
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version='0.0.8',
    description='Melody Scripter, for parsing melodies from a simple textual format',
    long_description=long_description,
    # The project's main homepage.
    url='https://github.com/pdorrell/melody_scripter',
    # Author details
    author='Philip Dorrell',
    author_email='http://thinkinghard.com/email.html',
    # Choose your license
    license='MIT',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        # BUG FIX: the original listed 'Development Status :: 3 - Beta',
        # which is not a valid trove classifier ('3' is Alpha, '4' is Beta);
        # the 'Beta' maturity level was kept.
        'Development Status :: 4 - Beta',
        # Indicate who your project is intended for.
        # NOTE(review): the original 'Intended Audience :: Musicians' and
        # 'Topic :: Software Development :: Music' are not valid trove
        # classifiers and would be rejected on upload; replaced with the
        # closest valid entries -- confirm they match the intent.
        'Intended Audience :: Developers',
        'Intended Audience :: End Users/Desktop',
        'Topic :: Multimedia :: Sound/Audio :: MIDI',
        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: MIT License',
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
    ],
    # What does your project relate to?
    keywords='music parsing',
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
    # List run-time dependencies here. These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
    install_requires=['regex>=2015.09.15',
                      'midi>=0.2.3'],
    dependency_links=[
        "https://github.com/vishnubob/python-midi/archive/v0.2.3.zip#egg=midi-0.2.3"
    ],
    # List additional groups of dependencies here (e.g. development
    # dependencies). You can install these using the following syntax,
    # for example:
    # $ pip install -e .[dev,test]
    extras_require={
        'dev': ['check-manifest'],
        'test': ['nose'],
    },
    # If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    package_data={
    },
    # Although 'package_data' is the preferred approach, in some case you may
    # need to place data files outside of your packages. See:
    # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
    # In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
    data_files=[],
    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and allow
    # pip to create the appropriate form of executable for the target platform.
    entry_points={
        'console_scripts': [
            'song2midi=melody_scripter.song2midi:main',
            'play_song=melody_scripter.play_song:main',
        ],
    },
)
| pdorrell/melody_scripter | setup.py | Python | mit | 3,795 |
# Copyright 2017-2019 Tom Eulenfeld, MIT license
"""Stack correlations"""
import numpy as np
import obspy
from obspy import UTCDateTime as UTC
from yam.util import _corr_id, _time2sec, IterTime
def stack(stream, length=None, move=None):
    """
    Stack traces in stream by correlation id

    :param stream: |Stream| object with correlations
    :param length: time span of one trace in the stack in seconds
        (alternatively a string consisting of a number and a unit
        -- ``'d'`` for days and ``'h'`` for hours -- can be specified,
        i.e. ``'3d'`` stacks together all traces inside a three days time
        window, default: None, which stacks together all traces)
    :param move: define a moving stack, float or string,
        default: None -- no moving stack,
        if specified move usually is smaller than length to get an overlap
        in the stacked traces
    :return: |Stream| object with stacked correlations
    """
    stream.sort()
    stream_stack = obspy.Stream()
    # One stack (or one moving-stack series) per distinct correlation id.
    ids = {_corr_id(tr) for tr in stream}
    ids.discard(None)
    for id_ in ids:
        traces = [tr for tr in stream if _corr_id(tr) == id_]
        if length is None:
            # Stack everything into a single trace.
            # NOTE(review): float16 accumulator trades precision for memory
            # -- confirm this is intentional.
            data = np.mean([tr.data for tr in traces], dtype='float16',
                           axis=0)
            tr_stack = obspy.Trace(data, header=traces[0].stats)
            tr_stack.stats.key = tr_stack.stats.key + '_s'
            # Propagate the number of original correlations in the stack.
            if 'num' in traces[0].stats:
                tr_stack.stats.num = sum(tr.stats.num for tr in traces)
            else:
                tr_stack.stats.num = len(traces)
            stream_stack.append(tr_stack)
        else:
            t1 = traces[0].stats.starttime
            lensec = _time2sec(length)
            movesec = _time2sec(move) if move else lensec
            # Snap the first window to a day or hour boundary when the window
            # length is a whole number of days/hours.
            # NOTE(review): the docstring documents 'h' for hours but the
            # code tests for 'm' in the length string -- confirm which is
            # intended.
            if (lensec % (24 * 3600) == 0 or
                    isinstance(length, str) and 'd' in length):
                t1 = UTC(t1.year, t1.month, t1.day)
            elif (lensec % 3600 == 0 or
                    isinstance(length, str) and 'm' in length):
                t1 = UTC(t1.year, t1.month, t1.day, t1.hour)
            t2 = max(t1, traces[-1].stats.endtime - lensec)
            for t in IterTime(t1, t2, dt=movesec):
                # Select traces whose start falls inside this window
                # (0.1 s slack against rounding).
                sel = [tr for tr in traces
                       if -0.1 <= tr.stats.starttime - t <= lensec + 0.1]
                if len(sel) == 0:
                    continue
                data = np.mean([tr.data for tr in sel], dtype='float16',
                               axis=0)
                tr_stack = obspy.Trace(data, header=sel[0].stats)
                # Suffix e.g. '_s3d' plus 'm1d' when a moving stack is used
                # (the boolean multiplies the string away when move is None).
                key_add = '_s%s' % length + (move is not None) * ('m%s' % move)
                tr_stack.stats.key = tr_stack.stats.key + key_add
                tr_stack.stats.starttime = t
                if 'num' in traces[0].stats:
                    tr_stack.stats.num = sum(tr.stats.num for tr in sel)
                else:
                    tr_stack.stats.num = len(sel)
                stream_stack.append(tr_stack)
    return stream_stack
| trichter/yam | yam/stack.py | Python | mit | 3,034 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def DVSManagerDvsConfigTarget(vim, *args, **kwargs):
    '''Configuration specification for a DistributedVirtualSwitch or
    DistributedVirtualPortgroup.'''
    obj = vim.client.factory.create('ns0:DVSManagerDvsConfigTarget')
    # do some validation checking...
    # NOTE(review): this guard can never fire (a sum of lengths is >= 0) and
    # the message says "at least 1" although `required` below is empty --
    # an artifact of the code generator; left as-is since the file header
    # says "do not edit".
    if (len(args) + len(kwargs)) < 0:
        raise IndexError('Expected at least 1 arguments got: %d' % len(args))
    required = [ ]
    optional = [ 'distributedVirtualPortgroup', 'distributedVirtualSwitch', 'dynamicProperty',
        'dynamicType' ]
    # Positional args map onto required+optional in declaration order.
    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)
    # Keyword args must name a known property.
    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
    return obj
| xuru/pyvisdk | pyvisdk/do/dvs_manager_dvs_config_target.py | Python | mit | 1,114 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import re
import os
import sys
def get_version(package):
    """
    Return package version as listed in `__version__` in `init.py`.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original left it open until garbage collection).
    with open(os.path.join(package, '__init__.py')) as init_file:
        init_py = init_file.read()
    return re.search(
        "^__version__ = ['\"]([^'\"]+)['\"]",
        init_py, re.MULTILINE).group(1)
package = 'iosfu'
version = get_version(package)
# `python setup.py publish` shortcut: upload an sdist to PyPI, then remind
# the maintainer to tag the release in git.
if sys.argv[-1] == 'publish':
    os.system("python setup.py sdist upload")
    args = {'version': version}
    print("You probably want to also tag the version now:")
    print(" git tag -a %(version)s -m 'version %(version)s'" % args)
    print(" git push --tags")
    sys.exit()
setup(
    name='iosfu',
    version=version,
    url='http://github.com/fmartingr/iosfu',
    license='MIT',
    description='iOS Forensics Utility',
    author='Felipe Martin',
    author_email='fmartingr@me.com',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    install_requires=open('requirements.txt').read().split('\n'),
    classifiers=[
        'Development Status :: 1 - Planning',
        'Intended Audience :: Developers',
        # BUG FIX: the original was missing the trailing comma here, so this
        # string silently concatenated with the next one into a single,
        # invalid classifier.
        'Intended Audience :: Other Audience',
        'Operating System :: OS Independent',
        # BUG FIX: the original used ';;' instead of '::' in the two
        # Python-version classifiers, making them invalid.
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Topic :: Security',
    ]
)
| fmartingr/iosfu | setup.py | Python | mit | 1,441 |
"""Support for ISY994 covers."""
from __future__ import annotations
from typing import Any
from pyisy.constants import ISY_VALUE_UNKNOWN
from homeassistant.components.cover import (
ATTR_POSITION,
DOMAIN as COVER,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
CoverEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
_LOGGER,
DOMAIN as ISY994_DOMAIN,
ISY994_NODES,
ISY994_PROGRAMS,
UOM_8_BIT_RANGE,
UOM_BARRIER,
)
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up the ISY994 cover platform."""
    isy_data = hass.data[ISY994_DOMAIN][entry.entry_id]
    # Covers come from plain nodes plus cover-flagged ISY programs.
    entities: list[ISYCoverEntity | ISYCoverProgramEntity] = [
        ISYCoverEntity(node) for node in isy_data[ISY994_NODES][COVER]
    ]
    entities.extend(
        ISYCoverProgramEntity(name, status, actions)
        for name, status, actions in isy_data[ISY994_PROGRAMS][COVER]
    )
    await migrate_old_unique_ids(hass, COVER, entities)
    async_add_entities(entities)
class ISYCoverEntity(ISYNodeEntity, CoverEntity):
    """Representation of an ISY994 cover device."""

    @property
    def current_cover_position(self) -> int | None:
        """Return the current cover position."""
        status = self._node.status
        if status == ISY_VALUE_UNKNOWN:
            return None
        if self._node.uom == UOM_8_BIT_RANGE:
            # Scale the device's 0-255 range to percent.
            return round(status * 100.0 / 255.0)
        # Clamp to 0..100 and truncate to an int.
        return int(min(100, max(0, status)))

    @property
    def is_closed(self) -> bool | None:
        """Get whether the ISY994 cover device is closed."""
        status = self._node.status
        if status == ISY_VALUE_UNKNOWN:
            return None
        return bool(status == 0)

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION

    async def async_open_cover(self, **kwargs: Any) -> None:
        """Send the open cover command to the ISY994 cover device."""
        # Barrier-type covers need an explicit 100, others use the default.
        target = 100 if self._node.uom == UOM_BARRIER else None
        if not await self._node.turn_on(val=target):
            _LOGGER.error("Unable to open the cover")

    async def async_close_cover(self, **kwargs: Any) -> None:
        """Send the close cover command to the ISY994 cover device."""
        if not await self._node.turn_off():
            _LOGGER.error("Unable to close the cover")

    async def async_set_cover_position(self, **kwargs: Any) -> None:
        """Move the cover to a specific position."""
        target = kwargs[ATTR_POSITION]
        if self._node.uom == UOM_8_BIT_RANGE:
            # Convert percent back to the device's 0-255 range.
            target = round(target * 255.0 / 100.0)
        if not await self._node.turn_on(val=target):
            _LOGGER.error("Unable to set cover position")
class ISYCoverProgramEntity(ISYProgramEntity, CoverEntity):
    """Representation of an ISY994 cover program."""

    @property
    def is_closed(self) -> bool:
        """Get whether the ISY994 cover program is closed."""
        return bool(self._node.status)

    async def async_open_cover(self, **kwargs: Any) -> None:
        """Send the open cover command to the ISY994 cover program."""
        succeeded = await self._actions.run_then()
        if not succeeded:
            _LOGGER.error("Unable to open the cover")

    async def async_close_cover(self, **kwargs: Any) -> None:
        """Send the close cover command to the ISY994 cover program."""
        succeeded = await self._actions.run_else()
        if not succeeded:
            _LOGGER.error("Unable to close the cover")
| rohitranjan1991/home-assistant | homeassistant/components/isy994/cover.py | Python | mit | 3,841 |
"""
@author: Nikhith !!
"""
from pycricbuzz import Cricbuzz
import json
import sys
""" Writing a CLI for Live score """
# Build the API client once at import time; abort the script if unreachable.
try:
    cric_obj = Cricbuzz() # cric_obj contains object instance of Cricbuzz Class
    matches = cric_obj.matches()
except:
    # NOTE(review): bare except also hides SystemExit/KeyboardInterrupt;
    # consider narrowing to the connection error actually expected.
    print "Connection dobhindhi bey!"
    sys.exit(0)
# matches func is returning List of dictionaries
""" Key items in match dict : 1) status -- ex) Starts on Jun 15 at 09:30 GMT
2) mnum -- ex) 2nd Semi-Final (A2 VS B1)
3) mchdesc-- ex) BAN vs IND
4) srs -- ex) ICC Champions Trophy, 2017
5) mchstate- ex) preview / abandon / Result / complete
6) type -- ex) ODI
7) id -- ex) 4 / 6 (anything random given)
"""
"""CLI must contain commands for
-- current matches
-- selecting match by match id
-- getCommentary
"""
def upcomingmatches():
    """Prints upcoming matches list
    """
    count = 1
    # "preview" state == scheduled but not yet started.
    for match in matches:
        if match['mchstate'] == "preview":
            print str(count)+". "+str(match['mchdesc'])+ " - "+ str(match['srs'])+"- - "+str(match['status'])
            count = count + 1
def currentlive():
    """Prints Current LIVE MATCHES"""
    # count stays 1 when nothing is live; it doubles as the "found any" flag.
    count = 1
    for match in matches:
        #print str(match['mchdesc']) + " match id: " + str(match['mchstate'])
        if (match['mchstate'] == "innings break" ) :
            print str(match['mchdesc'])+" match id: "+str(match['id'])
            count = count + 1
        if (match['mchstate'] == "inprogress" ) :
            print str(match['mchdesc'])+" match id: "+str(match['id'])
            count = count + 1
        if match['mchstate'] == "delay":
            print str(match['mchdesc'])+" -> match has been delayed due to rain..! Enjoy the drizzle..!!"
    if count == 1:
        # No live match: fall back to the day's schedule.
        # NOTE(review): this branch returns None, which the caller passes on
        # to commentary(); that relies on commentary()'s try/except to cope.
        print "\nNO LIVE MATCHES RIGHT NOW!\n"
        print "UPCOMING MATCHES TODAY!"
        upcomingmatches()
    else:
        # NOTE(review): Python 2 input() eval()s what the user types;
        # raw_input() + int() would be safer.
        id = input("Enter corresponding match id : ")
        gotolive(id)
        return id
def calculate_runrate(runs, overs):
    """Return the run rate (runs per over) for `runs` scored in `overs`.

    `overs` uses cricket notation ("10.3" = 10 overs and 3 balls); both
    arguments may be numbers or numeric strings.  Returns 0.0 when no
    balls have been bowled (the original raised ZeroDivisionError).
    """
    whole, _, balls_part = str(overs).partition('.')
    # Total deliveries bowled: 6 per completed over plus the extra balls.
    balls = int(whole) * 6 + (int(balls_part) if balls_part else 0)
    if balls == 0:
        return 0.0
    return float(runs) / balls * 6
def gotolive(matchid):
    # Print a live scoreboard for one match: innings totals, current batsmen,
    # current bowlers and (first innings only) the run rate.
    batobj = cric_obj.livescore(matchid)['batting']
    bowlobj = cric_obj.livescore(matchid)['bowling']
    print "\n "+str(batobj['team'])+" vs "+str(bowlobj['team'])+"\n"
    print " "+str(cric_obj.livescore(matchid)['matchinfo']['status'])+"\n"
    # An empty bowling score means the first innings is still in progress.
    if (bowlobj['score'] == []):
        print "1st INNINGS: "+str(batobj['team'])+" => "+str(batobj['score'][0]['runs'])+"/"+str(batobj['score'][0]['wickets'])+" ("+str(batobj['score'][0]['overs'])+" Overs)"
        print "Batting:"
        try:
            print " " + str(batobj['batsman'][0]['name']) + " : " + str(batobj['batsman'][0]['runs']) + " (" + str(batobj['batsman'][0]['balls']) + ")"
            print " " + str(batobj['batsman'][1]['name']) + " : " + str(batobj['batsman'][1]['runs']) + " (" + str(batobj['batsman'][1]['balls']) + ")"
        except:
            # Fewer than two batsmen in the feed => a wicket just fell.
            print "Wicket!!!!"
        print "Bowling:"
        print " " + str(bowlobj['bowler'][0]['name']) + " : " + str(bowlobj['bowler'][0]['runs']) + " /" + str(bowlobj['bowler'][0]['wickets']) + " (" + str(bowlobj['bowler'][0]['overs']) + ")"
        print " " + str(bowlobj['bowler'][1]['name']) + " : " + str(bowlobj['bowler'][1]['runs']) + " /" + str(bowlobj['bowler'][1]['wickets']) + " (" + str(bowlobj['bowler'][1]['overs']) + ")"
        print "Runrate:"
        print ' {:1.2f}'.format(calculate_runrate(str(batobj['score'][0]['runs']),str(batobj['score'][0]['overs'])))
    else:
        # Second innings: the bowling side batted first.
        print "1st INNINGS: "+str(bowlobj['team'])+" => "+str(bowlobj['score'][0]['runs'])+"/"+str(bowlobj['score'][0]['wickets'])+" ("+str(bowlobj['score'][0]['overs'])+" Overs)"
        print "2nd INNINGS: "+str(batobj['team'])+" => "+str(batobj['score'][0]['runs'])+"/"+str(batobj['score'][0]['wickets'])+" ("+str(batobj['score'][0]['overs'])+" Overs)"
        print "Batting:"
        try:
            print " "+str(batobj['batsman'][0]['name'])+" : "+str(batobj['batsman'][0]['runs'])+" ("+str(batobj['batsman'][0]['balls'])+")"
            print " " + str(batobj['batsman'][1]['name']) + " : " + str(batobj['batsman'][1]['runs']) + " (" + str(batobj['batsman'][1]['balls']) + ")"
        except:
            print "Wicket!!"
        print "Bowling:"
        print " " + str(bowlobj['bowler'][0]['name']) + " : " + str(bowlobj['bowler'][0]['runs'])+" /"+str(bowlobj['bowler'][0]['wickets']) + " (" + str(bowlobj['bowler'][0]['overs']) + ")"
        print " " + str(bowlobj['bowler'][1]['name']) + " : " + str(bowlobj['bowler'][1]['runs']) + " /" + str(bowlobj['bowler'][1]['wickets']) + " (" + str(bowlobj['bowler'][1]['overs']) + ")"
        print "Summary:"
        print " " + str(cric_obj.livescore(matchid)['matchinfo']['status'])
def last12Balls():
    # TODO: unimplemented stub -- presumably meant to show the last two overs.
    pass
def commentary(matchid):
    # Print the six most recent commentary entries for the match.
    print "\nCommentary: "
    try:
        # NOTE(review): each iteration issues a fresh API call; hoisting
        # cric_obj.commentary(matchid) out of the loop would avoid 6 requests.
        for i in range(6):
            print " "+str(cric_obj.commentary(matchid)['commentary'][i])
        print "************************************************************************************************"
    except:
        # Covers both API failures and matchid being None (no live match).
        print "No running commentary.. now..!!"
if __name__ == '__main__':
    # Show live matches (prompting for an id), then print its commentary.
    matchid=currentlive()
    commentary(matchid)
| nikkitricky/nikbuzz | score.py | Python | mit | 5,706 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class GrantAccessData(Model):
"""Data used for requesting a SAS.
:param access: Possible values include: 'None', 'Read'
:type access: str or
~azure.mgmt.compute.v2016_04_30_preview.models.AccessLevel
:param duration_in_seconds: Time duration in seconds until the SAS access
expires.
:type duration_in_seconds: int
"""
_validation = {
'access': {'required': True},
'duration_in_seconds': {'required': True},
}
_attribute_map = {
'access': {'key': 'access', 'type': 'AccessLevel'},
'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'int'},
}
def __init__(self, access, duration_in_seconds):
super(GrantAccessData, self).__init__()
self.access = access
self.duration_in_seconds = duration_in_seconds
| AutorestCI/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/grant_access_data.py | Python | mit | 1,336 |
import time
from bs4 import BeautifulSoup
import sys
if (sys.version_info > (3, 0)):
# Python 3 code in this block
import urllib.request as urllib2
else:
# Python 2 code in this block
import urllib2
import datetime, re, os
class NonNCEPModel:
    """Base class for all non-NCEP weather models.

    Subclasses are expected to provide ``self.name``, ``self.defaultTimes``
    (a list of forecast-hour strings such as '000', '006', ...) and
    optionally ``self.modelAlias``.
    """

    def __init__(self):
        self.modelUrls = ''
        self.isNCEPSource = False
        return

    def getPreviousTime(self, model, currentHour):
        """Return the forecast hour preceding ``currentHour`` for this model.

        Falls back to '000' when ``currentHour`` is the first hour or is not
        found among the default hours.
        """
        if currentHour == '000':
            return '000'
        # sorted() instead of list.sort(): the original sorted
        # self.defaultTimes *in place*, silently reordering the
        # instance's list as a side effect of a read-only query.
        defaultHours = sorted(self.getDefaultHours())  # assert ascending order
        for (idx, hour) in enumerate(defaultHours):
            if currentHour == hour:
                return defaultHours[idx - 1]
        return '000'

    def setDefaultHours(self):
        """Reset the model's hour stamps to the defaults."""
        self.modelTimes = self.defaultTimes
        return

    def getDefaultHours(self):
        """Return the model's default forecast-hour list.

        (The original class defined this method twice with identical
        behavior; the duplicate has been removed.)
        """
        return self.defaultTimes

    def getName(self):
        """Return the model's name. (Duplicate definition removed.)"""
        return self.name

    def getAlias(self):
        """Return the model's alias, falling back to its name.

        NOTE(review): the original checked ``self.modelAliases`` (plural)
        but returned ``self.modelAlias`` — almost certainly a typo; both
        now use ``modelAlias``, with a safe default when it is unset.
        """
        if getattr(self, 'modelAlias', '') != "":
            return self.modelAlias
        else:
            return self.name

    def getForecastHourInt(self, filename, noPrefix=False):
        """Return the forecast hour of ``filename`` as an int (prefix char stripped)."""
        fhour = self.getForecastHour(filename, noPrefix)
        return int(fhour[1:])

    def getForecastHour(self, fileName, noPrefix=False):
        """Hook for subclasses: extract the forecast-hour string from a filename."""
        return ""

    def getLastForecastHour(self):
        """Hook for subclasses: the final forecast hour produced by this model."""
        return "000"

    def getRun(self):
        """Hook for subclasses: the model run (cycle); base class has none."""
        return
import numpy as np
from scipy.special import sph_harm, lpmv
try:
from scipy.misc import factorial
except:
from scipy.special import factorial
def sh(l, m, theta, phi, field='real', normalization='quantum', condon_shortley=True):
    """Evaluate a spherical harmonic, dispatching on the requested field.

    ``field='real'`` delegates to :func:`rsh`, ``field='complex'`` to
    :func:`csh`; any other value raises ``ValueError``.
    """
    dispatch = {'real': rsh, 'complex': csh}
    try:
        impl = dispatch[field]
    except KeyError:
        raise ValueError('Unknown field: ' + str(field))
    return impl(l, m, theta, phi, normalization, condon_shortley)
def sh_squared_norm(l, normalization='quantum', normalized_haar=True):
    """Return the squared L2 norm of a spherical harmonic of degree ``l``.

    The squared norm of a function on the sphere is
    ``|f|^2 = int_{S^2} |f(x)|^2 dx`` for a Haar measure ``dx``.

    'quantum'/'seismology' harmonics are unit-norm w.r.t. the plain Haar
    measure ``dtheta sin(theta) dphi``; 'geodesy' harmonics have squared
    norm ``4 pi`` w.r.t. that measure; 'nfft' has ``4 pi / (2 l + 1)``.

    :param l: degree (only affects the 'nfft' convention)
    :param normalization: one of 'quantum', 'seismology', 'geodesy', 'nfft'
    :param normalized_haar: if True, use the normalized measure
        ``dtheta sin(theta) dphi / (4 pi)`` instead
    :raises ValueError: for an unknown normalization
    :return: the squared norm under the chosen measure
    """
    if normalization in ('quantum', 'seismology'):
        sqnorm = 1.
    elif normalization == 'geodesy':
        sqnorm = 4 * np.pi
    elif normalization == 'nfft':
        sqnorm = 4 * np.pi / (2 * l + 1)
    else:
        raise ValueError('Unknown normalization')
    # Dividing by 4 pi converts from the plain to the normalized Haar measure.
    return sqnorm / (4 * np.pi) if normalized_haar else sqnorm
def block_sh_ph(L_max, theta, phi):
    """
    Compute all spherical harmonics up to and including degree L_max, for angles theta and phi.
    This function is currently rather hacky, but the method used here is very fast and stable, compared
    to builtin scipy functions.

    The trick: the center column of a Wigner D matrix equals the spherical
    harmonics, so rotating a selector vector through the block-basis rotation
    code yields all harmonics at once.

    :param L_max: maximum degree (inclusive); all Y_l^m with l <= L_max are computed
    :param theta: polar angles — assumed to be a numpy array (``theta.size`` is used); presumably radians — TODO confirm
    :param phi: azimuthal angles, same shape as theta
    :return: array of harmonics in the ('real', 'quantum', 'centered', 'cs') basis
    """
    from .pinchon_hoggan.pinchon_hoggan import apply_rotation_block, make_c2b
    from .irrep_bases import change_of_basis_function
    irreps = np.arange(L_max + 1)
    # Degree label for every (l, m) pair, flattened.
    ls = [[ls] * (2 * ls + 1) for ls in irreps]
    ls = np.array([ll for sublist in ls for ll in sublist])  # 0, 1, 1, 1, 2, 2, 2, 2, 2, ...
    # Order label for every (l, m) pair, flattened.
    ms = [list(range(-ls, ls + 1)) for ls in irreps]
    ms = np.array([mm for sublist in ms for mm in sublist])  # 0, -1, 0, 1, -2, -1, 0, 1, 2, ...
    # Get a vector Y that selects the 0-frequency component from each irrep in the centered basis
    # If D is a Wigner D matrix, then D Y is the center column of D, which is equal to the spherical harmonics.
    Y = (ms == 0).astype(float)
    # Change to / from the block basis (since the rotation code works in that basis)
    c2b = change_of_basis_function(irreps,
                                   frm=('real', 'quantum', 'centered', 'cs'),
                                   to=('real', 'quantum', 'block', 'cs'))
    b2c = change_of_basis_function(irreps,
                                   frm=('real', 'quantum', 'block', 'cs'),
                                   to=('real', 'quantum', 'centered', 'cs'))
    Yb = c2b(Y)
    # Rotate Yb:
    c2b = make_c2b(irreps)
    import os
    # Precomputed Pinchon-Hoggan J matrices shipped with the package.
    J_block = np.load(os.path.join(os.path.dirname(__file__), 'pinchon_hoggan', 'J_block_0-278.npy'), allow_pickle=True)
    J_block = list(J_block[irreps])
    # Euler-angle triples (phi, theta, 0) — one rotation per sample point.
    g = np.zeros((theta.size, 3))
    g[:, 0] = phi
    g[:, 1] = theta
    TYb = apply_rotation_block(g=g, X=Yb[np.newaxis, :],
                               irreps=irreps, c2b=c2b,
                               J_block=J_block, l_max=np.max(irreps))
    # NOTE(review): debug prints left in by the author; kept to preserve behavior.
    print(Yb.shape, TYb.shape)
    # Change back to centered basis
    TYc = b2c(TYb.T).T  # b2c doesn't work properly for matrices, so do a transpose hack
    print(TYc.shape)
    # Somehow, the SH obtained so far are equal to real, nfft, cs spherical harmonics
    # Change to real quantum centered cs
    c = change_of_basis_function(irreps,
                                 frm=('real', 'nfft', 'centered', 'cs'),
                                 to=('real', 'quantum', 'centered', 'cs'))
    TYc2 = c(TYc)
    print(TYc2.shape)
    return TYc2
def rsh(l, m, theta, phi, normalization='quantum', condon_shortley=True):
    """
    Compute the real spherical harmonic (RSH) S_l^m(theta, phi).
    The RSH are obtained from Complex Spherical Harmonics (CSH) as follows:
    if m < 0:
        S_l^m = i / sqrt(2) * (Y_l^m - (-1)^m Y_l^{-m})
    if m == 0:
        S_l^m = Y_l^0
    if m > 0:
        S_l^m = 1 / sqrt(2) * (Y_l^{-m} + (-1)^m Y_l^m)
    (see [1])
    Various normalizations for the CSH exist, see the CSH() function. Since the CSH->RSH change of basis is unitary,
    the orthogonality and normalization properties of the RSH are the same as those of the CSH from which they were
    obtained. Furthermore, the operation of changing normalization and that of changeing field
    (complex->real or vice-versa) commute, because the ratio c_m of normalization constants are always the same for
    m and -m (to see this that this implies commutativity, substitute Y_l^m * c_m for Y_l^m in the above formula).
    Pinchon & Hoggan [2] define a different change of basis for CSH -> RSH, but they also use an unusual definition
    of CSH. To obtain RSH as defined by Pinchon-Hoggan, use this function with normalization='quantum'.
    References:
    [1] http://en.wikipedia.org/wiki/Spherical_harmonics#Real_form
    [2] Rotation matrices for real spherical harmonics: general rotations of atomic orbitals in space-fixed axes.
    :param l: non-negative integer; the degree of the CSH.
    :param m: integer, -l <= m <= l; the order of the CSH.
    :param theta: the colatitude / polar angle,
    ranging from 0 (North Pole, (X,Y,Z)=(0,0,1)) to pi (South Pole, (X,Y,Z)=(0,0,-1)).
    :param phi: the longitude / azimuthal angle, ranging from 0 to 2 pi.
    :param normalization: how to normalize the RSH:
    'seismology', 'quantum', 'geodesy'.
    these are immediately passed to the CSH functions, and since the change of basis
    from CSH to RSH is unitary, the orthogonality and normalization properties are unchanged.
    :return: the value of the real spherical harmonic S^l_m(theta, phi)
    """
    # Broadcasting lets l, m, theta, phi each be scalars or arrays.
    l, m, theta, phi = np.broadcast_arrays(l, m, theta, phi)
    # Get the CSH for m and -m, using Condon-Shortley phase (regardless of whhether CS is requested or not)
    # The reason is that the code that changes from CSH to RSH assumes CS phase.
    a = csh(l=l, m=m, theta=theta, phi=phi, normalization=normalization, condon_shortley=True)
    b = csh(l=l, m=-m, theta=theta, phi=phi, normalization=normalization, condon_shortley=True)

    # Scalar (per-element) version of the combination below, kept for reference:
    #if m > 0:
    #    y = np.array((b + ((-1.)**m) * a).real / np.sqrt(2.))
    #elif m < 0:
    #    y = np.array((1j * a - 1j * ((-1.)**(-m)) * b).real / np.sqrt(2.))
    #else:
    #    # For m == 0, the complex spherical harmonics are already real
    #    y = np.array(a.real)

    # Vectorized combination: boolean masks (m > 0), (m < 0), (m == 0) are
    # mutually exclusive, so exactly one term contributes per element.
    y = ((m > 0) * np.array((b + ((-1.)**m) * a).real / np.sqrt(2.))
         + (m < 0) * np.array((1j * a - 1j * ((-1.)**(-m)) * b).real / np.sqrt(2.))
         + (m == 0) * np.array(a.real))

    if condon_shortley:
        return y
    else:
        # Cancel the CS phase of y (i.e. multiply by -1 when m is both odd and greater than 0)
        return y * ((-1.) ** (m * (m > 0)))
def csh(l, m, theta, phi, normalization='quantum', condon_shortley=True):
    """Compute the complex spherical harmonic Y_l^m(theta, phi).

    Here theta is the colatitude / polar angle in [0, pi] and phi the
    longitude / azimuthal angle in [0, 2 pi]. Note that scipy's sph_harm
    uses the opposite naming, so arguments are swapped when calling it.

    The backbone is Y_l^m = P_l^m(cos theta) exp(i m phi), scaled per
    convention:
      - 'quantum':      (-1)^m sqrt((2l+1)(l-m)! / (4 pi (l+m)!))
      - 'seismology':   sqrt((2l+1)(l-m)! / (4 pi (l+m)!))
      - 'geodesy':      sqrt((2l+1)(l-m)! / (l+m)!)
      - 'nfft':         sqrt((l-m)! / (l+m)!)
      - 'unnormalized': 1
    'quantum'/'seismology' are orthonormal w.r.t. sin(theta) dtheta dphi;
    'geodesy' has unit power (norm 4 pi).

    When condon_shortley is False, the Condon-Shortley phase — a factor of
    (-1)^m for positive odd m — that scipy builds in is cancelled.

    :param l: non-negative integer degree
    :param m: integer order, -l <= m <= l
    :param theta: polar angle(s)
    :param phi: azimuthal angle(s)
    :param normalization: one of the conventions listed above
    :raises ValueError: for an unknown normalization convention
    :return: the (complex) value of Y_l^m(theta, phi)
    """
    known = ('quantum', 'seismology', 'geodesy', 'unnormalized', 'nfft')
    if normalization not in known:
        raise ValueError('Unknown normalization convention:' + str(normalization))

    # scipy's sph_harm is the 'seismology' harmonic with CS phase included;
    # note the swapped angle arguments (scipy keyword args no longer accepted).
    base = sph_harm(m, l, phi, theta)

    if normalization == 'quantum':
        y = ((-1.) ** m) * base
    elif normalization == 'seismology':
        y = base
    elif normalization == 'geodesy':
        y = np.sqrt(4 * np.pi) * base
    elif normalization == 'unnormalized':
        y = base / np.sqrt((2 * l + 1) * factorial(l - m) /
                           (4 * np.pi * factorial(l + m)))
    else:  # 'nfft'
        y = base / np.sqrt((2 * l + 1) / (4 * np.pi))

    if condon_shortley:
        # sph_harm already includes the CS phase.
        return y
    # Cancel the built-in CS phase: multiply by -1 when m is odd and > 0.
    return y * ((-1.) ** (m * (m > 0)))
# For testing only:
def _naive_csh_unnormalized(l, m, theta, phi):
"""
Compute unnormalized SH
"""
return lpmv(m, l, np.cos(theta)) * np.exp(1j * m * phi)
def _naive_csh_quantum(l, m, theta, phi):
"""
Compute orthonormalized spherical harmonics in a naive way.
"""
return (((-1.) ** m) * lpmv(m, l, np.cos(theta)) * np.exp(1j * m * phi) *
np.sqrt(((2 * l + 1) * factorial(l - m))
/
(4 * np.pi * factorial(l + m))))
def _naive_csh_seismology(l, m, theta, phi):
"""
Compute the spherical harmonics according to the seismology convention, in a naive way.
This appears to be equal to the sph_harm function in scipy.special.
"""
return (lpmv(m, l, np.cos(theta)) * np.exp(1j * m * phi) *
np.sqrt(((2 * l + 1) * factorial(l - m))
/
(4 * np.pi * factorial(l + m))))
def _naive_csh_ph(l, m, theta, phi):
"""
CSH as defined by Pinchon-Hoggan. Same as wikipedia's quantum-normalized SH = naive_Y_quantum()
"""
if l == 0 and m == 0:
return 1. / np.sqrt(4 * np.pi)
else:
phase = ((1j) ** (m + np.abs(m)))
normalizer = np.sqrt(((2 * l + 1.) * factorial(l - np.abs(m)))
/
(4 * np.pi * factorial(l + np.abs(m))))
P = lpmv(np.abs(m), l, np.cos(theta))
e = np.exp(1j * m * phi)
return phase * normalizer * P * e
def _naive_rsh_ph(l, m, theta, phi):
if m == 0:
return np.sqrt((2 * l + 1.) / (4 * np.pi)) * lpmv(m, l, np.cos(theta))
elif m < 0:
return np.sqrt(((2 * l + 1.) * factorial(l + m)) /
(2 * np.pi * factorial(l - m))) * lpmv(-m, l, np.cos(theta)) * np.sin(-m * phi)
elif m > 0:
return np.sqrt(((2 * l + 1.) * factorial(l - m)) /
(2 * np.pi * factorial(l + m))) * lpmv(m, l, np.cos(theta)) * np.cos(m * phi)
| AMLab-Amsterdam/lie_learn | lie_learn/representations/SO3/spherical_harmonics.py | Python | mit | 13,535 |
import unittest
import two_fer
class Two_Fer_test(unittest.TestCase):
    """Exercises two_fer.two_fer: the default recipient and explicit names."""

    def test_empty(self):
        # No argument: the generic "you" phrasing is expected.
        self.assertEqual(two_fer.two_fer(), 'One for you, one for me.')

    def test_eve(self):
        self.assertEqual(two_fer.two_fer("Eve"), "One for Eve, one for me.")

    def test_bob(self):
        self.assertEqual(two_fer.two_fer("Bob"), "One for Bob, one for me.")

if __name__ == '__main__':
    unittest.main()
| mweb/python | exercises/two-fer/two_fer_test.py | Python | mit | 424 |
"""
RenderPipeline
Copyright (c) 2014-2016 tobspr <tobias.springer1@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
"""
Script to download the Render Pipeline samples
"""
import os
import sys
sys.path.insert(0, "../")
sys.path.insert(0, "../rpcore/util")
from submodule_downloader import download_submodule
if __name__ == "__main__":
    # Make sure we are in the right directory (the script's own directory),
    # so the samples are extracted next to this file.
    main_dir = os.path.dirname(os.path.realpath(__file__))
    os.chdir(main_dir)

    # Now extract the samples; the sample repo's README/LICENSE are skipped.
    download_submodule("tobspr", "RenderPipeline-Samples", ".", ["README.md", "LICENSE"])
| croxis/SpaceDrive | spacedrive/renderpipeline/samples/download_samples.py | Python | mit | 1,629 |
#!/usr/bin/python
#
# Copyright 2014: wycomco GmbH (choules@wycomco.de)
# 2015: modifications by Tim Sutton
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""See docstring for AdobeReaderURLProvider class"""
# Disabling warnings for env members and imports that only affect recipe-
# specific processors.
#pylint: disable=e1101
import urllib2
import plistlib
from autopkglib import Processor, ProcessorError
__all__ = ["AdobeReaderUpdatesURLProvider"]
MAJOR_VERSION_DEFAULT = "11"
CHECK_OS_VERSION_DEFAULT = "10.8"
MAJOR_VERSION_MATCH_STR = "adobe/reader/mac/%s"
AR_UPDATER_DOWNLOAD_URL = (
"http://download.adobe.com/"
"pub/adobe/reader/mac/%s.x/%s/misc/AdbeRdrUpd%s.dmg")
AR_UPDATER_DOWNLOAD_URL2 = "http://ardownload.adobe.com"
AR_UPDATER_BASE_URL = "https://armmf.adobe.com/arm-manifests/mac"
AR_URL_TEMPLATE = "/%s/current_version_url_template.txt"
AR_MANIFEST_TEMPLATE = "/%s/manifest_url_template.txt"
AR_MAJREV_IDENTIFIER = "{MAJREV}"
OSX_MAJREV_IDENTIFIER = "{OS_VER_MAJ}"
OSX_MINREV_IDENTIFIER = "{OS_VER_MIN}"
AR_PROD_IDENTIFIER = '{PROD}'
AR_PROD_ARCH_IDENTIFIER = '{PROD_ARCH}'
AR_PROD = 'com_adobe_Reader'
AR_PROD_ARCH = 'univ'
class AdobeReaderUpdatesURLProvider(Processor):
    """Provides URL to the latest Adobe Reader release."""
    description = __doc__
    input_variables = {
        "major_version": {
            "required": False,
            "description": ("Major version. Examples: '10', '11'. Defaults to "
                            "%s" % MAJOR_VERSION_DEFAULT)
        },
        "os_version": {
            "required": False,
            "default": CHECK_OS_VERSION_DEFAULT,
            "description": ("Version of OS X to check. Default: %s" %
                            CHECK_OS_VERSION_DEFAULT)
        }
    }
    output_variables = {
        "url": {
            "description": "URL to the latest Adobe Reader release.",
        },
        "version": {
            "description": "Version for this update.",
        },
    }

    def _fill_common_placeholders(self, template, major_version):
        """Substitute the Reader major version and OS version placeholders
        in a template string fetched from Adobe's ARM service.

        (Extracted: this substitution was duplicated in both URL methods.)
        """
        os_maj, os_min = self.env["os_version"].split(".")
        template = template.replace(AR_MAJREV_IDENTIFIER, major_version)
        template = template.replace(OSX_MAJREV_IDENTIFIER, os_maj)
        template = template.replace(OSX_MINREV_IDENTIFIER, os_min)
        return template

    def get_reader_updater_pkg_url(self, major_version):
        '''Returns download URL for Adobe Reader Updater package,
        resolved via the ARM manifest for the given major version.

        Raises ProcessorError when the manifest cannot be fetched or parsed.
        '''
        request = urllib2.Request(
            AR_UPDATER_BASE_URL + AR_MANIFEST_TEMPLATE % major_version)
        try:
            url_handle = urllib2.urlopen(request)
            version_string = url_handle.read()
            url_handle.close()
        # Narrowed from BaseException, which also swallowed
        # KeyboardInterrupt and SystemExit.
        except Exception as err:
            raise ProcessorError("Can't open manifest template: %s" % (err))

        version_string = self._fill_common_placeholders(version_string,
                                                        major_version)
        # The manifest template additionally carries product placeholders.
        version_string = version_string.replace(AR_PROD_IDENTIFIER, AR_PROD)
        version_string = version_string.replace(AR_PROD_ARCH_IDENTIFIER,
                                                AR_PROD_ARCH)

        request = urllib2.Request(
            AR_UPDATER_BASE_URL + version_string)
        try:
            url_handle = urllib2.urlopen(request)
            plist = plistlib.readPlistFromString(url_handle.read())
            url_handle.close()
        except Exception as err:
            raise ProcessorError("Can't get or read manifest: %s" % (err))

        url = AR_UPDATER_DOWNLOAD_URL2 + plist['PatchURL']
        return url

    def get_reader_updater_dmg_url(self, major_version):
        '''Returns download URL for Adobe Reader Updater DMG along with the
        current version string, resolved via the ARM URL template.

        Raises ProcessorError when the template or version cannot be fetched.
        '''
        request = urllib2.Request(
            AR_UPDATER_BASE_URL + AR_URL_TEMPLATE % major_version)
        try:
            url_handle = urllib2.urlopen(request)
            version_string = url_handle.read()
            url_handle.close()
        except Exception as err:
            raise ProcessorError("Can't open URL template: %s" % (err))

        version_string = self._fill_common_placeholders(version_string,
                                                        major_version)

        request = urllib2.Request(
            AR_UPDATER_BASE_URL + version_string)
        try:
            url_handle = urllib2.urlopen(request)
            version = url_handle.read()
            url_handle.close()
        except Exception as err:
            raise ProcessorError("Can't get version string: %s" % (err))

        # e.g. '11.0.10' -> '11010' as used in the download file name.
        versioncode = version.replace('.', '')
        url = AR_UPDATER_DOWNLOAD_URL % (major_version, version, versioncode)
        return (url, version)

    def main(self):
        major_version = self.env.get("major_version", MAJOR_VERSION_DEFAULT)
        (url, version) = self.get_reader_updater_dmg_url(major_version)
        # only need the version, getting the URL from the manifest now
        url = self.get_reader_updater_pkg_url(major_version)
        self.env["url"] = url
        self.env["version"] = version
        self.output("Found URL %s" % self.env["url"])
self.output("Found URL %s" % self.env["url"])
if __name__ == "__main__":
    # Allow running this autopkg processor standalone for testing.
    PROCESSOR = AdobeReaderUpdatesURLProvider()
    PROCESSOR.execute_shell()
| FabianN/autopkg_recipies | AdobeReader/AdobeReaderUpdatesURLProvider.py | Python | mit | 5,706 |
from django import forms
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import ugettext_lazy as _
from .models import User
class UserCreationForm(forms.ModelForm):
    """Creates a new user from an email address, asking for the password
    twice so typos can be caught before the account exists."""
    password1 = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
    password2 = forms.CharField(
        label=_("Password confirmation"), widget=forms.PasswordInput,
        help_text=_("Enter the same password as above, for verification.")
    )

    class Meta:
        model = User
        fields = ("email",)

    def clean_password2(self):
        """Reject the form when both passwords were entered but differ."""
        pw1 = self.cleaned_data.get("password1")
        pw2 = self.cleaned_data.get("password2")
        both_given = pw1 and pw2
        if both_given and pw1 != pw2:
            raise forms.ValidationError(
                _("The two password fields didn't match."),
                code='password_mismatch',
            )
        return pw2

    def save(self, commit=True):
        """Hash the chosen password onto the instance before saving."""
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user
class UserChangeForm(forms.ModelForm):
    """Edits an existing user; the password is displayed as a read-only hash."""
    password = ReadOnlyPasswordHashField(
        label=_("Password"),
        help_text=_("Raw passwords are not stored, so there is no way to see "
                    "this user's password, but you can change the password "
                    "using <a href=\"password/\">this form</a>."))

    class Meta:
        model = User
        fields = '__all__'

    def __init__(self, *args, **kwargs):
        super(UserChangeForm, self).__init__(*args, **kwargs)
        permissions_field = self.fields.get('user_permissions', None)
        if permissions_field is not None:
            # Prefetch the content type to avoid one query per permission row.
            permissions_field.queryset = (
                permissions_field.queryset.select_related('content_type'))

    def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # (The field is read-only; its widget never submits a real password.)
        return self.initial["password"]
| esistgut/django-content-toolkit | accounts/forms.py | Python | mit | 2,072 |
import os
import sys
class Widget(object):
    """
    A rectangular User Interface (UI) component.

    Each widget claims a rectangular region of its content and is
    responsible for all drawing within that region.
    """

    def __init__(self, name, width=50, height=50):
        self.name = name
        self.resize(width, height)

    def size(self):
        """Return the current dimensions as a (width, height) tuple."""
        return (self.width, self.height)

    def resize(self, width, height):
        """Set the widget's width and height."""
        self.width = width
        self.height = height
import click
import requests
@click.command()
@click.argument('url')
@click.option('--show-headers', '-H', is_flag=True, default=False)
@click.option('--show-status', '-S', is_flag=True, default=False)
@click.option('--quiet', '-Q', is_flag=True, default=False)
@click.option('--allow-redirects/--no-allow-redirects', default=True)
@click.option('--verbose', '-v', is_flag=True, default=False)
def cli(url, show_headers, show_status, quiet, allow_redirects, verbose):
    """Fetch URL and print the response body; status and headers go to
    stderr when requested."""
    # Make the request
    if verbose:
        click.secho('Making HTTP request to "{0}"...'.format(url), err=True, fg='white')

    try:
        response = requests.get(url, allow_redirects=allow_redirects)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        # Expected HTTP-level failures: connection errors, bad status, etc.
        click.secho(str(e), err=True, fg='yellow' )
        raise click.Abort()
    except Exception as e:
        # Anything unexpected.
        click.secho(str(e), err=True, fg='red' )
        raise click.Abort()

    # Color per status class (2xx green, 3xx blue, 4xx yellow, 5xx red).
    status_colors = {
        2: 'green',
        3: 'blue',
        4: 'yellow',
        5: 'red',
    }

    # Show the response status
    if show_status:
        # BUG FIX: use floor division. Under Python 3, `/` yields a float
        # (e.g. 2.0), so the int-keyed dict lookup always missed and the
        # status line was never colored.
        status_color = status_colors.get(int(response.status_code) // 100)
        click.secho('Status: {0}'.format(response.status_code), err=True, fg=status_color)

    # Show the response headers
    if show_headers:
        click.echo(format_headers(response.headers), err=True)

    # Show the response body
    if not quiet:
        click.echo(response.text)
def format_headers(headers):
    """Render a headers mapping as newline-separated 'Name: value' lines."""
    formatted = ['{0}: {1}'.format(k, v) for k, v in headers.items()]
    return '\n'.join(formatted)


# BUG FIX: define format_headers *before* invoking the CLI. It was
# previously defined after this guard, so running the script with
# --show-headers raised NameError (cli() executes before the def ran).
if __name__ == '__main__':
    cli()
| tylerdave/reqcli | reqcli/cli.py | Python | mit | 1,667 |
import os
import socket
import venusian
from botocore.exceptions import ClientError
from flowy.swf.client import SWFClient, IDENTITY_SIZE
from flowy.swf.decision import SWFActivityDecision
from flowy.swf.decision import SWFWorkflowDecision
from flowy.swf.history import SWFExecutionHistory
from flowy.utils import logger
from flowy.utils import setup_default_logger
from flowy.worker import Worker
__all__ = ['SWFWorkflowWorker', 'SWFActivityWorker']
class SWFWorker(Worker):
    """Base SWF worker: keys tasks on a (name, version) pair and keeps a
    list of callbacks used to register task configs remotely in Amazon SWF."""

    def __init__(self):
        super(SWFWorker, self).__init__()
        self.remote_reg_callbacks = []

    def __call__(self, name, version, input_data, decision, *extra_args):
        key = (str(name), str(version))
        return super(SWFWorker, self).__call__(key, input_data, decision,
                                               *extra_args)

    def register_remote(self, swf_client, domain):
        """Register or check compatibility of all configs in Amazon SWF."""
        for remote_reg_callback in self.remote_reg_callbacks:
            # Each callback raises if there are registration problems.
            remote_reg_callback(swf_client, domain)

    def register(self, config, func, version, name=None):
        super(SWFWorker, self).register(config, func, (name, version))

    def add_remote_reg_callback(self, callback):
        """Queue a callback to be run by register_remote()."""
        self.remote_reg_callbacks.append(callback)

    def make_scanner(self):
        """Build a venusian scanner wired to this worker's registration hooks."""
        return venusian.Scanner(
            register_task=self.register_task,
            add_remote_reg_callback=self.add_remote_reg_callback)
class SWFWorkflowWorker(SWFWorker):
    """Single-threaded worker that polls and runs SWF *workflow* decisions."""

    # venusian scan categories handled by this worker.
    categories = ['swf_workflow']

    # Be explicit about what arguments are expected
    def __call__(self, name, version, input_data, decision, execution_history):
        super(SWFWorkflowWorker, self).__call__(
            name, version, input_data, decision,  # needed for worker logic
            decision, execution_history)  # extra_args passed to proxies

    def break_loop(self):
        """Used to exit the loop in tests. Return True to break."""
        return False

    def run_forever(self, domain, task_list,
                    swf_client=None,
                    setup_log=True,
                    register_remote=True,
                    identity=None):
        """Starts an endless single threaded/single process worker loop.
        The worker polls endlessly for new decisions from the specified domain
        and task list and runs them.
        If reg_remote is set, all registered workflow are registered remotely.
        An identity can be set to track this worker in the SWF console,
        otherwise a default identity is generated from this machine domain and
        process pid.
        If setup_log is set, a default configuration for the logger is loaded.
        A custom SWF client can be passed in swf_client, otherwise a default
        client is used.
        """
        if setup_log:
            setup_default_logger()
        identity = default_identity() if identity is None else identity
        swf_client = SWFClient() if swf_client is None else swf_client
        if register_remote:
            # Raises if any registered config is incompatible with SWF.
            self.register_remote(swf_client, domain)
        try:
            while 1:
                if self.break_loop():
                    break
                # poll_decision blocks until a complete decision task
                # (all history pages) is available.
                name, version, input_data, exec_history, decision = poll_decision(
                    swf_client, domain, task_list, identity)
                self(name, version, input_data, decision, exec_history)
        except KeyboardInterrupt:
            # Ctrl-C is the expected way to stop the loop; exit quietly.
            pass
class SWFActivityWorker(SWFWorker):
    """Single-threaded worker that polls and runs SWF *activity* tasks."""

    # venusian scan categories handled by this worker.
    categories = ['swf_activity']

    # Be explicit about what arguments are expected
    def __call__(self, name, version, input_data, decision):
        # No extra arguments are used
        super(SWFActivityWorker, self).__call__(
            name, version, input_data, decision,  # needed for worker logic
            decision.heartbeat)  # extra_args

    def break_loop(self):
        """Used to exit the loop in tests. Return True to break."""
        return False

    def run_forever(self, domain, task_list,
                    swf_client=None,
                    setup_log=True,
                    register_remote=True,
                    identity=None):
        """Same as SWFWorkflowWorker.run_forever but for activities."""
        if setup_log:
            setup_default_logger()
        identity = default_identity() if identity is None else identity
        swf_client = SWFClient() if swf_client is None else swf_client
        if register_remote:
            self.register_remote(swf_client, domain)
        try:
            while 1:
                if self.break_loop():
                    break
                # Poll until SWF hands back a real task (empty responses
                # carry no 'taskToken'); client errors are retried forever.
                swf_response = {}
                while not swf_response.get('taskToken'):
                    try:
                        swf_response = swf_client.poll_for_activity_task(
                            domain, task_list, identity=identity)
                    except ClientError:
                        # add a delay before retrying?
                        logger.exception('Error while polling for activities:')

                at = swf_response['activityType']
                decision = SWFActivityDecision(swf_client, swf_response['taskToken'])
                self(at['name'], at['version'], swf_response['input'], decision)
        except KeyboardInterrupt:
            # Ctrl-C is the expected way to stop the loop; exit quietly.
            pass
def default_identity():
    """Generate a local identity string for this process.

    Combines the machine's fully-qualified domain name with the process id,
    truncated from the left so the most specific part survives SWF's
    identity-length limit.
    """
    identity = '{0}-{1}'.format(socket.getfqdn(), os.getpid())
    return identity[-IDENTITY_SIZE:]  # keep the most important part
def poll_decision(swf_client, domain, task_list, identity=None):
    """Poll a decision and create a SWFWorkflowContext structure.
    :type swf_client: :class:`SWFClient`
    :param swf_client: an implementation or duck typing of :class:`SWFClient`
    :param domain: the domain containing the task list to poll
    :param task_list: the task list from which to poll decision
    :param identity: an identity str of the request maker
    :rtype: tuple
    :returns: a tuple consisting of (name, version, input_data,
        :class:'SWFExecutionHistory', :class:`SWFWorkflowDecision`)
    """
    first_page = poll_first_page(swf_client, domain, task_list, identity)
    token = first_page['taskToken']
    all_events = events(swf_client, domain, task_list, first_page, identity)
    # Sometimes the first event is on the second page,
    # and the first page is empty
    first_event = next(all_events)
    assert first_event['eventType'] == 'WorkflowExecutionStarted'
    wesea = 'workflowExecutionStartedEventAttributes'
    assert first_event[wesea]['taskList']['name'] == task_list
    # Pull the execution's static configuration off the start event.
    task_duration = first_event[wesea]['taskStartToCloseTimeout']
    workflow_duration = first_event[wesea]['executionStartToCloseTimeout']
    tags = first_event[wesea].get('tagList', None)
    child_policy = first_event[wesea]['childPolicy']
    name = first_event[wesea]['workflowType']['name']
    version = first_event[wesea]['workflowType']['version']
    input_data = first_event[wesea]['input']
    try:
        running, timedout, results, errors, order = load_events(all_events)
    except _PaginationError:
        # There's nothing better to do than to retry
        return poll_decision(swf_client, domain, task_list, identity)
    execution_history = SWFExecutionHistory(running, timedout, results, errors, order)
    decision = SWFWorkflowDecision(swf_client, token, name, version, task_list,
                                   task_duration, workflow_duration, tags,
                                   child_policy)
    return name, version, input_data, execution_history, decision
def poll_first_page(swf_client, domain, task_list, identity=None):
    """Return the first page of a decision task, retrying indefinitely.

    Empty responses (no 'taskToken') and transient client errors are both
    retried until a valid response arrives.

    :type swf_client: :class:`SWFClient`
    :param swf_client: an implementation or duck typing of :class:`SWFClient`
    :param domain: the domain containing the task list to poll
    :param task_list: the task list from which to poll for events
    :param identity: an identity str of the request maker
    :rtype: dict[str, str|int|list|dict]
    :returns: a dict containing workflow information and list of events
    """
    while True:
        try:
            response = swf_client.poll_for_decision_task(
                domain, task_list, identity=identity)
        except ClientError:
            logger.exception('Error while polling for decisions:')
            continue
        if response.get('taskToken'):
            return response
def poll_page(swf_client, domain, task_list, token, identity=None):
    """Return the decision-task page identified by ``token``.

    Transient client errors are retried a limited number of times; after
    that a :class:`_PaginationError` is raised so the caller can restart
    the whole poll.

    :type swf_client: :class:`SWFClient`
    :param swf_client: an implementation or duck typing of :class:`SWFClient`
    :param domain: the domain containing the task list to poll
    :param task_list: the task list from which to poll for events
    :param token: the token string for the requested page
    :param identity: an identity str of the request maker
    :rtype: dict[str, str|int|list|dict]
    :returns: a dict containing workflow information and list of events
    """
    attempts_left = 7  # give up after a limited number of retries
    while attempts_left:
        try:
            return swf_client.poll_for_decision_task(
                domain, task_list, identity=identity, next_page_token=token)
        except ClientError:
            logger.exception('Error while polling for decision page:')
            attempts_left -= 1
    raise _PaginationError()
def events(swf_client, domain, task_list, first_page, identity=None):
    """Generate every event across all pages, fetching pages lazily.

    :type swf_client: :class:`SWFClient`
    :param swf_client: an implementation or duck typing of :class:`SWFClient`
    :param domain: the domain containing the task list to poll
    :param task_list: the task list from which to poll for events
    :param first_page: the page dict structure from which to start generating
        the events, usually the response from :func:`poll_first_page`
    :param identity: an identity str of the request maker
    :rtype: collections.Iterator[dict[str, int|str|dict[str, int|str|dict]]
    :returns: iterator over all of the events
    """
    current = first_page
    while True:
        for evt in current['events']:
            yield evt
        token = current.get('nextPageToken')
        if not token:
            return
        current = poll_page(swf_client, domain, task_list, token,
                            identity=identity)
def load_events(event_iter):
    """Fold a stream of SWF history events into the execution state.

    Returns a tuple of five values:
        running  - set of call keys still in flight
        timedout - set of call keys whose task timed out
        results  - dict of call key -> serialized result of finished tasks
        errors   - dict of call key -> failure reason of failed tasks
        order    - list of call keys in the order they finished
    """
    running, timedout = set(), set()
    results, errors = {}, {}
    order = []
    # Maps the scheduling event id to the activity's call key, so later
    # completion/failure events can be tied back to their call.
    sched_to_call = {}
    for event in event_iter:
        kind = event.get('eventType')
        if kind == 'ActivityTaskScheduled':
            attrs = event['activityTaskScheduledEventAttributes']
            call_key = attrs['activityId']
            sched_to_call[event['eventId']] = call_key
            running.add(call_key)
        elif kind == 'ActivityTaskCompleted':
            attrs = event['activityTaskCompletedEventAttributes']
            call_key = sched_to_call[attrs['scheduledEventId']]
            result = attrs['result']
            running.remove(call_key)
            results[call_key] = result
            order.append(call_key)
        elif kind == 'ActivityTaskFailed':
            attrs = event['activityTaskFailedEventAttributes']
            call_key = sched_to_call[attrs['scheduledEventId']]
            reason = attrs['reason']
            running.remove(call_key)
            errors[call_key] = reason
            order.append(call_key)
        elif kind == 'ActivityTaskTimedOut':
            attrs = event['activityTaskTimedOutEventAttributes']
            call_key = sched_to_call[attrs['scheduledEventId']]
            running.remove(call_key)
            timedout.add(call_key)
            order.append(call_key)
        elif kind == 'ScheduleActivityTaskFailed':
            attrs = event['scheduleActivityTaskFailedEventAttributes']
            call_key = attrs['activityId']
            reason = attrs['cause']
            # when a job is not found it's not even started
            errors[call_key] = reason
            order.append(call_key)
        elif kind == 'StartChildWorkflowExecutionInitiated':
            attrs = event['startChildWorkflowExecutionInitiatedEventAttributes']
            call_key = _subworkflow_call_key(attrs['workflowId'])
            running.add(call_key)
        elif kind == 'ChildWorkflowExecutionCompleted':
            attrs = event['childWorkflowExecutionCompletedEventAttributes']
            call_key = _subworkflow_call_key(
                attrs['workflowExecution']['workflowId'])
            result = attrs['result']
            running.remove(call_key)
            results[call_key] = result
            order.append(call_key)
        elif kind == 'ChildWorkflowExecutionFailed':
            attrs = event['childWorkflowExecutionFailedEventAttributes']
            call_key = _subworkflow_call_key(
                attrs['workflowExecution']['workflowId'])
            reason = attrs['reason']
            running.remove(call_key)
            errors[call_key] = reason
            order.append(call_key)
        elif kind == 'ChildWorkflowExecutionTimedOut':
            attrs = event['childWorkflowExecutionTimedOutEventAttributes']
            call_key = _subworkflow_call_key(
                attrs['workflowExecution']['workflowId'])
            running.remove(call_key)
            timedout.add(call_key)
            order.append(call_key)
        elif kind == 'StartChildWorkflowExecutionFailed':
            attrs = event['startChildWorkflowExecutionFailedEventAttributes']
            call_key = _subworkflow_call_key(attrs['workflowId'])
            reason = attrs['cause']
            # NOTE(review): unlike the child-workflow failure events above,
            # the key is not removed from `running` here -- confirm intended.
            errors[call_key] = reason
            order.append(call_key)
        elif kind == 'TimerStarted':
            call_key = event['timerStartedEventAttributes']['timerId']
            running.add(call_key)
        elif kind == 'TimerFired':
            call_key = event['timerFiredEventAttributes']['timerId']
            running.remove(call_key)
            # A fired timer records a None result and is deliberately not
            # appended to the finish order.
            results[call_key] = None
    return running, timedout, results, errors, order
class _PaginationError(Exception):
    """Raised when the next history page could not be retrieved after the
    maximum number of retries (see :func:`poll_page`)."""
def _subworkflow_call_key(w_id):
return w_id.split(':')[-1]
| severb/flowy | flowy/swf/worker.py | Python | mit | 14,853 |
__author__ = 'had'
# The MIT License (MIT)
# Copyright (c) [2015] [Houtmann Hadrien]
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the Aidez-moi), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django_tables2 import RequestConfig
from ticket.forms.forms import TicketForm, ResponseForm, StatusForm
from ticket.models import Tickets, UserProfile, Follow
from ticket.tables import TicketsTables
from django.contrib import messages
from django.utils.translation import ugettext as _
from ticket.tasks import send_new_ticket_all_staff, incomplete_ticket
from djangoticket.settings import USE_MAIL
import json
from django.views.decorators.cache import cache_page
@login_required(login_url='login/')
def home(request):
    """Render the home dashboard with the 10 most recent tickets.

    Staff users see the latest tickets system-wide; other users only see
    tickets they created.  ``ticket_incomplete`` (the number of open
    tickets visible to this user) was previously computed with a DB
    query in both branches but never handed to the template; it is now
    part of the render context so the template can display it.
    """
    if request.user.is_staff:
        # select_related avoids one extra query per row when the template
        # dereferences these foreign keys.
        ticket_list = Tickets\
            .objects\
            .select_related('create_by',
                            'assign_to',
                            'category')\
            .order_by('-created')[:10:1]
        ticket_incomplete = Tickets.objects.filter(complete=0).count()
    else:
        ticket_list = Tickets.objects.filter(
            create_by=request.user).order_by('-created')[:10:1]
        ticket_incomplete = Tickets.objects.filter(
            create_by=request.user,
            complete=0).count()
    return render(request, 'home.html',
                  {'ticket_list': ticket_list,
                   'ticket_incomplete': ticket_incomplete})
from pivoteer import *
from pivotEngine import *
from pivotUtils import *
| BechtelCIRT/pivoteer | pivoteer/__init__.py | Python | mit | 74 |
from typing import List, Optional
from backend.common.consts.ranking_sort_orders import SORT_ORDER_INFO
from backend.common.models.event_details import EventDetails
from backend.common.models.event_ranking import EventRanking
from backend.common.models.event_team_status import WLTRecord
from backend.common.models.keys import TeamKey, Year
from backend.common.models.ranking_sort_order_info import RankingSortOrderInfo
class RankingsHelper:
    """Builds normalized event-ranking dictionaries."""

    # Years whose ranking model did not include a win-loss-tie record.
    NO_RECORD_YEARS = {2010, 2015, 2021}

    # Years whose ranking model used a qualification average.
    QUAL_AVERAGE_YEARS = {2015}

    @classmethod
    def build_ranking(
        cls,
        year: Year,
        rank: int,
        team_key: TeamKey,
        wins: int,
        losses: int,
        ties: int,
        qual_average: Optional[float],
        matches_played: int,
        dq: int,
        sort_orders: List[float],
    ) -> EventRanking:
        """Assemble a normalized :class:`EventRanking` dict for *team_key*.

        The W/L/T record and the qualification average are dropped
        (set to None) for years in which they did not affect rank, and
        every sort-order value is coerced to float (falling back to 0.0
        for values float() rejects).
        """
        record: Optional[WLTRecord] = None
        if year not in cls.NO_RECORD_YEARS:
            record = {
                "wins": int(wins),
                "losses": int(losses),
                "ties": int(ties),
            }
        if year not in cls.QUAL_AVERAGE_YEARS:
            qual_average = None

        # Narrowed from a bare `except Exception`, which also masked
        # unrelated programming errors; TypeError/ValueError are what
        # float() raises for non-coercible values.
        sort_orders_sanitized = []
        for so in sort_orders:
            try:
                sort_orders_sanitized.append(float(so))
            except (TypeError, ValueError):
                sort_orders_sanitized.append(0.0)

        return {
            "rank": int(rank),
            "team_key": team_key,
            "record": record,  # None if record doesn't affect rank (e.g. 2010, 2015)
            "qual_average": qual_average,  # None if qual_average doesn't affect rank (all years except 2015)
            "matches_played": int(matches_played),
            "dq": int(dq),
            "sort_orders": sort_orders_sanitized,
        }

    @classmethod
    def get_sort_order_info(
        cls, event_details: EventDetails
    ) -> Optional[List[RankingSortOrderInfo]]:
        """Return the sort-order metadata for the event's game year, or
        None when the year has no entry in SORT_ORDER_INFO."""
        return SORT_ORDER_INFO.get(event_details.game_year)
| the-blue-alliance/the-blue-alliance | src/backend/common/helpers/rankings_helper.py | Python | mit | 1,985 |
## Copyright (c) 2003 Henk Punt
## Permission is hereby granted, free of charge, to any person obtaining
## a copy of this software and associated documentation files (the
## "Software"), to deal in the Software without restriction, including
## without limitation the rights to use, copy, modify, merge, publish,
## distribute, sublicense, and/or sell copies of the Software, and to
## permit persons to whom the Software is furnished to do so, subject to
## the following conditions:
## The above copyright notice and this permission notice shall be
## included in all copies or substantial portions of the Software.
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
## LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
## OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
## WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE
from ctypes import *
#TODO auto ie/comctl detection
WIN32_IE = 0x0550
#TODO: auto unicode selection,
#if unicode:
# CreateWindowEx = windll.user32.CreateWindowExW
#else:
# CreateWindowEx = windll.user32.CreateWindowExA
#etc, etc
DWORD = c_ulong
HANDLE = c_ulong
UINT = c_uint
BOOL = c_int
HWND = HANDLE
HINSTANCE = HANDLE
HICON = HANDLE
HDC = HANDLE
HCURSOR = HANDLE
HBRUSH = HANDLE
HMENU = HANDLE
HBITMAP = HANDLE
HIMAGELIST = HANDLE
HGDIOBJ = HANDLE
HMETAFILE = HANDLE
ULONG = DWORD
ULONG_PTR = DWORD
UINT_PTR = DWORD
LONG_PTR = DWORD
INT = c_int
LPCTSTR = c_char_p
LPTSTR = c_char_p
PSTR = c_char_p
LPCSTR = c_char_p
LPCWSTR = c_wchar_p
LPSTR = c_char_p
LPWSTR = c_wchar_p
PVOID = c_void_p
USHORT = c_ushort
WORD = c_ushort
ATOM = WORD
SHORT = c_short
LPARAM = c_ulong
WPARAM = c_uint
LPVOID = c_voidp
LONG = c_long
BYTE = c_byte
TCHAR = c_char #TODO depends on unicode/wide conventions
DWORD_PTR = c_ulong #TODO what is this exactly?
INT_PTR = c_ulong #TODO what is this exactly?
COLORREF = c_ulong
CLIPFORMAT = WORD
FLOAT = c_float
CHAR = c_char
WCHAR = c_wchar
FXPT16DOT16 = c_long
FXPT2DOT30 = c_long
LCSCSTYPE = c_long
LCSGAMUTMATCH = c_long
COLOR16 = USHORT
LRESULT = LONG_PTR
#### Windows version detection ##############################
class OSVERSIONINFO(Structure):
    """ctypes mirror of the Win32 OSVERSIONINFO structure (filled in by
    GetVersionEx); szCSDVersion is a 128-character text buffer."""
    _fields_ = [("dwOSVersionInfoSize", DWORD),
                ("dwMajorVersion", DWORD),
                ("dwMinorVersion", DWORD),
                ("dwBuildNumber", DWORD),
                ("dwPlatformId", DWORD),
                ("szCSDVersion", TCHAR * 128)]
    def isMajorMinor(self, major, minor):
        """Return True when the OS version is exactly *major*.*minor*."""
        return (self.dwMajorVersion, self.dwMinorVersion) == (major, minor)
GetVersion = windll.kernel32.GetVersionExA
versionInfo = OSVERSIONINFO()
versionInfo.dwOSVersionInfoSize = sizeof(versionInfo)
GetVersion(byref(versionInfo))
def MAKELONG(w1, w2):
    """Pack two 16-bit words into a 32-bit value: w1 low word, w2 high."""
    return (w2 << 16) | w1
MAKELPARAM = MAKELONG
def RGB(r, g, b):
    """Pack red/green/blue byte values into a COLORREF (0x00BBGGRR)."""
    return (b << 16) | (g << 8) | r
##### Windows Callback functions ################################
WNDPROC = WINFUNCTYPE(c_int, HWND, UINT, WPARAM, LPARAM)
DialogProc = WINFUNCTYPE(c_int, HWND, UINT, WPARAM, LPARAM)
CBTProc = WINFUNCTYPE(c_int, c_int, c_int, c_int)
MessageProc = CBTProc
EnumChildProc = WINFUNCTYPE(c_int, HWND, LPARAM)
MSGBOXCALLBACK = WINFUNCTYPE(c_int, HWND, LPARAM) #TODO look up real def
class WNDCLASSEX(Structure):
    """ctypes mirror of the Win32 WNDCLASSEX structure, passed to
    RegisterClassEx to register a window class (lpfnWndProc is the
    WNDPROC callback defined above)."""
    _fields_ = [("cbSize", UINT),
                ("style", UINT),
                ("lpfnWndProc", WNDPROC),
                ("cbClsExtra", INT),
                ("cbWndExtra", INT),
                ("hInstance", HINSTANCE),
                ("hIcon", HICON),
                ("hCursor", HCURSOR),
                ("hbrBackground", HBRUSH),
                ("lpszMenuName", LPCTSTR),
                ("lpszClassName", LPCTSTR),
                ("hIconSm", HICON)]
class POINT(Structure):
    """Win32 POINT structure: a 2-D location with signed 32-bit x/y."""
    _fields_ = [("x", LONG),
                ("y", LONG)]
    def __str__(self):
        return "POINT {x: %d, y: %d}" % (self.x, self.y)
POINTL = POINT
class POINTS(Structure):
_fields_ = [("x", SHORT),
("y", SHORT)]
PtInRect = windll.user32.PtInRect
class RECT(Structure):
    """Win32 RECT structure: a rectangle described by its four edges."""
    _fields_ = [("left", LONG),
                ("top", LONG),
                ("right", LONG),
                ("bottom", LONG)]

    def __str__(self):
        return "RECT {left: %d, top: %d, right: %d, bottom: %d}" % \
            (self.left, self.top, self.right, self.bottom)

    def getWidth(self):
        # horizontal extent: right edge minus left edge
        return self.right - self.left

    width = property(getWidth, None, None, "")

    def getHeight(self):
        # vertical extent: bottom edge minus top edge
        return self.bottom - self.top

    height = property(getHeight, None, None, "")

    def getSize(self):
        return self.width, self.height

    size = property(getSize, None, None, "")

    def ContainsPoint(self, pt):
        """Return True when the POINT *pt* lies inside this RECT."""
        return bool(PtInRect(byref(self), pt))
RECTL = RECT
class SIZE(Structure):
_fields_ = [('cx', LONG),
('cy', LONG)]
SIZEL = SIZE
##class MSG(Structure):
## _fields_ = [("hWnd", HWND),
## ("message", UINT),
## ("wParam", WPARAM),
## ("lParam", LPARAM),
## ("time", DWORD),
## ("pt", POINT)]
## def __str__(self):
## return "MSG {%d %d %d %d %d %s}" % (self.hWnd, self.message, self.wParam, self.lParam,
## self.time, str(self.pt))
#Hack: we need to use the same MSG type as ctypes uses!
from ctypes.wintypes import MSG
class ACCEL(Structure):
_fields_ = [("fVirt", BYTE),
("key", WORD),
("cmd", WORD)]
class CREATESTRUCT(Structure):
_fields_ = [("lpCreateParams", LPVOID),
("hInstance", HINSTANCE),
("hMenu", HMENU),
("hwndParent", HWND),
("cx", INT),
("cy", INT),
("x", INT),
("y", INT),
("style", LONG),
("lpszName", LPCTSTR),
("lpszClass", LPCTSTR),
("dwExStyle", DWORD)]
class NMHDR(Structure):
_fields_ = [("hwndFrom", HWND),
("idFrom", UINT),
("code", UINT)]
class PAINTSTRUCT(Structure):
_fields_ = [("hdc", HDC),
("fErase", BOOL),
("rcPaint", RECT),
("fRestore", BOOL),
("fIncUpdate", BOOL),
("rgbReserved", c_char * 32)]
class MENUITEMINFO(Structure):
_fields_ = [("cbSize", UINT),
("fMask", UINT),
("fType", UINT),
("fState", UINT),
("wID", UINT),
("hSubMenu", HMENU),
("hbmpChecked", HBITMAP),
("hbmpUnchecked", HBITMAP),
("dwItemData", ULONG_PTR),
("dwTypeData", LPTSTR),
("cch", UINT),
("hbmpItem", HBITMAP)]
class DLGTEMPLATE(Structure):
_pack_ = 2
_fields_ = [
("style", DWORD),
("exStyle", DWORD),
("cDlgItems", WORD),
("x", c_short),
("y", c_short),
("cx", c_short),
("cy", c_short)
]
class DLGITEMTEMPLATE(Structure):
_pack_ = 2
_fields_ = [
("style", DWORD),
("exStyle", DWORD),
("x", c_short),
("y", c_short),
("cx", c_short),
("cy", c_short),
("id", WORD)
]
class COPYDATASTRUCT(Structure):
_fields_ = [
("dwData", ULONG_PTR),
("cbData", DWORD),
("lpData", PVOID)]
def LOWORD(dword):
    """Return the low-order word (bottom 16 bits) of *dword*."""
    return dword & 0xFFFF
def HIWORD(dword):
    """Return the high-order word of *dword* (a plain right shift: the
    result is not masked to 16 bits, so inputs wider than 32 bits keep
    their upper bits)."""
    return dword >> 16
TRUE = 1
FALSE = 0
NULL = 0
IDI_APPLICATION = 32512
SW_SHOW = 5
SW_SHOWNORMAL = 1
SW_HIDE = 0
EN_CHANGE = 768
MSGS = [('WM_NULL', 0),
('WM_CREATE', 1),
('WM_CANCELMODE', 31),
('WM_CAPTURECHANGED', 533),
('WM_CLOSE', 16),
('WM_COMMAND', 273),
('WM_DESTROY', 2),
('WM_ERASEBKGND', 20),
('WM_GETFONT', 49),
('WM_INITDIALOG', 272),
('WM_INITMENUPOPUP', 279),
('WM_KEYDOWN', 256),
('WM_KEYFIRST', 256),
('WM_KEYLAST', 264),
('WM_KEYUP', 257),
('WM_LBUTTONDBLCLK', 515),
('WM_LBUTTONDOWN', 513),
('WM_LBUTTONUP', 514),
('WM_MBUTTONDBLCLK', 521),
('WM_MBUTTONDOWN', 519),
('WM_MBUTTONUP', 520),
('WM_MENUSELECT', 287),
('WM_MOUSEFIRST', 512),
('WM_MOUSEHOVER', 673),
('WM_MOUSELEAVE', 675),
('WM_MOUSEMOVE', 512),
('WM_MOVE', 3),
('WM_NCCREATE', 129),
('WM_NCDESTROY', 130),
('WM_NOTIFY', 78),
('WM_PAINT', 15),
('WM_RBUTTONDBLCLK', 518),
('WM_RBUTTONDOWN', 516),
('WM_RBUTTONUP', 517),
('WM_SETCURSOR', 32),
('WM_SETFONT', 48),
('WM_SETREDRAW', 11),
('WM_SIZE', 5),
('WM_SYSKEYDOWN', 260),
('WM_SYSKEYUP', 261),
('WM_USER', 1024),
('WM_WINDOWPOSCHANGED', 71),
('WM_WINDOWPOSCHANGING', 70),
('WM_SETTEXT', 12),
('WM_GETTEXT', 13),
('WM_GETTEXTLENGTH', 14),
('WM_ACTIVATE', 6),
('WM_HSCROLL', 276),
('WM_VSCROLL', 277),
('WM_CTLCOLORBTN', 309),
('WM_CTLCOLORDLG', 310),
('WM_CTLCOLOREDIT', 307),
('WM_CTLCOLORLISTBOX', 308),
('WM_CTLCOLORMSGBOX', 306),
('WM_CTLCOLORSCROLLBAR', 311),
('WM_CTLCOLORSTATIC', 312),
('WM_TIMER', 0x0113),
('WM_CONTEXTMENU', 0x007B),
('WM_COPYDATA', 0x004A)
]
#insert wm_* msgs as constants in this module:
for key, val in MSGS:
exec('%s = %d' % (key, val)) #TODO without using 'exec'?
BN_CLICKED = 0
VK_DOWN = 40
VK_LEFT = 37
VK_RIGHT = 39
VK_DELETE = 0x2E
CS_HREDRAW = 2
CS_VREDRAW = 1
WHITE_BRUSH = 0
BLACK_BRUSH = 4
MIIM_STATE= 1
MIIM_ID= 2
MIIM_SUBMENU =4
MIIM_CHECKMARKS= 8
MIIM_TYPE= 16
MIIM_DATA= 32
MIIM_STRING= 64
MIIM_BITMAP= 128
MIIM_FTYPE =256
MFT_BITMAP= 4
MFT_MENUBARBREAK =32
MFT_MENUBREAK= 64
MFT_OWNERDRAW= 256
MFT_RADIOCHECK= 512
MFT_RIGHTJUSTIFY= 0x4000
MFT_SEPARATOR =0x800
MFT_RIGHTORDER= 0x2000L
MFT_STRING = 0
MF_ENABLED =0
MF_GRAYED =1
MF_DISABLED =2
MF_BITMAP =4
MF_CHECKED =8
MF_MENUBARBREAK= 32
MF_MENUBREAK =64
MF_OWNERDRAW =256
MF_POPUP =16
MF_SEPARATOR =0x800
MF_STRING =0
MF_UNCHECKED =0
MF_DEFAULT =4096
MF_SYSMENU =0x2000
MF_HELP =0x4000
MF_END =128
MF_RIGHTJUSTIFY= 0x4000
MF_MOUSESELECT = 0x8000
MF_INSERT= 0
MF_CHANGE= 128
MF_APPEND= 256
MF_DELETE= 512
MF_REMOVE= 4096
MF_USECHECKBITMAPS= 512
MF_UNHILITE= 0
MF_HILITE= 128
MF_BYCOMMAND= 0
MF_BYPOSITION= 1024
MF_UNCHECKED= 0
MF_HILITE = 128
MF_UNHILITE = 0
LOCALE_SYSTEM_DEFAULT = 0x800
MFS_GRAYED = 0x00000003L
MFS_DISABLED = MFS_GRAYED
MFS_CHECKED = MF_CHECKED
MFS_HILITE = MF_HILITE
MFS_ENABLED = MF_ENABLED
MFS_UNCHECKED = MF_UNCHECKED
MFS_UNHILITE = MF_UNHILITE
MFS_DEFAULT = MF_DEFAULT
WS_BORDER = 0x800000
WS_CAPTION = 0xc00000
WS_CHILD = 0x40000000
WS_CHILDWINDOW = 0x40000000
WS_CLIPCHILDREN = 0x2000000
WS_CLIPSIBLINGS = 0x4000000
WS_DISABLED = 0x8000000
WS_DLGFRAME = 0x400000
WS_GROUP = 0x20000
WS_HSCROLL = 0x100000
WS_ICONIC = 0x20000000
WS_MAXIMIZE = 0x1000000
WS_MAXIMIZEBOX = 0x10000
WS_MINIMIZE = 0x20000000
WS_MINIMIZEBOX = 0x20000
WS_OVERLAPPED = 0
WS_OVERLAPPEDWINDOW = 0xcf0000
WS_POPUP = 0x80000000l
WS_POPUPWINDOW = 0x80880000
WS_SIZEBOX = 0x40000
WS_SYSMENU = 0x80000
WS_TABSTOP = 0x10000
WS_THICKFRAME = 0x40000
WS_TILED = 0
WS_TILEDWINDOW = 0xcf0000
WS_VISIBLE = 0x10000000
WS_VSCROLL = 0x200000
WS_EX_TOOLWINDOW = 128
WS_EX_LEFT = 0
WS_EX_LTRREADING = 0
WS_EX_RIGHTSCROLLBAR = 0
WS_EX_WINDOWEDGE = 256
WS_EX_STATICEDGE = 0x20000
WS_EX_CLIENTEDGE = 512
WS_EX_OVERLAPPEDWINDOW = 0x300
WS_EX_APPWINDOW = 0x40000
WA_INACTIVE = 0
WA_ACTIVE = 1
WA_CLICKACTIVE = 2
RB_SETBARINFO = WM_USER + 4
RB_GETBANDCOUNT = WM_USER + 12
RB_INSERTBANDA = WM_USER + 1
RB_INSERTBANDW = WM_USER + 10
RB_INSERTBAND = RB_INSERTBANDA
RBBIM_STYLE = 1
RBBIM_COLORS = 2
RBBIM_TEXT = 4
RBBIM_IMAGE = 8
RBBIM_CHILD = 16
RBBIM_CHILDSIZE = 32
RBBIM_SIZE = 64
RBBIM_BACKGROUND = 128
RBBIM_ID = 256
RBBIM_IDEALSIZE = 0x00000200
TPM_CENTERALIGN =4
TPM_LEFTALIGN =0
TPM_RIGHTALIGN= 8
TPM_LEFTBUTTON= 0
TPM_RIGHTBUTTON= 2
TPM_HORIZONTAL= 0
TPM_VERTICAL= 64
TPM_TOPALIGN= 0
TPM_VCENTERALIGN= 16
TPM_BOTTOMALIGN= 32
TPM_NONOTIFY= 128
TPM_RETURNCMD= 256
TBIF_TEXT = 0x00000002
DT_NOPREFIX = 0x00000800
DT_HIDEPREFIX = 1048576
WH_CBT = 5
WH_MSGFILTER = (-1)
I_IMAGENONE = -2
TBSTATE_ENABLED = 4
BTNS_SHOWTEXT = 0x00000040
CW_USEDEFAULT = 0x80000000
COLOR_3DFACE = 15
BF_LEFT = 1
BF_TOP = 2
BF_RIGHT = 4
BF_BOTTOM = 8
BDR_RAISEDOUTER = 1
BDR_SUNKENOUTER = 2
BDR_RAISEDINNER = 4
BDR_SUNKENINNER = 8
BDR_OUTER = 3
BDR_INNER = 0xc
BDR_RAISED = 5
BDR_SUNKEN = 10
EDGE_RAISED = (BDR_RAISEDOUTER|BDR_RAISEDINNER)
EDGE_SUNKEN = (BDR_SUNKENOUTER|BDR_SUNKENINNER)
EDGE_ETCHED = (BDR_SUNKENOUTER|BDR_RAISEDINNER)
EDGE_BUMP = (BDR_RAISEDOUTER|BDR_SUNKENINNER)
IDC_SIZENWSE = 32642
IDC_SIZENESW = 32643
IDC_SIZEWE = 32644
IDC_SIZENS = 32645
IDC_SIZEALL = 32646
IDC_SIZE = 32640
IDC_ARROW = 32512
TCIF_TEXT =1
TCIF_IMAGE =2
TCIF_RTLREADING= 4
TCIF_PARAM = 8
TCS_MULTILINE = 512
MK_LBUTTON = 1
MK_RBUTTON = 2
MK_SHIFT = 4
MK_CONTROL = 8
MK_MBUTTON = 16
ILC_COLOR = 0
ILC_COLOR4 = 4
ILC_COLOR8 = 8
ILC_COLOR16 = 16
ILC_COLOR24 = 24
ILC_COLOR32 = 32
ILC_COLORDDB = 254
ILC_MASK = 1
ILC_PALETTE = 2048
IMAGE_BITMAP = 0
IMAGE_ICON = 1
LR_LOADFROMFILE = 16
LR_VGACOLOR = 0x0080
LR_LOADMAP3DCOLORS = 4096
LR_LOADTRANSPARENT = 32
LVSIL_NORMAL = 0
LVSIL_SMALL = 1
LVSIL_STATE = 2
TVSIL_NORMAL = 0
TVSIL_STATE = 2
SRCCOPY = 0xCC0020
GWL_WNDPROC = -4
HWND_BOTTOM = 1
HWND_TOP=0
HWND_TOPMOST=-1
SWP_DRAWFRAME= 32
SWP_FRAMECHANGED= 32
SWP_HIDEWINDOW= 128
SWP_NOACTIVATE= 16
SWP_NOCOPYBITS= 256
SWP_NOMOVE= 2
SWP_NOSIZE= 1
SWP_NOREDRAW= 8
SWP_NOZORDER= 4
SWP_SHOWWINDOW= 64
SWP_NOOWNERZORDER =512
SWP_NOREPOSITION= 512
SWP_NOSENDCHANGING= 1024
SWP_DEFERERASE= 8192
SWP_ASYNCWINDOWPOS= 16384
DCX_WINDOW = 1
DCX_CACHE = 2
DCX_PARENTCLIP = 32
DCX_CLIPSIBLINGS= 16
DCX_CLIPCHILDREN= 8
DCX_NORESETATTRS= 4
DCX_LOCKWINDOWUPDATE= 0x400
DCX_EXCLUDERGN= 64
DCX_INTERSECTRGN =128
DCX_VALIDATE= 0x200000
GCL_STYLE = -26
SB_HORZ = 0
SB_VERT = 1
SB_CTL = 2
SB_BOTH = 3
SB_LINEUP =0
SB_LINELEFT =0
SB_LINEDOWN =1
SB_LINERIGHT =1
SB_PAGEUP =2
SB_PAGELEFT =2
SB_PAGEDOWN =3
SB_PAGERIGHT =3
SB_THUMBPOSITION =4
SB_THUMBTRACK =5
SB_TOP =6
SB_LEFT =6
SB_BOTTOM =7
SB_RIGHT =7
SB_ENDSCROLL =8
MB_OK = 0x00000000
MB_OKCANCEL = 0x00000001
MB_ABORTRETRYIGNORE = 0x00000002
MB_YESNOCANCEL = 0x00000003
MB_YESNO = 0x00000004
MB_RETRYCANCEL = 0x00000005
MB_ICONASTERISK = 64
MB_ICONEXCLAMATION= 0x30
MB_ICONWARNING= 0x30
MB_ICONERROR= 16
MB_ICONHAND= 16
MB_ICONQUESTION= 32
MB_ICONINFORMATION= 64
MB_ICONSTOP= 16
MB_ICONMASK= 240
IDOK = 1
IDCANCEL = 2
IDABORT = 3
IDRETRY = 4
IDIGNORE = 5
IDYES = 6
IDNO = 7
IDCLOSE = 8
IDHELP = 9
COLOR_3DDKSHADOW = 21
COLOR_3DFACE = 15
COLOR_3DHILIGHT = 20
COLOR_3DHIGHLIGHT= 20
COLOR_3DLIGHT= 22
COLOR_BTNHILIGHT= 20
COLOR_3DSHADOW= 16
COLOR_ACTIVEBORDER =10
COLOR_ACTIVECAPTION= 2
COLOR_APPWORKSPACE= 12
COLOR_BACKGROUND= 1
COLOR_DESKTOP= 1
COLOR_BTNFACE= 15
COLOR_BTNHIGHLIGHT= 20
COLOR_BTNSHADOW= 16
COLOR_BTNTEXT= 18
COLOR_CAPTIONTEXT= 9
COLOR_GRAYTEXT= 17
COLOR_HIGHLIGHT= 13
COLOR_HIGHLIGHTTEXT= 14
COLOR_INACTIVEBORDER= 11
COLOR_INACTIVECAPTION= 3
COLOR_INACTIVECAPTIONTEXT= 19
COLOR_INFOBK= 24
COLOR_INFOTEXT= 23
COLOR_MENU= 4
COLOR_MENUTEXT= 7
COLOR_SCROLLBAR= 0
COLOR_WINDOW= 5
COLOR_WINDOWFRAME= 6
COLOR_WINDOWTEXT= 8
CTLCOLOR_MSGBOX= 0
CTLCOLOR_EDIT= 1
CTLCOLOR_LISTBOX= 2
CTLCOLOR_BTN= 3
CTLCOLOR_DLG= 4
CTLCOLOR_SCROLLBAR= 5
CTLCOLOR_STATIC= 6
CTLCOLOR_MAX= 7
GMEM_FIXED = 0x0000
GMEM_MOVEABLE = 0x0002
GMEM_NOCOMPACT = 0x0010
GMEM_NODISCARD = 0x0020
GMEM_ZEROINIT = 0x0040
GMEM_MODIFY = 0x0080
GMEM_DISCARDABLE = 0x0100
GMEM_NOT_BANKED = 0x1000
GMEM_SHARE = 0x2000
GMEM_DDESHARE = 0x2000
GMEM_NOTIFY = 0x4000
GMEM_LOWER = GMEM_NOT_BANKED
GMEM_VALID_FLAGS = 0x7F72
GMEM_INVALID_HANDLE= 0x8000
RT_DIALOG = "5"
CF_TEXT = 1
BS_PUSHBUTTON = 0x00L
BS_DEFPUSHBUTTON = 0x01L
BS_GROUPBOX = 0x7
PUSHBUTTON = 0x80
EDITTEXT = 0x81
LTEXT = 0x82
LISTBOX = 0x83
SCROLLBAR = 0x84
COMBOXBOX = 0x85
ES_MULTILINE = 4
ES_AUTOVSCROLL = 0x40L
ES_AUTOHSCROLL = 0x80L
ES_READONLY = 0x800
CP_ACP = 0
DS_SETFONT = 0x40
DS_MODALFRAME = 0x80
SYNCHRONIZE = (0x00100000L)
STANDARD_RIGHTS_REQUIRED = (0x000F0000L)
EVENT_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED|SYNCHRONIZE|0x3)
MAX_PATH = 260
def GET_XY_LPARAM(lParam):
    """Unpack a 32-bit lParam into signed 16-bit (x, y) coordinates.

    The low word is x, the high word is y, and each word is a signed
    16-bit value: words >= 0x8000 encode negative coordinates.

    Bug fix: the original comparison was ``> 32768``, which mis-decoded
    the word 0x8000 (i.e. -32768) as +32768; the boundary value must be
    sign-extended too, hence ``>= 0x8000``.
    """
    x = lParam & 0xFFFF
    if x >= 0x8000:
        x -= 0x10000
    y = (lParam >> 16) & 0xFFFF
    if y >= 0x8000:
        y -= 0x10000
    return x, y
def GET_POINT_LPARAM(lParam):
    """Decode an lParam into a POINT with signed x/y coordinates."""
    px, py = GET_XY_LPARAM(lParam)
    return POINT(px, py)
FVIRTKEY = 0x01
FNOINVERT = 0x02
FSHIFT = 0x04
FCONTROL = 0x08
FALT = 0x10
def ValidHandle(value):
    """errcheck/restype helper: raise WinError on a NULL handle,
    otherwise pass the handle through unchanged."""
    if value == 0:
        raise WinError()
    return value
def Fail(value):
    """errcheck/restype helper: raise WinError when the API signalled
    failure with -1, otherwise pass the value through unchanged."""
    if value == -1:
        raise WinError()
    return value
GetModuleHandle = windll.kernel32.GetModuleHandleA
GetCurrentProcess = windll.kernel32.GetCurrentProcess
GetCurrentProcessId = windll.kernel32.GetCurrentProcessId
PostQuitMessage= windll.user32.PostQuitMessage
DefWindowProc = windll.user32.DefWindowProcA
CallWindowProc = windll.user32.CallWindowProcA
GetDCEx = windll.user32.GetDCEx
GetDC = windll.user32.GetDC
ReleaseDC = windll.user32.ReleaseDC
LoadIcon = windll.user32.LoadIconA
DestroyIcon = windll.user32.DestroyIcon
LoadCursor = windll.user32.LoadCursorA
LoadCursor.restype = ValidHandle
LoadImage = windll.user32.LoadImageA
LoadImage.restype = ValidHandle
RegisterClassEx = windll.user32.RegisterClassExA
SetCursor = windll.user32.SetCursor
CreateWindowEx = windll.user32.CreateWindowExA
CreateWindowEx.restype = ValidHandle
ShowWindow = windll.user32.ShowWindow
UpdateWindow = windll.user32.UpdateWindow
GetMessage = windll.user32.GetMessageA
TranslateMessage = windll.user32.TranslateMessage
DispatchMessage = windll.user32.DispatchMessageA
GetWindowRect = windll.user32.GetWindowRect
MoveWindow = windll.user32.MoveWindow
DestroyWindow = windll.user32.DestroyWindow
CloseWindow = windll.user32.CloseWindow
CreateMenu = windll.user32.CreateMenu
CreatePopupMenu = windll.user32.CreatePopupMenu
DestroyMenu = windll.user32.DestroyMenu
AppendMenu = windll.user32.AppendMenuA
EnableMenuItem = windll.user32.EnableMenuItem
SendMessage = windll.user32.SendMessageA
PostMessage = windll.user32.PostMessageA
GetClientRect = windll.user32.GetClientRect
GetWindowRect = windll.user32.GetWindowRect
IsDialogMessage = windll.user32.IsDialogMessage
RegisterWindowMessage = windll.user32.RegisterWindowMessageA
GetParent = windll.user32.GetParent
SetWindowLong = windll.user32.SetWindowLongA
SetClassLong = windll.user32.SetClassLongA
GetClassLong = windll.user32.GetClassLongA
SetWindowPos = windll.user32.SetWindowPos
InvalidateRect = windll.user32.InvalidateRect
BeginPaint = windll.user32.BeginPaint
EndPaint = windll.user32.EndPaint
SetCapture = windll.user32.SetCapture
GetCapture = windll.user32.GetCapture
ReleaseCapture = windll.user32.ReleaseCapture
ScreenToClient = windll.user32.ScreenToClient
ClientToScreen = windll.user32.ClientToScreen
GetMessagePos = windll.user32.GetMessagePos
BeginDeferWindowPos = windll.user32.BeginDeferWindowPos
DeferWindowPos = windll.user32.DeferWindowPos
EndDeferWindowPos = windll.user32.EndDeferWindowPos
CreateAcceleratorTable = windll.user32.CreateAcceleratorTableA
DestroyAcceleratorTable = windll.user32.DestroyAcceleratorTable
TranslateAccelerator = windll.user32.TranslateAccelerator
ExpandEnvironmentStrings = windll.kernel32.ExpandEnvironmentStringsA
GetModuleHandle = windll.kernel32.GetModuleHandleA
GetModuleHandle.restype = ValidHandle
LoadLibrary = windll.kernel32.LoadLibraryA
LoadLibrary.restype = ValidHandle
FindResource = windll.kernel32.FindResourceA
FindResource.restype = ValidHandle
FindWindow = windll.user32.FindWindowA
GetForegroundWindow = windll.user32.GetForegroundWindow
ChildWindowFromPoint = windll.user32.ChildWindowFromPoint
TrackPopupMenuEx = windll.user32.TrackPopupMenuEx
GetMenuItemCount = windll.user32.GetMenuItemCount
GetMenuItemCount.restype = Fail
GetMenuItemInfo = windll.user32.GetMenuItemInfoA
GetMenuItemInfo.restype = ValidHandle
GetSubMenu = windll.user32.GetSubMenu
SetMenuItemInfo = windll.user32.SetMenuItemInfoA
SetWindowsHookEx = windll.user32.SetWindowsHookExA
CallNextHookEx = windll.user32.CallNextHookEx
UnhookWindowsHookEx = windll.user32.UnhookWindowsHookEx
GetCurrentThreadId = windll.kernel32.GetCurrentThreadId
GetModuleFileName = windll.kernel32.GetModuleFileNameA
GetTempPath = windll.kernel32.GetTempPathA
MessageBox = windll.user32.MessageBoxA
SetWindowText = windll.user32.SetWindowTextA
GetFocus = windll.user32.GetFocus
GlobalAlloc = windll.kernel32.GlobalAlloc
GlobalLock = windll.kernel32.GlobalLock
GlobalUnlock = windll.kernel32.GlobalUnlock
GlobalFree = windll.kernel32.GlobalFree
OpenClipboard = windll.user32.OpenClipboard
EmptyClipboard = windll.user32.EmptyClipboard
SetClipboardData = windll.user32.SetClipboardData
GetClipboardData = windll.user32.GetClipboardData
RegisterClipboardFormat = windll.user32.RegisterClipboardFormatA
CloseClipboard = windll.user32.CloseClipboard
EnumClipboardFormats = windll.user32.EnumClipboardFormats
IsClipboardFormatAvailable = windll.user32.IsClipboardFormatAvailable
DialogBoxParam = windll.user32.DialogBoxParamA
GetDlgItem = windll.user32.GetDlgItem
GetClassName = windll.user32.GetClassNameA
EndDialog = windll.user32.EndDialog
ShowScrollBar = windll.user32.ShowScrollBar
GetDesktopWindow = windll.user32.GetDesktopWindow
SetFocus = windll.user32.SetFocus
MultiByteToWideChar = windll.kernel32.MultiByteToWideChar
CreateDialogIndirectParam = windll.user32.CreateDialogIndirectParamA
DialogBoxIndirectParam = windll.user32.DialogBoxIndirectParamA
EnumChildWindows = windll.user32.EnumChildWindows
GetMenu = windll.user32.GetMenu
SetTimer = windll.user32.SetTimer
KillTimer = windll.user32.KillTimer
IsWindowVisible = windll.user32.IsWindowVisible
IsIconic = windll.user32.IsIconic
GetCursorPos = windll.user32.GetCursorPos
SetForegroundWindow = windll.user32.SetForegroundWindow
SetMenuDefaultItem = windll.user32.SetMenuDefaultItem
GetClassInfo = windll.user32.GetClassInfoA
OpenEvent = windll.kernel32.OpenEventA
CreateEvent = windll.kernel32.CreateEventA
LockWindowUpdate = windll.user32.LockWindowUpdate
| toymachine/venster | venster/windows.py | Python | mit | 23,583 |
#!/usr/bin/env python
import sys
import time
from timerasp import gmail
if __name__ == '__main__':
    # Wait five minutes before sending -- presumably to let the network
    # come up when run at boot time (TODO confirm against the caller).
    time.sleep(300)
    # Subject carries the IP address(es) passed on the command line.
    gmail.send_email('notify.py: IP:{}'.format(' '.join(sys.argv[1:])),
                     'Notify was run and is telling you something')
| ajmendez/timerasp | bin/notify.py | Python | mit | 261 |
# -*- coding: utf-8 -*-
# Actualiza el valor de los hedge cue discontinuos como either..or
# están marcados ambos como B-SPECCUE, pero si aparecen en el mismo scope
# tengo que cambiar al segundo para que diga D-SPECCUE
# lo hago directamente en las tablas
# Uso: python update_discontinuous_hc.py working_dir
import pln_inco.bioscope as bioscope
import pln_inco.bioscope.util
import os.path
import time
import sys
import yaml
import sqlite3
working_dir=sys.argv[1]
dbname=os.path.join(working_dir,'bioscope.db')
script_name=os.path.join(sys.path[0],sys.argv[0])
# Inicializo la conexión
conn= sqlite3.connect(dbname)
conn.text_factory = str
conn.row_factory=sqlite3.Row
c=conn.cursor()
c1=conn.cursor()
# Obtengo todos los tokens del corpus de entrenamiento que forman parte de un mismo scope con otro B-SPECCUE
# y son B-SPECCUE, y les cambio la etiqueta
c.execute('select * from bioscope_train b1 where hedge_cue1=\'B-SPECCUE\' and exists (select * from bioscope_train b2 where b2.document_id=b1.document_id and b2.sentence_id=b1.sentence_id and b2.hedge_cue1=\'B-SPECCUE\' and b2.token_num<b1.token_num and not exists (select * from bioscope_train b4 where b4.document_id=b1.document_id and b4.sentence_id=b1.sentence_id and b4.token_num>=b2.token_num and b4.token_num<=b1.token_num and b4.hedge_scope1=\'O\') and 1>=(select count(*) from bioscope_train b3 where b3.document_id=b1.document_id and b3.sentence_id=b1.sentence_id and b3.token_num>=b2.token_num and b3.token_num<=b1.token_num and hedge_scope1=\'B-SPECXCOPE\'))')
for row in c:
print "Actualizo nivel 1...",[row['document_id'],row['sentence_id'],row['token_num']]
c1.execute('update bioscope_train set hedge_cue1=\'D-SPECCUE\', hedge_cue=\'D-SPECCUE\' where document_id=? and sentence_id=? and token_num=?',[row['document_id'],row['sentence_id'],row['token_num']])
# --- Training set: mark discontinuous hedge cues, levels 2 and 3 ---
# A token tagged "B-SPECCUE" is re-tagged "D-SPECCUE" (discontinuous cue)
# when: an earlier B-SPECCUE exists in the same document/sentence at that
# annotation level, no token between the two cues falls outside a hedge
# scope (no 'O' scope tag at that level), and the span between them opens
# at most one scope ("B-SPECXCOPE" count <= 1).
# NOTE(review): cursors c/c1 and connection conn are created earlier in the
# script (outside this excerpt); level 1 of the training table was handled
# just above.
c.execute('select * from bioscope_train b1 where hedge_cue2=\'B-SPECCUE\' and exists (select * from bioscope_train b2 where b2.document_id=b1.document_id and b2.sentence_id=b1.sentence_id and b2.hedge_cue2=\'B-SPECCUE\' and b2.token_num<b1.token_num and not exists (select * from bioscope_train b4 where b4.document_id=b1.document_id and b4.sentence_id=b1.sentence_id and b4.token_num>=b2.token_num and b4.token_num<=b1.token_num and b4.hedge_scope2=\'O\') and 1>=(select count(*) from bioscope_train b3 where b3.document_id=b1.document_id and b3.sentence_id=b1.sentence_id and b3.token_num>=b2.token_num and b3.token_num<=b1.token_num and hedge_scope2=\'B-SPECXCOPE\'))')
for row in c:
    print "Actualizo nivel 2...",[row['document_id'],row['sentence_id'],row['token_num']]
    c1.execute('update bioscope_train set hedge_cue2=\'D-SPECCUE\', hedge_cue=\'D-SPECCUE\' where document_id=? and sentence_id=? and token_num=?',[row['document_id'],row['sentence_id'],row['token_num']])
c.execute('select * from bioscope_train b1 where hedge_cue3=\'B-SPECCUE\' and exists (select * from bioscope_train b2 where b2.document_id=b1.document_id and b2.sentence_id=b1.sentence_id and b2.hedge_cue3=\'B-SPECCUE\' and b2.token_num<b1.token_num and not exists (select * from bioscope_train b4 where b4.document_id=b1.document_id and b4.sentence_id=b1.sentence_id and b4.token_num>=b2.token_num and b4.token_num<=b1.token_num and b4.hedge_scope3=\'O\') and 1>=(select count(*) from bioscope_train b3 where b3.document_id=b1.document_id and b3.sentence_id=b1.sentence_id and b3.token_num>=b2.token_num and b3.token_num<=b1.token_num and hedge_scope3=\'B-SPECXCOPE\'))')
for row in c:
    print "Actualizo nivel 3...",[row['document_id'],row['sentence_id'],row['token_num']]
    c1.execute('update bioscope_train set hedge_cue3=\'D-SPECCUE\', hedge_cue=\'D-SPECCUE\' where document_id=? and sentence_id=? and token_num=?',[row['document_id'],row['sentence_id'],row['token_num']])
# Do the same for the evaluation (test) tables, levels 1..3.
c.execute('select * from bioscope_test b1 where hedge_cue1=\'B-SPECCUE\' and exists (select * from bioscope_test b2 where b2.document_id=b1.document_id and b2.sentence_id=b1.sentence_id and b2.hedge_cue1=\'B-SPECCUE\' and b2.token_num<b1.token_num and not exists (select * from bioscope_test b4 where b4.document_id=b1.document_id and b4.sentence_id=b1.sentence_id and b4.token_num>=b2.token_num and b4.token_num<=b1.token_num and b4.hedge_scope1=\'O\') and 1>=(select count(*) from bioscope_test b3 where b3.document_id=b1.document_id and b3.sentence_id=b1.sentence_id and b3.token_num>=b2.token_num and b3.token_num<=b1.token_num and hedge_scope1=\'B-SPECXCOPE\'))')
for row in c:
    print "Actualizo nivel 1...",[row['document_id'],row['sentence_id'],row['token_num']]
    c1.execute('update bioscope_test set hedge_cue1=\'D-SPECCUE\', hedge_cue=\'D-SPECCUE\' where document_id=? and sentence_id=? and token_num=?',[row['document_id'],row['sentence_id'],row['token_num']])
c.execute('select * from bioscope_test b1 where hedge_cue2=\'B-SPECCUE\' and exists (select * from bioscope_test b2 where b2.document_id=b1.document_id and b2.sentence_id=b1.sentence_id and b2.hedge_cue2=\'B-SPECCUE\' and b2.token_num<b1.token_num and not exists (select * from bioscope_test b4 where b4.document_id=b1.document_id and b4.sentence_id=b1.sentence_id and b4.token_num>=b2.token_num and b4.token_num<=b1.token_num and b4.hedge_scope2=\'O\') and 1>=(select count(*) from bioscope_test b3 where b3.document_id=b1.document_id and b3.sentence_id=b1.sentence_id and b3.token_num>=b2.token_num and b3.token_num<=b1.token_num and hedge_scope2=\'B-SPECXCOPE\'))')
for row in c:
    print "Actualizo nivel 2...",[row['document_id'],row['sentence_id'],row['token_num']]
    c1.execute('update bioscope_test set hedge_cue2=\'D-SPECCUE\', hedge_cue=\'D-SPECCUE\' where document_id=? and sentence_id=? and token_num=?',[row['document_id'],row['sentence_id'],row['token_num']])
c.execute('select * from bioscope_test b1 where hedge_cue3=\'B-SPECCUE\' and exists (select * from bioscope_test b2 where b2.document_id=b1.document_id and b2.sentence_id=b1.sentence_id and b2.hedge_cue3=\'B-SPECCUE\' and b2.token_num<b1.token_num and not exists (select * from bioscope_test b4 where b4.document_id=b1.document_id and b4.sentence_id=b1.sentence_id and b4.token_num>=b2.token_num and b4.token_num<=b1.token_num and b4.hedge_scope3=\'O\') and 1>=(select count(*) from bioscope_test b3 where b3.document_id=b1.document_id and b3.sentence_id=b1.sentence_id and b3.token_num>=b2.token_num and b3.token_num<=b1.token_num and hedge_scope3=\'B-SPECXCOPE\'))')
for row in c:
    print "Actualizo nivel 3...",[row['document_id'],row['sentence_id'],row['token_num']]
    c1.execute('update bioscope_test set hedge_cue3=\'D-SPECCUE\', hedge_cue=\'D-SPECCUE\' where document_id=? and sentence_id=? and token_num=?',[row['document_id'],row['sentence_id'],row['token_num']])
# Persist all updates in one transaction.
conn.commit()
| gmonce/bioscope | thesis/src/scripts/update_discontinuous_hc.py | Python | mit | 6,790 |
# -*- coding: utf-8 -*-
# MIT license
#
# Copyright (C) 2018 by XESS Corporation / Hildo Guillardi Júnior
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__author__ = 'XESS Corporation'
__email__ = 'info@xess.com'
from .distributor import distributor_class
# Export the ORDER_COL_USERFIELDS content
from .distributors_info import ORDER_COL_USERFIELDS # noqa: F401
# Import and register here the API / local / scrape modules.
from .dist_local_template import dist_local_template # noqa: F401
from .api_octopart import api_octopart # noqa: F401
from .api_partinfo_kitspace import api_partinfo_kitspace # noqa: F401
#
# Some wrappers
#
def init_distributor_dict():
    ''' (Re)initialize the global distributor dict from the registered API
        modules (delegates to distributor_class.init_dist_dict). '''
    distributor_class.init_dist_dict()


def get_dist_parts_info(parts, dist_list, currency):
    ''' Delegate to distributor_class.get_dist_parts_info to fill in
        distributor data for ``parts`` restricted to ``dist_list``,
        in the given ``currency``. '''
    distributor_class.get_dist_parts_info(parts, dist_list, currency)


def get_registered_apis():
    ''' Return the list of API modules registered with distributor_class. '''
    return distributor_class.registered
def get_distributors_list():
    ''' List of distributors registered by the API modules. '''
    return list(distributor_class.get_distributors_iter())


def get_distributors_iter():
    ''' Iterator for the distributors registered by the API modules. '''
    return distributor_class.get_distributors_iter()


def get_distributor_info(name):
    ''' Gets all the information about a supported distributor.
        This information comes from the list collected from the APIs, not
        from the fixed template. '''
    return distributor_class.get_distributor_info(name)


def get_dist_name_from_label(label):
    ''' Returns the internal distributor name for a provided label.
        Lookup is case-insensitive; returns None for unknown labels. '''
    return distributor_class.label2name.get(label.lower())
def set_distributors_logger(logger):
    ''' Sets the logger used by the distributor_class (shared by all APIs). '''
    distributor_class.logger = logger


def set_distributors_progress(cls):
    ''' Configures the class used to indicate progress during downloads. '''
    distributor_class.progress = cls


def set_api_options(api, **kwargs):
    ''' Configure an API (by name); options are forwarded as keywords. '''
    distributor_class.set_api_options(api, **kwargs)


def set_api_status(api, enabled):
    ''' Enable/Disable a particular API (by name). '''
    distributor_class.set_api_status(api, enabled)


def get_api_status(api):
    ''' Find if an API (by name) is currently enabled. '''
    return distributor_class.get_api_status(api)
# Init distributor dict during import so the module is usable immediately
# after ``import`` without an explicit setup call.
init_distributor_dict()
| xesscorp/KiCost | kicost/distributors/__init__.py | Python | mit | 3,347 |
import os
import logging
import boto3
import mimetypes
from datetime import datetime
from morphium.util import env, TAG_LATEST
log = logging.getLogger(__name__)
config = {}
class Archive(object):
    """A scraper archive on S3.

    Used once a scraper has generated a file that must be backed up to a
    bucket: each upload is stored under a date-tagged key and mirrored to a
    'latest' key so consumers have a stable URL.
    """

    def __init__(self, bucket=None, prefix=None):
        self.tag = datetime.utcnow().date().isoformat()
        self.bucket = bucket or env('aws_bucket')
        self.prefix = prefix or 'data'

    @property
    def client(self):
        """Lazily constructed boto3 S3 client.

        The result (possibly None when configuration is missing) is cached on
        first access, so the environment is only inspected once.
        """
        if not hasattr(self, '_client'):
            self._client = self._build_client()
        return self._client

    def _build_client(self):
        # Any missing setting aborts with a warning instead of raising, so a
        # scraper can still run without the upload side effect.
        if self.bucket is None:
            log.warning("No $AWS_BUCKET, skipping upload.")
            return None
        access_key = env('aws_access_key_id')
        if access_key is None:
            log.warning("No $AWS_ACCESS_KEY_ID, skipping upload.")
            return None
        secret_key = env('aws_secret_access_key')
        if secret_key is None:
            log.warning("No $AWS_SECRET_ACCESS_KEY, skipping upload.")
            return None
        session = boto3.Session(aws_access_key_id=access_key,
                                aws_secret_access_key=secret_key)
        return session.client('s3')

    def upload_file(self, source_path, file_name=None, mime_type=None):
        """Upload a file to the given bucket and return its public URL
        (None when no client is configured)."""
        if self.client is None:
            return
        if file_name is None:
            file_name = os.path.basename(source_path)
        if mime_type is None:
            guessed, _encoding = mimetypes.guess_type(file_name)
            mime_type = guessed or 'application/octet-stream'
        object_key = os.path.join(self.prefix, self.tag, file_name)
        log.info("Uploading [%s]: %s", self.bucket, object_key)
        extra_args = {
            'ContentType': mime_type,
            'ACL': 'public-read',
        }
        self.client.upload_file(source_path, self.bucket, object_key,
                                ExtraArgs=extra_args)
        # Mirror the dated object to the 'latest' alias.
        latest_key = os.path.join(self.prefix, TAG_LATEST, file_name)
        source_ref = {'Key': object_key, 'Bucket': self.bucket}
        self.client.copy(source_ref, self.bucket, latest_key,
                         ExtraArgs=extra_args)
        return 'http://%s/%s' % (self.bucket, object_key)
| pudo/morphium | morphium/archive.py | Python | mit | 2,477 |
# Copyright (c) 2017 Nick Gashkov
#
# Distributed under MIT License. See LICENSE file for details.
class ValidationError(Exception):
    """Raised when user-supplied data fails validation.

    Carries an ``error_dict`` (mapping of field name to error message) so
    callers can report all failures at once via the ``error_dict`` attribute.
    """

    def __init__(self, *args, **kwargs):
        # Pop before delegating: Exception.__init__ would reject the extra
        # keyword.  Default to None instead of raising an obscure KeyError
        # when no structured errors are supplied (backward compatible:
        # callers that pass error_dict behave exactly as before).
        self.error_dict = kwargs.pop('error_dict', None)
        super(ValidationError, self).__init__(*args, **kwargs)
| nickgashkov/virtualspace | virtualspace/utils/exceptions.py | Python | mit | 290 |
# -*- coding: utf-8 -*-
# Copyright (C) 2007-2018, Raffaele Salmaso <raffaele@salmaso.org>
# Copyright (c) 2012 Omoto Kenji
# Copyright (c) 2011 Sam Stephenson
# Copyright (c) 2011 Josh Peek
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals
import io
import json
import re
import os
from subprocess import Popen, PIPE, STDOUT
import tempfile
from .exceptions import RuntimeError, ProgramError, RuntimeUnavailable
from .utils import json2_source, which
def encode_unicode_codepoints(str):
    r"""Replace every non-ASCII character with its ``\uXXXX`` escape.

    ASCII characters (0x00-0x7f) pass through unchanged.

    >>> encode_unicode_codepoints("a") == 'a'
    True
    >>> ascii = ''.join(chr(i) for i in range(0x80))
    >>> encode_unicode_codepoints(ascii) == ascii
    True
    >>> encode_unicode_codepoints('\u4e16\u754c') == '\\u4e16\\u754c'
    True
    """
    def escape(match):
        return '\\u{0:04x}'.format(ord(match.group(0)))
    return re.sub('[^\x00-\x7f]', escape, str)
class Runtime(object):
    """Executes JavaScript by spawning an external interpreter process.

    ``command`` is the interpreter invocation (string or argv list) and
    ``runner_source`` is a JS harness template whose ``#{...}`` placeholders
    are filled in by Context._compile before execution.
    """

    def __init__(self, name, command, runner_source, encoding='utf8'):
        self._name = name
        if isinstance(command, str):
            # Normalize a bare command string into an argv list.
            command = [command]
        self._command = command
        self._runner_source = runner_source
        self._encoding = encoding

    def __str__(self):
        return "{class_name}({runtime_name})".format(
            class_name=type(self).__name__,
            runtime_name=self._name,
        )

    @property
    def name(self):
        # Human-readable runtime name (e.g. the interpreter's name).
        return self._name

    def exec_(self, source):
        """Run ``source`` in a fresh context; raises RuntimeUnavailable when
        the interpreter binary cannot be found."""
        if not self.is_available():
            raise RuntimeUnavailable()
        return self.Context(self).exec_(source)

    def eval(self, source):
        """Evaluate ``source`` as an expression in a fresh context."""
        if not self.is_available():
            raise RuntimeUnavailable()
        return self.Context(self).eval(source)

    def compile(self, source):
        """Return a Context preloaded with ``source`` for later eval/call."""
        if not self.is_available():
            raise RuntimeUnavailable()
        return self.Context(self, source)

    def is_available(self):
        """True when the interpreter binary is on the PATH."""
        return self._binary() is not None

    def runner_source(self):
        return self._runner_source

    def _binary(self):
        """protected: resolved argv for the interpreter, cached after the
        first PATH lookup (None when not found)."""
        if not hasattr(self, "_binary_cache"):
            self._binary_cache = which(self._command)
        return self._binary_cache

    def _execfile(self, filename):
        """protected: run ``filename`` with the interpreter, returning its
        combined stdout/stderr bytes; raises RuntimeError on non-zero exit."""
        cmd = self._binary() + [filename]

        p = None
        try:
            p = Popen(cmd, stdout=PIPE, stderr=STDOUT)
            stdoutdata, stderrdata = p.communicate()
            ret = p.wait()
        finally:
            del p

        if ret == 0:
            return stdoutdata
        else:
            raise RuntimeError(stdoutdata)

    class Context(object):
        """One execution context: optional preloaded source plus helpers to
        eval expressions or call functions inside it."""

        def __init__(self, runtime, source=''):
            self._runtime = runtime
            self._source = source

        def eval(self, source):
            # Wrap the expression in parentheses (via the JS-side eval) so
            # object literals evaluate as expressions, not blocks.
            if not source.strip():
                data = "''"
            else:
                data = "'('+" + json.dumps(source, ensure_ascii=True) + "+')'"

            code = 'return eval({data})'.format(data=data)
            return self.exec_(code)

        def exec_(self, source):
            if self._source:
                # Preloaded context source runs before the new code.
                source = self._source + '\n' + source

            # Write the compiled harness to a temp file and execute it.
            (fd, filename) = tempfile.mkstemp(prefix='babeljs', suffix='.js')
            os.close(fd)
            try:
                with io.open(filename, "w+", encoding=self._runtime._encoding) as fp:
                    fp.write(self._compile(source))
                output = self._runtime._execfile(filename)
            finally:
                os.remove(filename)

            output = output.decode(self._runtime._encoding)
            output = output.replace("\r\n", "\n").replace("\r", "\n")
            # The runner prints its JSON status/result on the last line
            # (before the trailing newline).
            output = self._extract_result(output.split("\n")[-2])

            return output

        def call(self, identifier, *args):
            """Call JS function ``identifier`` with JSON-serializable args."""
            args = json.dumps(args)
            return self.eval("{identifier}.apply(this, {args})".format(identifier=identifier, args=args))

        def _compile(self, source):
            """protected: substitute the runner template's placeholders with
            the user source (plain and codepoint-escaped) and json2 shim."""
            runner_source = self._runtime.runner_source()
            replacements = {
                '#{source}': lambda: source,
                '#{encoded_source}': lambda: json.dumps(
                    "(function(){ " +
                    encode_unicode_codepoints(source) +
                    " })()"
                ),
                '#{json2_source}': json2_source,
            }

            pattern = "|".join(re.escape(k) for k in replacements)

            runner_source = re.sub(pattern, lambda m: replacements[m.group(0)](), runner_source)

            return runner_source

        def _extract_result(self, output_last_line):
            """protected: parse the runner's ["ok", value] / ["err", msg]
            JSON status line; raise RuntimeError for JS syntax errors and
            ProgramError for other failures."""
            if not output_last_line:
                status = value = None
            else:
                ret = json.loads(output_last_line)
                if len(ret) == 1:
                    ret = [ret[0], None]
                status, value = ret

            if status == "ok":
                return value
            elif value and value.startswith('SyntaxError:'):
                raise RuntimeError(value)
            else:
                raise ProgramError(value)
class PyV8Runtime(object):
    """In-process JavaScript runtime backed by the PyV8 bindings (no child
    process); available only when the PyV8 package can be imported."""

    def __init__(self):
        try:
            import PyV8
        except ImportError:
            self._is_available = False
        else:
            self._is_available = True

    @property
    def name(self):
        return "PyV8"

    def exec_(self, source):
        return self.Context().exec_(source)

    def eval(self, source):
        return self.Context().eval(source)

    def compile(self, source):
        return self.Context(source)

    def is_available(self):
        return self._is_available

    class Context:
        """Execution context mirroring Runtime.Context, but evaluated with
        the embedded V8 engine instead of an external interpreter."""

        def __init__(self, source=""):
            self._source = source

        def exec_(self, source):
            # Wrap preloaded + new source in an IIFE so 'return' is legal.
            source = '''\
        (function() {{
            {0};
            {1};
        }})()'''.format(
                encode_unicode_codepoints(self._source),
                encode_unicode_codepoints(source)
            )
            source = str(source)

            import PyV8
            import contextlib
            #backward compatibility
            with contextlib.nested(PyV8.JSContext(), PyV8.JSEngine()) as (ctxt, engine):
                js_errors = (PyV8.JSError, IndexError, ReferenceError, SyntaxError, TypeError)
                try:
                    script = engine.compile(source)
                except js_errors as e:
                    # Compilation problem -> RuntimeError (parity with the
                    # subprocess-based Runtime).
                    raise RuntimeError(e)
                try:
                    value = script.run()
                except js_errors as e:
                    # Execution problem -> ProgramError.
                    raise ProgramError(e)
                return self.convert(value)

        def eval(self, source):
            return self.exec_('return ' + encode_unicode_codepoints(source))

        def call(self, identifier, *args):
            args = json.dumps(args)
            return self.eval("{identifier}.apply(this, {args})".format(identifier=identifier, args=args))

        @classmethod
        def convert(cls, obj):
            """Recursively convert PyV8 JS values to plain Python values;
            functions become None and are dropped from objects."""
            from PyV8 import _PyV8
            if isinstance(obj, bytes):
                return obj.decode('utf8')
            if isinstance(obj, _PyV8.JSArray):
                return [cls.convert(v) for v in obj]
            elif isinstance(obj, _PyV8.JSFunction):
                return None
            elif isinstance(obj, _PyV8.JSObject):
                ret = {}
                for k in obj.keys():
                    v = cls.convert(obj[k])
                    if v is not None:
                        ret[cls.convert(k)] = v
                return ret
            else:
                return obj
| rsalmaso/django-babeljs | babeljs/execjs/runtime.py | Python | mit | 8,806 |
import os
import asyncio
import logging
from functools import partial
from collections import deque
from lxml import etree
from . import stanzas
from .stanzas import Iq
from .parser import Parser
from .utils import signalEvent
from .utils import benchmark as timedWait
from . import getLogger
log = getLogger(__name__)
if "VEX_TIMED_WAITS" in os.environ and int(os.environ["VEX_TIMED_WAITS"]):
from .metrics import ValueMetric
stream_wait_met = ValueMetric("stream:wait_time", type_=float)
else:
stream_wait_met = None
_ENFORCE_TIMEOUTS = bool("VEX_ENFORCE_TIMEOUTS" in os.environ and
int(os.environ["VEX_ENFORCE_TIMEOUTS"]))
class QueuedStanza:
    """Pairs a received stanza with the set of waiter tasks that have
    already examined it, so each waiter sees a given stanza only once."""

    def __init__(self, s):
        self.stanza = s
        self.task_set = set()
class ParserTask(asyncio.Task):
    """Background task that feeds raw network bytes into the XML parser and
    dispatches each completed stanza to its owning Stream."""

    def __init__(self, stream, loop=None):
        # NOTE(review): the coroutine object is created before _parser and
        # _data_queue are assigned; safe because its body does not run until
        # the loop schedules the task.
        super().__init__(self._run(), loop=loop)
        self._parser = Parser()
        self._data_queue = asyncio.Queue()
        self._stream = stream

    def parse(self, bytes_):
        # Non-blocking hand-off from the protocol's data_received.
        self._data_queue.put_nowait(bytes_)

    def reset(self):
        self._parser.reset()

    async def _run(self):
        while True:
            try:
                data = await self._data_queue.get()
                elems = self._parser.parse(data)
                for e in elems:
                    stanza = stanzas.makeStanza(e)
                    # logging.VERBOSE is a custom level registered elsewhere
                    # in the package.
                    if log.getEffectiveLevel() <= logging.VERBOSE:
                        log.verbose("[STANZA IN]:\n%s" %
                                    stanza.toXml(pprint=True).decode("utf-8"))
                    await self._stream._handleStanza(stanza)
            except asyncio.CancelledError:
                # NOTE(review): swallowing CancelledError keeps the loop
                # running after cancel() — presumably intentional to drain,
                # but worth confirming; a cancelled task normally exits.
                pass
            except Exception as ex:
                log.exception(ex)
class Stream(asyncio.Protocol):
    """Base class for XMPP streams.

    Combines the asyncio transport protocol with stanza send/receive,
    xpath-based waiting, and a mixin hook pipeline.
    """

    def __init__(self, creds, state_callbacks=None, mixins=None,
                 default_timeout=None):
        self.creds = creds
        self._transport = None
        self._waiter_futures = []
        self._tls_active = False
        self._callbacks = state_callbacks
        self._mixins = mixins or []
        for mixin in self._mixins:
            # Mixins may export (name, obj) pairs into this stream's
            # namespace; duplicate names across mixins are an error.
            for name, obj in mixin._exports:
                if name in self.__dict__:
                    raise ValueError("Mixin '%s' exports ambiguous "
                                     "data named '%s'" % (str(mixin), name))
                else:
                    # Add the symbol to the stream's namespace
                    self.__dict__[name] = obj

        self._parser_task = ParserTask(self)
        self.default_timeout = default_timeout

        # Stream errors
        self.error = None

        # Ring buffer of recent stanzas consulted by wait(); old entries
        # fall off automatically (maxlen).
        self._stanza_queue = deque(maxlen=10)

    @property
    def connected(self):
        """True while a usable transport exists (SSL transports expose a
        _closing flag that is also checked)."""
        if not self._transport:
            return False
        else:
            if (getattr(self._transport, "_closing") and
                    self._transport._closing):
                # SSL transport
                return False
            return True

    @property
    def tls_active(self):
        # True after a successful STARTTLS upgrade.
        return self._tls_active

    @property
    def jid(self):
        return self.creds.jid

    def close(self):
        """Send the closing stream tag (when connected), close the transport
        and stop the parser task."""
        if self.connected:
            self.send(b"</stream:stream>")
            self._transport.close()
        self._parser_task.cancel()

    def send(self, data):
        """Send ``data`` which can be a vexmpp.stanza.Stanza,
        lxml.etree.Element, a str, or bytes. In the case of bytes the
        data MUST be utf-8 encoded (per the XMPP specification).

        In the case of Stanza and Element the Mixin.onSend callback is
        invoked. Currently there is not a Mixin callback for strings or bytes.
        """
        def _send(bytes_):
            if not self._transport:
                log.warn("Data send with disconnected transport")
                return
            self._transport.write(bytes_)
            log.debug("[BYTES OUT]: %s", bytes_)

        stanza = None
        if isinstance(data, stanzas.Stanza):
            stanza = data
            raw_data = data.toXml()
        elif isinstance(data, str):
            raw_data = data.encode("utf-8")
        elif isinstance(data, etree._Element):
            stanza = stanzas.Stanza(xml=data)
            raw_data = etree.tostring(data, encoding="utf-8")
        elif isinstance(data, bytes):
            raw_data = data
        else:
            raise ValueError("Unable to send type {}".format(type(data)))

        if stanza and log.getEffectiveLevel() <= logging.VERBOSE:
            log.verbose("[STANZA OUT]:\n%s" %
                        stanza.toXml(pprint=True).decode("utf-8"))
        _send(raw_data)

        if stanza:
            # Fire mixin onSend hooks asynchronously; errors are logged by
            # _runMixin rather than propagated to the caller.
            for m in self._mixins:
                hook = partial(m.onSend, self, stanza)
                asyncio.ensure_future(self._runMixin(hook))

    async def sendAndWaitIq(self, child_ns, to=None, child_name="query",
                            type="get", raise_on_error=False, timeout=None,
                            id_prefix=None):
        """Convenience wrapper: build an <iq/> with a single request child
        in ``child_ns`` and await the matching reply."""
        iq = Iq(to=to, type=type, request=(child_name, child_ns),
                id_prefix=id_prefix)
        resp = await self.sendAndWait(iq, raise_on_error=raise_on_error,
                                      timeout=timeout)
        return resp

    async def sendAndWait(self, stanza, raise_on_error=False, timeout=None):
        """Send ``stanza`` and await the reply matched by stanza name and id.
        When ``raise_on_error`` a reply carrying an error is raised instead
        of returned."""
        if not stanza.id:
            stanza.setId()
        xpath = "/%s[@id='%s']" % (stanza.name, stanza.id)
        self.send(stanza)
        resp = await self.wait([(xpath, None)], timeout=timeout)
        if resp.error is not None and raise_on_error:
            raise resp.error
        else:
            return resp

    async def negotiate(self, timeout=None):
        # Subclasses implement the concrete stream negotiation.
        raise NotImplementedError()

    async def wait(self, xpaths, timeout=None):
        """``xpaths`` is a 2-tuple of the form (xpath, nsmap), or a list of
        the same tuples to wait on a choice of matches. The first matched
        stanza is returned. Passing a ``timeout`` argument will raise a
        asyncio.TimeoutError if not matches are found."""
        global stream_wait_met

        if not isinstance(xpaths, list):
            xpaths = [xpaths]
        if timeout is None and self.default_timeout:
            timeout = self.default_timeout

        log.debug("Stream wait for %s [timeout=%s]" % (xpaths, timeout))
        if _ENFORCE_TIMEOUTS and not timeout:
            raise RuntimeError("Timeout not set error")

        fut = _StreamWaitFuture(xpaths)

        # Run thru queue. Note, once a tasklet has seen a stanza it is skipped
        # by _StreamWaitFuture.matchStanza
        for queued_stanza in self._stanza_queue:
            matched = fut.matchStanza(queued_stanza)
            if matched:
                return queued_stanza.stanza

        self._waiter_futures.append(fut)
        try:
            with timedWait() as timer_stat:
                match = await asyncio.wait_for(fut, timeout)
            if stream_wait_met:
                # Optional metrics, enabled via $VEX_TIMED_WAITS.
                stream_wait_met.update(timer_stat["total"])
                log.debug("Stream wait - time: {:.3f} "
                          "min/max/avg: {:.6f}/{:.6f}/{:.6f}"
                          .format(stream_wait_met.value,
                                  stream_wait_met.min, stream_wait_met.max,
                                  stream_wait_met.average))
            return match
        except asyncio.TimeoutError as ex:
            # Re-raise with a message that names the unmatched xpaths.
            raise asyncio.TimeoutError(
                "Timeout ({}s) while waiting for xpaths: {}"
                .format(timeout, xpaths)) from ex
        finally:
            self._waiter_futures.remove(fut)

    # asyncio.Protocol implementation
    def connection_made(self, transport, tls=False):
        log.debug("Connection_made: %s", transport)
        self._transport = transport
        self._tls_active = tls
        signalEvent(self._callbacks, "connected", self, tls)

    def starttls_made(self, transport):
        # Invoked after a STARTTLS upgrade; same as connection_made with TLS.
        self.connection_made(transport, tls=True)

    async def _handleStanza(self, stanza):
        """Dispatch one parsed stanza: stream errors close the transport;
        otherwise mixins are notified and waiters are checked."""
        if isinstance(stanza, stanzas.StreamError):
            signalEvent(self._callbacks, "streamError", self, stanza)
            self._transport.close()
            return

        for m in self._mixins:
            hook = partial(m.onStanza, self, stanza)
            asyncio.ensure_future(self._runMixin(hook))

        self._stanza_queue.append(QueuedStanza(stanza))
        if self._waiter_futures:
            for queued_stanza in self._stanza_queue:
                for fut in [f for f in self._waiter_futures if not f.done()]:
                    matched = fut.matchStanza(queued_stanza)
                    if matched:
                        # XXX: How useful is this since _stanza_queue?
                        # Yield the event loop, which is essential for a handle
                        # and wait in quick succession.
                        await asyncio.sleep(0)

    # asyncio.Protocol implementation
    def data_received(self, data):
        log.debug('[BYTES IN]: {!r}'.format(data.decode()))
        self._parser_task.parse(data)

    # asyncio.Protocol implementation
    def connection_lost(self, reason):
        self._transport = None
        self._tls_active = False
        log.debug('The server closed the connection: %s' % str(reason))
        signalEvent(self._callbacks, "disconnected", self, reason)

    @property
    def default_timeout(self):
        # Fallback timeout (seconds) used by wait() when none is given.
        return self._default_timeout

    @default_timeout.setter
    def default_timeout(self, t):
        if t is not None:
            t = int(t)
        self._default_timeout = t

    async def _runMixin(self, functor):
        # NOTE(review): bare except deliberately shields the stream from any
        # mixin failure; functor is a functools.partial, so __class__ names
        # 'partial' rather than the mixin — confirm intent.
        try:
            await functor()
        except:
            log.exception("{} mixin error".format(functor.__class__.__name__))
class Mixin(object):
    """Pluggable extension point for Stream objects: hooks into the stanza
    send/receive pipeline and may export names into the stream namespace."""

    def __init__(self, export_tuples=None):
        """``export_tuples`` is a list of (name, obj) 2-tuples added to the
        stream object's __dict__ (i.e. __dict__[name] = obj).  By default
        nothing is exported."""
        self._exports = export_tuples or []

    async def postSession(self, stream):
        """Invoked once stream negotiation and session creation complete."""
        pass

    async def onStanza(self, stream, stanza):
        """Invoked for every inbound Stanza.

        See :func:`vexmpp.utils.xpathFilter` for a decorator that limits
        which stanzas an implementation handles.
        """
        pass

    async def onSend(self, stream, stanza):
        """Invoked for every outbound stanza."""
        pass
class StreamCallbacks:
    """No-op callback interface for Stream state changes; subclass and
    override the events of interest."""

    def connected(self, stream, tls_active):
        # Transport established; tls_active is True when over TLS.
        pass

    def disconnected(self, stream, reason):
        # Transport closed; ``reason`` comes from connection_lost.
        pass

    def streamError(self, stream, error):
        # A stream-level error stanza was received.
        pass
class _StreamWaitFuture(asyncio.Future):
    """Future used by Stream.wait(): resolves with the first queued stanza
    that matches one of the waiter's xpath expressions."""

    def __init__(self, xpaths, *args, loop=None):
        """``xpaths`` is a list of (xpath, nsmap) 2-tuples."""
        super().__init__(*args, loop=loop)
        self._xpaths = xpaths
        # asyncio.Task.current_task() was removed in Python 3.12; prefer the
        # module-level asyncio.current_task() (3.7+), falling back for very
        # old interpreters.
        _current = getattr(asyncio, "current_task", None)
        self._task = (_current() if _current is not None
                      else asyncio.Task.current_task())

    def matchStanza(self, queued_stanza):
        """Test ``queued_stanza`` against this waiter's xpaths; on the first
        match resolve the future with the stanza and return True.  Each
        waiter task examines a given queued stanza at most once."""
        # Fix: the original mixed an f-string with str.format in one call,
        # producing a confusing message; use lazy %-style logging args so
        # formatting only happens when DEBUG is enabled.
        log.debug("MatchStanza: %s xpaths: %s - @%s",
                  queued_stanza.stanza.toXml(), self._xpaths, id(self._task))
        if self._task in queued_stanza.task_set:
            # seen this...
            return False
        queued_stanza.task_set.add(self._task)

        stanza = queued_stanza.stanza
        for xp, nsmap in self._xpaths:
            log.debug("MatchStanza: Testing xpath %s against stanza %s",
                      (xp, nsmap), stanza.toXml())
            if stanza.xml.xpath(xp, namespaces=nsmap):
                log.debug("MatchStanza: matched")
                self.set_result(stanza)
                return True

        log.debug("MatchStanza: NOT matched")
        return False
| nicfit/vexmpp | vexmpp/stream.py | Python | mit | 11,877 |
# Generated by Django 1.10.1 on 2016-09-07 20:16
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: sets default ordering and verbose names for
    the Email and Tag models and enforces uniqueness of
    (institution, email) pairs.  Generated code — do not hand-edit logic."""

    dependencies = [("institutions", "0005_auto_20160907_1814")]

    operations = [
        migrations.AlterModelOptions(
            name="email",
            options={
                "ordering": ["priority", "institution"],
                "verbose_name": "Email",
                "verbose_name_plural": "Emails",
            },
        ),
        migrations.AlterModelOptions(
            name="tag",
            options={
                "ordering": ["name"],
                "verbose_name": "Tag",
                "verbose_name_plural": "Tags",
            },
        ),
        migrations.AlterUniqueTogether(
            name="email", unique_together={("institution", "email")}
        ),
    ]
| watchdogpolska/feder | feder/institutions/migrations/0006_auto_20160907_2016.py | Python | mit | 828 |
import os
from tornado import gen
import tornado.web
class DocsHandler(tornado.web.RequestHandler):
    """Renders the Swagger UI page pointed at the v1 API description."""

    @gen.coroutine
    def get(self):
        # Build an absolute URL to the spec so the page works behind any
        # host/scheme the request arrived on.
        version = "{}://{}/docs/version/v1.yml".format(
            self.request.protocol, self.request.host)
        self.render("swagger/index.html", version=version)
class HomeHandler(tornado.web.StaticFileHandler):
    """Static handler that always serves index.html, ignoring ``path``."""

    @gen.coroutine
    def get(self, path, include_body=True):
        yield super().get('index.html', include_body)
class CachingFrontendHandler(tornado.web.StaticFileHandler):
    """If a file exists in the root folder, serve it, otherwise serve index.html

    This supports single-page-app routing: unknown paths fall back to the
    frontend entry point instead of a 404.
    """
    @gen.coroutine
    def get(self, path, include_body=True):
        absolute_path = self.get_absolute_path(self.root, path)
        is_file = os.path.isfile(absolute_path)
        if is_file:
            yield super().get(path, include_body)
        else:
            # Not a real file: let the SPA router handle the path.
            yield super().get('index.html', include_body)
| vladiibine/trust-network | src/python/trust_network_backend/tnb/apps/core/views.py | Python | mit | 954 |
#!/usr/bin/env python
# coding: utf-8
from datetime import datetime
from distutils import spawn
import argparse
import json
import os
import platform
import shutil
import socket
import sys
import urllib
import urllib2
import main
from main import config
###############################################################################
# Options
###############################################################################
PARSER = argparse.ArgumentParser()
PARSER.add_argument(
'-d', '--dependencies', dest='install_dependencies', action='store_true',
help='install virtualenv and python dependencies',
)
PARSER.add_argument(
'-s', '--start', dest='start', action='store_true',
help='starts the dev_appserver.py with storage_path pointing to temp',
)
PARSER.add_argument(
'-o', '--host', dest='host', action='store', default='127.0.0.1',
help='the host to start the dev_appserver.py',
)
PARSER.add_argument(
'-p', '--port', dest='port', action='store', default='8080',
help='the port to start the dev_appserver.py',
)
PARSER.add_argument(
'--appserver-args', dest='args', nargs=argparse.REMAINDER, default=[],
help='all following args are passed to dev_appserver.py',
)
PARSER.add_argument(
'-v', '--version', dest='show_version', action='store_true',
help='Show gae-init version',
)
PARSER.add_argument(
'-i', '--pybabel-init', dest='pybabel_init', action='store_true',
help='''create new message catalogs from messages.pot that are defined
in config.py and still not present (pybabel init..)''',
)
PARSER.add_argument(
'-u', '--pybabel-update', dest='pybabel_update', action='store_true',
help='''extracts messages from source files to generate messages.pot
(pybabel extract..), updates existing catalogs (pybabel update..) and
compiles message catalogs to MO files (pybable compile)''',
)
PARSER.add_argument(
'-l', '--pybabel-init-locale', dest='pybabel_locale', action='store',
help='create new message catalogs from messages.pot (pybabel init..)',
)
ARGS = PARSER.parse_args()
###############################################################################
# Globals
###############################################################################
BAD_ENDINGS = ['pyc', 'pyo', '~']  # file suffixes excluded from deploys/copies
GAE_PATH = ''  # resolved lazily (presumably by find_gae_path) — see below
IS_WINDOWS = platform.system() == 'Windows'

###############################################################################
# Directories
###############################################################################
DIR_MAIN = 'main'
DIR_TEMP = 'temp'
DIR_VENV = os.path.join(DIR_TEMP, 'venv')

DIR_LIB = os.path.join(DIR_MAIN, 'lib')
DIR_LIBX = os.path.join(DIR_MAIN, 'libx')
FILE_LIB = '%s.zip' % DIR_LIB
FILE_REQUIREMENTS = 'requirements.txt'
# Guard file: its mtime records the last successful pip run.
FILE_PIP_GUARD = os.path.join(DIR_TEMP, 'pip.guard')

# virtualenv activation script differs per platform.
FILE_VENV = os.path.join(DIR_VENV, 'Scripts', 'activate.bat') \
    if IS_WINDOWS \
    else os.path.join(DIR_VENV, 'bin', 'activate')

DIR_STORAGE = os.path.join(DIR_TEMP, 'storage')
FILE_UPDATE = os.path.join(DIR_TEMP, 'update.json')

DIR_TRANSLATIONS = os.path.join(DIR_MAIN, 'translations')
FILE_BABEL_CFG = os.path.join(DIR_TRANSLATIONS, 'babel.cfg')
FILE_MESSAGES_POT = os.path.join(DIR_TRANSLATIONS, 'messages.pot')

###############################################################################
# Other global variables
###############################################################################
CORE_VERSION_URL = 'https://gae-init.appspot.com/_s/version/'
INTERNET_TEST_URL = 'https://www.google.com'
REQUIREMENTS_URL = 'http://docs.gae-init.appspot.com/requirement/'
###############################################################################
# Helpers
###############################################################################
def print_out(script, filename=''):
    """Print a timestamped, column-aligned progress line, e.g.
    ``[12:00:00]      script  filename``."""
    timestamp = datetime.now().strftime('%H:%M:%S')
    if not filename:
        # No file given: pad the script name with dashes and print a ruler.
        filename = '-' * 46
        script = script.rjust(12, '-')
    print '[%s] %12s %s' % (timestamp, script, filename)
def make_dirs(directory):
    """Create ``directory`` (including parents) unless it already exists."""
    if os.path.exists(directory):
        return
    os.makedirs(directory)
def listdir(directory, split_ext=False):
    """List the entries of ``directory``; with ``split_ext`` the extensions
    are stripped.  Returns [] when the directory cannot be read."""
    try:
        entries = os.listdir(directory)
    except OSError:
        return []
    if not split_ext:
        return entries
    return [os.path.splitext(entry)[0] for entry in entries]
def site_packages_path():
    """Return the virtualenv's site-packages directory for this platform
    (Windows uses Lib/, POSIX uses lib/pythonX.Y/)."""
    if IS_WINDOWS:
        return os.path.join(DIR_VENV, 'Lib', 'site-packages')
    py_version = 'python%s.%s' % sys.version_info[:2]
    return os.path.join(DIR_VENV, 'lib', py_version, 'site-packages')
def create_virtualenv():
    """Create the project virtualenv on first run and append a gae.pth that
    puts the GAE SDK and libx on the venv's path.  Always returns True."""
    if not os.path.exists(FILE_VENV):
        os.system('virtualenv --no-site-packages %s' % DIR_VENV)
        # Make the activation script clear PYTHONPATH so only venv packages
        # are visible.
        os.system('echo %s >> %s' % (
            'set PYTHONPATH=' if IS_WINDOWS else 'unset PYTHONPATH', FILE_VENV
        ))
        pth_file = os.path.join(site_packages_path(), 'gae.pth')
        echo_to = 'echo %s >> {pth}'.format(pth=pth_file)
        # find_gae_path() is defined later in this file (outside this block).
        os.system(echo_to % find_gae_path())
        os.system(echo_to % os.path.abspath(DIR_LIBX))
        # .pth lines that are import statements are executed by site.py.
        fix_path_cmd = 'import dev_appserver; dev_appserver.fix_sys_path()'
        os.system(echo_to % (
            fix_path_cmd if IS_WINDOWS else '"%s"' % fix_path_cmd
        ))
    return True
def exec_pip_commands(command):
    """Run a pip ``command`` inside the (possibly freshly created)
    virtualenv, echoing it first; built as one shell line per platform."""
    script = []
    if create_virtualenv():
        # Activate the venv before running pip.
        activate_cmd = 'call %s' if IS_WINDOWS else 'source %s'
        activate_cmd %= FILE_VENV
        script.append(activate_cmd)

    script.append('echo %s' % command)
    # Work around googleapiclient's Python-version compatibility check.
    script.append('%s SKIP_GOOGLEAPICLIENT_COMPAT_CHECK=1' %
                  ('set' if IS_WINDOWS else 'export'))
    script.append(command)
    script = '&'.join(script) if IS_WINDOWS else \
        '/bin/bash -c "%s"' % ';'.join(script)
    os.system(script)
def make_guard(fname, cmd, spec):
    """Write a guard file whose mtime records that ``cmd`` ran for ``spec``."""
    message = 'Prevents %s execution if newer than %s' % (cmd, spec)
    with open(fname, 'w') as guard:
        guard.write(message)
def guard_is_newer(guard, watched):
    """True when ``guard`` exists and was modified after ``watched``."""
    if not os.path.exists(guard):
        return False
    return os.path.getmtime(guard) > os.path.getmtime(watched)
def check_if_pip_should_run():
    """True when requirements.txt changed since the last pip guard write."""
    return not guard_is_newer(FILE_PIP_GUARD, FILE_REQUIREMENTS)
def install_py_libs():
if not check_if_pip_should_run() and os.path.exists(DIR_LIB):
return
exec_pip_commands('pip install -q -r %s' % FILE_REQUIREMENTS)
exclude_ext = ['.pth', '.pyc', '.egg-info', '.dist-info', '.so']
exclude_prefix = ['setuptools-', 'pip-', 'Pillow-']
exclude = [
'test', 'tests', 'pip', 'setuptools', '_markerlib', 'PIL',
'easy_install.py', 'pkg_resources.py'
]
def _exclude_prefix(pkg):
for prefix in exclude_prefix:
if pkg.startswith(prefix):
return True
return False
def _exclude_ext(pkg):
for ext in exclude_ext:
if pkg.endswith(ext):
return True
return False
def _get_dest(pkg):
make_dirs(DIR_LIB)
return os.path.join(DIR_LIB, pkg)
site_packages = site_packages_path()
dir_libs = listdir(DIR_LIB)
dir_libs.extend(listdir(DIR_LIBX))
for dir_ in listdir(site_packages):
if dir_ in dir_libs or dir_ in exclude:
continue
if _exclude_prefix(dir_) or _exclude_ext(dir_):
continue
src_path = os.path.join(site_packages, dir_)
copy = shutil.copy if os.path.isfile(src_path) else shutil.copytree
copy(src_path, _get_dest(dir_))
make_guard(FILE_PIP_GUARD, 'pip', FILE_REQUIREMENTS)
def install_dependencies():
make_dirs(DIR_TEMP)
install_py_libs()
def check_for_update():
if os.path.exists(FILE_UPDATE):
mtime = os.path.getmtime(FILE_UPDATE)
last = datetime.utcfromtimestamp(mtime).strftime('%Y-%m-%d')
today = datetime.utcnow().strftime('%Y-%m-%d')
if last == today:
return
try:
with open(FILE_UPDATE, 'a'):
os.utime(FILE_UPDATE, None)
request = urllib2.Request(
CORE_VERSION_URL,
urllib.urlencode({'version': main.__version__}),
)
response = urllib2.urlopen(request)
with open(FILE_UPDATE, 'w') as update_json:
update_json.write(response.read())
except (urllib2.HTTPError, urllib2.URLError):
pass
def print_out_update(force_show=False):
    """Print an update banner when a newer gae-init version is cached.

    Reads the JSON written by check_for_update(); a corrupt cache file is
    deleted so the next run refetches it.
    """
    # Locate a SemanticVersion class across old and new pip layouts.
    # NOTE(review): only AttributeError is caught -- a missing pip would
    # raise ImportError here; presumably pip is guaranteed by doctor checks.
    try:
        import pip
        SemVer = pip.util.version.SemanticVersion
    except AttributeError:
        import pip._vendor.distlib.version
        SemVer = pip._vendor.distlib.version.SemanticVersion
    try:
        with open(FILE_UPDATE, 'r') as update_json:
            data = json.load(update_json)
        if SemVer(main.__version__) < SemVer(data['version']) or force_show:
            print_out('UPDATE')
            print_out(data['version'], 'Latest version of gae-init')
            print_out(main.__version__, 'Your version is a bit behind')
            print_out('CHANGESET', data['changeset'])
    except (ValueError, KeyError):
        # Corrupt/partial cache -- remove so it is re-downloaded.
        os.remove(FILE_UPDATE)
    except IOError:
        pass
###############################################################################
# Doctor
###############################################################################
def internet_on():
    """True when INTERNET_TEST_URL answers within two seconds."""
    try:
        urllib2.urlopen(INTERNET_TEST_URL, timeout=2)
        return True
    except (urllib2.URLError, socket.timeout):
        return False
def check_requirement(check_func):
    """Run one (ok, name, help_anchor) checker and report a failure."""
    result, name, help_url_id = check_func()
    if not result:
        print_out('NOT FOUND', name)
        if help_url_id:
            print 'Please see %s%s' % (REQUIREMENTS_URL, help_url_id)
        return False
    return True
def find_gae_path():
    """Locate the Google App Engine SDK directory, caching in GAE_PATH.

    Handles both the standalone SDK and the gcloud-bundled layout (where
    dev_appserver.py sits next to the gcloud executable).
    Returns '' when the SDK cannot be found.
    """
    global GAE_PATH
    if GAE_PATH:
        return GAE_PATH
    if IS_WINDOWS:
        gae_path = None
        # Scan PATH manually; spawn.find_executable misses .py on Windows.
        for path in os.environ['PATH'].split(os.pathsep):
            if os.path.isfile(os.path.join(path, 'dev_appserver.py')):
                gae_path = path
    else:
        gae_path = spawn.find_executable('dev_appserver.py')
        if gae_path:
            gae_path = os.path.dirname(os.path.realpath(gae_path))
    if not gae_path:
        return ''
    gcloud_exec = 'gcloud.cmd' if IS_WINDOWS else 'gcloud'
    if not os.path.isfile(os.path.join(gae_path, gcloud_exec)):
        GAE_PATH = gae_path
    else:
        # gcloud install: the real SDK lives under platform/google_appengine.
        gae_path = os.path.join(gae_path, '..', 'platform', 'google_appengine')
        if os.path.exists(gae_path):
            GAE_PATH = os.path.realpath(gae_path)
    return GAE_PATH
def check_internet():
    return internet_on(), 'Internet', ''
def check_gae():
    return bool(find_gae_path()), 'Google App Engine SDK', '#gae'
def check_git():
    return bool(spawn.find_executable('git')), 'Git', '#git'
def check_nodejs():
    return bool(spawn.find_executable('node')), 'Node.js', '#nodejs'
def check_pip():
    return bool(spawn.find_executable('pip')), 'pip', '#pip'
def check_virtualenv():
    return bool(spawn.find_executable('virtualenv')), 'virtualenv', '#virtualenv'
def doctor_says_ok():
    """Exit the program when any hard requirement is missing.

    Internet access is checked last and is only advisory (returned,
    not fatal).
    """
    checkers = [check_gae, check_git, check_nodejs, check_pip, check_virtualenv]
    if False in [check_requirement(check) for check in checkers]:
        sys.exit(1)
    return check_requirement(check_internet)
###############################################################################
# Babel Stuff
###############################################################################
def pybabel_extract():
    """Extract translatable strings from DIR_MAIN into the .pot file."""
    exec_pip_commands(
        '"pybabel" extract -k _ -k __ -F %s --sort-by-file --omit-header -o %s %s' % (
            FILE_BABEL_CFG, FILE_MESSAGES_POT, DIR_MAIN,
        ))
def pybabel_update():
    """Merge the .pot file into every existing locale catalog."""
    exec_pip_commands('"pybabel" update -i %s -d %s -N --no-wrap' % (
        FILE_MESSAGES_POT, DIR_TRANSLATIONS,
    ))
def pybabel_init(locale):
    """Create a fresh catalog for *locale* from the .pot file."""
    exec_pip_commands('"pybabel" init -i %s -d %s -l %s' % (
        FILE_MESSAGES_POT, DIR_TRANSLATIONS, locale,
    ))
def pybabel_init_missing():
    """Ensure every configured locale has a messages.po catalog."""
    if not os.path.exists(FILE_MESSAGES_POT):
        pybabel_extract()
    for locale in config.LOCALE:
        msg = os.path.join(DIR_TRANSLATIONS, locale, 'LC_MESSAGES', 'messages.po')
        if not os.path.exists(msg):
            pybabel_init(locale)
def pybabel_compile():
    """Compile all .po catalogs into binary .mo files."""
    exec_pip_commands('"pybabel" compile -f -d %s' % (DIR_TRANSLATIONS))
def run_start():
    """Launch dev_appserver.py for DIR_MAIN with the CLI-selected ports."""
    port = int(ARGS.port)
    run_command = ' '.join(map(str, [
        'dev_appserver.py',
        DIR_MAIN,
        '--host %s' % ARGS.host,
        '--port %s' % port,
        '--admin_port %s' % (port + 1),
        '--storage_path=%s' % DIR_STORAGE,
        '--skip_sdk_update_check',
    ] + ARGS.args))
    os.system(run_command)
def run():
    """Entry point: dispatch on the parsed CLI flags (ARGS)."""
    # Bare invocation, or extra args without --start, is a usage error.
    if len(sys.argv) == 1 or (ARGS.args and not ARGS.start):
        PARSER.print_help()
        sys.exit(1)
    # Operate relative to this script's directory regardless of CWD.
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    if doctor_says_ok():
        install_dependencies()
        check_for_update()
    if ARGS.pybabel_init:
        pybabel_init_missing()
    if ARGS.pybabel_update:
        pybabel_extract()
        pybabel_init_missing()
        pybabel_update()
        pybabel_compile()
    if ARGS.pybabel_locale:
        pybabel_init(ARGS.pybabel_locale)
    if ARGS.show_version:
        print_out_update(force_show=True)
    else:
        print_out_update()
    if ARGS.start:
        run_start()
    if ARGS.install_dependencies:
        install_dependencies()
if __name__ == '__main__':
    run()
| lipis/life-line | run.py | Python | mit | 12,739 |
from shutit_module import ShutItModule
import base64
class openshift_airflow(ShutItModule):
	# ShutIt module that provisions an Airflow deployment on the OpenShift
	# vagrant VM: creates an ImageStream, a Secret, a BuildConfig pointing at
	# the shutit-airflow GitHub repo, and a DeploymentConfig.
	def build(self, shutit):
		# Enter the VM and become root; the oc client is run from there.
		shutit.send('cd /tmp/openshift_vm')
		shutit.login(command='vagrant ssh')
		shutit.login(command='sudo su -',password='vagrant',note='Become root (there is a problem logging in as admin with the vagrant user')
		# AIRFLOW BUILD
		# Takes too long.
		#shutit.send('oc describe buildconfig airflow',note='Ideally you would take this github url, and update your github webhooks for this project. But there is no public URL for this server so we will skip and trigger a build manually.')
		#shutit.send('oc start-build airflow',note='Trigger a build by hand')
		#shutit.send('sleep 60 && oc logs -f build/airflow-1',note='Follow the build and wait for it to terminate')
		# IMAGE STREAM
		shutit.send_file('/tmp/imagestream.json','''
{
	"kind": "ImageStream",
	"apiVersion": "v1",
	"metadata": {
		"name": "airflow"
	},
	"spec": {},
	"status": {
		"dockerImageRepository": ""
	}
}''')
		shutit.send('oc create -f /tmp/imagestream.json')
		# BUILD CONFIG
		shutit.send_file('secret.json','''{
	"apiVersion": "v1",
	"kind": "Secret",
	"metadata": {
		"name": "mysecret"
	},
	"namespace": "user2",
	"data": {
		"username": "''' + base64.b64encode('myusername') + '''"
	}
}''')
		shutit.send('oc create -f secret.json')
		shutit.send_file('/tmp/buildconfig.json','''
{
	"kind": "BuildConfig",
	"apiVersion": "v1",
	"metadata": {
		"name": "airflow",
		"labels": {
			"name": "airflow-build"
		}
	},
	"spec": {
		"source": {
			"type": "Git",
			"git": {
				"uri": "https://github.com/ianmiell/shutit-airflow"
			}
		},
		"strategy": {
			"type": "Docker"
		},
		"output": {
			"to": {
				"kind": "ImageStreamTag",
				"name": "airflow:latest"
			}
		},
		"volumes": {
			"name": "secvol",
			"secret": {
				"secretname": "mysecret"
			}
		}
	}
}
''')
		shutit.send('oc create -f /tmp/buildconfig.json')
		# DEPLOYMENT CONFIG
		# NOTE(review): imageChangeParams.containerNames lists
		# "nodejs-helloworld" while the pod template's container is named
		# "airflow" -- looks like a copy/paste leftover; confirm whether the
		# image-change trigger actually fires.
		shutit.send_file('/tmp/deploymentconfig.json','''
{
	"kind": "DeploymentConfig",
	"apiVersion": "v1",
	"metadata": {
		"name": "airflow"
	},
	"spec": {
		"strategy": {
			"type": "Rolling",
			"rollingParams": {
				"updatePeriodSeconds": 1,
				"intervalSeconds": 1,
				"timeoutSeconds": 120
			},
			"resources": {}
		},
		"triggers": [
			{
				"type": "ImageChange",
				"imageChangeParams": {
					"automatic": true,
					"containerNames": [
						"nodejs-helloworld"
					],
					"from": {
						"kind": "ImageStreamTag",
						"name": "airflow:latest"
					}
				}
			},
			{
				"type": "ConfigChange"
			}
		],
		"replicas": 1,
		"selector": {
			"name":"airflow"
		},
		"template": {
			"metadata": {
				"labels": {
					"name": "airflow"
				}
			},
			"spec": {
				"containers": [
					{
						"name": "airflow",
						"image": "airflow",
						"ports": [
							{
								"containerPort": 8080,
								"protocol": "TCP"
							}
						],
						"resources": {},
						"terminationMessagePath": "/dev/termination-log",
						"imagePullPolicy": "IfNotPresent",
						"securityContext": {
							"capabilities": {},
							"privileged": false
						}
					}
				],
				"restartPolicy": "Always",
				"dnsPolicy": "ClusterFirst"
			}
		}
	},
	"status": {}
}
''')
		shutit.send('oc create -f /tmp/deploymentconfig.json')
		# Leave root, then leave the vagrant session.
		shutit.logout()
		shutit.logout()
		return True
def module():
	# Factory required by ShutIt: returns the module instance with its
	# unique id, run-order float, and dependency on the openshift_vm module.
	return openshift_airflow(
		'shutit.openshift_vm.openshift_vm.openshift_airflow', 1418326706.005,
		description='',
		maintainer='',
		delivery_methods=['bash'],
		depends=['shutit.openshift_vm.openshift_vm.openshift_vm']
	)
| ianmiell/shutit-openshift-vm | airflow.py | Python | mit | 4,398 |
import os
import shutil
import re
import zipfile
import xml.etree.ElementTree as ET
from tempfile import TemporaryDirectory
import psycopg2
# Move DigitalGlobe order archives to the NAS and update their DB paths.
# NOTE(review): credentials and host are hard-coded; path_list and several
# imports (re, zipfile, ET, TemporaryDirectory) are unused leftovers from
# the commented-out aero-detection pass below.
conn = psycopg2.connect(
    database='innoter', user='postgres', password='postgres', host='192.168.0.107', port='5432')
cursor = conn.cursor()
dst_dir = r"\\nas1\storage\DG_archive\sat"
path_list = []
# Select only satellite (non-aero) orders.
cursor.execute(
    """SELECT path, order_id
        FROM geoarchive.dg_orders
        WHERE aero is not TRUE""")
results = cursor.fetchall()
for i, result in enumerate(results):
    zip_path, order_id = result[0], result[1]
    print(i + 1, zip_path)
    # Physically move the archive, then record its new location.
    dst_filepath = os.path.join(dst_dir, os.path.basename(zip_path))
    shutil.move(zip_path, dst_filepath)
    cursor.execute("""UPDATE geoarchive.dg_orders
            SET path = %s
            WHERE order_id = %s""", [dst_filepath, order_id, ], )
# Single commit after the loop: files already moved are only recorded in
# the DB if every move succeeds -- presumably intentional; verify.
conn.commit()
print('Готово...\n')
# with zipfile.ZipFile(zip_path) as zf:
# order_shape = [fnm for fnm in zf.namelist() if re.match(r'.+ORDER_SHAPE.+', fnm, re.I)]
# if not order_shape:
# # for fnm in zf.namelist():
# # if re.match(r'.+ORDER_SHAPE.+', fnm, re.I) is None:
# cursor.execute("""UPDATE geoarchive.dg_orders
# SET aero = TRUE
# WHERE order_id = %s""", [order_id, ],)
# conn.commit()
# print(80*'=', order_id, 80*'=')
# aero_list.append(order_id)
#
# print('\nDone:\n', len(aero_list))
# for i in aero_list:
# print(i)
| bazzile/imScrape | Scripts/v1/dev/auxiliary/move_imagery.py | Python | mit | 1,527 |
# parser2.py
# parses sentences from the CSV files
# J. Hassler Thurston
# RocHack Hackathon December 7, 2013
# Modified December 11, 2013
import nltk
from random import choice
cfg_file = 'upenn_grammar.cfg'
tbank_productions = []
nonterminals = []
rightside = []
def get_initial_rules():
    """Build a CFG from all Penn Treebank productions and print a sample.

    Python 2 code (print statement); requires the NLTK treebank corpus.
    NOTE(review): 'rightside' is assigned here but not listed in the global
    statement, so the module-level 'rightside' is never populated -- confirm
    whether that was intended.
    """
    global tbank_productions, nonterminals
    # from http://stackoverflow.com/questions/7056996/how-do-i-get-a-set-of-grammar-rules-from-penn-treebank-using-python-nltk
    tbank_productions = [production for sent in nltk.corpus.treebank.parsed_sents() for production in sent.productions()]
    nonterminals = [production.lhs().__str__() for production in tbank_productions]
    rightside = [production.rhs().__str__() for production in tbank_productions]
    tbank_grammar = nltk.grammar.ContextFreeGrammar(nltk.grammar.Nonterminal('S'), tbank_productions)
    print generate_sample(tbank_grammar)
# modified from http://stackoverflow.com/questions/15009656/how-to-use-nltk-to-generate-sentences-from-an-induced-grammar
def generate_sample(grammar, items=[nltk.grammar.Nonterminal('S')]):
    """Recursively expand *items* into a nested list of terminal fragments.

    Starts from the S nonterminal by default and picks random expansions.
    NOTE(review): the mutable default argument is shared across calls; it is
    never mutated here, but converting to items=None with a fallback would
    be safer.
    """
    frags = []
    if len(items) == 1:
        print items
        if isinstance(items[0], nltk.grammar.Nonterminal):
            # Expand the single nonterminal via all its productions.
            frags.append(generate_sample(grammar, grammar.productions(lhs=items[0])))
        else:
            # Terminal symbol: emit as-is.
            frags.append(items[0])
    else:
        print items[:2]
        # This is where we need to make our changes
        # Pick one expansion at random from the candidate list.
        chosen_expansion = choice(items)
        #print type(chosen_expansion)
        frags.append(generate_sample(grammar, [chosen_expansion]))
    return frags
| jthurst3/newspeeches | parser2.py | Python | mit | 1,487 |
#!/usr/bin/env python
import itertools
import time
import rospy
from std_msgs.msg import String
class QueueProc(object):
    # ROS node that turns compact command strings (e.g. "f10r90") into a
    # queue of timed motor state changes published on /motor/state_change.
    def __init__(self):
        rospy.init_node('motor_queue_proc', anonymous=True)
        self.state_change = rospy.Publisher('/motor/state_change', String, queue_size=10)
        rospy.Subscriber("/motor/commands", String, self.parse_string)
        # Pending [state, magnitude] pairs, consumed by timer_expire().
        self.queue = []
        # Timer strategy; swappable for testing or distance-based timing.
        self.timer_class = TimeTimer
    #
    # Parses published strings and loads the next item into the queue
    #
    def parse_string(self, data):
        # Split into alternating letter/digit runs, e.g. "f10r90" ->
        # ["f", "10", "r", "90"].  Assumes the string starts with a
        # command letter and every command is followed by a number --
        # TODO confirm "flush" also arrives with a trailing number, since
        # i is advanced by 2 unconditionally.
        commands = ["".join(x) for _, x in itertools.groupby(data.data, key=str.isdigit)]
        queue_start = len(self.queue)
        i = 0
        while i < len(commands):
            action = commands[i]
            val = commands[i+1]
            if action == "f":
                self.queue.append(["forward", int(val)])
            elif action == "b":
                self.queue.append(["backward", int(val)])
            elif action == "r":
                # Normalise to [0, 360) then fold turns > 180 degrees into
                # the shorter left turn.
                rounded = int(val)%360
                if rounded > 180:
                    rounded = 360-rounded
                    self.queue.append(["left", rounded])
                else:
                    self.queue.append(["right", rounded])
            elif action == "flush":
                self.queue = []
            i += 2
        # If the queue was previously empty, kick off processing now.
        if queue_start == 0:
            self.timer_expire()
    #
    # changes the state and sets a timer based on the next item in the queue
    #
    def timer_expire(self):
        if len(self.queue) == 0:
            self.state_change.publish("stop")
            return
        nxt = self.queue.pop(0)
        # Stop and settle before starting a turn.
        if (nxt[0] == "left" or nxt[0] == "right"):
            self.state_change.publish("stop")
            time.sleep(2)
        self.state_change.publish(nxt[0])
        tmer = self.timer_class(nxt[1], self.timer_expire, (nxt[0] == "left" or nxt[0] == "right"))
        tmer.start()
tmer.start()
#
# General timer class, does nothing
#
class GeneralTimer():
    """Degenerate timer base: ignores distance and fires the callback at once."""
    def __init__(self, distance, callback, is_angle):
        # Only the callback is kept; distance/is_angle exist for interface
        # compatibility with subclasses.
        self.callback = callback
    def start(self):
        # No real delay in the base class -- fire immediately.
        self.callback()
    def get_time(self):
        # The base timer has zero duration.
        return 0
#
# A time based timer
#
class TimeTimer(GeneralTimer):
    """Wall-clock timer: duration derived from distance or turn angle."""
    def __init__(self, distance, callback, is_angle):
        self.callback = callback
        # Linear moves: distance in m*10.  Turns: distance in degrees.
        self.distance = distance
        self.is_angle = is_angle
        # Assumed speed: 1 meter per second.
        self.mps = 1
        # Assumed turn rate: 40 degrees per second.
        self.aps = 40
    def start(self):
        # One-shot ROS timer that invokes timer_callback after get_time().
        rospy.Timer(rospy.Duration(self.get_time()), self.timer_callback, True)
    def timer_callback(self, tEvent):
        self.callback()
    def get_time(self):
        if self.is_angle:
            return self.distance/self.aps
        return float(self.distance)/(self.mps*10)
if __name__ == "__main__":
    # Instantiate the node and block until ROS shuts down.
    proc = QueueProc()
    rospy.spin()
| OSURoboticsClub/Rover2015 | software/ros-packages/motor_controller/scripts/motor_queue_proc.py | Python | mit | 2,620 |
from pyspark import SparkConf, SparkContext
import collections
# Local single-threaded Spark job: histogram of MovieLens rating values.
conf = SparkConf().setMaster("local").setAppName("RatingsHistogram")
sc = SparkContext(conf=conf)
lines = sc.textFile("file:///SparkCourse/ml-100k/u.data")
# u.data columns: user id, movie id, rating, timestamp -- keep the rating.
ratings = lines.map(lambda x: x.split()[2])
result = ratings.countByValue()
# Sort by rating value for stable, readable output.
sortedResults = collections.OrderedDict(sorted(result.items()))
for key, value in sortedResults.items():
    print("%s %i" % (key, value))
| TalhaAsmal/Taming-Big-Data-Pyspark-Udemy | ratings-counter.py | Python | mit | 437 |
from .graph_exception import GraphException
from .quality import Quality
from .node_factory import NodeFactory
# noinspection PyProtectedMember
class GraphNode(object):
    """
    Base class for nodes in the calculation graph.
    See: http:#richard-shepherd.github.io/calculation_graph/GraphNode.html
    """
    # 'enum' for node GC collectability...
    class GCType(object):
        COLLECTABLE = 1  # Node can be GC'd if not referred to by other nodes.
        NON_COLLECTABLE = 2  # Node will not be GC'd, even if not referred to by other nodes.
    #'enum' for whether child nodes should be calculated after the current node has finished calculating...
    class CalculateChildrenType(object):
        CALCULATE_CHILDREN = 1
        DO_NOT_CALCULATE_CHILDREN = 2
    def __init__(self, node_id, graph_manager, environment, *args, **kwargs):
        """
        The constructor.
        """
        # The node's unique ID in the graph...
        self.node_id = node_id
        # The graph manager...
        self.graph_manager = graph_manager
        # The environment. This can be any object useful for the particular application
        # which this graph and its nodes are used for...
        self.environment = environment
        # The quality of the data managed by this node...
        self.quality = Quality()
        # The set of parent nodes...
        self._parent_nodes = set()
        # The set of child nodes...
        self._child_nodes = set()
        # The set of child nodes to be calculated during one calculation cycle.
        # When we calculate, we first take a copy of the _child_nodes (above), as
        # the set may change during calculation...
        self._child_nodes_for_this_calculation_cycle = set()
        # The number of parent nodes which have caused this node to calculate during
        # one calculation cycle...
        self._invalid_count = 0
        # True if this node is marked for calculation in the next cycle...
        self._needs_calculation = True
        # The set of parent nodes that caused this node to calculate in
        # the current calculation cycle...
        self._updated_parent_nodes = set()
        # Garbage collection...
        self._gc_type = GraphNode.GCType.COLLECTABLE
        self._gc_ref_count = 0
        # Indicates whether the node has calculated in the most recent
        # calculation cycle.
        # Note: This flag is only valid if the graph-manager's use_has_calculated_flags
        # property is True.
        self.has_calculated = False
        # We automatically reset dependencies if any of these
        # nodes has updated in the current calculation cycle...
        self._auto_rebuild_nodes = set()
    @staticmethod
    def make_node_id(*args):
        """
        Override this if you need to make the ID of the node yourself.
        This method automatically creates the ID by stringifying the parameters
        which identify the node. If this cannot be done, you should override the
        method and create the ID yourself.
        This static method is called by the add_parent_node "factory" function
        to help find nodes based on their parameters.
        """
        if len(args) == 0:
            node_id = "ID"
        else:
            node_id = "_".join((str(x) for x in args))
        return node_id
    @staticmethod
    def get_type():
        """
        Returns the type (class name) of this node.
        In some fairly rare cases you may want to override this in derived classes.
        You might do this, for example, if you want to 'mock' a node.
        """
        return ""
    def cleanup(self):
        """
        Cleans up the node and calls dispose() on derived classes.
        """
        self.remove_parents()
        self.remove_children()
        self.dispose()
    def dispose(self):
        """
        Should be implemented by derived classes, if there are any non-node
        resources to be cleaned up.
        """
        pass
    def set_dependencies(self):
        """
        Should be implemented by derived classes, if they depend on any parent nodes.
        """
        pass
    def pre_calculate(self):
        """
        Called just before calculation. You may want to implement this if you
        need to do any custom resetting of dependencies.
        """
        # If any of the updated parent nodes is in the auto-rebuild collection,
        # we reset dependencies...
        if len(set.intersection(self._updated_parent_nodes, self._auto_rebuild_nodes)) > 0:
            self.reset_dependencies()
    def calculate_quality(self):
        """
        Called after pre_calculate() and before calculate().
        Merges data quality from parent nodes. You should override this if you
        need to calculate quality in a custom way.
        """
        self.quality.clear_to_good()
        for parent_node in self._parent_nodes:
            self.quality.merge(parent_node.quality)
    def calculate(self):
        """
        Should be implemented by derived classes if they perform any calculations.
        """
        return GraphNode.CalculateChildrenType.CALCULATE_CHILDREN
    def get_info_message(self):
        """
        Should be implemented by derived classes, if you want to provide graph-dump
        information about your node.
        """
        return ""
    def add_parent(self, node):
        """
        Adds a parent node for this node and updates the child node collection
        of the parent
        """
        if node not in self._parent_nodes:
            self._parent_nodes.add(node)
            node._child_nodes.add(self)
    def remove_parent(self, node):
        """
        Removes a parent node for this node and update the child node collection
        of the parent.
        """
        if node not in self._parent_nodes:
            return  # The node passed in is not one of our parent nodes.
        # We remove the parent, and remove us as a child from the parent...
        self._parent_nodes.remove(node)
        node._child_nodes.remove(self)
        # We mark the graph as needing garbage collection, as removing
        # the parent link may leave unreferenced nodes...
        self.graph_manager.link_removed()
    def remove_parents(self):
        """
        Removes all parent nodes for this node, also updates the child collections
        of the parents.
        """
        while len(self._parent_nodes) > 0:
            node = self._parent_nodes.pop()
            node._child_nodes.remove(self)
        # We mark the graph as needing garbage collection, as removing
        # the parents may leave unreferenced nodes...
        self.graph_manager.link_removed()
    def remove_children(self):
        """
        Removes all child nodes for this node, also updates the parent collections
        of the children.
        """
        while len(self._child_nodes) > 0:
            node = self._child_nodes.pop()
            node._parent_nodes.remove(self)
    def has_children(self):
        """
        True if this node has any child nodes.
        """
        return len(self._child_nodes) > 0
    def invalidate(self, parent):
        """
        Marks this node as invalid and, if this is the first invalidation, mark all
        direct child nodes as invalid.
        The parent node that is marking this node as needing calculation is passed
        in, so that nodes can see who triggered them to calculate.
        """
        # We add the parent to the collection of nodes that caused us to
        # recalculate. (If this is one of the 'root' changed nodes for this
        # calculation, the parent will be NULL, so we don't include it.)
        if parent is not None:
            self.add_updated_parent(parent)
        self._invalid_count += 1
        if self._invalid_count == 1:
            # We have just gone invalid.
            # Capture child set, as this may change as a result of calculation, and
            # make recursive call for each node in captured child set
            self._child_nodes_for_this_calculation_cycle = self._child_nodes.copy()
            for node in self._child_nodes_for_this_calculation_cycle:
                node.invalidate(self)
    def validate(self):
        """
        Called when one of the parent nodes has been calculated. We decrease the
        invalidation count and if it has gone to zero, then all parents have been
        calculated and we can calculate this node.
        We then notify child nodes that they need to be calculated (by calling
        validate on them).
        """
        if self._invalid_count <= 0:
            # Something has gone badly wrong in invalidate/validate...
            raise GraphException(self.node_id + ": Invalidation count is unexpectedly non-positive")
        self._invalid_count -= 1
        if self._invalid_count == 0:
            # All our parents are now valid, so we calculate our
            # output value if necessary...
            calculate_children = GraphNode.CalculateChildrenType.DO_NOT_CALCULATE_CHILDREN
            if self._needs_calculation is True:
                # We call pre-calculate. (This allows the node to do custom
                # resetting of dependencies.)
                self.pre_calculate()
                # We merge data-quality...
                self.calculate_quality()
                # We do the calculation itself...
                calculate_children = self.calculate()
                self._needs_calculation = False
                self.has_calculated = True
                # We tell the graph-manager that the node has been calculated...
                self.graph_manager.node_calculated(self)
            # We calculate our child nodes...
            for child_node in self._child_nodes_for_this_calculation_cycle:
                # If this node's value has changed, force the _needs_calculation
                # flag in the child node...
                if calculate_children == GraphNode.CalculateChildrenType.CALCULATE_CHILDREN:
                    child_node._needs_calculation = True
                # We tell the child node that this parent has calculated...
                child_node.validate()
    def reset_dependencies(self):
        """
        Asks node to recreate its dependencies on other nodes and data objects.
        """
        # We clear the collection of nodes that cause an auto-reset.
        # (It will be repopulated when the new dependencies are set up.)
        self._auto_rebuild_nodes.clear()
        # We need to know if any new parents have been added to this node
        # by this reset-dependencies operation. So we note the collection
        # before and after setting them up...
        parents_before_reset = self._parent_nodes.copy()
        # We remove any existing parents, and add the new ones...
        self.remove_parents()
        self.set_dependencies()
        # We find the collection of nodes that are now parents, but which
        # weren't before, and we tell the graph-manager about them. (This
        # is used to ensure that nodes are correctly calculated if the graph
        # changes shape during the calculation-cycle.)
        new_parents = self._parent_nodes.difference(parents_before_reset)
        self.graph_manager.parents_updated(self, new_parents)
    def parent_updated(self, parent):
        """
        Returns true if the node passed in caused the calculation of the calling
        node in this calculation cycle.
        """
        return parent in self._updated_parent_nodes
    def set_gc_type(self, gc_type):
        """
        Sets whether or not this node can be garbage-collected.
        """
        self._gc_type = gc_type
        self.graph_manager.update_gc_info_for_node(self)
    def needs_calculation(self):
        """
        Marks the node as needing calculation in the next calculation cycle.
        """
        self.graph_manager.needs_calculation(self)
    def add_updated_parent(self, node):
        """
        Adds a node to the collection of parent nodes that have updated for the next
        calculation.
        (See the wiki section about "Handling graph shape-changes during calculation"
        for more details.)
        """
        self._updated_parent_nodes.add(node)
        self.graph_manager.node_has_updated_parents(self)
    def clear_updated_parents(self):
        """
        Clears out the collection of updated parents.
        """
        self._updated_parent_nodes.clear()
    def add_gc_ref_count(self):
        """
        Increases the GC ref-count.
        """
        self._gc_ref_count += 1
    def release_gc_ref_count(self):
        """
        Decreases the GC ref count.
        """
        self._gc_ref_count -= 1
    def get_gc_ref_count(self):
        """
        Return the GC ref count.
        """
        return self._gc_ref_count
    def add_parent_node(self, node_type, *args, **kwargs):
        """
        Adds a parent node of the type passed in for the identity parameters
        supplied.
        kwargs can include:
        auto_rebuild = True / False (defaults to False if not supplied)
        """
        # We find the node...
        node = NodeFactory.get_node(
            self.graph_manager,
            GraphNode.GCType.COLLECTABLE,
            node_type,
            *args,
            **kwargs)
        self.add_parent(node)
        # If the optional auto_rebuild flag is set, we will automatically reset
        # dependencies if this node has updated in a calculation cycle...
        auto_rebuild = kwargs["auto_rebuild"] if "auto_rebuild" in kwargs else False
        if auto_rebuild is True:
            self._auto_rebuild_nodes.add(node)
        return node
| richard-shepherd/calculation_graph | graph/graph_node.py | Python | mit | 13,788 |
# coding=UTF-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
#import tornado
from tornado import ioloop , web , httpserver , websocket , options
#import handler function
import handler
import os
#set server settings
# Tornado application settings; template/static paths are resolved relative
# to this file.
server_settings = {
    "static_path": os.path.join(os.path.dirname(__file__), "static"),
    "xsrf_cookies": True,
    "autoreload": True,
    #"login_url": "/accounts/login",
    "debug":True,
    "template_path":os.path.join(os.path.dirname(__file__),"templates"),
    }
#the handlers list
handlers=[
    (r"/?",handler.MainHandler),
    (r"/upload",handler.WavFileHandler)
]
options.define("port", default=8080, help="the application will be run on the given port", type=int)
if __name__ == "__main__":
    # Parse --port (and other CLI flags), bind the HTTP server and run
    # the IO loop until interrupted.
    options.parse_command_line()
    app_server = httpserver.HTTPServer(web.Application(handlers,**server_settings))
    app_server.listen(options.options.port)
    ioloop.IOLoop.current().start()
ioloop.IOLoop.current().start() | ken1277725/pythonweb-STT | server/server.py | Python | mit | 1,001 |
#!/bin/python3
import sys
# HackerRank "Kangaroo": starting at x1/x2 with jump sizes v1/v2, do the
# kangaroos ever land on the same spot after the same number of jumps?
x1, v1, x2, v2 = map(int, input().strip().split(' '))
# They meet after n jumps iff n = (x1 - x2) / (v2 - v1) is a non-negative
# integer.  Python's % and // share the divisor's sign, so the exactness
# and the sign check together are valid for either ordering of speeds.
willLand = (
    v1 != v2
    and (x1 - x2) % (v2 - v1) == 0
    and (x1 - x2) // (v2 - v1) >= 0)
print(('NO', 'YES')[willLand])
| lilsweetcaligula/Online-Judges | hackerrank/algorithms/implementation/easy/kangaroo/py/solution.py | Python | mit | 215 |
import os
import textwrap
import glob
from os.path import join, curdir, pardir
import pytest
from pip.utils import rmtree
from tests.lib import pyversion
from tests.lib.local_repos import local_checkout
from tests.lib.path import Path
@pytest.mark.network
def test_without_setuptools(script):
    """
    Installing from an sdist without setuptools must fail with a clear
    error message.
    """
    script.run("pip", "uninstall", "setuptools", "-y")
    result = script.run(
        "python", "-c",
        "import pip; pip.main(['install', 'INITools==0.2', '--no-use-wheel'])",
        expect_error=True,
    )
    assert (
        "setuptools must be installed to install from a source distribution"
        in result.stdout
    )
@pytest.mark.network
def test_pip_second_command_line_interface_works(script):
    """
    Check if ``pip<PYVERSION>`` commands behaves equally
    """
    args = ['pip%s' % pyversion]
    args.extend(['install', 'INITools==0.2'])
    result = script.run(*args)
    # Both the egg-info metadata and the package directory must appear.
    egg_info_folder = (
        script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
    )
    initools_folder = script.site_packages / 'initools'
    assert egg_info_folder in result.files_created, str(result)
    assert initools_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_from_pypi(script):
    """
    Test installing a package from PyPI.
    """
    result = script.pip('install', '-vvv', 'INITools==0.2')
    egg_info_folder = (
        script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
    )
    initools_folder = script.site_packages / 'initools'
    assert egg_info_folder in result.files_created, str(result)
    assert initools_folder in result.files_created, str(result)
def test_editable_install(script):
    """
    Test editable installation.
    """
    # A bare requirement is not a valid editable target; pip must refuse
    # and create/update nothing.
    result = script.pip('install', '-e', 'INITools==0.2', expect_error=True)
    assert (
        "INITools==0.2 should either be a path to a local project or a VCS url"
        in result.stdout
    )
    assert not result.files_created
    assert not result.files_updated
@pytest.mark.network
def test_install_editable_from_svn(script, tmpdir):
    """
    Test checking out from svn.
    """
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache")
        )
    )
    # An svn editable install keeps its working-copy metadata.
    result.assert_installed('INITools', with_files=['.svn'])
@pytest.mark.network
def test_download_editable_to_custom_path(script, tmpdir):
    """
    Test downloading an editable using a relative custom src folder.
    """
    script.scratch_path.join("customdl").mkdir()
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache")
        ),
        '--src',
        'customsrc',
        '--download',
        'customdl',
    )
    # The checkout must land under the custom --src directory...
    customsrc = Path('scratch') / 'customsrc' / 'initools'
    assert customsrc in result.files_created, (
        sorted(result.files_created.keys())
    )
    assert customsrc / 'setup.py' in result.files_created, (
        sorted(result.files_created.keys())
    )
    # ...and the --download directory must receive at least one file.
    customdl = Path('scratch') / 'customdl' / 'initools'
    customdl_files_created = [
        filename for filename in result.files_created
        if filename.startswith(customdl)
    ]
    assert customdl_files_created
@pytest.mark.network
def test_editable_no_install_followed_by_no_download(script, tmpdir):
    """
    Test installing an editable in two steps (first with --no-install, then
    with --no-download).
    """
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache"),
        ),
        '--no-install',
        expect_error=True,
    )
    # Step 1: checkout exists but no egg-link was created yet.
    result.assert_installed(
        'INITools', without_egg_link=True, with_files=['.svn'],
    )
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache"),
        ),
        '--no-download',
        expect_error=True,
    )
    # Step 2: install completes from the existing checkout.
    result.assert_installed('INITools', without_files=[curdir, '.svn'])
@pytest.mark.network
def test_no_install_followed_by_no_download(script):
    """
    Test installing in two steps (first with --no-install, then with
    --no-download).
    """
    egg_info_folder = (
        script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
    )
    initools_folder = script.site_packages / 'initools'
    build_dir = script.venv / 'build' / 'INITools'
    # Step 1 (--no-install): only the build directory is populated.
    result1 = script.pip(
        'install', 'INITools==0.2', '--no-install', expect_error=True,
    )
    assert egg_info_folder not in result1.files_created, str(result1)
    assert initools_folder not in result1.files_created, (
        sorted(result1.files_created)
    )
    assert build_dir in result1.files_created, result1.files_created
    assert build_dir / 'INITools.egg-info' in result1.files_created
    # Step 2 (--no-download): the package installs from the build dir.
    result2 = script.pip(
        'install', 'INITools==0.2', '--no-download', expect_error=True,
    )
    assert egg_info_folder in result2.files_created, str(result2)
    assert initools_folder in result2.files_created, (
        sorted(result2.files_created)
    )
    assert build_dir not in result2.files_created
    assert build_dir / 'INITools.egg-info' not in result2.files_created
def test_bad_install_with_no_download(script):
    """
    Test that --no-download behaves sensibly if the package source can't be
    found.
    """
    # With no prior --no-install run there is no build directory, so pip
    # should fail with a hint rather than an obscure error.
    result = script.pip(
        'install', 'INITools==0.2', '--no-download', expect_error=True,
    )
    assert (
        "perhaps --no-download was used without first running "
        "an equivalent install with --no-install?" in result.stdout
    )
@pytest.mark.network
def test_install_dev_version_from_pypi(script):
    """
    Test installing a development version using the ``INITools===dev``
    (arbitrary equality) specifier from PyPI.
    """
    result = script.pip(
        'install', 'INITools===dev',
        '--allow-external', 'INITools',
        '--allow-unverified', 'INITools',
        expect_error=True,
    )
    assert (script.site_packages / 'initools') in result.files_created, (
        str(result.stdout)
    )
@pytest.mark.network
def test_install_editable_from_git(script, tmpdir):
    """
    Test installing an editable package cloned from Git.

    The checkout must be installed in editable mode and keep its ``.git``
    metadata directory.
    """
    # Pass arguments directly instead of building an args list and
    # splatting a literal kwargs dict (`**{"expect_error": True}`), which
    # obscured a plain keyword argument.
    result = script.pip(
        'install', '-e',
        '%s#egg=pip-test-package' %
        local_checkout(
            'git+http://github.com/pypa/pip-test-package.git',
            tmpdir.join("cache"),
        ),
        expect_error=True,
    )
    result.assert_installed('pip-test-package', with_files=['.git'])
@pytest.mark.network
def test_install_editable_from_hg(script, tmpdir):
    """
    Test cloning from Mercurial.
    """
    result = script.pip(
        'install', '-e',
        '%s#egg=ScriptTest' %
        local_checkout(
            'hg+https://bitbucket.org/ianb/scripttest',
            tmpdir.join("cache"),
        ),
        expect_error=True,
    )
    # The editable install must retain the .hg metadata directory.
    result.assert_installed('ScriptTest', with_files=['.hg'])
@pytest.mark.network
def test_vcs_url_final_slash_normalization(script, tmpdir):
    """
    Test that presence or absence of final slash in VCS URL is normalized.
    """
    # Note the trailing slash before the fragment; the install should
    # succeed exactly as it would without it.
    script.pip(
        'install', '-e',
        '%s/#egg=ScriptTest' %
        local_checkout(
            'hg+https://bitbucket.org/ianb/scripttest',
            tmpdir.join("cache"),
        ),
    )
@pytest.mark.network
def test_install_editable_from_bazaar(script, tmpdir):
    """
    Test checking out from Bazaar.
    """
    # @174 pins a specific Bazaar revision of the release-0.1 branch.
    result = script.pip(
        'install', '-e',
        '%s/@174#egg=django-wikiapp' %
        local_checkout(
            'bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp'
            '/release-0.1',
            tmpdir.join("cache"),
        ),
        expect_error=True,
    )
    result.assert_installed('django-wikiapp', with_files=['.bzr'])
@pytest.mark.network
def test_vcs_url_urlquote_normalization(script, tmpdir):
    """
    Test that urlquoted characters are normalized for repo URL comparison.
    """
    # %7E is an urlquoted '~'; the install must treat both forms the same.
    script.pip(
        'install', '-e',
        '%s/#egg=django-wikiapp' %
        local_checkout(
            'bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp'
            '/release-0.1',
            tmpdir.join("cache"),
        ),
    )
def test_install_from_local_directory(script, data):
    """
    Test installing from a local directory.
    """
    to_install = data.packages.join("FSPkg")
    result = script.pip('install', to_install, expect_error=False)
    # Both the package directory and its egg-info must land in
    # site-packages.
    fspkg_folder = script.site_packages / 'fspkg'
    egg_info_folder = (
        script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
    )
    assert fspkg_folder in result.files_created, str(result.stdout)
    assert egg_info_folder in result.files_created, str(result)
def test_install_from_local_directory_with_symlinks_to_directories(
        script, data):
    """
    Test installing from a local directory containing symlinks to directories.
    """
    to_install = data.packages.join("symlinks")
    result = script.pip('install', to_install, expect_error=False)
    # Symlinked directories in the source tree must not break the install.
    pkg_folder = script.site_packages / 'symlinks'
    egg_info_folder = (
        script.site_packages / 'symlinks-0.1.dev0-py%s.egg-info' % pyversion
    )
    assert pkg_folder in result.files_created, str(result.stdout)
    assert egg_info_folder in result.files_created, str(result)
def test_install_from_local_directory_with_no_setup_py(script, data):
    """
    Test installing from a local directory with no 'setup.py'.
    """
    # data.root has no setup.py, so the install must fail cleanly,
    # creating no files and emitting a clear diagnostic.
    result = script.pip('install', data.root, expect_error=True)
    assert not result.files_created
    assert "is not installable. File 'setup.py' not found." in result.stdout
def test_editable_install_from_local_directory_with_no_setup_py(script, data):
    """
    Test installing in editable mode from a local directory with no
    'setup.py'.
    """
    # Same as the non-editable case: fail cleanly with no files created.
    result = script.pip('install', '-e', data.root, expect_error=True)
    assert not result.files_created
    assert "is not installable. File 'setup.py' not found." in result.stdout
def test_install_as_egg(script, data):
    """
    Test installing as egg, instead of flat install.
    """
    to_install = data.packages.join("FSPkg")
    result = script.pip('install', to_install, '--egg', expect_error=False)
    # --egg installs into an .egg directory; the flat package directory
    # must NOT appear directly in site-packages.
    fspkg_folder = script.site_packages / 'fspkg'
    egg_folder = script.site_packages / 'FSPkg-0.1.dev0-py%s.egg' % pyversion
    assert fspkg_folder not in result.files_created, str(result.stdout)
    assert egg_folder in result.files_created, str(result)
    assert join(egg_folder, 'fspkg') in result.files_created, str(result)
def test_install_curdir(script, data):
    """
    Test installing current directory ('.').
    """
    run_from = data.packages.join("FSPkg")
    # Python 2.4 Windows balks if this exists already
    egg_info = join(run_from, "FSPkg.egg-info")
    if os.path.isdir(egg_info):
        rmtree(egg_info)
    # Run pip from inside the package directory and install '.'.
    result = script.pip('install', curdir, cwd=run_from, expect_error=False)
    fspkg_folder = script.site_packages / 'fspkg'
    egg_info_folder = (
        script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
    )
    assert fspkg_folder in result.files_created, str(result.stdout)
    assert egg_info_folder in result.files_created, str(result)
def test_install_pardir(script, data):
    """
    Test installing parent directory ('..').
    """
    # Run from the package's inner directory so '..' is the project root.
    run_from = data.packages.join("FSPkg", "fspkg")
    result = script.pip('install', pardir, cwd=run_from, expect_error=False)
    fspkg_folder = script.site_packages / 'fspkg'
    egg_info_folder = (
        script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
    )
    assert fspkg_folder in result.files_created, str(result.stdout)
    assert egg_info_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_global_option(script):
    """
    Test using global distutils options.
    (In particular those that disable the actual install action)
    """
    # --global-option=--version makes setup.py print its version and
    # exit, so nothing is actually installed.
    result = script.pip(
        'install', '--global-option=--version', "INITools==0.1",
    )
    assert '0.1\n' in result.stdout
def test_install_with_pax_header(script, data):
    """
    Test installing from a tarball with a pax header for python<2.6.

    Succeeding without error is the assertion here.
    """
    script.pip('install', 'paxpkg.tar.bz2', cwd=data.packages)
def test_install_with_hacked_egg_info(script, data):
    """
    Test installing a package which defines its own egg_info class.
    """
    run_from = data.packages.join("HackedEggInfo")
    result = script.pip('install', '.', cwd=run_from)
    assert 'Successfully installed hackedegginfo-0.0.0\n' in result.stdout
@pytest.mark.network
def test_install_using_install_option_and_editable(script, tmpdir):
    """
    Test installing a tool using -e and --install-option.
    """
    folder = 'script_folder'
    script.scratch_path.join(folder).mkdir()
    url = 'git+git://github.com/pypa/pip-test-package'
    # --script-dir redirects the generated console script into `folder`.
    result = script.pip(
        'install', '-e', '%s#egg=pip-test-package' %
        local_checkout(url, tmpdir.join("cache")),
        '--install-option=--script-dir=%s' % folder
    )
    script_file = (
        script.venv / 'src' / 'pip-test-package' /
        folder / 'pip-test-package' + script.exe
    )
    assert script_file in result.files_created
@pytest.mark.network
def test_install_global_option_using_editable(script, tmpdir):
    """
    Test using global distutils options, but in an editable installation.
    """
    url = 'hg+http://bitbucket.org/runeh/anyjson'
    result = script.pip(
        'install', '--global-option=--version', '-e',
        '%s@0.2.5#egg=anyjson' % local_checkout(url, tmpdir.join("cache"))
    )
    assert 'Successfully installed anyjson' in result.stdout
@pytest.mark.network
def test_install_package_with_same_name_in_curdir(script):
    """
    Test installing a package with the same name of a local folder.
    """
    # The local folder "mock==0.6" must not shadow the PyPI requirement.
    script.scratch_path.join("mock==0.6").mkdir()
    result = script.pip('install', 'mock==0.6')
    egg_folder = script.site_packages / 'mock-0.6.0-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
mock100_setup_py = textwrap.dedent('''\
from setuptools import setup
setup(name='mock',
version='100.1')''')
def test_install_folder_using_dot_slash(script):
    """
    Test installing a folder using pip install ./foldername.
    """
    script.scratch_path.join("mock").mkdir()
    pkg_path = script.scratch_path / 'mock'
    pkg_path.join("setup.py").write(mock100_setup_py)
    result = script.pip('install', './mock')
    egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
def test_install_folder_using_slash_in_the_end(script):
    r"""
    Test installing a folder using pip install foldername/ or foldername\
    """
    script.scratch_path.join("mock").mkdir()
    pkg_path = script.scratch_path / 'mock'
    pkg_path.join("setup.py").write(mock100_setup_py)
    # os.path.sep makes this exercise '/' on POSIX and '\' on Windows.
    result = script.pip('install', 'mock' + os.path.sep)
    egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
def test_install_folder_using_relative_path(script):
    """
    Test installing a folder using pip install folder1/folder2.
    """
    script.scratch_path.join("initools").mkdir()
    script.scratch_path.join("initools", "mock").mkdir()
    pkg_path = script.scratch_path / 'initools' / 'mock'
    pkg_path.join("setup.py").write(mock100_setup_py)
    result = script.pip('install', Path('initools') / 'mock')
    egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_package_which_contains_dev_in_name(script):
    """
    Test installing package from pypi which contains 'dev' in name.

    'dev' in the project name must not be mistaken for a dev version.
    """
    result = script.pip('install', 'django-devserver==0.0.4')
    devserver_folder = script.site_packages / 'devserver'
    egg_info_folder = (
        script.site_packages / 'django_devserver-0.0.4-py%s.egg-info' %
        pyversion
    )
    assert devserver_folder in result.files_created, str(result.stdout)
    assert egg_info_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_package_with_target(script):
    """
    Test installing a package using pip install --target, covering
    repeated installs, --upgrade, and a single-module package.
    """
    target_dir = script.scratch_path / 'target'
    result = script.pip('install', '-t', target_dir, "initools==0.1")
    assert Path('scratch') / 'target' / 'initools' in result.files_created, (
        str(result)
    )
    # Test repeated call without --upgrade, no files should have changed
    result = script.pip('install', '-t', target_dir, "initools==0.1")
    assert not Path('scratch') / 'target' / 'initools' in result.files_updated
    # Test upgrade call, check that new version is installed
    result = script.pip('install', '--upgrade', '-t',
                        target_dir, "initools==0.2")
    assert Path('scratch') / 'target' / 'initools' in result.files_updated, (
        str(result)
    )
    egg_folder = (
        Path('scratch') / 'target' / 'INITools-0.2-py%s.egg-info' % pyversion)
    assert egg_folder in result.files_created, (
        str(result)
    )
    # Test install and upgrade of single-module package
    result = script.pip('install', '-t', target_dir, 'six')
    assert Path('scratch') / 'target' / 'six.py' in result.files_created, (
        str(result)
    )
    result = script.pip('install', '-t', target_dir, '--upgrade', 'six')
    assert Path('scratch') / 'target' / 'six.py' in result.files_updated, (
        str(result)
    )
def test_install_package_with_root(script, data):
    """
    Test installing a package using pip install --root.
    """
    root_dir = script.scratch_path / 'root'
    result = script.pip(
        'install', '--root', root_dir, '-f', data.find_links, '--no-index',
        'simple==1.0',
    )
    normal_install_path = (
        script.base_path / script.site_packages / 'simple-1.0-py%s.egg-info' %
        pyversion
    )
    # use distutils to change the root exactly how the --root option does it
    from distutils.util import change_root
    root_path = change_root(
        os.path.join(script.scratch, 'root'),
        normal_install_path
    )
    assert root_path in result.files_created, str(result)
# skip on win/py3 for now, see issue #782
@pytest.mark.skipif("sys.platform == 'win32' and sys.version_info >= (3,)")
def test_install_package_that_emits_unicode(script, data):
    """
    Install a package with a setup.py that emits UTF-8 output and then fails.
    Refs https://github.com/pypa/pip/issues/326
    """
    to_install = data.packages.join("BrokenEmitsUTF8")
    result = script.pip(
        'install', to_install, expect_error=True, expect_temp=True, quiet=True,
    )
    # pip must surface the package's own error, not choke decoding its
    # UTF-8 output.
    assert (
        'FakeError: this package designed to fail on install' in result.stdout
    )
    assert 'UnicodeDecodeError' not in result.stdout
def test_install_package_with_utf8_setup(script, data):
    """Install a package with a setup.py that declares a utf-8 encoding."""
    to_install = data.packages.join("SetupPyUTF8")
    script.pip('install', to_install)
def test_install_package_with_latin1_setup(script, data):
    """Install a package with a setup.py that declares a latin-1 encoding."""
    to_install = data.packages.join("SetupPyLatin1")
    script.pip('install', to_install)
def test_url_req_case_mismatch_no_index(script, data):
    """
    tar ball url requirements (with no egg fragment), that happen to have upper
    case project names, should be considered equal to later requirements that
    reference the project name using lower case.
    tests/packages contains Upper-1.0.tar.gz and Upper-2.0.tar.gz
    'requiresupper' has install_requires = ['upper']
    """
    Upper = os.path.join(data.find_links, 'Upper-1.0.tar.gz')
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, Upper, 'requiresupper'
    )
    # only Upper-1.0.tar.gz should get installed; the 'upper' dependency
    # of requiresupper must be satisfied by it, not by Upper-2.0.
    egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
    egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion
    assert egg_folder not in result.files_created, str(result)
def test_url_req_case_mismatch_file_index(script, data):
    """
    tar ball url requirements (with no egg fragment), that happen to have upper
    case project names, should be considered equal to later requirements that
    reference the project name using lower case.
    tests/packages3 contains Dinner-1.0.tar.gz and Dinner-2.0.tar.gz
    'requiredinner' has install_requires = ['dinner']
    This test is similar to test_url_req_case_mismatch_no_index; that test
    tests behaviour when using "--no-index -f", while this one does the same
    test when using "--index-url". Unfortunately this requires a different
    set of packages as it requires a prepared index.html file and
    subdirectory-per-package structure.
    """
    Dinner = os.path.join(data.find_links3, 'Dinner', 'Dinner-1.0.tar.gz')
    result = script.pip(
        'install', '--index-url', data.find_links3, Dinner, 'requiredinner'
    )
    # only Dinner-1.0.tar.gz should get installed.
    egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
    egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion
    assert egg_folder not in result.files_created, str(result)
def test_url_incorrect_case_no_index(script, data):
    """
    Same as test_url_req_case_mismatch_no_index, except testing for the case
    where the incorrect case is given in the name of the package to install
    rather than in a requirements file.
    """
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, "upper",
    )
    # only Upper-2.0.tar.gz (the latest version) should get installed.
    egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion
    assert egg_folder not in result.files_created, str(result)
    egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
def test_url_incorrect_case_file_index(script, data):
    """
    Same as test_url_req_case_mismatch_file_index, except testing for the case
    where the incorrect case is given in the name of the package to install
    rather than in a requirements file.
    """
    result = script.pip(
        'install', '--index-url', data.find_links3, "dinner",
    )
    # only Dinner-2.0.tar.gz (the latest version) should get installed.
    egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion
    assert egg_folder not in result.files_created, str(result)
    egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion
    assert egg_folder in result.files_created, str(result)
@pytest.mark.network
def test_compiles_pyc(script):
    """
    Test installing with --compile on: bytecode files must be produced.
    """
    del script.environ["PYTHONDONTWRITEBYTECODE"]
    script.pip("install", "--compile", "--no-use-wheel", "INITools==0.2")
    # There are many locations for the __init__.pyc file so attempt to find
    # any of them
    exists = [
        os.path.exists(script.site_packages_path / "initools/__init__.pyc"),
    ]
    exists += glob.glob(
        script.site_packages_path / "initools/__pycache__/__init__*.pyc"
    )
    assert any(exists)
@pytest.mark.network
def test_no_compiles_pyc(script, data):
    """
    Test installing with --no-compile: no bytecode files may be produced.
    """
    del script.environ["PYTHONDONTWRITEBYTECODE"]
    script.pip("install", "--no-compile", "--no-use-wheel", "INITools==0.2")
    # There are many locations for the __init__.pyc file so attempt to find
    # any of them
    exists = [
        os.path.exists(script.site_packages_path / "initools/__init__.pyc"),
    ]
    exists += glob.glob(
        script.site_packages_path / "initools/__pycache__/__init__*.pyc"
    )
    assert not any(exists)
| pjdelport/pip | tests/functional/test_install.py | Python | mit | 24,544 |
from django.db import models
from django.contrib.auth.models import User, Group
import uuid
import davvy
import davvy.exceptions
from lxml import etree
# Create your models here.
class Resource(models.Model):
    """A WebDAV resource (file or collection) owned by a user.

    Properties (DAV props) are stored in the related :class:`Prop` rows;
    custom getters/setters registered in ``davvy.props_get`` /
    ``davvy.props_set`` take precedence over stored values.
    """

    def generate_uuid():
        # Default callable for the ``uuid`` field below; evaluated once
        # per new row.
        return str(uuid.uuid4())

    user = models.ForeignKey(User)
    groups = models.ManyToManyField(Group, null=True, blank=True)
    parent = models.ForeignKey('Resource', null=True, blank=True)
    name = models.CharField(max_length=255)
    collection = models.BooleanField(default=False)
    uuid = models.CharField(max_length=36, default=generate_uuid)
    content_type = models.CharField(max_length=255, blank=True, null=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    size = models.BigIntegerField(default=0)
    protected = models.BooleanField(default=False)

    # pretty ugly, but should help viewing the full names
    def __unicode__(self):
        """Return the resource's full path, walking up the parent chain."""
        parts = []
        parent = self.parent
        while True:
            if not parent:
                break
            parts.insert(0, Resource.objects.get(pk=parent.id).name)
            parent = parent.parent
        parts.append(self.name)
        return '/' + '/'.join(parts)

    def del_prop(self, dav, request, name):
        """Delete the stored property ``name`` if it exists."""
        try:
            model_prop = self.prop_set.get(name=name)
            model_prop.delete()
        except Prop.DoesNotExist:
            # removing a non existent property is not an error
            pass

    def get_prop(self, dav, request, name):
        """Return the value of property ``name``.

        Registered getters win over stored values; a registered getter
        returning ``None`` means the property is forbidden. Raises
        davvy.exceptions.NotFound for unknown stored properties.
        """
        if name in davvy.props_get:
            value = davvy.props_get[name](dav, request, self)
            if value is not None:
                return value
            raise davvy.exceptions.Forbidden()
        try:
            model_prop = self.prop_set.get(name=name)
            if model_prop.is_xml:
                return etree.fromstring(model_prop.value)
            return model_prop.value
        except Prop.DoesNotExist:
            raise davvy.exceptions.NotFound()

    def set_prop(self, dav, request, name, value):
        """Set property ``name`` from an lxml element and return its value.

        A registered setter may veto by returning an Exception instance.
        Child elements are serialized as XML; otherwise the text content
        is stored verbatim.
        """
        if name in davvy.props_set:
            e = davvy.props_set[name](dav, request, self, value)
            if isinstance(e, Exception):
                raise e
        else:
            try:
                prop = self.prop_set.get(name=name)
            except Prop.DoesNotExist:
                prop = self.prop_set.create(name=name)
            if len(value):
                prop.value = '\n'.join(
                    [etree.tostring(children, pretty_print=True)
                     for children
                     in value]
                )
                prop.is_xml = True
            elif value.text is not None:
                prop.value = value.text
                prop.is_xml = False
            prop.save()
        return self.get_prop(dav, request, name)

    @property
    def displayname(self):
        """Return the stored ``{DAV:}displayname`` prop, or '' if unset."""
        try:
            prop = self.prop_set.get(name='{DAV:}displayname')
            return prop.value
        # was a bare ``except:`` that silently swallowed every error;
        # only a missing prop is an expected condition here
        except Prop.DoesNotExist:
            return ''

    @property
    def progenitor(self):
        """Return the topmost ancestor, or None if self is a root."""
        parent = self.parent
        while parent and parent.parent:
            parent = parent.parent
        return parent

    def properties(self, dav, request, requested_props):
        """Return [(prop, value, status), ...] for a PROPFIND response."""
        propstat = []
        for prop in requested_props:
            try:
                value = self.get_prop(dav, request, prop)
                status = '200 OK'
            except Exception as e:
                value = None
                if hasattr(e, 'status'):
                    status = e.status
                else:
                    status = '500 Internal Server Error'
            propstat.append((prop, ) + (value, status))
        return propstat

    class Meta:
        unique_together = ('user', 'parent', 'name')
class Prop(models.Model):
    """A single DAV property attached to a Resource.

    ``is_xml`` marks values that are serialized XML fragments rather
    than plain text.
    """
    resource = models.ForeignKey(Resource)
    name = models.CharField(max_length=255)
    value = models.TextField(blank=True, null=True)
    is_xml = models.BooleanField(default=False)

    def __unicode__(self):
        return self.name

    class Meta:
        # one value per (resource, property-name) pair
        unique_together = ('resource', 'name')
| unbit/davvy | davvy/models.py | Python | mit | 4,165 |
from django import forms
from Crate.models import Discussion
class ReportForm(forms.Form):
    """Plain form for submitting a free-text report."""
    # Rendered as a textarea; capped at 500 characters.
    report = forms.CharField(label="Enter your report", max_length=500, widget=forms.Textarea)
class DiscussionForm(forms.ModelForm):
    """ModelForm over Discussion exposing only the comment field."""
    class Meta:
        model = Discussion
        fields = ['comment']
| KamLii/Databaes | Crate/forms.py | Python | mit | 302 |
from flask import Blueprint, render_template, redirect, url_for, current_app
# Blueprint serving the monitoring UI pages and their static assets.
monitoring_main = Blueprint('monitoring_main', __name__, # pylint: disable=invalid-name
                            template_folder='templates',
                            static_url_path='/static',
                            static_folder='static')
@monitoring_main.context_processor
def inject_data():
    """Expose the dashboard list and Uchiwa URL to every template."""
    data = {
        'dashboards': current_app.config['monitoring']['dashboards'],
        'uchiwa_url': current_app.config['monitoring']['uchiwa_url'],
    }
    return data
@monitoring_main.route('/')
def index():
    """Redirect the blueprint root to the events listing."""
    return redirect(url_for('monitoring_main.events'))
@monitoring_main.route('/events')
def events():
    """Render the events overview page."""
    return render_template('events.html', title='Events')
@monitoring_main.route('/checks')
def checks():
    """Render the checks overview page."""
    return render_template('checks.html', title='Checks')
@monitoring_main.route('/clients')
def clients():
    """Render the clients overview page."""
    return render_template('clients.html', title='Clients')
@monitoring_main.route('/clients/<zone>/<client_name>')
def client(zone, client_name):
    """Render the detail page for one client in a zone."""
    return render_template('client_details.html', zone=zone,
                           client=client_name, title='Client Details')
@monitoring_main.route('/clients/<zone>/<client_name>/events/<check>')
def client_event(zone, client_name, check):
    """Render the detail page for one event (client + check) in a zone."""
    return render_template('client_event_details.html', zone=zone,
                           client=client_name, check=check, title='Event Details')
| derekmoyes/opsy | opsy/plugins/monitoring/main.py | Python | mit | 1,477 |
'''
Created on 20/02/2009
@author: Chuidiang
Ejemplo de cliente de socket.
Establece conexion con el servidor, envia "hola", recibe y escribe la
respuesta, espera 2 segundos, envia "adios", recibe y escribe la respuesta
y cierrra la conexion
'''
import socket
#import time
if __name__ == '__main__':
# Se establece la conexion
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("127.0.0.1", 8000))
cond = True
while cond:
#s.send(str(665))
#a=s.recv(1000)
#print a
b = raw_input(">. ")
#print type(b)
if (b != ''):
b = int(b)
if (b <= 255) & (b >= 0):
s.send(str(b))
elif (b == 666):
cond = False
s.send(str(b))
s.close()
print "nojoda"
s.close()
elif (b == 665):
s.send(str(b))
a = s.recv(1000)
print a
else:
print "el rango debe ser entre 0 y 255, 666 es para cerrar"
# Se envia "hola"
#s.send("hola")
# Se recibe la respuesta y se escribe en pantalla
#datos = s.recv(1000)
#print datos
#
## Espera de 2 segundos
#time.sleep(2)
#
## Se envia "adios"
#s.send("adios")
#
## Se espera respuesta, se escribe en pantalla y se cierra la
## conexion
#datos = s.recv(1000)
#print datos
#s.close()
| oroxo/LPPDP | cliente.py | Python | mit | 1,465 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitConnectionsOperations(object):
"""ExpressRouteCircuitConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Store the pipeline client, configuration, and (de)serializers
        # injected by the generated service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        circuit_name,  # type: str
        peering_name,  # type: str
        connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Issue the initial DELETE request for :meth:`begin_delete`.

        Accepts 200/202/204; long-running polling is handled by the caller.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'}  # type: ignore
    def begin_delete(
        self,
        resource_group_name,  # type: str
        circuit_name,  # type: str
        peering_name,  # type: str
        connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified Express Route Circuit Connection from the specified express route
        circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
        :type peering_name: str
        :param connection_name: The name of the express route circuit connection.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial DELETE when not resuming from a saved token.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                peering_name=peering_name,
                connection_name=connection_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # Service signals LRO completion via the Location header.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        circuit_name,  # type: str
        peering_name,  # type: str
        connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ExpressRouteCircuitConnection"
        """Gets the specified Express Route Circuit Connection from the specified express route circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
        :type peering_name: str
        :param connection_name: The name of the express route circuit connection.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRouteCircuitConnection, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_11_01.models.ExpressRouteCircuitConnection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is a success for a point GET; everything else maps to
        # an azure-core exception via error_map.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name, # type: str
        circuit_name, # type: str
        peering_name, # type: str
        connection_name, # type: str
        express_route_circuit_connection_parameters, # type: "_models.ExpressRouteCircuitConnection"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.ExpressRouteCircuitConnection"
        """Send the initial PUT of the create-or-update long-running operation.

        Internal helper used by :meth:`begin_create_or_update`; it issues a
        single request and deserializes the immediate (200/201) response
        without polling.  Callers should not invoke it directly.

        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitConnection"]
        # Map well-known ARM status codes to typed azure-core exceptions;
        # callers may extend/override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the model into the request body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(express_route_circuit_connection_parameters, 'ExpressRouteCircuitConnection')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 200 = update of an existing connection, 201 = newly created one;
        # both carry the resource body.
        if response.status_code == 200:
            deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name, # type: str
        circuit_name, # type: str
        peering_name, # type: str
        connection_name, # type: str
        express_route_circuit_connection_parameters, # type: "_models.ExpressRouteCircuitConnection"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.ExpressRouteCircuitConnection"]
        """Creates or updates a Express Route Circuit Connection in the specified express route circuits.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the express route circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
        :type peering_name: str
        :param connection_name: The name of the express route circuit connection.
        :type connection_name: str
        :param express_route_circuit_connection_parameters: Parameters supplied to the create or update
         express route circuit connection operation.
        :type express_route_circuit_connection_parameters: ~azure.mgmt.network.v2020_11_01.models.ExpressRouteCircuitConnection
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ExpressRouteCircuitConnection or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_11_01.models.ExpressRouteCircuitConnection]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitConnection"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only send the initial PUT when we are not resuming from a saved
        # continuation token.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                circuit_name=circuit_name,
                peering_name=peering_name,
                connection_name=connection_name,
                express_route_circuit_connection_parameters=express_route_circuit_connection_parameters,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        # Consumed by the initial call; must not be forwarded to the poller.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal polling response into the model type.
            deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # This LRO completes via the Azure-AsyncOperation header.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections/{connectionName}'}  # type: ignore
    def list(
        self,
        resource_group_name, # type: str
        circuit_name, # type: str
        peering_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.ExpressRouteCircuitConnectionListResult"]
        """Gets all global reach connections associated with a private peering in an express route
        circuit.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
        :type peering_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRouteCircuitConnectionListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.ExpressRouteCircuitConnectionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitConnectionListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request: the first page uses the templated URL;
            # subsequent pages follow the server-supplied nextLink verbatim.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
                    'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Split one page into (link to next page, iterator of items).
            deserialized = self._deserialize('ExpressRouteCircuitConnectionListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/connections'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_11_01/operations/_express_route_circuit_connections_operations.py | Python | mit | 24,012 |
import subprocess as sp
import sys

# Compare per-region mean depth reported by `goleft depth` (column 4 of the
# BED file) against the value recomputed directly with `samtools depth`.
# Exits with status 1 on the first region whose means differ by more than 0.5.
goleft_bed = sys.argv[1]
bam = sys.argv[2]

with open(goleft_bed) as bed:
    for line in bed:
        toks = line.rstrip().split("\t")
        # BED coordinates are 0-based half-open; samtools regions are
        # 1-based inclusive, hence the +1 on the start.
        cmd = "samtools depth -a -Q 1 -r '%s:%d-%s' %s | awk '{s+=$3}END{if(NR==0){print 0}else{print s/%d}}'" % (
            toks[0], int(toks[1]) + 1, toks[2], bam, int(toks[2]) - int(toks[1]))
        # decode(): check_output returns bytes on Python 3, and
        # float(bytes) raises TypeError there.
        observed = float(sp.check_output(cmd, shell=True).decode().strip())
        expected = float(toks[3])
        if abs(expected - observed) > 0.5:
            print("ERROR")
            print(observed, expected)
            print(cmd)
            sys.exit(1)
| brentp/goleft | depth/test/cmp.py | Python | mit | 562 |
from __future__ import division
import conformal_blocks.cbbundle as cbd
import cProfile, time, random
#First test
#----------
#
def experiment():
    """
    Computes the rank and divisor of conformal block bundles with random weights.

    Draws ``num_points`` weights uniformly from the level-``level`` alcove of
    type A_``rank`` for each trial; trials whose epsilon coordinates do not sum
    to 0 mod (rank + 1) carry no conformal blocks and are skipped.

    :return: None; results are printed one line per trial.
    """
    rank = 5
    level = 3
    num_points = 10
    tries = 100
    liealg = cbd.TypeALieAlgebra(rank, store_fusion=True, exact=False)
    A_l = liealg.get_weights(level)
    print("Weight", "Rank", "Divisor")
    for _ in range(tries):
        # Throwaway loop variables: the original reused ``i`` for both the
        # trial counter and the comprehension, which leaks/shadows under the
        # Python 2 semantics this file's __future__ import targets.
        weights = [random.choice(A_l) for _ in range(num_points)]
        # Congruence condition for a non-trivial bundle in type A.
        if sum(sum(liealg._convert_funds_to_epsilons(wt)) for wt in weights) % (rank + 1) != 0:
            continue
        cbb = cbd.ConformalBlocksBundle(liealg, weights, level)
        if cbb.get_rank() > 0:
            divisor = cbb.get_symmetrized_divisor()
            print(weights, cbb.get_rank(), divisor)
        else:
            print(weights, cbb.get_rank(), 0)
if __name__ == '__main__':
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # documented replacement for interval timing.
    t0 = time.perf_counter()
    experiment()
    print(time.perf_counter() - t0)
    #cProfile.run('experiment()', sort='cumtime')
"""
The Tornado Framework
By Ali Pesaranghader
University of Ottawa, Ontario, Canada
E-mail: apesaran -at- uottawa -dot- ca / alipsgh -at- gmail -dot- com
---
*** The Page Hinkley (PH) Method Implementation ***
Paper: Page, Ewan S. "Continuous inspection schemes."
Published in: Biometrika 41.1/2 (1954): 100-115.
URL: http://www.jstor.org/stable/2333009
"""
from dictionary.tornado_dictionary import TornadoDic
from drift_detection.detector import SuperDetector
class PH(SuperDetector):
    """The Page Hinkley (PH) drift detection method class.

    Keeps a running mean of the error indicator and a fading cumulative sum
    of deviations from it; drift is signalled once the sum exceeds
    ``lambda_`` (after a minimum number of instances has been seen).
    """

    DETECTOR_NAME = TornadoDic.PH

    def __init__(self, min_instance=30, delta=0.005, lambda_=50, alpha=1 - 0.0001):
        """
        :param min_instance: minimum number of instances before drift may be signalled.
        :param delta: magnitude of change tolerated per instance.
        :param lambda_: drift threshold on the cumulative sum.
        :param alpha: fading (forgetting) factor applied to the cumulative sum.
        """
        super().__init__()

        self.MINIMUM_NUM_INSTANCES = min_instance

        self.m_n = 1
        self.x_mean = 0.0
        self.sum = 0.0
        self.delta = delta
        self.lambda_ = lambda_
        self.alpha = alpha

    def run(self, pr):
        """Feed one prediction result; return (warning_status, drift_status)."""
        # Map the prediction result to an error indicator: 1 = misclassified.
        pr = 1 if pr is False else 0

        warning_status = False
        drift_status = False

        # 1. UPDATING STATS
        self.x_mean = self.x_mean + (pr - self.x_mean) / self.m_n
        self.sum = self.alpha * self.sum + (pr - self.x_mean - self.delta)
        self.m_n += 1

        # 2. UPDATING WARNING AND DRIFT STATUSES
        # PH has no warning zone; warning_status always stays False.
        if self.m_n >= self.MINIMUM_NUM_INSTANCES:
            if self.sum > self.lambda_:
                drift_status = True

        return warning_status, drift_status

    def reset(self):
        """Restore the detector to its initial state (e.g. after a drift)."""
        super().reset()
        self.m_n = 1
        self.x_mean = 0.0
        self.sum = 0.0

    def get_settings(self):
        """Return [compact-id, pretty LaTeX description] of the configuration."""
        # Raw strings: '\d' and '\l' are invalid escape sequences that emit
        # DeprecationWarnings (and will become SyntaxErrors); the raw forms
        # produce byte-identical values to the originals.
        return [str(self.MINIMUM_NUM_INSTANCES) + "." + str(self.delta) + "." +
                str(self.lambda_) + "." + str(self.alpha),
                "$n_{min}$:" + str(self.MINIMUM_NUM_INSTANCES) + ", " +
                r"$\delta$:" + str(self.delta).upper() + ", " +
                r"$\lambda$:" + str(self.lambda_).upper() + ", " +
                r"$\alpha$:" + str(self.alpha).upper()]
| alipsgh/tornado | drift_detection/page_hinkley.py | Python | mit | 2,052 |
import os
from setuptools import setup

# Read the long description up front; the context manager closes the file
# handle promptly (the original leaked it until garbage collection).
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
    README = readme.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='django-tvdb',
    version='0.1',
    packages=['tvdb'],
    include_package_data=True,
    license='The MIT License: http://www.opensource.org/licenses/mit-license.php',
    description='A simple Django app for TV channels DB.',
    long_description=README,
    author='Maksym Sokolsky',
    author_email='misokolsky@gmail.com',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
)
| maxsocl/django-tvdb | setup.py | Python | mit | 1,193 |
# -*- coding:utf-8 -*-
"""
用户消息相关的视图
"""
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import generics
from rest_framework.exceptions import PermissionDenied
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response

from account.models import Message
from account.serializers.message import MessageSerializer
class MessageCreateView(generics.CreateAPIView):
    """API endpoint for creating a user message."""
    queryset = Message.objects.all()
    serializer_class = MessageSerializer
    # Only authenticated users may create messages.
    permission_classes = (IsAuthenticated,)
class MessageListView(generics.ListAPIView):
    """
    Message list API view.

    Users can only see their own, non-deleted messages.
    """
    serializer_class = MessageSerializer
    # Only authenticated users may list messages.
    permission_classes = (IsAuthenticated,)
    # Filtering, search and ordering.  OrderingFilter must be installed here
    # for ``ordering_fields``/``ordering`` to take effect; without it those
    # attributes were silently ignored dead code.
    filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter)
    filter_fields = ('category', 'unread')
    search_fields = ('title', 'content')
    ordering_fields = ('id', 'time_added')
    # Default ordering matches the previous hard-coded ``order_by('-id')``,
    # so responses are unchanged unless the client asks for another order.
    ordering = ('-id',)

    def get_queryset(self):
        """Restrict the queryset to the requesting user's own live messages."""
        user = self.request.user
        return Message.objects.filter(user=user, is_deleted=False).order_by('-id')
class MessageDetailView(generics.RetrieveDestroyAPIView):
    """
    Message detail API view.

    Only the owner may read a message; a superuser is additionally allowed
    through so that the superuser-delete branch in :meth:`delete` works.
    """
    queryset = Message.objects.filter(is_deleted=False)
    serializer_class = MessageSerializer
    # Only authenticated users may access message details.
    permission_classes = (IsAuthenticated,)

    def get_object(self):
        # 1. Get the requesting user.
        user = self.request.user
        # 2. Fetch the object via the parent implementation.
        instance = super().get_object()
        # 3. The original returned None for foreign messages, which made
        #    retrieve()/delete() crash with AttributeError (HTTP 500).
        #    Raising PermissionDenied yields a proper 403 instead.
        if instance.user == user or user.is_superuser:
            return instance
        raise PermissionDenied()

    def retrieve(self, request, *args, **kwargs):
        # 1. Get the object (raises PermissionDenied/Http404 as appropriate).
        instance = self.get_object()
        # 2. Mark the message as read on first retrieval.
        if instance.unread:
            instance.unread = False
            instance.save(update_fields=('unread',))
        return super().retrieve(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        # 1. Get the user and the object.
        user = self.request.user
        instance = self.get_object()
        # 2. Owners and superusers may delete; deletion is a soft delete.
        if instance.is_deleted:
            # Already gone: deleting is idempotent.
            response = Response(status=204)
        else:
            if instance.user == user or user.is_superuser:
                instance.is_deleted = True
                instance.save()
                response = Response(status=204)
            else:
                response = Response("没权限删除", status=403)
        # 3. Return the response.
        return response
| codelieche/codelieche.com | apps/account/views/message.py | Python | mit | 3,318 |
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class Session(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Python-side attribute name -> declared type, used by the (de)serializer.
    openapi_types = {
        'user_id': 'int',
        'token': 'str',
        'created': 'datetime'
    }

    # Python-side attribute name -> JSON field name in the API payload.
    attribute_map = {
        'user_id': 'userId',
        'token': 'token',
        'created': 'created'
    }

    def __init__(self, user_id=None, token=None, created=None, local_vars_configuration=None):  # noqa: E501
        """Session - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._user_id = None
        self._token = None
        self._created = None
        self.discriminator = None

        # Assign through the properties so client-side validation runs.
        self.user_id = user_id
        self.token = token
        self.created = created

    @property
    def user_id(self):
        """Gets the user_id of this Session.  # noqa: E501

        The ID of the user of this session  # noqa: E501

        :return: The user_id of this Session.  # noqa: E501
        :rtype: int
        """
        return self._user_id

    @user_id.setter
    def user_id(self, user_id):
        """Sets the user_id of this Session.

        The ID of the user of this session  # noqa: E501

        :param user_id: The user_id of this Session.  # noqa: E501
        :type: int
        """
        # Required field: reject None when client-side validation is enabled.
        if self.local_vars_configuration.client_side_validation and user_id is None:  # noqa: E501
            raise ValueError("Invalid value for `user_id`, must not be `None`")  # noqa: E501

        self._user_id = user_id

    @property
    def token(self):
        """Gets the token of this Session.  # noqa: E501

        An opaque session identifier  # noqa: E501

        :return: The token of this Session.  # noqa: E501
        :rtype: str
        """
        return self._token

    @token.setter
    def token(self, token):
        """Sets the token of this Session.

        An opaque session identifier  # noqa: E501

        :param token: The token of this Session.  # noqa: E501
        :type: str
        """
        # Required field: reject None when client-side validation is enabled.
        if self.local_vars_configuration.client_side_validation and token is None:  # noqa: E501
            raise ValueError("Invalid value for `token`, must not be `None`")  # noqa: E501

        self._token = token

    @property
    def created(self):
        """Gets the created of this Session.  # noqa: E501

        Unix timestamp indicating when the session was first created.  # noqa: E501

        :return: The created of this Session.  # noqa: E501
        :rtype: datetime
        """
        return self._created

    @created.setter
    def created(self, created):
        """Sets the created of this Session.

        Unix timestamp indicating when the session was first created.  # noqa: E501

        :param created: The created of this Session.  # noqa: E501
        :type: datetime
        """
        # Required field: reject None when client-side validation is enabled.
        if self.local_vars_configuration.client_side_validation and created is None:  # noqa: E501
            raise ValueError("Invalid value for `created`, must not be `None`")  # noqa: E501

        self._created = created

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Walk the declared fields, recursively converting nested models,
        # lists and dicts through their own ``to_dict`` where available.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Session):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, Session):
            return True

        return self.to_dict() != other.to_dict()
| talon-one/talon_one.py | talon_one/models/session.py | Python | mit | 5,860 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class UpgradePolicy(Model):
    """Describes an upgrade policy - automatic or manual.

    :param mode: Specifies the mode of an upgrade to virtual machines in the
     scale set.<br /><br /> Possible values are:<br /><br /> **Manual** - You
     control the application of updates to virtual machines in the scale set.
     You do this by using the manualUpgrade action.<br /><br /> **Automatic** -
     All virtual machines in the scale set are automatically updated at the
     same time. Possible values include: 'Automatic', 'Manual'
    :type mode: str or
     ~azure.mgmt.compute.v2016_04_30_preview.models.UpgradeMode
    """

    # msrest uses this map to (de)serialize the ``mode`` attribute.
    _attribute_map = {
        'mode': {'key': 'mode', 'type': 'UpgradeMode'},
    }

    def __init__(self, *, mode=None, **kwargs) -> None:
        super(UpgradePolicy, self).__init__(**kwargs)
        self.mode = mode
| lmazuel/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/upgrade_policy_py3.py | Python | mit | 1,372 |
"""Unit tests of resource managers."""
import pytest
from ..utilities.general import is_never_authz, is_no_authz, uses_cataloging, uses_filesystem_only
from dlkit.abstract_osid.osid import errors
from dlkit.abstract_osid.type.objects import TypeList as abc_type_list
from dlkit.primordium.id.primitives import Id
from dlkit.primordium.type.primitives import Type
from dlkit.runtime import PROXY_SESSION, proxy_example
from dlkit.runtime.managers import Runtime
# Build a proxy wrapping a simple HTTP request; the fixtures below pass it
# to the runtime when constructing service managers.
REQUEST = proxy_example.SimpleRequest()
CONDITION = PROXY_SESSION.get_proxy_condition()
CONDITION.set_http_request(REQUEST)
PROXY = PROXY_SESSION.get_proxy(CONDITION)

# Generic Type used by tests that just need some valid Type object.
DEFAULT_TYPE = Type(**{'identifier': 'DEFAULT', 'namespace': 'DEFAULT', 'authority': 'DEFAULT'})
@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_profile_class_fixture(request):
    """Attach a RESOURCE service manager to the test class, once per backend config."""
    request.cls.service_config = request.param
    request.cls.mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
@pytest.fixture(scope="function")
def resource_profile_test_fixture(request):
    """Per-test setup for profile tests: nothing needed."""
    pass
@pytest.mark.usefixtures("resource_profile_class_fixture", "resource_profile_test_fixture")
class TestResourceProfile(object):
    """Tests for ResourceProfile"""
    # Each test only verifies that the profile accessor returns the expected
    # type (bool for supports_* flags, TypeList for record-type queries);
    # the actual values depend on the parametrized backend configuration.

    def test_supports_resource_lookup(self):
        """Tests supports_resource_lookup"""
        assert isinstance(self.mgr.supports_resource_lookup(), bool)

    def test_supports_resource_query(self):
        """Tests supports_resource_query"""
        assert isinstance(self.mgr.supports_resource_query(), bool)

    def test_supports_resource_search(self):
        """Tests supports_resource_search"""
        assert isinstance(self.mgr.supports_resource_search(), bool)

    def test_supports_resource_admin(self):
        """Tests supports_resource_admin"""
        assert isinstance(self.mgr.supports_resource_admin(), bool)

    def test_supports_resource_notification(self):
        """Tests supports_resource_notification"""
        assert isinstance(self.mgr.supports_resource_notification(), bool)

    def test_supports_resource_bin(self):
        """Tests supports_resource_bin"""
        assert isinstance(self.mgr.supports_resource_bin(), bool)

    def test_supports_resource_bin_assignment(self):
        """Tests supports_resource_bin_assignment"""
        assert isinstance(self.mgr.supports_resource_bin_assignment(), bool)

    def test_supports_resource_agent(self):
        """Tests supports_resource_agent"""
        assert isinstance(self.mgr.supports_resource_agent(), bool)

    def test_supports_resource_agent_assignment(self):
        """Tests supports_resource_agent_assignment"""
        assert isinstance(self.mgr.supports_resource_agent_assignment(), bool)

    def test_supports_bin_lookup(self):
        """Tests supports_bin_lookup"""
        assert isinstance(self.mgr.supports_bin_lookup(), bool)

    def test_supports_bin_query(self):
        """Tests supports_bin_query"""
        assert isinstance(self.mgr.supports_bin_query(), bool)

    def test_supports_bin_admin(self):
        """Tests supports_bin_admin"""
        assert isinstance(self.mgr.supports_bin_admin(), bool)

    def test_supports_bin_hierarchy(self):
        """Tests supports_bin_hierarchy"""
        assert isinstance(self.mgr.supports_bin_hierarchy(), bool)

    def test_supports_bin_hierarchy_design(self):
        """Tests supports_bin_hierarchy_design"""
        assert isinstance(self.mgr.supports_bin_hierarchy_design(), bool)

    def test_get_resource_record_types(self):
        """Tests get_resource_record_types"""
        assert isinstance(self.mgr.get_resource_record_types(), abc_type_list)

    def test_get_resource_search_record_types(self):
        """Tests get_resource_search_record_types"""
        assert isinstance(self.mgr.get_resource_search_record_types(), abc_type_list)

    def test_get_resource_relationship_record_types(self):
        """Tests get_resource_relationship_record_types"""
        assert isinstance(self.mgr.get_resource_relationship_record_types(), abc_type_list)

    def test_get_resource_relationship_search_record_types(self):
        """Tests get_resource_relationship_search_record_types"""
        assert isinstance(self.mgr.get_resource_relationship_search_record_types(), abc_type_list)

    def test_get_bin_record_types(self):
        """Tests get_bin_record_types"""
        assert isinstance(self.mgr.get_bin_record_types(), abc_type_list)

    def test_get_bin_search_record_types(self):
        """Tests get_bin_search_record_types"""
        assert isinstance(self.mgr.get_bin_search_record_types(), abc_type_list)
class NotificationReceiver(object):
    """Stub receiver handed to notification sessions; no callbacks are exercised."""
    # Implemented from resource.ResourceManager
    pass
@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_manager_class_fixture(request):
    # Implemented from resource.ResourceManager
    """Create a service manager and, where authorized, a throwaway Bin catalog."""
    request.cls.service_config = request.param
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        implementation=request.cls.service_config)
    if not is_never_authz(request.cls.service_config):
        # Authorized backends get a dedicated Bin, torn down in the finalizer.
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for resource manager tests'
        catalog = request.cls.svc_mgr.create_bin(create_form)
        request.cls.catalog_id = catalog.get_id()
        request.cls.receiver = NotificationReceiver()
    else:
        # Never-authz config cannot create catalogs; use a phony id instead.
        request.cls.catalog_id = Id('resource.Resource%3A000000000000000000000000%40DLKIT.MIT.EDU')

    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            request.cls.svc_mgr.delete_bin(request.cls.catalog_id)

    request.addfinalizer(class_tear_down)
@pytest.fixture(scope="function")
def resource_manager_test_fixture(request):
    # Implemented from resource.ResourceManager
    """Per-test setup for manager tests: nothing needed."""
    pass
@pytest.mark.usefixtures("resource_manager_class_fixture", "resource_manager_test_fixture")
class TestResourceManager(object):
    """Tests for ResourceManager: each session factory must construct without error
    when supported, and the *_for_bin variants must raise NullArgument when the
    catalog id is omitted."""
    def test_get_resource_lookup_session(self):
        """Tests get_resource_lookup_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_lookup_session_template
        # Factory must not raise when the service reports support.
        if self.svc_mgr.supports_resource_lookup():
            self.svc_mgr.get_resource_lookup_session()
    def test_get_resource_lookup_session_for_bin(self):
        """Tests get_resource_lookup_session_for_bin"""
        # From tests_templates/resource.py::ResourceManager::get_resource_lookup_session_for_bin_template
        if self.svc_mgr.supports_resource_lookup():
            self.svc_mgr.get_resource_lookup_session_for_bin(self.catalog_id)
            # Omitting the catalog id must raise NullArgument.
            with pytest.raises(errors.NullArgument):
                self.svc_mgr.get_resource_lookup_session_for_bin()
    def test_get_resource_query_session(self):
        """Tests get_resource_query_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_lookup_session_template
        # Factory must not raise when the service reports support.
        if self.svc_mgr.supports_resource_query():
            self.svc_mgr.get_resource_query_session()
    def test_get_resource_query_session_for_bin(self):
        """Tests get_resource_query_session_for_bin"""
        # From tests_templates/resource.py::ResourceManager::get_resource_lookup_session_for_bin_template
        if self.svc_mgr.supports_resource_query():
            self.svc_mgr.get_resource_query_session_for_bin(self.catalog_id)
            # Omitting the catalog id must raise NullArgument.
            with pytest.raises(errors.NullArgument):
                self.svc_mgr.get_resource_query_session_for_bin()
    def test_get_resource_search_session(self):
        """Tests get_resource_search_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Factory must not raise when the service reports support.
        if self.svc_mgr.supports_resource_search():
            self.svc_mgr.get_resource_search_session()
    def test_get_resource_search_session_for_bin(self):
        """Tests get_resource_search_session_for_bin"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_for_bin_template
        if self.svc_mgr.supports_resource_search():
            self.svc_mgr.get_resource_search_session_for_bin(self.catalog_id)
            # Omitting the catalog id must raise NullArgument.
            with pytest.raises(errors.NullArgument):
                self.svc_mgr.get_resource_search_session_for_bin()
    def test_get_resource_admin_session(self):
        """Tests get_resource_admin_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Factory must not raise when the service reports support.
        if self.svc_mgr.supports_resource_admin():
            self.svc_mgr.get_resource_admin_session()
    def test_get_resource_admin_session_for_bin(self):
        """Tests get_resource_admin_session_for_bin"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_for_bin_template
        if self.svc_mgr.supports_resource_admin():
            self.svc_mgr.get_resource_admin_session_for_bin(self.catalog_id)
            # Omitting the catalog id must raise NullArgument.
            with pytest.raises(errors.NullArgument):
                self.svc_mgr.get_resource_admin_session_for_bin()
    def test_get_resource_notification_session(self):
        """Tests get_resource_notification_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_notification_session_template
        # Notification sessions additionally require a receiver object.
        if self.svc_mgr.supports_resource_notification():
            self.svc_mgr.get_resource_notification_session(self.receiver)
    def test_get_resource_notification_session_for_bin(self):
        """Tests get_resource_notification_session_for_bin"""
        # From tests_templates/resource.py::ResourceManager::get_resource_notification_session_for_bin_template
        # Omitting both receiver and bin id must raise NullArgument.
        if self.svc_mgr.supports_resource_notification():
            self.svc_mgr.get_resource_notification_session_for_bin(self.receiver, self.catalog_id)
            with pytest.raises(errors.NullArgument):
                self.svc_mgr.get_resource_notification_session_for_bin()
    def test_get_resource_bin_session(self):
        """Tests get_resource_bin_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_resource_bin():
            self.svc_mgr.get_resource_bin_session()
    def test_get_resource_bin_assignment_session(self):
        """Tests get_resource_bin_assignment_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_resource_bin_assignment():
            self.svc_mgr.get_resource_bin_assignment_session()
    def test_get_resource_agent_session(self):
        """Tests get_resource_agent_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_resource_agent():
            self.svc_mgr.get_resource_agent_session()
    def test_get_resource_agent_session_for_bin(self):
        """Tests get_resource_agent_session_for_bin"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_for_bin_template
        # Missing bin id must raise NullArgument.
        if self.svc_mgr.supports_resource_agent():
            self.svc_mgr.get_resource_agent_session_for_bin(self.catalog_id)
            with pytest.raises(errors.NullArgument):
                self.svc_mgr.get_resource_agent_session_for_bin()
    def test_get_resource_agent_assignment_session(self):
        """Tests get_resource_agent_assignment_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_resource_agent_assignment():
            self.svc_mgr.get_resource_agent_assignment_session()
    def test_get_resource_agent_assignment_session_for_bin(self):
        """Tests get_resource_agent_assignment_session_for_bin"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_for_bin_template
        # Missing bin id must raise NullArgument.
        if self.svc_mgr.supports_resource_agent_assignment():
            self.svc_mgr.get_resource_agent_assignment_session_for_bin(self.catalog_id)
            with pytest.raises(errors.NullArgument):
                self.svc_mgr.get_resource_agent_assignment_session_for_bin()
    def test_get_bin_lookup_session(self):
        """Tests get_bin_lookup_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_bin_lookup():
            self.svc_mgr.get_bin_lookup_session()
    def test_get_bin_query_session(self):
        """Tests get_bin_query_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_bin_query():
            self.svc_mgr.get_bin_query_session()
    def test_get_bin_admin_session(self):
        """Tests get_bin_admin_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_bin_admin():
            self.svc_mgr.get_bin_admin_session()
    def test_get_bin_hierarchy_session(self):
        """Tests get_bin_hierarchy_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_bin_hierarchy():
            self.svc_mgr.get_bin_hierarchy_session()
    def test_get_bin_hierarchy_design_session(self):
        """Tests get_bin_hierarchy_design_session"""
        # From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
        # Smoke test only: verify the getter succeeds when supported.
        if self.svc_mgr.supports_bin_hierarchy_design():
            self.svc_mgr.get_bin_hierarchy_design_session()
    def test_get_resource_batch_manager(self):
        """Tests get_resource_batch_manager"""
        # From tests_templates/resource.py::ResourceManager::get_resource_batch_manager_template
        # Smoke test only: verify the sub-package manager getter succeeds.
        if self.svc_mgr.supports_resource_batch():
            self.svc_mgr.get_resource_batch_manager()
    def test_get_resource_demographic_manager(self):
        """Tests get_resource_demographic_manager"""
        # From tests_templates/resource.py::ResourceManager::get_resource_batch_manager_template
        # Smoke test only: verify the sub-package manager getter succeeds.
        if self.svc_mgr.supports_resource_demographic():
            self.svc_mgr.get_resource_demographic_manager()
class NotificationReceiver(object):
    # Implemented from resource.ResourceProxyManager
    # Stub receiver handed to the notification-session getters below; the
    # tests never deliver notifications, so no callback methods are needed.
    pass
@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_proxy_manager_class_fixture(request):
    # Implemented from resource.ResourceProxyManager
    # Parameterized over every backend implementation; builds one proxy
    # service manager per class and, where authz permits, a throwaway Bin.
    request.cls.service_config = request.param
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for resource proxy manager tests'
        catalog = request.cls.svc_mgr.create_bin(create_form)
        request.cls.catalog_id = catalog.get_id()
    else:
        # NEVER_AUTHZ cannot create catalogs, so use a fixed phantom id.
        request.cls.catalog_id = Id('resource.Resource%3A000000000000000000000000%40DLKIT.MIT.EDU')
    request.cls.receiver = NotificationReceiver()
    def class_tear_down():
        # Remove the Bin created above (no-op for NEVER_AUTHZ).
        if not is_never_authz(request.cls.service_config):
            request.cls.svc_mgr.delete_bin(request.cls.catalog_id)
    request.addfinalizer(class_tear_down)
@pytest.fixture(scope="function")
def resource_proxy_manager_test_fixture(request):
    # Implemented from resource.ResourceProxyManager
    # No per-test setup needed; all shared state lives on the class fixture.
    pass
@pytest.mark.usefixtures("resource_proxy_manager_class_fixture", "resource_proxy_manager_test_fixture")
class TestResourceProxyManager(object):
    """Tests for ResourceProxyManager"""
    # Mirrors TestResourceManager above, but every session getter takes an
    # explicit PROXY argument; calling a getter with no arguments at all is
    # expected to raise errors.NullArgument.
    def test_get_resource_lookup_session(self):
        """Tests get_resource_lookup_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_lookup_session_template
        if self.svc_mgr.supports_resource_lookup():
            self.svc_mgr.get_resource_lookup_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_lookup_session()
    def test_get_resource_lookup_session_for_bin(self):
        """Tests get_resource_lookup_session_for_bin"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_lookup_session_for_bin_template
        if self.svc_mgr.supports_resource_lookup():
            self.svc_mgr.get_resource_lookup_session_for_bin(self.catalog_id, PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_lookup_session_for_bin()
    def test_get_resource_query_session(self):
        """Tests get_resource_query_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_lookup_session_template
        if self.svc_mgr.supports_resource_query():
            self.svc_mgr.get_resource_query_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_query_session()
    def test_get_resource_query_session_for_bin(self):
        """Tests get_resource_query_session_for_bin"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_lookup_session_for_bin_template
        if self.svc_mgr.supports_resource_query():
            self.svc_mgr.get_resource_query_session_for_bin(self.catalog_id, PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_query_session_for_bin()
    def test_get_resource_search_session(self):
        """Tests get_resource_search_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_resource_search():
            self.svc_mgr.get_resource_search_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_search_session()
    def test_get_resource_search_session_for_bin(self):
        """Tests get_resource_search_session_for_bin"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_for_bin_template
        if self.svc_mgr.supports_resource_search():
            self.svc_mgr.get_resource_search_session_for_bin(self.catalog_id, PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_search_session_for_bin()
    def test_get_resource_admin_session(self):
        """Tests get_resource_admin_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_resource_admin():
            self.svc_mgr.get_resource_admin_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_admin_session()
    def test_get_resource_admin_session_for_bin(self):
        """Tests get_resource_admin_session_for_bin"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_for_bin_template
        if self.svc_mgr.supports_resource_admin():
            self.svc_mgr.get_resource_admin_session_for_bin(self.catalog_id, PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_admin_session_for_bin()
    def test_get_resource_notification_session(self):
        """Tests get_resource_notification_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_notification_session_template
        if self.svc_mgr.supports_resource_notification():
            self.svc_mgr.get_resource_notification_session(self.receiver, proxy=PROXY)
    def test_get_resource_notification_session_for_bin(self):
        """Tests get_resource_notification_session_for_bin"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_notification_session_for_bin_template
        if self.svc_mgr.supports_resource_notification():
            self.svc_mgr.get_resource_notification_session_for_bin(self.receiver, self.catalog_id, proxy=PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_notification_session_for_bin()
    def test_get_resource_bin_session(self):
        """Tests get_resource_bin_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_resource_bin():
            self.svc_mgr.get_resource_bin_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_bin_session()
    def test_get_resource_bin_assignment_session(self):
        """Tests get_resource_bin_assignment_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_resource_bin_assignment():
            self.svc_mgr.get_resource_bin_assignment_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_bin_assignment_session()
    def test_get_group_hierarchy_session(self):
        """Tests get_group_hierarchy_session"""
        # NOTE(review): unlike its siblings this test has no template comment
        # and expects errors.Unimplemented (not NullArgument) from the
        # zero-argument call — confirm against the underlying service spec.
        if self.svc_mgr.supports_group_hierarchy():
            self.svc_mgr.get_group_hierarchy_session(PROXY)
        with pytest.raises(errors.Unimplemented):
            self.svc_mgr.get_group_hierarchy_session()
    def test_get_resource_agent_session(self):
        """Tests get_resource_agent_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_resource_agent():
            self.svc_mgr.get_resource_agent_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_agent_session()
    def test_get_resource_agent_session_for_bin(self):
        """Tests get_resource_agent_session_for_bin"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_for_bin_template
        if self.svc_mgr.supports_resource_agent():
            self.svc_mgr.get_resource_agent_session_for_bin(self.catalog_id, PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_agent_session_for_bin()
    def test_get_resource_agent_assignment_session(self):
        """Tests get_resource_agent_assignment_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_resource_agent_assignment():
            self.svc_mgr.get_resource_agent_assignment_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_agent_assignment_session()
    def test_get_resource_agent_assignment_session_for_bin(self):
        """Tests get_resource_agent_assignment_session_for_bin"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_for_bin_template
        if self.svc_mgr.supports_resource_agent_assignment():
            self.svc_mgr.get_resource_agent_assignment_session_for_bin(self.catalog_id, PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_resource_agent_assignment_session_for_bin()
    def test_get_bin_lookup_session(self):
        """Tests get_bin_lookup_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_bin_lookup():
            self.svc_mgr.get_bin_lookup_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_bin_lookup_session()
    def test_get_bin_query_session(self):
        """Tests get_bin_query_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_bin_query():
            self.svc_mgr.get_bin_query_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_bin_query_session()
    def test_get_bin_admin_session(self):
        """Tests get_bin_admin_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_bin_admin():
            self.svc_mgr.get_bin_admin_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_bin_admin_session()
    def test_get_bin_hierarchy_session(self):
        """Tests get_bin_hierarchy_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_bin_hierarchy():
            self.svc_mgr.get_bin_hierarchy_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_bin_hierarchy_session()
    def test_get_bin_hierarchy_design_session(self):
        """Tests get_bin_hierarchy_design_session"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_admin_session_template
        if self.svc_mgr.supports_bin_hierarchy_design():
            self.svc_mgr.get_bin_hierarchy_design_session(PROXY)
        with pytest.raises(errors.NullArgument):
            self.svc_mgr.get_bin_hierarchy_design_session()
    def test_get_resource_batch_proxy_manager(self):
        """Tests get_resource_batch_proxy_manager"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_batch_proxy_manager_template
        if self.svc_mgr.supports_resource_batch():
            self.svc_mgr.get_resource_batch_proxy_manager()
    def test_get_resource_demographic_proxy_manager(self):
        """Tests get_resource_demographic_proxy_manager"""
        # From tests_templates/resource.py::ResourceProxyManager::get_resource_batch_proxy_manager_template
        if self.svc_mgr.supports_resource_demographic():
            self.svc_mgr.get_resource_demographic_proxy_manager()
| mitsei/dlkit | tests/resource/test_managers.py | Python | mit | 26,397 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from flask import Blueprint, render_template, Markup, url_for
from flask_toolbox.models import Package
package_page = Blueprint('package_page', __name__,
template_folder='templates')
@package_page.route('/packages')
def index():
    """Render the package index: every package assigned to a category.

    Renders ``packages.html`` with the full package objects plus a plain
    list of names for the sidebar.
    """
    # ``isnot(None)`` is SQLAlchemy's explicit NULL test; ``!= None`` works
    # only via operator overloading and trips E711-style linters.
    packages = (Package.query
                .order_by(Package.name)
                .filter(Package.category_id.isnot(None))
                .all())
    sidebar_title = "All the packages"
    package_list = [package.name for package in packages]
    # Removed a leftover debug ``print(len(package_list))`` that wrote to
    # stdout on every request.
    return render_template(
        'packages.html', packages=packages,
        sidebar_title=sidebar_title, package_list=package_list)
@package_page.route('/packages/<package>')
def show(package):
    # Detail page for one package; 404s when the name is unknown.
    # The sidebar lists the sibling packages of the same category,
    # ordered by descending score, excluding the package itself.
    the_package = Package.query.filter_by(name=package).first_or_404()
    category = the_package.category
    related_packages = [item.name for item in category.packages.order_by(Package.score.desc()).all()
                        if item.name != package]
    # Markup() marks the anchor as safe so Jinja2 does not escape it.
    sidebar_title = (
        Markup("Other related packages in the <a href='{0}'>{1}</a> category".format(
            url_for('category_page.show', category=category.name),
            category.name
        ))
    )
    return render_template(
        'package.html', package=the_package,
        related_packages=related_packages, sidebar_title=sidebar_title)
@package_page.route('/packages/<package>/score')
def score(package):
    # Score page for one package; Flask itself is loaded as the baseline
    # the template compares against.
    flask = Package.query.filter_by(name="Flask").first()
    the_package = Package.query.filter_by(name=package).first_or_404()
    category = the_package.category
    related_packages = [item.name for item in category.packages.order_by(Package.score.desc()).all()
                        if item.name != package]
    # NOTE(review): this builds the link with 'category_page.index' whereas
    # show() above uses 'category_page.show' — possibly unintentional;
    # confirm which endpoint accepts the ``category`` argument.
    sidebar_title = (
        Markup("Other related packages in the <a href='{0}'>{1}</a> category".format(
            url_for('category_page.index', category=category.name),
            category.name
        ))
    )
    return render_template(
        'score.html', package=the_package, flask=flask,
        related_packages=related_packages, sidebar_title=sidebar_title)
| lord63/flask_toolbox | flask_toolbox/views/package.py | Python | mit | 2,151 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')  # generic return type threaded through ClsType
# Signature of the optional ``cls`` response hook accepted by operations.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
# Values are validated by the service; skip client-side validation.
_SERIALIZER.client_side_validation = False
def build_export_request(
    vault_name: str,
    resource_group_name: str,
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Assemble the POST request that triggers a backup-jobs export.

    ``kwargs`` may pre-seed ``params``/``headers``/``template_url``; any
    remaining keyword arguments are forwarded to :class:`HttpRequest`.
    """
    api_version = "2021-12-01"
    accept = "application/json"
    # Resolve the URL template against the serialized path arguments.
    url = kwargs.pop(
        "template_url",
        '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupJobsExport')
    path_args = {
        "vaultName": _SERIALIZER.url("vault_name", vault_name, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    url = _format_url_section(url, **path_args)
    # Query string: api-version always, $filter only when supplied.
    query = kwargs.pop("params", {})  # type: Dict[str, Any]
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    if filter is not None:
        query['$filter'] = _SERIALIZER.query("filter", filter, 'str')
    # Only the Accept header is required for this operation.
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="POST",
        url=url,
        params=query,
        headers=headers,
        **kwargs
    )
class JobsOperations(object):
    """JobsOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.recoveryservicesbackup.activestamp.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Wired up by the generated service client; holds the shared
        # pipeline client plus (de)serializers for this operation group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def export(
        self,
        vault_name: str,
        resource_group_name: str,
        filter: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """Triggers export of jobs specified by filters and returns an OperationID to track.
        :param vault_name: The name of the recovery services vault.
        :type vault_name: str
        :param resource_group_name: The name of the resource group where the recovery services vault is
         present.
        :type resource_group_name: str
        :param filter: OData filter options.
        :type filter: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known status codes onto typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_export_request(
            vault_name=vault_name,
            resource_group_name=resource_group_name,
            subscription_id=self._config.subscription_id,
            filter=filter,
            template_url=self.export.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 202 Accepted is the only success code: the export itself runs
        # asynchronously on the service side.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    export.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupJobsExport'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/activestamp/operations/_jobs_operations.py | Python | mit | 5,627 |
import os
import uuid
from django.db import models
from django.core.files.uploadedfile import UploadedFile
from django.forms.forms import pretty_name
from . import get_image_cropper
from . import tasks
from . import settings
from . import utils
from . import signals
from .managers import AssetManager
from .fields import AssetRealFileField
try:
from ..versioning import manager
except ValueError:
from versioning import manager
try:
from ..cms.internal_tags.models import AutoTagModel
except ValueError:
from cms.internal_tags.models import AutoTagModel
class AssetBase(AutoTagModel):
    """Abstract base for uploaded assets (files with type, slug and
    cache-bust versioning).  Concrete subclass: :class:`Asset` below.
    """
    # Asset type constants; settings.ASSET_TYPES can override the choices.
    UNKNOWN = 'unknown'
    IMAGE = 'image'
    DOCUMENT = 'document'
    AUDIO = 'audio'
    VIDEO = 'video'
    TYPES = settings.ASSET_TYPES and settings.ASSET_TYPES or \
                ((UNKNOWN, 'Unknown'),
                (IMAGE, 'Image'),
                (DOCUMENT, 'Document'),
                (AUDIO, 'Audio'),
                (VIDEO, 'Video'),)
    # Snapshot of ``file`` taken in __init__, used by save() to detect a
    # replacement and delete the old file from storage.
    __original_file = None
    title = models.CharField(max_length=255)
    file = AssetRealFileField(upload_to=utils.assets_dir)
    type = models.CharField(max_length=255, choices=TYPES, db_index=True)
    slug = models.SlugField(unique=True, max_length=255)
    user_filename = models.CharField(max_length=255)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    # Cache-bust version, bumped whenever the underlying file changes.
    cbversion = models.PositiveIntegerField(editable=False)
    objects = AssetManager()
    class Meta:
        abstract = True
    def __init__(self, *args, **kwargs):
        super(AssetBase, self).__init__(*args, **kwargs)
        self.__original_file = self.file
    def rename_file(self):
        # Documents keep their user-visible filename; other types may be
        # renamed to a hash depending on settings.HASH_FILENAME.
        if self.type == self.DOCUMENT:
            return False
        return settings.HASH_FILENAME
    def url(self):
        """
        This is a wrapper of file.url
        """
        return self.file.url
    def generate_slug(self):
        # uuid1 keeps slugs unique without a database round-trip.
        return str(uuid.uuid1())
    def assign_tag(self):
        pass
    def delete_real_file(self, file_obj):
        # Remove from storage and notify listeners (e.g. CDN invalidation).
        file_obj.storage.delete(file_obj.name)
        signals.file_removed.send(file_obj.name)
    def _can_crop(self):
        # Only images participate in the cropping pipeline.
        return self.type == self.IMAGE
    def reset_crops(self):
        """
        Reset all known crops to the default crop.
        If settings.ASSET_CELERY is specified then
        the task will be run async
        """
        if self._can_crop():
            if settings.CELERY or settings.USE_CELERY_DECORATOR:
                # this means that we are using celery
                tasks.reset_crops.apply_async(args=[self.pk], countdown=5)
            else:
                tasks.reset_crops(None, asset=self)
    def ensure_crops(self, *required_crops):
        """
        Make sure a crop exists for each crop in required_crops.
        Existing crops will not be changed.
        If settings.ASSET_CELERY is specified then
        the task will be run async
        """
        if self._can_crop():
            if settings.CELERY or settings.USE_CELERY_DECORATOR:
                # this means that we are using celery
                args = [self.pk]+list(required_crops)
                tasks.ensure_crops.apply_async(args=args, countdown=5)
            else:
                tasks.ensure_crops(None, *required_crops, asset=self)
    def create_crop(self, name, x, x2, y, y2):
        """
        Create a crop for this asset.
        """
        if self._can_crop():
            spec = get_image_cropper().create_crop(name, self.file, x=x,
                                                   x2=x2, y=y, y2=y2)
            ImageDetail.save_crop_spec(self, spec)
    def save(self, *args, **kwargs):
        """
        For new assets, creates a new slug.
        For updates, deletes the old file from storage.
        Calls super to actually save the object.
        """
        if not self.pk and not self.slug:
            self.slug = self.generate_slug()
        # A replaced file: drop the old one from storage before saving.
        if self.__original_file and self.file != self.__original_file:
            self.delete_real_file(self.__original_file)
        file_changed = True
        if self.pk:
            # On updates, only treat the file as changed when a fresh
            # upload is attached (not a re-saved stored file).
            new_value = getattr(self, 'file')
            if hasattr(new_value, "file"):
                file_changed = isinstance(new_value.file, UploadedFile)
        else:
            self.cbversion = 0
        if file_changed:
            self.user_filename = os.path.basename(self.file.name)
            # Bump the cache-bust version so stale URLs are invalidated.
            self.cbversion = self.cbversion + 1
            if not self.title:
                self.title = self.user_filename
        super(AssetBase, self).save(*args, **kwargs)
        if file_changed:
            signals.file_saved.send(self.file.name)
            utils.update_cache_bust_version(self.file.url, self.cbversion)
            self.reset_crops()
        if self.__original_file and self.file.name != self.__original_file.name:
            # Propagate the renamed file path into any denormalized
            # columns on related models.
            # NOTE(review): SwitchSchemaManager(None) presumably scans all
            # schemas/sites — confirm against the versioning manager docs.
            with manager.SwitchSchemaManager(None):
                for related in self.__class__._meta.get_all_related_objects(
                        include_hidden=True):
                    field = related.field
                    if getattr(field, 'denormalize', None):
                        cname = field.get_denormalized_field_name(field.name)
                        if getattr(field, 'denormalize'):
                            related.model.objects.filter(**{
                                field.name: self.pk
                            }).update(**{
                                cname: self.file.name
                            })
    def delete(self, *args, **kwargs):
        """
        Deletes the actual file from storage after the object is deleted.
        Calls super to actually delete the object.
        """
        file_obj = self.file
        super(AssetBase, self).delete(*args, **kwargs)
        self.delete_real_file(file_obj)
    def __unicode__(self):
        return '%s' % (self.user_filename)
class ImageDetailBase(models.Model):
    """Abstract base for a named crop of an image asset, storing the crop
    box coordinates and resulting dimensions.
    """
    image = models.ForeignKey(settings.ASSET_MODEL)
    width = models.PositiveIntegerField()
    height = models.PositiveIntegerField()
    name = models.CharField(max_length=255)
    editable = models.BooleanField(editable=False, default=False)
    # Crop box corners; nullable because auto-generated crops may have none.
    x = models.PositiveIntegerField(null=True)
    x2 = models.PositiveIntegerField(null=True)
    y = models.PositiveIntegerField(null=True)
    y2 = models.PositiveIntegerField(null=True)
    class Meta:
        abstract = True
    def __unicode__(self):
        return pretty_name(self.name)
    def get_crop_config(self):
        return get_image_cropper().get_crop_config(self.name)
    @classmethod
    def save_crop_spec(cls, asset, spec, update_version=True):
        """Persist ``spec`` for ``asset`` (update-or-create by crop name)
        and, unless ``update_version`` is False, bump the asset's
        cache-bust version with an atomic F() expression.
        """
        if spec:
            cdict = spec.to_dict()
            updated = cls.objects.filter(image=asset,
                                         name=cdict['name']).update(**cdict)
            if not updated:
                cls(image=asset, **cdict).save()
            if update_version:
                asset.__class__.objects.filter(pk=asset.pk
                            ).update(cbversion=models.F('cbversion')+1)
class Asset(AssetBase):
    # Concrete (swappable via settings.ASSET_MODEL) asset model.
    class Meta:
        abstract = False
class ImageDetail(ImageDetailBase):
    # Concrete crop-detail model used by AssetBase.create_crop().
    class Meta:
        abstract = False
| ff0000/scarlet | scarlet/assets/models.py | Python | mit | 7,186 |
from setuptools import setup
import minify.command
# Packaging script for the cloaca web game; `minify_css` is exposed as a
# custom setup.py command via minify.command.
setup(name='cloaca',
      version='0.1.0',
      url='https://github.com/mhmurray/cloaca',
      author='Michael Murray',
      author_email='michaelhamburgmurray@gmail.com',
      license='MIT',
      packages=['cloaca'],
      zip_safe=False,
      include_package_data=True,
      scripts=[
          'cloaca/cloacaapp.py'
          ],
      install_requires=[
          'tornado>=4.3.0',
          'tornadis>=0.7.0',
          'bcrypt>=2.0.0',
          'futures>=3.0.5',
          'minify',
          ],
      cmdclass={
          'minify_css' : minify.command.minify_css,
          },
      )
| mhmurray/cloaca | setup.py | Python | mit | 696 |
# -*- coding: utf-8 -*-
# pylint: disable-msg=W0104,E0602,W0613,R0201
"""
Abstract classes and utilities for template engines
"""
from hyde._compat import with_metaclass
from hyde.exceptions import HydeException
import abc
from commando.util import getLoggerWithNullHandler, load_python_object
DEFAULT_TEMPLATE = 'hyde.ext.templates.jinja.Jinja2Template'
class HtmlWrap(object):
    """
    A wrapper class for raw html.
    Provides pyquery interface if available.
    Otherwise raw html access.
    """

    def __init__(self, html):
        super(HtmlWrap, self).__init__()
        self.raw = html
        try:
            # pyquery is an optional dependency; degrade gracefully when it
            # is absent. The original bare ``except:`` also swallowed
            # non-import failures (even SystemExit); narrowed to ImportError.
            from pyquery import PyQuery
        except ImportError:
            PyQuery = None
        self.q = PyQuery(html) if PyQuery else None

    def __str__(self):
        return self.raw

    # Support __unicode__ as well as __str__ for backward compatibility.
    __unicode__ = __str__

    def __call__(self, selector=None):
        """Return the inner html matched by ``selector`` via pyquery, or
        the raw markup unchanged when pyquery is unavailable."""
        if not self.q:
            return self.raw
        return self.q(selector).html()
class Template(with_metaclass(abc.ABCMeta)):
    """
    Interface for hyde template engines. To use a different template engine,
    the following interface must be implemented.
    """
    # with_metaclass keeps the ABC working on both Python 2 and 3.
    def __init__(self, sitepath):
        self.sitepath = sitepath
        self.logger = getLoggerWithNullHandler(self.__class__.__name__)
    @abc.abstractmethod
    def configure(self, site, engine):
        """
        The site object should contain a config attribute. The config object
        is a simple YAML object with required settings. The template
        implementations are responsible for transforming this object to match
        the `settings` required for the template engines.
        The engine is an informal protocol to provide access to some
        hyde internals.
        The preprocessor attribute must contain the function that trigger the
        hyde plugins to preprocess the template after load.
        A context_for_path attribute must contain the function that returns
        the context object that is populated with the appropriate variables
        for the given path.
        """
        return
    def clear_caches(self):
        """
        Clear all caches to prepare for regeneration
        """
        # Optional hook: default is a no-op.
        return
    def get_dependencies(self, text):
        """
        Finds the dependencies based on the included
        files.
        """
        # Optional hook: default reports no dependencies.
        return None
    @abc.abstractmethod
    def render_resource(self, resource, context):
        """
        This function must load the file represented by the resource
        object and return the rendered text.
        """
        return ''
    @abc.abstractmethod
    def render(self, text, context):
        """
        Given the text, and the context, this function must return the
        rendered string.
        """
        return ''
    @abc.abstractproperty
    def exception_class(self):
        return HydeException
    @abc.abstractproperty
    def patterns(self):
        """
        Patterns for matching selected template statements.
        """
        return {}
    @abc.abstractmethod
    def get_include_statement(self, path_to_include):
        """
        Returns an include statement for the current template,
        given the path to include.
        """
        # The bodies below double as the Jinja2-flavored reference syntax.
        return '{%% include \'%s\' %%}' % path_to_include
    @abc.abstractmethod
    def get_extends_statement(self, path_to_extend):
        """
        Returns an extends statement for the current template,
        given the path to extend.
        """
        return '{%% extends \'%s\' %%}' % path_to_extend
    @abc.abstractmethod
    def get_open_tag(self, tag, params):
        """
        Returns an open tag statement.
        """
        return '{%% %s %s %%}' % (tag, params)
    @abc.abstractmethod
    def get_close_tag(self, tag, params):
        """
        Returns an open tag statement.
        """
        return '{%% end%s %%}' % tag
    @abc.abstractmethod
    def get_content_url_statement(self, url):
        """
        Returns the content url statement.
        """
        return '/' + url
    @abc.abstractmethod
    def get_media_url_statement(self, url):
        """
        Returns the media url statement.
        """
        return '/media/' + url
    @staticmethod
    def find_template(site):
        """
        Reads the configuration to find the appropriate template.
        """
        # Falls back to the Jinja2 implementation when unconfigured.
        template_object = site.config.get('template', DEFAULT_TEMPLATE)
        template_cls = load_python_object(template_object)
        template = template_cls(site.sitepath)
        return template
| hyde/hyde | hyde/template.py | Python | mit | 4,633 |
class Bot:
    """Base class for game bots; subclasses override setup() and update()."""
    def setup(self, initial_data):
        # Presumably invoked once with the initial match data before the
        # game loop starts — confirm against the engine's bot protocol.
        pass
    def update(self, state, response):
        # Presumably invoked each turn with the current state and a mutable
        # response object — confirm against the engine's bot protocol.
        pass
| RokKos/FRI_Programiranje | OUI/planet-lia/rokkos/core/bot.py | Python | mit | 112 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-04-09 08:07
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: relaxes GeoRegion.part_of to be nullable/blank and
    # uses on_delete=SET_NULL so deleting a parent region detaches, not
    # deletes, its children.
    dependencies = [
        ('georegion', '0001_initial_squashed_0004_auto_20180307_2026'),
    ]
    operations = [
        migrations.AlterField(
            model_name='georegion',
            name='part_of',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='georegion.GeoRegion', verbose_name='Part of'),
        ),
    ]
| stefanw/froide | froide/georegion/migrations/0002_auto_20180409_1007.py | Python | mit | 617 |
from base_screen import BaseScreen
import pygame
from ..graphic_utils import ListView,\
ScreenObjectsManager, TouchAndTextItem
from ..input import InputManager
from play_options import PlayOptions
# Search modes: which library field the query is matched against.
mode_track_name = 0
mode_album_name = 1
mode_artist_name = 2
class SearchScreen(BaseScreen):
    """Screen that searches the Mopidy library by track, album or artist.

    Results are shown in a scrollable ListView; clicking a result opens a
    PlayOptions dialog for the selected URI.
    """

    def __init__(self, size, base_size, manager, fonts, playqueues=None):
        BaseScreen.__init__(self, size, base_size, manager, fonts)
        self.list_view = ListView((0, self.base_size*2), (
            self.size[0], self.size[1] -
            3*self.base_size), self.base_size, manager.fonts['base'])
        self.results_strings = []
        self.results = []
        self.screen_objects = ScreenObjectsManager()
        self.query = ""
        self.playqueues = playqueues

        # Search (magnifier icon) button.
        button = TouchAndTextItem(self.fonts['icon'], u" \ue986",
                                  (0, self.base_size),
                                  None, center=True)
        self.screen_objects.set_touch_object(
            "search", button)
        x = button.get_right_pos()

        # Current query text.
        text = TouchAndTextItem(self.fonts['base'], self.query, (0, 0),
                                (self.size[0], self.base_size), center=True)
        self.screen_objects.set_touch_object("query", text)

        # Mode buttons share the width remaining after the search button.
        button_size = ((self.size[0]-x)/3, self.base_size)
        self.mode_objects_keys = ["mode_track", "mode_album",
                                  "mode_artist"]

        # Track button.
        button = TouchAndTextItem(self.fonts['base'], "Track",
                                  (x, self.base_size),
                                  (button_size[0], self.base_size),
                                  center=True)
        self.screen_objects.set_touch_object(
            self.mode_objects_keys[0], button)

        # Album button.
        button = TouchAndTextItem(self.fonts['base'], "Album",
                                  (button_size[0]+x, self.base_size),
                                  button_size, center=True)
        self.screen_objects.set_touch_object(
            self.mode_objects_keys[1], button)

        # Artist button.
        button = TouchAndTextItem(self.fonts['base'], "Artist",
                                  (button_size[0]*2+x, self.base_size),
                                  button_size, center=True)
        self.screen_objects.set_touch_object(
            self.mode_objects_keys[2], button)

        # Semi-transparent top bar drawn behind the query and mode buttons.
        self.top_bar = pygame.Surface(
            (self.size[0], self.base_size * 2),
            pygame.SRCALPHA)
        self.top_bar.fill((38, 38, 38, 128))

        self.mode = -1
        self.set_mode(mode=mode_track_name)
        self.set_query("Search")
        self.play_options_dialog = None

    def should_update(self):
        """Redraw only when the result list needs it."""
        return self.list_view.should_update()

    def update(self, screen, update_type, rects):
        screen.blit(self.top_bar, (0, 0))
        self.screen_objects.render(screen)
        update_all = (update_type == BaseScreen.update_all)
        self.list_view.render(screen, update_all, rects)
        # PEP 8: compare against None with `is not`.
        if self.play_options_dialog is not None:
            self.play_options_dialog.render(screen, update_all, rects)

    def set_mode(self, mode=mode_track_name):
        """Remember *mode* and highlight the matching mode button."""
        # Plain inequality: identity (`is`) is unreliable for ints.
        if mode != self.mode:
            self.mode = mode
            for key in self.mode_objects_keys:
                self.screen_objects.get_touch_object(key).\
                    set_active(False)
            self.screen_objects.get_touch_object(
                self.mode_objects_keys[self.mode]).set_active(True)

    def set_query(self, query=""):
        """Store *query* and mirror it into the query widget."""
        self.query = query
        self.screen_objects.get_touch_object("query").set_text(
            self.query, False)

    def search(self, query=None, mode=None):
        """Run the current query against the library.

        Either argument may be None, in which case the stored value is
        kept; self.mode is what ultimately drives the search.
        """
        if query is not None:
            self.set_query(query)
        if mode is not None:
            self.set_mode(mode)
        if self.mode == mode_track_name:
            search_query = {'any': [self.query]}
        elif self.mode == mode_album_name:
            search_query = {'album': [self.query]}
        else:
            search_query = {'artist': [self.query]}
        if len(self.query) > 0:
            current_results = self.manager.core.library.search(
                search_query).get()
            self.results = []
            self.results_strings = []
            for backend in current_results:
                # Bug fix: pick the result list from self.mode, not the
                # raw `mode` argument — `mode` is None when not passed,
                # which previously made such searches fall through to
                # the artists branch regardless of the active mode.
                if self.mode == mode_track_name:
                    iterable = backend.tracks
                elif self.mode == mode_album_name:
                    iterable = backend.albums
                else:
                    iterable = backend.artists
                for result in iterable:
                    self.results.append(result)
                    self.results_strings.append(result.name)
            self.list_view.set_list(self.results_strings)

    def touch_event(self, touch_event):
        if touch_event.type == InputManager.click:
            # A click inside an open play-options dialog is forwarded to
            # the dialog, which is then dismissed.
            if (self.play_options_dialog is not None and
                    self.play_options_dialog.is_position_in_dialog(
                        touch_event.current_pos)):
                # TODO: act on the chosen option (play, queue, ...).
                self.play_options_dialog.touch_event(touch_event)
                self.play_options_dialog = None
            else:
                self.play_options_dialog = None
                clicked = self.list_view.touch_event(touch_event)
                if clicked is not None:
                    # Open the play-options dialog for the clicked result.
                    self.play_options_dialog = PlayOptions(
                        self.size, self.base_size, self.manager,
                        self.fonts, self.results[clicked].uri,
                        self.playqueues)
                else:
                    clicked = self.screen_objects.get_touch_objects_in_pos(
                        touch_event.down_pos)
                    if len(clicked) > 0:
                        clicked = clicked[0]
                    if clicked == self.mode_objects_keys[0]:
                        self.search(mode=0)
                    if clicked == self.mode_objects_keys[1]:
                        self.search(mode=1)
                    if clicked == self.mode_objects_keys[2]:
                        self.search(mode=2)
                    if clicked == "query" or clicked == "search":
                        self.manager.open_keyboard(self)
        elif touch_event.type == InputManager.long_click:
            # TODO: long-click behaviour not implemented yet.
            pass
        else:
            pos = self.list_view.touch_event(touch_event)
            if pos is not None:
                self.screen_objects.get_touch_object(pos).set_selected()
                self.manager.core.tracklist.clear()
                self.manager.core.tracklist.add(
                    uri=self.results[pos].uri)
                # self.manager.core.playback.play()

    def change_screen(self, direction):
        """Cycle the search mode with left/right; anything else opens
        the on-screen keyboard."""
        if direction == InputManager.right:
            if self.mode < 2:
                self.set_mode(self.mode+1)
                return True
        elif direction == InputManager.left:
            if self.mode > 0:
                self.set_mode(self.mode-1)
                return True
        else:
            self.manager.open_keyboard(self)
        return False

    def text_input(self, text):
        """Keyboard callback: search the entered text in the current mode."""
        self.search(text, self.mode)
| jravey7/Joe2Music | mopidy_touchscreen/screens/search_screen.py | Python | mit | 7,928 |
# cgc.py - Curriculum Graph Converter - Dana Toribio
import graphviz
import os
import re
import subprocess
import sys
# Matches a course code such as "CS 2102": letters, whitespace, digits.
re_courses = re.compile('\w+\s\d+\w*') # regex for courses
# Boilerplate emitted at the top of the generated studyplan.py script.
header = r'''from graphviz import Digraph
g = Digraph('studyplan', filename='studyplan.gv')
g.attr('graph', fontname='Helvetica')
g.attr('node', fontname='Helvetica')'''
# Pre-built legend subgraph mapping node colours to semesters.
legend = r'''c0 = Digraph('cluster_0')
c0.body.append('label = "LEGEND"')
c0.body.append('color=lightgrey')
c0.node_attr.update(style='filled', color='white')
c0.edge_attr.update(color='white')
c0.node('Semester 6', color='plum')
c0.node('Semester 7', color='crimson')
c0.node('Semester 3', color='peachpuff')
c0.node('Semester 4', color='darkseagreen')
c0.node('Semester 5', color='lightblue')
c0.node('Completed', color='grey')
c0.node('Semester 1', color='pink')
c0.node('Semester 2', color='lightsalmon')
c0.node('Semester 8', color='chocolate')
c0.edge('Semester 6', 'Semester 7')
c0.edge('Semester 7', 'Semester 8')
c0.edge('Semester 3', 'Semester 4')
c0.edge('Semester 4', 'Semester 5')
c0.edge('Completed', 'Semester 1')
c0.edge('Semester 1', 'Semester 2')
c0.body.append('label = "LEGEND"')
'''
# Opening lines of the required-electives cluster; the label string is
# deliberately left dangling and completed while parsing the input.
req_electives = r'''c1 = Digraph('cluster_1')
c1.body.append('color=aliceblue')
c1.body.append('style=filled')
c1.body.append('labelloc = "b"')
c1.body.append('label = "'''
req_electives_footer = "c1.body.append('label = \""
trk_electives = r'''
c2 = Digraph('cluster_2')
c2.body.append('color=aliceblue')
c2.body.append('style=filled')
c2.body.append('labelloc = "b"')
c2.body.append('label = "'''
trk_electives_footer = "c2.body.append('label = \""
# Accumulators filled while parsing studyplan.txt, then written out.
completed_courses = ''
suggestions = ''
core_courses = ''
elec_prereqs = ''
def node(value):
    """Return a graphviz .node(...) call for *value*, newline-terminated."""
    return ".node('{0}')\n".format(value)
def prereq_edge(node1, node2, crit):
    """Return a .edge(...) call from node1 to node2; red when critical."""
    suffix = ", color='red')\n" if crit is True else ")\n"
    return ".edge('{0}', '{1}'{2}".format(node1, node2, suffix)
def coreq_edge(node1, node2, crit):
    """Return a dot-ended, bidirectional .edge(...) call marking a
    corequisite pair; red when critical."""
    attrs = "'', arrowhead='dot', arrowtail='dot', dir='both'"
    if crit is True:
        attrs += ", color='red'"
    return ".edge('{0}', '{1}', {2})\n".format(node1, node2, attrs)
# Driver: translate studyplan.txt into a graphviz python script.
# NOTE(review): neither file handle is closed; consider `with open(...)`.
f = open('studyplan.txt', 'r')
nf = open('studyplan.py', 'w')
write_to = ''
legend_index = 0
legend_color = ['pink', 'lightsalmon', 'peachpuff', 'darkseagreen', 'lightblue', 'plum', 'crimson', 'chocolate', 'goldenrod']
for line in f:
    # Section headers in the input select which accumulator receives the
    # course lines that follow.
    if ('#' in line.split(' ')) and ('Core' in line):
        write_to = 'core'
        core_courses = core_courses + '\n' + line
    elif ('#' in line.split(' ')) and ('required' in line):
        write_to = 'req_electives'
        req_electives = req_electives + line[2:-1] + '"\')\n'
    elif ('Track' in line):
        write_to = 'trk_electives'
        trk_electives = trk_electives + line[2:-1] + '"\')\n'
    elif ('#' in line.split(' ')) and ('taken' in line):
        write_to = 'suggestions'
        suggestions = suggestions + '\n' + line + "g.attr('node', style='filled', color='grey')\n"
    elif ('#' in line.split(' ')) and ('semester' in line):
        write_to = 'suggestions'
        suggestions = suggestions + '\n' + line + "g.attr('node', style='filled', color='" + legend_color[legend_index] + "')\n"
        legend_index = legend_index + 1
    # NOTE(review): `is` comparisons against string literals below rely on
    # CPython interning and should be `==`.
    elif line is '\n':
        write_to = ''
    course = re_courses.findall(line)
    # '->' marks a prerequisite edge, '--' a corequisite edge, '*' flags a
    # critical-path edge; a bare course code becomes a plain node.
    if write_to is 'core':
        if (course) and ('->' in line) and ('*' in line):
            core_courses = core_courses + 'g' + prereq_edge(course[0], course[1], True)
        elif (course) and ('--' in line) and ('*' in line):
            core_courses = core_courses + 'g' + coreq_edge(course[0], course[1], True)
        elif (course) and ('->' in line):
            core_courses = core_courses + 'g' + prereq_edge(course[0], course[1], False)
        elif (course) and ('--' in line):
            core_courses = core_courses + 'g' + coreq_edge(course[0], course[1], False)
        elif (course):
            core_courses = core_courses + 'g' + node(course[0])
    elif write_to is 'req_electives':
        if (course) and ('->' in line):
            req_electives = req_electives + 'c1' + prereq_edge(course[0], course[1], False)
        elif (course) and ('--' in line):
            req_electives = req_electives + 'c1' + coreq_edge(course[0], course[1], False)
        elif (course):
            req_electives = req_electives + 'c1' + node(course[0])
    elif write_to is 'trk_electives':
        if (course) and ('->' in line):
            trk_electives = trk_electives + 'c2' + prereq_edge(course[0], course[1], False)
        elif (course) and ('--' in line):
            trk_electives = trk_electives + 'c2' + coreq_edge(course[0], course[1], False)
        elif (course):
            trk_electives = trk_electives + 'c2' + node(course[0])
    elif (write_to is 'suggestions') and course:
        suggestions = suggestions + 'g' + node(course[0])
    else:
        pass
# Emit the generated script: header, legend, clusters, then subgraphs.
nf.write(header + '\n')
nf.write(legend + '\n')
nf.write(req_electives + '\n')
nf.write(trk_electives + '\n')
nf.write(suggestions + '\n')
nf.write(core_courses + '\n')
# write track prerequisites
# subgraph calls
nf.write('g.subgraph(c1)' + '\n')
nf.write('g.subgraph(c2)' + '\n')
nf.write('g.subgraph(c0)' + '\n')
nf.write('g.view()')
os.startfile('studyplan.py') | danaoira/CurriculumGraphVisualizer | cgc.py | Python | mit | 5,047 |
""" A location-aware script to manage ringer volume """
__author__ = 'Marco Bonifacio <bonifacio.marco@gmail.com>'
__license__ = 'MIT License'
import android
import time
# Parameters
# Known wifi SSIDs mapped to a place name.
SSID = {'bonix-lan': 'casa',
        'ZENIT SECURED WPA': 'lavoro'}
# Ringer volume to apply in each place (unknown defaults to 5).
RINGER = {'casa': 5,
          'lavoro': 2,
          'sconosciuto': 5}
# Functions
def check_ssid(droid):
    """ Check if wireless network SSID is known.
    Args:
        droid: an Android instance.
    Returns:
        a string representing a known or unknown environment. """
    state = 'sconosciuto'
    try:
        # Scan visible networks and map any recognised SSID to its place.
        lwifi = droid.wifiGetScanResults().result
        lssid = [w['ssid']for w in lwifi]
        for s in lssid:
            if s in SSID:
                state = SSID[s]
    except Exception, e:
        droid.notify('PyLocale', 'Errore: {}'.format(e))
    finally:
        # NOTE(review): returning from `finally` swallows any in-flight
        # exception; intentional here (fail back to 'sconosciuto').
        return(state)
def check_state(droid, state, stateold):
    """ Check if environment has changed, toasting the user when it has.
    Args:
        droid: an Android instance.
        state: a string, the present state.
        stateold: a string, the former state.
    Returns:
        True when the environment changed, False otherwise. """
    if state == stateold:
        return False
    droid.vibrate()
    if state != 'sconosciuto':
        message = 'Sei a {}'.format(state)
    else:
        message = 'Sei uscito da {}'.format(stateold)
    droid.makeToast(message)
    return True
def set_ringer(droid, state):
    """ Set the ringer volume depending on state.
    Args:
        droid: an Android instance.
        state: a string, the present state.
    Returns:
        nothing. """
    # RINGER maps every expected state (including 'sconosciuto') to a
    # volume level; a toast confirms the change to the user.
    droid.setRingerVolume(RINGER[state])
    droid.makeToast('Volume: {}'.format(RINGER[state]))
if __name__ == '__main__':
    droid = android.Android()
    state = 'sconosciuto'
    # Poll the wifi environment forever, adjusting the ringer whenever
    # the detected location changes (sleep between rounds follows below).
    while True:
        stateold = state
        state = check_ssid(droid)
        changed = check_state(droid, state, stateold)
        if changed is True:
            set_ringer(droid, state)
time.sleep(300) | mbonix/yapr | android/PyLocale.py | Python | mit | 2,023 |
from django.db import models
from django.contrib.auth.models import User
from django.urls import reverse
class Category(models.Model):
    """
    Django requires models to inherit from models.Model.
    Category only needs a simple name field.
    CharField is a string type; its max_length argument caps the length
    a stored category name may have.
    Django also provides many other field types, e.g. DateTimeField and
    IntegerField; see the full list at:
    https://docs.djangoproject.com/en/1.10/ref/models/fields/#field-types
    """
    name = models.CharField(max_length=100)
    def __str__(self):
        return self.name
class Tag(models.Model):
    """
    Tag is just as simple as Category.
    Again: it must inherit from models.Model!
    """
    name = models.CharField(max_length=100)
    def __str__(self):
        return self.name
class Post(models.Model):
    """
    The post table is a bit more involved since it has more fields.
    """
    # Post title.
    title = models.CharField(max_length=70)
    # View counter.
    views = models.PositiveIntegerField(default=0)
    # Post body. CharField suits short strings, but a body may be a large
    # block of text, so TextField is used instead.
    body = models.TextField()
    # Creation and last-modified timestamps; DateTimeField stores datetimes.
    created_time = models.DateTimeField()
    modified_time = models.DateTimeField()
    # Optional excerpt. CharField normally requires a value, so blank=True
    # allows it to be left empty.
    excerpt = models.CharField(max_length=200, blank=True)
    # Category and tags (models defined above).
    # A post belongs to exactly one category while a category can hold
    # many posts, hence ForeignKey (one-to-many).
    # A post can carry several tags and a tag can mark several posts,
    # hence ManyToManyField (many-to-many); blank=True permits untagged
    # posts.  On ForeignKey / ManyToManyField see:
    # https://docs.djangoproject.com/en/1.10/topics/db/models/#relationships
    category = models.ForeignKey(
        Category,
        on_delete=models.CASCADE,
    )
    tags = models.ManyToManyField(Tag, blank=True)
    # Post author. User comes from django.contrib.auth.models — Django's
    # built-in app handling registration/login. One author per post, many
    # posts per author, so ForeignKey again (like Category).
    author = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
    )
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        return reverse('blog:detail',kwargs={'pk':self.pk})
    def increase_views(self):
        self.views += 1
        self.save(update_fields=['views'])
    class Meta:
        ordering = ['-created_time']
| ghostcode/django-blog | blogproject/blog/models.py | Python | mit | 3,967 |
#!/usr/bin/env python
import sys
import string
import subprocess
#----------------------------------------------------------------------
# Define some handy functions
#----------------------------------------------------------------------
def get_contents(file):
    """Return the lines of *file* as a list (newlines preserved).

    Uses a context manager so the handle is closed even if reading
    fails (the original left the handle open on error).
    """
    with open(file, 'r') as f:
        return f.readlines()
def write_to_file(file,fc):
    """Write the sequence of lines *fc* to *file*, replacing its contents.

    Uses a context manager so the handle is flushed and closed even if
    writing fails (the original left the handle open on error).
    """
    with open(file, 'w') as f:
        f.writelines(fc)
#----------------------------------------------------------------------
# The main function
#----------------------------------------------------------------------
def main():
    """Generate gzmat/pdb files over a grid of dihedral-angle values.

    Relies on the module-global `n_dihed` being set by the __main__
    block before this is called; shells out to Open Babel (`babel`)
    for format conversion.
    """
    # Read in identities of dihedrals to change, step size and number of steps per dihedral
    # Note that we will take one extra step per dihedral to capture the initial conformation
    # Read in name of original pdb file, and store the base file name (without the pdb)
    pdb_file = sys.argv[1]
    base = pdb_file.split('/')[-1].split('.')[0]
    gzmat_file = base + ".gzmat"
    diheds = []
    stepsizes = []
    nsteps = []
    for i in range(0,n_dihed):
        diheds.append(sys.argv[3*i+2])
        stepsizes.append(float(sys.argv[3*i+3]))
        nsteps.append(int(sys.argv[3*i+4])+1)
    #----------------------------------------------------------------------
    # Generate gzmat file from pdb file
    #----------------------------------------------------------------------
    process = subprocess.Popen("babel -ipdb {0} -ogzmat {1}".format(pdb_file, gzmat_file).split(), stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    process.communicate()
    gzmat = get_contents(gzmat_file)
    #----------------------------------------------------------------------
    # Find the line numbers and initial values for each dihedral angle in the gzmat file
    #----------------------------------------------------------------------
    lines = []
    values = []
    for i in range(0,n_dihed):
        dihed = diheds[i]
        for j in range(0,len(gzmat)):
            if string.find(gzmat[j],dihed+"=") != -1:
                lines.append(j)
                value = gzmat[j].split()[1]
                values.append(float(value))
    #----------------------------------------------------------------------
    # The main bit of code that actually drives the process
    # of generating new input files with altered dihedrals
    # Note: the "for x in range(0,y)" is essentially a do loop,
    # using different values of x at each iteration x = 0,1,2,...,y-1
    # Also note that python starts counting at 0 rather than 1
    #----------------------------------------------------------------------
    for i0 in range(0,nsteps[0]):
        # for first dihedral, set new value to initial + step number*step size
        newvalue0 = values[0] + float(i0)*stepsizes[0]
        gzmat[lines[0]] = diheds[0] + "= " + str(newvalue0) + "\n"
        new_gzmat_file = base + "_" + str(i0).zfill(3) + ".gzmat"
        new_pdb_file = base + "_" + str(i0).zfill(3) + ".pdb"
        if n_dihed == 1:
            # generate file and convert back to pdb format
            write_to_file(new_gzmat_file,gzmat)
            process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            process.communicate()
        else:
            for i1 in range(0,nsteps[1]):
                # for second dihedral, set new value to initial + step number*step size
                newvalue1 = values[1] + float(i1)*stepsizes[1]
                gzmat[lines[1]] = diheds[1] + "= " + str(newvalue1) + "\n"
                new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".gzmat"
                new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".pdb"
                if n_dihed == 2:
                    # generate file and convert back to pdb format
                    write_to_file(new_gzmat_file,gzmat)
                    process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE)
                    process.communicate()
                else:
                    for i2 in range(0,nsteps[2]):
                        # for third dihedral, set new value to initial + step number*step size
                        newvalue2 = values[2] + float(i2)*stepsizes[2]
                        gzmat[lines[2]] = diheds[2] + "= " + str(newvalue2) + "\n"
                        new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".gzmat"
                        new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".pdb"
                        if n_dihed == 3:
                            # generate file and convert back to pdb format
                            write_to_file(new_gzmat_file,gzmat)
                            process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
                                                       stderr=subprocess.PIPE)
                            process.communicate()
                        else:
                            # Unreachable: the __main__ guard rejects n_dihed > 3.
                            print 'Error: more than 3 dihedrals, should not be able to get here'
                            sys.exit()
if __name__ == '__main__':
    #----------------------------------------------------------------------
    # Read in arguments from command line
    #----------------------------------------------------------------------
    # Arguments come in (name, step size, step count) triples after the
    # pdb file name, so the count past the first two must divide by 3.
    if (len(sys.argv)-2)%3 != 0 or len(sys.argv) == 2:
        print 'Usage: scan_dihedral.py <pdb_file_name> <dihedral name 1> <step size 1> <number of steps 1>'
        print '                 ...             ...             ...      <dihedral name N> <step size N> <number of steps N>'
    else:
        # Calculate the number of dihedrals to change based on the number of arguments supplied
        n_dihed = (len(sys.argv)-2)/3
        if (n_dihed > 3):
            print 'Changing more than 3 dihedrals at once, are you sure?'
            print 'If so, you will need to edit the python script to remove the sys.exit() statement'
            print 'And write some more do loops in the main part of the code'
            sys.exit()
        main()
| hermes47/QUACCS_2.0_Shython | py2/scan_dihedral.py | Python | mit | 6,460 |
# The default ``config.py``
# flake8: noqa
def set_prefs(prefs):
    """This function is called before opening the project"""
    # Resource patterns excluded from history, VCS tracking and
    # Project.get_files().  `?`/`*` match anything but a slash; a double
    # slash ('build//*.o') lets '*' cross directory boundaries.
    prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
                                  '.hg', '.svn', '_svn', '.git', '.tox']
    # Patterns treated as python source (default: '*.py' only).
    # prefs['python_files'] = ['*.py']
    # Extra source folders, relative to the project root with '/'
    # separators; rope usually autodetects these correctly.
    # prefs.add('source_folders', 'src')
    # Additional python path entries for module lookup.
    # prefs.add('python_path', '~/python/')
    # Object database persistence.
    prefs['save_objectdb'] = True
    prefs['compress_objectdb'] = False
    # Analyse each module when it is saved.
    prefs['automatic_soa'] = True
    # Call depth followed during static object analysis.
    prefs['soa_followed_calls'] = 0
    # Dynamic object analysis while running modules/tests; disabling it
    # makes them much faster.
    prefs['perform_doa'] = True
    # Validate the object DB while running.
    prefs['validate_objectdb'] = True
    # Undo history size and persistence across sessions.
    prefs['max_history_items'] = 32
    prefs['save_history'] = True
    prefs['compress_history'] = False
    # Indentation width; four spaces per PEP 8 (and rope's own tests).
    prefs['indent_size'] = 4
    # Builtin / c-extension modules rope may import and inspect; the
    # second flag whitelists all standard c-extensions at once.
    prefs['extension_modules'] = []
    prefs['import_dynload_stdmods'] = True
    # When True, modules with syntax errors are treated as empty instead
    # of raising rope.base.exceptions.ModuleSyntaxError.
    prefs['ignore_syntax_errors'] = False
    # When True, unresolvable imports are dropped from the importing
    # namespace rather than kept.
    prefs['ignore_bad_imports'] = False
    # Import-insertion and organisation preferences.
    prefs['prefer_module_from_imports'] = False
    prefs['split_imports'] = False
    prefs['sort_imports_alphabetically'] = False
def project_opened(project):
    """Hook invoked right after the project is opened."""
    # No post-open customisation needed.
    pass
| fredericklussier/ObservablePy | .vscode/.ropeproject/config.py | Python | mit | 4,037 |
# Given code: read the element count and the array.
n = int(input().strip())
arr = [int(token) for token in input().strip().split(' ')]
# Start: tally the sign of every element in a single pass.
# Float accumulators keep the divisions below true divisions even
# under Python 2 semantics.
pos = 0.0
neg = 0.0
zero = 0.0
for value in arr:
    if value > 0:
        pos += 1
    elif value < 0:
        neg += 1
    else:
        zero += 1
# Print each fraction to six decimal places.
print("%.6f" % (pos / n))
print("%.6f" % (neg / n))
print("%.6f" % (zero / n))
| Adriel-M/HackerRank | Learn/Algorithms/Warmup/Plus Minus/plusminus.py | Python | mit | 331 |
from setuptools import setup
# Package metadata for behave-teamcity, a TeamCity test-report formatter
# plugin for behave.
setup(
    name='behave-teamcity',
    version="0.1.23",
    packages=['behave_teamcity', ],
    url='https://github.com/iljabauer/behave-teamcity',
    download_url='https://github.com/iljabauer/behave-teamcity/releases/tag/0.1.23',
    license='MIT',
    author='Ilja Bauer',
    author_email='i.bauer@cuescience.de',
    description='TeamCity test report formatter for behave',
    install_requires=["behave>=1.2.5,<=1.3", "teamcity-messages"],
    keywords=['testing', 'behave', 'teamcity', 'formatter', 'report'],
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Topic :: Software Development :: Testing",
        "Topic :: Software Development :: Build Tools",
        "Topic :: Utilities"
    ],
)
| jronald01/behave-teamcity | setup.py | Python | mit | 861 |
from wrapper import parse_header, safe_parse_header, XCSVDialect
# Public API re-exported from the internal wrapper module.
__all__ = ["parse_header", "safe_parse_header", "XCSVDialect"]
__version__ = "0.2.4"
| Alphadelta14/XCSV | xcsv/__init__.py | Python | mit | 152 |
import unittest
from Pluss import add
class TestAdd(unittest.TestCase):
    """Unit tests for Pluss.add."""
    def setUp(self):
        # No fixtures needed yet.
        pass
    def test_numbers_5_7(self):
        # 5 + 7 should equal 12.
        self.assertEqual(add(5,7), 12)
if __name__ == '__main__':
unittest.main() | github4321/IS-105_2016_Gruppe92 | uke03/Sebastian_Test_Pluss.py | Python | mit | 233 |
# Generated by Django 3.1.6 on 2022-01-26 20:48
import common.mixins
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: introduces the Course catalog model (unique
    # catalog_code validated as letters+digits) and CourseSection
    # (term/year/professor) linked to Course by catalog_code and to the
    # participating users.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("Scholarship", "0005_change_on_delete"),
    ]
    operations = [
        migrations.CreateModel(
            name="Course",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "catalog_code",
                    models.CharField(
                        max_length=10,
                        unique=True,
                        validators=[
                            django.core.validators.RegexValidator(regex="[A-Z]+[0-9]+")
                        ],
                    ),
                ),
                ("title", models.CharField(max_length=100)),
            ],
            bases=(common.mixins.ModelMixin, models.Model),
        ),
        migrations.CreateModel(
            name="CourseSection",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "term",
                    models.CharField(
                        choices=[
                            ("A", "A"),
                            ("B", "B"),
                            ("C", "C"),
                            ("D", "D"),
                            ("E", "E"),
                            ("S", "S"),
                            ("F", "F"),
                        ],
                        default="A",
                        max_length=1,
                    ),
                ),
                (
                    "year",
                    models.PositiveIntegerField(
                        validators=[django.core.validators.MaxValueValidator(99)]
                    ),
                ),
                ("professor", models.CharField(max_length=100)),
                (
                    "catalog_course",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="Scholarship.course",
                        to_field="catalog_code",
                    ),
                ),
                ("participants", models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
            ],
            bases=(common.mixins.ModelMixin, models.Model),
        ),
    ]
| sigmapi-gammaiota/sigmapi-web | sigmapiweb/apps/Scholarship/migrations/0006_course_coursesection.py | Python | mit | 2,984 |
# coding=utf-8
import os
from __init__ import *
import traceback, cStringIO, re
from flask import current_app
from werkzeug.datastructures import FileStorage
from server import user_server, article_server, status_server, form, \
account_server, news_server, resource_server
from server import general, honor_server
from dao.dbACCOUNT import Account
from dao import dbCompetition, dbPlayer
from util import json, CJsonEncoder
from flask.globals import _app_ctx_stack
from flask import request, jsonify
from sqlalchemy.exc import IntegrityError
from server.account_server import AccountUpdatingException, AccountExistException
from util import function
#
# @blueprint: ajax
# @created: 2015/06/22
# @author: Z2Y
#
# Blueprint that groups all of the site's AJAX endpoints.
ajax = blueprints.Blueprint('ajax', __name__)
#
# @brief: json for recent contest
# @route: /ajax/contest.json
# @allowed user: public
#
@ajax.route("/ajax/contest.json", methods=['GET'])
def recent_contests():
import json
json_file = open(RECENT_CONTEST_JSON, 'r').read()
json_contests = json.JSONDecoder().decode(json_file)
contests = []
for contest in json_contests:
name, link = contest['name'], contest['link']
new_contest = {
'oj': contest['oj'],
'name': '<a href="' + link + '" class="contest-name" title="' + name + '">' + name + '</a>',
'start_time': contest['start_time'],
'access': contest['access'],
}
contests.append(new_contest)
return json.dumps({ 'data': contests })
#
# @brief: ajax rank list
# @route: /ajax/rank_list
# @allowed user: student and coach
#
@ajax.route('/ajax/main_rank_table')
def main_rank_table():
    """Serve the site-wide rank list as a DataTables JSON payload."""
    return json.dumps({'data': general.get_rank_list()})
#
# @brief: ajax html for one user item
# @allowed user: admin and coach
#
@login_required
def get_user_list_item(user):
    """Render a single user row of the management table as an HTML snippet."""
    return render_template('ajax/user_list_item.html',
                           user = user,
                           school_mapper = SCHOOL_MAP,
                           college_mapper = SCHOOL_COLLEGE_MAP)
#
# @brief: ajax user list
# @route: /ajax/user_list
# @allowed user: admin and coach
#
@ajax.route('/ajax/user_list', methods=["GET", "POST"])
@login_required
def get_users():
    """Return one page of the user-management table as JSON.

    Admins see every user; coaches only see users of their own school.
    Includes rendered row HTML plus pagination metadata.
    """
    if not current_user.is_admin and not current_user.is_coach:
        return redirect(url_for('main.index'))
    page = request.args.get('page', 1, type=int)
    search = request.args.get('search', None)
    per_page = USER_MANAGE_PER_PAGE
    pagination = None
    if current_user.is_admin:
        pagination = user_server.get_list_pageable(page, per_page, search=search)
    elif current_user.is_coach:
        pagination = user_server.get_list_pageable(page, per_page, search=search,
                                                   school=current_user.school)
    page_list = list(pagination.iter_pages(left_current=1, right_current=2))
    return jsonify(items=[get_user_list_item(user) for user in pagination.items],
                   prev_num=pagination.prev_num,
                   next_num=pagination.next_num,
                   page_list=page_list,
                   page=pagination.page,
                   pages=pagination.pages)
#
# @brief: add user
# @route: /ajax/create_user
# @accepted methods: [post]
# @allowed user: admin and coach
# @ajax return: 用户是否添加成功
#
@ajax.route('/ajax/create_user', methods=["POST"])
@login_required
def create_user():
    """Create one user from the submitted RegisterForm (admin/coach only).

    The selected rights checkboxes are OR-ed into a single bitmask.
    Returns a localised success/failure message string.
    """
    if not current_user.is_admin and not current_user.is_coach:
        return redirect(url_for('main.index'))
    reg_form = form.RegisterForm()
    if reg_form.validate_on_submit():
        try:
            rights_list = request.form.getlist('rights')
            rights = 0
            for item in rights_list:
                rights = rights | int(item)
            ret = user_server.create_user(reg_form, rights)
            if ret == 'OK':
                return u"添加用户成功"
            return u"添加用户失败: " + ret
        except Exception, e:
            current_app.logger.error(traceback.format_exc())
            return u"添加用户失败: " + e.message
    else:
        #print reg_form.errors
        return u"添加用户失败: 表单填写有误"
#
# @brief: add many users
# @route: /ajax/create_users
# @accepted methods: [post]
# @allowed user: admin and coach
# @ajax return: 用户添加成功的数量
#
@ajax.route('/ajax/create_users', methods=["POST"])
@login_required
def create_users():
    """Batch-create users from the MultiRegisterForm (admin/coach only).

    Returns a localised message describing how many users were created,
    or why the operation failed.
    """
    if not current_user.is_admin and not current_user.is_coach:
        return redirect(url_for('main.index'))
    reg_form = form.MultiRegisterForm()
    if reg_form.validate_on_submit():
        try:
            ret = user_server.create_many_users(reg_form, current_user)
            return ret
        except Exception, e:
            current_app.logger.error(traceback.format_exc())
            return u"添加用户失败: " + e.message
    else:
        #print reg_form.errors
        return u"添加用户失败: 表单填写有误"
#
# @brief: check apply user
# @route: /ajax/check_apply
# @accepted methods: [post]
# @allowed user: admin and coach
# @ajax return: 操作结果
#
@ajax.route("/ajax/check_apply", methods= ['POST'])
@login_required
def check_apply():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
try:
apply_id = request.form.get('uid')
user = user_server.get_by_id(apply_id)
opt = request.form.get('opt')
ret = user_server.update_apply(apply_id, opt)
if ret == 'OK':
function.reply_of_apply(mail, user.serialize, _app_ctx_stack.top, opt)
return ret
except Exception:
current_app.logger.error(traceback.format_exc())
return u'操作失败'
#
# @brief: edit user
# @route: /ajax/edit_user
# @accepted methods: [post]
# @allowed user: admin and coach
#
@ajax.route('/ajax/edit_user', methods=["POST"])
@login_required
def edit_user():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
user_modify_form = form.UserModifyForm()
if user_modify_form.validate_on_submit():
try:
rights_list = request.form.getlist('rights')
rights = 0
for item in rights_list:
rights = rights | int(item)
ret = user_server.update_user(user_modify_form, rights)
if ret == 'OK':
return u"修改用户成功"
return u'修改用户失败: ' + ret
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u"修改用户失败: " + e.message
else:
#print user_modify_form.errors
return u"修改用户失败: 表单填写有误"
#
# @brief: edit user for self
# @route: /ajax/edit_user_self
# @accepted methods: [post]
# @allowed user: all
#
@ajax.route('/ajax/edit_user_self', methods=["POST"])
@login_required
def edit_user_self():
user_modify_form = form.UserModifyForm()
if user_modify_form.validate_on_submit():
try:
ret = user_server.update_user(user_modify_form, for_self=True)
if ret == 'OK':
return u"修改用户成功"
return u'修改用户失败: ' + ret
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u"修改用户失败: " + e.message
else:
#print user_modify_form.errors
return u"修改用户失败: 表单填写有误"
#
# @brief: modify password
# @route: /ajax/modify_password
# @accepted methods: [post]
# @allowed user: all
# @ajax return: 密码是否修改成功 => string
#
@ajax.route('/ajax/modify_password', methods=['POST'])
@login_required
def modify_password():
pwd_modify_form = form.PasswordModifyForm()
if pwd_modify_form.validate_on_submit():
if not current_user.verify_password(pwd_modify_form.password.data):
return u"当前密码输入错误"
return user_server.modify_password(pwd_modify_form, current_user)
return u"修改密码失败"
#
# @brief: delete user
# @route: /ajax/delete_user
# @accepted methods: [post]
# @allowed user: admin and coach
#
@ajax.route('/ajax/delete_user', methods=["POST"])
@login_required
def delete_user():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
try:
id = request.form.get('user_id')
user_server.delete_by_id(id)
return u"OK"
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u"FAIL"
#
# @brief: ajax html for one user item
# @allowed user: administrator
#
@login_required
def get_news_list_item(news):
return render_template('ajax/news_list_item.html', news=news)
#
# @brief: ajax news list
# @route: /ajax/news_list
# @allowed user: administrator
#
@ajax.route('/ajax/news_list', methods=['GET', 'POST'])
@login_required
def get_news_list():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
page = request.args.get('page', 1, type=int)
search = request.args.get('search', None)
per_page = NEWS_MANAGE_PER_PAGE
pagination = news_server.get_list_pageable(page, per_page, show_draft=True, search=search)
page_list = list(pagination.iter_pages(left_current=1, right_current=2))
return jsonify(items=[get_news_list_item(news) for news in pagination.items],
prev_num=pagination.prev_num,
next_num=pagination.next_num,
page_list=page_list,
page=pagination.page,
pages=pagination.pages)
#
# @brief: delete news
# @route: /ajax/delete_news
# @accepted methods: [post]
# @allowed user: admin and coach
#
@ajax.route("/ajax/delete_news", methods = ['POST'])
@login_required
def delete_news():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
try:
news_id = request.form.get('news_id')
news_server.delete_by_id(news_id)
return u'OK'
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u'FAIL'
#
# @brief: post news
# @route: /ajax/post_news
# @accepted methods: [post]
# @allowed user: admin and coach
#
@ajax.route('/ajax/post_news', methods=['POST'])
@login_required
def post_news():
if not current_user.is_admin and not current_user.is_coach:
return u"没有权限"
news_form = form.NewsForm()
if news_form.validate_on_submit():
try:
is_draft = int(request.args['draft'])
news_server.post(news_form, current_user, is_draft)
return u"发表成功!"
except IntegrityError:
current_app.logger.error(traceback.format_exc())
return u"发表新闻失败: 固定链接已存在"
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u"发表新闻失败: " + e.message
else:
return u"发表新闻失败,请检查内容"
#
# @brief: ajax html for one account item
# @allowed user: self, admin and coach
#
@login_required
def get_account_item(account, user):
return render_template('ajax/account_info_item.html',
account = account,
user = user, str = str)
#
# @brief: add or modify account
# @route: /ajax/account_manager
# @accepted methods: [post]
# @allowed user: administrator or the user
#
@ajax.route('/ajax/account_info_list', methods=['POST', 'GET'])
@login_required
def account_info_list():
try:
profile_user = user_server.get_by_username_or_404(request.args['username'])
except:
profile_user = current_user
account_info_list = account_server.get_account_info_list(profile_user)
return jsonify(account_list = [get_account_item(account_info, profile_user) for account_info in account_info_list],
length = len(account_info_list))
#
# @brief: update the statistics info of account
# @route: /ajax/update_account
# @accepted methods: [post]
# @allowed user: admin, coach or the user
#
@ajax.route('/ajax/update_account', methods=['POST'])
@login_required
def update_account():
try:
profile_user = user_server.get_by_id(request.form.get('user_id'))
except:
profile_user = current_user
if profile_user != current_user and\
(not current_user.is_admin and not current_user.is_coach_of(profile_user)):
return u"没有权限"
try:
account_id = request.form.get('account_id')
account_server.update_account_by_id(account_id)
return u"ok"
except AccountUpdatingException, e:
current_app.logger.error(traceback.format_exc())
return 'ERROR: ' + e.message
except:
current_app.logger.error(traceback.format_exc())
return 'ERROR: unknown error'
#
# @brief: add or modify account
# @route: /ajax/account_manager
# @accepted methods: [post]
# @allowed user: administrator or the user
# @ajax return: string
#
@ajax.route('/ajax/account_manager', methods=['POST'])
@login_required
def account_manager():
try:
profile_user = user_server.get_by_username_or_404(request.args['username'])
except:
profile_user = current_user
account_form = form.AccountForm()
if current_user != profile_user and\
(not current_user.is_admin and not current_user.is_coach_of(profile_user)):
return u"没有权限"
if account_form.validate_on_submit():
try:
has_account = Account.query\
.filter_by(user=profile_user, oj_name=account_form.oj_name.data)\
.first()
if has_account:
account_server.modify_account(has_account, account_form)
return u"ok"
else:
account_server.add_account(profile_user, account_form)
return u"ok"
except AccountUpdatingException, e:
current_app.logger.error(traceback.format_exc())
return 'ERROR: ' + e.message
except AccountExistException, e:
current_app.logger.error(traceback.format_exc())
return 'ERROR: ' + e.message
except:
current_app.logger.error(traceback.format_exc())
return 'ERROR: unknown error'
else:
return u"添加账号失败"
#
# @brief: delete account
# @route: /ajax/delete_account
# @accepted methods: [post]
# @allowed user: administrator or the user
# @ajax return: string
#
@ajax.route('/ajax/delete_account', methods=['POST'])
@login_required
def delete_account():
try:
profile_user = user_server.get_by_id(request.form.get('user_id'))
except:
profile_user = current_user
if profile_user != current_user and\
(not current_user.is_admin and not current_user.is_coach_of(profile_user)):
return u"没有权限"
try:
account_id = request.form.get('account_id')
account_server.delete_account_by_id(profile_user, account_id)
return u"OK"
except AccountUpdatingException, e:
current_app.logger.error(traceback.format_exc())
return 'ERROR: ' + e.message
except:
current_app.logger.error(traceback.format_exc())
return 'ERROR: unknown error'
#
# @brief: add or modify solution
# @route: /ajax/solution_manager
# @accepted methods: [post]
# @allowed user: administrator or the user
# @ajax return: string
#
@ajax.route('/ajax/solution_manager', methods=['POST'])
@login_required
def solution_manager():
try:
profile_user = user_server.get_by_username_or_404(request.args['username'])
except:
profile_user = current_user
solution_form = form.SolutionForm()
if solution_form.validate_on_submit():
try:
is_draft = int(request.args['draft'])
article_server.post(solution_form, profile_user, is_draft)
return u"发表成功!"
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u"发表文章失败" + e.message
else:
return u"发表文章失败,请检查内容"
#
# @brief: ajax to get status list
# @route: /ajax/fitch_status/<oj_name>
# @allowed user: all
#
@ajax.route('/ajax/fitch_status/<oj_name>', methods=['POST'])
@login_required
def fitch_status(oj_name):
headers = ['account_name', 'run_id', 'pro_id', 'lang', 'run_time', 'memory', 'submit_time']
ret = status_server.DataTablesServer(request.form, oj_name, headers).run_query()
return json.dumps(ret, cls=CJsonEncoder)
#
# @brief: ajax html for one img choose item
# @allowed user: admin and coach
#
@login_required
def get_img_choose_item(img_item):
return render_template('ajax/img-choose-item.html',
img_item = img_item,
file_url = resource_server.file_url)
#
# @brief: ajax img choose list
# @route: /ajax/img_choose_list
# @allowed user: admin and coach
#
@ajax.route('/ajax/img_choose_list', methods=["POST"])
@login_required
def get_img_choose_list():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
offset = request.form.get('offset')
limit = request.form.get('limit')
type = request.form.get('type')
from dao.dbResource import ResourceUsage
if type == 'honor':
images = resource_server.get_image_list(offset, limit, ResourceUsage.HONOR_RES)
sum = resource_server.get_image_count(ResourceUsage.HONOR_RES)
else:
images = resource_server.get_image_list(offset, limit)
sum = resource_server.get_image_count()
return jsonify(img_list=[get_img_choose_item(img) for img in images],
sum=sum, offset=int(offset), limit=len(images))
#
# @brief: ajax html for one resource item
# @allowed user: self, admin and coach
#
@login_required
def get_resource_list_item(resource):
return render_template('ajax/resource_list_item.html',
resource = resource,
file_size = resource_server.file_size,
file_url = resource_server.file_url)
#
# @brief: ajax resource list
# @route: /ajax/resource_list
# @accepted methods: [post]
# @allowed user: self, admin, coach
#
@ajax.route('/ajax/resource_list', methods=['GET', 'POST'])
@login_required
def get_resource_list():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
page = request.args.get('page', 1, type=int)
search = request.args.get('search', None)
per_page = RESOURCE_MANAGE_PER_PAGE
pagination = resource_server.get_list_pageable(page, per_page, current_user, search)
page_list = list(pagination.iter_pages(left_current=1, right_current=2))
return jsonify(items=[get_resource_list_item(resource) for resource in pagination.items],
prev_num=pagination.prev_num,
next_num=pagination.next_num,
page_list=page_list,
page=pagination.page,
pages=pagination.pages)
#
# @brief: ajax to upload resource
# @route: /ajax/upload
# @accepted methods: [post]
#
@ajax.route('/ajax/upload', methods=['POST'])
@login_required
def upload():
file_form = form.FileUploadForm()
if file_form.validate_on_submit():
try:
if file_form.upload.data:
file = request.files[file_form.upload.name]
msg = resource_server.save_file(file_form, file, current_user, 'other')
return msg
else:
return u'上传数据失败'
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u'错误: ' + e.message
return u'数据填写有误'
#
# @brief: ajax to upload poster
# @route: /ajax/upload
# @accepted methods: [post]
#
@ajax.route('/ajax/upload/poster', methods=['POST'])
@login_required
def upload_poster():
from dao.dbResource import ResourceLevel, ResourceUsage
file_form = form.FileUploadForm()
file_form.level.data = str(ResourceLevel.PUBLIC)
file_form.usage.data = str(ResourceUsage.POSTER_RES)
if file_form.validate_on_submit():
try:
file_canvas = request.form.get('croppedImage')
if file_canvas:
file_string = re.sub('^data:image/.+;base64,', '', file_canvas).decode('base64')
file_binary = cStringIO.StringIO(file_string)
file = FileStorage(file_binary, file_form.name.data + '.jpg')
msg = resource_server.save_file(file_form, file, current_user, 'poster')
return msg
else:
return u'上传数据失败'
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u'错误: ' + e.message
current_app.logger.error(file_form.errors)
return u'数据填写有误'
#
# @brief: ajax to get modal with edit-resource form
# @route: /ajax/resource_info
# @accepted methods: [post]
#
@ajax.route('/ajax/resource_info', methods=['POST'])
@login_required
def get_resource_info():
resource_id = request.form.get('resource_id')
rs = resource_server.get_by_id(resource_id)
if rs.level >= 2 and not current_user.is_admin and not current_user.is_coach_of(rs.user):
return u'permission denied'
file_edit_form = form.FileInfoForm()
if not current_user.is_admin and not current_user.is_coach:
file_edit_form.usage.choices = [('3',u'题解资源'), ('4',u'其他资源')]
file_edit_form.id.data = rs.id
file_edit_form.level.data = str(rs.level)
file_edit_form.name.data = rs.name
file_edit_form.description.data = rs.description
file_edit_form.usage.data = str(rs.usage)
return render_template('ajax/resource_modify_modal.html',
file_edit_form = file_edit_form)
#
# @brief: ajax to edit resource
# @route: /ajax/resource_info
# @accepted methods: [post]
#
@ajax.route("/ajax/edit_resource", methods = ['POST'])
@login_required
def edit_resource():
file_edit_form = form.FileInfoForm()
if file_edit_form.validate_on_submit():
return resource_server.modify_file(file_edit_form, current_user)
return u'表单填写错误'
#
# @brief: ajax to delete resource
# @route: /ajax/delete_resource
# @accepted methods: [post]
#
@ajax.route("/ajax/delete_resource", methods = ['POST'])
@login_required
def delete_resource():
try:
resource_id = request.form.get('resource_id')
msg = resource_server.delete_file(resource_id, current_user)
return msg
except:
current_app.logger.error(traceback.format_exc())
return u'删除失败'
#
# @brief: ajax html for one honor item
# @allowed user: self, admin and coach
#
@login_required
def get_honor_list_item(honor):
from config import HONOR_LEVEL_MAP
return render_template('ajax/honor_list_item.html',
honor = honor,
level_mapper = HONOR_LEVEL_MAP)
#
# @brief: ajax honor list
# @route: /ajax/honor_list
# @accepted methods: [post]
# @allowed user: self, admin, coach
#
@ajax.route('/ajax/honor_list', methods=['GET', 'POST'])
@login_required
def get_honor_list():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
page = request.args.get('page', 1, type=int)
search = request.args.get('search', None)
per_page = HONOR_MANAGE_PER_PAGE
pagination = honor_server.get_list_pageable(page, per_page, search)
page_list = list(pagination.iter_pages(left_current=1, right_current=2))
return jsonify(items=[get_honor_list_item(honor) for honor in pagination.items],
prev_num=pagination.prev_num,
next_num=pagination.next_num,
page_list=page_list,
page=pagination.page,
pages=pagination.pages)
#
# @brief: ajax to add honor
# @route: /ajax/add_honor
# @accepted methods: [post]
# @allowed user: self, admin, coach
#
@ajax.route("/ajax/add_honor", methods = ['POST'])
@login_required
def add_honor():
honor_form = form.HonorForm()
file_form = form.FileUploadForm()
honor_form.users.choices = user_server.get_user_choice()
if honor_form.validate_on_submit():
try:
from dao.dbResource import ResourceLevel, ResourceUsage
resource_list = []
for name, file in request.files.items(multi=True):
file_form.level.data = ResourceLevel.PUBLIC
file_form.name.data = unicode(file.filename).split('.')[0]
file_form.usage.data = ResourceUsage.HONOR_RES
resource_server.save_file(file_form, file, current_user, 'honor')
resource = resource_server.get_by_name(file_form.name.data)
resource_list.append(resource)
msg = honor_server.add_honor(honor_form, resource_list)
return msg
except Exception, e:
current_app.logger.error(traceback.format_exc())
return 'failed'
return u'数据填写有误'
#
# @brief: ajax to modify honor
# @route: /ajax/modify_honor
# @accepted methods: [post]
# @allowed user: self, admin, coach
#
@ajax.route("/ajax/modify_honor", methods = ['POST'])
@login_required
def modify_honor():
honor_form = form.HonorForm()
file_form = form.FileUploadForm()
honor_form.users.choices = user_server.get_user_choice()
if honor_form.validate_on_submit():
try:
honor = honor_server.get_by_id(honor_form.id.data)
from dao.dbResource import ResourceLevel, ResourceUsage
resource_list = []
for name, file in request.files.items(multi=True):
if file.filename == '':
continue
file_form.level.data = ResourceLevel.PUBLIC
file_form.name.data = unicode(file.filename).split('.')[0]
file_form.usage.data = ResourceUsage.HONOR_RES
ret = resource_server.save_file(file_form, file, current_user, 'honor')
if ret == 'OK':
resource = resource_server.get_by_name(file_form.name.data)
resource_list.append(resource)
msg = honor_server.modify_honor(honor, honor_form, resource_list)
return msg
except:
current_app.logger.error(traceback.format_exc())
return 'failed'
return u'数据填写有误'
#
# @brief: ajax to delete honor
# @route: /ajax/delete_honor
# @accepted methods: [post]
# @allowed user: self, admin, coach
#
@ajax.route("/ajax/delete_honor", methods = ['POST'])
@login_required
def delete_honor():
try:
honor_id = request.form.get('honor_id')
msg = honor_server.delete_honor(honor_id)
return msg
except:
current_app.logger.error(traceback.format_exc())
return u'FAIL'
# not used
@login_required
def get_article_list_item(article):
return render_template('ajax/article_list_item.html', article = article)
#
# @brief: ajax article list
# @route: /ajax/article_list
# @accepted methods: [post]
# @allowed user: self, admin, coach
#
@ajax.route("/ajax/article_list", methods = ['POST'])
@login_required
def get_article_list():
offset = request.form.get('offset')
limit = request.form.get('limit')
article_list = article_server.get_list(offset, limit, current_user)
sum = article_server.get_count(current_user)
return jsonify(article_list=[get_article_list_item(article) for article in article_list],
sum=sum, offset=int(offset), limit=len(article_list))
# not used
@login_required
def get_related_submits_item(submit):
return render_template('ajax/related_submits_item.html', submit=submit)
# not used
@ajax.route("/ajax/related_submits", methods = ['POST'])
@login_required
def get_related_submits():
article_id = request.form.get('article_id')
offset = request.form.get('offset')
limit = request.form.get('limit')
one = article_server.get_by_id(article_id)
related_submits = article_server.related_submits(one, offset, limit)
sum = article_server.related_submits_count(one)
return jsonify(submits_list=[get_related_submits_item(submit) for submit in related_submits],
sum=sum, offset=int(offset), limit=len(related_submits))
# not used
@login_required
def get_related_article_item(article):
return render_template('ajax/related_article_item.html', article=article)
# not used
@ajax.route("/ajax/related_article", methods = ['POST'])
@login_required
def get_related_article():
submit_id = request.form.get('submit_id')
offset = request.form.get('offset')
limit = request.form.get('limit')
one = general.get_submit_by_id(submit_id)
related_article = general.related_article(one, offset, limit)
sum = general.related_article_count(one)
return jsonify(article_list=[get_related_article_item(article) for article in related_article],
sum=sum, offset=int(offset), limit=len(related_article))
#
# @brief: ajax to delete article
# @route: /ajax/delete_article
# @accepted methods: [post]
# @allowed user: self, admin, coach
#
@ajax.route("/ajax/delete_article", methods = ['POST'])
@login_required
def delete_article():
try:
article_id = request.form.get('article_id')
article_server.delete_by_id(article_id)
return u'删除成功'
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u'删除失败'
#
# @brief: ajax to get member situation list
# @route: /ajax/members
# @accepted methods: [get]
# @allowed user: public
#
@ajax.route("/ajax/members", methods=['GET'])
def members():
all_users = user_server.get_list(limit=-1)
users = []
for user in all_users:
if user.is_student:
users.append({
'name': user.name,
'college': SCHOOL_COLLEGE_MAP[user.college] if user.college else '',
'grade': user.grade + u'级' if user.grade else '',
'situation': user.situation
})
return json.dumps({ 'data': users })
#
# @brief: ajax html for one competition item
# @allowed user: admin and coach
#
@login_required
def get_competition_list_item(competition):
from datetime import datetime
diff = (competition.event_date - datetime.today()).days
if diff > 2:
process = 0
elif diff > -1:
process = 1
else:
process = 2
return render_template('ajax/competition_list_item.html',
competition = competition,
len = len, process = process)
#
# @brief: ajax competition list
# @route: /ajax/competition_list
# @allowed user: admin and coach
#
@ajax.route('/ajax/competition_list', methods=["GET", "POST"])
@login_required
def get_competitions():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
page = request.args.get('page', 1, type=int)
search = request.args.get('search', None)
per_page = COMPETITION_MANAGE_PER_PAGE
pagination = dbCompetition.get_list_pageable(page, per_page, search=search)
page_list = list(pagination.iter_pages(left_current=1, right_current=2))
return jsonify(items=[get_competition_list_item(c) for c in pagination.items],
prev_num=pagination.prev_num,
next_num=pagination.next_num,
page_list=page_list,
page=pagination.page,
pages=pagination.pages)
@ajax.route("/ajax/add_competition", methods = ['POST'])
@login_required
def add_competition():
competition_form = form.CompetitionForm()
if competition_form.validate_on_submit():
try:
feedback = dbCompetition.create_competition(competition_form)
if feedback == 'OK':
return '添加成功'
else:
return feedback
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u'添加失败'
return u'数据填写有误'
@ajax.route("/ajax/edit_competition", methods = ['POST'])
@login_required
def edit_competition():
competition_form = form.CompetitionForm()
if competition_form.validate_on_submit():
try:
id = request.form.get('id')
feedback = dbCompetition.update_competition(id, competition_form)
if feedback == 'OK':
return '修改成功'
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u'修改失败'
return u'数据填写有误'
@ajax.route("/ajax/delete_competition", methods = ['POST'])
@login_required
def delete_competition():
try:
competition_id = request.form.get('competition_id', -1, type=int)
dbCompetition.delete_by_id(competition_id)
return u'OK'
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u'删除失败'
#
# @brief: ajax html for one player item
# @allowed user: admin and coach
#
@login_required
def get_player_list_item(player):
return render_template('ajax/player_list_item.html',
player = player,
college_mapper = SCHOOL_COLLEGE_MAP)
#
# @brief: ajax player list
# @route: /ajax/player_list
# @allowed user: admin and coach
#
@ajax.route('/ajax/player_list', methods=["GET", "POST"])
@login_required
def get_players():
if not current_user.is_admin and not current_user.is_coach:
return redirect(url_for('main.index'))
page = request.args.get('page', 1, type=int)
search = request.args.get('search', None)
competition_id = request.args.get('competition', 1, type=int)
per_page = COMPETITION_MANAGE_PER_PAGE
competition = dbCompetition.get_by_id(competition_id)
pagination = dbCompetition.get_players_pageable(competition, page,
per_page, search=search)
page_list = list(pagination.iter_pages(left_current=1, right_current=2))
return jsonify(items=[get_player_list_item(p) for p in pagination.items],
prev_num=pagination.prev_num,
next_num=pagination.next_num,
page_list=page_list,
page=pagination.page,
pages=pagination.pages)
@ajax.route("/ajax/delete_player", methods = ['POST'])
@login_required
def delete_player():
try:
player_id = request.form.get('player_id', -1, type=int)
dbPlayer.delete_by_id(player_id)
return u'OK'
except Exception, e:
current_app.logger.error(traceback.format_exc())
return u'删除失败' | Raynxxx/CUIT-ACM-Website | view/ajax.py | Python | mit | 35,176 |
# URL routing for the firestation app (Django 1.x "patterns" style).
from .views import DepartmentDetailView, Stats, FireDepartmentListView, SimilarDepartmentsListView, DepartmentUpdateGovernmentUnits
from django.contrib.auth.decorators import permission_required
from django.views.generic import TemplateView
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
# NOTE(review): patterns() is deprecated in Django 1.8 and removed in 1.10;
# presumably this project pins an older Django version — confirm before upgrading.
urlpatterns = patterns('',
    # Similar-department listings (with and without a slug segment).
    url(r'departments/(?P<pk>\d+)/(?P<slug>[-\w]+)/similar-departments/?$', SimilarDepartmentsListView.as_view(template_name='firestation/firedepartment_list.html'), name='similar_departments_slug'),
    url(r'departments/(?P<pk>\d+)/similar-departments/?$', SimilarDepartmentsListView.as_view(template_name='firestation/firedepartment_list.html'), name='similar_departments'),
    # Settings page restricted to users with the change_firedepartment permission.
    url(r'departments/(?P<pk>\d+)/settings/government-units/?$', permission_required('firestation.change_firedepartment')(DepartmentUpdateGovernmentUnits.as_view()), name='firedepartment_update_government_units'),
    # Department detail pages; the slug variant must come after the plain pk route
    # here order matters only for reverse() naming, both resolve to the same view.
    url(r'departments/(?P<pk>\d+)/?$', DepartmentDetailView.as_view(template_name='firestation/department_detail.html'), name='firedepartment_detail'),
    url(r'departments/(?P<pk>\d+)/(?P<slug>[-\w]+)/?$', DepartmentDetailView.as_view(template_name='firestation/department_detail.html'), name='firedepartment_detail_slug'),
    url(r'departments/?$', FireDepartmentListView.as_view(template_name='firestation/firedepartment_list.html'), name='firedepartment_list'),
    # Static model-description pages, cached for 24 hours.
    url(r'community-risk$', cache_page(60 * 60 * 24)(TemplateView.as_view(template_name='firestation/community_risk_model.html')), name='models_community_risk'),
    url(r'performance-score$', cache_page(60 * 60 * 24)(TemplateView.as_view(template_name='firestation/performance_score_model.html')), name='models_performance_score'),
    url(r'stats/fire-stations/?$', Stats.as_view(), name='firestation_stats'),
)
| garnertb/firecares | firecares/firestation/urls.py | Python | mit | 2,033 |
'''
@author: davandev
'''
import logging
import os
import urllib
import datetime
import telnetlib
import paramiko
import davan.config.config_creator as configuration
import davan.util.constants as constants
import davan.util.helper_functions as helper
from davan.http.service.presence.AsusRouterDeviceStatus import AsusRouterDeviceStatus,\
FAMILY, GUESTS, HOUSE
from davan.http.service.reoccuring_base_service import ReoccuringBaseService
'''
'''
class AsusRouterPresenceService(ReoccuringBaseService):
'''
This service is tested on an Asus router and relies on Telnet/ssh being enabled in the router.
It is enabled in the router configuration.
The presence of the monitored devices are determined on whether the device is
connected on the wifi network.
The check is done by logging into the router via Telnet, and checking the connections
using command "/usr/sbin/ip neigh" this gives a list of devices registered.
Each device can be in state ESTABLISHED, STALE or FAILED.
FAILED means that the device is not connected to the network. The states ESTABLISHED and STALE is
interpreted as being available on wifi network.
At state change a telegram message is sent to the receivers. Virtual device on Fibaro HC2 is also updated
with the state change.
'''
def __init__(self, service_provider, config ):
    '''
    Set up the presence service and build the monitored-device tables.
    @param service_provider provider handed to the reoccuring base service
    @param config configuration dict with router credentials and device maps
    '''
    ReoccuringBaseService.__init__(self, constants.DEVICE_PRESENCE_SERVICE_NAME, service_provider, config)
    self.logger = logging.getLogger(os.path.basename(__file__))
    # Silence paramiko's very chatty transport logger
    logging.getLogger('paramiko.transport').setLevel(logging.CRITICAL)
    # Command to run on router to list available devices
    self.list_active_devices_cmd = "/usr/sbin/ip neigh"
    # Check status every 5th minute
    self.time_to_next_timeout = 300
    # ip addresses seen on the network that belong to no configured group
    self.unknown_devices = []
    # Each table maps ip address -> AsusRouterDeviceStatus for one group
    self.family_devices = dict(
        (ip, AsusRouterDeviceStatus(ip, user, FAMILY))
        for ip, user in self.config['FAMILY_DEVICES'].items())
    self.guest_devices = dict(
        (ip, AsusRouterDeviceStatus(ip, user, GUESTS))
        for ip, user in self.config['GUEST_DEVICES'].items())
    self.house_devices = dict(
        (ip, AsusRouterDeviceStatus(ip, user, HOUSE))
        for ip, user in self.config['HOUSE_DEVICES'].items())
def get_next_timeout(self):
    '''
    Return the number of seconds until the next presence poll.
    @return poll interval in seconds
    '''
    interval = self.time_to_next_timeout
    return interval
def handle_timeout(self):
    '''
    Periodic callback: fetch a fresh neighbour snapshot from the router,
    refresh every monitored group and finally scan for unknown devices.
    '''
    active_devices = self.fetch_active_devices()
    # Same evaluation order as before: family, guests, house
    for group in (self.family_devices, self.guest_devices, self.house_devices):
        self.check_device_group(group, active_devices)
    # Anything on the network that is in no group at all
    self.check_unknown_devices(active_devices)
def check_unknown_devices(self, active_devices):
    '''
    Scan the router neighbour list for devices that belong to none of the
    configured groups. A newly seen unknown device is remembered and, if it
    is currently connected, reported via a telegram message.
    @param active_devices output lines from "/usr/sbin/ip neigh"
    '''
    for line in active_devices:
        if not line.startswith("192."):
            continue
        items = line.split()
        ip = items[0]
        # Skip everything that belongs to a configured group
        if ip in self.family_devices or ip in self.guest_devices \
                or ip in self.house_devices:
            continue
        if ip not in self.unknown_devices:
            self.logger.warning("Unknown: " + ip)
            self.unknown_devices.append(ip)
            # "ip neigh" prints e.g.
            #   <ip> dev <if> lladdr <mac> REACHABLE    (connected)
            #   <ip> dev <if> FAILED                    (gone)
            # so the state is the LAST token. The previous code compared
            # items[3], which is the literal word "lladdr" for connected
            # hosts, so the alert below could never trigger.
            state = items[-1]
            if state == "REACHABLE" or state == "STALE":
                self.logger.warning("Unknown active device : " + str(items))
                helper.send_telegram_message(self.config, "Unknown device is now active on network")
def check_device_group(self, monitored_devices, active_devices):
    '''
    Refresh presence state for one group of monitored devices and notify
    about every device whose state toggled during this poll.
    @param monitored_devices dict of ip -> AsusRouterDeviceStatus
    @param active_devices raw neighbour lines fetched from the router
    '''
    # Clear the change flag on every device before re-evaluating the group
    for device in monitored_devices.values():
        device.changed = False
    self.update_presence(monitored_devices, active_devices)
    # Publish every state change detected above
    for device in monitored_devices.values():
        if device.changed:
            self.notify_change(device)
def notify_change(self, device):
    '''
    Propagate a presence change: family members update the Fibaro virtual
    presence device; family members and guests additionally generate a
    telegram message.
    @param device device that changed state
    '''
    is_family = device.type == FAMILY
    if is_family:
        # Reflect the new state on the Fibaro virtual device
        url = helper.createFibaroUrl(self.config['UPDATE_DEVICE'],
                                     self.config['FIBARO_VD_PRESENCE_ID'],
                                     self.config['FIBARO_VD_MAPPINGS'][device.user],
                                     device.active_toString())
        urllib.urlopen(url)
    if is_family or device.type == GUESTS:
        helper.send_telegram_message(self.config, device.user + " [" + device.active_toString() + "]")
def fetch_active_devices_telnet(self):
    '''
    Legacy telnet variant: log in to the router and run the neighbour
    listing command.
    @return list of output lines from the telnet session
    '''
    session = telnetlib.Telnet(self.config['ROUTER_ADRESS'])
    session.read_until("login: ")
    session.write(self.config['ROUTER_USER'] + "\n")
    session.read_until("Password: ")
    session.write(self.config['ROUTER_PASSWORD'] + "\n")
    # Issue the listing command, then exit so read_all() returns
    session.write(self.list_active_devices_cmd + "\n")
    session.write("exit\n")
    output = session.read_all()
    return output.split("\n")
def fetch_active_devices(self):
    '''
    Fetch the router neighbour table over ssh.
    @return list of output lines from "/usr/sbin/ip neigh"
    @raise paramiko exceptions on connection or authentication failure
    '''
    client = paramiko.SSHClient()
    # The router host key is not pre-registered, accept it automatically
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(self.config['ROUTER_ADRESS'],
                   port=22,
                   username=self.config['ROUTER_USER'],
                   password=self.config['ROUTER_PASSWORD'])
    try:
        stdin, stdout, stderr = client.exec_command(self.list_active_devices_cmd)
        # Read all output before closing the connection, otherwise the
        # channel may be torn down with data still buffered.
        result = stdout.readlines()
    finally:
        # The connection was previously never closed, leaking one ssh
        # session per poll (every 5 minutes).
        client.close()
    return result
def update_presence(self, monitored_devices, active_devices):
'''
Determine if any of the active devices in router is also
monitored, then check device status
'''
for line in active_devices:
if line.startswith("192."):
items = line.split()
if items[0] in monitored_devices.keys():
self.update_device_status(items, monitored_devices)
    def update_device_status(self, status, monitored_devices):
        '''
        Update device status of a monitored device
        @param status the status from router
        @param monitored_devices list of configured monitored devices
        '''
        device = monitored_devices[status[0]]
        #device.toString()
        previous_status = device.active
        # REACHABLE/STALE mean present, FAILED means absent -- these look
        # like `ip neigh` states; TODO confirm against the router output.
        if("REACHABLE" in status or "STALE" in status):
            device.active = True
            # assumes the MAC is the 5th column of the line -- TODO confirm
            if status[4].strip() != device.mac:
                self.logger.info(device.user +"["+device.ip_adress+"] do not match received[" + status[4].strip() + "] != stored[" + device.mac+"]")
        elif "FAILED" in status:
            device.active = False
        if (previous_status == device.active): # No state changed
            # self.logger.info("No change of status for device[" + status[0] + "]")
            device.changed = False
            if device.active:
                device.last_active = str(datetime.datetime.now())
        else: # State is changed
            # self.logger.info("Change of status for device[" + status[0] + "]")
            device.changed = True
            if device.active:
                device.first_active = str(datetime.datetime.now())
        #self.family_devices[status[0]] = device
    def get_announcement(self):
        '''
        Compile announcement to be read
        '''
        announcement = ""
        for _, device in self.family_devices.items():
            # NOTE(review): "�r" looks like a mojibake of the Swedish "är"
            # ("is"); left untouched since it is a runtime string.
            announcement += device.user + " �r " + device.active_toString()+", "
        return helper.encode_message(announcement)
def has_html_gui(self):
"""
Override if service has gui
"""
return True
def get_html_gui(self, column_id):
"""
Override and provide gui
"""
if not self.is_enabled():
return ReoccuringBaseService.get_html_gui(self, column_id)
column = constants.COLUMN_TAG.replace("<COLUMN_ID>", str(column_id))
column = column.replace("<SERVICE_NAME>", self.service_name)
htmlresult = ["Family</br>\n"]
for _,family in self.family_devices.items():
htmlresult.append(family.user + "["+family.active_toString()+"]</br>\n")
htmlresult += "\nGuests</br>\n"
for _,guests in self.guest_devices.items():
htmlresult.append(guests.user + "["+guests.active_toString()+"]</br>\n")
htmlresult += "\nDevices</br>\n"
for _,device in self.house_devices.items():
htmlresult.append(device.user + "["+device.active_toString()+"]</br>\n")
htmlresult += "\nUnknown Devices</br>\n"
for device in self.unknown_devices:
htmlresult.append(str(device)+"</br>\n")
column = column.replace("<SERVICE_VALUE>", ''.join(htmlresult))
return column
if __name__ == '__main__':
    # Ad-hoc manual test entry point: loads a private config, enables
    # verbose logging and runs one timeout cycle of the service.
    from davan.util import application_logger as log_config
    config = configuration.create('/home/pi/private_config.py')
    log_config.start_logging(config['LOGFILE_PATH'],loglevel=4)
    test = AsusRouterPresenceService("",config)
    test.handle_timeout()
| davandev/davanserver | davan/http/service/presence/AsusRouterPresenceService.py | Python | mit | 10,548 |
""":mod:`flask.ext.volatile.transaction` --- Key-level transactions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Hong Minhee, StyleShare
:license: MIT License, see :file:`LICENSE` for more details.
"""
import time
from werkzeug.contrib.cache import BaseCache
__all__ = 'Transaction', 'Reference'
class Transaction(object):
    """Optimistic, key-level transaction against a werkzeug cache.

    Two usage styles are supported.  As an *iterable*, each iteration
    yields a :class:`Reference`; the loop body performs its reads and
    writes through the reference and the loop is re-entered until a
    commit succeeds::

        for ref in Transaction(cache, 'cache_key'):
            value = ref()
            ref(value + ['append element'])

    As a *callable* (also usable as a decorator), the given function
    receives the current cached value and its return value is committed
    atomically::

        @Transaction(cache, 'cache_key')
        def block(value):
            return value + ['append element']

    :param cache: the cache client to use
    :type cache: :class:`werkzeug.contrib.cache.BaseCache`
    :param key: the key to operate atomically
    :param version_key: the key used for versioning; defaults to
                        ``key + '__ver'``
    :param timeout: the cache timeout for the key (or the default timeout)

    """

    def __init__(self, cache, key, version_key=None, timeout=None):
        if not isinstance(cache, BaseCache):
            raise TypeError('cache must be a werkzeug.contrib.cache.BaseCache '
                            'object, but %r passed' % cache)
        self.cache = cache
        self.key = key
        self.version_key = key + '__ver' if version_key is None else version_key
        self.timeout = timeout

    def __iter__(self):
        attempt = 0
        while True:
            ref = Reference(self, attempt)
            yield ref
            if ref.commit():
                return
            attempt += 1

    def __call__(self, block):
        # Retry `block` until its staged result commits without conflict.
        for ref in self:
            ref.set(block(ref.value))
class Reference(object):
    """The reference to key. It provides atomic :meth:`get`/:meth:`set`
    operations for the key.
    There redundant ways to :meth:`get`/:meth:`set` the value:
    By property
       You can get or set the :attr:`value` property.
    By methods
       You can use :meth:`get()` and :meth:`set` methods.
    By call
       It is callable. You can get the value by calling the reference without
       any arguments and set the value by calling the reference with
       an argument of the value to set.
    :param transaction: the transaction block
    :type transaction: :class:`Transaction`
    :param tried_number: the retried number in a transaction.
                         default is 0
    .. note::
       This object is automatically made by :class:`Transaction`.
       You don't have to instantiate it directly.
    """
    def __init__(self, transaction, tried_number=0):
        if not isinstance(transaction, Transaction):
            raise TypeError('expected a flask.ext.volatile.transaction.'
                            'Transaction, but %r passed' % transaction)
        self.transaction = transaction
        self.cache = transaction.cache
        self.key = transaction.key
        self.version_key = transaction.version_key
        self.timeout = transaction.timeout
        self.version = None
        self.tried_number = tried_number
    @property
    def value(self):
        """The read/write property for the value inside the key."""
        # Reading stamps a fresh version into the cache; commit() later
        # checks this stamp survived, which detects concurrent writers.
        self.version = time.time()
        self.cache.set(self.version_key, self.version, self.timeout)
        val = self.cache.get(self.key)
        # Stored values are (version, value) tuples; returns None when the
        # key is missing (or a stored tuple is falsy).
        if val:
            return val[1]
    @value.setter
    def value(self, value):
        # The new value is only staged locally; commit() writes it out.
        self._val = value
    def get(self):
        """Gets the value inside the key.
        :returns: the value inside the key
        """
        return self.value
    def set(self, value):
        """Sets the value into the key.
        :param value: the new value to set into the key
        """
        self.value = value
    def commit(self):
        """Tries committing the operations and its result.
        :returns: ``False`` if it conflicted
        :rtype: :class:`bool`
        """
        try:
            val = self._val
        except AttributeError:
            # Nothing was staged: a read-only transaction trivially succeeds.
            return True
        self.cache.set(self.key, (self.version, val), self.timeout)
        # Re-read to verify no concurrent writer replaced our version stamp.
        check = self.cache.get(self.key)
        return check and check[0] == self.version
    def __call__(self, *args):
        # Zero args: read; one arg: write (then return the current value).
        if len(args) > 1:
            raise TypeError('too many arguments')
        elif args:
            self.set(args[0])
        return self.value
| StyleShare/flask-volatile | flask_volatile/transaction.py | Python | mit | 5,200 |
import numpy as np
import moderngl
from ported._example import Example
class Fractal(Example):
    # Fullscreen-quad Mandelbrot renderer: the fragment shader runs the
    # escape-time iteration per pixel and maps the iteration count to a
    # colour via a 1-D palette texture.
    title = "Mandelbrot"
    gl_version = (3, 3)
    def __init__(self, **kwargs):
        """Compile the shader program and set up the fullscreen quad."""
        super().__init__(**kwargs)
        self.prog = self.ctx.program(
            vertex_shader='''
                #version 330
                in vec2 in_vert;
                out vec2 v_text;
                void main() {
                    gl_Position = vec4(in_vert, 0.0, 1.0);
                    v_text = in_vert;
                }
            ''',
            fragment_shader='''
                #version 330
                in vec2 v_text;
                out vec4 f_color;
                uniform sampler2D Texture;
                uniform vec2 Center;
                uniform float Scale;
                uniform float Ratio;
                uniform int Iter;
                void main() {
                    vec2 c;
                    int i;
                    c.x = Ratio * v_text.x * Scale - Center.x;
                    c.y = v_text.y * Scale - Center.y;
                    vec2 z = c;
                    for (i = 0; i < Iter; i++) {
                        float x = (z.x * z.x - z.y * z.y) + c.x;
                        float y = (z.y * z.x + z.x * z.y) + c.y;
                        if ((x * x + y * y) > 4.0) {
                            break;
                        }
                        z.x = x;
                        z.y = y;
                    }
                    f_color = texture(Texture, vec2((i == Iter ? 0.0 : float(i)) / 100.0, 0.0));
                }
            '''
        )
        # Handles to the shader uniforms, set each frame in render().
        self.center = self.prog['Center']
        self.scale = self.prog['Scale']
        self.ratio = self.prog['Ratio']
        self.iter = self.prog['Iter']
        # 1-D colour palette sampled by iteration count in the shader.
        self.texture = self.load_texture_2d('pal.png')
        # Triangle strip covering clip space [-1, 1] x [-1, 1].
        vertices = np.array([-1.0, -1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0])
        self.vbo = self.ctx.buffer(vertices.astype('f4'))
        self.vao = self.ctx.simple_vertex_array(self.prog, self.vbo, 'in_vert')
    def render(self, time, frame_time):
        """Upload the view uniforms and draw the quad."""
        self.ctx.clear(1.0, 1.0, 1.0)
        self.center.value = (0.5, 0.0)
        self.iter.value = 100
        self.scale.value = 1.5
        self.ratio.value = self.aspect_ratio
        self.texture.use()
        self.vao.render(moderngl.TRIANGLE_STRIP)
# Run the example window loop when executed directly.
if __name__ == '__main__':
    Fractal.run()
| cprogrammer1994/ModernGL | examples/mandelbrot_set.py | Python | mit | 2,406 |
'''
Unit tests for mir_eval.onset
'''
import numpy as np
import json
import mir_eval
import glob
import warnings
import nose.tools
# Absolute tolerance used when comparing computed scores to fixtures.
A_TOL = 1e-12
# Path to the fixture files
REF_GLOB = 'data/onset/ref*.txt'
EST_GLOB = 'data/onset/est*.txt'
SCORES_GLOB = 'data/onset/output*.json'
def __unit_test_onset_function(metric):
    """Generic sanity checks shared by every onset metric."""
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        # First, test for a warning on empty onsets
        metric(np.array([]), np.arange(10))
        assert len(w) == 1
        assert issubclass(w[-1].category, UserWarning)
        assert str(w[-1].message) == "Reference onsets are empty."
        metric(np.arange(10), np.array([]))
        assert len(w) == 2
        assert issubclass(w[-1].category, UserWarning)
        assert str(w[-1].message) == "Estimated onsets are empty."
        # And that the metric is 0
        assert np.allclose(metric(np.array([]), np.array([])), 0)
    # Now test validation function - onsets must be 1d ndarray
    onsets = np.array([[1., 2.]])
    nose.tools.assert_raises(ValueError, metric, onsets, onsets)
    # onsets must be in seconds (so not huge)
    onsets = np.array([1e10, 1e11])
    nose.tools.assert_raises(ValueError, metric, onsets, onsets)
    # onsets must be sorted
    onsets = np.array([2., 1.])
    nose.tools.assert_raises(ValueError, metric, onsets, onsets)
    # Valid onsets which are the same produce a score of 1 for all metrics
    # Fix: np.float was a deprecated alias of the builtin float and was
    # removed in NumPy 1.24; use float directly (identical dtype).
    onsets = np.arange(10, dtype=float)
    assert np.allclose(metric(onsets, onsets), 1)
def __check_score(sco_f, metric, score, expected_score):
    # sco_f and metric are unused here; they travel in the yielded test
    # tuple so nose prints which fixture/metric failed.
    assert np.allclose(score, expected_score, atol=A_TOL)
def test_onset_functions():
    """Yield unit tests and fixture-regression tests for each onset metric."""
    # Load in all files in the same order
    ref_files = sorted(glob.glob(REF_GLOB))
    est_files = sorted(glob.glob(EST_GLOB))
    sco_files = sorted(glob.glob(SCORES_GLOB))
    # Unit tests
    for metric in [mir_eval.onset.f_measure]:
        yield (__unit_test_onset_function, metric)
    # Regression tests
    for ref_f, est_f, sco_f in zip(ref_files, est_files, sco_files):
        with open(sco_f, 'r') as f:
            expected_scores = json.load(f)
        # Load in an example onset annotation
        reference_onsets = mir_eval.io.load_events(ref_f)
        # Load in an example onset tracker output
        estimated_onsets = mir_eval.io.load_events(est_f)
        # Compute scores
        scores = mir_eval.onset.evaluate(reference_onsets, estimated_onsets)
        # Compare them
        for metric in scores:
            # This is a simple hack to make nosetest's messages more useful
            yield (__check_score, sco_f, metric, scores[metric],
                   expected_scores[metric])
| mrgloom/mir_eval | tests/test_onset.py | Python | mit | 2,721 |
import system
# Create the computer system and power it up.
# NOTE(review): the name ``sys`` shadows the stdlib module; harmless here
# since nothing in this script uses the stdlib sys afterwards.
sys = system.System()
sys.power_on()
| richgieg/RichEmu86 | main.py | Python | mit | 98 |
"""
Token class definition
"""
class Token(object):
    """Immutable token carrying a type tag and a value.

    Both attributes are exposed through read-only properties, so a token
    cannot be mutated after construction.
    """

    def __init__(self, type, value):
        # ``type`` keeps the caller-facing keyword name even though it
        # shadows the builtin; renaming would break keyword-argument calls.
        self._type = type
        self._value = value

    @property
    def type(self):
        """The token's type tag."""
        return self._type

    @property
    def value(self):
        """The token's value."""
        return self._value

    def __repr__(self):
        # Purely additive debugging aid; no caller impact.
        return '%s(%r, %r)' % (type(self).__name__, self._type, self._value)
| piller-imre/exprail-python | exprail/token.py | Python | mit | 319 |
#!/usr/bin/python
# Generates a self-contained OpenVPN client profile (.ovpn) for a new
# client CN: signs a key/cert pair with easy-rsa, then inlines the TA key,
# CA cert, client key/cert and the server IP into a config template.
import sys
import subprocess
import os
import commands
# WARNING(review): CLIENT_CN comes straight from argv and is interpolated
# into a shell=True command below -- shell-injection risk if this script is
# ever driven by untrusted input. Raises IndexError if no argument given.
CLIENT_CN = sys.argv[1]
SEP = '==========> '
OVPN_ROOT='/etc/openvpn/'
ERSA=OVPN_ROOT+'easy-rsa-release-2.x/easy-rsa/2.0/keys/'
IP_PLACEHOLDER = 'placeholder_ip'
TA_PLACEHOLDER = 'placeholder_ta'
CA_PLACEHOLDER = 'placeholder_ca'
CL_KEY_PLACEHOLDER = 'placeholder_ck'
CL_CR_PLACEHOLDER = 'placeholder_crt'
CONFIG_TEMPLATE = OVPN_ROOT+'client/template.ovpn'
TA_FILE = OVPN_ROOT+'keys/ta.key'
CA = OVPN_ROOT+'keys/ca.crt'
RES_FILE = OVPN_ROOT+'client/'+CLIENT_CN+".ovpn"
CLIENT_KEY_FILE = ERSA+CLIENT_CN+".key"
CLIENT_CER_FILE = ERSA+CLIENT_CN+".crt"
if not os.geteuid() == 0:
    sys.exit('Script must be run as root')
# pkitool generates and signs the client key/cert; a non-zero exit status
# is treated as "this common name already exists".
cmd = 'cd easy-rsa-release-2.x/easy-rsa/2.0/; source ./vars; ./pkitool ' + CLIENT_CN
print SEP+cmd
if subprocess.call(cmd, shell=True):
    print SEP+"CN in use"
    sys.exit()
print SEP+"Data has been generated: "+CLIENT_CN
#contents = open(CONFIG_TEMPLATE, "r+").read()
conf = open(RES_FILE, 'w+')
# tls key ([:-1] drops the trailing newline of the PEM file)
taKey = open(TA_FILE, 'r').read()[:-1]
# CA isnt encrypted
serverCa = open(CA, 'r').read()[:-1]
# client key
clientKey = open(CLIENT_KEY_FILE, 'r').read()[:-1]
# client cert
clientCert = open(CLIENT_CER_FILE, 'r').read()[:-1]
# server ip
# NOTE(review): parsed from the second line of `ifconfig` output --
# interface-order dependent and fragile; `commands` is Python-2 only.
serverIp=commands.getoutput("/sbin/ifconfig").split("\n")[1].split()[1][0:]
resultConfig = open(CONFIG_TEMPLATE, "r+").read().replace(TA_PLACEHOLDER, taKey)
resultConfig = resultConfig.replace(CA_PLACEHOLDER, serverCa)
resultConfig = resultConfig.replace(CL_KEY_PLACEHOLDER, clientKey)
resultConfig = resultConfig.replace(CL_CR_PLACEHOLDER, clientCert)
resultConfig = resultConfig.replace(IP_PLACEHOLDER, serverIp)
conf.write(resultConfig)
conf.close()
# Touch the (empty) client-config-dir file so OpenVPN accepts the CN.
open(OVPN_ROOT+'ccd/'+CLIENT_CN, 'w').close()
print SEP+"Client: "+RES_FILE | storytime/openvpn-server-utils | openvpn/conf_generator.py | Python | mit | 1,782 |
import os
from sqs import (make_SQS_connection, get_queue, queue_size, )
from cloudwatch import (make_CW_connection, update_metric, )
from threading import Timer
# Define AWS credentials
# (raises KeyError at import time if the environment variables are unset)
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
REGION = 'us-west-2'
# Set queue name variables
FULL_COMPOSITE_QUEUE = 'snapsat_composite_queue'
PREVIEW_COMPOSITE_QUEUE = 'snapsat_preview_queue'
# Set metric name variables
FULL_COMPOSITE_METRIC = 'number_jobs_full_queue'
PREVIEW_COMPOSITE_METRIC = 'number_jobs_preview_queue'
# Set metric namespace
NAMESPACE = 'Snapsat'
# Set size check intervals (seconds between CloudWatch updates)
FULL_INTERVAL = 10
PREVIEW_INTERVAL = 10
# Create SQS connction
# (module-level side effect: AWS connections are opened on import)
SQSconn = make_SQS_connection(REGION,
                              AWS_ACCESS_KEY_ID,
                              AWS_SECRET_ACCESS_KEY)
# Create CW connection
CWconn = make_CW_connection(REGION,
                            AWS_ACCESS_KEY_ID,
                            AWS_SECRET_ACCESS_KEY)
# Publish the current size of one SQS queue as a CloudWatch metric.
def monitor_queue(SQSconn, CWconn, queue_name, metric_name):
    target_queue = get_queue(SQSconn, queue_name)
    current_size = queue_size(target_queue)
    update_metric(CWconn, NAMESPACE, metric_name, current_size)
# Create full queue size check timer function: reports the queue size
# once, then re-arms itself every `interval` seconds via threading.Timer.
def full_queue_timer(SQSconn, CWconn, queue_name, metric_name, interval):
    monitor_queue(SQSconn, CWconn, queue_name, metric_name)
    return Timer(interval,
                 full_queue_timer,
                 args=[SQSconn, CWconn, queue_name, metric_name, interval]
                 ).start()
# Create preview queue size check timer function: reports the queue size
# once, then re-arms itself every `interval` seconds via threading.Timer.
def preview_queue_timer(SQSconn, CWconn, queue_name, metric_name, interval):
    monitor_queue(SQSconn, CWconn, queue_name, metric_name)
    # Fix: the original rescheduled full_queue_timer here (copy/paste);
    # equivalent today only because both bodies happen to match -- a change
    # to either function would silently break preview monitoring.
    return Timer(interval,
                 preview_queue_timer,
                 args=[SQSconn, CWconn, queue_name, metric_name, interval]
                 ).start()
# Check queue sizes periodically (the configured intervals are 10 seconds,
# not 20 as the original comment claimed).
def main():
    # Start both self-rescheduling timers; each publishes its queue's size.
    full_queue_timer(SQSconn,
                     CWconn,
                     FULL_COMPOSITE_QUEUE,
                     FULL_COMPOSITE_METRIC,
                     FULL_INTERVAL)
    preview_queue_timer(SQSconn,
                        CWconn,
                        PREVIEW_COMPOSITE_QUEUE,
                        PREVIEW_COMPOSITE_METRIC,
                        PREVIEW_INTERVAL)
if __name__ == '__main__':
    main()
| recombinators/autoscaling | checker.py | Python | mit | 2,410 |
"""length_of_subdivisionfirst
Revision ID: eb61567ea005
Revises: 66f71cf543a4
Create Date: 2018-05-15 22:24:27.212164
"""
from alembic import op
import sqlalchemy as sa
from docassemble.webapp.database import dbtableprefix
# revision identifiers, used by Alembic.
revision = 'eb61567ea005'       # this migration
down_revision = '66f71cf543a4'  # migration this one upgrades from
branch_labels = None
depends_on = None
def upgrade():
    """Widen user.subdivisionfirst (3 -> 255) and user.country (2 -> 3)."""
    user_table = dbtableprefix + 'user'
    op.alter_column(user_table, 'subdivisionfirst',
                    existing_type=sa.String(length=3),
                    type_=sa.String(length=255),
                    existing_nullable=True)
    op.alter_column(user_table, 'country',
                    existing_type=sa.String(length=2),
                    type_=sa.String(length=3),
                    existing_nullable=True)
def downgrade():
    """Restore the original column widths (255 -> 3 and 3 -> 2)."""
    user_table = dbtableprefix + 'user'
    op.alter_column(user_table, 'subdivisionfirst',
                    existing_type=sa.String(length=255),
                    type_=sa.String(length=3),
                    existing_nullable=True)
    op.alter_column(user_table, 'country',
                    existing_type=sa.String(length=3),
                    type_=sa.String(length=2),
                    existing_nullable=True)
| jhpyle/docassemble | docassemble_webapp/docassemble/webapp/alembic/versions/eb61567ea005_length_of_subdivisionfirst.py | Python | mit | 1,226 |
# -*- coding: utf-8 -*-
# Scrapes the www.uci.edu front page and prints the news titles/links
# found inside the <div id="news"> element (Python 2 / BeautifulSoup 3).
import urllib2
from BeautifulSoup import BeautifulSoup
# Download the HTML
request = urllib2.Request('http://www.uci.edu')
response = urllib2.urlopen(request)
print '\r\n\r\n'
# Verify that everything went ok.
# Error codes: 200 == good, 404, 500 == bad
print 'The error code is:', response.code
print '\r\n\r\n'
html = response.read()
# Parse the HTML into a dom object via our BS library.
dom = BeautifulSoup(html)
# Extract out the <div> tag containing our news.
# NOTE(review): brittle -- find() returns None (and the lines below crash)
# if the page layout changes and no div with id="news" exists.
news_tag = dom.find('div', {'id': 'news'})
# See what the extracted HTML looks like.
print 'The extracted news div HTML looks like:'
print '===================================='
print news_tag
print '\r\n\r\n'
# Further extract out a list of the actual news titles.
news_li_tags = news_tag.findAll('li')
titles = [tag.text for tag in news_li_tags]
links = [tag.a['href'] for tag in news_li_tags]
print 'The top news titles on www.uci.edu are currently:'
print '===================================='
for title in titles:
    print title
print 'The top news links on www.uci.edu are currently:'
print '===================================='
for link in links:
    print link
print '\r\n\r\n'
# -*- coding: utf-8 -*-
from openerp import models,fields,api
import datetime
import time
class assistent_report1(models.TransientModel):
    # Transient wizard model: collects report options from the user and
    # redirects to an external PHP report page built from those options.
    _name = "assistent.report1"
    def date_debut_mois():
        # Defined without `self` on purpose: it is invoked below, at
        # class-definition time, to seed the `_defaults` dict (so the value
        # is frozen when the server process starts).
        now = datetime.date.today() # Today's date
        date_debut_mois = datetime.datetime( now.year, now.month, 1 ) # First day of the month
        return date_debut_mois.strftime('%Y-%m-%d') # Formatting
    site= fields.Selection([
        ("1", "Gray"),
        ("4", "ST-Brice"),
        ], "Site", required=True)
    version = fields.Selection([
        ("1", "1"),
        ("2", "2"),
        ("3", "3"),
        ], "Version du rapport", required=True, default="2")
    type_rapport= fields.Selection([
        ("rapport_mois", "Liste mensuelle"),
        ("rapport_date_a_date", "Liste de date à date"),
        ("rapport_a_date", "Liste à date")
        ], "Modèle de rapport", required=True)
    date_jour = fields.Date("Date", required=False)
    date_mois = fields.Date("Date dans le mois", required=False)
    date_debut = fields.Date("Date de début", required=False)
    date_fin = fields.Date("Date de fin", required=False)
    employee = fields.Many2one('hr.employee', 'Employé', required=False, ondelete='set null', help="Sélectionnez un employé")
    interimaire = fields.Boolean('Intérimaire', help="Cocher pour sélectionner uniquement les intérimaires")
    saut_page = fields.Boolean('Saut de page', help="Cocher pour avoir un saut de page pour chaque employé")
    detail = fields.Boolean("Vue détaillée")
    _defaults = {
        'date_jour': time.strftime('%Y-%m-%d'),
        'date_mois': date_debut_mois(),
        'date_debut': date_debut_mois(),
        'date_fin': time.strftime('%Y-%m-%d'),
        'type_rapport': 'rapport_mois',
    }
    def assistent_report1(self, cr, uid, ids, context=None):
        # Builds the external report URL from the wizard's field values and
        # returns an act_url action so the web client navigates to it.
        report_data = self.browse(cr, uid, ids[0])
        report_link = "http://odoo/odoo-rh/rapport"+ str(report_data.version)+".php"
        url = str(report_link) + '?'+ '&type_rapport=' + str(report_data.type_rapport)+'&site=' + str(report_data.site)+ '&date_jour=' + str(report_data.date_jour)+ '&date_mois=' + str(report_data.date_mois)+'&detail='+str(report_data.detail)+'&employee='+str(report_data.employee.id)+'&interimaire='+str(report_data.interimaire)+'&saut_page='+str(report_data.saut_page)+ '&date_debut=' + str(report_data.date_debut)+ '&date_fin=' + str(report_data.date_fin)
        return {
            'name' : 'Go to website',
            'res_model': 'ir.actions.act_url',
            'type' : 'ir.actions.act_url',
            'target' : 'current',
            'url' : url
        }
| tonygalmiche/is_plastigray | wizard/assistent_report.py | Python | mit | 2,803 |
from django.apps import AppConfig
class NstoreConfig(AppConfig):
    """Django application configuration for the ``nstore`` app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'nsync_server.nstore'
| pizzapanther/Neutron-Sync | nsync_server/nstore/apps.py | Python | mit | 153 |
import wmi
import requests
import pythoncom
def get_iip():
    """Return the machine's public (internet-facing) IP via dnsomatic."""
    f = requests.get("http://myip.dnsomatic.com")
    iip = f.text
    return iip
def get_lip():
    """Return a local IP address queried through WMI."""
    c = wmi.WMI()
    lip = ""
    for interface in c.Win32_NetworkAdapterConfiguration(IPEnabled=1):
        # NOTE(review): the loop keeps overwriting, so this returns the
        # LAST enabled adapter's first address -- confirm that is intended.
        lip = interface.IPAddress[0]
    return lip
def get_mac():
    """Return a MAC address queried through WMI."""
    c = wmi.WMI()
    mac = ""
    for interface in c.Win32_NetworkAdapterConfiguration(IPEnabled=1):
        # NOTE(review): as in get_lip, the LAST enabled adapter wins.
        mac = interface.MACAddress
    return mac
def get_hd():
    """Return a hard-disk serial placeholder.

    The real WMI lookup is commented out below; callers currently always
    receive the constant stub "disk01".
    """
    c = wmi.WMI()
    hd = "disk01"
    # for disk in c.Win32_DiskDrive():
    # hd = disk.SerialNumber.strip()
    return hd
def get_terminal_info():
    """Assemble the semicolon-separated terminal-info string from local
    machine identifiers (internet IP/port fields are left blank here)."""
    # COM must be initialised per-thread before any wmi usage.
    pythoncom.CoInitialize()
    internet_ip = ""
    internet_port = ""
    parts = [
        "PC",
        "IIP=" + internet_ip,
        "IPORT=" + internet_port,
        "LIP=" + get_lip(),
        "MAC=" + get_mac(),
        "HD=" + get_hd(),
        "PCN=NA;CPU=NA;PI=NA;VOL=NA@NA",
    ]
    return ";".join(parts)
| msincenselee/vnpy | vnpy/gateway/tora/terminal_info.py | Python | mit | 1,108 |
class Config(dict):
    """Dict-backed configuration that also exposes its keys as attributes.

    Reads (``cfg.server``) and writes (``cfg.server = ...``) of unknown
    attributes are forwarded to the underlying dict, so the object can be
    used interchangeably as a mapping or as a namespace.
    """

    def __init__ (self):
        # Default IRC connection and web-interface settings.
        self['server'] = 'chat.freenode.com'
        self['port'] = 6667
        self['debug'] = False
        self['verbose'] = False
        self['web_host'] = '0.0.0.0'
        self['web_port'] = 8080

    def __getattr__(self, attr):
        # Bug fix: the original referenced an undefined name ``item`` here,
        # so every attribute read raised NameError. Forward to the dict and
        # translate a missing key into the expected AttributeError.
        try:
            return self.__getitem__(attr)
        except KeyError:
            raise AttributeError(attr)

    def __setattr__ (self, attr, value):
        # Bug fix: same undefined ``item`` problem, plus the Python-2-only
        # ``has_key``. Real instance attributes are set normally; anything
        # else is stored as a configuration key.
        if attr in self.__dict__:
            dict.__setattr__(self, attr, value)
        else:
            self.__setitem__(attr, value)
| citruspi/ReactIRC | ReactIRC/config.py | Python | mit | 643 |