| repo_name (string, 5-92 chars) | path (string, 4-221 chars) | copies (string, 19 classes) | size (string, 4-6 chars) | content (string, 766-896k chars) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 32-997) | alpha_frac (float64, 0.25-0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5-13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ios-xr/iosxr-ansible
|
local/library/iosxr_nc11_send.py
|
1
|
4463
|
#!/usr/bin/python
#------------------------------------------------------------------------------
#
# Copyright (C) 2016 Cisco Systems, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#------------------------------------------------------------------------------
from ansible.module_utils.basic import *
import paramiko
DOCUMENTATION = """
---
module: iosxr_nc11_send
author: Adisorn Ermongkonchai
short_description: Send NETCONF-YANG 1.1 XML file to IOS-XR device
description:
- Send NETCONF-YANG 1.1 XML file to IOS-XR device
options:
host:
description:
- IP address or hostname (resolvable by Ansible control host) of
the target IOS-XR node.
required: true
username:
description:
- username used to login to IOS-XR
required: false
default: none
password:
description:
- password used to login to IOS-XR
required: false
default: none
xmlfile:
description:
- XML file
required: true
example: nc_show_install_active.xml
<rpc message-id="101" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<get>
<filter type="subtree">
<interface-configurations xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg"/>
</filter>
</get>
</rpc>
"""
EXAMPLES = """
- iosxr_nc11_send:
host: '{{ ansible_ssh_host }}'
username: cisco
password: cisco
xmlfile: xml/nc_show_install_active.xml
"""
RETURN = """
stdout:
description: raw response
returned: always
stdout_lines:
description: list of response lines
returned: always
"""
HELLO = """
<hello xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<capabilities>
<capability>urn:ietf:params:netconf:base:1.1</capability>
</capabilities>
</hello>
]]>]]>"""
COMMIT = """
#91
<rpc message-id="101" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<commit/>
</rpc>
##
"""
CLOSE = """
#98
<rpc message-id="102" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<close-session/>
</rpc>
##
"""
def main():
module = AnsibleModule(
argument_spec = dict(
host = dict(required=True),
username = dict(required=False, default=None),
password = dict(required=False, default=None),
xmlfile = dict(required=True),
port = dict(required=False, type='int', default=830)
),
supports_check_mode = False
)
args = module.params
xml_file = module.params['xmlfile']
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(module.params['host'],
port=module.params['port'],
username=module.params['username'],
password=module.params['password'],
timeout=10)
transport = ssh.get_transport()
channel = transport.open_channel('session')
channel.invoke_subsystem('netconf')
# read hello msg
response = channel.recv(1024)
while ']]>]]>' not in response:
response += channel.recv(1024)
result = dict(changed=False)
xml_text = open(xml_file).read()
if 'edit-config' in xml_text or 'delete-config' in xml_text:
result['changed'] = True
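    # wrap the request in a single NETCONF 1.1 chunk: "#<length>" header,
    # the XML payload, then the "##" end-of-message marker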
xml_msg = '\n#' + str(len(xml_text)-1) + '\n' + xml_text + '##\n'
# send hello followed by contents of xml file
channel.send(HELLO)
channel.send(xml_msg)
# collect all responses 1024 bytes at a time
response = channel.recv(1024)
while '##' not in response:
response += channel.recv(1024)
# commit changes
if result['changed']:
channel.send(COMMIT)
channel.send(CLOSE)
result['stdout'] = response
if 'rpc-error' in response:
return module.fail_json(msg=response)
else:
return module.exit_json(**result)
if __name__ == "__main__":
main()
|
gpl-3.0
| 6,026,454,946,953,059,000
| 26.89375
| 95
| 0.611248
| false
| 3.719167
| false
| false
| false
|
Fyzel/weather-data-flaskapi
|
database/__init__.py
|
1
|
5398
|
"""
@author: Fyzel@users.noreply.github.com
@copyright: 2017 Englesh.org. All rights reserved.
@license: https://github.com/Fyzel/weather-data-flaskapi/blob/master/LICENSE
@contact: Fyzel@users.noreply.github.com
@deffield updated: 2017-06-14
"""
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_humidity_indexes(app):
with app.app_context():
from sqlalchemy import text
from sqlalchemy.exc import OperationalError
# Create humidity table indices
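        # Index creation is best-effort: an OperationalError (for example when the
        # index already exists) is caught and deliberately ignored below.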
try:
sql = text(
'CREATE UNIQUE INDEX humidity_city_subdivision_country_index ON humidity (city, subdivision, country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_city_index ON humidity (city);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_subdivision_index ON humidity (subdivision);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_country_index ON humidity (country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_latitude_longitude_index ON humidity (latitude, longitude);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_timestamp_index ON humidity (timestamp);')
db.engine.execute(sql)
except OperationalError as oe:
pass
def create_pressure_indexes(app):
with app.app_context():
from sqlalchemy import text
from sqlalchemy.exc import OperationalError
# Create pressure table indices
try:
sql = text(
'CREATE UNIQUE INDEX pressure_city_subdivision_country_index ON pressure (city, subdivision, country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_city_index ON pressure (city);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_subdivision_index ON pressure (subdivision);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_country_index ON pressure (country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_latitude_longitude_index ON pressure (latitude, longitude);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_timestamp_index ON pressure (timestamp);')
db.engine.execute(sql)
except OperationalError as oe:
pass
def create_temperature_indexes(app):
with app.app_context():
from sqlalchemy import text
from sqlalchemy.exc import OperationalError
# Create temperature table indices
try:
sql = text(
'CREATE UNIQUE INDEX temperature_city_subdivision_country_index ON temperature (city, subdivision, country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_city_index ON temperature (city);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_subdivision_index ON temperature (subdivision);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_country_index ON temperature (country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_latitude_longitude_index ON temperature (latitude, longitude);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_timestamp_index ON temperature (timestamp);')
db.engine.execute(sql)
except OperationalError as oe:
pass
def create_user_indexes(app):
with app.app_context():
from sqlalchemy import text
from sqlalchemy.exc import OperationalError
# Create user table indices
try:
sql = text('CREATE INDEX user_username_index ON user (username);')
db.engine.execute(sql)
except OperationalError as oe:
pass
def create_indexes(app):
create_humidity_indexes(app)
create_pressure_indexes(app)
create_temperature_indexes(app)
create_user_indexes(app)
def create_database(app=None):
db.create_all(app=app)
create_indexes(app)
def reset_database():
from database.models import ProtectedHumidity, ProtectedPressure, ProtectedTemperature, User
db.drop_all()
db.create_all()
|
apache-2.0
| 7,651,208,777,404,038,000
| 29.497175
| 126
| 0.608744
| false
| 4.363783
| false
| false
| false
|
ladinu/CS350-project
|
TestEfficency.py
|
1
|
2211
|
import time
from geometry.utils import getNRandomPoints, getCircle
import BruteForceHull, QuickHull
from math import log
global fcount
fcount = 2
def outStr(a, b):
return "%i,%f" % (a, b)
def getBruteForceExecTime(points):
t1 = time.time()
BruteForceHull.computeHull(points)
t2 = time.time()
return t2-t1
def getQuickHullExecTime(points, loopCount=1):
t1 = time.time()
qh = QuickHull.QuickHull(points)
qh.computeHull()
t2 = time.time()
return t2-t1
def getBruteForceData():
global fcount
print "> Generating execution reports for BruteForce hull..."
f = open("reports/bruteforce%s.csv" % fcount, 'w', 1)
dataPoints = [100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]
for d in dataPoints:
etime = getBruteForceExecTime(getNRandomPoints(d))
f.write("%s\n" % outStr(d, etime))
print outStr(d, etime)
f.close()
def getQuickHullWorstCaseData():
global fcount
print "> Generating execution reports for QuickHull worst case..."
dataPoints = [10000, 20000, 40000, 50000, 60000, 70000, 80000, 90000, 100000, 200000,
400000, 500000, 600000, 700000, 800000, 900000, 1000000, 2000000, 3000000,
4000000, 5000000, 6000000, 7000000, 8000000, 9000000, 10000000]
f = open("reports/quickhull_worst_case%s.csv" % fcount, 'w', 1)
for d in dataPoints:
etime = getQuickHullExecTime(getCircle(10000, d))
f.write("%s\n" % outStr(d, etime))
print outStr(d, etime)
f.close()
def getQuickHullData():
global fcount
print "> Generating execution reports for QuickHull..."
f = open('reports/quickhull%s.csv' % fcount, 'w', 1)
for j in [100000000, 10, 100, 1000, 10000, 100000, 200000, 400000, 600000, 800000, 1000000, 9000000, 10000000]:
f.write("%s\n" % outStr(j, getQuickHullExecTime(getNRandomPoints(j))))
print outStr(j, getQuickHullExecTime(getNRandomPoints(j)))
f.close()
if __name__ == "__main__":
fcount = "_0"
getQuickHullData()
getBruteForceData()
getQuickHullWorstCaseData()
fcount = "_1"
getBruteForceData()
getQuickHullWorstCaseData()
getQuickHullData()
fcount = "_2"
getBruteForceData()
getQuickHullWorstCaseData()
getQuickHullData()
|
mit
| 5,516,835,767,643,267,000
| 30.585714
| 114
| 0.679331
| false
| 2.955882
| false
| false
| false
|
rdo-management/tuskar
|
tuskar/api/renderers.py
|
1
|
2382
|
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
import wsme
from wsme import api
class JSONRenderer(object):
"""Custom JSON renderer.
Renders to JSON and handles responses for various HTTP status codes.
"""
def __init__(self, path, extra_vars):
"""Create an empty __init__ to accept the arguments provided to a
Renderer but ignore them as they are not needed.
"""
def _render_fault(self, message, details, code=500):
"""Given the namespace dictionary render a JSON error response for the
fault in the format defined by the OpenStack identity service
documentation.
"""
body = {
'identityFault': {
"message": message,
"details": details,
"code": code
}
}
return wsme.rest.json.encode_error(None, body)
def render(self, template_path, namespace):
"""Given a namespace dict render the response as JSON and return.
        If the dict contains a faultcode or wsme.api.Response it's a fault from
user code and is rendered via _render_fault.
template_path is a required parameter for renderers but unused in
this context.
"""
if 'faultcode' in namespace:
return self._render_fault(
namespace['faultstring'],
namespace['debuginfo'])
result = namespace['result']
if isinstance(namespace['result'], api.Response):
pecan.response.status_code = result.status_code
return self._render_fault(
result.obj.faultstring, result.obj.debuginfo,
code=result.status_code)
return wsme.rest.json.encode_result(
result,
namespace['datatype']
)
|
apache-2.0
| 3,922,971,986,940,133,400
| 32.549296
| 78
| 0.625525
| false
| 4.528517
| false
| false
| false
|
xibalbanus/PIA2
|
osiris_phylogenetics/phylogenies/phytab_raxml_pars.py
|
1
|
5675
|
#!/usr/bin/env python
## This tool runs RAxML's parsimony inference on a phytab input.
## If N = # of nodes requested in job runner, then N RAxML jobs will run simultaneously. Make sure that the
## number of processors ('ppn') in the job runner matches the 'numthreads' commandline argument -T.
##
## Usage: ./phytab_raxml_using_ptree.parallel.py -i <phytabinput> -e <model> -f <modelfile> -T 4
## example: ./phytab_raxml_using_ptree.parallel.py -i myphytab.txt -e PROTGAMMAWAG -f None -T 4
## or: ./phytab_raxml_using_ptree.parallel.py -i myphtab.txt -e None -f modelsforeachpartition.txt -T 4
##
## outputs a tab-delimited file with gene-partition and newick parsimony tree on each line.
import optparse
import os
import subprocess
import multiprocessing
RESULTS_DIR = 'results'
RESULTS_FILE = 'parsimony_results.txt'
RAXML_PREFIX = 'RAxML_parsimonyTree.'
def unescape(string):
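    # Map Galaxy-style escape tokens (e.g. "__gt__", "__tc__") back to the
    # characters they stand for before using the value.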
mapped_chars = {
'>': '__gt__',
'<': '__lt__',
"'": '__sq__',
'"': '__dq__',
'[': '__ob__',
']': '__cb__',
'{': '__oc__',
'}': '__cc__',
'@': '__at__',
'\n': '__cn__',
'\r': '__cr__',
'\t': '__tc__',
'#': '__pd__'
}
for key, value in mapped_chars.iteritems():
string = string.replace(value, key)
return string
class Species:
def __init__(self, string):
lis = string.split('\t')
# print lis
self.species = lis[0]
self.gene = lis[1]
self.name = lis[2]
self.sequence = lis[3]
def toString(self):
return self.species + '\t' + self.sequence
class Gene:
def __init__(self, name):
self.name = name
self.count = 0
self.length = 0
self.species = []
def output(self):
file_name = self.name + ".phy"
location = RESULTS_DIR + os.sep + file_name
with open(location, 'w') as f:
f.write(str(self.count) + '\t' + str(self.length) + '\n')
for s in self.species:
f.write(s.toString())
return file_name
def add(self, species):
if species.name == "":
return
self.species.append(species)
self.count += 1
if self.length == 0:
self.length = len(species.sequence) - 1
def output_species(species):
file_name = species.gene + ".phy"
location = RESULTS_DIR + os.sep + file_name
with open(location, 'a') as f:
f.write(species.toString())
return file_name
def process_phytab(input):
files = set()
genes = dict()
with open(input) as f:
for line in f:
if len(line) < 4:
continue
species = Species(line)
if species.gene in genes:
genes[species.gene].add(species)
else:
gene = Gene(species.gene)
gene.add(species)
genes[gene.name] = gene
for k, gene in genes.iteritems():
files.add(gene.output())
return files
def runRaxml(list_of_files, evo, evoDict,NUMTHREADS):
for gene_file in list_of_files:
if gene_file.split(".")[0] in evoDict:
newEvo = evoDict[gene_file.split(".")[0]]
else:
newEvo = evo
        file_name = RESULTS_DIR + os.sep + gene_file
        # to run parsimony trees (NUMTHREADS is passed in as a string):
        popen = subprocess.Popen(['raxmlHPC-PTHREADS', '-T', NUMTHREADS, '-f', 'd', '-s', file_name, '-y', '-m', newEvo, '-n', gene_file[:-4]+'.tre', '-p', '34'])
# to run likelihood trees:
# popen = subprocess.Popen(['raxmlHPC-PTHREADS', "-T", NUMTHREADS, "-s", file_name, '-m', newEvo, '-n', gene_file[:-4], '-p', '34'])
popen.wait()
def toData(text, name):
text = name + "\t" + text.replace("\n", "\\n")
return text
def readEfile(efile):
evoDict = {}
with open(efile, "r") as f:
for line in f:
pair = line.split("\t")
evoDict[pair[0].strip()] = pair[1].strip()
return evoDict
def main():
usage = """%prog [options]
options (listed below) default to 'None' if omitted
"""
parser = optparse.OptionParser(usage=usage)
parser.add_option(
'-i', '--in',
dest='input',
action='store',
type='string',
metavar="FILE",
help='Name of input data.')
parser.add_option(
'-e', '--evo',
dest='evo',
action='store',
type='string',
metavar="EVO",
help='Evolution model.')
parser.add_option(
'-f', '--evo-file',
dest='efile',
action='store',
type='string',
metavar="EVO_FILE",
help='Evolution model file. Format is gene_name [tab] evolution_model.')
parser.add_option('-T', '--numthread',dest='numthreads', action='store',type='int', metavar="NUMT", help='Provide number of threads for RAxML')
options, args = parser.parse_args()
os.mkdir(RESULTS_DIR)
list_of_species_files = process_phytab(unescape(options.input))
try:
evoDict = readEfile(unescape(options.efile))
except IOError:
print "Could not find evolution model file, using:", unescape(options.evo)
evoDict = {}
runRaxml(list_of_species_files, unescape(options.evo), evoDict,str(options.numthreads))
result = [file for file in os.listdir('./') if file.startswith(RAXML_PREFIX)]
with open(RESULTS_DIR + os.sep + RESULTS_FILE, "w") as f:
for file in result:
with open(file, "r") as r:
f.write(file[len(RAXML_PREFIX):-4] + '\t' + r.read())
if __name__ == '__main__':
main()
|
mit
| -6,460,458,334,718,591,000
| 28.868421
| 160
| 0.551366
| false
| 3.369952
| false
| false
| false
|
lehinevych/cfme_tests
|
cfme/tests/test_rest.py
|
1
|
4051
|
# -*- coding: utf-8 -*-
"""This module contains REST API specific tests."""
import fauxfactory
import pytest
from cfme import Credential
from cfme.configure.access_control import User, Group
from cfme.login import login
from cfme.rest import vm as _vm
from utils.providers import setup_a_provider as _setup_a_provider
from utils.version import current_version
from utils import testgen, conf, version
pytest_generate_tests = testgen.generate(
testgen.provider_by_type,
['virtualcenter', 'rhevm'],
scope="module"
)
@pytest.fixture(scope="module")
def a_provider():
return _setup_a_provider("infra")
@pytest.mark.usefixtures("logged_in")
@pytest.fixture(scope='function')
def user():
user = User(credential=Credential(principal=fauxfactory.gen_alphanumeric(),
secret=fauxfactory.gen_alphanumeric()), name=fauxfactory.gen_alphanumeric(),
group=Group(description='EvmGroup-super_administrator'))
user.create()
return user
# This test should be deleted when we get new build > 5.5.2.4
@pytest.mark.tier(2)
@pytest.mark.uncollectif(lambda: version.current_version() < '5.5')
def test_edit_user_password(rest_api, user):
if "edit" not in rest_api.collections.users.action.all:
pytest.skip("Edit action for users is not implemented in this version")
try:
for cur_user in rest_api.collections.users:
if cur_user.userid != conf.credentials['default']['username']:
rest_user = cur_user
break
except:
pytest.skip("There is no user to change password")
new_password = fauxfactory.gen_alphanumeric()
rest_user.action.edit(password=new_password)
cred = Credential(principal=rest_user.userid, secret=new_password)
new_user = User(credential=cred)
login(new_user)
@pytest.fixture(scope="function")
def vm(request, a_provider, rest_api):
return _vm(request, a_provider, rest_api)
@pytest.mark.tier(2)
@pytest.mark.parametrize(
"from_detail", [True, False],
ids=["from_detail", "from_collection"])
def test_vm_scan(rest_api, vm, from_detail):
rest_vm = rest_api.collections.vms.get(name=vm)
if from_detail:
response = rest_vm.action.scan()
else:
response, = rest_api.collections.vms.action.scan(rest_vm)
@pytest.wait_for(timeout="5m", delay=5, message="REST running scanning vm finishes")
def _finished():
response.task.reload()
if response.task.status.lower() in {"error"}:
pytest.fail("Error when running scan vm method: `{}`".format(response.task.message))
return response.task.state.lower() == 'finished'
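# Collections that do not exist on older appliance versions; they are skipped
# below via the uncollectif condition on test_query_simple_collections.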
COLLECTIONS_IGNORED_53 = {
"availability_zones", "conditions", "events", "flavors", "policy_actions", "security_groups",
"tags", "tasks",
}
COLLECTIONS_IGNORED_54 = {
"features", "pictures", "provision_dialogs", "rates", "results", "service_dialogs",
}
@pytest.mark.tier(3)
@pytest.mark.parametrize(
"collection_name",
["availability_zones", "chargebacks", "clusters", "conditions", "data_stores", "events",
"features", "flavors", "groups", "hosts", "pictures", "policies", "policy_actions",
"policy_profiles", "provision_dialogs", "rates", "request_tasks", "requests", "resource_pools",
"results", "roles", "security_groups", "servers", "service_dialogs", "service_requests",
"tags", "tasks", "templates", "users", "vms", "zones"])
@pytest.mark.uncollectif(
lambda collection_name: (
collection_name in COLLECTIONS_IGNORED_53 and current_version() < "5.4") or (
collection_name in COLLECTIONS_IGNORED_54 and current_version() < "5.5"))
def test_query_simple_collections(rest_api, collection_name):
"""This test tries to load each of the listed collections. 'Simple' collection means that they
have no usable actions that we could try to run
Steps:
* GET /api/<collection_name>
Metadata:
test_flag: rest
"""
collection = getattr(rest_api.collections, collection_name)
collection.reload()
list(collection)
|
gpl-2.0
| 6,124,307,325,183,130,000
| 34.535088
| 99
| 0.680573
| false
| 3.547285
| true
| false
| false
|
scality/manila
|
manila/scheduler/filters/capabilities.py
|
1
|
3548
|
# Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from manila.scheduler.filters import base_host
from manila.scheduler.filters import extra_specs_ops
LOG = logging.getLogger(__name__)
class CapabilitiesFilter(base_host.BaseHostFilter):
"""HostFilter to work with resource (instance & volume) type records."""
def _satisfies_extra_specs(self, capabilities, resource_type):
"""Compare capabilities against extra specs.
Check that the capabilities provided by the services satisfy
the extra specs associated with the resource type.
"""
extra_specs = resource_type.get('extra_specs', [])
if not extra_specs:
return True
for key, req in extra_specs.items():
# Either not scoped format, or in capabilities scope
scope = key.split(':')
# Ignore scoped (such as vendor-specific) capabilities
if len(scope) > 1 and scope[0] != "capabilities":
continue
# Strip off prefix if spec started with 'capabilities:'
elif scope[0] == "capabilities":
del scope[0]
cap = capabilities
for index in range(len(scope)):
try:
cap = cap.get(scope[index])
except AttributeError:
cap = None
if cap is None:
LOG.debug("Host doesn't provide capability '%(cap)s' "
"listed in the extra specs",
{'cap': scope[index]})
return False
# Make all capability values a list so we can handle lists
cap_list = [cap] if not isinstance(cap, list) else cap
# Loop through capability values looking for any match
for cap_value in cap_list:
if extra_specs_ops.match(cap_value, req):
break
else:
# Nothing matched, so bail out
LOG.debug('Share type extra spec requirement '
'"%(key)s=%(req)s" does not match reported '
'capability "%(cap)s"',
{'key': key, 'req': req, 'cap': cap})
return False
return True
def host_passes(self, host_state, filter_properties):
"""Return a list of hosts that can create resource_type."""
# Note(zhiteng) Currently only Cinder and Nova are using
# this filter, so the resource type is either instance or
# volume.
resource_type = filter_properties.get('resource_type')
if not self._satisfies_extra_specs(host_state.capabilities,
resource_type):
LOG.debug("%(host_state)s fails resource_type extra_specs "
"requirements", {'host_state': host_state})
return False
return True
|
apache-2.0
| 9,135,078,643,539,611,000
| 39.318182
| 78
| 0.580045
| false
| 4.775236
| false
| false
| false
|
antiface/audiolazy
|
examples/pi.py
|
1
|
2141
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of AudioLazy, the signal processing Python package.
# Copyright (C) 2012-2014 Danilo de Jesus da Silva Bellini
#
# AudioLazy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Created on Sun May 05 2013
# danilo [dot] bellini [at] gmail [dot] com
"""
Calculate "pi" using the Madhava-Gregory-Leibniz series and Machin formula
"""
from __future__ import division, print_function
from audiolazy import Stream, thub, count, z, pi # For comparison
def mgl_seq(x):
"""
Sequence whose sum is the Madhava-Gregory-Leibniz series.
[x, -x^3/3, x^5/5, -x^7/7, x^9/9, -x^11/11, ...]
Returns
-------
An endless sequence that has the property
``atan(x) = sum(mgl_seq(x))``.
Usually you would use the ``atan()`` function, not this one.
"""
odd_numbers = thub(count(start=1, step=2), 2)
return Stream(1, -1) * x ** odd_numbers / odd_numbers
def atan_mgl(x, n=10):
"""
Finds the arctan using the Madhava-Gregory-Leibniz series.
"""
acc = 1 / (1 - z ** -1) # Accumulator filter
return acc(mgl_seq(x)).skip(n-1).take()
if __name__ == "__main__":
print("Reference (for comparison):", repr(pi))
print()
print("Machin formula (fast)")
pi_machin = 4 * (4 * atan_mgl(1/5) - atan_mgl(1/239))
print("Found:", repr(pi_machin))
print("Error:", repr(abs(pi - pi_machin)))
print()
print("Madhava-Gregory-Leibniz series for 45 degrees (slow)")
pi_mgl_series = 4 * atan_mgl(1, n=1e6) # Sums 1,000,000 items...slow...
print("Found:", repr(pi_mgl_series))
print("Error:", repr(abs(pi - pi_mgl_series)))
print()
|
gpl-3.0
| -3,385,673,187,416,630,000
| 31.439394
| 74
| 0.670715
| false
| 3.032578
| false
| false
| false
|
achernya/byobu
|
usr/lib/byobu/include/select-session.py
|
1
|
6798
|
#!/usr/bin/python3
#
# select-session.py
# Copyright (C) 2010 Canonical Ltd.
# Copyright (C) 2012-2014 Dustin Kirkland <kirkland@byobu.org>
#
# Authors: Dustin Kirkland <kirkland@byobu.org>
# Ryan C. Thompson <rct@thompsonclan.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import sys
import subprocess
try:
# For Python3, try and import input from builtins
from builtins import input
except Exception:
# But fall back to using the default input
True
PKG = "byobu"
SHELL = os.getenv("SHELL", "/bin/bash")
HOME = os.getenv("HOME")
BYOBU_CONFIG_DIR = os.getenv("BYOBU_CONFIG_DIR", HOME + "/.byobu")
BYOBU_BACKEND = os.getenv("BYOBU_BACKEND", "tmux")
choice = -1
sessions = []
text = []
reuse_sessions = os.path.exists("%s/.reuse-session" % (BYOBU_CONFIG_DIR))
BYOBU_UPDATE_ENVVARS = ["DISPLAY", "DBUS_SESSION_BUS_ADDRESS", "SESSION_MANAGER", "GPG_AGENT_INFO", "XDG_SESSION_COOKIE", "XDG_SESSION_PATH", "GNOME_KEYRING_CONTROL", "GNOME_KEYRING_PID", "GPG_AGENT_INFO", "SSH_ASKPASS", "SSH_AUTH_SOCK", "SSH_AGENT_PID", "WINDOWID", "UPSTART_JOB", "UPSTART_EVENTS", "UPSTART_SESSION", "UPSTART_INSTANCE"]
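# These variables are pushed into an existing session on reattach (see
# update_environment below) so that X forwarding, ssh-agent, and D-Bus
# references stay current inside the session.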
def get_sessions():
sessions = []
i = 0
output = False
if BYOBU_BACKEND == "screen":
try:
output = subprocess.Popen(["screen", "-ls"], stdout=subprocess.PIPE).communicate()[0]
except subprocess.CalledProcessError as cpe:
# screen -ls seems to always return 1
if cpe.returncode != 1:
raise
else:
output = cpe.output
if sys.stdout.encoding is None:
output = output.decode("UTF-8")
else:
output = output.decode(sys.stdout.encoding)
if output:
for s in output.splitlines():
s = re.sub(r'\s+', ' ', s)
# Ignore hidden sessions (named sessions that start with a "." or a "_")
if s and s != " " and (s.find(" ") == 0 and len(s) > 1 and s.count("..") == 0 and s.count("._") == 0):
text.append("screen: %s" % s.strip())
items = s.split(" ")
sessions.append("screen____%s" % items[1])
i += 1
if BYOBU_BACKEND == "tmux":
output = subprocess.Popen(["tmux", "list-sessions"], stdout=subprocess.PIPE).communicate()[0]
if sys.stdout.encoding is None:
output = output.decode("UTF-8")
else:
output = output.decode(sys.stdout.encoding)
if output:
for s in output.splitlines():
# Ignore hidden sessions (named sessions that start with a "_")
if s and not s.startswith("_"):
text.append("tmux: %s" % s.strip())
sessions.append("tmux____%s" % s.split(":")[0])
i += 1
return sessions
def cull_zombies(session_name):
# When using tmux session groups, closing a client will leave
# unattached "zombie" sessions that will never be reattached.
# Search for and kill any unattached hidden sessions in the same group
if BYOBU_BACKEND == "tmux":
output = subprocess.Popen(["tmux", "list-sessions"], stdout=subprocess.PIPE).communicate()[0]
if sys.stdout.encoding is None:
output = output.decode("UTF-8")
else:
output = output.decode(sys.stdout.encoding)
if not output:
return
# Find the master session to extract the group name. We use
# the group number to be extra sure the right session is getting
# killed. We don't want to accidentally kill the wrong one
pattern = "^%s:.+\\((group [^\\)]+)\\).*$" % session_name
master = re.search(pattern, output, re.MULTILINE)
if not master:
return
# Kill all the matching hidden & unattached sessions
pattern = "^_%s-\\d+:.+\\(%s\\)$" % (session_name, master.group(1))
for s in re.findall(pattern, output, re.MULTILINE):
subprocess.Popen(["tmux", "kill-session", "-t", s.split(":")[0]])
def update_environment(session):
backend, session_name = session.split("____", 2)
for var in BYOBU_UPDATE_ENVVARS:
value = os.getenv(var)
if value:
if backend == "tmux":
cmd = ["tmux", "setenv", "-t", session_name, var, value]
else:
cmd = ["screen", "-S", session_name, "-X", "setenv", var, value]
subprocess.call(cmd, stdout=open(os.devnull, "w"))
def attach_session(session):
update_environment(session)
backend, session_name = session.split("____", 2)
cull_zombies(session_name)
# must use the binary, not the wrapper!
if backend == "tmux":
if reuse_sessions:
os.execvp("tmux", ["tmux", "attach", "-t", session_name])
else:
os.execvp("tmux", ["tmux", "-2", "new-session", "-t", session_name, "-s", "_%s-%i" % (session_name, os.getpid())])
else:
os.execvp("screen", ["screen", "-AOxRR", session_name])
sessions = get_sessions()
show_shell = os.path.exists("%s/.always-select" % (BYOBU_CONFIG_DIR))
if len(sessions) > 1 or show_shell:
sessions.append("NEW")
text.append("Create a new Byobu session (%s)" % BYOBU_BACKEND)
sessions.append("SHELL")
text.append("Run a shell without Byobu (%s)" % SHELL)
if len(sessions) > 1:
sys.stdout.write("\nByobu sessions...\n\n")
tries = 0
while tries < 3:
i = 1
for s in text:
sys.stdout.write(" %d. %s\n" % (i, s))
i += 1
try:
try:
user_input = input("\nChoose 1-%d [1]: " % (i - 1))
except Exception:
user_input = ""
if not user_input or user_input == "":
choice = 1
break
try:
choice = int(user_input)
except Exception:
choice = int(eval(user_input))
if choice >= 1 and choice < i:
break
else:
tries += 1
choice = -1
sys.stderr.write("\nERROR: Invalid input\n")
except KeyboardInterrupt:
sys.stdout.write("\n")
sys.exit(0)
except Exception:
if choice == "" or choice == -1:
choice = 1
break
tries += 1
choice = -1
sys.stderr.write("\nERROR: Invalid input\n")
elif len(sessions) == 1:
# Auto-select the only session
choice = 1
if choice >= 1:
if sessions[choice - 1] == "NEW":
# Create a new session
if BYOBU_BACKEND == "tmux":
os.execvp("byobu", ["byobu", "new-session", SHELL])
else:
os.execvp("byobu", ["byobu", SHELL])
elif sessions[choice - 1] == "SHELL":
os.execvp(SHELL, [SHELL])
else:
# Attach to the chosen session; must use the binary, not the wrapper!
attach_session(sessions[choice - 1])
# No valid selection, default to the youngest session, create if necessary
if BYOBU_BACKEND == "tmux":
os.execvp("tmux", ["tmux"])
else:
os.execvp("screen", ["screen", "-AOxRR"])
|
gpl-3.0
| -5,511,983,227,267,064,000
| 31.371429
| 338
| 0.649603
| false
| 2.963383
| false
| false
| false
|
p-l-/miasm
|
miasm2/expression/simplifications_common.py
|
1
|
19247
|
# ----------------------------- #
# Common simplifications passes #
# ----------------------------- #
from miasm2.expression.expression import *
from miasm2.expression.expression_helper import *
def simp_cst_propagation(e_s, e):
"""This passe includes:
- Constant folding
- Common logical identities
- Common binary identities
"""
    # merge associative op
if not isinstance(e, ExprOp):
return e
args = list(e.args)
op = e.op
    # simplify integer manipulation
    # int OP int => int
    # TODO: <<< >>> << >> are architecture dependent
if op in op_propag_cst:
while (len(args) >= 2 and
isinstance(args[-1], ExprInt) and
isinstance(args[-2], ExprInt)):
i2 = args.pop()
i1 = args.pop()
if op == '+':
o = i1.arg + i2.arg
elif op == '*':
o = i1.arg * i2.arg
elif op == '^':
o = i1.arg ^ i2.arg
elif op == '&':
o = i1.arg & i2.arg
elif op == '|':
o = i1.arg | i2.arg
elif op == '>>':
o = i1.arg >> i2.arg
elif op == '<<':
o = i1.arg << i2.arg
elif op == 'a>>':
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 >> x2)
elif op == '>>>':
rounds = i2.arg
o = i1.arg >> i2.arg | i1.arg << (i1.size - i2.arg)
elif op == '<<<':
o = i1.arg << i2.arg | i1.arg >> (i1.size - i2.arg)
elif op == '/':
o = i1.arg / i2.arg
elif op == '%':
o = i1.arg % i2.arg
elif op == 'idiv':
assert(i2.arg.arg)
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 / x2)
elif op == 'imod':
assert(i2.arg.arg)
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 % x2)
elif op == 'umod':
assert(i2.arg.arg)
x1 = mod_size2uint[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 % x2)
elif op == 'udiv':
assert(i2.arg.arg)
x1 = mod_size2uint[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 / x2)
o = ExprInt_fromsize(i1.size, o)
args.append(o)
# bsf(int) => int
if op == "bsf" and isinstance(args[0], ExprInt) and args[0].arg != 0:
i = 0
while args[0].arg & (1 << i) == 0:
i += 1
return ExprInt_from(args[0], i)
# bsr(int) => int
if op == "bsr" and isinstance(args[0], ExprInt) and args[0].arg != 0:
i = args[0].size - 1
while args[0].arg & (1 << i) == 0:
i -= 1
return ExprInt_from(args[0], i)
# -(-(A)) => A
if op == '-' and len(args) == 1 and isinstance(args[0], ExprOp) and \
args[0].op == '-' and len(args[0].args) == 1:
return args[0].args[0]
# -(int) => -int
if op == '-' and len(args) == 1 and isinstance(args[0], ExprInt):
return ExprInt(-args[0].arg)
# A op 0 =>A
if op in ['+', '|', "^", "<<", ">>", "<<<", ">>>"] and len(args) > 1:
if isinstance(args[-1], ExprInt) and args[-1].arg == 0:
args.pop()
# A - 0 =>A
if op == '-' and len(args) > 1 and args[-1].arg == 0:
        assert(len(args) == 2) # Op '-' with more than 2 args: SanityCheckError
return args[0]
# A * 1 =>A
if op == "*" and len(args) > 1:
if isinstance(args[-1], ExprInt) and args[-1].arg == 1:
args.pop()
    # for canonical form
# A * -1 => - A
if op == "*" and len(args) > 1:
if (isinstance(args[-1], ExprInt) and
args[-1].arg == (1 << args[-1].size) - 1):
args.pop()
args[-1] = - args[-1]
# op A => A
if op in ['+', '*', '^', '&', '|', '>>', '<<',
'a>>', '<<<', '>>>', 'idiv', 'imod', 'umod', 'udiv'] and len(args) == 1:
return args[0]
# A-B => A + (-B)
if op == '-' and len(args) > 1:
if len(args) > 2:
raise ValueError(
'sanity check fail on expr -: should have one or 2 args ' +
'%r %s' % (e, e))
return ExprOp('+', args[0], -args[1])
# A op 0 => 0
if op in ['&', "*"] and isinstance(args[1], ExprInt) and args[1].arg == 0:
return ExprInt_from(e, 0)
# - (A + B +...) => -A + -B + -C
if (op == '-' and
len(args) == 1 and
isinstance(args[0], ExprOp) and
args[0].op == '+'):
args = [-a for a in args[0].args]
e = ExprOp('+', *args)
return e
# -(a?int1:int2) => (a?-int1:-int2)
if (op == '-' and
len(args) == 1 and
isinstance(args[0], ExprCond) and
isinstance(args[0].src1, ExprInt) and
isinstance(args[0].src2, ExprInt)):
i1 = args[0].src1
i2 = args[0].src2
i1 = ExprInt_from(i1, -i1.arg)
i2 = ExprInt_from(i2, -i2.arg)
return ExprCond(args[0].cond, i1, i2)
i = 0
while i < len(args) - 1:
j = i + 1
while j < len(args):
# A ^ A => 0
if op == '^' and args[i] == args[j]:
args[i] = ExprInt_from(args[i], 0)
del(args[j])
continue
# A + (- A) => 0
if op == '+' and isinstance(args[j], ExprOp) and args[j].op == "-":
if len(args[j].args) == 1 and args[i] == args[j].args[0]:
args[i] = ExprInt_from(args[i], 0)
del(args[j])
continue
# (- A) + A => 0
if op == '+' and isinstance(args[i], ExprOp) and args[i].op == "-":
if len(args[i].args) == 1 and args[j] == args[i].args[0]:
args[i] = ExprInt_from(args[i], 0)
del(args[j])
continue
# A | A => A
if op == '|' and args[i] == args[j]:
del(args[j])
continue
# A & A => A
if op == '&' and args[i] == args[j]:
del(args[j])
continue
j += 1
i += 1
if op in ['|', '&', '%', '/'] and len(args) == 1:
return args[0]
# A <<< A.size => A
if (op in ['<<<', '>>>'] and
isinstance(args[1], ExprInt) and
args[1].arg == args[0].size):
return args[0]
# A <<< X <<< Y => A <<< (X+Y) (ou <<< >>>)
if (op in ['<<<', '>>>'] and
isinstance(args[0], ExprOp) and
args[0].op in ['<<<', '>>>']):
op1 = op
op2 = args[0].op
if op1 == op2:
op = op1
args1 = args[0].args[1] + args[1]
else:
op = op2
args1 = args[0].args[1] - args[1]
args0 = args[0].args[0]
args = [args0, args1]
# A >> X >> Y => A >> (X+Y)
if (op in ['<<', '>>'] and
isinstance(args[0], ExprOp) and
args[0].op == op):
args = [args[0].args[0], args[0].args[1] + args[1]]
# ((A & A.mask)
if op == "&" and args[-1] == e.mask:
return ExprOp('&', *args[:-1])
# ((A | A.mask)
if op == "|" and args[-1] == e.mask:
return args[-1]
# ! (!X + int) => X - int
# TODO
# ((A & mask) >> shift) whith mask < 2**shift => 0
if (op == ">>" and
isinstance(args[1], ExprInt) and
isinstance(args[0], ExprOp) and args[0].op == "&"):
if (isinstance(args[0].args[1], ExprInt) and
2 ** args[1].arg > args[0].args[1].arg):
return ExprInt_from(args[0], 0)
# parity(int) => int
if op == 'parity' and isinstance(args[0], ExprInt):
return ExprInt1(parity(args[0].arg))
# (-a) * b * (-c) * (-d) => (-a) * b * c * d
if op == "*" and len(args) > 1:
new_args = []
counter = 0
for a in args:
if isinstance(a, ExprOp) and a.op == '-' and len(a.args) == 1:
new_args.append(a.args[0])
counter += 1
else:
new_args.append(a)
if counter % 2:
return -ExprOp(op, *new_args)
args = new_args
# A << int with A ExprCompose => move index
if op == "<<" and isinstance(args[0], ExprCompose) and isinstance(args[1], ExprInt):
final_size = args[0].size
shift = int(args[1].arg)
new_args = []
# shift indexes
for expr, start, stop in args[0].args:
new_args.append((expr, start+shift, stop+shift))
# filter out expression
filter_args = []
min_index = final_size
for expr, start, stop in new_args:
if start >= final_size:
continue
if stop > final_size:
expr = expr[:expr.size - (stop - final_size)]
stop = final_size
filter_args.append((expr, start, stop))
min_index = min(start, min_index)
# create entry 0
expr = ExprInt_fromsize(min_index, 0)
filter_args = [(expr, 0, min_index)] + filter_args
return ExprCompose(filter_args)
# A >> int with A ExprCompose => move index
if op == ">>" and isinstance(args[0], ExprCompose) and isinstance(args[1], ExprInt):
final_size = args[0].size
shift = int(args[1].arg)
new_args = []
# shift indexes
for expr, start, stop in args[0].args:
new_args.append((expr, start-shift, stop-shift))
# filter out expression
filter_args = []
max_index = 0
for expr, start, stop in new_args:
if stop <= 0:
continue
if start < 0:
expr = expr[-start:]
start = 0
filter_args.append((expr, start, stop))
max_index = max(stop, max_index)
# create entry 0
expr = ExprInt_fromsize(final_size - max_index, 0)
filter_args += [(expr, max_index, final_size)]
return ExprCompose(filter_args)
# Compose(a) OP Compose(b) with a/b same bounds => Compose(a OP b)
if op in ['|', '&', '^'] and all([isinstance(arg, ExprCompose) for arg in args]):
bounds = set()
for arg in args:
bound = tuple([(start, stop) for (expr, start, stop) in arg.args])
bounds.add(bound)
if len(bounds) == 1:
bound = list(bounds)[0]
new_args = [[expr] for (expr, start, stop) in args[0].args]
for sub_arg in args[1:]:
for i, (expr, start, stop) in enumerate(sub_arg.args):
new_args[i].append(expr)
for i, arg in enumerate(new_args):
new_args[i] = ExprOp(op, *arg), bound[i][0], bound[i][1]
return ExprCompose(new_args)
return ExprOp(op, *args)
def simp_cond_op_int(e_s, e):
"Extract conditions from operations"
if not isinstance(e, ExprOp):
return e
if not e.op in ["+", "|", "^", "&", "*", '<<', '>>', 'a>>']:
return e
if len(e.args) < 2:
return e
if not isinstance(e.args[-1], ExprInt):
return e
a_int = e.args[-1]
conds = []
for a in e.args[:-1]:
if not isinstance(a, ExprCond):
return e
conds.append(a)
if not conds:
return e
c = conds.pop()
c = ExprCond(c.cond,
ExprOp(e.op, c.src1, a_int),
ExprOp(e.op, c.src2, a_int))
conds.append(c)
new_e = ExprOp(e.op, *conds)
return new_e
def simp_cond_factor(e_s, e):
"Merge similar conditions"
if not isinstance(e, ExprOp):
return e
if not e.op in ["+", "|", "^", "&", "*", '<<', '>>', 'a>>']:
return e
if len(e.args) < 2:
return e
conds = {}
not_conds = []
multi_cond = False
for a in e.args:
if not isinstance(a, ExprCond):
not_conds.append(a)
continue
c = a.cond
if not c in conds:
conds[c] = []
else:
multi_cond = True
conds[c].append(a)
if not multi_cond:
return e
c_out = not_conds[:]
for c, vals in conds.items():
new_src1 = [x.src1 for x in vals]
new_src2 = [x.src2 for x in vals]
src1 = e_s.expr_simp_wrapper(ExprOp(e.op, *new_src1))
src2 = e_s.expr_simp_wrapper(ExprOp(e.op, *new_src2))
c_out.append(ExprCond(c, src1, src2))
if len(c_out) == 1:
new_e = c_out[0]
else:
new_e = ExprOp(e.op, *c_out)
return new_e
def simp_slice(e_s, e):
"Slice optimization"
# slice(A, 0, a.size) => A
if e.start == 0 and e.stop == e.arg.size:
return e.arg
# Slice(int) => int
elif isinstance(e.arg, ExprInt):
total_bit = e.stop - e.start
mask = (1 << (e.stop - e.start)) - 1
return ExprInt_fromsize(total_bit, (e.arg.arg >> e.start) & mask)
# Slice(Slice(A, x), y) => Slice(A, z)
elif isinstance(e.arg, ExprSlice):
if e.stop - e.start > e.arg.stop - e.arg.start:
raise ValueError('slice in slice: getting more val', str(e))
new_e = ExprSlice(e.arg.arg, e.start + e.arg.start,
e.start + e.arg.start + (e.stop - e.start))
return new_e
# Slice(Compose(A), x) => Slice(A, y)
elif isinstance(e.arg, ExprCompose):
for a in e.arg.args:
if a[1] <= e.start and a[2] >= e.stop:
new_e = a[0][e.start - a[1]:e.stop - a[1]]
return new_e
# ExprMem(x, size)[:A] => ExprMem(x, a)
# XXXX todo hum, is it safe?
elif (isinstance(e.arg, ExprMem) and
e.start == 0 and
e.arg.size > e.stop and e.stop % 8 == 0):
e = ExprMem(e.arg.arg, size=e.stop)
return e
# distributivity of slice and &
# (a & int)[x:y] => 0 if int[x:y] == 0
elif (isinstance(e.arg, ExprOp) and
e.arg.op == "&" and
isinstance(e.arg.args[-1], ExprInt)):
tmp = e_s.expr_simp_wrapper(e.arg.args[-1][e.start:e.stop])
if isinstance(tmp, ExprInt) and tmp.arg == 0:
return tmp
# distributivity of slice and exprcond
# (a?int1:int2)[x:y] => (a?int1[x:y]:int2[x:y])
elif (isinstance(e.arg, ExprCond) and
isinstance(e.arg.src1, ExprInt) and
isinstance(e.arg.src2, ExprInt)):
src1 = e.arg.src1[e.start:e.stop]
src2 = e.arg.src2[e.start:e.stop]
e = ExprCond(e.arg.cond, src1, src2)
# (a * int)[0:y] => (a[0:y] * int[0:y])
elif (e.start == 0 and isinstance(e.arg, ExprOp) and
e.arg.op == "*" and isinstance(e.arg.args[-1], ExprInt)):
args = [e_s.expr_simp_wrapper(a[e.start:e.stop]) for a in e.arg.args]
e = ExprOp(e.arg.op, *args)
return e
def simp_compose(e_s, e):
"Commons simplification on ExprCompose"
args = merge_sliceto_slice(e.args)
out = []
# compose of compose
for a in args:
if isinstance(a[0], ExprCompose):
for x, start, stop in a[0].args:
out.append((x, start + a[1], stop + a[1]))
else:
out.append(a)
args = out
# Compose(a) with a.size = compose.size => a
if len(args) == 1 and args[0][1] == 0 and args[0][2] == e.size:
return args[0][0]
# {(X[X.size-z, 0, z), (0, z, X.size)} => (X >> x)
if (len(args) == 2 and
isinstance(args[1][0], ExprInt) and
args[1][0].arg == 0):
a1 = args[0]
a2 = args[1]
if (isinstance(a1[0], ExprSlice) and
a1[1] == 0 and a1[0].stop == a1[0].arg.size):
if a2[1] == a1[0].size and a2[2] == a1[0].arg.size:
new_e = a1[0].arg >> ExprInt_fromsize(
a1[0].arg.size, a1[0].start)
return new_e
    # Compose of ExprCond (with integer src1/src2) and plain integers =>
    # propagate the integers
# {XXX?(0x0,0x1)?(0x0,0x1),0,8, 0x0,8,32} => XXX?(int1, int2)
ok = True
expr_cond = None
expr_ints = []
for i, a in enumerate(args):
if not is_int_or_cond_src_int(a[0]):
ok = False
break
expr_ints.append(a)
if isinstance(a[0], ExprCond):
if expr_cond is not None:
ok = False
expr_cond = i
cond = a[0]
if ok and expr_cond is not None:
src1 = []
src2 = []
for i, a in enumerate(expr_ints):
if i == expr_cond:
src1.append((a[0].src1, a[1], a[2]))
src2.append((a[0].src2, a[1], a[2]))
else:
src1.append(a)
src2.append(a)
src1 = e_s.apply_simp(ExprCompose(src1))
src2 = e_s.apply_simp(ExprCompose(src2))
if isinstance(src1, ExprInt) and isinstance(src2, ExprInt):
return ExprCond(cond.cond, src1, src2)
return ExprCompose(args)
def simp_cond(e_s, e):
"Common simplifications on ExprCond"
if not isinstance(e, ExprCond):
return e
    # eval exprcond src1/src2 with satisfiable/unsatisfiable condition
# propagation
if (not isinstance(e.cond, ExprInt)) and e.cond.size == 1:
src1 = e.src1.replace_expr({e.cond: ExprInt1(1)})
src2 = e.src2.replace_expr({e.cond: ExprInt1(0)})
if src1 != e.src1 or src2 != e.src2:
return ExprCond(e.cond, src1, src2)
# -A ? B:C => A ? B:C
if (isinstance(e.cond, ExprOp) and
e.cond.op == '-' and
len(e.cond.args) == 1):
e = ExprCond(e.cond.args[0], e.src1, e.src2)
# a?x:x
elif e.src1 == e.src2:
e = e.src1
# int ? A:B => A or B
elif isinstance(e.cond, ExprInt):
if e.cond.arg == 0:
e = e.src2
else:
e = e.src1
# a?(a?b:c):x => a?b:x
elif isinstance(e.src1, ExprCond) and e.cond == e.src1.cond:
e = ExprCond(e.cond, e.src1.src1, e.src2)
# a?x:(a?b:c) => a?x:c
elif isinstance(e.src2, ExprCond) and e.cond == e.src2.cond:
e = ExprCond(e.cond, e.src1, e.src2.src2)
# a|int ? b:c => b with int != 0
elif (isinstance(e.cond, ExprOp) and
e.cond.op == '|' and
isinstance(e.cond.args[1], ExprInt) and
e.cond.args[1].arg != 0):
return e.src1
# (C?int1:int2)?(A:B) =>
elif (isinstance(e.cond, ExprCond) and
isinstance(e.cond.src1, ExprInt) and
isinstance(e.cond.src2, ExprInt)):
int1 = e.cond.src1.arg.arg
int2 = e.cond.src2.arg.arg
if int1 and int2:
e = e.src1
elif int1 == 0 and int2 == 0:
e = e.src2
elif int1 == 0 and int2:
e = ExprCond(e.cond.cond, e.src2, e.src1)
elif int1 and int2 == 0:
e = ExprCond(e.cond.cond, e.src1, e.src2)
return e
|
gpl-2.0
| -3,524,819,148,954,221,000
| 32.473043
| 88
| 0.470203
| false
| 3.070187
| false
| false
| false
|
kpeckett/willie-trello
|
willie-trello.py
|
1
|
1429
|
# -*- coding: utf-8 -*-
"""
willie-trello.py - Enhanced Trello links
Licensed under the GNU GPLv3
Copyright (C) 2015 Kieran Peckett
"""
import willie.module
import requests
import time
import re
def setup(bot):
regex = re.compile(r".*\bhttps?://trello\.com/c/(\w+).*")
if not bot.memory.contains('url_callbacks'):
bot.memory['url_callbacks'] = {regex: showTrelloInfo}
else:
exclude = bot.memory['url_callbacks']
exclude[regex] = showTrelloInfo
bot.memory['url_callbacks'] = exclude
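# Registering the pattern in bot.memory['url_callbacks'] lets other URL handlers
# (notably the generic title fetcher) know that Trello links are handled here.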
@willie.module.rule(r".*https?://trello\.com/c/(\w+).*")
def showTrelloInfo(bot,trigger,found_match=None):
"""Shows info about a card on Trello"""
match = found_match or trigger
card_id = match.group(1)
url = "https://api.trello.com/1/card/" + card_id + "?fields=name,closed,desc,due,shortUrl"
response = requests.get(url)
if response.text == "unauthorized card permission requested":
bot.say("Private Trello Card")
else:
data = response.json()
output = data["name"] # Add name of card
  # Add a shortened description (truncated with an ellipsis when it runs long)
if len(data["desc"]) > 50:
output += " | " + data["desc"][0:75] + u"…" # Add ellipsis at end
elif data["desc"] == "":
output += " | No Description"
else:
output += " | " + data["desc"]
if data["due"] == None:
output += " | No Due Date"
else:
due_date = data["due"][0:10]
output += " | Due: " + due_date
output += " | " + data["shortUrl"]
bot.say(output)
|
gpl-3.0
| -7,577,456,725,166,876,000
| 30.021739
| 91
| 0.644008
| false
| 2.770874
| false
| false
| false
|
jangxyz/ecogwiki-client
|
setup.py
|
1
|
1523
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Ecogwiki OAuth client
`ecog` is a python client that talks with [ecogwiki](http://www.ecogwiki.com/). It is configurable to talk with any other ecogwiki hosts.
See https://github.com/jangxyz/ecogwiki-client for details.
"""
from setuptools import setup
def read_version():
import sys
import importlib
sys.path.insert(0, 'ecog')
try:
v = importlib.import_module('version')
return v.__version__
finally:
sys.path.pop(0)
setup(name='ecog',
version=read_version(),
author = 'Jang-hwan Kim',
author_email = 'janghwan@gmail.com',
description = 'Ecogwiki OAuth client',
long_description = __doc__,
url = 'https://github.com/jangxyz/ecogwiki-client',
packages = ['ecog'],
scripts = ['scripts/ecog'],
install_requires = ['oauth2', 'feedparser', 'python-dateutil'],
license = 'MIT License',
platforms = ['POSIX'],
keywords = ['oauth', 'markdown'],
classifiers = [line.strip() for line in '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: Developers
Intended Audience :: End Users/Desktop
License :: OSI Approved :: MIT License
Natural Language :: English
Operating System :: POSIX
Programming Language :: Python :: 2.7
Topic :: Communications
Topic :: Terminals
Topic :: Text Processing
Topic :: Utilities
'''.strip().splitlines()]
)
|
mit
| 8,209,925,821,725,269,000
| 26.196429
| 141
| 0.61392
| false
| 3.945596
| false
| false
| false
|
nuagenetworks/vspk-python
|
vspk/v5_0/nupolicydecision.py
|
1
|
11345
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUQOSsFetcher
from bambou import NURESTObject
class NUPolicyDecision(NURESTObject):
""" Represents a PolicyDecision in the VSD
Notes:
This object is a read only object that provides the policy decisions for a particular VM interface.
"""
__rest_name__ = "policydecision"
__resource_name__ = "policydecisions"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a PolicyDecision instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> policydecision = NUPolicyDecision(id=u'xxxx-xxx-xxx-xxx', name=u'PolicyDecision')
>>> policydecision = NUPolicyDecision(data=my_dict)
"""
super(NUPolicyDecision, self).__init__()
# Read/Write Attributes
self._last_updated_by = None
self._egress_acls = None
self._egress_qos = None
self._fip_acls = None
self._ingress_acls = None
self._ingress_adv_fwd = None
self._entity_scope = None
self._qos = None
self._stats = None
self._external_id = None
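        # expose_attribute maps each local attribute to its remote (VSD API) name
        # and type so the bambou layer can serialize and deserialize it.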
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="egress_acls", remote_name="egressACLs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="egress_qos", remote_name="egressQos", attribute_type=dict, is_required=False, is_unique=False)
self.expose_attribute(local_name="fip_acls", remote_name="fipACLs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="ingress_acls", remote_name="ingressACLs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="ingress_adv_fwd", remote_name="ingressAdvFwd", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="qos", remote_name="qos", attribute_type=dict, is_required=False, is_unique=False)
self.expose_attribute(local_name="stats", remote_name="stats", attribute_type=dict, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.qoss = NUQOSsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def egress_acls(self):
""" Get egress_acls value.
Notes:
List of actual Egress ACLs that will be applied on the interface of this VM
This attribute is named `egressACLs` in VSD API.
"""
return self._egress_acls
@egress_acls.setter
def egress_acls(self, value):
""" Set egress_acls value.
Notes:
List of actual Egress ACLs that will be applied on the interface of this VM
This attribute is named `egressACLs` in VSD API.
"""
self._egress_acls = value
@property
def egress_qos(self):
""" Get egress_qos value.
Notes:
Egress QoS primitive that was selected
This attribute is named `egressQos` in VSD API.
"""
return self._egress_qos
@egress_qos.setter
def egress_qos(self, value):
""" Set egress_qos value.
Notes:
Egress QoS primitive that was selected
This attribute is named `egressQos` in VSD API.
"""
self._egress_qos = value
@property
def fip_acls(self):
""" Get fip_acls value.
Notes:
List of actual Egress ACLs that will be applied on the interface of this VM
This attribute is named `fipACLs` in VSD API.
"""
return self._fip_acls
@fip_acls.setter
def fip_acls(self, value):
""" Set fip_acls value.
Notes:
List of actual Egress ACLs that will be applied on the interface of this VM
This attribute is named `fipACLs` in VSD API.
"""
self._fip_acls = value
@property
def ingress_acls(self):
""" Get ingress_acls value.
Notes:
List of actual Ingress ACLs that will be applied on the interface of this VM
This attribute is named `ingressACLs` in VSD API.
"""
return self._ingress_acls
@ingress_acls.setter
def ingress_acls(self, value):
""" Set ingress_acls value.
Notes:
List of actual Ingress ACLs that will be applied on the interface of this VM
This attribute is named `ingressACLs` in VSD API.
"""
self._ingress_acls = value
@property
def ingress_adv_fwd(self):
""" Get ingress_adv_fwd value.
Notes:
List of actual Ingress Redirect ACLs that will be applied on the interface of this VM
This attribute is named `ingressAdvFwd` in VSD API.
"""
return self._ingress_adv_fwd
@ingress_adv_fwd.setter
def ingress_adv_fwd(self, value):
""" Set ingress_adv_fwd value.
Notes:
List of actual Ingress Redirect ACLs that will be applied on the interface of this VM
This attribute is named `ingressAdvFwd` in VSD API.
"""
self._ingress_adv_fwd = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def qos(self):
""" Get qos value.
Notes:
QoS primitive that was selected based on inheritance policies
"""
return self._qos
@qos.setter
def qos(self, value):
""" Set qos value.
Notes:
QoS primitive that was selected based on inheritance policies
"""
self._qos = value
@property
def stats(self):
""" Get stats value.
Notes:
Stats primitive that was selected based on inheritance policies
"""
return self._stats
@stats.setter
def stats(self, value):
""" Set stats value.
Notes:
Stats primitive that was selected based on inheritance policies
"""
self._stats = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
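# --- Hedged usage sketch (not part of the generated class above) ---
# Assuming a bambou/vspk session has already been started, a policy decision
# can be read and its children fetched through the fetchers wired up in
# __init__ (metadatas, global_metadatas, qoss). The session handling itself
# is assumed from the bambou API and is not shown here.
def _example_read_policy_decision(decision_id):
    decision = NUPolicyDecision(id=decision_id)
    decision.fetch()          # populate the read-only attributes from the VSD
    decision.qoss.fetch()     # child QOS objects via NUQOSsFetcher
    return decision.egress_acls, decision.ingress_acls, decision.qos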
|
bsd-3-clause
| -3,218,528,264,936,086,000
| 28.857895
| 175
| 0.58052
| false
| 4.570911
| false
| false
| false
|
AlCap23/Thesis
|
Python/Experiments/MIMO/titostudy_extern_PTN_H05_TICONST.py
|
1
|
11238
|
"""
Python program to study the robustness of TITO systems.
Identifies the system, computes the controller and analyses the controller using the state space - transfer function relation.
Computes the singular values.
Use this script from the terminal / console with
python FILENAME.py --file_storage=FOLDERNAME
to store essential information with sacred
"""
# Import the packages
# Import numpy
import numpy as np
# Import pandas
import pandas as pd
# Import linear regression model
from scipy import stats
# Import the Algorithms
import sys
#sys.path.append('../../')
import Algorithms as alg
# Import the MOBA Simulator
import MoBASimulator as mb
# Plotting
import pylab as p
# Define an experiment
from sacred import Experiment
###########################################################
########################## MAIN PROGRAM ###################
###########################################################
# Define a Sacred Experiment
ex = Experiment()
###########################################################
########################## CONFIG #########################
###########################################################
@ex.config
def experimental_setup():
# Filename to store in
filename = 'H05_TICONST.csv'
# Overall sample_size
sample_size = 9000
# Max degree
max_deg = 9
# Gain Limits
gain_limits = [-20., 20.0]
# Lag Limits
lag_limits = [50,100]
# Delay Limits, if small approx. no delay
delay_limits = [10,30]
# Step size for simulate
dt = 0.01
# Final time for simulation
t_sim = 1500
# Maximum Interaction
H = 0.5*np.eye(2,2)
# Frequency grid limits as powers of ten (decades) for np.logspace, and number of grid points
wmin = -5
wmax = 3
dw = 10000
# Special frequencies
w_special = np.array([0.01, 0.02, 0.1, 1./150., 1./200.])
# Store the results
# System order, maximum sensitivity and its frequency, the low-frequency gradient of the maximum singular value, and the sensitivity at the special frequencies, for each design (RGA, Astrom, Decoupled)
columns = ['Degree','MS_RGA','w_MS_RGA','Grad_RGA','MS_A', 'w_MS_A','Grad_A', 'MS_D','w_MS_D', 'Grad_D']
# Add the special frequencies
for freq in range(0, w_special.shape[0]):
columns.append('w_'+str(w_special[freq])+'_RGA')
columns.append('w_'+str(w_special[freq])+'_A')
columns.append('w_'+str(w_special[freq])+'_D')
# Make empty data frame with zeros
R = pd.DataFrame(data = np.zeros((sample_size, len(columns))), columns = columns)
###########################################################
################## CREATE VARIABLES #######################
###########################################################
# Create the gain
k = np.random.uniform(gain_limits[0], gain_limits[1],(sample_size,2,2))
num = np.zeros_like(k)
# Delay
l = np.random.uniform(delay_limits[0], delay_limits[1], (sample_size,2,2))
# Create random time constants
t = np.random.uniform(lag_limits[0],lag_limits[1],(sample_size,2,2))
den = np.zeros((sample_size, 2, 2, max_deg+1))
# Loop over the samples and spread the model order evenly across them
for samples in range(0, sample_size):
# Compute current order, from 1 to ...
degree = int(1.0*samples/sample_size * max_deg) + 1
# Loop over outputs
for outputs in range(0,2):
# Loop over inputs
for inputs in range(0,2):
# Compute the distances between the random time constants
# Sort until current degree
dist = float(t[samples, outputs, inputs]/degree) * np.ones(degree)
# Insert a zero for the first distance
#dist = np.insert(dist, [0], 0.0)
# Calculate the distance
#dist = np.ediff1d(dist)
# Calculate a stable polynomial whose highest coefficient is normalized (monic)
den[samples, outputs, inputs, :(degree+1)] = np.polynomial.polynomial.polyfromroots(-1./dist)
# Hence, normalize the gain with the highest coefficient
num[samples, outputs, inputs] = k[samples, outputs, inputs] * den[samples, outputs, inputs, 0]
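# Worked sketch of the construction above: for t = 60 and degree = 3 the
# roots are all at -1/20, and np.polynomial.polynomial.polyfromroots([-0.05]*3)
# returns [1.25e-04, 7.5e-03, 1.5e-01, 1.0] (lowest order first), i.e. the
# coefficients of (s + 0.05)^3. Scaling num by den[..., 0] then makes the
# steady-state gain num/den(0) equal to the sampled gain k.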
###########################################################
################## EXPERIMENT #############################
###########################################################
@ex.automain
def experiment(num, den, l, R, filename, sample_size, max_deg, dt, t_sim, H, wmin, wmax, dw, w_special):
# Loop over the samples, compute order like earlier
###########################################################
####################### INITIAL MODEL #####################
###########################################################
# Open Simulator
sim = mb.Simulator()
# Show the log window
sim.showLogWindow()
###########################################################
####################### SAMPLE LOOP #######################
###########################################################
# Set initial degree to zero
degree = 0
for samples in range(0, sample_size):
# Calculate the current degree
c_deg = int(1.0 * samples/sample_size * max_deg) +1
# Check if degree has changed
if degree < c_deg:
# Change degree
degree = c_deg
# Clear Simulator
#sim.clear()
# Load new model
sim.loadModel("/2_2_n"+str(degree)+"/Masterthesis_Models_mimo_0processmodel.fmu")
sim.setOperationMode('FMU for ModelExchange')
# Preallocate identification parameters
K = np.zeros((2,2))
T = np.zeros((2,2))
L = np.zeros((2,2))
# Reload the model
sim.reloadModel()
# Set Simulation Parameter
daeSolverParams = sim.getDAESolverParameters()
daeSolverParams['absTol'] = 1e-7
daeSolverParams['relTol'] = 1e-8
sim.setDAESolverParameters(daeSolverParams)
###########################################################
####################### MODEL SETUP #######################
###########################################################
# Create a parameter list
params = {}
# Loop over the systems outputs
for outputs in range(0,2):
# Loop over the systems inputs
for inputs in range(0,2):
# Set system gain
params.update({"fmu.num["+str(outputs+1)+","+str(inputs+1)+",1]": num[samples][outputs][inputs]})
# Set system delay
params.update({"fmu.delay["+str(outputs+1)+","+str(inputs+1)+"]": l.item(samples,outputs,inputs)})
# Loop over denominator coefficients
for order in range(0, degree+1):
params.update({"fmu.den["+str(outputs+1)+","+str(inputs+1)+","+str(degree-order+1)+"]": den[samples][outputs][inputs][(order)]})
# Set the parameter
sim.set(params)
# Show the Parameter
#sim.showParameterDialog()
# Store the state space rep for later use
ss = sim.analyser_getStateSpaceForm()
###########################################################
####################### IDENTIFICATION ####################
###########################################################
# Setup first experiment Input 1 -> Output 1 and Output 2
sim.set({"fmu.u[1]": 1,"fmu.u[2]": 0})
# Simulation of the experiment
res = sim.simulate(dt, t_sim)
# Get the needed signals
y = res["fmu.y[1]"]
y2 = res["fmu.y[2]"]
u = res["fmu.u[1]"]
time = res["time"]
# Plot the system
#p.plot(time,y)
#p.plot(time,y2)
#p.show()
# Get TF from Input 1 to Output 1
K[0][0],T[0][0],L[0][0]=alg.Integral_Identification(y,u,time)
# Get TF from Input 1 to Output 2
K[1][0],T[1][0],L[1][0]=alg.Integral_Identification(y2,u,time)
# Setup second experiment Input 2 -> Output 1 and Output 2
# Reset the model state
sim.resetModelState()
# Input Parameter
sim.set({"fmu.u[1]": 0,"fmu.u[2]": 1})
# Simulation of the experiment
res = sim.simulate(dt, t_sim)
# Get the needed signals
y = res["fmu.y[1]"]
y2 = res["fmu.y[2]"]
u = res["fmu.u[2]"]
time = res["time"]
# Get TF from Input 2 to Output 1
K[0][1],T[0][1],L[0][1] = alg.Integral_Identification(y,u,time)
# Get TF from Input 2 to Output 2
K[1][1],T[1][1],L[1][1] = alg.Integral_Identification(y2,u,time)
# Print the System Parameter
# print(K,T,L)
###########################################################
####################### CONTROLLER DESIGN #################
###########################################################
# Loop over the three methods
for methods in range(0,3):
if methods == 0:
KY,B,D = alg.Control_Decentral(K,T,L, b = 0.)
elif methods == 1:
KY,B,D = alg.Control_Astrom(K,T,L,H, b = .0)
else:
KY,B,D = alg.Control_Decoupled(K,T,L,H ,b = .0)
###########################################################
####################### EVALUATION ########################
###########################################################
# Create a frequency range
omega = np.logspace(wmin, wmax, dw)
# Store the singular values
sv = np.zeros((2,omega.shape[0]))
# Loop over the frequency
for freq in range(0, omega.shape[0]):
# Evaluate the sensitivity at given frequency
S = alg.compute_sensitivity(ss, KY,B,D, omega[freq])
u, sv[:, freq], w = np.linalg.svd(np.abs(S))
# Clear variables
del u,w
# Find the maximum of the sensitivity
ms = np.max(sv)
# Get the corresponding frequency
omega_ms = omega[np.argmax(sv)]
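# ms is the peak over the frequency grid of the largest singular value of the
# sensitivity; as a common robust-control rule of thumb (hedged, not taken from
# this script), designs with MS below roughly 2 (about 6 dB) are considered robust.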
# Print the sensitivity
#p.loglog(omega, sv[0,:])
#p.loglog(omega, sv[1,:])
#p.show()
# Compute the gradient of the maximal singular values
# Compute the maximum singular value along all frequency
sv_max = np.max(sv, axis=0)
# Compute the slope via linear regression
slope, intercept, r_value, p_value, std_err = stats.linregress(omega[np.where(omega<=1.0)], sv_max[np.where(omega<=1.0)])
# Clear variables
del intercept, r_value, p_value, std_err
# Evaluate at the special frequencies
ms_s = []
for freq in w_special:
# Evaluate the sensitivity at given frequency
S = alg.compute_sensitivity(ss, KY,B,D, freq)
u, v, w = np.linalg.svd(np.abs(S))
ms_s.append(np.max(v))
# Clear variables
del u,v,w
###########################################################
####################### STORE DATA ########################
###########################################################
# Store Degree
R.set_value(samples, 'Degree', degree)
if methods == 0:
# Store the maximum sensitivity
R.set_value(samples, 'MS_RGA', ms)
# Store the corresponding frequency
R.set_value(samples, 'w_MS_RGA', omega_ms)
# Store the maximum singular value at the special frequencies
for freq in range(0, w_special.shape[0]):
R.set_value(samples, 'w_'+str(w_special[freq])+'_RGA', ms_s[freq])
# Store the gradient
R.set_value(samples, 'Grad_RGA', slope)
elif methods == 1:
# Store the maximum sensitivity
R.set_value(samples, 'MS_A', ms)
# Store the corresponding frequency
R.set_value(samples, 'w_MS_A', omega_ms)
# Store the maximum singular value at the special frequencies
for freq in range(0, w_special.shape[0]):
R.set_value(samples, 'w_'+str(w_special[freq])+'_A', ms_s[freq])
# Store the gradient
R.set_value(samples, 'Grad_A', slope)
else:
# Store the maximum sensitivity
R.set_value(samples, 'MS_D', ms)
# Store the corresponding frequency
R.set_value(samples, 'w_MS_D', omega_ms)
# Store the maximum singular value at the special frequencies
for freq in range(0, w_special.shape[0]):
R.set_value(samples, 'w_'+str(w_special[freq])+'_D', ms_s[freq])
# Store the gradient
R.set_value(samples, 'Grad_D', slope)
# Store after every sample
R.to_csv(filename, sep=";")
|
gpl-3.0
| 4,852,432,078,727,296,000
| 33.158055
| 133
| 0.555348
| false
| 3.284044
| false
| false
| false
|
AlexPayment/github-sms-notifier
|
github_sms_notifier/github_sms_notifier.py
|
1
|
7472
|
import json
import re
from flask import Flask, flash, make_response
from flask.globals import request
from flask.templating import render_template
import requests
from twilio.rest import TwilioRestClient
PHONE_NUMBER_PATTERN = re.compile("^\\+?\\d{10,14}$")
PULL_REQUEST_OPENED = 'prOpened'
PULL_REQUEST_CLOSED = 'prClosed'
PULL_REQUEST_SYNCHRONIZE = 'prSynchronize'
PULL_REQUEST_REOPENED = 'prReopened'
REPOSITORIES = 'repositories'
REPOSITORY_PATTERN = re.compile("[A-Za-z0-9_\\.-]+/[A-Za-z0-9_\\.-]+")
SETTINGS_JSON_FILE_NAME = 'settings.json'
SETTINGS_TEMPLATE = 'settings.html'
TO_NUMBERS = 'toNumbers'
TWILIO_ACCOUNT_SID = 'twilioAccountSid'
TWILIO_AUTH_TOKEN = 'twilioAuthToken'
TWILIO_FROM_NUMBER = 'twilioFromNumber'
app = Flask(__name__)
short_urls = {}
@app.route('/')
def root():
return 'Thank you for using github-sms-notifier!'
@app.route('/admin', methods=['GET'])
def config():
settings = __read_settings()
return render_template(SETTINGS_TEMPLATE, settings=settings)
@app.route('/admin', methods=['POST'])
def save_config():
app.logger.debug(request.form)
pull_request_closed_enabled = False
if PULL_REQUEST_CLOSED in request.form:
pull_request_closed_enabled = True
pull_request_opened_enabled = False
if PULL_REQUEST_OPENED in request.form:
pull_request_opened_enabled = True
pull_request_reopened_enabled = False
if PULL_REQUEST_REOPENED in request.form:
pull_request_reopened_enabled = True
pull_request_synchronize_enabled = False
if PULL_REQUEST_SYNCHRONIZE in request.form:
pull_request_synchronize_enabled = True
settings = {TWILIO_ACCOUNT_SID: request.form[TWILIO_ACCOUNT_SID].strip(),
TWILIO_AUTH_TOKEN: request.form[TWILIO_AUTH_TOKEN].strip(),
TWILIO_FROM_NUMBER: request.form[TWILIO_FROM_NUMBER].strip(),
TO_NUMBERS: request.form[TO_NUMBERS].strip().split(), PULL_REQUEST_CLOSED: pull_request_closed_enabled,
PULL_REQUEST_OPENED: pull_request_opened_enabled, PULL_REQUEST_REOPENED: pull_request_reopened_enabled,
PULL_REQUEST_SYNCHRONIZE: pull_request_synchronize_enabled,
REPOSITORIES: request.form[REPOSITORIES].strip().split()}
errors = __validate_settings(settings)
if errors:
for error in errors:
flash(error, category='error')
else:
with open(SETTINGS_JSON_FILE_NAME, 'w+') as settings_file:
json.dump(settings, settings_file)
flash("Settings saved!")
return render_template(SETTINGS_TEMPLATE, settings=settings)
@app.route('/pullRequests', methods=['POST'])
def pull_requests():
settings = __read_settings()
if settings:
content = json.loads(request.data)
if 'pull_request' in content:
client = TwilioRestClient(settings[TWILIO_ACCOUNT_SID], settings[TWILIO_AUTH_TOKEN])
message = __build_sms_body(content)
app.logger.debug(request.data)
if message and not app.testing:
numbers = settings[TO_NUMBERS]
for number in numbers:
client.sms.messages.create(body=message, from_=settings[TWILIO_FROM_NUMBER], to=number)
else:
app.logger.warn("Not a pull request: {}".format(request.data))
else:
app.logger.warn("Cannot load settings.")
return make_response("", 204)
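# Descriptive note: the handler above relies on the GitHub pull_request webhook
# payload fields used throughout this module: 'action', 'number',
# 'repository'['full_name'] and 'pull_request'['html_url']. A minimal test
# payload (hedged, for local experimentation only) could look like:
#   {"action": "opened", "number": 1,
#    "repository": {"full_name": "octocat/hello-world"},
#    "pull_request": {"html_url": "https://github.com/octocat/hello-world/pull/1"}}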
def __build_sms_body(request_body):
settings = __read_settings()
message_prefix = 'Pull request #' + str(request_body['number'])
message_suffix = request_body['repository']['full_name'] + ' ' + __get_short_url(
request_body['pull_request']['html_url'])
if request_body['action'] == 'opened':
if settings[PULL_REQUEST_OPENED] and __is_supported_repository(settings.get(REPOSITORIES),
request_body['repository']['full_name']):
return message_prefix + ' was opened in ' + message_suffix
elif request_body['action'] == 'closed':
if settings[PULL_REQUEST_CLOSED] and __is_supported_repository(settings.get(REPOSITORIES),
request_body['repository']['full_name']):
return message_prefix + ' was closed in ' + message_suffix
elif request_body['action'] == 'synchronize':
if settings[PULL_REQUEST_SYNCHRONIZE] and __is_supported_repository(settings.get(REPOSITORIES),
request_body['repository']['full_name']):
return message_prefix + ' was synchronized in ' + message_suffix
elif request_body['action'] == 'reopened':
if settings[PULL_REQUEST_REOPENED] and __is_supported_repository(settings.get(REPOSITORIES),
request_body['repository']['full_name']):
return message_prefix + ' was reopened in ' + message_suffix
else:
return 'Unsupported action \'' + request_body['action'] + '\' occurred on pull request #' + str(
request_body['number']) + ' in ' + message_suffix
def __get_short_url(url):
if short_urls.get(url):
return short_urls[url]
payload = {'url': url}
r = requests.post('http://git.io', data=payload)
short_urls[url] = r.headers.get('Location')
return short_urls[url]
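# Descriptive note: __get_short_url POSTs {'url': ...} to http://git.io and
# caches the 'Location' response header in the module-level short_urls dict,
# so repeated notifications for the same pull request reuse the short link
# instead of hitting the shortener again.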
def __is_supported_repository(repositories_settings, notification_repository):
if not repositories_settings:
return True
for repository in repositories_settings:
if notification_repository == repository:
return True
return False
def __is_valid_phone_number(phone_number):
if PHONE_NUMBER_PATTERN.match(phone_number):
return True
else:
return False
def __is_valid_repository_name(repository_name):
if REPOSITORY_PATTERN.match(repository_name):
return True
else:
return False
def __read_settings():
settings = {}
with open(SETTINGS_JSON_FILE_NAME, 'r+') as settings_file:
try:
settings = json.load(settings_file)
except ValueError:
app.logger.warning("Cannot load configuration.")
return settings
def __validate_settings(settings):
errors = []
if not settings.get(TWILIO_ACCOUNT_SID):
errors.append('Twilio Account Sid is required')
if not settings.get(TWILIO_AUTH_TOKEN):
errors.append('Twilio Auth Token is required')
if not settings.get(TWILIO_FROM_NUMBER):
errors.append('Twilio From Number is required')
else:
if not __is_valid_phone_number(settings.get(TWILIO_FROM_NUMBER)):
errors.append("Invalid Twilio From Number: " + settings.get(TWILIO_FROM_NUMBER))
if not settings.get(TO_NUMBERS):
errors.append('At least one number to send SMS to is required')
else:
for to_number in settings.get(TO_NUMBERS):
if not __is_valid_phone_number(to_number):
errors.append("Invalid phone number: " + to_number)
if settings.get(REPOSITORIES):
for repository in settings.get(REPOSITORIES):
if not __is_valid_repository_name(repository):
errors.append("Invalid repository name format: " + repository)
return errors
if __name__ == '__main__':
app.secret_key = 'Uqtbl6HxgNWcJsuycuXtHQyR8ExiaNHm'
app.debug = True
app.run()
|
mit
| 1,487,203,907,290,717,700
| 36.928934
| 119
| 0.633565
| false
| 3.816139
| false
| false
| false
|
unixnut/cpylmnl
|
cpylmnl/linux/genetlinkh.py
|
1
|
1734
|
# -*- coding: utf-8 -*-
import ctypes
from cpylmnl.nlstruct import NLStructure
import cpylmnl.linux.netlinkh as netlink
GENL_NAMSIZ = 16 # length of family name
GENL_MIN_ID = netlink.NLMSG_MIN_TYPE
GENL_MAX_ID = 1023
class Genlmsghdr(NLStructure):
"""struct genlmsghdr
"""
_fields_ = [("cmd", ctypes.c_uint8), # __u8 cmd
("version", ctypes.c_uint8), # __u8 version
("reserved", ctypes.c_uint16)] # __u16 reserved
GENL_HDR_LEN = netlink.NLMSG_ALIGN(ctypes.sizeof(Genlmsghdr))
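# Hedged sketch: assuming NLStructure supports ctypes-style keyword
# construction, a generic netlink header for a "get family" request would be
#   hdr = Genlmsghdr(cmd=CTRL_CMD_GETFAMILY, version=1, reserved=0)
# and its wire size is GENL_HDR_LEN (sizeof == 4 bytes, NLMSG_ALIGNed).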
GENL_ADMIN_PERM = 0x01
GENL_CMD_CAP_DO = 0x02
GENL_CMD_CAP_DUMP = 0x04
GENL_CMD_CAP_HASPOL = 0x08
# List of reserved static generic netlink identifiers:
GENL_ID_GENERATE = 0
GENL_ID_CTRL = netlink.NLMSG_MIN_TYPE
GENL_ID_VFS_DQUOT = netlink.NLMSG_MIN_TYPE + 1
GENL_ID_PMCRAID = netlink.NLMSG_MIN_TYPE + 2
# Controller
# enum
CTRL_CMD_UNSPEC = 0
CTRL_CMD_NEWFAMILY = 1
CTRL_CMD_DELFAMILY = 2
CTRL_CMD_GETFAMILY = 3
CTRL_CMD_NEWOPS = 4
CTRL_CMD_DELOPS = 5
CTRL_CMD_GETOPS = 6
CTRL_CMD_NEWMCAST_GRP = 7
CTRL_CMD_DELMCAST_GRP = 8
CTRL_CMD_GETMCAST_GRP = 9
__CTRL_CMD_MAX = 10
CTRL_CMD_MAX = (__CTRL_CMD_MAX - 1)
# enum
CTRL_ATTR_UNSPEC = 0
CTRL_ATTR_FAMILY_ID = 1
CTRL_ATTR_FAMILY_NAME = 2
CTRL_ATTR_VERSION = 3
CTRL_ATTR_HDRSIZE = 4
CTRL_ATTR_MAXATTR = 5
CTRL_ATTR_OPS = 6
CTRL_ATTR_MCAST_GROUPS = 7
__CTRL_ATTR_MAX = 8
CTRL_ATTR_MAX = (__CTRL_ATTR_MAX - 1)
# enum
CTRL_ATTR_OP_UNSPEC = 0
CTRL_ATTR_OP_ID = 1
CTRL_ATTR_OP_FLAGS = 2
__CTRL_ATTR_OP_MAX = 3
CTRL_ATTR_OP_MAX = (__CTRL_ATTR_OP_MAX - 1)
# enum
CTRL_ATTR_MCAST_GRP_UNSPEC = 0
CTRL_ATTR_MCAST_GRP_NAME = 1
CTRL_ATTR_MCAST_GRP_ID = 2
__CTRL_ATTR_MCAST_GRP_MAX = 3
CTRL_ATTR_MCAST_GRP_MAX = (__CTRL_ATTR_MCAST_GRP_MAX - 1)
|
lgpl-2.1
| -5,343,383,620,565,762,000
| 23.083333
| 64
| 0.672434
| false
| 2.162095
| false
| false
| false
|
sagiss/txrm2nexus
|
txm2nexuslib/xrmnex.py
|
1
|
44684
|
#!/usr/bin/python
"""
(C) Copyright 2016-2017 Carlos Falcon, Zbigniew Reszela, Marc Rosanes
The program is distributed under the terms of the
GNU General Public License (or the Lesser GPL).
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from OleFileIO_PL import *
import numpy as np
import h5py
import sys
import struct
import datetime
import re
import pkg_resources
#import pprint
from tinydb import Query
from operator import itemgetter
from txm2nexuslib.parser import get_db, get_file_paths
SAMPLEENC = 2
DETECTORENC_Z = 23
ENERGY = 27
CURRENT = 28
ENERGYENC = 30
class FilesOrganization(object):
def __init__(self):
pass
def get_samples(self, txm_txt_script, use_existing_db=False,
use_subfolders=True, organize_by_repetitions=False):
"""Organize the files by samples"""
#prettyprinter = pprint.PrettyPrinter(indent=4)
if use_subfolders:
print("Using Subfolders for finding the files")
else:
print("Searching files through the whole root path")
root_path = os.path.dirname(os.path.abspath(txm_txt_script))
db = get_db(txm_txt_script, use_existing_db=use_existing_db)
all_file_records = db.all()
#prettyprinter.pprint(all_file_records)
dates_samples_energies = []
for record in all_file_records:
dates_samples_energies.append((record["date"],
record["sample"],
record["energy"]))
dates_samples_energies = list(set(dates_samples_energies))
samples = {}
files_query = Query()
for date_sample_energie in dates_samples_energies:
files_raw_data = {}
files_for_sample_subdict = {}
date = date_sample_energie[0]
sample = date_sample_energie[1]
energy = date_sample_energie[2]
query_impl = ((files_query.date == date) &
(files_query.sample == sample) &
(files_query.energy == energy) &
(files_query.FF == False))
records_by_sample_and_energy = db.search(query_impl)
if not organize_by_repetitions:
zps_by_sample_and_e = [record["zpz"] for record in
records_by_sample_and_energy]
zpz_positions_by_sample_e = sorted(set(zps_by_sample_and_e))
for zpz in zpz_positions_by_sample_e:
query_impl = ((files_query.date == date) &
(files_query.sample == sample) &
(files_query.energy == energy) &
(files_query.zpz == zpz) &
(files_query.FF == False))
fn_by_zpz_query = db.search(query_impl)
sorted_fn_by_zpz_query = sorted(fn_by_zpz_query,
key=itemgetter('angle'))
files = get_file_paths(sorted_fn_by_zpz_query, root_path,
use_subfolders=use_subfolders)
files_raw_data[zpz] = files
else:
repetitions_by_sample_and_e = [record["repetition"] for record
in records_by_sample_and_energy]
repetitions_by_sample_and_e = sorted(set(
repetitions_by_sample_and_e))
for repetition in repetitions_by_sample_and_e:
query_impl = ((files_query.date == date) &
(files_query.sample == sample) &
(files_query.energy == energy) &
(files_query.repetition == repetition) &
(files_query.FF == False))
fn_by_repetition_query = db.search(query_impl)
sorted_fn_by_repetition_query = sorted(
fn_by_repetition_query, key=itemgetter('angle'))
files = get_file_paths(sorted_fn_by_repetition_query,
root_path,
use_subfolders=use_subfolders)
files_raw_data[repetition] = files
# Get FF image records
fn_ff_query_by_energy = ((files_query.date == date) &
(files_query.sample == sample) &
(files_query.energy == energy) &
(files_query.FF == True))
query_output = db.search(fn_ff_query_by_energy)
files_FF = get_file_paths(query_output, root_path,
use_subfolders=use_subfolders)
files_for_sample_subdict['tomos'] = files_raw_data
files_for_sample_subdict['ff'] = files_FF
samples[date_sample_energie] = files_for_sample_subdict
#prettyprinter.pprint(samples)
return samples
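# Descriptive note on the return value of get_samples: `samples` maps a
# (date, sample, energy) tuple to {'tomos': ..., 'ff': [...]}, where 'tomos'
# maps either a zone-plate z position (zpz) or a repetition number to the
# angle-sorted list of xrm file paths, and 'ff' holds the flat-field files
# recorded for that date/sample/energy.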
class validate_getter(object):
def __init__(self, required_fields):
self.required_fields = required_fields
def __call__(self, method):
def wrapped_method(xradia_file):
if not xradia_file.is_opened():
raise RuntimeError("XradiaFile is not opened")
for field in self.required_fields:
if not xradia_file.exists(field):
raise RuntimeError(
"%s does not exist in XradiaFile" % field)
return method(xradia_file)
return wrapped_method
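# Descriptive note: validate_getter is used below as
#   @validate_getter(["ImageInfo/PixelSize"])
#   def get_pixel_size(self): ...
# so a wrapped getter raises RuntimeError when the OLE file is not opened or
# when a required stream is missing, instead of failing inside struct.unpack.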
class XradiaFile(object):
def __init__(self, file_name):
self.file_name = file_name
self.file = None
self._axes_names = None
self._no_of_images = None
self._no_of_axes = None
self._energyenc_name = None
self._image_width = None
self._image_height = None
self._data_type = None
self._det_zero = None
self._pixel_size = None
self._dates = None
self._axes_positions = None
def __enter__(self):
self.open()
return self
def __exit__(self, type, value, traceback):
self.close()
def is_opened(self):
return self.file is not None
def open(self):
self.file = OleFileIO(self.file_name)
def close(self):
self.file.close()
def exists(self, field):
return self.file.exists(field)
@validate_getter(["SampleInfo/SampleID"])
def get_sample_id(self):
stream = self.file.openstream('SampleInfo/SampleID')
data = stream.read()
struct_fmt = '<' + '50s'
sample_id = struct.unpack(struct_fmt, data)
if sample_id != 'Unknown':
sample_id = sample_id[0]
return sample_id
@validate_getter(["ImageInfo/PixelSize"])
def get_pixel_size(self):
if self._pixel_size is None:
stream = self.file.openstream('ImageInfo/PixelSize')
data = stream.read()
struct_fmt = '<1f'
pixel_size = struct.unpack(struct_fmt, data)
self._pixel_size = pixel_size[0]
return self._pixel_size
pixel_size = property(get_pixel_size)
@validate_getter(["ImageInfo/XrayMagnification"])
def get_xray_magnification(self):
stream = self.file.openstream('ImageInfo/XrayMagnification')
data = stream.read(4)
struct_fmt = '<1f'
xray_magnification = struct.unpack(struct_fmt, data)
xray_magnification = xray_magnification[0]
if (xray_magnification != 0.0):
pass
elif (xray_magnification == 0.0 and self.pixel_size != 0.0):
# magnification in micrometers
xray_magnification = 13.0 / self.pixel_size
else:
print("Magnification could not be deduced.")
xray_magnification = 0.0
return xray_magnification
@validate_getter(["PositionInfo/MotorPositions"])
def get_axes_positions(self):
if self._axes_positions is None:
stream = self.file.openstream('PositionInfo/MotorPositions')
data = stream.read(112)
struct_fmt = '<28f'
self._axes_positions = struct.unpack(struct_fmt, data)
return self._axes_positions
axes_positions = property(get_axes_positions)
def get_sample_distance(self):
return self.axes_positions[SAMPLEENC]
sample_distance = property(get_sample_distance)
def get_detector_distance(self):
return self.axes_positions[DETECTORENC_Z] * 1000 # from mm to um
detector_distance = property(get_detector_distance)
def get_distance(self):
if (self.sampleenc_name == "Sample Z" and
self.detectorenc_name == "Detector Z"):
distance = (self.det_zero + self.detector_distance +
self.sample_distance)
return distance
@validate_getter(["ImageData1/Image1"])
def get_image(self):
stream = self.file.openstream('ImageData1/Image1')
data = stream.read()
if self.data_type == 'uint16':
struct_fmt = "<{0:10}H".format(
self.image_height * self.image_width)
imgdata = struct.unpack(struct_fmt, data)
elif self.data_type == 'float':
struct_fmt = "<{0:10}f".format(
self.image_height * self.image_width)
imgdata = struct.unpack(struct_fmt, data)
else:
print "Wrong data type"
return
image = np.flipud(np.reshape(imgdata, (self.image_height,
self.image_width), order='A'))
image = np.reshape(image, (1, self.image_height, self.image_width),
order='A')
return image
@validate_getter(["ImageData1/Image1"])
def get_image_2D(self):
stream = self.file.openstream('ImageData1/Image1')
data = stream.read()
if self.data_type == 'uint16':
struct_fmt = "<{0:10}H".format(
self.image_height * self.image_width)
imgdata = struct.unpack(struct_fmt, data)
elif self.data_type == 'float':
struct_fmt = "<{0:10}f".format(
self.image_height * self.image_width)
imgdata = struct.unpack(struct_fmt, data)
else:
print "Wrong data type"
return
image = np.flipud(np.reshape(imgdata, (self.image_height,
self.image_width), order='A'))
return image
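# Descriptive note: get_image returns the frame with an extra leading axis,
# shape (1, image_height, image_width), for stacking into the HDF5 volume,
# while get_image_2D returns the same pixels as a plain (height, width) array.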
@validate_getter(["PositionInfo/AxisNames"])
def get_axes_names(self):
if self._axes_names is None:
stream = self.file.openstream('PositionInfo/AxisNames')
data = stream.read()
lendatabytes = len(data)
formatstring = '<' + str(lendatabytes) + 'c'
struct_fmt = formatstring
axis_names_raw = struct.unpack(struct_fmt, data)
axis_names_raw = ''.join(axis_names_raw)
axis_names_raw = axis_names_raw.replace("\x00", " ")
self._axes_names = re.split('\s+\s+', axis_names_raw)
self._no_of_axes = len(self._axes_names) - 1
return self._axes_names
axes_names = property(get_axes_names)
def get_energyenc_name(self):
return self.axes_names[ENERGYENC]
energyenc_name = property(get_energyenc_name)
def get_energy_name(self):
return self.axes_names[ENERGY]
energy_name = property(get_energy_name)
def get_detectorenc_name(self):
return self.axes_names[DETECTORENC_Z]
detectorenc_name = property(get_detectorenc_name)
def get_sampleenc_name(self):
return self.axes_names[SAMPLEENC]
sampleenc_name = property(get_sampleenc_name)
def get_current_name(self):
return self.axes_names[CURRENT]
current_name = property(get_current_name)
def get_no_of_axes(self):
if self._no_of_axes is None:
self.get_axes_names()
return self._no_of_axes
no_of_axes = property(get_no_of_axes)
@validate_getter(["ImageInfo/NoOfImages"])
def get_no_of_images(self):
if self._no_of_images is None:
stream = self.file.openstream('ImageInfo/NoOfImages')
data = stream.read()
nimages = struct.unpack('<I', data)
self._no_of_images = np.int(nimages[0])
return self._no_of_images
no_of_images = property(get_no_of_images)
@validate_getter(["ImageInfo/ImageWidth"])
def get_image_width(self):
if self._image_width is None:
stream = self.file.openstream('ImageInfo/ImageWidth')
data = stream.read()
yimage = struct.unpack('<I', data)
self._image_width = np.int(yimage[0])
return self._image_width
image_width = property(get_image_width)
@validate_getter(["ImageInfo/ImageHeight"])
def get_image_height(self):
if self._image_height is None:
stream = self.file.openstream('ImageInfo/ImageHeight')
data = stream.read()
yimage = struct.unpack('<I', data)
self._image_height = np.int(yimage[0])
return self._image_height
image_height = property(get_image_height)
@validate_getter(["PositionInfo/MotorPositions"])
def get_machine_currents(self):
stream = self.file.openstream('PositionInfo/MotorPositions')
num_axes = len(self.axes_names) - 1
number_of_floats = num_axes * self.no_of_images
struct_fmt = '<' + str(number_of_floats) + 'f'
number_of_bytes = number_of_floats * 4 # 4 bytes every float
data = stream.read(number_of_bytes)
axis = struct.unpack(struct_fmt, data)
currents = self.no_of_images * [0]
for i in range(self.no_of_images):
currents[i] = axis[self.no_of_axes * i + CURRENT] # In mA
return currents
@validate_getter([])
def get_energies(self):
if (self.energyenc_name.lower() == "energyenc"):
if self.file.exists('PositionInfo/MotorPositions'):
stream = self.file.openstream('PositionInfo/MotorPositions')
number_of_floats = self.no_of_axes * self.no_of_images
struct_fmt = '<' + str(number_of_floats) + 'f'
number_of_bytes = number_of_floats * 4 # 4 bytes every float
data = stream.read(number_of_bytes)
axis = struct.unpack(struct_fmt, data)
energies = self.no_of_images * [0]
for i in range(self.no_of_images):
energies[i] = axis[self.no_of_axes * i + ENERGYENC] # In eV
# Energy for each image calculated from Energy motor ####
elif (self.energy_name == "Energy"):
if self.file.exists('PositionInfo/MotorPositions'):
stream = self.file.openstream('PositionInfo/MotorPositions')
number_of_floats = self.no_of_axes * self.no_of_images
struct_fmt = '<' + str(number_of_floats) + 'f'
number_of_bytes = number_of_floats * 4 # 4 bytes every float
data = stream.read(number_of_bytes)
axis = struct.unpack(struct_fmt, data)
energies = self.no_of_images * [0]
for i in range(self.no_of_images):
energies[i] = axis[self.no_of_axes * i + ENERGY] # In eV
# Energy for each image calculated from ImageInfo ####
elif self.file.exists('ImageInfo/Energy'):
stream = self.file.openstream('ImageInfo/Energy')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
try: # we found some txrm images (flatfields) with different encoding of data
energies = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack energies with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
energies = struct.unpack(struct_fmt, data)
else:
raise RuntimeError("There is no information about the energies at"
"which have been taken the different images.")
return energies
@validate_getter(["ImageInfo/ExpTimes"])
def get_exp_times(self):
stream = self.file.openstream('ImageInfo/ExpTimes')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
try: # we found some txrm images (flatfields) with different encoding of data
exp_times = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack exposure times with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
exp_times = struct.unpack(struct_fmt, data)
return exp_times
@validate_getter(['ImageInfo/Angles'])
def get_angles(self):
stream = self.file.openstream('ImageInfo/Angles')
data = stream.read()
struct_fmt = '<{0:10}f'.format(self.no_of_images)
angles = struct.unpack(struct_fmt, data)
return angles
@validate_getter(['ImageInfo/XPosition'])
def get_x_positions(self):
stream = self.file.openstream('ImageInfo/XPosition')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
# Found some txrm images with different encoding of data #
try:
positions = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack XPositions with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
positions = struct.unpack(struct_fmt, data)
return positions
@validate_getter(['ImageInfo/YPosition'])
def get_y_positions(self):
stream = self.file.openstream('ImageInfo/YPosition')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
# Found some txrm images with different encoding of data #
try:
positions = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack YPositions with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
positions = struct.unpack(struct_fmt, data)
return positions
@validate_getter(['ImageInfo/ZPosition'])
def get_z_positions(self):
stream = self.file.openstream('ImageInfo/ZPosition')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
# Found some txrm images with different encoding of data #
try:
positions = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack ZPositions with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
positions = struct.unpack(struct_fmt, data)
return positions
@validate_getter(["ImageInfo/DataType"])
def get_data_type(self):
if self._data_type is None:
stream = self.file.openstream('ImageInfo/DataType')
data = stream.read()
struct_fmt = '<1I'
datatype = struct.unpack(struct_fmt, data)
datatype = int(datatype[0])
if datatype == 5:
self._data_type = 'uint16'
else:
self._data_type = 'float'
return self._data_type
data_type = property(get_data_type)
@validate_getter(["ImageInfo/Date"])
def get_single_date(self):
stream = self.file.openstream('ImageInfo/Date')
data = stream.read()
date = struct.unpack('<' + '17s23x', data)[0]
[day, hour] = date.split(" ")
[month, day, year] = day.split("/")
[hour, minute, second] = hour.split(":")
year = '20' + year
year = int(year)
month = int(month)
day = int(day)
hour = int(hour)
minute = int(minute)
second = int(second)
raw_time = datetime.datetime(year, month, day,
hour, minute, second)
time_iso = raw_time.isoformat()
return time_iso
@validate_getter(["ImageInfo/Date"])
def get_dates(self):
if self._dates is None:
stream = self.file.openstream('ImageInfo/Date')
data = stream.read()
self._dates = struct.unpack('<' + '17s23x' * self.no_of_images,
data)
return self._dates
dates = property(get_dates)
def get_start_date(self):
startdate = self.dates[0]
[day, hour] = startdate.split(" ")
[month, day, year] = day.split("/")
[hour, minute, second] = hour.split(":")
year = '20' + year
year = int(year)
month = int(month)
day = int(day)
hour = int(hour)
minute = int(minute)
second = int(second)
starttime = datetime.datetime(year, month, day,
hour, minute, second)
starttimeiso = starttime.isoformat()
return starttimeiso
def get_end_date(self):
enddate = self.dates[self.no_of_images - 1]
[endday, endhour] = enddate.split(" ")
[endmonth, endday, endyear] = endday.split("/")
[endhour, endminute, endsecond] = endhour.split(":")
endyear = '20' + endyear
endyear = int(endyear)
endmonth = int(endmonth)
endday = int(endday)
endhour = int(endhour)
endminute = int(endminute)
endsecond = int(endsecond)
endtime = datetime.datetime(endyear, endmonth, endday,
endhour, endminute, endsecond)
endtimeiso = endtime.isoformat()
return endtimeiso
def get_det_zero(self):
where_detzero = ("ConfigureBackup/ConfigCamera/" +
"Camera 1/ConfigZonePlates/DetZero")
if self._det_zero is None and self.file.exists(where_detzero):
stream = self.file.openstream("ConfigureBackup/ConfigCamera/" +
"Camera 1/ConfigZonePlates/DetZero")
data = stream.read()
if len(data) != 0:
struct_fmt = '<1f'
sample_to_detector_zero_enc = struct.unpack(struct_fmt, data)
self._det_zero = sample_to_detector_zero_enc[0]
else:
self._det_zero = 0
elif self._det_zero is None:  # do not overwrite a previously cached value
self._det_zero = 0
return self._det_zero
det_zero = property(get_det_zero)
class xrmNXtomo(object):
definition = 'NXtomo'
# CCD detector pixelsize in micrometers
CCDdetector_pixelsize = 13
CCDdetector_pixelsize_unit = 'um'
def __init__(self, reader, ffreader, file_order, program_name,
hdf5_output_path=None, title='X-ray tomography',
zero_deg_in=None, zero_deg_final=None, sourcename='ALBA',
sourcetype='Synchrotron X-ray Source',
sourceprobe='x-ray', instrument='BL09 @ ALBA',
sample='Unknown'):
self.reader = reader
self.ff_reader = ffreader
if hdf5_output_path is None:
path = reader.get_sample_path()
else:
path = hdf5_output_path
sample_name = reader.get_sample_name()
splitted_file = sample_name.split('_')
sample_dir_name = '{0}_{1}'.format(splitted_file[0], splitted_file[1])
path = os.path.join(path, sample_dir_name)
if not os.path.exists(path):
os.makedirs(path)
self.hdf5_file_name = os.path.join(path, "%s.hdf5" % sample_name)
self.txrmhdf = h5py.File(self.hdf5_file_name, 'w')
self.filename_zerodeg_in = zero_deg_in
self.filename_zerodeg_final = zero_deg_final
self.nxentry = None
self.nxsample = None
self.nxmonitor = None
self.nxinstrument = None
self.nxdata = None
self.nxdetectorsample = None
self.nxsource = None
self.count_num_sequence = 0
self.num_sample_sequence = []
self.num_bright_sequence = []
self.num_dark_sequence = []
self.program_name = program_name
version = pkg_resources.get_distribution("txrm2nexus").version
self.program_version = version
self.title = title
self.sourcename = sourcename
self.sourcetype = sourcetype
self.sourceprobe = sourceprobe
self.instrument = instrument
self.sample = sample
self.file_order = list(file_order)
self.datatype_zerodeg = 'uint16'
self.numrows_zerodeg = 0
self.numcols_zerodeg = 0
self.filename_zerodeg_in = zero_deg_in
self.filename_zerodeg_final = zero_deg_final
self.numrows = 0
self.numcols = 0
self.nSampleFrames = 0
self.datatype = None
self.numrows_bright = 0
self.numcols_bright = 0
self.nFramesBright = 0
self.datatype_bright = 'uint16'
def convert_metadata(self):
self.nxentry = self.txrmhdf.create_group(self.definition)
self.nxentry.attrs['NX_class'] = "NXentry"
self.nxentry.create_dataset("title", data=self.title)
self.nxentry.create_dataset("definition", data=self.definition)
self.nxinstrument = self.nxentry.create_group("instrument")
self.nxsample = self.nxentry.create_group("sample")
self.nxmonitor = self.nxentry.create_group("control")
self.nxdata = self.nxentry.create_group("data")
self.nxmonitor.attrs['NX_class'] = "NXmonitor"
self.nxsample.attrs['NX_class'] = "NXsample"
self.nxdata.attrs['NX_class'] = "NXdata"
self.nxinstrument.attrs['NX_class'] = "NXinstrument"
self.nxinstrument['name'] = self.instrument
pixel_size = "%d %s" % (self.CCDdetector_pixelsize,
self.CCDdetector_pixelsize_unit)
self.nxinstrument['name'].attrs['CCD pixel size'] = pixel_size
self.nxsource= self.nxinstrument.create_group("source")
self.nxdetectorsample = self.nxinstrument.create_group("sample")
self.nxsource.attrs['NX_class'] = "NXsource"
self.nxdetectorsample.attrs['NX_class'] = "NXdetector"
self.nxinstrument['source']['name'] = self.sourcename
self.nxinstrument['source']['type'] = self.sourcetype
self.nxinstrument['source']['probe'] = self.sourceprobe
self.nxentry['program_name'] = self.program_name
self.nxentry['program_name'].attrs['version'] = self.program_version
self.nxentry['program_name'].attrs['configuration'] = \
(self.program_name + ' ' + ' '.join(sys.argv[1:]))
# Sample-ID
sample_name = self.reader.get_sample_name()
self.nxsample['name'] = sample_name
distance = self.reader.get_distance()
self.nxdetectorsample.create_dataset("distance", data=distance)
self.nxdetectorsample["distance"].attrs["units"] = "um"
# Pixel-size
pixel_size = self.reader.get_pixel_size()
self.nxdetectorsample.create_dataset("x_pixel_size",
data=pixel_size)
self.nxdetectorsample.create_dataset("y_pixel_size",
data=pixel_size)
self.nxdetectorsample["x_pixel_size"].attrs["units"] = "um"
self.nxdetectorsample["y_pixel_size"].attrs["units"] = "um"
# X-Ray Magnification
magnification = self.reader.get_xray_magnification()
self.nxdetectorsample['magnification'] = magnification
# Accelerator current for each image (machine current)
currents = self.reader.get_machine_currents()
self.nxdetectorsample['current'] = currents
self.nxdetectorsample['current'].attrs["units"] = "mA"
# Energy for each image:
energies = self.reader.get_energies()
self.nxsource["energy"] = energies
self.nxsource["energy"].attrs["units"] = "eV"
# Exposure Times
exptimes = self.reader.get_exp_times()
self.nxdetectorsample["ExpTimes"] = exptimes
self.nxdetectorsample["ExpTimes"].attrs["units"] = "s"
# Start and End Times
starttimeiso = self.reader.get_start_time()
self.nxentry['start_time'] = str(starttimeiso)
endtimeiso = self.reader.get_end_time()
self.nxentry['end_time'] = str(endtimeiso)
# Sample rotation angles
angles = self.reader.get_angles()
self.nxsample['rotation_angle'] = angles
self.nxsample["rotation_angle"].attrs["units"] = "degrees"
# h5py NeXus link
source_addr = '/NXtomo/sample/rotation_angle'
target_addr = 'rotation_angle'
self.nxsample['rotation_angle'].attrs['target'] = source_addr
self.nxdata._id.link(source_addr, target_addr, h5py.h5g.LINK_HARD)
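# Descriptive note: the low-level h5py call above creates an HDF5 hard link so
# that /NXtomo/data/rotation_angle and /NXtomo/sample/rotation_angle refer to
# the same dataset, as NXtomo readers expect.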
# X sample translation: nxsample['z_translation']
xpositions = self.reader.get_x_positions()
self.nxsample['x_translation'] = xpositions
self.nxsample['x_translation'].attrs['units'] = 'um'
# Y sample translation: nxsample['z_translation']
ypositions = self.reader.get_y_positions()
self.nxsample['y_translation'] = ypositions
self.nxsample['y_translation'].attrs['units'] = 'um'
# Z sample translation: nxsample['z_translation']
zpositions = self.reader.get_z_positions()
self.nxsample['z_translation'] = zpositions
self.nxsample['z_translation'].attrs['units'] = 'um'
def _convert_samples(self):
self.numrows, self.numcols = self.reader.get_image_size()
data_type = self.reader.get_data_type()
self.nSampleFrames = self.reader.get_images_number()
if data_type == 'float':
self.datatype = 'float32'
else:
self.datatype = data_type
self.nxdetectorsample.create_dataset(
"data",
shape=(self.nSampleFrames,
self.numrows,
self.numcols),
chunks=(1,
self.numrows,
self.numcols),
dtype=self.datatype)
self.nxdetectorsample['data'].attrs[
'Data Type'] = self.datatype
self.nxdetectorsample[
'data'].attrs['Number of Frames'] = self.nSampleFrames
self.nxdetectorsample['data'].attrs[
'Image Height'] = self.numrows
self.nxdetectorsample['data'].attrs[
'Image Width'] = self.numcols
for numimage in range(self.nSampleFrames):
self.count_num_sequence = self.count_num_sequence + 1
tomoimagesingle = self.reader.get_image(numimage)
self.num_sample_sequence.append(
self.count_num_sequence)
self.nxdetectorsample['data'][numimage] = tomoimagesingle
if numimage % 20 == 0:
print('Image %i converted' % numimage)
if numimage + 1 == self.nSampleFrames:
print ('%i images converted\n' % self.nSampleFrames)
# h5py NeXus link
source_addr = '/NXtomo/instrument/sample/data'
target_addr = 'data'
self.nxdetectorsample['data'].attrs[
'target'] = source_addr
self.nxdata._id.link(source_addr, target_addr,
h5py.h5g.LINK_HARD)
def _convert_bright(self):
self.datatype_bright = self.ff_reader.get_data_type()
self.numrows_bright, self.numcols_bright = \
self.ff_reader.get_image_size()
self.nFramesBright = self.ff_reader.get_images_number()
self.nxbright = self.nxinstrument.create_group("bright_field")
self.nxbright.attrs['NX_class'] = "Unknown"
self.nxbright.create_dataset(
"data",
shape=(self.nFramesBright,
self.numrows_bright,
self.numcols_bright),
chunks=(1,
self.numrows_bright,
self.numcols_bright),
dtype=self.datatype_bright)
self.nxbright['data'].attrs['Data Type'] = \
self.datatype_bright
self.nxbright['data'].attrs['Image Height'] = \
self.numrows_bright
self.nxbright['data'].attrs['Image Width'] = \
self.numcols_bright
for numimage in range(self.nFramesBright):
if numimage + 1 == self.nFramesBright:
print ('%i Bright-Field images '
'converted\n' % self.nFramesBright)
self.count_num_sequence = self.count_num_sequence + 1
tomoimagesingle = self.ff_reader.get_image(numimage)
self.num_bright_sequence.append(self.count_num_sequence)
self.nxbright['data'][numimage] = tomoimagesingle
# Accelerator current for each image of FF (machine current)
ff_currents = self.ff_reader.get_machine_currents()
self.nxbright.create_dataset("current", data=ff_currents)
self.nxbright["current"].attrs["units"] = "mA"
# Exposure Times
exp_times = self.ff_reader.get_exp_times()
self.nxbright.create_dataset("ExpTimes", data=exp_times)
self.nxbright["ExpTimes"].attrs["units"] = "s"
def _convert_zero_deg_images(self, ole_zerodeg):
verbose = False
# DataType: 10 float; 5 uint16 (unsigned 16-bit (2-byte) integers)
if ole_zerodeg.exists('ImageInfo/DataType'):
stream = ole_zerodeg.openstream('ImageInfo/DataType')
data = stream.read()
struct_fmt = '<1I'
datatype_zerodeg = struct.unpack(struct_fmt, data)
datatype_zerodeg = int(datatype_zerodeg[0])
if datatype_zerodeg == 5:
self.datatype_zerodeg = 'uint16'
else:
self.datatype_zerodeg = 'float'
if verbose:
print "ImageInfo/DataType: %s " % self.datatype_zerodeg
else:
print("There is no information about DataType")
# Zero degrees data size
if (ole_zerodeg.exists('ImageInfo/NoOfImages') and
ole_zerodeg.exists('ImageInfo/ImageWidth') and
ole_zerodeg.exists('ImageInfo/ImageHeight')):
stream = ole_zerodeg.openstream('ImageInfo/ImageHeight')
data = stream.read()
yimage = struct.unpack('<I', data)
self.numrows_zerodeg = np.int(yimage[0])
if verbose:
print "ImageInfo/ImageHeight = %i" % yimage[0]
stream = ole_zerodeg.openstream('ImageInfo/ImageWidth')
data = stream.read()
ximage = struct.unpack('<I', data)
self.numcols_zerodeg = np.int(ximage[0])
if verbose:
print "ImageInfo/ImageWidth = %i" % ximage[0]
else:
print('There is no information about the 0 degrees image size '
'(ImageHeight, or about ImageWidth)')
if ole_zerodeg.exists('ImageData1/Image1'):
img_string = "ImageData1/Image1"
stream = ole_zerodeg.openstream(img_string)
data = stream.read()
if self.datatype_zerodeg == 'uint16':
struct_fmt = "<{0:10}H".format(self.numrows_zerodeg *
self.numcols_zerodeg)
imgdata = struct.unpack(struct_fmt, data)
elif self.datatype_zerodeg == 'float':
struct_fmt = "<{0:10}f".format(self.numrows_zerodeg *
self.numcols_zerodeg)
imgdata = struct.unpack(struct_fmt, data)
else:
print "Wrong data type"
imgdata_zerodeg = np.flipud(np.reshape(imgdata,
(self.numrows_zerodeg,
self.numcols_zerodeg),
order='A'))
else:
imgdata_zerodeg = 0
return imgdata_zerodeg
def convert_tomography(self):
# TODO: 0 degree images not implemented in xrm2nexus
if self.filename_zerodeg_in is not None:
ole_zerodeg_in = OleFileIO(self.filename_zerodeg_in)
image_zerodeg_in = self._convert_zero_deg_images(ole_zerodeg_in)
self.nxdetectorsample.create_dataset(
'0_degrees_initial_image',
data=image_zerodeg_in,
dtype=self.datatype_zerodeg)
self.nxdetectorsample['0_degrees_initial_image'].attrs[
'Data Type'] = self.datatype_zerodeg
self.nxdetectorsample['0_degrees_initial_image'].attrs[
'Image Height'] = self.numrows_zerodeg
self.nxdetectorsample['0_degrees_initial_image'].attrs[
'Image Width'] = self.numcols_zerodeg
print('Zero degrees initial image converted')
if self.filename_zerodeg_final is not None:
ole_zerodeg_final = OleFileIO(self.filename_zerodeg_final)
image_zerodeg_final = self._convert_zero_deg_images(
ole_zerodeg_final)
self.nxdetectorsample.create_dataset(
'0_degrees_final_image',
data=image_zerodeg_final,
dtype=self.datatype_zerodeg)
self.nxdetectorsample['0_degrees_final_image'].attrs[
'Data Type'] = self.datatype_zerodeg
self.nxdetectorsample['0_degrees_final_image'].attrs[
'Image Height'] = self.numrows_zerodeg
self.nxdetectorsample['0_degrees_final_image'].attrs[
'Image Width'] = self.numcols_zerodeg
print('Zero degrees final image converted')
print("\nConverting tomography image data from xrm(s) to NeXus HDF5.")
brightexists = False
darkexists = False
for file in self.file_order:
# Tomography Data Images
if file == 's':
self._convert_samples()
# Bright-Field
elif file == 'b':
brightexists = True
self._convert_bright()
# Post-Dark-Field
elif file == 'd':
darkexists = True
# TODO
pass
self.nxinstrument['sample']['sequence_number'] = \
self.num_sample_sequence
if brightexists:
self.nxinstrument['bright_field']['sequence_number'] = \
self.num_bright_sequence
if darkexists:
self.nxinstrument['dark_field']['sequence_number'] = \
self.num_dark_sequence
# NXMonitor data: Not used in TXM microscope.
# In the ALBA-BL09 case all the values will be set to 1.
monitor_size = self.nSampleFrames + self.nFramesBright
monitor_counts = np.ones(monitor_size, dtype=np.uint16)
self.nxmonitor['data'] = monitor_counts
# Flush and close the nexus file
self.txrmhdf.flush()
self.txrmhdf.close()
class xrmReader(object):
def __init__(self, file_names):
self.file_names = file_names
def get_images_number(self):
return len(self.file_names)
def get_pixel_size(self):
file_name = self.file_names[0]
with XradiaFile(file_name) as xrm_file:
return xrm_file.pixel_size
def get_exp_times(self):
exp_times = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
exp_times.extend(xrm_file.get_exp_times())
return exp_times
def get_machine_currents(self):
currents = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
currents.extend(xrm_file.get_machine_currents())
return currents
def get_energies(self):
energies = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
energies.extend(xrm_file.get_energies())
return energies
def get_start_time(self):
filename = self.file_names[0]
with XradiaFile(filename) as xrm_file:
return xrm_file.get_start_date()
def get_end_time(self):
filename = self.file_names[-1]
with XradiaFile(filename) as xrm_file:
return xrm_file.get_end_date()
def get_angles(self):
angles = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
angles.extend(xrm_file.get_angles())
return angles
def get_x_positions(self):
positions = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
positions.extend(xrm_file.get_x_positions())
return positions
def get_y_positions(self):
positions = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
positions.extend(xrm_file.get_y_positions())
return positions
def get_z_positions(self):
positions = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
positions.extend(xrm_file.get_z_positions())
return positions
def get_image(self, id):
"""
:param id: index of the image in the file sequence
:return: image data
"""
filename = self.file_names[id]
with XradiaFile(filename) as xrm_file:
return xrm_file.get_image()
def get_distance(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.get_distance()
def get_sample_id(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.get_sample_id()
def get_xray_magnification(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.get_xray_magnification()
def get_data_type(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.data_type
def get_image_size(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.image_height, xrm_file.image_width
def get_sample_name(self):
filename = self.file_names[0]
file = filename.rsplit('/', 1)[1]
splitted_file = file.split('_')
tomo_name = splitted_file[1]
energy = splitted_file[2]
pos_ext = splitted_file[-1].find('.xrm')
conf = splitted_file[-1][:pos_ext]
return '{0}_{1}_{2}_{3}'.format(splitted_file[0],
tomo_name,
energy,
conf)
def get_sample_path(self):
filename = self.file_names[0]
path = filename.rsplit('/', 1)[0]
return path
|
gpl-3.0
| -3,555,758,332,532,904,400
| 37.720971
| 143
| 0.567362
| false
| 3.815884
| false
| false
| false
|
Teknologforeningen/tf-info
|
apps/reittiopas/views.py
|
1
|
2286
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.conf import settings
from operator import itemgetter
from datetime import datetime, timedelta
import json
import urllib2
import re
# Get API user and token from settings
user = settings.REITTIOPAS_USER
token = settings.REITTIOPAS_TOKEN
stops = settings.REITTIOPAS_STOPS
def index(request):
all_departures = []
for stop in stops:
try:
response = urllib2.urlopen("http://api.reittiopas.fi/hsl/prod/?user=%s&pass=%s&request=stop&code=%s"%(user,token,stop))
except:
return HttpResponse("Unable to access reittiopas API.", status=500)
try:
stop_departures = json.load(response)[0]
except ValueError as e:
return HttpResponse("Error parsing json from reittiopas", status=500)
# Parse line destinations from codes
lines_dict = {}
for item in stop_departures['lines']:
parts = item.split(':')
lines_dict[parts[0]] = parts[1]
# Parse departures
departures = []
for departure in stop_departures['departures']:
# Convert code to actual line number
departure['line'] = re.sub(r'^\d0*(\d?\w*) .*', r'\1',departure['code'])
departure['stop'] = stop_departures['name_fi']
# Add destination name to departure item
departure['dest'] = lines_dict[departure['code']]
# Create datetime object to sort departures by
if departure['time'] >= 2400:
departure['time'] = departure['time']-2400
dt = datetime.strptime('%d%d'%(departure['date'], departure['time']), "%Y%m%d%H%M")
departure['datetime'] = dt + timedelta(days=1)
else:
departure['datetime'] = datetime.strptime('%d%d'%(departure['date'], departure['time']), "%Y%m%d%H%M")
departures.append(departure)
all_departures = all_departures + departures
sorted_departures = sorted(all_departures, key=itemgetter('datetime'))[:10]
return render_to_response('reittiopas/index.html', {"departures": sorted_departures}, context_instance=RequestContext(request))
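# Editor's note (illustrative, not part of the original view code): the re.sub()
# call in index() above turns HSL JORE codes into display line numbers by
# dropping the leading area digit and zero padding, e.g.
#   re.sub(r'^\d0*(\d?\w*) .*', r'\1', '2550  2')  -> '550'
#   re.sub(r'^\d0*(\d?\w*) .*', r'\1', '1006T 1')  -> '6T'
# (the example codes are hypothetical). Departure times of 2400 or later are
# shifted back by 2400 and the parsed datetime is moved one day forward, so
# after-midnight departures sort after the current day's departures.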
|
bsd-3-clause
| -5,200,406,112,815,346,000
| 36.491803
| 131
| 0.631234
| false
| 3.914384
| false
| false
| false
|
quantumlib/Cirq
|
cirq-core/cirq/ops/diagonal_gate.py
|
1
|
8669
|
# Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates the gate instance for any number qubits diagonal gate.
The gate is used to create a (2^n)x(2^n) matrix with the diagonal elements
passed as a list.
"""
from typing import AbstractSet, Any, Iterator, List, Optional, Sequence, Tuple, TYPE_CHECKING, Union
import numpy as np
import sympy
from cirq import protocols, value
from cirq._compat import proper_repr
from cirq.ops import common_gates, raw_types, global_phase_op
if TYPE_CHECKING:
import cirq
def _fast_walsh_hadamard_transform(a: Tuple[Any, ...]) -> np.ndarray:
"""Fast Walsh–Hadamard Transform of an array."""
h = 1
a_ = np.array(a)
while h < len(a_):
for i in range(0, len(a_), h * 2):
for j in range(i, i + h):
x = a_[j]
y = a_[j + h]
a_[j] = x + y
a_[j + h] = x - y
h *= 2
return a_
def _gen_gray_code(n: int) -> Iterator[Tuple[int, int]]:
"""Generate the Gray Code from 0 to 2^n-1.
Each iteration yields a two-tuple, `(gray_code, bit_flip)`. `gray_code` is the decimal
    representation of the gray code and `bit_flip` is the position of the bit flipped
    to reach the next gray code.
"""
gray_code = 0
for i in range(1, 2 ** n):
next_gray = i ^ (i >> 1)
bit_flip = int(np.log2(gray_code ^ next_gray))
yield gray_code, bit_flip
gray_code = next_gray
yield gray_code, int(np.log2(gray_code))
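# Editor's note (illustrative, not part of the original source): for n = 2 the
# generator above yields (0, 0), (1, 1), (3, 0), (2, 1) -- the gray codes
# 00, 01, 11, 10 each paired with the index of the bit flipped to reach the next
# code (the final entry gives the bit flipped to return to 0).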
@value.value_equality()
class DiagonalGate(raw_types.Gate):
"""A gate given by a diagonal (2^n)\\times(2^n) matrix."""
def __init__(self, diag_angles_radians: Sequence[value.TParamVal]) -> None:
r"""A n-qubit gate with only diagonal elements.
This gate's off-diagonal elements are zero and it's on diagonal
elements are all phases.
Args:
diag_angles_radians: The list of angles on the diagonal in radians.
If these values are $(x_0, x_1, \ldots , x_N)$ then the unitary
has diagonal values $(e^{i x_0}, e^{i x_1}, \ldots, e^{i x_N})$.
"""
self._diag_angles_radians: Tuple[value.TParamVal, ...] = tuple(diag_angles_radians)
def _num_qubits_(self):
return int(np.log2(len(self._diag_angles_radians)))
def _is_parameterized_(self) -> bool:
return any(protocols.is_parameterized(angle) for angle in self._diag_angles_radians)
def _parameter_names_(self) -> AbstractSet[str]:
return {
name for angle in self._diag_angles_radians for name in protocols.parameter_names(angle)
}
def _resolve_parameters_(
self, resolver: 'cirq.ParamResolver', recursive: bool
) -> 'DiagonalGate':
return DiagonalGate(
protocols.resolve_parameters(self._diag_angles_radians, resolver, recursive)
)
def _has_unitary_(self) -> bool:
return not self._is_parameterized_()
def _unitary_(self) -> Optional[np.ndarray]:
if self._is_parameterized_():
return None
return np.diag([np.exp(1j * angle) for angle in self._diag_angles_radians])
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
for index, angle in enumerate(self._diag_angles_radians):
subspace_index = args.subspace_index(big_endian_bits_int=index)
args.target_tensor[subspace_index] *= np.exp(1j * angle)
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
rounded_angles = np.array(self._diag_angles_radians)
if args.precision is not None:
rounded_angles = rounded_angles.round(args.precision)
if len(rounded_angles) <= 4:
rounded_angles_str = ', '.join(proper_repr(angle) for angle in rounded_angles)
diag_str = f'diag({rounded_angles_str})'
else:
diag_str = ', '.join(proper_repr(angle) for angle in rounded_angles[:2])
diag_str += ', ..., '
diag_str += ', '.join(proper_repr(angle) for angle in rounded_angles[-2:])
diag_str = f'diag({diag_str})'
return protocols.CircuitDiagramInfo(
[diag_str] + ['#' + str(i) for i in range(2, self._num_qubits_() + 1)]
)
def __pow__(self, exponent: Any) -> 'DiagonalGate':
if not isinstance(exponent, (int, float, sympy.Basic)):
return NotImplemented
angles = []
for angle in self._diag_angles_radians:
mul_angle = protocols.mul(angle, exponent, NotImplemented)
angles.append(mul_angle)
return DiagonalGate(angles)
def _value_equality_values_(self) -> Any:
return tuple(self._diag_angles_radians)
def _decompose_for_basis(
self, index: int, bit_flip: int, theta: float, qubits: Sequence['cirq.Qid']
) -> Iterator[Union['cirq.ZPowGate', 'cirq.CXPowGate']]:
if index == 0:
return []
largest_digit = self._num_qubits_() - (len(bin(index)) - 2)
yield common_gates.rz(2 * theta)(qubits[largest_digit])
_flip_bit = self._num_qubits_() - bit_flip - 1
if _flip_bit < largest_digit:
yield common_gates.CNOT(qubits[largest_digit], qubits[_flip_bit])
elif _flip_bit > largest_digit:
yield common_gates.CNOT(qubits[_flip_bit], qubits[largest_digit])
def _decompose_(self, qubits: Sequence['cirq.Qid']) -> 'cirq.OP_TREE':
"""Decompose the n-qubit diagonal gates into CNOT and Rz gates.
A 3 qubits decomposition looks like
0: ───────────────────────────────────X───Rz(6)───X───Rz(7)───X───Rz(5)───X───Rz(4)───
│ │ │ │
1: ───────────X───Rz(3)───X───Rz(2)───@───────────┼───────────@───────────┼───────────
│ │ │ │
2: ───Rz(1)───@───────────@───────────────────────@───────────────────────@───────────
        where the angles in the Rz gates correspond to the fast Walsh-Hadamard transform
        of diagonal_angles in the Gray code order.
        For n qubits the decomposition looks similar but with 2^n-1 Rz gates and 2^n-2 CNOT gates.
The algorithm is implemented according to the paper:
Welch, Jonathan, et al. "Efficient quantum circuits for diagonal unitaries without
ancillas." New Journal of Physics 16.3 (2014): 033040.
https://iopscience.iop.org/article/10.1088/1367-2630/16/3/033040/meta
"""
if protocols.is_parameterized(self):
return NotImplemented
n = self._num_qubits_()
hat_angles = _fast_walsh_hadamard_transform(self._diag_angles_radians) / (2 ** n)
# There is one global phase shift between unitary matrix of the diagonal gate and the
# decomposed gates. On its own it is not physically observable. However, if using this
# diagonal gate for sub-system like controlled gate, it is no longer equivalent. Hence,
# we add global phase.
decomposed_circ: List[Any] = [
global_phase_op.GlobalPhaseOperation(np.exp(1j * hat_angles[0]))
]
for i, bit_flip in _gen_gray_code(n):
decomposed_circ.extend(self._decompose_for_basis(i, bit_flip, -hat_angles[i], qubits))
return decomposed_circ
def __repr__(self) -> str:
return 'cirq.DiagonalGate([{}])'.format(
','.join(proper_repr(angle) for angle in self._diag_angles_radians)
)
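# --- Editor's illustrative sketch (not part of the original Cirq source) ---
# Demonstrates the unitary produced by the gate defined above for two qubits:
# the diagonal entries are exactly exp(i * angle) for the supplied angles. The
# helper name is an editor's invention.
def _example_diagonal_gate_usage():
    angles = [0.0, np.pi / 2, np.pi, 3 * np.pi / 2]
    gate = DiagonalGate(angles)          # 4 angles -> a 2-qubit gate
    unitary = protocols.unitary(gate)    # 4x4 diagonal matrix of phases
    assert np.allclose(np.diag(unitary), np.exp(1j * np.array(angles)))
    return gate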
|
apache-2.0
| 8,599,835,669,843,185,000
| 40.442211
| 100
| 0.5927
| false
| 3.437682
| false
| false
| false
|
chromium/chromium
|
third_party/android_deps/libs/com_google_android_gms_play_services_basement/3pp/fetch.py
|
6
|
1389
|
#!/usr/bin/env python
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is generated, do not edit. Update BuildConfigGenerator.groovy and
# 3ppFetch.template instead.
from __future__ import print_function
import argparse
import json
import os
_FILE_URL = 'https://maven.google.com/com/google/android/gms/play-services-basement/17.5.0/play-services-basement-17.5.0.aar'
_FILE_NAME = 'play-services-basement-17.5.0.aar'
_FILE_VERSION = '17.5.0'
def do_latest():
print(_FILE_VERSION)
def get_download_url(version):
if _FILE_URL.endswith('.jar'):
ext = '.jar'
elif _FILE_URL.endswith('.aar'):
ext = '.aar'
else:
raise Exception('Unsupported extension for %s' % _FILE_URL)
partial_manifest = {
'url': [_FILE_URL],
'name': [_FILE_NAME],
'ext': ext,
}
print(json.dumps(partial_manifest))
def main():
ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
latest = sub.add_parser("latest")
latest.set_defaults(func=lambda _opts: do_latest())
download = sub.add_parser("get_url")
download.set_defaults(
func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))
opts = ap.parse_args()
opts.func(opts)
if __name__ == '__main__':
main()
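# Editor's note (illustrative, not part of the original script): the 3pp tooling
# invokes this file roughly as
#   fetch.py latest                        -> prints "17.5.0"
#   _3PP_VERSION=17.5.0 fetch.py get_url   -> prints the JSON partial manifest
# i.e. "latest" reports the pinned version above and "get_url" emits the
# download URL, file name and extension for that version.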
|
bsd-3-clause
| -1,260,905,872,956,646,100
| 23.803571
| 125
| 0.647948
| false
| 3.222738
| false
| false
| false
|
splunk/splunk-webframework
|
server/splunkdj/testlib.py
|
1
|
1436
|
#
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Helper functions for wrinting unit tests for apps built using the
Splunk Django Bindings."""
from os import path
import sys
__all__ = ["loadrc"]
# Print the given message to stderr, and optionally exit
def error(message, exitcode = None):
print >> sys.stderr, "Error: %s" % message
    if exitcode is not None: sys.exit(exitcode)
def loadrc(filepath):
"""Load a `.splunkrc` style options file and return a `dict` of option
values."""
filepath = path.expanduser(filepath) # Just in case
argv = []
try:
file = open(filepath)
except:
error("Unable to open '%s'" % filepath, 2)
result = {}
for line in file:
if line.startswith("#"): continue # Skip comment
line = line.strip()
if len(line) == 0: continue # Skip blank line
k, v = line.split('=', 1)
result[k] = v
return result
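# Editor's note (illustrative, not part of the original module): given a
# `.splunkrc` file containing, for example,
#   # Splunk connection settings
#   host=localhost
#   port=8089
#   username=admin
# loadrc("~/.splunkrc") returns {'host': 'localhost', 'port': '8089',
# 'username': 'admin'} -- comment and blank lines are skipped and every value
# is kept as a string.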
|
apache-2.0
| -6,819,429,677,982,128,000
| 29.553191
| 75
| 0.667131
| false
| 3.891599
| false
| false
| false
|
lynxis/libavg
|
src/python/app/app.py
|
1
|
12797
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# libavg - Media Playback Engine.
# Copyright (C) 2003-2013 Ulrich von Zadow
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Current versions can be found at www.libavg.de
#
# Original author of this file is OXullo Interecans <x at brainrapers dot org>
import os
import math
import time
import libavg
from libavg import avg, Point2D, mtemu
import settings
from settings import Option
import keyboardmanager
import debugpanel
import flashmessage
class MainDiv(libavg.avg.DivNode):
VERSION = 'undef'
def __init__(self, **kargs):
assert not 'parent' in kargs
super(MainDiv, self).__init__(**kargs)
self.registerInstance(self, None)
def onArgvParserCreated(self, parser):
pass
def onArgvParsed(self, options, args, parser):
pass
def onStartup(self):
pass
def onInit(self):
pass
def onExit(self):
pass
def onFrame(self):
pass
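# --- Editor's illustrative sketch (not part of the original module) ---
# A libavg application subclasses MainDiv, overrides the hooks above and hands
# an instance to App.run() (defined below); keyword arguments such as
# app_resolution map onto the options registered in App._setupSettings().
# The class and node names here are hypothetical.
#
#   class MyMainDiv(MainDiv):
#       def onInit(self):
#           self.words = avg.WordsNode(text='hello libavg', parent=self)
#
#       def onFrame(self):
#           pass
#
#   App().run(MyMainDiv(), app_resolution='640x480')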
class App(object):
def __init__(self):
self._setupInstance()
self._mainDiv = None
self._appParent = None
self._debugPanel = None
self._overlayPanel = None
self._resolution = None
self._windowSize = None
self._mtEmu = None
self.__lastFrameTimestamp = 0
self._setupSettings()
def run(self, mainDiv, **kargs):
assert isinstance(mainDiv, MainDiv)
self._mainDiv = mainDiv
self.mainDiv.settings = self._settings
self._applySettingsExtenders(kargs)
self._setupLogging()
mainDiv.onStartup()
self._setupResolution()
self._setupRootNode()
self._setupMouse()
pos, size, angle = self._getAppParentGeometry()
self._setupAppParent(pos, size, angle)
self._setupMainDiv()
self._setupTopPanel()
self._setupDebugPanel()
self._setupKeyboardManager()
self._setupDebuggingWidgets()
self._applyResolution()
self._setupOnInit()
self.onBeforeLaunch()
self.__lastFrameTimestamp = time.time()
try:
self._runLoop()
except Exception, e:
self._teardownKeyboardManager()
raise
mainDiv.onExit()
self._teardownKeyboardManager()
return 0
@property
def mainDiv(self):
return self._mainDiv
@property
def debugPanel(self):
return self._debugPanel
@property
def overlayPanel(self):
return self._overlayPanel
@property
def settings(self):
return self._settings
def onBeforeLaunch(self):
pass
def takeScreenshot(self, targetFolder='.'):
screenBmp = libavg.player.screenshot()
filenameTemplate = os.path.join(targetFolder, '%s-%03d.png')
i = 1
while i < 1000:
filename = filenameTemplate % (self.__class__.__name__, i)
if os.path.exists(filename):
i += 1
else:
break
if i == 1000:
flashmessage.FlashMessage('Maximum number of screenshots reached',
parent=self._appParent, isError=True)
else:
screenBmp.save(filename)
flashmessage.FlashMessage('Screenshot saved as %s' % filename,
parent=self._appParent)
def dumpTextObjectCount(self):
objects = libavg.player.getTestHelper().getObjectCount()
savedSeverity = libavg.logger.getCategories()[libavg.logger.Category.APP]
libavg.logger.configureCategory(libavg.logger.Category.APP,
libavg.logger.Severity.INFO)
libavg.logger.info('Dumping objects count')
for key, value in objects.iteritems():
libavg.logger.info(' %-25s: %s' % (key, value))
libavg.logger.configureCategory(libavg.logger.Category.APP, savedSeverity)
def _setupInstance(self):
import libavg.app
if libavg.app.instance is not None:
raise RuntimeError('%s has been already instantiated' %
self.__class__.__name__)
libavg.app.instance = self
def _setupSettings(self):
self._settings = settings.Settings()
self._settings.addOption(Option('app_resolution', '640x480'))
self._settings.addOption(Option('app_window_size', ''))
self._settings.addOption(Option('app_fullscreen', 'false'))
self._settings.addOption(Option('app_show_cursor', 'true'))
self._settings.addOption(Option('app_rotation', 'normal'))
self._settings.addOption(Option('app_panel_fontsize', '10'))
self._settings.addOption(Option('app_mouse_enabled', 'true'))
self._settings.addOption(Option('multitouch_enabled', 'false'))
self._settings.addOption(Option('multitouch_driver', ''))
self._settings.addOption(Option('multitouch_tuio_port', ''))
self._settings.addOption(Option('multitouch_mtdev_device', ''))
self._settings.addOption(Option('log_avg_categories', ''))
def _applySettingsExtenders(self, kargs):
self.settings.applyExtender(settings.KargsExtender(kargs))
argvExtender = settings.ArgvExtender(self.mainDiv.VERSION)
self.mainDiv.onArgvParserCreated(argvExtender.parser)
self.settings.applyExtender(argvExtender)
self.mainDiv.onArgvParsed(argvExtender.parsedArgs[0], argvExtender.parsedArgs[1],
argvExtender.parser)
def _setupLogging(self):
catMap = self.settings.get('log_avg_categories').strip()
if catMap:
for catPair in catMap.split(' '):
cat, strLevel = catPair.split(':')
level = getattr(avg.logger.Severity, strLevel)
libavg.avg.logger.configureCategory(cat, level)
def _setupRootNode(self):
libavg.player.loadString('''<?xml version="1.0"?>
<!DOCTYPE avg SYSTEM "../../libavg/doc/avg.dtd">
<avg width="%s" height="%s">
</avg>''' % tuple(self._resolution))
def _setupMouse(self):
libavg.player.enableMouse(self.settings.getBoolean('app_mouse_enabled'))
def _setupMultitouch(self):
if self.settings.getBoolean('multitouch_enabled'):
driver = self.settings.get('multitouch_driver').upper()
if driver:
os.putenv('AVG_MULTITOUCH_DRIVER', driver)
tuio_port = self.settings.get('multitouch_tuio_port').upper()
if tuio_port:
os.putenv('AVG_TUIO_PORT', tuio_port)
mtdev_device = self.settings.get('multitouch_mtdev_device').upper()
if mtdev_device:
os.putenv('AVG_LINUX_MULTITOUCH_DEVICE', mtdev_device)
libavg.player.enableMultitouch()
def _getAppParentGeometry(self):
rotation = self.settings.get('app_rotation').lower()
size = self._resolution
pos = (0, 0)
angle = 0
if rotation == 'left':
angle = -math.pi / 2
size = (self._resolution.y, self._resolution.x)
pos = ((self._resolution.x - self._resolution.y) / 2,
(self._resolution.y - self._resolution.x) / 2)
elif rotation == 'right':
angle = math.pi / 2
size = (self._resolution.y, self._resolution.x)
pos = ((self._resolution.x - self._resolution.y) / 2,
(self._resolution.y - self._resolution.x) / 2)
elif rotation == 'inverted':
angle = math.pi
elif rotation != 'normal':
raise TypeError('Invalid rotation %s' % rotation)
return (pos, size, angle)
def _setupAppParent(self, pos, size, angle):
self._appParent = libavg.avg.DivNode(parent=libavg.player.getRootNode(),
pos=pos, size=size, angle=angle)
def _setupMainDiv(self):
self._appParent.appendChild(self.mainDiv)
self.mainDiv.size = self._appParent.size
def _setupTopPanel(self):
self._overlayPanel = libavg.avg.DivNode(parent=self._appParent, id='overlayPanel')
def _setupDebugPanel(self):
self._debugPanel = debugpanel.DebugPanel(parent=self._appParent,
size=self._appParent.size, id='debugPanel',
fontsize=self.settings.getFloat('app_panel_fontsize'))
def _setupDebuggingWidgets(self):
pass
def _setupResolution(self):
rotation = self.settings.get('app_rotation').lower()
resolutionStr = self.settings.get('app_resolution').lower()
if resolutionStr != '':
resolution = self.settings.getPoint2D('app_resolution')
else:
resolution = libavg.player.getScreenResolution()
windowSizeStr = self.settings.get('app_window_size')
if windowSizeStr != '':
windowSize = self.settings.getPoint2D('app_window_size')
else:
windowSize = resolution
if rotation in ('left', 'right'):
resolution = Point2D(resolution.y, resolution.x)
windowSize = Point2D(windowSize.y, windowSize.x)
self._resolution = resolution
self._windowSize = windowSize
def _applyResolution(self):
fullscreen = self.settings.getBoolean('app_fullscreen')
if fullscreen:
resolution = self._resolution
else:
resolution = self._windowSize
libavg.player.setResolution(
fullscreen,
int(resolution.x), int(resolution.y),
0 # color depth
)
libavg.player.showCursor(self.settings.getBoolean('app_show_cursor'))
def _setupKeyboardManager(self):
keyboardmanager.init()
keyboardmanager.bindKeyDown(
keystring='d',
handler=self._debugPanel.toggleVisibility,
help='Show/hide the debug panel',
modifiers=libavg.avg.KEYMOD_CTRL)
keyboardmanager.bindKeyDown(
keystring='h',
handler=lambda: libavg.player.showCursor(
not libavg.player.isCursorShown()),
help='Show/hide cursor',
modifiers=libavg.avg.KEYMOD_CTRL)
keyboardmanager.bindKeyDown(
keystring='p',
handler=self.takeScreenshot,
help='Take screenshot',
modifiers=libavg.avg.KEYMOD_CTRL)
keyboardmanager.bindKeyDown(
keystring='b',
handler=self.dumpTextObjectCount,
help='Dump objects count to the console',
modifiers=libavg.avg.KEYMOD_CTRL)
keyboardmanager.bindKeyDown(
keystring='e',
handler=self._toggleMtEmulation,
help='Toggle multitouch emulation',
modifiers=libavg.avg.KEYMOD_CTRL)
self.debugPanel.setupKeys()
def _toggleMtEmulation(self):
if self._mtEmu is None:
self._mtEmu = mtemu.MTemu()
keyboardmanager.bindKeyDown('shift', self._mtEmu.enableDualTouch,
'Enable pinch gesture emulation')
keyboardmanager.bindKeyUp('shift', self._mtEmu.disableDualTouch,
'Disable pinch gesture emulation')
keyboardmanager.bindKeyDown('t', self._mtEmu.toggleSource,
'Toggle source between TOUCH and TRACK', libavg.avg.KEYMOD_CTRL)
else:
self._mtEmu.deinit()
keyboardmanager.unbindKeyDown('t', libavg.avg.KEYMOD_CTRL)
keyboardmanager.unbindKeyDown('shift')
keyboardmanager.unbindKeyUp('shift')
del self._mtEmu
self._mtEmu = None
def _teardownKeyboardManager(self):
keyboardmanager.unbindAll()
def _setupOnInit(self):
libavg.player.setTimeout(0, self._onInitInternal)
def _runLoop(self):
libavg.player.play()
def _onInitInternal(self):
self._setupMultitouch()
self.mainDiv.onInit()
libavg.player.subscribe(libavg.player.ON_FRAME, self.mainDiv.onFrame)
|
lgpl-2.1
| -4,420,832,061,049,459,700
| 32.238961
| 90
| 0.608815
| false
| 4.057387
| false
| false
| false
|
lochiiconnectivity/exabgp
|
lib/exabgp/protocol/ip/fragment.py
|
1
|
1401
|
# encoding: utf-8
"""
fragment.py
Created by Thomas Mangin on 2010-02-04.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
# =================================================================== Fragment
# Uses bitmask operand format defined above.
# 0 1 2 3 4 5 6 7
# +---+---+---+---+---+---+---+---+
# | Reserved |LF |FF |IsF|DF |
# +---+---+---+---+---+---+---+---+
#
# Bitmask values:
# + Bit 7 - Don't fragment (DF)
# + Bit 6 - Is a fragment (IsF)
# + Bit 5 - First fragment (FF)
# + Bit 4 - Last fragment (LF)
class Fragment (int):
# reserved = 0xF0
LAST = 0x08
FIRST = 0x04
IS = 0x02
DONT = 0x01
def __str__ (self):
if self == 0x00: return 'not-a-fragment'
if self == self.DONT: return 'dont-fragment'
if self == self.IS: return 'is-fragment'
if self == self.FIRST: return 'first-fragment'
if self == self.LAST: return 'last-fragment'
return 'unknown fragment value %d' % int(self)
def NamedFragment (name):
fragment = name.lower()
if fragment == 'not-a-fragment': return Fragment(0x00)
if fragment == 'dont-fragment': return Fragment(Fragment.DONT)
if fragment == 'is-fragment': return Fragment(Fragment.IS)
if fragment == 'first-fragment': return Fragment(Fragment.FIRST)
if fragment == 'last-fragment': return Fragment(Fragment.LAST)
raise ValueError('unknown fragment name %s' % fragment)
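# Editor's illustrative sketch (not part of the original module): round-trips a
# bitmask value through the helpers above. The helper name is an editor's
# invention.
def _example_fragment_usage ():
	fragment = NamedFragment('first-fragment')
	assert fragment == Fragment.FIRST
	assert str(fragment) == 'first-fragment'
	return fragment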
|
bsd-3-clause
| 4,092,678,265,313,435,600
| 30.133333
| 78
| 0.58601
| false
| 3.120267
| false
| false
| false
|
tulip-control/tulip-control
|
contrib/aut2simulink.py
|
1
|
14363
|
# Copyright (c) 2012 by California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the California Institute of Technology nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
# OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
"""
This program takes the aut and smv files from a generated TuLiP controller
and automatically writes a MATLAB-compatible script for that controller.
Run this program by typing "python programfile.py nameofautfile
nameofmatlabfile"; the aut and smv files must have the same name.
Do not include file extensions.
Written by Robert Rogersten during SURF June 2012,
Co-mentors Mumu Xu, Necmiye Ozay and Ufuk Topcu.
"""
from __future__ import print_function
import re, copy, os, sys
try:
import queue as _queue
except ImportError:
import Queue as _queue
class AutomatonState(object):
"""AutomatonState class for representing a state in a finite state
automaton. An AutomatonState object contains the following
fields:
- `stateid`: an integer specifying the state id of this AutomatonState object.
- `state`: a dictionary whose keys are the names of the variables
and whose values are the values of the variables.
- `transition`: a list of id's of the AutomatonState objects to
which this AutomatonState object can transition.
"""
def __init__(self, stateid=-1, state={},transition=[]):
self.stateid = stateid
self.state = copy.copy(state)
self.transition = transition[:]
def question(string):
"""This function asks a yes/no question and returns the answer.
@param string: The question to use as the prompt.
@return: The "answer" return value is one of "yes" or "no". The
default is "yes". (The default occurs if the user only presses
the RETURN button.)
"""
default="yes"
valid = {"yes":True, "y":True, "ye":True,
"no":False, "n":False}
prompt = " [Y/n] "
while True:
print(string)
choice = raw_input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
print("Please respond with 'yes' or 'no'\n")
def load_file(aut_file):
"""Construct an AutomatonState object from aut_file and place in a Queue.
@param aut_file: the name of the text file containing the
automaton, or an (open) file-like object.
"""
if isinstance(aut_file, str):
f = open(aut_file, 'r')
else:
f = aut_file
stateid = -1
for line in f:
# parse states
if (line.find('State ') >= 0):
stateid = re.search(r'State (\d+)', line)
stateid = int(stateid.group(1))
state = dict(re.findall(r'(\w+):(\w+)', line))
state1 = dict(re.findall(r'(\w+):(-\w+)', line))
state.update(state1)
if re.search('successors', line):
transition = list(re.findall(r'\d+', line))
automaton=(stateid,state,transition)
queue.put(automaton)
queue1.put(automaton)
queue2.put(automaton)
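# Editor's note (illustrative, not part of the original script): for an automaton
# line such as "State 2 with rank 0 -> <x:0, y:1>" followed by a successor line
# "With successors : 1, 3", load_file() above places the tuple
# (2, {'x': '0', 'y': '1'}, ['1', '3']) into each of the three queues.
# The variable names and values are hypothetical.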
def read_variables(smv_file):
"""Put the enviroment and system variables from smv_file in two different
Queues called system and enviroment.
@param smv_file: the name of the text file containing the
automaton, or an (open) file-like object.
"""
if isinstance(smv_file, str):
f = open(smv_file, 'r')
else:
f = smv_file
for line in f:
if re.search('MODULE env',line):
for line in f:
if re.search(' : ', line):
env = str(re.findall(r'(\w+) :', line))
env = env[2:len(env)-2]
enviroment.put(env)
if re.search('MODULE sys',line):
break
if re.search('MODULE sys',line):
for line in f:
if re.search(' : ', line):
sys = str(re.findall(r'(\w+) :', line))
sys = sys[2:len(sys)-2]
system.put(sys)
def write_startline(enviroment,system,f):
"""Write the first lines before the switch cases in the matlab file.
Input:
- enviroment queue
- system queue
- fileobject f
"""
f.write('function [')
for i in range(system.qsize()):
count = system.qsize()
temp = system.get()
if count == i+1:
f.write(temp)
else:
f.write(temp+',')
system.put(temp)
f.write('] = '+sys.argv[2]+'(')
for i in range(enviroment.qsize()):
count = enviroment.qsize()
temp = enviroment.get()
if count == i+1:
f.write(temp)
else:
f.write(temp+',')
enviroment.put(temp)
f.write(")\nglobal state;\ncoder.extrinsic('disp');\nswitch state\n")
def write_case(enviroment,system,f,verbosem):
"""Write the switch cases in the matlab file.
Input:
- enviroment queue
- system queue
- fileobject f
- verbosem
"""
#for each case
for i in range(queue.qsize()):
f.write('\tcase '+str(i)+'\n')
#for each condition within each case
temp=queue.get()
ef=0
for k in range(queue1.qsize()):
temp2=queue1.get()
if str(k) in temp[2]:
if ef == 0:
f.write('\t\tif ')
ef=1
else:
f.write('\t\telseif ')
for l in range(enviroment.qsize()):
count=enviroment.qsize()
temp1=enviroment.get()
if count == l+1:
f.write(temp1+' == '+temp2[1][temp1])
else:
f.write(temp1+' == '+temp2[1][temp1]+' && ')
enviroment.put(temp1)
f.write('\n')
if verbosem==1:
f.write('\t\t\tstate = '+str(temp2[0])+';\n')
elif verbosem==0:
f.write('\t\t\tstate = '+str(temp2[0])+'\n')
else:
raise Exception
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t\t'+temp1+' = '+temp2[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t\t'+temp1+' = '+temp2[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
queue1.put(temp2)
#else statement for each case
if not temp[2]:
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t'+temp1+' = '+temp[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t'+temp1+' = '+temp[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
else:
f.write('\t\telse\n')
f.write("\t\t\tdisp('Cannot find a valid successor, environment assumption is like to be violated')\n")
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t\t'+temp1+' = '+temp[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t\t'+temp1+' = '+temp[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
f.write('\t\tend\n')
queue.put(temp)
#the last case is an otherwise statement
f.write('\totherwise\n')
f.write("\t\tdisp('Cannot find a valid successor, environment assumption is like to be violated')\n")
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t'+temp1+' = 0;\n')
elif verbosem==0:
f.write('\t\t'+temp1+' = 0\n')
else:
raise Exception
system.put(temp1)
f.write('end')
def write_case_no(enviroment,system,f,verbosem):
"""Write the switch cases in the matlab file and exclude no
successors.
Input:
- enviroment queue
- system queue
- fileobject f
- verbosem
"""
#for each case
li=list()
for i in range(queue.qsize()):
q=queue.get()
li.append(q[0])
queue.put(q)
for i in range(queue.qsize()):
#for each condition within each case
temp=queue.get()
f.write('\tcase '+str(temp[0])+'\n')
ef=0
for k in range(queue2.qsize()):
temp2=queue2.get()
if str(k) in temp[2] and k in li:
if ef == 0:
f.write('\t\tif ')
ef=1
else:
f.write('\t\telseif ')
for l in range(enviroment.qsize()):
count=enviroment.qsize()
temp1=enviroment.get()
if count == l+1:
f.write(temp1+' == '+temp2[1][temp1])
else:
f.write(temp1+' == '+temp2[1][temp1]+' && ')
enviroment.put(temp1)
f.write('\n')
if verbosem==1:
f.write('\t\t\tstate = '+str(k)+';\n')
elif verbosem==0:
f.write('\t\t\tstate = '+str(k)+'\n')
else:
raise Exception
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t\t'+temp1+' = '+temp2[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t\t'+temp1+' = '+temp2[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
queue2.put(temp2)
#else statement for each case
if not temp[2]:
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t'+temp1+' = '+temp[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t'+temp1+' = '+temp[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
else:
f.write('\t\telse\n')
f.write("\t\t\tdisp('Cannot find a valid successor, environment assumption is like to be violated')\n")
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t\t'+temp1+' = '+temp[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t\t'+temp1+' = '+temp[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
f.write('\t\tend\n')
queue.put(temp)
#the last case is an otherwise statement
f.write('\totherwise\n')
f.write("\t\tdisp('Cannot find a valid successor, environment assumption is like to be violated')\n")
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t'+temp1+' = 0;\n')
elif verbosem==0:
f.write('\t\t'+temp1+' = 0\n')
else:
raise Exception
system.put(temp1)
f.write('end')
queue=_queue.Queue()
queue1=_queue.Queue()
queue2=_queue.Queue()
enviroment=_queue.Queue()
system=_queue.Queue()
try:
load_file(sys.argv[1]+'.aut')
read_variables(sys.argv[1]+'.smv')
q=question('Shall there be a semicolon printed after each variable assignment? [Y/n]')
q2=question('Shall the script exclude no successors? [Y/n]')
if q:
verbosem=1
else:
verbosem=0
if not os.path.isfile(sys.argv[2]+'.m'):
f=open(sys.argv[2]+'.m','w')
write_startline(enviroment,system,f)
if q2:
for i in range(queue.qsize()):
temp=queue.get()
temp1=queue1.get()
if not temp[2] == []:
queue.put(temp)
queue1.put(temp1)
write_case_no(enviroment,system,f,verbosem)
else:
write_case(enviroment,system,f,verbosem)
f.close()
if queue.get()[0]==-1:
raise IOError
print('MATLAB script written to '+sys.argv[2]+'.m'+' with success\n')
else:
print('Enter a matlab filename that does not exist.')
except IOError:
print(
'Enter correct filename for a TuLiP generated controller, '
'aut and\nsmv file must have the same name')
except IndexError:
print(
'Usage: aut2simulink.py JTLV-AUT-FILE MATLAB-FILE\n\n'
' aut and smv file must have the same name.\n'
' Do not include file extensions.')
|
bsd-3-clause
| -22,288,746,802,600,890
| 34.289926
| 115
| 0.53227
| false
| 3.765863
| false
| false
| false
|
Data2Semantics/prov-o-matic
|
src/provomatic/extension.py
|
1
|
2174
|
from watcher import NotebookWatcher, CodeVisitor
from wrapper import prov, replace
from builder import get_dataset, save_prov, clear_dataset, add_prov, revive, list_entities, list_activities
from viewer import Viewer
from ducktape import Ducktape
import logging
import os
log = logging.getLogger('provomatic.extension')
log.setLevel(logging.WARNING)
def load_ipython_extension(ip):
log.debug("Loading PROV-O-Matic extension")
# Push the prov and replace wrapper functions
ip.push('prov')
ip.push('replace')
# Push the save_prov function (for saving the generated provenance trace to a file)
ip.push('save_prov')
# Push the add_prov function (for adding provenance from external files)
ip.push('add_prov')
    # Push the revive function (for binding a value from an imported provenance graph to a new variable)
ip.push('revive')
ip.push('list_entities')
ip.push('list_activities')
## Initialize the PROV-O-Viz adapter
viewer = Viewer()
view_prov = viewer.view_prov
set_provoviz_url = viewer.set_provoviz_url
view_prov_service = viewer.view_prov_service
# Push the PROV-O-Viz functions to the IPython Notebook
ip.push('view_prov')
ip.push('set_provoviz_url')
ip.push('view_prov_service')
## Initialize the Ducktape loader
ducktape = Ducktape(ip)
load_ducktape = ducktape.load
ip.push('load_ducktape')
# Clear the provenance graph
clear_dataset()
try :
add_prov('http://www.w3.org/ns/prov#',url='http://localhost:8000/datafiles/prov-o.ttl')
except :
curwd = os.getcwd()
provopath = os.path.join(curwd,'datafiles/prov-o.ttl')
log.warning('Could not load PROV schema from URL, attempting to load from {}'.format(provopath))
add_prov('http://www.w3.org/ns/prov#',url='file://{}'.format(provopath))
## Initialize the notebookwatcher and code visitor.
nw = NotebookWatcher(ip)
cv = CodeVisitor(nw)
ip.events.register('pre_execute', nw.pre_execute)
ip.events.register('post_execute', nw.post_execute)
ip.ast_transformers.append(cv)
|
mit
| 5,969,903,609,708,959,000
| 27.233766
| 107
| 0.674793
| false
| 3.45628
| false
| false
| false
|
Linktime/Aike
|
app/userApp/forms.py
|
1
|
2544
|
#-*- coding:utf-8 -*-
from django import forms
from django.contrib.auth.models import User
from app.userApp.models import AikeUser, MessageBoard
class UserRegisterForm(forms.ModelForm):
username = forms.EmailField(label=u"*用户名(邮箱)")
password = forms.CharField(widget=forms.PasswordInput,label=u"*密码")
alias = forms.CharField(label=u"*别名",help_text="用于在网站中显示给其他人的名称")
sex = forms.ChoiceField(widget=forms.RadioSelect(),choices=((0,u'高富帅'),(1,u'白富美')),label="性别")
# name = forms.CharField(required=False,label="真名")
# age = forms.IntegerField(required=False,label="年龄")
# city = forms.CharField(required=False,label="所在城市",help_text="我们将优先通过地域给您展示活动")
# university = forms.CharField(required=False,label="大学",help_text="如果您是大学生,我们会优先将您所在大学的活动推荐给您")
# lbs = forms.CharField(required=False,label="上一次手机登陆地理位置")
# Auth = forms.BooleanField(required=False,label="认证")
class Meta:
model = AikeUser
fields = ("username","password","alias","name","sex","age","city","university")
class UserChangeForm(forms.ModelForm):
sex = forms.ChoiceField(widget=forms.RadioSelect(),choices=((0,u'高富帅'),(1,u'白富美')),label="性别")
# alias = forms.CharField(required=False,label="别名",help_text="用于在网站中显示给其他人的名称")
# name = forms.CharField(required=False,label="真名")
# email = forms.EmailField(required=False,label="Email")
# sex = forms.ChoiceField(widget=forms.RadioSelect(),choices=((0,u'高富帅'),(1,u'白富美')),label="性别")
# age = forms.IntegerField(required=False,label="年龄")
# city = forms.CharField(required=False,label="所在城市",help_text="我们将优先通过地域给您展示活动")
# university = forms.CharField(required=False,label="大学",help_text="如果您是大学生,我们会优先将您所在大学的活动推荐给您")
# lbs = forms.CharField(required=False,label="上一次手机登陆地理位置")
# Auth = forms.BooleanField(required=False,label="认证")
class Meta:
model = AikeUser
exclude = ("user")
class UserMessageBoardForm(forms.ModelForm):
# text = forms.CharField(widget=forms.Textarea(attrs={'class':'span5','rows':'5','style':'resize:none'}))
class Meta:
model = MessageBoard
|
apache-2.0
| -8,952,682,153,699,489,000
| 50.634146
| 109
| 0.682576
| false
| 2.411173
| false
| false
| false
|
mfherbst/bohrium
|
test/python/tests/test_reorganization.py
|
4
|
4226
|
import util
import functools
import operator
class test_gather:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); a = %s; " % ary
cmd += "ind = M.arange(%d, dtype=np.int64).reshape(%s); " % (nelem, shape)
yield cmd
yield cmd + "ind = ind[::2]; "
if shape[0] > 2:
yield cmd + "ind = ind[1:]; "
if len(shape) > 1 and shape[1] > 5:
yield cmd + "ind = ind[3:]; "
def test_take(self, cmd):
return cmd + "res = M.take(a, ind)"
def test_take_ary_mth(self, cmd):
return cmd + "res = a.take(ind)"
def test_indexing(self, cmd):
return cmd + "res = a.flatten()[ind.flatten()]"
class test_scatter:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); res = %s; " % ary
cmd += "ind = M.arange(%d, dtype=np.int64).reshape(%s); " % (nelem, shape)
VAL = "val = R.random(ind.shape, np.float64, bohrium=BH); "
yield cmd + VAL
yield cmd + "ind = ind[::2]; " + VAL
if shape[0] > 2:
yield cmd + "ind = ind[1:];" + VAL
if len(shape) > 1 and shape[1] > 5:
yield cmd + "ind = ind[3:];" + VAL
def test_put(self, cmd):
return cmd + "M.put(res, ind, val)"
def test_put_scalar(self, cmd):
return cmd + "M.put(res, ind, 42)"
def test_put_fixed_length_val(self, cmd):
return cmd + "M.put(res, ind, M.arange(10))"
def test_put_ary_mth(self, cmd):
return cmd + "res.put(ind, val)"
def test_indexing(self, cmd):
return cmd + "res = res.flatten(); res[ind] = val"
def test_cond(self, cmd):
cmd += cmd + "mask = R.random(ind.size, np.bool, bohrium=BH).reshape(ind.shape); "
np_cmd = cmd + "np.put(res, ind[mask], val[mask])"
bh_cmd = cmd + "M.cond_scatter(res, ind, val, mask)"
return (np_cmd, bh_cmd)
class test_nonzero:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); a = %s; " % ary
yield cmd
def test_flatnonzero(self, cmd):
return cmd + "res = M.flatnonzero(a)"
def test_nonzero(self, cmd):
return cmd + "res = M.concatenate(M.nonzero(a))"
class test_fancy_indexing_get:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); a = %s; " % ary
ind = "ind = ("
for dim in shape:
ind += "R.random(10, np.uint64, bohrium=BH) %% %d, " % dim
ind += "); "
yield cmd + ind
def test_take_using_index_tuple(self, cmd):
return cmd + "res = bh.take_using_index_tuple(a, ind)"
def test_indexing(self, cmd):
return cmd + "res = a[ind]"
class test_fancy_indexing_set:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); res = %s; " % ary
ind = "ind = ("
for dim in shape:
ind += "R.random(10, np.uint64, bohrium=BH) %% %d, " % dim
ind += "); "
yield cmd + ind
def test_put_using_index_tuple(self, cmd):
return cmd + "bh.put_using_index_tuple(res, ind, 42)"
def test_indexing(self, cmd):
return cmd + "res[ind] = 42"
|
lgpl-3.0
| 6,225,781,323,098,376,000
| 33.647541
| 90
| 0.514671
| false
| 3.230887
| true
| false
| false
|
mupif/mupif
|
mupif/examples/Example09-operatorEmail/Example09.py
|
1
|
6155
|
#!/usr/bin/env python3
import sys
sys.path.extend(['..', '../../..'])
from mupif import *
import jsonpickle
import time # for sleep
import logging
log = logging.getLogger()
import mupif.Physics.PhysicalQuantities as PQ
#
# Expected response from operator: E-mail with "CSJ01" (workflow + jobID)
# in the subject line, message body: json encoded dictionary with 'Operator-results' key, e.g.
# {"Operator-results": 3.14}
#
class EmailAPI(Model.Model):
"""
Simple application API that involves operator interaction
"""
def __init__(self, file):
super(EmailAPI, self).__init__(file)
# note: "From" should correspond to destination e-mail
# where the response is received (Operator can reply to the message)
self.operator = operatorUtil.OperatorEMailInteraction(From='appAPI@gmail.com',
To='operator@gmail.com',
smtpHost='smtp.something.com',
imapHost='imap.gmail.com',
imapUser='appAPI')
self.inputs = {}
self.outputs = {}
self.key = 'Operator-results'
def initialize(self, file='', workdir='', metaData={}, validateMetaData=True, **kwargs):
MD = {
'Name': 'Email operator application',
'ID': 'N/A',
'Description': 'Sending email with input and receiving email with results',
'Physics': {
'Type': 'Other',
'Entity': 'Other'
},
'Solver': {
'Software': 'Unknown',
'Language': 'Unknown',
'License': 'Unknown',
'Creator': 'Unknown',
'Version_date': '02/2019',
'Type': 'Summator',
'Documentation': 'Nowhere',
'Estim_time_step_s': 1,
'Estim_comp_time_s': 0.01,
'Estim_execution_cost_EUR': 0.01,
'Estim_personnel_cost_EUR': 0.01,
'Required_expertise': 'None',
'Accuracy': 'Unknown',
'Sensitivity': 'Unknown',
'Complexity': 'Unknown',
'Robustness': 'Unknown'
},
'Inputs': [
{'Type': 'mupif.Property', 'Type_ID': 'mupif.PropertyID.PID_CumulativeConcentration', 'Name': 'Concentration', 'Description': 'Concentration', 'Units': 'kg/m**3', 'Origin': 'Simulated', 'Required': True}],
'Outputs': [
{'Type': 'mupif.Property', 'Type_ID': 'mupif.PropertyID.PID_Demo_Value', 'Name': 'Demo value',
'Description': 'Demo value', 'Units': 'dimensionless', 'Origin': 'Simulated'}]
}
self.updateMetadata(MD)
super(EmailAPI, self).initialize(file, workdir, metaData, validateMetaData, **kwargs)
def setProperty(self, property, objectID=0):
# remember the mapped value
self.inputs[str(property.propID)] = property
self.inputs[self.key] = 0.0
def getProperty(self, propID, time, objectID=0):
md = {
'Execution': {
'ID': self.getMetadata('Execution.ID'),
'Use_case_ID': self.getMetadata('Execution.Use_case_ID'),
'Task_ID': self.getMetadata('Execution.Task_ID')
}
}
if self.outputs:
# unpack & process outputs (expected json encoded data)
if propID == PropertyID.PID_Demo_Value:
if self.key in self.outputs:
value = float(self.outputs[self.key])
log.info('Found key %s with value %f' % (self.key, value))
return Property.ConstantProperty(value, propID, ValueType.Scalar, PQ.getDimensionlessUnit(), time, 0, metaData=md)
else:
log.error('Not found key %s in email' % self.key)
return None
def solveStep(self, tstep, stageID=0, runInBackground=False):
# send email to operator, pack json encoded inputs in the message
# note workflow and job IDs will be available in upcoming MuPIF version
self.operator.contactOperator("CS", "J01", jsonpickle.encode(self.inputs))
responseReceived = False
# check for response and repeat until received
while not responseReceived:
# check response and receive the data
responseReceived, operatorOutput = self.operator.checkOperatorResponse("CS", "J01")
# print(responseReceived, operatorOutput.splitlines()[0])
if responseReceived:
try:
self.outputs = jsonpickle.decode(operatorOutput.splitlines()[0]) # pick up only dictionary to new line
except Exception as e:
log.error(e)
log.info("Received response from operator %s" % self.outputs)
else:
time.sleep(60) # wait
def getCriticalTimeStep(self):
return PQ.PhysicalQuantity(1.0, 's')
#################################################
# demo code
#################################################
# create instance of application API
app = EmailAPI(None)
try:
executionMetadata = {
'Execution': {
'ID': '1',
'Use_case_ID': '1_1',
'Task_ID': '1'
}
}
app.initialize(metaData=executionMetadata)
# CumulativeConcentration property on input
p = Property.ConstantProperty(0.1, PropertyID.PID_CumulativeConcentration, ValueType.Scalar, 'kg/m**3')
# set concentration as input
app.setProperty(p)
# solve (involves operator interaction)
tstep = TimeStep.TimeStep(0.0, 0.1, 1.0, 's', 1)
app.solveStep (tstep)
# get result of the simulation
r = app.getProperty(PropertyID.PID_Demo_Value, tstep.getTime())
log.info("Application API return value is %f", r.getValue())
# terminate app
except Exception as e:
log.error(e)
finally:
app.terminate()
|
lgpl-3.0
| -1,701,757,126,261,883,600
| 40.308725
| 221
| 0.543461
| false
| 4.192779
| false
| false
| false
|
armijnhemel/cleanup-for-discogs
|
monthly/compare-not-accepted.py
|
1
|
2940
|
#! /usr/bin/python3
## Hackish example script to compare some results output by process-discogs-chunks.py
##
## Licensed under the terms of the General Public License version 3
##
## SPDX-License-Identifier: GPL-3.0
##
## Copyright 2017-2019 - Armijn Hemel
import os
import sys
import argparse
def main():
parser = argparse.ArgumentParser()
# the following options are provided on the commandline
parser.add_argument("-f", "--first", action="store", dest="first",
help="path to first file", metavar="FILE")
parser.add_argument("-s", "--second", action="store", dest="second",
help="path to second file", metavar="FILE")
parser.add_argument("-a", "--all", action="store", dest="all",
help="path to all hashes (example: sha256-201909", metavar="FILE")
parser.add_argument("-p", "--printchanged", action="store_true", dest="printchanged",
help="print changed entries instead of statistics")
args = parser.parse_args()
# then some sanity checks for the data files
if args.first is None:
parser.error("Path to first file missing")
if not os.path.exists(args.first):
parser.error("First file %s does not exist" % args.first)
if args.second is None:
parser.error("Path to second file missing")
if not os.path.exists(args.second):
parser.error("Second file %s does not exist" % args.second)
if args.all is None:
parser.error("Path to file with all hashes missing")
if not os.path.exists(args.all):
parser.error("All hashes file %s does not exist" % args.all)
all_releases = set()
try:
shafile1 = open(args.all, 'r')
except:
print("Could not open %s, exiting" % args.all, file=sys.stderr)
sys.exit(1)
for i in shafile1:
(release_id, sha) = i.split('\t')
release = release_id.split('.')[0]
all_releases.add(release)
release_to_status1 = {}
notacceptedfile1 = open(args.first, 'r')
for i in notacceptedfile1:
(release_id, status) = i.split('\t')
release = release_id.split('.')[0]
        release_to_status1[release] = status.strip()
notacceptedfile1.close()
release_to_status2 = {}
notacceptedfile2 = open(args.second, 'r')
for i in notacceptedfile2:
(release_id, status) = i.split('\t')
release = release_id.split('.')[0]
        release_to_status2[release] = status.strip()
notacceptedfile2.close()
notkeys1 = set(release_to_status1.keys())
notkeys2 = set(release_to_status2.keys())
print("%d releases in not1 that are not in not2" % len(notkeys1.difference(notkeys2)))
print("%d releases in not2 that are not in not1" % len(notkeys2.difference(notkeys1)))
for i in sorted(notkeys1.difference(notkeys2)):
print(i, i in all_releases)
if __name__ == "__main__":
main()
|
gpl-3.0
| 4,643,265,375,250,114,000
| 30.612903
| 90
| 0.620748
| false
| 3.616236
| false
| false
| false
|
CrafterLuc2/Python
|
Scenery/main.py
|
1
|
2193
|
# Importation of Pygame
import pygame
from car import Car
import os
folder = os.path.dirname(os.path.realpath(__file__))
pygame.init()
pygame.mixer.init()
# Colouration
BLACK = ( 0, 0, 0)
WHITE = ( 255, 255, 255)
GREEN = ( 0, 255, 0)
RED = ( 255, 0, 0)
BLUE = (0, 0, 255)
SKYBLUE = (135,206,235)
GRASSGREEN = (74, 197, 5)
YELLOW = (255, 255, 0)
BROWN = (139,69,19)
LEAFEGREEN = (0, 100, 0)
# Basic Window
size = (1200, 800)
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Pandorium")
all_sprites_list = pygame.sprite.Group()
playerCar = Car(RED, 20, 30)
playerCar.rect.x = 200
playerCar.rect.y = 300
all_sprites_list.add(playerCar)
carryOn = True
clock = pygame.time.Clock()
pygame.mixer.music.load(os.path.join(folder, "music.ogg"))
pygame.mixer.music.play(-1)
while carryOn:
for event in pygame.event.get():
if event.type == pygame.QUIT:
carryOn = False
elif event.type==pygame.KEYDOWN:
if event.key==pygame.K_x:
                carryOn = False
keys = pygame.key.get_pressed()
if keys[pygame.K_LEFT]:
playerCar.moveLeft(5)
if keys[pygame.K_RIGHT]:
playerCar.moveRight(5)
#Drawing code
screen.fill(WHITE)
pygame.draw.rect(screen, RED, [55, 200, 100, 70],0)
pygame.draw.line(screen, GREEN, [0, 0], [100, 100], 5)
pygame.draw.ellipse(screen, BLUE, [20,20,250,100], 2)
pygame.draw.rect(screen, SKYBLUE, [0, 0, 1200, 600],0)
pygame.draw.rect(screen, GRASSGREEN, [0, 550, 1200, 300],0)
pygame.draw.ellipse(screen, YELLOW, [100,75, 100,100], 0)
pygame.draw.line(screen, YELLOW, [40,40] , [80,80], 5)
pygame.draw.line(screen, YELLOW, [150,10] , [150,60], 5)
pygame.draw.line(screen, YELLOW, [280,40] , [220,80], 5)
pygame.draw.rect(screen, BROWN, [800,550, 60,-200], 0)
pygame.draw.ellipse(screen, LEAFEGREEN, [700,130, 260,300], 0)
pygame.draw.rect(screen, BLACK, [0,575, 1200,150], 0 )
pygame.draw.line(screen, WHITE, [0,650],[1200,650],10)
all_sprites_list.draw(screen)
pygame.display.flip()
clock.tick(60)
pygame.quit()
|
apache-2.0
| 8,310,543,791,064,626,000
| 24.421687
| 66
| 0.611035
| false
| 2.727612
| false
| false
| false
|
daphnei/nn_chatbot
|
remove_proper_names/proper_names.py
|
1
|
1587
|
import urllib2
import csv
from pygtrie import Trie
from pygtrie import PrefixSet
import pickle
import os
def _create_trie():
tsvs = ["https://www2.census.gov/topics/genealogy/1990surnames/dist.female.first",
"https://www2.census.gov/topics/genealogy/1990surnames/dist.male.first"]
# "https://www2.census.gov/topics/genealogy/1990surnames/dist.all.last"]
	# A hard-coded list of exceptions (names that are more often seen as common nouns
	# at the start of sentences).
exceptions = ["winter", "grant", "van", "son", "young", "royal", "long", "june", "august", "joy", "young", "aura", "ray", "ok", "harmony", "ha", "sun", "in", "many", "see", "so", "my", "may", "an", "les", "will", "love", "man", "major", "faith"]
names = []
for tsv_url in tsvs:
tsv_file = urllib2.urlopen(tsv_url)
tabbed = zip(*[line for line in csv.reader(tsv_file, delimiter=' ')])
names = names + list(tabbed[0])
names_lower = set()
for name in names:
name = name.lower()
if name not in exceptions:
names_lower.add(name)
trie = PrefixSet(names_lower)
with open('proper_names.pickle', 'w') as outfile:
pickle.dump(trie, outfile)
return trie
def get_or_create_proper_names():
if os.path.exists('proper_names.pickle'):
with open('proper_names.pickle', 'r') as file:
return pickle.load(file)
else:
return _create_trie()
if __name__ == "__main__":
p = _create_trie()
print(p.__contains__("daphne"))
print(p.__contains__("xavier"))
print(p.__contains__("sally"))
print(p.__contains__("billy"))
print(p.__contains__("wxyz"))
print(p.__contains__("adamrobinson"))
|
mit
| 2,190,983,337,128,533,500
| 29.519231
| 246
| 0.655955
| false
| 2.712821
| false
| false
| false
|
Erotemic/ibeis
|
ibeis/web/futures_utils/process_actor.py
|
1
|
11765
|
""" Implements ProcessActor """
from concurrent.futures import _base
from concurrent.futures import process
from multiprocessing.connection import wait
from ibeis.web.futures_utils import _base_actor
import os
import queue
import weakref
import threading
import multiprocessing
# Most of this code is duplicated from the concurrent.futures.thread and
# concurrent.futures.process modules, writen by Brian Quinlan. The main
# difference is that we expose an `Actor` class which can be inherited from and
# provides the `executor` classmethod. This creates an asynchronously
# maintained instance of this class in a separate thread/process
__author__ = 'Jon Crall (erotemic@gmail.com)'
def _process_actor_eventloop(_call_queue, _result_queue, _ActorClass, *args,
**kwargs):
"""
    Actor event loop run in a separate process.
    Creates the instance of the actor (passing in the required *args and
    **kwargs). Then the event loop starts and feeds the actor messages from the
    _call_queue. Results are placed in the _result_queue and then transferred
    into Future objects.
"""
actor = _ActorClass(*args, **kwargs)
while True:
call_item = _call_queue.get(block=True)
if call_item is None:
# Wake up queue management thread
_result_queue.put(os.getpid())
return
try:
r = actor.handle(call_item.message)
except BaseException as e:
exc = process._ExceptionWithTraceback(e, e.__traceback__)
_result_queue.put(process._ResultItem(
call_item.work_id, exception=exc))
else:
_result_queue.put(process._ResultItem(
call_item.work_id, result=r))
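# --- Editor's illustrative sketch (not part of the original module) ---
# Minimal shape of an actor the event loop above can drive: the loop only calls
# `handle(message)`, and constructor arguments are forwarded by the executor.
# The class name and message handling are hypothetical.
class _ExampleEchoActor(object):
    def __init__(self, prefix=''):
        self.prefix = prefix

    def handle(self, message):
        # Invoked once per _CallItem taken from the call queue; the return
        # value is shipped back through the result queue into a Future.
        return '%s%r' % (self.prefix, message)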
class _WorkItem(object):
def __init__(self, future, message):
self.future = future
self.message = message
class _CallItem(object):
def __init__(self, work_id, message):
self.work_id = work_id
self.message = message
def _add_call_item_to_queue(pending_work_items,
work_ids,
call_queue):
"""Fills call_queue with _WorkItems from pending_work_items.
This function never blocks.
Args:
pending_work_items: A dict mapping work ids to _WorkItems e.g.
{5: <_WorkItem...>, 6: <_WorkItem...>, ...}
work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
are consumed and the corresponding _WorkItems from
pending_work_items are transformed into _CallItems and put in
call_queue.
call_queue: A multiprocessing.Queue that will be filled with _CallItems
derived from _WorkItems.
"""
while True:
if call_queue.full():
return
try:
work_id = work_ids.get(block=False)
except queue.Empty:
return
else:
work_item = pending_work_items[work_id]
if work_item.future.set_running_or_notify_cancel():
call_queue.put(_CallItem(work_id,
work_item.message),
block=True)
else:
del pending_work_items[work_id]
continue
def _queue_management_worker(executor_reference,
_manager,
pending_work_items,
work_ids_queue,
_call_queue,
_result_queue):
"""Manages the communication between this process and the worker processes."""
executor = None
def shutting_down():
return process._shutdown or executor is None or executor._shutdown_thread
def shutdown_worker():
        # Ask the actor process to exit by sending the None sentinel
if _manager.is_alive():
_call_queue.put_nowait(None)
# Release the queue's resources as soon as possible.
_call_queue.close()
# If .join() is not called on the created processes then
# some multiprocessing.Queue methods may deadlock on Mac OS X.
_manager.join()
reader = _result_queue._reader
while True:
_add_call_item_to_queue(pending_work_items,
work_ids_queue,
_call_queue)
sentinel = _manager.sentinel
assert sentinel
ready = wait([reader, sentinel])
if reader in ready:
result_item = reader.recv()
else:
# Mark the process pool broken so that submits fail right now.
executor = executor_reference()
if executor is not None:
executor._broken = True
executor._shutdown_thread = True
executor = None
# All futures in flight must be marked failed
for work_id, work_item in pending_work_items.items():
work_item.future.set_exception(
process.BrokenProcessPool(
"A process in the process pool was "
"terminated abruptly while the future was "
"running or pending."
))
# Delete references to object. See issue16284
del work_item
pending_work_items.clear()
# Terminate remaining workers forcibly: the queues or their
# locks may be in a dirty state and block forever.
_manager.terminate()
shutdown_worker()
return
if isinstance(result_item, int):
# Clean shutdown of a worker using its PID
# (avoids marking the executor broken)
assert shutting_down()
            _manager.join()
            # there is only one worker process, so once it has exited
            # cleanly the management thread can shut down as well
            shutdown_worker()
            return
elif result_item is not None:
work_item = pending_work_items.pop(result_item.work_id, None)
# work_item can be None if another process terminated (see above)
if work_item is not None:
if result_item.exception:
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
# Delete references to object. See issue16284
del work_item
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
# - The interpreter is shutting down OR
# - The executor that owns this worker has been collected OR
# - The executor that owns this worker has been shutdown.
if shutting_down():
try:
# Since no new work items can be added, it is safe to shutdown
# this thread if there are no pending work items.
if not pending_work_items:
shutdown_worker()
return
except queue.Full:
# This is not a problem: we will eventually be woken up (in
# _result_queue.get()) and be able to send a sentinel again.
pass
executor = None
class ProcessActorExecutor(_base_actor.ActorExecutor):
def __init__(self, _ActorClass, *args, **kwargs):
process._check_system_limits()
self._ActorClass = _ActorClass
# todo: If we want to cancel futures we need to give the task_queue a
# maximum size
self._call_queue = multiprocessing.JoinableQueue()
self._call_queue._ignore_epipe = True
self._result_queue = multiprocessing.Queue()
self._work_ids = queue.Queue()
self._queue_management_thread = None
# We only maintain one process for our actor
self._manager = None
# Shutdown is a two-step process.
self._shutdown_thread = False
self._shutdown_lock = threading.Lock()
self._broken = False
self._queue_count = 0
self._pending_work_items = {}
self._did_initialize = False
if args or kwargs:
# If given actor initialization args we must start the Actor
# immediately. Otherwise just wait until we get a message
print('Init with args')
print('args = %r' % (args,))
self._initialize_actor(*args, **kwargs)
def post(self, message):
with self._shutdown_lock:
if self._broken:
raise process.BrokenProcessPool(
'A child process terminated '
'abruptly, the process pool is not usable anymore')
if self._shutdown_thread:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = _WorkItem(f, message)
self._pending_work_items[self._queue_count] = w
self._work_ids.put(self._queue_count)
self._queue_count += 1
# Wake up queue management thread
self._result_queue.put(None)
self._start_queue_management_thread()
return f
post.__doc__ = _base_actor.ActorExecutor.post.__doc__
def _start_queue_management_thread(self):
# When the executor gets lost, the weakref callback will wake up
# the queue management thread.
def weakref_cb(_, q=self._result_queue):
q.put(None)
if self._queue_management_thread is None:
# Start the processes so that their sentinel are known.
self._initialize_actor()
self._queue_management_thread = threading.Thread(
target=_queue_management_worker,
args=(weakref.ref(self, weakref_cb),
self._manager,
self._pending_work_items,
self._work_ids,
self._call_queue,
self._result_queue))
self._queue_management_thread.daemon = True
self._queue_management_thread.start()
# use structures already in futures as much as possible
process._threads_queues[self._queue_management_thread] = self._result_queue
def _initialize_actor(self, *args, **kwargs):
if self._manager is None:
assert self._did_initialize is False, 'only initialize actor once'
self._did_initialize = True
            # We only maintain a single worker process per actor
self._manager = multiprocessing.Process(
target=_process_actor_eventloop,
args=(self._call_queue,
self._result_queue, self._ActorClass) + args,
kwargs=kwargs)
self._manager.start()
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown_thread = True
if self._queue_management_thread:
# Wake up queue management thread
self._result_queue.put(None)
if wait:
self._queue_management_thread.join()
# To reduce the risk of opening too many files, remove references to
# objects that use file descriptors.
self._queue_management_thread = None
self._call_queue = None
self._result_queue = None
self._manager = None
shutdown.__doc__ = _base.Executor.shutdown.__doc__
class ProcessActor(_base_actor.Actor):
@classmethod
def executor(cls, *args, **kwargs):
return ProcessActorExecutor(cls, *args, **kwargs)
# executor.__doc__ = _base_actor.Actor.executor.__doc___
# ProcessActor.__doc__ = _base_actor.Actor.__doc___
|
apache-2.0
| -5,419,728,240,105,604,000
| 37.198052
| 87
| 0.569571
| false
| 4.586745
| false
| false
| false
|
andrewyoung1991/supriya
|
supriya/tools/requesttools/BufferGetRequest.py
|
1
|
2109
|
# -*- encoding: utf-8 -*-
from supriya.tools import osctools
from supriya.tools.requesttools.Request import Request
class BufferGetRequest(Request):
r'''A /b_get request.
::
>>> from supriya.tools import requesttools
>>> request = requesttools.BufferGetRequest(
... buffer_id=23,
... indices=(0, 4, 8, 16),
... )
>>> request
BufferGetRequest(
buffer_id=23,
indices=(0, 4, 8, 16)
)
::
>>> message = request.to_osc_message()
>>> message
OscMessage(42, 23, 0, 4, 8, 16)
::
>>> message.address == requesttools.RequestId.BUFFER_GET
True
'''
### CLASS VARIABLES ###
__slots__ = (
'_buffer_id',
'_indices',
)
### INITIALIZER ###
def __init__(
self,
buffer_id=None,
indices=None,
):
Request.__init__(self)
self._buffer_id = int(buffer_id)
self._indices = tuple(int(index) for index in indices)
### PUBLIC METHODS ###
def to_osc_message(self):
request_id = int(self.request_id)
buffer_id = int(self.buffer_id)
contents = [
request_id,
buffer_id,
]
if self.indices:
for index in self.indices:
contents.append(index)
message = osctools.OscMessage(*contents)
return message
### PUBLIC PROPERTIES ###
@property
def buffer_id(self):
return self._buffer_id
@property
def indices(self):
return self._indices
@property
def response_specification(self):
from supriya.tools import responsetools
return {
responsetools.BufferSetResponse: {
'buffer_id': self.buffer_id,
},
responsetools.FailResponse: {
'failed_command': '/b_get',
}
}
@property
def request_id(self):
from supriya.tools import requesttools
return requesttools.RequestId.BUFFER_GET
|
mit
| 8,440,430,544,276,011,000
| 21.688172
| 64
| 0.515884
| false
| 4.119141
| false
| false
| false
|
lucidfrontier45/RethinkPool
|
rethinkpool/__init__.py
|
1
|
2195
|
from __future__ import absolute_import
from logging import getLogger
import rethinkdb as r
from future.builtins import range
from future.moves.queue import Queue
logger = getLogger("RethinkPool")
class ConnectionResource(object):
def __init__(self, queue, conn, **kwds):
self._queue = queue
if conn:
self._conn = conn
else:
self._conn = r.connect(**kwds)
@property
def conn(self):
return self._conn
def release(self):
if self._conn:
logger.info("release a connection")
self._queue.put_nowait(self._conn)
self._conn = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.release()
def __del__(self):
self.release()
class RethinkPool(object):
def __init__(self, max_conns=10, initial_conns=0, get_timeout=10, **kwds):
"""
:param max_conns: maximum number of connections
        :param initial_conns: number of connections to establish initially
:param get_timeout: timeout for obtaining a connection from the queue
:param host, port, ...: same as r.connect
"""
self._current_conns = 0
self.get_timeout = get_timeout
self._connection_info = kwds
self._queue = Queue(max_conns)
        for _ in range(min(initial_conns, max_conns)):
self._queue.put(self._create_connection())
def _create_connection(self):
conn = r.connect(**self._connection_info)
self._current_conns += 1
return conn
@property
def current_conns(self):
return self._current_conns
def get_resource(self):
"""
obtain a connection resource from the queue
:return: ConnectionResource object
"""
if self._queue.empty() and self.current_conns < self._queue.maxsize:
logger.info("create a new connection")
conn = self._create_connection()
else:
logger.info("reuse a connection")
conn = self._queue.get(True, self.get_timeout)
return ConnectionResource(self._queue, conn)
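# Minimal usage sketch (illustrative only; the host and table name are
# assumptions, not part of this module): acquire a pooled connection as a
# context manager so it is returned to the pool on exit.
#
#   pool = RethinkPool(max_conns=5, initial_conns=1, host="localhost", port=28015)
#   with pool.get_resource() as res:
#       r.table("users").run(res.conn)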
|
apache-2.0
| 2,460,425,041,709,897,700
| 26.4375
| 78
| 0.596355
| false
| 4.173004
| false
| false
| false
|
dynamikdev/transportutils
|
transportutils/driver.py
|
1
|
2790
|
"""
Tools for calculate hours/periods of a Truck's driver
Actually only valable in France
"""
from datetime import timedelta
from dateutil import rrule
import pytz
# GMT = pytz.timezone('UTC')
# fr = pytz.timezone('Europe/Paris')
ENDNIGHT = 6
STARTNIGHT = 21
class DriverDaysDates(object):
"""
"""
def __init__(self,startOfDay,endOfDay):
self.startOfDay = startOfDay
self.endOfDay = endOfDay
self.daytimedelta = self.nighttimedelta = timedelta()
self.change = list(rrule.rrule(rrule.DAILY,
byhour=(ENDNIGHT,STARTNIGHT),
byminute=0,
bysecond=0,
dtstart=startOfDay,
until=endOfDay))
        if len(self.change) == 0:
            # no 06:00/21:00 boundary falls inside the interval
            if len(list(rrule.rrule(rrule.DAILY,
                                    byhour=0,
                                    byminute=0,
                                    bysecond=0,
                                    dtstart=self.startOfDay,
                                    until=self.endOfDay))) > 0 or self.startOfDay.hour >= STARTNIGHT or self.startOfDay.hour < ENDNIGHT:
                # the interval crosses midnight or starts at night,
                # so all of it counts as night
                self.nighttimedelta = abs(self.endOfDay - self.startOfDay)
                self.daytimedelta = timedelta()
            else:
                # otherwise all of it counts as day
                self.nighttimedelta = timedelta()
                self.daytimedelta = abs(self.endOfDay - self.startOfDay)
else:
self.calcthedelta()
def calcthedelta(self):
lstdate = [self.startOfDay] + list(self.change) + [self.endOfDay]
# print lstdate
for k in range(1, len(lstdate)):
# print k,lstdate[k-1],lstdate[k]
isNight = False
            if lstdate[k-1] in self.change:  # segment starts at a boundary
                if lstdate[k-1].hour == STARTNIGHT:
                    isNight = True
            if lstdate[k] in self.change:  # segment ends at a boundary
if lstdate[k].hour == ENDNIGHT:
isNight = True
if isNight:
self.nighttimedelta += abs(lstdate[k] - lstdate[k-1])
else:
self.daytimedelta += abs(lstdate[k] - lstdate[k-1])
class DriverDates(object):
"""
"""
DriverTimeZone = pytz.timezone('Europe/Paris')
def __init__(self, datedeb, datefin):
self.datedeb = datedeb.astimezone(self.DriverTimeZone)
self.datefin = datefin.astimezone(self.DriverTimeZone)
lstdate = [self.datedeb] + \
list(rrule.rrule(rrule.DAILY,
byhour=0,
byminute=0,
bysecond=0,
dtstart=self.datedeb,
until=self.datefin)) +\
[self.datefin]
self.days = [DriverDaysDates(lstdate[k-1], lstdate[k]) for k in range(1, len(lstdate))]
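# Hedged usage sketch (the timestamps are made-up examples): split a driving
# shift into per-day day/night durations.
#
#   import datetime
#   paris = pytz.timezone('Europe/Paris')
#   start = paris.localize(datetime.datetime(2017, 1, 2, 19, 0))
#   end = paris.localize(datetime.datetime(2017, 1, 3, 2, 0))
#   shift = DriverDates(start, end)
#   for day in shift.days:
#       print("day: %s night: %s" % (day.daytimedelta, day.nighttimedelta))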
|
gpl-2.0
| -3,237,510,786,302,446,000
| 33.02439
| 108
| 0.567384
| false
| 3.586118
| false
| false
| false
|
Stefan-Korner/SpacePyLibrary
|
CCSDS/CLTU.py
|
1
|
6692
|
#******************************************************************************
# (C) 2018, Stefan Korner, Austria *
# *
# The Space Python Library is free software; you can redistribute it and/or *
# modify it under under the terms of the MIT License as published by the *
# Massachusetts Institute of Technology. *
# *
# The Space Python Library is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License *
# for more details. *
#******************************************************************************
# CCSDS Stack - CLTU Handling Module *
#******************************************************************************
import array
import UTIL.BCH
#############
# constants #
#############
# CLTU header
CLTU_START_SEQUENCE = [0xEB, 0x90]
CLTU_START_SEQUENCE_SIZE = len(CLTU_START_SEQUENCE)
# fill bytes for last code block
CLTU_FILL_BYTE = 0x55
# compliant with SCOS-2000
CLTU_TRAILER_SEQUENCE = [0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55]
# compliant with CCSDS specification
#CLTU_TRAILER_SEQUENCE = [0xC5, 0xC5, 0xC5, 0xC5, 0xC5, 0xC5, 0xC5, 0x79]
CLTU_TRAILER_SEQUENCE_SIZE = len(CLTU_TRAILER_SEQUENCE)
# derived constants
BCH_NETTO_SIZE = UTIL.BCH.CODE_BLOCK_SIZE - 1
BCH_MAX_NETTO_INDEX = BCH_NETTO_SIZE - 1
#############
# functions #
#############
# -----------------------------------------------------------------------------
def encodeCltu(frame):
"""Converts a TC Frame into a CLTU"""
# iterate over the frame bytes, which are copied
# into the CLTU body together with BCH code bytes
frameIdx = 0
frameSize = len(frame)
nrCltuCodeBlocks = (frameSize + BCH_MAX_NETTO_INDEX) // BCH_NETTO_SIZE
cltuBodySize = nrCltuCodeBlocks * UTIL.BCH.CODE_BLOCK_SIZE
cltuBody = array.array("B", [0] * cltuBodySize)
cltuBodyIdx = 0
codeBlkIdx = 0
while frameIdx < frameSize:
# handle start of a code block
if codeBlkIdx == 0:
sreg = UTIL.BCH.encodeStart()
# take the next byte from the frame for the CLTU and the BCH encoding
nextByte = frame[frameIdx]
cltuBody[cltuBodyIdx] = nextByte
sreg = UTIL.BCH.encodeStep(sreg, nextByte)
frameIdx += 1
cltuBodyIdx += 1
codeBlkIdx += 1
# handle end of a code block
if codeBlkIdx >= BCH_NETTO_SIZE:
code = UTIL.BCH.encodeStop(sreg)
cltuBody[cltuBodyIdx] = code
cltuBodyIdx += 1
codeBlkIdx = 0
# fill up remaining bytes in the cltuBody (incl. BCH code byte)
while cltuBodyIdx < cltuBodySize:
nextByte = CLTU_FILL_BYTE
cltuBody[cltuBodyIdx] = nextByte
sreg = UTIL.BCH.encodeStep(sreg, nextByte)
cltuBodyIdx += 1
codeBlkIdx += 1
# handle end of the code block
if codeBlkIdx >= BCH_NETTO_SIZE:
code = UTIL.BCH.encodeStop(sreg)
cltuBody[cltuBodyIdx] = code
cltuBodyIdx += 1
# CLTU body is completely processed
return (array.array("B", CLTU_START_SEQUENCE) +
cltuBody +
array.array("B", CLTU_TRAILER_SEQUENCE))
# -----------------------------------------------------------------------------
def decodeCltu(cltu):
"""Converts a CLTU into a TC Frame"""
# Note: the returned frame might contain additional fill bytes,
# these bytes must be removed at the frame layer
# calculate the frame size from the CLTU size
cltuSize = len(cltu)
cltuBodySize = cltuSize - CLTU_START_SEQUENCE_SIZE - CLTU_TRAILER_SEQUENCE_SIZE
# check general CLTU properties
if cltuBodySize < 0:
return None
if cltuBodySize % UTIL.BCH.CODE_BLOCK_SIZE != 0:
return None
if cltu[:CLTU_START_SEQUENCE_SIZE] != array.array("B", CLTU_START_SEQUENCE):
return None
if cltu[-CLTU_TRAILER_SEQUENCE_SIZE:] != array.array("B", CLTU_TRAILER_SEQUENCE):
return None
# iterate over the CLTU body bytes, which are copied
# into the frame, BCH code is checked during the iteration
nrCltuCodeBlocks = cltuBodySize // UTIL.BCH.CODE_BLOCK_SIZE
frameSize = nrCltuCodeBlocks * BCH_NETTO_SIZE
frame = array.array("B", [0] * frameSize)
frameIdx = 0
cltuIdx = CLTU_START_SEQUENCE_SIZE
codeBlkIdx = 0
while frameIdx < frameSize:
# handle start of a code block
if codeBlkIdx == 0:
sreg = UTIL.BCH.encodeStart()
# take the next byte from the CLTU for the frame and the BCH decoding
nextByte = cltu[cltuIdx]
frame[frameIdx] = nextByte
sreg = UTIL.BCH.encodeStep(sreg, nextByte)
frameIdx += 1
cltuIdx += 1
codeBlkIdx += 1
# handle end of a code block
if codeBlkIdx >= BCH_NETTO_SIZE:
code = UTIL.BCH.encodeStop(sreg)
if cltu[cltuIdx] != code:
return None
cltuIdx += 1
codeBlkIdx = 0
return frame
# -----------------------------------------------------------------------------
def checkCltu(cltu):
"""Checks the consistency of a CLTU"""
# calculate the frame size from the CLTU size
cltuSize = len(cltu)
cltuTrailerStartIdx = cltuSize - CLTU_TRAILER_SEQUENCE_SIZE
cltuBodySize = cltuTrailerStartIdx - CLTU_START_SEQUENCE_SIZE
# check general CLTU properties
if cltuBodySize < 0:
return False, "cltuBodySize too short"
if cltuBodySize % UTIL.BCH.CODE_BLOCK_SIZE != 0:
return False, "wrong cltuBodySize"
for i in range(0, CLTU_START_SEQUENCE_SIZE):
if cltu[i] != CLTU_START_SEQUENCE[i]:
return False, "wrong cltu start sequence"
for i in range(-CLTU_TRAILER_SEQUENCE_SIZE, 0):
if cltu[i] != CLTU_TRAILER_SEQUENCE[i]:
return False, "wrong cltu trailer sequence"
# iterate over the CLTU body bytes and check the BCH code
nrCltuCodeBlocks = cltuBodySize // UTIL.BCH.CODE_BLOCK_SIZE
frameSize = nrCltuCodeBlocks * BCH_NETTO_SIZE
cltuIdx = CLTU_START_SEQUENCE_SIZE
codeBlkIdx = 0
while cltuIdx < cltuTrailerStartIdx:
# handle start of a code block
if codeBlkIdx == 0:
sreg = UTIL.BCH.encodeStart()
# take the next byte from the CLTU for the frame and the BCH decoding
nextByte = cltu[cltuIdx]
sreg = UTIL.BCH.encodeStep(sreg, nextByte)
cltuIdx += 1
codeBlkIdx += 1
# handle end of a code block
if codeBlkIdx >= BCH_NETTO_SIZE:
code = UTIL.BCH.encodeStop(sreg)
if cltu[cltuIdx] != code:
return False, "wrong BCH check byte"
cltuIdx += 1
codeBlkIdx = 0
return True, "cltu OK"
|
mit
| -4,633,000,611,466,024,000
| 39.313253
| 83
| 0.59997
| false
| 3.405598
| false
| false
| false
|
chandrikas/sm
|
drivers/blktap2.py
|
1
|
90416
|
#!/usr/bin/env python
#
# Copyright (C) Citrix Systems Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# blktap2: blktap/tapdisk management layer
#
import os
import sys
import re
import time
import copy
from lock import Lock
import util
import xmlrpclib
import httplib
import errno
import subprocess
import syslog as _syslog
import glob
import json
import xs_errors
import XenAPI
import scsiutil
from syslog import openlog, syslog
from stat import * # S_ISBLK(), ...
import nfs
import resetvdis
import vhdutil
import lvhdutil
# For RRDD Plugin Registration
from xmlrpclib import ServerProxy, Transport
from socket import socket, AF_UNIX, SOCK_STREAM
from httplib import HTTP, HTTPConnection
PLUGIN_TAP_PAUSE = "tapdisk-pause"
SOCKPATH = "/var/xapi/xcp-rrdd"
NUM_PAGES_PER_RING = 32 * 11
MAX_FULL_RINGS = 8
POOL_NAME_KEY = "mem-pool"
POOL_SIZE_KEY = "mem-pool-size-rings"
ENABLE_MULTIPLE_ATTACH = "/etc/xensource/allow_multiple_vdi_attach"
NO_MULTIPLE_ATTACH = not (os.path.exists(ENABLE_MULTIPLE_ATTACH))
class UnixStreamHTTPConnection(HTTPConnection):
def connect(self):
self.sock = socket(AF_UNIX, SOCK_STREAM)
self.sock.connect(SOCKPATH)
class UnixStreamHTTP(HTTP):
_connection_class = UnixStreamHTTPConnection
class UnixStreamTransport(Transport):
def make_connection(self, host):
return UnixStreamHTTP(SOCKPATH) # overridden, but prevents IndexError
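# Illustrative sketch (an assumption about how these helpers are meant to be
# used, not shown in this file): they let xmlrpclib speak XML-RPC to xcp-rrdd
# over its Unix-domain socket at SOCKPATH, e.g.
#
#   proxy = ServerProxy('http://localhost', transport=UnixStreamTransport())
#   # any call on `proxy` is then dispatched over SOCKPATH instead of TCP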
def locking(excType, override=True):
def locking2(op):
def wrapper(self, *args):
self.lock.acquire()
try:
try:
ret = op(self, *args)
except (util.CommandException, util.SMException, XenAPI.Failure), e:
util.logException("BLKTAP2:%s" % op)
msg = str(e)
if isinstance(e, util.CommandException):
msg = "Command %s failed (%s): %s" % \
(e.cmd, e.code, e.reason)
if override:
raise xs_errors.XenError(excType, opterr=msg)
else:
raise
except:
util.logException("BLKTAP2:%s" % op)
raise
finally:
self.lock.release()
return ret
return wrapper
return locking2
class RetryLoop(object):
def __init__(self, backoff, limit):
self.backoff = backoff
self.limit = limit
def __call__(self, f):
def loop(*__t, **__d):
attempt = 0
while True:
attempt += 1
try:
return f(*__t, **__d)
except self.TransientFailure, e:
e = e.exception
if attempt >= self.limit: raise e
time.sleep(self.backoff)
return loop
class TransientFailure(Exception):
def __init__(self, exception):
self.exception = exception
def retried(**args): return RetryLoop(**args)
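# Hedged usage sketch (the decorated function and its error type below are
# hypothetical): `retried` retries a call up to `limit` times, sleeping
# `backoff` seconds between attempts, whenever it raises
# RetryLoop.TransientFailure; after the last attempt the wrapped exception
# is re-raised.
#
#   @retried(backoff=.5, limit=10)
#   def flaky_call():
#       try:
#           return do_something()
#       except SomeTransientError, e:
#           raise RetryLoop.TransientFailure(e)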
class TapCtl(object):
"""Tapdisk IPC utility calls."""
PATH = "/usr/sbin/tap-ctl"
def __init__(self, cmd, p):
self.cmd = cmd
self._p = p
self.stdout = p.stdout
class CommandFailure(Exception):
"""TapCtl cmd failure."""
def __init__(self, cmd, **info):
self.cmd = cmd
self.info = info
def __str__(self):
items = self.info.iteritems()
info = ", ".join("%s=%s" % item
for item in items)
return "%s failed: %s" % (self.cmd, info)
# Trying to get a non-existent attribute throws an AttributeError
# exception
def __getattr__(self, key):
if self.info.has_key(key): return self.info[key]
return object.__getattribute__(self, key)
# Retrieves the error code returned by the command. If the error code
# was not supplied at object-construction time, zero is returned.
def get_error_code(self):
key = 'status'
if self.info.has_key(key):
return self.info[key]
else:
return 0
@classmethod
def __mkcmd_real(cls, args):
return [ cls.PATH ] + map(str, args)
__next_mkcmd = __mkcmd_real
@classmethod
def _mkcmd(cls, args):
__next_mkcmd = cls.__next_mkcmd
cls.__next_mkcmd = cls.__mkcmd_real
return __next_mkcmd(args)
@classmethod
def failwith(cls, status, prev=False):
"""
Fail next invocation with @status. If @prev is true, execute
the original command
"""
__prev_mkcmd = cls.__next_mkcmd
@classmethod
def __mkcmd(cls, args):
if prev:
cmd = __prev_mkcmd(args)
cmd = "'%s' && exit %d" % ("' '".join(cmd), status)
else:
cmd = "exit %d" % status
return [ '/bin/sh', '-c', cmd ]
cls.__next_mkcmd = __mkcmd
__strace_n = 0
@classmethod
def strace(cls):
"""
Run next invocation through strace.
Output goes to /tmp/tap-ctl.<sm-pid>.<n>; <n> counts invocations.
"""
__prev_mkcmd = cls.__next_mkcmd
@classmethod
def __next_mkcmd(cls, args):
# pylint: disable = E1101
cmd = __prev_mkcmd(args)
tracefile = "/tmp/%s.%d.%d" % (os.path.basename(cls.PATH),
os.getpid(),
cls.__strace_n)
cls.__strace_n += 1
return \
[ '/usr/bin/strace', '-o', tracefile, '--'] + cmd
cls.__next_mkcmd = __next_mkcmd
@classmethod
def _call(cls, args, quiet = False, input = None):
"""
Spawn a tap-ctl process. Return a TapCtl invocation.
Raises a TapCtl.CommandFailure if subprocess creation failed.
"""
cmd = cls._mkcmd(args)
if not quiet:
util.SMlog(cmd)
try:
p = subprocess.Popen(cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
if input:
p.stdin.write(input)
p.stdin.close()
except OSError, e:
raise cls.CommandFailure(cmd, errno=e.errno)
return cls(cmd, p)
def _errmsg(self):
output = map(str.rstrip, self._p.stderr)
return "; ".join(output)
def _wait(self, quiet = False):
"""
Reap the child tap-ctl process of this invocation.
Raises a TapCtl.CommandFailure on non-zero exit status.
"""
status = self._p.wait()
if not quiet:
util.SMlog(" = %d" % status)
if status == 0: return
info = { 'errmsg' : self._errmsg(),
'pid' : self._p.pid }
if status < 0:
info['signal'] = -status
else:
info['status'] = status
raise self.CommandFailure(self.cmd, **info)
@classmethod
def _pread(cls, args, quiet = False, input = None):
"""
Spawn a tap-ctl invocation and read a single line.
"""
tapctl = cls._call(args=args, quiet=quiet, input=input)
output = tapctl.stdout.readline().rstrip()
tapctl._wait(quiet)
return output
@staticmethod
def _maybe(opt, parm):
if parm is not None: return [ opt, parm ]
return []
@classmethod
def __list(cls, minor = None, pid = None, _type = None, path = None):
args = [ "list" ]
args += cls._maybe("-m", minor)
args += cls._maybe("-p", pid)
args += cls._maybe("-t", _type)
args += cls._maybe("-f", path)
tapctl = cls._call(args, True)
for line in tapctl.stdout:
# FIXME: tap-ctl writes error messages to stdout and
# confuses this parser
if line == "blktap kernel module not installed\n":
                # This isn't pretty, but neither is confusing stdout and
                # stderr, and at least the error message describes the fix
raise Exception, "blktap kernel module not installed: try 'modprobe blktap'"
row = {}
for field in line.rstrip().split(' ', 3):
bits = field.split('=')
if len(bits) == 2:
key, val = field.split('=')
if key in ('pid', 'minor'):
row[key] = int(val, 10)
elif key in ('state'):
row[key] = int(val, 0x10)
else:
row[key] = val
else:
util.SMlog("Ignoring unexpected tap-ctl output: %s" % repr(field))
yield row
tapctl._wait(True)
@classmethod
@retried(backoff=.5, limit=10)
def list(cls, **args):
# FIXME. We typically get an EPROTO when uevents interleave
# with SM ops and a tapdisk shuts down under our feet. Should
# be fixed in SM.
try:
return list(cls.__list(**args))
except cls.CommandFailure, e:
transient = [ errno.EPROTO, errno.ENOENT ]
if e.status in transient:
raise RetryLoop.TransientFailure(e)
raise
@classmethod
def allocate(cls, devpath = None):
args = [ "allocate" ]
args += cls._maybe("-d", devpath)
return cls._pread(args)
@classmethod
def free(cls, minor):
args = [ "free", "-m", minor ]
cls._pread(args)
@classmethod
@retried(backoff=.5, limit=10)
def spawn(cls):
args = [ "spawn" ]
try:
pid = cls._pread(args)
return int(pid)
except cls.CommandFailure as ce:
# intermittent failures to spawn. CA-292268
if ce.status == 1:
raise RetryLoop.TransientFailure(ce)
raise
@classmethod
def attach(cls, pid, minor):
args = [ "attach", "-p", pid, "-m", minor ]
cls._pread(args)
@classmethod
def detach(cls, pid, minor):
args = [ "detach", "-p", pid, "-m", minor ]
cls._pread(args)
@classmethod
def open(cls, pid, minor, _type, _file, options):
params = Tapdisk.Arg(_type, _file)
args = [ "open", "-p", pid, "-m", minor, '-a', str(params) ]
input = None
if options.get("rdonly"):
args.append('-R')
if options.get("lcache"):
args.append("-r")
if options.get("existing_prt") != None:
args.append("-e")
args.append(str(options["existing_prt"]))
if options.get("secondary"):
args.append("-2")
args.append(options["secondary"])
if options.get("standby"):
args.append("-s")
if options.get("timeout"):
args.append("-t")
args.append(str(options["timeout"]))
if not options.get("o_direct", True):
args.append("-D")
if options.get('cbtlog'):
args.extend(['-C', options['cbtlog']])
if options.get('key_hash'):
import plugins
key_hash = options['key_hash']
vdi_uuid = options['vdi_uuid']
key = plugins.load_key(key_hash, vdi_uuid)
if not key:
raise util.SMException("No key found with key hash {}".format(key_hash))
input = key
args.append('-E')
cls._pread(args=args, input=input)
@classmethod
def close(cls, pid, minor, force = False):
args = [ "close", "-p", pid, "-m", minor ]
if force: args += [ "-f" ]
cls._pread(args)
@classmethod
def pause(cls, pid, minor):
args = [ "pause", "-p", pid, "-m", minor ]
cls._pread(args)
@classmethod
def unpause(cls, pid, minor, _type = None, _file = None, mirror = None,
cbtlog = None):
args = [ "unpause", "-p", pid, "-m", minor ]
if mirror:
args.extend(["-2", mirror])
if _type and _file:
params = Tapdisk.Arg(_type, _file)
args += [ "-a", str(params) ]
if cbtlog:
args.extend(["-c", cbtlog])
cls._pread(args)
@classmethod
def stats(cls, pid, minor):
args = [ "stats", "-p", pid, "-m", minor ]
return cls._pread(args, quiet = True)
@classmethod
def major(cls):
args = [ "major" ]
major = cls._pread(args)
return int(major)
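# Hedged sketch of the spawn/attach/open sequence these wrappers drive (the
# VHD path is a placeholder; the real orchestration lives in
# Tapdisk.launch_on_tap and Tapdisk.shutdown below):
#
#   blktap = Blktap.allocate()
#   pid = TapCtl.spawn()
#   TapCtl.attach(pid, blktap.minor)
#   TapCtl.open(pid, blktap.minor, 'vhd', '/path/to/disk.vhd', {})
#   ...
#   TapCtl.close(pid, blktap.minor)
#   TapCtl.detach(pid, blktap.minor)
#   blktap.free()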
class TapdiskExists(Exception):
"""Tapdisk already running."""
def __init__(self, tapdisk):
self.tapdisk = tapdisk
def __str__(self):
return "%s already running" % self.tapdisk
class TapdiskNotRunning(Exception):
"""No such Tapdisk."""
def __init__(self, **attrs):
self.attrs = attrs
def __str__(self):
items = self.attrs.iteritems()
attrs = ", ".join("%s=%s" % attr
for attr in items)
return "No such Tapdisk(%s)" % attrs
class TapdiskNotUnique(Exception):
"""More than one tapdisk on one path."""
def __init__(self, tapdisks):
self.tapdisks = tapdisks
def __str__(self):
tapdisks = map(str, self.tapdisks)
return "Found multiple tapdisks: %s" % tapdisks
class TapdiskFailed(Exception):
"""Tapdisk launch failure."""
def __init__(self, arg, err):
self.arg = arg
self.err = err
def __str__(self):
return "Tapdisk(%s): %s" % (self.arg, self.err)
def get_error(self):
return self.err
class TapdiskInvalidState(Exception):
"""Tapdisk pause/unpause failure"""
def __init__(self, tapdisk):
self.tapdisk = tapdisk
def __str__(self):
return str(self.tapdisk)
def mkdirs(path, mode=0777):
if not os.path.exists(path):
parent, subdir = os.path.split(path)
assert parent != path
try:
if parent:
mkdirs(parent, mode)
if subdir:
os.mkdir(path, mode)
except OSError, e:
if e.errno != errno.EEXIST:
raise
class KObject(object):
SYSFS_CLASSTYPE = None
def sysfs_devname(self):
raise NotImplementedError("sysfs_devname is undefined")
class Attribute(object):
SYSFS_NODENAME = None
def __init__(self, path):
self.path = path
@classmethod
def from_kobject(cls, kobj):
path = "%s/%s" % (kobj.sysfs_path(), cls.SYSFS_NODENAME)
return cls(path)
class NoSuchAttribute(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return "No such attribute: %s" % self.name
def _open(self, mode='r'):
try:
return file(self.path, mode)
except IOError, e:
if e.errno == errno.ENOENT:
raise self.NoSuchAttribute(self)
raise
def readline(self):
f = self._open('r')
s = f.readline().rstrip()
f.close()
return s
def writeline(self, val):
f = self._open('w')
f.write(val)
f.close()
class ClassDevice(KObject):
@classmethod
def sysfs_class_path(cls):
return "/sys/class/%s" % cls.SYSFS_CLASSTYPE
def sysfs_path(self):
return "%s/%s" % (self.sysfs_class_path(),
self.sysfs_devname())
class Blktap(ClassDevice):
DEV_BASEDIR = '/dev/xen/blktap-2'
SYSFS_CLASSTYPE = "blktap2"
def __init__(self, minor):
self.minor = minor
self._pool = None
self._task = None
@classmethod
def allocate(cls):
# FIXME. Should rather go into init.
mkdirs(cls.DEV_BASEDIR)
devname = TapCtl.allocate()
minor = Tapdisk._parse_minor(devname)
return cls(minor)
def free(self):
TapCtl.free(self.minor)
def __str__(self):
return "%s(minor=%d)" % (self.__class__.__name__, self.minor)
def sysfs_devname(self):
return "blktap!blktap%d" % self.minor
class Pool(Attribute):
SYSFS_NODENAME = "pool"
def get_pool_attr(self):
if not self._pool:
self._pool = self.Pool.from_kobject(self)
return self._pool
def get_pool_name(self):
return self.get_pool_attr().readline()
def set_pool_name(self, name):
self.get_pool_attr().writeline(name)
def set_pool_size(self, pages):
self.get_pool().set_size(pages)
def get_pool(self):
return BlktapControl.get_pool(self.get_pool_name())
def set_pool(self, pool):
self.set_pool_name(pool.name)
class Task(Attribute):
SYSFS_NODENAME = "task"
def get_task_attr(self):
if not self._task:
self._task = self.Task.from_kobject(self)
return self._task
def get_task_pid(self):
pid = self.get_task_attr().readline()
try:
return int(pid)
except ValueError:
return None
def find_tapdisk(self):
pid = self.get_task_pid()
if pid is None: return None
return Tapdisk.find(pid=pid, minor=self.minor)
def get_tapdisk(self):
tapdisk = self.find_tapdisk()
if not tapdisk:
raise TapdiskNotRunning(minor=self.minor)
return tapdisk
class Tapdisk(object):
TYPES = [ 'aio', 'vhd' ]
def __init__(self, pid, minor, _type, path, state):
self.pid = pid
self.minor = minor
self.type = _type
self.path = path
self.state = state
self._dirty = False
self._blktap = None
def __str__(self):
state = self.pause_state()
return "Tapdisk(%s, pid=%d, minor=%s, state=%s)" % \
(self.get_arg(), self.pid, self.minor, state)
@classmethod
def list(cls, **args):
for row in TapCtl.list(**args):
args = { 'pid' : None,
'minor' : None,
'state' : None,
'_type' : None,
'path' : None }
for key, val in row.iteritems():
if key in args:
args[key] = val
if 'args' in row:
image = Tapdisk.Arg.parse(row['args'])
args['_type'] = image.type
args['path'] = image.path
if None in args.values():
continue
yield Tapdisk(**args)
@classmethod
def find(cls, **args):
found = list(cls.list(**args))
if len(found) > 1:
raise TapdiskNotUnique(found)
if found:
return found[0]
return None
@classmethod
def find_by_path(cls, path):
return cls.find(path=path)
@classmethod
def find_by_minor(cls, minor):
return cls.find(minor=minor)
@classmethod
def get(cls, **attrs):
tapdisk = cls.find(**attrs)
if not tapdisk:
raise TapdiskNotRunning(**attrs)
return tapdisk
@classmethod
def from_path(cls, path):
return cls.get(path=path)
@classmethod
def from_minor(cls, minor):
return cls.get(minor=minor)
@classmethod
def __from_blktap(cls, blktap):
tapdisk = cls.from_minor(minor=blktap.minor)
tapdisk._blktap = blktap
return tapdisk
def get_blktap(self):
if not self._blktap:
self._blktap = Blktap(self.minor)
return self._blktap
class Arg:
def __init__(self, _type, path):
self.type = _type
self.path = path
def __str__(self):
return "%s:%s" % (self.type, self.path)
@classmethod
def parse(cls, arg):
try:
_type, path = arg.split(":", 1)
except ValueError:
raise cls.InvalidArgument(arg)
if _type not in Tapdisk.TYPES:
raise cls.InvalidType(_type)
return cls(_type, path)
class InvalidType(Exception):
def __init__(self, _type):
self.type = _type
def __str__(self):
return "Not a Tapdisk type: %s" % self.type
class InvalidArgument(Exception):
def __init__(self, arg):
self.arg = arg
def __str__(self):
return "Not a Tapdisk image: %s" % self.arg
def get_arg(self):
return self.Arg(self.type, self.path)
def get_devpath(self):
return "%s/tapdev%d" % (Blktap.DEV_BASEDIR, self.minor)
@classmethod
def launch_from_arg(cls, arg):
arg = cls.Arg.parse(arg)
return cls.launch(arg.path, arg.type, False)
@classmethod
def launch_on_tap(cls, blktap, path, _type, options):
tapdisk = cls.find_by_path(path)
if tapdisk:
raise TapdiskExists(tapdisk)
minor = blktap.minor
try:
pid = TapCtl.spawn()
try:
TapCtl.attach(pid, minor)
try:
TapCtl.open(pid, minor, _type, path, options)
try:
tapdisk = cls.__from_blktap(blktap)
node = '/sys/dev/block/%d:%d' % (tapdisk.major(), tapdisk.minor)
util.set_scheduler_sysfs_node(node, 'noop')
return tapdisk
except:
TapCtl.close(pid, minor)
raise
except:
TapCtl.detach(pid, minor)
raise
except:
exc_info = sys.exc_info()
# FIXME: Should be tap-ctl shutdown.
try:
import signal
os.kill(pid, signal.SIGTERM)
os.waitpid(pid, 0)
finally:
raise exc_info[0], exc_info[1], exc_info[2]
except TapCtl.CommandFailure, ctl:
util.logException(ctl)
if ('/dev/xapi/cd/' in path and
'status' in ctl.info and
ctl.info['status'] == 123): # ENOMEDIUM (No medium found)
raise xs_errors.XenError('TapdiskDriveEmpty')
else:
raise TapdiskFailed(cls.Arg(_type, path), ctl)
@classmethod
def launch(cls, path, _type, rdonly):
blktap = Blktap.allocate()
try:
return cls.launch_on_tap(blktap, path, _type, {"rdonly": rdonly})
except:
blktap.free()
raise
def shutdown(self, force = False):
TapCtl.close(self.pid, self.minor, force)
TapCtl.detach(self.pid, self.minor)
self.get_blktap().free()
def pause(self):
if not self.is_running():
raise TapdiskInvalidState(self)
TapCtl.pause(self.pid, self.minor)
self._set_dirty()
def unpause(self, _type=None, path=None, mirror=None, cbtlog = None):
if not self.is_paused():
raise TapdiskInvalidState(self)
# FIXME: should the arguments be optional?
if _type is None: _type = self.type
if path is None: path = self.path
TapCtl.unpause(self.pid, self.minor, _type, path, mirror=mirror,
cbtlog=cbtlog)
self._set_dirty()
def stats(self):
return json.loads(TapCtl.stats(self.pid, self.minor))
#
# NB. dirty/refresh: reload attributes on next access
#
def _set_dirty(self):
self._dirty = True
def _refresh(self, __get):
t = self.from_minor(__get('minor'))
self.__init__(t.pid, t.minor, t.type, t.path, t.state)
def __getattribute__(self, name):
def __get(name):
            # NB. avoid recursion
return object.__getattribute__(self, name)
if __get('_dirty') and \
name in ['minor', 'type', 'path', 'state']:
self._refresh(__get)
self._dirty = False
return __get(name)
class PauseState:
RUNNING = 'R'
PAUSING = 'r'
PAUSED = 'P'
class Flags:
DEAD = 0x0001
CLOSED = 0x0002
QUIESCE_REQUESTED = 0x0004
QUIESCED = 0x0008
PAUSE_REQUESTED = 0x0010
PAUSED = 0x0020
SHUTDOWN_REQUESTED = 0x0040
LOCKING = 0x0080
RETRY_NEEDED = 0x0100
LOG_DROPPED = 0x0200
PAUSE_MASK = PAUSE_REQUESTED|PAUSED
def is_paused(self):
return not not (self.state & self.Flags.PAUSED)
def is_running(self):
return not (self.state & self.Flags.PAUSE_MASK)
def pause_state(self):
if self.state & self.Flags.PAUSED:
return self.PauseState.PAUSED
if self.state & self.Flags.PAUSE_REQUESTED:
return self.PauseState.PAUSING
return self.PauseState.RUNNING
@staticmethod
def _parse_minor(devpath):
regex = '%s/(blktap|tapdev)(\d+)$' % Blktap.DEV_BASEDIR
pattern = re.compile(regex)
groups = pattern.search(devpath)
if not groups:
raise Exception, \
"malformed tap device: '%s' (%s) " % (devpath, regex)
minor = groups.group(2)
return int(minor)
_major = None
@classmethod
def major(cls):
if cls._major: return cls._major
devices = file("/proc/devices")
for line in devices:
row = line.rstrip().split(' ')
if len(row) != 2: continue
major, name = row
if name != 'tapdev': continue
cls._major = int(major)
break
devices.close()
return cls._major
class VDI(object):
"""SR.vdi driver decorator for blktap2"""
CONF_KEY_ALLOW_CACHING = "vdi_allow_caching"
CONF_KEY_MODE_ON_BOOT = "vdi_on_boot"
CONF_KEY_CACHE_SR = "local_cache_sr"
CONF_KEY_O_DIRECT = "o_direct"
LOCK_CACHE_SETUP = "cachesetup"
ATTACH_DETACH_RETRY_SECS = 120
# number of seconds on top of NFS timeo mount option the tapdisk should
# wait before reporting errors. This is to allow a retry to succeed in case
# packets were lost the first time around, which prevented the NFS client
# from returning before the timeo is reached even if the NFS server did
# come back earlier
TAPDISK_TIMEOUT_MARGIN = 30
def __init__(self, uuid, target, driver_info):
self.target = self.TargetDriver(target, driver_info)
self._vdi_uuid = uuid
self._session = target.session
self.xenstore_data = scsiutil.update_XS_SCSIdata(uuid,scsiutil.gen_synthetic_page_data(uuid))
self.__o_direct = None
self.__o_direct_reason = None
self.lock = Lock("vdi", uuid)
def get_o_direct_capability(self, options):
"""Returns True/False based on licensing and caching_params"""
if self.__o_direct is not None:
return self.__o_direct, self.__o_direct_reason
if util.read_caching_is_restricted(self._session):
self.__o_direct = True
self.__o_direct_reason = "LICENSE_RESTRICTION"
elif not ((self.target.vdi.sr.handles("nfs") or self.target.vdi.sr.handles("ext") or self.target.vdi.sr.handles("smb"))):
self.__o_direct = True
self.__o_direct_reason = "SR_NOT_SUPPORTED"
elif not (options.get("rdonly") or self.target.vdi.parent):
util.SMlog(self.target.vdi)
self.__o_direct = True
self.__o_direct_reason = "NO_RO_IMAGE"
elif options.get("rdonly") and not self.target.vdi.parent:
self.__o_direct = True
self.__o_direct_reason = "RO_WITH_NO_PARENT"
elif options.get(self.CONF_KEY_O_DIRECT):
self.__o_direct = True
self.__o_direct_reason = "SR_OVERRIDE"
if self.__o_direct is None:
self.__o_direct = False
self.__o_direct_reason = ""
return self.__o_direct, self.__o_direct_reason
@classmethod
def from_cli(cls, uuid):
import VDI as sm
import XenAPI
session = XenAPI.xapi_local()
session.xenapi.login_with_password('root', '', '', 'SM')
target = sm.VDI.from_uuid(session, uuid)
driver_info = target.sr.srcmd.driver_info
session.xenapi.session.logout()
return cls(uuid, target, driver_info)
@staticmethod
def _tap_type(vdi_type):
"""Map a VDI type (e.g. 'raw') to a tapdisk driver type (e.g. 'aio')"""
return {
'raw' : 'aio',
'vhd' : 'vhd',
'iso' : 'aio', # for ISO SR
'aio' : 'aio', # for LVHD
'file' : 'aio',
'phy' : 'aio'
} [vdi_type]
def get_tap_type(self):
vdi_type = self.target.get_vdi_type()
return VDI._tap_type(vdi_type)
def get_phy_path(self):
return self.target.get_vdi_path()
class UnexpectedVDIType(Exception):
def __init__(self, vdi_type, target):
self.vdi_type = vdi_type
self.target = target
def __str__(self):
return \
"Target %s has unexpected VDI type '%s'" % \
(type(self.target), self.vdi_type)
VDI_PLUG_TYPE = { 'phy' : 'phy', # for NETAPP
'raw' : 'phy',
'aio' : 'tap', # for LVHD raw nodes
'iso' : 'tap', # for ISOSR
'file' : 'tap',
'vhd' : 'tap' }
def tap_wanted(self):
# 1. Let the target vdi_type decide
vdi_type = self.target.get_vdi_type()
try:
plug_type = self.VDI_PLUG_TYPE[vdi_type]
except KeyError:
raise self.UnexpectedVDIType(vdi_type,
self.target.vdi)
if plug_type == 'tap':
return True
elif self.target.vdi.sr.handles('udev'):
return True
# 2. Otherwise, there may be more reasons
#
# .. TBD
return False
class TargetDriver:
"""Safe target driver access."""
# NB. *Must* test caps for optional calls. Some targets
# actually implement some slots, but do not enable them. Just
# try/except would risk breaking compatibility.
def __init__(self, vdi, driver_info):
self.vdi = vdi
self._caps = driver_info['capabilities']
def has_cap(self, cap):
"""Determine if target has given capability"""
return cap in self._caps
def attach(self, sr_uuid, vdi_uuid):
#assert self.has_cap("VDI_ATTACH")
return self.vdi.attach(sr_uuid, vdi_uuid)
def detach(self, sr_uuid, vdi_uuid):
#assert self.has_cap("VDI_DETACH")
self.vdi.detach(sr_uuid, vdi_uuid)
def activate(self, sr_uuid, vdi_uuid):
if self.has_cap("VDI_ACTIVATE"):
return self.vdi.activate(sr_uuid, vdi_uuid)
def deactivate(self, sr_uuid, vdi_uuid):
if self.has_cap("VDI_DEACTIVATE"):
self.vdi.deactivate(sr_uuid, vdi_uuid)
#def resize(self, sr_uuid, vdi_uuid, size):
# return self.vdi.resize(sr_uuid, vdi_uuid, size)
def get_vdi_type(self):
_type = self.vdi.vdi_type
if not _type:
_type = self.vdi.sr.sr_vditype
if not _type:
raise VDI.UnexpectedVDIType(_type, self.vdi)
return _type
def get_vdi_path(self):
return self.vdi.path
class Link(object):
"""Relink a node under a common name"""
# NB. We have to provide the device node path during
# VDI.attach, but currently do not allocate the tapdisk minor
# before VDI.activate. Therefore those link steps where we
# relink existing devices under deterministic path names.
BASEDIR = None
def _mklink(self, target):
raise NotImplementedError("_mklink is not defined")
def _equals(self, target):
raise NotImplementedError("_equals is not defined")
def __init__(self, path):
self._path = path
@classmethod
def from_name(cls, name):
path = "%s/%s" % (cls.BASEDIR, name)
return cls(path)
@classmethod
def from_uuid(cls, sr_uuid, vdi_uuid):
name = "%s/%s" % (sr_uuid, vdi_uuid)
return cls.from_name(name)
def path(self):
return self._path
def stat(self):
return os.stat(self.path())
def mklink(self, target):
path = self.path()
util.SMlog("%s -> %s" % (self, target))
mkdirs(os.path.dirname(path))
try:
self._mklink(target)
except OSError, e:
# We do unlink during teardown, but have to stay
# idempotent. However, a *wrong* target should never
# be seen.
if e.errno != errno.EEXIST: raise
assert self._equals(target), "'%s' not equal to '%s'" % (path, target)
def unlink(self):
try:
os.unlink(self.path())
except OSError, e:
if e.errno != errno.ENOENT: raise
def __str__(self):
path = self.path()
return "%s(%s)" % (self.__class__.__name__, path)
class SymLink(Link):
"""Symlink some file to a common name"""
def readlink(self):
return os.readlink(self.path())
def symlink(self):
return self.path()
def _mklink(self, target):
os.symlink(target, self.path())
def _equals(self, target):
return self.readlink() == target
class DeviceNode(Link):
"""Relink a block device node to a common name"""
@classmethod
def _real_stat(cls, target):
"""stat() not on @target, but its realpath()"""
_target = os.path.realpath(target)
return os.stat(_target)
@classmethod
def is_block(cls, target):
"""Whether @target refers to a block device."""
return S_ISBLK(cls._real_stat(target).st_mode)
def _mklink(self, target):
st = self._real_stat(target)
if not S_ISBLK(st.st_mode):
raise self.NotABlockDevice(target, st)
os.mknod(self.path(), st.st_mode, st.st_rdev)
def _equals(self, target):
target_rdev = self._real_stat(target).st_rdev
return self.stat().st_rdev == target_rdev
def rdev(self):
st = self.stat()
assert S_ISBLK(st.st_mode)
return os.major(st.st_rdev), os.minor(st.st_rdev)
class NotABlockDevice(Exception):
def __init__(self, path, st):
self.path = path
self.st = st
def __str__(self):
return "%s is not a block device: %s" % (self.path, self.st)
class Hybrid(Link):
def __init__(self, path):
VDI.Link.__init__(self, path)
self._devnode = VDI.DeviceNode(path)
self._symlink = VDI.SymLink(path)
def rdev(self):
st = self.stat()
if S_ISBLK(st.st_mode): return self._devnode.rdev()
raise self._devnode.NotABlockDevice(self.path(), st)
def mklink(self, target):
if self._devnode.is_block(target):
self._obj = self._devnode
else:
self._obj = self._symlink
self._obj.mklink(target)
def _equals(self, target):
return self._obj._equals(target)
class PhyLink(SymLink): BASEDIR = "/dev/sm/phy"
# NB. Cannot use DeviceNodes, e.g. FileVDIs aren't bdevs.
class BackendLink(Hybrid): BASEDIR = "/dev/sm/backend"
# NB. Could be SymLinks as well, but saving major,minor pairs in
# Links enables neat state capturing when managing Tapdisks. Note
# that we essentially have a tap-ctl list replacement here. For
# now make it a 'Hybrid'. Likely to collapse into a DeviceNode as
# soon as ISOs are tapdisks.
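    # Illustrative sketch (the UUIDs and tapdev below are placeholders): the
    # attach/activate code publishes the tapdisk node under a deterministic
    # name, e.g.
    #
    #   link = VDI.BackendLink.from_uuid('<sr-uuid>', '<vdi-uuid>')
    #   link.path()    # -> '/dev/sm/backend/<sr-uuid>/<vdi-uuid>'
    #   link.mklink('/dev/xen/blktap-2/tapdev0')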
@staticmethod
def _tap_activate(phy_path, vdi_type, sr_uuid, options, pool_size = None):
tapdisk = Tapdisk.find_by_path(phy_path)
if not tapdisk:
blktap = Blktap.allocate()
blktap.set_pool_name(sr_uuid)
if pool_size:
blktap.set_pool_size(pool_size)
try:
tapdisk = \
Tapdisk.launch_on_tap(blktap,
phy_path,
VDI._tap_type(vdi_type),
options)
except:
blktap.free()
raise
util.SMlog("tap.activate: Launched %s" % tapdisk)
else:
util.SMlog("tap.activate: Found %s" % tapdisk)
return tapdisk.get_devpath()
@staticmethod
def _tap_deactivate(minor):
try:
tapdisk = Tapdisk.from_minor(minor)
except TapdiskNotRunning, e:
util.SMlog("tap.deactivate: Warning, %s" % e)
# NB. Should not be here unless the agent refcount
# broke. Also, a clean shutdown should not have leaked
# the recorded minor.
else:
tapdisk.shutdown()
util.SMlog("tap.deactivate: Shut down %s" % tapdisk)
@classmethod
def tap_pause(cls, session, sr_uuid, vdi_uuid, failfast=False):
"""
Pauses the tapdisk.
session: a XAPI session
sr_uuid: the UUID of the SR on which VDI lives
vdi_uuid: the UUID of the VDI to pause
failfast: controls whether the VDI lock should be acquired in a
non-blocking manner
"""
util.SMlog("Pause request for %s" % vdi_uuid)
vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
session.xenapi.VDI.add_to_sm_config(vdi_ref, 'paused', 'true')
sm_config = session.xenapi.VDI.get_sm_config(vdi_ref)
for key in filter(lambda x: x.startswith('host_'), sm_config.keys()):
host_ref = key[len('host_'):]
util.SMlog("Calling tap-pause on host %s" % host_ref)
if not cls.call_pluginhandler(session, host_ref,
sr_uuid, vdi_uuid, "pause", failfast=failfast):
# Failed to pause node
session.xenapi.VDI.remove_from_sm_config(vdi_ref, 'paused')
return False
return True
@classmethod
def tap_unpause(cls, session, sr_uuid, vdi_uuid, secondary = None,
activate_parents = False):
util.SMlog("Unpause request for %s secondary=%s" % (vdi_uuid, secondary))
vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
sm_config = session.xenapi.VDI.get_sm_config(vdi_ref)
for key in filter(lambda x: x.startswith('host_'), sm_config.keys()):
host_ref = key[len('host_'):]
util.SMlog("Calling tap-unpause on host %s" % host_ref)
if not cls.call_pluginhandler(session, host_ref,
sr_uuid, vdi_uuid, "unpause", secondary, activate_parents):
# Failed to unpause node
return False
session.xenapi.VDI.remove_from_sm_config(vdi_ref, 'paused')
return True
@classmethod
def tap_refresh(cls, session, sr_uuid, vdi_uuid, activate_parents = False):
util.SMlog("Refresh request for %s" % vdi_uuid)
vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
sm_config = session.xenapi.VDI.get_sm_config(vdi_ref)
for key in filter(lambda x: x.startswith('host_'), sm_config.keys()):
host_ref = key[len('host_'):]
util.SMlog("Calling tap-refresh on host %s" % host_ref)
if not cls.call_pluginhandler(session, host_ref,
sr_uuid, vdi_uuid, "refresh", None,
activate_parents=activate_parents):
# Failed to refresh node
return False
return True
@classmethod
def tap_status(cls, session, vdi_uuid):
"""Return True if disk is attached, false if it isn't"""
util.SMlog("Disk status request for %s" % vdi_uuid)
vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
sm_config = session.xenapi.VDI.get_sm_config(vdi_ref)
for key in filter(lambda x: x.startswith('host_'), sm_config.keys()):
return True
return False
@classmethod
def call_pluginhandler(cls, session, host_ref, sr_uuid, vdi_uuid, action,
secondary = None, activate_parents = False, failfast=False):
"""Optionally, activate the parent LV before unpausing"""
try:
args = {"sr_uuid":sr_uuid, "vdi_uuid":vdi_uuid,
"failfast": str(failfast)}
if secondary:
args["secondary"] = secondary
if activate_parents:
args["activate_parents"] = "true"
ret = session.xenapi.host.call_plugin(
host_ref, PLUGIN_TAP_PAUSE, action,
args)
return ret == "True"
except Exception, e:
util.logException("BLKTAP2:call_pluginhandler %s" % e)
return False
def _add_tag(self, vdi_uuid, writable):
util.SMlog("Adding tag to: %s" % vdi_uuid)
attach_mode = "RO"
if writable:
attach_mode = "RW"
vdi_ref = self._session.xenapi.VDI.get_by_uuid(vdi_uuid)
host_ref = self._session.xenapi.host.get_by_uuid(util.get_this_host())
sm_config = self._session.xenapi.VDI.get_sm_config(vdi_ref)
attached_as = util.attached_as(sm_config)
if NO_MULTIPLE_ATTACH and (attached_as == "RW" or \
(attached_as == "RO" and attach_mode == "RW")):
util.SMlog("need to reset VDI %s" % vdi_uuid)
if not resetvdis.reset_vdi(self._session, vdi_uuid, force=False,
term_output=False, writable=writable):
raise util.SMException("VDI %s not detached cleanly" % vdi_uuid)
sm_config = self._session.xenapi.VDI.get_sm_config(vdi_ref)
if sm_config.has_key('paused'):
util.SMlog("Paused or host_ref key found [%s]" % sm_config)
return False
host_key = "host_%s" % host_ref
assert not sm_config.has_key(host_key)
self._session.xenapi.VDI.add_to_sm_config(vdi_ref, host_key,
attach_mode)
sm_config = self._session.xenapi.VDI.get_sm_config(vdi_ref)
if sm_config.has_key('paused'):
util.SMlog("Found paused key, aborting")
self._session.xenapi.VDI.remove_from_sm_config(vdi_ref, host_key)
return False
util.SMlog("Activate lock succeeded")
return True
def _check_tag(self, vdi_uuid):
vdi_ref = self._session.xenapi.VDI.get_by_uuid(vdi_uuid)
sm_config = self._session.xenapi.VDI.get_sm_config(vdi_ref)
if sm_config.has_key('paused'):
util.SMlog("Paused key found [%s]" % sm_config)
return False
return True
def _remove_tag(self, vdi_uuid):
vdi_ref = self._session.xenapi.VDI.get_by_uuid(vdi_uuid)
host_ref = self._session.xenapi.host.get_by_uuid(util.get_this_host())
sm_config = self._session.xenapi.VDI.get_sm_config(vdi_ref)
host_key = "host_%s" % host_ref
if sm_config.has_key(host_key):
self._session.xenapi.VDI.remove_from_sm_config(vdi_ref, host_key)
util.SMlog("Removed host key %s for %s" % (host_key, vdi_uuid))
else:
util.SMlog("_remove_tag: host key %s not found, ignore" % host_key)
def _get_pool_config(self, pool_name):
pool_info = dict()
vdi_ref = self.target.vdi.sr.srcmd.params.get('vdi_ref')
if not vdi_ref:
# attach_from_config context: HA disks don't need to be in any
# special pool
return pool_info
session = XenAPI.xapi_local()
session.xenapi.login_with_password('root', '', '', 'SM')
sr_ref = self.target.vdi.sr.srcmd.params.get('sr_ref')
sr_config = session.xenapi.SR.get_other_config(sr_ref)
vdi_config = session.xenapi.VDI.get_other_config(vdi_ref)
pool_size_str = sr_config.get(POOL_SIZE_KEY)
pool_name_override = vdi_config.get(POOL_NAME_KEY)
if pool_name_override:
pool_name = pool_name_override
pool_size_override = vdi_config.get(POOL_SIZE_KEY)
if pool_size_override:
pool_size_str = pool_size_override
pool_size = 0
if pool_size_str:
try:
pool_size = int(pool_size_str)
if pool_size < 1 or pool_size > MAX_FULL_RINGS:
raise ValueError("outside of range")
pool_size = NUM_PAGES_PER_RING * pool_size
except ValueError:
util.SMlog("Error: invalid mem-pool-size %s" % pool_size_str)
pool_size = 0
pool_info["mem-pool"] = pool_name
if pool_size:
pool_info["mem-pool-size"] = str(pool_size)
session.xenapi.session.logout()
return pool_info
def attach(self, sr_uuid, vdi_uuid, writable, activate = False, caching_params = {}):
"""Return/dev/sm/backend symlink path"""
self.xenstore_data.update(self._get_pool_config(sr_uuid))
if not self.target.has_cap("ATOMIC_PAUSE") or activate:
util.SMlog("Attach & activate")
self._attach(sr_uuid, vdi_uuid)
dev_path = self._activate(sr_uuid, vdi_uuid,
{"rdonly": not writable})
self.BackendLink.from_uuid(sr_uuid, vdi_uuid).mklink(dev_path)
# Return backend/ link
back_path = self.BackendLink.from_uuid(sr_uuid, vdi_uuid).path()
options = {"rdonly": not writable}
options.update(caching_params)
o_direct, o_direct_reason = self.get_o_direct_capability(options)
struct = { 'params': back_path,
'o_direct': o_direct,
'o_direct_reason': o_direct_reason,
'xenstore_data': self.xenstore_data}
util.SMlog('result: %s' % struct)
try:
f=open("%s.attach_info" % back_path, 'a')
f.write(xmlrpclib.dumps((struct,), "", True))
f.close()
except:
pass
return xmlrpclib.dumps((struct,), "", True)
def activate(self, sr_uuid, vdi_uuid, writable, caching_params):
util.SMlog("blktap2.activate")
options = {"rdonly": not writable}
options.update(caching_params)
sr_ref = self.target.vdi.sr.srcmd.params.get('sr_ref')
sr_other_config = self._session.xenapi.SR.get_other_config(sr_ref)
timeout = nfs.get_nfs_timeout(sr_other_config)
if timeout:
# Note NFS timeout values are in deciseconds
timeout = int((timeout+5) / 10)
options["timeout"] = timeout + self.TAPDISK_TIMEOUT_MARGIN
for i in range(self.ATTACH_DETACH_RETRY_SECS):
try:
if self._activate_locked(sr_uuid, vdi_uuid, options):
return
except util.SRBusyException:
util.SMlog("SR locked, retrying")
time.sleep(1)
raise util.SMException("VDI %s locked" % vdi_uuid)
@locking("VDIUnavailable")
def _activate_locked(self, sr_uuid, vdi_uuid, options):
"""Wraps target.activate and adds a tapdisk"""
import VDI as sm
#util.SMlog("VDI.activate %s" % vdi_uuid)
if self.tap_wanted():
if not self._add_tag(vdi_uuid, not options["rdonly"]):
return False
# it is possible that while the VDI was paused some of its
# attributes have changed (e.g. its size if it was inflated; or its
# path if it was leaf-coalesced onto a raw LV), so refresh the
# object completely
params = self.target.vdi.sr.srcmd.params
target = sm.VDI.from_uuid(self.target.vdi.session, vdi_uuid)
target.sr.srcmd.params = params
driver_info = target.sr.srcmd.driver_info
self.target = self.TargetDriver(target, driver_info)
try:
util.fistpoint.activate_custom_fn(
"blktap_activate_inject_failure",
lambda: util.inject_failure())
# Attach the physical node
if self.target.has_cap("ATOMIC_PAUSE"):
self._attach(sr_uuid, vdi_uuid)
vdi_type = self.target.get_vdi_type()
# Take lvchange-p Lock before running
# tap-ctl open
# Needed to avoid race with lvchange -p which is
# now taking the same lock
# This is a fix for CA-155766
if hasattr(self.target.vdi.sr, 'DRIVER_TYPE') and \
self.target.vdi.sr.DRIVER_TYPE == 'lvhd' and \
vdi_type == vhdutil.VDI_TYPE_VHD:
lock = Lock("lvchange-p", lvhdutil.NS_PREFIX_LVM + sr_uuid)
lock.acquire()
# When we attach a static VDI for HA, we cannot communicate with
            # xapi, because it has not started yet. These VDIs are raw.
if vdi_type != vhdutil.VDI_TYPE_RAW:
session = self.target.vdi.session
vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
sm_config = session.xenapi.VDI.get_sm_config(vdi_ref)
if 'key_hash' in sm_config:
key_hash = sm_config['key_hash']
options['key_hash'] = key_hash
options['vdi_uuid'] = vdi_uuid
util.SMlog('Using key with hash {} for VDI {}'.format(key_hash, vdi_uuid))
# Activate the physical node
dev_path = self._activate(sr_uuid, vdi_uuid, options)
if hasattr(self.target.vdi.sr, 'DRIVER_TYPE') and \
self.target.vdi.sr.DRIVER_TYPE == 'lvhd' and \
self.target.get_vdi_type() == vhdutil.VDI_TYPE_VHD:
lock.release()
except:
util.SMlog("Exception in activate/attach")
if self.tap_wanted():
util.fistpoint.activate_custom_fn(
"blktap_activate_error_handling",
lambda: time.sleep(30))
while True:
try:
self._remove_tag(vdi_uuid)
break
except xmlrpclib.ProtocolError, e:
# If there's a connection error, keep trying forever.
if e.errcode == httplib.INTERNAL_SERVER_ERROR:
continue
else:
util.SMlog('failed to remove tag: %s' % e)
break
except Exception, e:
util.SMlog('failed to remove tag: %s' % e)
break
raise
# Link result to backend/
self.BackendLink.from_uuid(sr_uuid, vdi_uuid).mklink(dev_path)
return True
def _activate(self, sr_uuid, vdi_uuid, options):
vdi_options = self.target.activate(sr_uuid, vdi_uuid)
dev_path = self.setup_cache(sr_uuid, vdi_uuid, options)
if not dev_path:
phy_path = self.PhyLink.from_uuid(sr_uuid, vdi_uuid).readlink()
# Maybe launch a tapdisk on the physical link
if self.tap_wanted():
vdi_type = self.target.get_vdi_type()
options["o_direct"] = self.get_o_direct_capability(options)[0]
if vdi_options:
options.update(vdi_options)
dev_path = self._tap_activate(phy_path, vdi_type, sr_uuid,
options,
self._get_pool_config(sr_uuid).get("mem-pool-size"))
else:
dev_path = phy_path # Just reuse phy
return dev_path
def _attach(self, sr_uuid, vdi_uuid):
attach_info = xmlrpclib.loads(self.target.attach(sr_uuid, vdi_uuid))[0][0]
params = attach_info['params']
xenstore_data = attach_info['xenstore_data']
phy_path = util.to_plain_string(params)
self.xenstore_data.update(xenstore_data)
# Save it to phy/
self.PhyLink.from_uuid(sr_uuid, vdi_uuid).mklink(phy_path)
def deactivate(self, sr_uuid, vdi_uuid, caching_params):
util.SMlog("blktap2.deactivate")
for i in range(self.ATTACH_DETACH_RETRY_SECS):
try:
if self._deactivate_locked(sr_uuid, vdi_uuid, caching_params):
return
except util.SRBusyException, e:
util.SMlog("SR locked, retrying")
time.sleep(1)
raise util.SMException("VDI %s locked" % vdi_uuid)
@locking("VDIUnavailable")
def _deactivate_locked(self, sr_uuid, vdi_uuid, caching_params):
"""Wraps target.deactivate and removes a tapdisk"""
#util.SMlog("VDI.deactivate %s" % vdi_uuid)
if self.tap_wanted() and not self._check_tag(vdi_uuid):
return False
self._deactivate(sr_uuid, vdi_uuid, caching_params)
if self.target.has_cap("ATOMIC_PAUSE"):
self._detach(sr_uuid, vdi_uuid)
if self.tap_wanted():
self._remove_tag(vdi_uuid)
return True
def _resetPhylink(self, sr_uuid, vdi_uuid, path):
self.PhyLink.from_uuid(sr_uuid, vdi_uuid).mklink(path)
def detach(self, sr_uuid, vdi_uuid, deactivate = False, caching_params = {}):
if not self.target.has_cap("ATOMIC_PAUSE") or deactivate:
util.SMlog("Deactivate & detach")
self._deactivate(sr_uuid, vdi_uuid, caching_params)
self._detach(sr_uuid, vdi_uuid)
else:
pass # nothing to do
def _deactivate(self, sr_uuid, vdi_uuid, caching_params):
import VDI as sm
# Shutdown tapdisk
back_link = self.BackendLink.from_uuid(sr_uuid, vdi_uuid)
if not util.pathexists(back_link.path()):
util.SMlog("Backend path %s does not exist" % back_link.path())
return
try:
attach_info_path = "%s.attach_info" % (back_link.path())
os.unlink(attach_info_path)
except:
util.SMlog("unlink of attach_info failed")
try:
major, minor = back_link.rdev()
except self.DeviceNode.NotABlockDevice:
pass
else:
if major == Tapdisk.major():
self._tap_deactivate(minor)
self.remove_cache(sr_uuid, vdi_uuid, caching_params)
# Remove the backend link
back_link.unlink()
# Deactivate & detach the physical node
if self.tap_wanted() and self.target.vdi.session is not None:
# it is possible that while the VDI was paused some of its
# attributes have changed (e.g. its size if it was inflated; or its
# path if it was leaf-coalesced onto a raw LV), so refresh the
# object completely
target = sm.VDI.from_uuid(self.target.vdi.session, vdi_uuid)
driver_info = target.sr.srcmd.driver_info
self.target = self.TargetDriver(target, driver_info)
self.target.deactivate(sr_uuid, vdi_uuid)
def _detach(self, sr_uuid, vdi_uuid):
self.target.detach(sr_uuid, vdi_uuid)
# Remove phy/
self.PhyLink.from_uuid(sr_uuid, vdi_uuid).unlink()
def _updateCacheRecord(self, session, vdi_uuid, on_boot, caching):
# Remove existing VDI.sm_config fields
vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
for key in ["on_boot", "caching"]:
session.xenapi.VDI.remove_from_sm_config(vdi_ref,key)
if on_boot is not None:
session.xenapi.VDI.add_to_sm_config(vdi_ref, 'on_boot', on_boot)
if caching is not None:
session.xenapi.VDI.add_to_sm_config(vdi_ref, 'caching', caching)
def setup_cache(self, sr_uuid, vdi_uuid, params):
if params.get(self.CONF_KEY_ALLOW_CACHING) != "true":
return
util.SMlog("Requested local caching")
if not self.target.has_cap("SR_CACHING"):
util.SMlog("Error: local caching not supported by this SR")
return
scratch_mode = False
if params.get(self.CONF_KEY_MODE_ON_BOOT) == "reset":
scratch_mode = True
util.SMlog("Requested scratch mode")
if not self.target.has_cap("VDI_RESET_ON_BOOT/2"):
util.SMlog("Error: scratch mode not supported by this SR")
return
dev_path = None
local_sr_uuid = params.get(self.CONF_KEY_CACHE_SR)
if not local_sr_uuid:
util.SMlog("ERROR: Local cache SR not specified, not enabling")
return
dev_path = self._setup_cache(self._session, sr_uuid, vdi_uuid,
local_sr_uuid, scratch_mode, params)
if dev_path:
self._updateCacheRecord(self._session, self.target.vdi.uuid,
params.get(self.CONF_KEY_MODE_ON_BOOT),
params.get(self.CONF_KEY_ALLOW_CACHING))
return dev_path
def alert_no_cache(self, session, vdi_uuid, cache_sr_uuid, err):
vm_uuid = None
vm_label = ""
try:
cache_sr_ref = session.xenapi.SR.get_by_uuid(cache_sr_uuid)
cache_sr_rec = session.xenapi.SR.get_record(cache_sr_ref)
cache_sr_label = cache_sr_rec.get("name_label")
host_ref = session.xenapi.host.get_by_uuid(util.get_this_host())
host_rec = session.xenapi.host.get_record(host_ref)
host_label = host_rec.get("name_label")
vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
vbds = session.xenapi.VBD.get_all_records_where( \
"field \"VDI\" = \"%s\"" % vdi_ref)
for vbd_rec in vbds.values():
vm_ref = vbd_rec.get("VM")
vm_rec = session.xenapi.VM.get_record(vm_ref)
vm_uuid = vm_rec.get("uuid")
vm_label = vm_rec.get("name_label")
except:
util.logException("alert_no_cache")
alert_obj = "SR"
alert_uuid = str(cache_sr_uuid)
alert_str = "No space left in Local Cache SR %s" % cache_sr_uuid
if vm_uuid:
alert_obj = "VM"
alert_uuid = vm_uuid
reason = ""
if err == errno.ENOSPC:
reason = "because there is no space left"
alert_str = "The VM \"%s\" is not using IntelliCache %s on the Local Cache SR (\"%s\") on host \"%s\"" % \
(vm_label, reason, cache_sr_label, host_label)
util.SMlog("Creating alert: (%s, %s, \"%s\")" % \
(alert_obj, alert_uuid, alert_str))
session.xenapi.message.create("No space left in local cache", "3",
alert_obj, alert_uuid, alert_str)
def _setup_cache(self, session, sr_uuid, vdi_uuid, local_sr_uuid,
scratch_mode, options):
import SR
import EXTSR
import NFSSR
import XenAPI
from lock import Lock
from FileSR import FileVDI
parent_uuid = vhdutil.getParent(self.target.vdi.path,
FileVDI.extractUuid)
if not parent_uuid:
util.SMlog("ERROR: VDI %s has no parent, not enabling" % \
self.target.vdi.uuid)
return
util.SMlog("Setting up cache")
parent_uuid = parent_uuid.strip()
shared_target = NFSSR.NFSFileVDI(self.target.vdi.sr, parent_uuid)
if shared_target.parent:
util.SMlog("ERROR: Parent VDI %s has parent, not enabling" %
shared_target.uuid)
return
SR.registerSR(EXTSR.EXTSR)
local_sr = SR.SR.from_uuid(session, local_sr_uuid)
lock = Lock(self.LOCK_CACHE_SETUP, parent_uuid)
lock.acquire()
# read cache
read_cache_path = "%s/%s.vhdcache" % (local_sr.path, shared_target.uuid)
if util.pathexists(read_cache_path):
util.SMlog("Read cache node (%s) already exists, not creating" % \
read_cache_path)
else:
try:
vhdutil.snapshot(read_cache_path, shared_target.path, False)
except util.CommandException, e:
util.SMlog("Error creating parent cache: %s" % e)
self.alert_no_cache(session, vdi_uuid, local_sr_uuid, e.code)
return None
# local write node
leaf_size = vhdutil.getSizeVirt(self.target.vdi.path)
local_leaf_path = "%s/%s.vhdcache" % \
(local_sr.path, self.target.vdi.uuid)
if util.pathexists(local_leaf_path):
util.SMlog("Local leaf node (%s) already exists, deleting" % \
local_leaf_path)
os.unlink(local_leaf_path)
try:
vhdutil.snapshot(local_leaf_path, read_cache_path, False,
msize = leaf_size / 1024 / 1024, checkEmpty = False)
except util.CommandException, e:
util.SMlog("Error creating leaf cache: %s" % e)
self.alert_no_cache(session, vdi_uuid, local_sr_uuid, e.code)
return None
local_leaf_size = vhdutil.getSizeVirt(local_leaf_path)
if leaf_size > local_leaf_size:
util.SMlog("Leaf size %d > local leaf cache size %d, resizing" %
(leaf_size, local_leaf_size))
vhdutil.setSizeVirtFast(local_leaf_path, leaf_size)
vdi_type = self.target.get_vdi_type()
prt_tapdisk = Tapdisk.find_by_path(read_cache_path)
if not prt_tapdisk:
parent_options = copy.deepcopy(options)
parent_options["rdonly"] = False
parent_options["lcache"] = True
blktap = Blktap.allocate()
try:
blktap.set_pool_name("lcache-parent-pool-%s" % blktap.minor)
# no need to change pool_size since each parent tapdisk is in
# its own pool
prt_tapdisk = \
Tapdisk.launch_on_tap(blktap, read_cache_path,
'vhd', parent_options)
except:
blktap.free()
raise
secondary = "%s:%s" % (self.target.get_vdi_type(),
self.PhyLink.from_uuid(sr_uuid, vdi_uuid).readlink())
util.SMlog("Parent tapdisk: %s" % prt_tapdisk)
leaf_tapdisk = Tapdisk.find_by_path(local_leaf_path)
if not leaf_tapdisk:
blktap = Blktap.allocate()
child_options = copy.deepcopy(options)
child_options["rdonly"] = False
child_options["lcache"] = False
child_options["existing_prt"] = prt_tapdisk.minor
child_options["secondary"] = secondary
child_options["standby"] = scratch_mode
try:
leaf_tapdisk = \
Tapdisk.launch_on_tap(blktap, local_leaf_path,
'vhd', child_options)
except:
blktap.free()
raise
lock.release()
util.SMlog("Local read cache: %s, local leaf: %s" % \
(read_cache_path, local_leaf_path))
return leaf_tapdisk.get_devpath()
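# Sketch of the VHD chain assembled above (illustrative, paths follow the
# naming used in this method):
#
#   shared parent VDI on the NFS SR
#     -> <local_sr.path>/<parent_uuid>.vhdcache   (read cache, own tapdisk pool)
#       -> <local_sr.path>/<leaf_uuid>.vhdcache   (local write node, returned here)
#
# The leaf tapdisk is also given the original physical path as its
# "secondary" image (unless running in scratch mode), which is how writes
# reach the shared copy.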
def remove_cache(self, sr_uuid, vdi_uuid, params):
if not self.target.has_cap("SR_CACHING"):
return
caching = params.get(self.CONF_KEY_ALLOW_CACHING) == "true"
local_sr_uuid = params.get(self.CONF_KEY_CACHE_SR)
if caching and not local_sr_uuid:
util.SMlog("ERROR: Local cache SR not specified, ignore")
return
if caching:
self._remove_cache(self._session, local_sr_uuid)
if self._session is not None:
self._updateCacheRecord(self._session, self.target.vdi.uuid, None, None)
def _is_tapdisk_in_use(self, minor):
(retVal, links) = util.findRunningProcessOrOpenFile("tapdisk")
if not retVal:
# err on the side of caution
return True
for link in links:
if link.find("tapdev%d" % minor) != -1:
return True
return False
def _remove_cache(self, session, local_sr_uuid):
import SR
import EXTSR
import NFSSR
import XenAPI
from lock import Lock
from FileSR import FileVDI
parent_uuid = vhdutil.getParent(self.target.vdi.path,
FileVDI.extractUuid)
if not parent_uuid:
util.SMlog("ERROR: No parent for VDI %s, ignore" % \
self.target.vdi.uuid)
return
util.SMlog("Tearing down the cache")
parent_uuid = parent_uuid.strip()
shared_target = NFSSR.NFSFileVDI(self.target.vdi.sr, parent_uuid)
SR.registerSR(EXTSR.EXTSR)
local_sr = SR.SR.from_uuid(session, local_sr_uuid)
lock = Lock(self.LOCK_CACHE_SETUP, parent_uuid)
lock.acquire()
# local write node
local_leaf_path = "%s/%s.vhdcache" % \
(local_sr.path, self.target.vdi.uuid)
if util.pathexists(local_leaf_path):
util.SMlog("Deleting local leaf node %s" % local_leaf_path)
os.unlink(local_leaf_path)
read_cache_path = "%s/%s.vhdcache" % (local_sr.path, shared_target.uuid)
prt_tapdisk = Tapdisk.find_by_path(read_cache_path)
if not prt_tapdisk:
util.SMlog("Parent tapdisk not found")
elif not self._is_tapdisk_in_use(prt_tapdisk.minor):
util.SMlog("Parent tapdisk not in use: shutting down %s" % \
read_cache_path)
try:
prt_tapdisk.shutdown()
except:
util.logException("shutting down parent tapdisk")
else:
util.SMlog("Parent tapdisk still in use: %s" % read_cache_path)
# the parent cache files are removed during the local SR's background
# GC run
lock.release()
PythonKeyError = KeyError
class UEventHandler(object):
def __init__(self):
self._action = None
class KeyError(PythonKeyError):
def __str__(self):
return \
"Key '%s' missing in environment. " % self.args[0] + \
"Not called in udev context?"
@classmethod
def getenv(cls, key):
try:
return os.environ[key]
except KeyError, e:
raise cls.KeyError(e.args[0])
def get_action(self):
if not self._action:
self._action = self.getenv('ACTION')
return self._action
class UnhandledEvent(Exception):
def __init__(self, event, handler):
self.event = event
self.handler = handler
def __str__(self):
return "Uevent '%s' not handled by %s" % \
(self.event, self.handler.__class__.__name__)
ACTIONS = {}
def run(self):
action = self.get_action()
try:
fn = self.ACTIONS[action]
except KeyError:
raise self.UnhandledEvent(action, self)
return fn(self)
def __str__(self):
try: action = self.get_action()
except: action = None
return "%s[%s]" % (self.__class__.__name__, action)
class __BlktapControl(ClassDevice):
SYSFS_CLASSTYPE = "misc"
def __init__(self):
ClassDevice.__init__(self)
self._default_pool = None
def sysfs_devname(self):
return "blktap!control"
class DefaultPool(Attribute):
SYSFS_NODENAME = "default_pool"
def get_default_pool_attr(self):
if not self._default_pool:
self._default_pool = self.DefaultPool.from_kobject(self)
return self._default_pool
def get_default_pool_name(self):
return self.get_default_pool_attr().readline()
def set_default_pool_name(self, name):
self.get_default_pool_attr().writeline(name)
def get_default_pool(self):
return BlktapControl.get_pool(self.get_default_pool_name())
def set_default_pool(self, pool):
self.set_default_pool_name(pool.name)
class NoSuchPool(Exception):
def __init__(self, name):
self.name = name
def __str__(self):
return "No such pool: %s", self.name
def get_pool(self, name):
path = "%s/pools/%s" % (self.sysfs_path(), name)
if not os.path.isdir(path):
raise self.NoSuchPool(name)
return PagePool(path)
BlktapControl = __BlktapControl()
class PagePool(KObject):
def __init__(self, path):
self.path = path
self._size = None
def sysfs_path(self):
return self.path
class Size(Attribute):
SYSFS_NODENAME = "size"
def get_size_attr(self):
if not self._size:
self._size = self.Size.from_kobject(self)
return self._size
def set_size(self, pages):
pages = str(pages)
self.get_size_attr().writeline(pages)
def get_size(self):
pages = self.get_size_attr().readline()
return int(pages)
class BusDevice(KObject):
SYSFS_BUSTYPE = None
@classmethod
def sysfs_bus_path(cls):
return "/sys/bus/%s" % cls.SYSFS_BUSTYPE
def sysfs_path(self):
path = "%s/devices/%s" % (self.sysfs_bus_path(),
self.sysfs_devname())
return path
class XenbusDevice(BusDevice):
"""Xenbus device, in XS and sysfs"""
XBT_NIL = ""
XENBUS_DEVTYPE = None
def __init__(self, domid, devid):
self.domid = int(domid)
self.devid = int(devid)
self._xbt = XenbusDevice.XBT_NIL
import xen.lowlevel.xs
self.xs = xen.lowlevel.xs.xs()
def xs_path(self, key=None):
path = "backend/%s/%d/%d" % (self.XENBUS_DEVTYPE,
self.domid,
self.devid)
if key is not None:
path = "%s/%s" % (path, key)
return path
def _log(self, prio, msg):
syslog(prio, msg)
def info(self, msg):
self._log(_syslog.LOG_INFO, msg)
def warn(self, msg):
self._log(_syslog.LOG_WARNING, "WARNING: " + msg)
def _xs_read_path(self, path):
val = self.xs.read(self._xbt, path)
#self.info("read %s = '%s'" % (path, val))
return val
def _xs_write_path(self, path, val):
self.xs.write(self._xbt, path, val)
self.info("wrote %s = '%s'" % (path, val))
def _xs_rm_path(self, path):
self.xs.rm(self._xbt, path)
self.info("removed %s" % path)
def read(self, key):
return self._xs_read_path(self.xs_path(key))
def has_key(self, key):
return self.read(key) is not None
def write(self, key, val):
self._xs_write_path(self.xs_path(key), val)
def rm(self, key):
self._xs_rm_path(self.xs_path(key))
def exists(self):
return self.has_key(None)
def begin(self):
assert(self._xbt == XenbusDevice.XBT_NIL)
self._xbt = self.xs.transaction_start()
def commit(self):
ok = self.xs.transaction_end(self._xbt, 0)
self._xbt = XenbusDevice.XBT_NIL
return ok
def abort(self):
ok = self.xs.transaction_end(self._xbt, 1)
assert(ok == True)
self._xbt = XenbusDevice.XBT_NIL
def create_physical_device(self):
"""The standard protocol is: toolstack writes 'params', linux hotplug
script translates this into physical-device=%x:%x"""
if self.has_key("physical-device"):
return
try:
params = self.read("params")
frontend = self.read("frontend")
is_cdrom = self._xs_read_path("%s/device-type") == "cdrom"
# We don't have PV drivers for CDROM devices, so we prevent blkback
# from opening the physical-device
if not(is_cdrom):
major_minor = os.stat(params).st_rdev
major, minor = divmod(major_minor, 256)
self.write("physical-device", "%x:%x" % (major, minor))
except:
util.logException("BLKTAP2:create_physical_device")
def signal_hotplug(self, online=True):
xapi_path = "/xapi/%d/hotplug/%s/%d/hotplug" % (self.domid,
self.XENBUS_DEVTYPE,
self.devid)
upstream_path = self.xs_path("hotplug-status")
if online:
self._xs_write_path(xapi_path, "online")
self._xs_write_path(upstream_path, "connected")
else:
self._xs_rm_path(xapi_path)
self._xs_rm_path(upstream_path)
def sysfs_devname(self):
return "%s-%d-%d" % (self.XENBUS_DEVTYPE,
self.domid, self.devid)
def __str__(self):
return self.sysfs_devname()
@classmethod
def find(cls):
pattern = "/sys/bus/%s/devices/%s*" % (cls.SYSFS_BUSTYPE,
cls.XENBUS_DEVTYPE)
for path in glob.glob(pattern):
name = os.path.basename(path)
(_type, domid, devid) = name.split('-')
yield cls(domid, devid)
class XenBackendDevice(XenbusDevice):
"""Xenbus backend device"""
SYSFS_BUSTYPE = "xen-backend"
@classmethod
def from_xs_path(cls, _path):
(_backend, _type, domid, devid) = _path.split('/')
assert _backend == 'backend'
assert _type == cls.XENBUS_DEVTYPE
domid = int(domid)
devid = int(devid)
return cls(domid, devid)
class Blkback(XenBackendDevice):
"""A blkback VBD"""
XENBUS_DEVTYPE = "vbd"
def __init__(self, domid, devid):
XenBackendDevice.__init__(self, domid, devid)
self._phy = None
self._vdi_uuid = None
self._q_state = None
self._q_events = None
class XenstoreValueError(Exception):
KEY = None
def __init__(self, vbd, _str):
self.vbd = vbd
self.str = _str
def __str__(self):
return "Backend %s " % self.vbd + \
"has %s = %s" % (self.KEY, self.str)
class PhysicalDeviceError(XenstoreValueError):
KEY = "physical-device"
class PhysicalDevice(object):
def __init__(self, major, minor):
self.major = int(major)
self.minor = int(minor)
@classmethod
def from_xbdev(cls, xbdev):
phy = xbdev.read("physical-device")
try:
major, minor = phy.split(':')
major = int(major, 0x10)
minor = int(minor, 0x10)
except Exception, e:
raise xbdev.PhysicalDeviceError(xbdev, phy)
return cls(major, minor)
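# e.g. (illustrative) a backend carrying physical-device = "fd:a" parses to
# PhysicalDevice(major=253, minor=10); is_tap() below then compares the
# major number against Tapdisk.major().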
def makedev(self):
return os.makedev(self.major, self.minor)
def is_tap(self):
return self.major == Tapdisk.major()
def __str__(self):
return "%s:%s" % (self.major, self.minor)
def __eq__(self, other):
return \
self.major == other.major and \
self.minor == other.minor
def get_physical_device(self):
if not self._phy:
self._phy = self.PhysicalDevice.from_xbdev(self)
return self._phy
class QueueEvents(Attribute):
"""Blkback sysfs node to select queue-state event
notifications emitted."""
SYSFS_NODENAME = "queue_events"
QUEUE_RUNNING = (1<<0)
QUEUE_PAUSE_DONE = (1<<1)
QUEUE_SHUTDOWN_DONE = (1<<2)
QUEUE_PAUSE_REQUEST = (1<<3)
QUEUE_SHUTDOWN_REQUEST = (1<<4)
def get_mask(self):
return int(self.readline(), 0x10)
def set_mask(self, mask):
self.writeline("0x%x" % mask)
def get_queue_events(self):
if not self._q_events:
self._q_events = self.QueueEvents.from_kobject(self)
return self._q_events
def get_vdi_uuid(self):
if not self._vdi_uuid:
self._vdi_uuid = self.read("sm-data/vdi-uuid")
return self._vdi_uuid
def pause_requested(self):
return self.has_key("pause")
def shutdown_requested(self):
return self.has_key("shutdown-request")
def shutdown_done(self):
return self.has_key("shutdown-done")
def running(self):
return self.has_key('queue-0/kthread-pid')
@classmethod
def find_by_physical_device(cls, phy):
for dev in cls.find():
try:
_phy = dev.get_physical_device()
except cls.PhysicalDeviceError:
continue
if _phy == phy:
yield dev
@classmethod
def find_by_tap_minor(cls, minor):
phy = cls.PhysicalDevice(Tapdisk.major(), minor)
return cls.find_by_physical_device(phy)
@classmethod
def find_by_tap(cls, tapdisk):
return cls.find_by_tap_minor(tapdisk.minor)
def has_tap(self):
if not self.can_tap():
return False
phy = self.get_physical_device()
if phy:
return phy.is_tap()
return False
def is_bare_hvm(self):
"""File VDIs for bare HVM. These are directly accessible by Qemu."""
try:
self.get_physical_device()
except self.PhysicalDeviceError, e:
vdi_type = self.read("type")
self.info("HVM VDI: type=%s" % vdi_type)
if e.str is not None or vdi_type != 'file':
raise
return True
return False
def can_tap(self):
return not self.is_bare_hvm()
class BlkbackEventHandler(UEventHandler):
LOG_FACILITY = _syslog.LOG_DAEMON
def __init__(self, ident=None, action=None):
if not ident: ident = self.__class__.__name__
self.ident = ident
self._vbd = None
self._tapdisk = None
UEventHandler.__init__(self)
def run(self):
self.xs_path = self.getenv('XENBUS_PATH')
openlog(str(self), 0, self.LOG_FACILITY)
UEventHandler.run(self)
def __str__(self):
try: path = self.xs_path
except: path = None
try: action = self.get_action()
except: action = None
return "%s[%s](%s)" % (self.ident, action, path)
def _log(self, prio, msg):
syslog(prio, msg)
util.SMlog("%s: " % self + msg)
def info(self, msg):
self._log(_syslog.LOG_INFO, msg)
def warn(self, msg):
self._log(_syslog.LOG_WARNING, "WARNING: " + msg)
def error(self, msg):
self._log(_syslog.LOG_ERR, "ERROR: " + msg)
def get_vbd(self):
if not self._vbd:
self._vbd = Blkback.from_xs_path(self.xs_path)
return self._vbd
def get_tapdisk(self):
if not self._tapdisk:
minor = self.get_vbd().get_physical_device().minor
self._tapdisk = Tapdisk.from_minor(minor)
return self._tapdisk
#
# Events
#
def __add(self):
vbd = self.get_vbd()
# Manage blkback transitions
# self._manage_vbd()
vbd.create_physical_device()
vbd.signal_hotplug()
@retried(backoff=.5, limit=10)
def add(self):
try:
self.__add()
except Attribute.NoSuchAttribute, e:
#
# FIXME: KOBJ_ADD is racing backend.probe, which
# registers device attributes. So poll a little.
#
self.warn("%s, still trying." % e)
raise RetryLoop.TransientFailure(e)
def __change(self):
vbd = self.get_vbd()
# 1. Pause or resume tapdisk (if there is one)
if vbd.has_tap():
pass
#self._pause_update_tap()
# 2. Signal Xapi.VBD.pause/resume completion
self._signal_xapi()
def change(self):
vbd = self.get_vbd()
# NB. Beware of spurious change events between shutdown
# completion and device removal. Also, Xapi.VM.migrate will
# hammer a couple extra shutdown-requests into the source VBD.
while True:
vbd.begin()
if not vbd.exists() or \
vbd.shutdown_done():
break
self.__change()
if vbd.commit():
return
vbd.abort()
self.info("spurious uevent, ignored.")
def remove(self):
vbd = self.get_vbd()
vbd.signal_hotplug(False)
ACTIONS = { 'add': add,
'change': change,
'remove': remove }
#
# VDI.pause
#
def _tap_should_pause(self):
"""Enumerate all VBDs on our tapdisk. Returns true iff any was
paused"""
tapdisk = self.get_tapdisk()
TapState = Tapdisk.PauseState
PAUSED = 'P'
RUNNING = 'R'
PAUSED_SHUTDOWN = 'P,S'
# NB. Shutdown/paused is special. We know it's not going
# to restart again, so it's a RUNNING. Still better than
# backtracking a removed device during Vbd.unplug completion.
next = TapState.RUNNING
vbds = {}
for vbd in Blkback.find_by_tap(tapdisk):
name = str(vbd)
pausing = vbd.pause_requested()
closing = vbd.shutdown_requested()
running = vbd.running()
if pausing:
if closing and not running:
vbds[name] = PAUSED_SHUTDOWN
else:
vbds[name] = PAUSED
next = TapState.PAUSED
else:
vbds[name] = RUNNING
self.info("tapdev%d (%s): %s -> %s"
% (tapdisk.minor, tapdisk.pause_state(),
vbds, next))
return next == TapState.PAUSED
def _pause_update_tap(self):
vbd = self.get_vbd()
if self._tap_should_pause():
self._pause_tap()
else:
self._resume_tap()
def _pause_tap(self):
tapdisk = self.get_tapdisk()
if not tapdisk.is_paused():
self.info("pausing %s" % tapdisk)
tapdisk.pause()
def _resume_tap(self):
tapdisk = self.get_tapdisk()
# NB. Raw VDI snapshots. Refresh the physical path and
# type while resuming.
vbd = self.get_vbd()
vdi_uuid = vbd.get_vdi_uuid()
if tapdisk.is_paused():
self.info("loading vdi uuid=%s" % vdi_uuid)
vdi = VDI.from_cli(vdi_uuid)
_type = vdi.get_tap_type()
path = vdi.get_phy_path()
self.info("resuming %s on %s:%s" % (tapdisk, _type, path))
tapdisk.unpause(_type, path)
#
# VBD.pause/shutdown
#
def _manage_vbd(self):
vbd = self.get_vbd()
# NB. Hook into VBD state transitions.
events = vbd.get_queue_events()
mask = 0
mask |= events.QUEUE_PAUSE_DONE # pause/unpause
mask |= events.QUEUE_SHUTDOWN_DONE # shutdown
# TODO: mask |= events.QUEUE_SHUTDOWN_REQUEST, for shutdown=force
# TODO: mask |= events.QUEUE_RUNNING, for ionice updates etc
events.set_mask(mask)
self.info("wrote %s = %#02x" % (events.path, mask))
def _signal_xapi(self):
vbd = self.get_vbd()
pausing = vbd.pause_requested()
closing = vbd.shutdown_requested()
running = vbd.running()
handled = 0
if pausing and not running:
if not vbd.has_key('pause-done'):
vbd.write('pause-done', '')
handled += 1
if not pausing:
if vbd.has_key('pause-done'):
vbd.rm('pause-done')
handled += 1
if closing and not running:
if not vbd.has_key('shutdown-done'):
vbd.write('shutdown-done', '')
handled += 1
if handled > 1:
self.warn("handled %d events, " % handled +
"pausing=%s closing=%s running=%s" % \
(pausing, closing, running))
if __name__ == '__main__':
import sys
prog = os.path.basename(sys.argv[0])
#
# Simple CLI interface for manual operation
#
# tap.* level calls go down to local Tapdisk()s (by physical path)
# vdi.* level calls run the plugin calls across host boundaries.
#
def usage(stream):
print >>stream, \
"usage: %s tap.{list|major}" % prog
print >>stream, \
" %s tap.{launch|find|get|pause|" % prog + \
"unpause|shutdown|stats} {[<tt>:]<path>} | [minor=]<int> | .. }"
print >>stream, \
" %s vbd.uevent" % prog
try:
cmd = sys.argv[1]
except IndexError:
usage(sys.stderr)
sys.exit(1)
try:
_class, method = cmd.split('.')
except:
usage(sys.stderr)
sys.exit(1)
#
# Local Tapdisks
#
if cmd == 'tap.major':
print "%d" % Tapdisk.major()
elif cmd == 'tap.launch':
tapdisk = Tapdisk.launch_from_arg(sys.argv[2])
print >> sys.stderr, "Launched %s" % tapdisk
elif _class == 'tap':
attrs = {}
for item in sys.argv[2:]:
try:
key, val = item.split('=')
attrs[key] = val
continue
except ValueError:
pass
try:
attrs['minor'] = int(item)
continue
except ValueError:
pass
try:
arg = Tapdisk.Arg.parse(item)
attrs['_type'] = arg.type
attrs['path'] = arg.path
continue
except Tapdisk.Arg.InvalidArgument:
pass
attrs['path'] = item
if cmd == 'tap.list':
for tapdisk in Tapdisk.list(**attrs):
blktap = tapdisk.get_blktap()
print tapdisk,
print "%s: task=%s pool=%s" % \
(blktap,
blktap.get_task_pid(),
blktap.get_pool_name())
elif cmd == 'tap.vbds':
# Find all Blkback instances for a given tapdisk
for tapdisk in Tapdisk.list(**attrs):
print "%s:" % tapdisk,
for vbd in Blkback.find_by_tap(tapdisk):
print vbd,
print
else:
if not attrs:
usage(sys.stderr)
sys.exit(1)
try:
tapdisk = Tapdisk.get(**attrs)
except TypeError:
usage(sys.stderr)
sys.exit(1)
if cmd == 'tap.shutdown':
# Shutdown a running tapdisk, or raise
tapdisk.shutdown()
print >> sys.stderr, "Shut down %s" % tapdisk
elif cmd == 'tap.pause':
# Pause an unpaused tapdisk, or raise
tapdisk.pause()
print >> sys.stderr, "Paused %s" % tapdisk
elif cmd == 'tap.unpause':
# Unpause a paused tapdisk, or raise
tapdisk.unpause()
print >> sys.stderr, "Unpaused %s" % tapdisk
elif cmd == 'tap.stats':
# Gather tapdisk status
stats = tapdisk.stats()
print "%s:" % tapdisk
print json.dumps(stats, indent=True)
else:
usage(sys.stderr)
sys.exit(1)
elif cmd == 'vbd.uevent':
hnd = BlkbackEventHandler(cmd)
if not sys.stdin.isatty():
try:
hnd.run()
except Exception, e:
hnd.error("Unhandled Exception: %s" % e)
import traceback
_type, value, tb = sys.exc_info()
trace = traceback.format_exception(_type, value, tb)
for entry in trace:
for line in entry.rstrip().split('\n'):
util.SMlog(line)
else:
hnd.run()
elif cmd == 'vbd.list':
for vbd in Blkback.find():
print vbd, \
"physical-device=%s" % vbd.get_physical_device(), \
"pause=%s" % vbd.pause_requested()
else:
usage(sys.stderr)
sys.exit(1)
|
lgpl-2.1
| -5,769,786,515,209,374,000
| 30.340035
| 129
| 0.533656
| false
| 3.767804
| true
| false
| false
|
intel-ctrlsys/actsys
|
actsys/control/commands/bios/bios_version.py
|
1
|
1059
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 Intel Corp.
#
"""
Get BIOS version
"""
from ...commands.command import CommandResult
from ...plugin import DeclarePlugin
from .bios import BiosCommand
@DeclarePlugin('bios_version', 100)
class BiosVersionCommand(BiosCommand):
"""Bios Get Version Command"""
def __init__(self, device_name, configuration, plugin_manager, logger=None):
"""Retrieve dependencies and prepare for power on"""
BiosCommand.__init__(self, device_name, configuration, plugin_manager, logger)
def execute(self):
"""Execute the command"""
try:
self.setup()
result = []
result_dict = self.node_controller.get_version(self.device_data, self.bmc_data)
for key, value in result_dict.items():
command_result = CommandResult(0, value)
command_result.device_name = key
result.append(command_result)
except Exception as ex:
return [CommandResult(255, str(ex))]
return result
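# Illustrative shape of the result (device names are hypothetical):
# node_controller.get_version() is expected to return a
# {device_name: version_string} dict, which execute() converts into one
# CommandResult(0, "<bios version>") per node, each tagged with its
# device_name, e.g. "node01", "node02".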
|
apache-2.0
| -7,822,982,988,380,406,000
| 31.090909
| 91
| 0.623229
| false
| 4.120623
| false
| false
| false
|
RedHatQE/cfme_tests
|
cfme/services/service_catalogs/__init__.py
|
1
|
3979
|
import importscan
import sentaku
from widgetastic.utils import deflatten_dict
from widgetastic.utils import Parameter
from widgetastic.utils import ParametrizedLocator
from widgetastic.utils import ParametrizedString
from widgetastic.widget import ParametrizedView
from widgetastic.widget import Select
from widgetastic.widget import Text
from widgetastic.widget import View
from widgetastic_patternfly import BootstrapSelect
from widgetastic_patternfly import Input
from cfme.common import Taggable
from cfme.exceptions import ItemNotFound
from cfme.utils.appliance import Navigatable
from cfme.utils.update import Updateable
from cfme.utils.wait import TimedOutError
class ServiceCatalogs(Navigatable, Taggable, Updateable, sentaku.modeling.ElementMixin):
"""
Service Catalogs main class, used to context-switch between the ui
and ssui implementations. All the methods below are implemented on
both the ui and ssui sides.
"""
order = sentaku.ContextualMethod()
add_to_shopping_cart = sentaku.ContextualMethod()
def __init__(self, appliance, catalog=None, name=None, stack_data=None,
dialog_values=None, ansible_dialog_values=None):
Navigatable.__init__(self, appliance=appliance)
self.catalog = catalog
self.name = name
self.stack_data = stack_data
self.dialog_values = dialog_values
self.ansible_dialog_values = ansible_dialog_values
self.parent = self.appliance.context
class BaseOrderForm(View):
"""Represents the order form of a service.
This form doesn't have a static set of elements apart from titles and buttons. In most cases
the fields can be either regular inputs or dropdowns. Their locators depend on field names. In
order to find and fill required fields a parametrized view is used here. The keys of a fill
dictionary should match ids of the fields. For instance there is a field with such html
<input id="some_key"></input>, so a fill dictionary should look like that:
{"some_key": "some_value"}
"""
title = Text('#explorer_title_text')
dialog_title = Text(".//div[@id='main_div']//h2")
@ParametrizedView.nested
class fields(ParametrizedView): # noqa
PARAMETERS = ("key",)
input = Input(id=Parameter("key"))
select = Select(id=Parameter("key"))
param_input = Input(id=ParametrizedString("param_{key}"))
dropdown = BootstrapSelect(locator=ParametrizedLocator(
".//div[contains(@class, 'bootstrap-select')]/select[@id={key|quote}]/.."))
param_dropdown = BootstrapSelect(locator=ParametrizedLocator(
".//div[contains(@class, 'bootstrap-select')]/select[@id='param_{key}']/.."))
@property
def visible_widget(self):
for widget in (self.input, self.dropdown, self.param_input,
self.param_dropdown, self.select):
try:
widget.wait_displayed('2s')
return widget
except TimedOutError:
pass
else:
raise ItemNotFound("Visible widget is not found")
def read(self):
return self.visible_widget.read()
def fill(self, value):
return self.visible_widget.fill(value)
def fill(self, fill_data):
values = deflatten_dict(fill_data)
was_change = False
self.before_fill(values)
for key, value in values.items():
widget = self.fields(key)
if value is None:
self.logger.debug('Skipping fill of %r because value was None', key)
continue
try:
if widget.fill(value):
was_change = True
except NotImplementedError:
continue
self.after_fill(was_change)
return was_change
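# Illustrative use of the fill() contract documented on BaseOrderForm
# (the keys are hypothetical dialog element ids, not taken from this repo):
#
#     view.fill({"service_name": "my-service", "vm_count": "2"})
#
# Each key is resolved through the parametrized `fields` view, which fills
# whichever widget (input, select or bootstrap dropdown) is currently visible.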
from . import ui, ssui # NOQA last for import cycles
importscan.scan(ui)
importscan.scan(ssui)
|
gpl-2.0
| -2,037,682,114,986,794,200
| 37.259615
| 100
| 0.656195
| false
| 4.179622
| false
| false
| false
|
WardBenjamin/FRCBuild
|
setup.py
|
1
|
1637
|
"""
CLI Tool for building FIRST Robotics (FRC) C++ projects w/ WPILib
"""
from setuptools import find_packages, setup
dependencies = ['click']
setup(
name='frcbuild',
version='0.1.0',
url='https://github.com/WardBenjamin/frc-build',
license='BSD',
author='Benjamin Ward',
author_email='ward.programm3r@gmail.com',
description='CLI Tool for building FIRST Robotics (FRC) C++ projects w/ WPILib',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'frcbuild = frcbuild.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
# 'Development Status :: 1 - Planning',
# 'Development Status :: 2 - Pre-Alpha',
# 'Development Status :: 3 - Alpha',
'Development Status :: 4 - Beta',
# 'Development Status :: 5 - Production/Stable',
# 'Development Status :: 6 - Mature',
# 'Development Status :: 7 - Inactive',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
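# Typical local usage (illustrative, based on the entry_points above):
#
#   pip install -e .
#   frcbuild --help
#
# The "frcbuild" console script dispatches to frcbuild.cli:main.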
|
gpl-2.0
| 2,587,404,946,052,572,700
| 33.104167
| 84
| 0.598045
| false
| 4.03202
| false
| false
| false
|
phillc/django-filebrowser-fork
|
views.py
|
1
|
20390
|
# coding: utf-8
from django.shortcuts import render_to_response
from django.template import RequestContext as Context
from django.http import HttpResponseRedirect
from django.contrib.admin.views.decorators import staff_member_required
from django.views.decorators.cache import never_cache
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from time import gmtime, strftime, localtime, mktime, time
import os, string, ftplib, re, Image, decimal
from django import forms
# get settings
from filebrowser.fb_settings import *
# get functions
from filebrowser.functions import _get_path, _get_subdir_list, _get_dir_list, _get_breadcrumbs, _get_sub_query, _get_query, _get_filterdate, _get_filesize, _make_filedict, _get_settings_var, _handle_file_upload, _get_file_type, _make_image_thumbnail, _image_generator, _image_crop_generator, _is_image_version
# get forms
from filebrowser.forms import MakeDirForm, RenameForm, UploadForm, BaseUploadFormSet
def index(request, dir_name=None):
"""
Show list of files on a server-directory.
"""
path = _get_path(dir_name)
query = _get_query(request.GET)
# INITIAL VARIABLES
results_var = {'results_total': 0, 'results_current': 0, 'delete_total': 0, 'change_total': 0, 'imagegenerator_total': 0 }
counter = {}
for k,v in EXTENSIONS.iteritems():
counter[k] = 0
dir_list = os.listdir(os.path.join(PATH_SERVER, path))
file_list = []
for file in dir_list:
# VARIABLES
var_filesize_long = '' # filesize
var_filesize_str = '' # filesize in B, kB, MB
var_date = '' # YYYY-MM-dd
var_path_thumb = '' # path to thumbnail
var_link = '' # link to file (using URL_WWW), link to folder (using URL_ADMIN)
var_select_link = '' # link to file (using URL_WWW)
var_file_extension = '' # see EXTENSIONS in fb_settings.py
var_file_type = '' # Folder, Image, Video, Document, Sound, Code, ...
var_image_dimensions = '' # Image Dimensions (width, height)
var_thumb_dimensions = '' # Thumbnail Dimensions (width, height)
var_flag_makethumb = False # True, if Image has no Thumbnail.
var_flag_deletedir = False # True, if Directory is empty.
var_image_version = False # True, if Image is generated with ImageGenerator.
# DON'T DISPLAY FILES STARTING WITH %THUMB_PREFIX% OR "."
if re.compile(THUMB_PREFIX, re.M).search(file) or \
file.startswith('.'): # ... or with a '.' \
continue
else:
results_var['results_total'] += 1
# SIZE
var_filesize_long = os.path.getsize(os.path.join(PATH_SERVER, path, file))
var_filesize_str = _get_filesize(var_filesize_long)
# DATE / TIME
date_time = os.path.getmtime(os.path.join(PATH_SERVER, path, file))
var_date = strftime("%Y-%m-%d", gmtime(date_time))
# EXTENSION / FLAG_EMPTYDIR / DELETE_TOTAL
if os.path.isfile(os.path.join(PATH_SERVER, path, file)): # file
var_file_extension = os.path.splitext(file)[1].lower()
var_select_link = var_link = "%s%s%s" % (URL_WWW, path, file)
elif os.path.isdir(os.path.join(PATH_SERVER, path, file)): # folder
var_link = "%s%s%s" % (URL_ADMIN, path, file)
var_select_link = "%s%s%s/" % (URL_WWW, path, file)
if not os.listdir(os.path.join(PATH_SERVER, path, file)):
var_flag_deletedir = True # only empty directories are allowed to be deleted
# FILETYPE / COUNTER
var_file_type = _get_file_type(file)
if var_file_type:
counter[var_file_type] += 1
# DIMENSIONS / MAKETHUMB / SELECT
if var_file_type == 'Image':
try:
im = Image.open(os.path.join(PATH_SERVER, path, file))
var_image_dimensions = im.size
var_path_thumb = "%s%s%s%s" % (URL_WWW, path, THUMB_PREFIX, file)
try:
thumb = Image.open(os.path.join(PATH_SERVER, path, THUMB_PREFIX + file))
var_thumb_dimensions = thumb.size
except:
# if thumbnail does not exist, show makethumb-Icon instead.
var_path_thumb = settings.URL_FILEBROWSER_MEDIA + 'img/filebrowser_Thumb.gif'
var_flag_makethumb = True
except:
# if image is corrupt, change filetype to not defined
var_file_type = ''
# check, if image is generated with ImageGenerator
var_image_version = _is_image_version(file)
if var_image_version == False:
results_var['imagegenerator_total'] += 1
# FILTER / SEARCH
flag_extend = False
if query['filter_type'] != '' and query['filter_date'] != '' and var_file_type == query['filter_type'] and _get_filterdate(query['filter_date'], date_time):
flag_extend = True
elif query['filter_type'] != '' and query['filter_date'] == '' and var_file_type == query['filter_type']:
flag_extend = True
elif query['filter_type'] == '' and query['filter_date'] != '' and _get_filterdate(query['filter_date'], date_time):
flag_extend = True
elif query['filter_type'] == '' and query['filter_date'] == '':
flag_extend = True
if query['q'] and not re.compile(query['q'].lower(), re.M).search(file.lower()):
flag_extend = False
# APPEND FILE_LIST
if flag_extend == True:
file_list.append([file, var_filesize_long, var_filesize_str, var_date, var_path_thumb, var_link, var_select_link, var_file_extension, var_file_type, var_image_dimensions, var_thumb_dimensions, file.lower(), var_flag_makethumb, var_flag_deletedir, var_image_version])
# SORT LIST
file_list.sort(lambda x, y: cmp(x[int(query['o'])], y[int(query['o'])]))
if query['ot'] == "desc":
file_list.reverse()
# MAKE DICTIONARY (for better readability in the templates)
file_dict = _make_filedict(file_list)
# RESULTS
results_var['results_current'] = len(file_list)
for file in file_dict:
if file['file_type'] == 'Image':
results_var['change_total'] += 1
if file['file_type'] != 'Folder':
results_var['delete_total'] += 1
elif file['file_type'] == 'Folder' and file['flag_deletedir'] == True:
results_var['delete_total'] += 1
return render_to_response('filebrowser/index.html', {
'dir': dir_name,
'file_dict': file_dict,
'results_var': results_var,
'query': query,
'counter': counter,
'settings_var': _get_settings_var(request.META['HTTP_HOST'], path),
'breadcrumbs': _get_breadcrumbs(_get_query(request.GET), dir_name, ''),
'title': _('FileBrowser'),
'root_path': URL_HOME,
}, context_instance=Context(request))
index = staff_member_required(never_cache(index))
def mkdir(request, dir_name=None):
"""
Make directory
"""
path = _get_path(dir_name)
query = _get_query(request.GET)
if request.method == 'POST':
form = MakeDirForm(PATH_SERVER, path, request.POST)
if form.is_valid():
server_path = os.path.join(PATH_SERVER, path, form.cleaned_data['dir_name'].lower())
try:
os.mkdir(server_path)
os.chmod(server_path, 0775)
# MESSAGE & REDIRECT
msg = _('The directory %s was successfully created.') % (form.cleaned_data['dir_name'].lower())
request.user.message_set.create(message=msg)
# on redirect, sort by date desc to see the new directory on top of the list
return HttpResponseRedirect(URL_ADMIN + path + "?&ot=desc&o=3&" + query['pop'])
except OSError, (errno, strerror):
if errno == 13:
form.errors['dir_name'] = forms.util.ErrorList([_('Permission denied.')])
else:
form.errors['dir_name'] = forms.util.ErrorList([_('Error creating directory.')])
else:
form = MakeDirForm(PATH_SERVER, path)
return render_to_response('filebrowser/makedir.html', {
'form': form,
'query': query,
'settings_var': _get_settings_var(request.META['HTTP_HOST'], path),
'breadcrumbs': _get_breadcrumbs(_get_query(request.GET), dir_name, 'Make Directory'),
'title': _('Make directory'),
'root_path': URL_HOME,
}, context_instance=Context(request))
mkdir = staff_member_required(never_cache(mkdir))
def upload(request, dir_name=None):
"""
Multiple Upload.
"""
from django.forms.formsets import formset_factory
path = _get_path(dir_name)
query = _get_query(request.GET)
# PIL's Error "Suspension not allowed here" work around:
# s. http://mail.python.org/pipermail/image-sig/1999-August/000816.html
import ImageFile
ImageFile.MAXBLOCK = IMAGE_MAXBLOCK # default is 64k
UploadFormSet = formset_factory(UploadForm, formset=BaseUploadFormSet, extra=5)
if request.method == 'POST':
formset = UploadFormSet(data=request.POST, files=request.FILES, path_server=PATH_SERVER, path=path)
if formset.is_valid():
for cleaned_data in formset.cleaned_data:
if cleaned_data:
# UPLOAD FILE
_handle_file_upload(PATH_SERVER, path, cleaned_data['file'])
if _get_file_type(cleaned_data['file'].name) == "Image":
# MAKE THUMBNAIL
_make_image_thumbnail(PATH_SERVER, path, cleaned_data['file'].name)
# IMAGE GENERATOR
if FORCE_GENERATOR or (cleaned_data['use_image_generator'] and (IMAGE_GENERATOR_LANDSCAPE != "" or IMAGE_GENERATOR_PORTRAIT != "")):
_image_generator(PATH_SERVER, path, cleaned_data['file'].name)
# GENERATE CROPPED/RECTANGULAR IMAGE
if FORCE_GENERATOR or (cleaned_data['use_image_generator'] and IMAGE_CROP_GENERATOR != ""):
_image_crop_generator(PATH_SERVER, path, cleaned_data['file'].name)
# MESSAGE & REDIRECT
msg = _('Upload successful.')
request.user.message_set.create(message=msg)
# on redirect, sort by date desc to see the uploaded files on top of the list
redirect_url = URL_ADMIN + path + "?&ot=desc&o=3&" + query['pop']
return HttpResponseRedirect(redirect_url)
else:
formset = UploadFormSet(path_server=PATH_SERVER, path=path)
return render_to_response('filebrowser/upload.html', {
'formset': formset,
'dir': dir_name,
'query': _get_query(request.GET),
'settings_var': _get_settings_var(request.META['HTTP_HOST'], path),
'breadcrumbs': _get_breadcrumbs(_get_query(request.GET), dir_name, 'Multiple Upload'),
'title': _('Select files to upload'),
'root_path': URL_HOME,
}, context_instance=Context(request))
upload = staff_member_required(never_cache(upload))
def makethumb(request, dir_name=None, file_name=None):
"""
Make Thumbnail(s) for existing Image or Directory
This is useful if someone uploads images via FTP, not using the
upload functionality of the FileBrowser.
"""
path = _get_path(dir_name)
query = _get_query(request.GET)
if file_name:
# MAKE THUMB FOR SINGLE IMAGE
file_path = os.path.join(PATH_SERVER, path, file_name)
if os.path.isfile(file_path):
_make_image_thumbnail(PATH_SERVER, path, file_name)
else:
# MAKE THUMBS FOR WHOLE DIRECTORY
dir_path = os.path.join(PATH_SERVER, path)
dir_list = os.listdir(dir_path)
for file in dir_list:
if os.path.isfile(os.path.join(PATH_SERVER, path, file)) and not os.path.isfile(os.path.join(PATH_SERVER, path, THUMB_PREFIX + file)) and not re.compile(THUMB_PREFIX, re.M).search(file) and _get_file_type(file) == "Image":
_make_image_thumbnail(PATH_SERVER, path, file)
# MESSAGE & REDIRECT
msg = _('Thumbnail creation successful.')
request.user.message_set.create(message=msg)
return HttpResponseRedirect(URL_ADMIN + path + query['query_str_total'])
return render_to_response('filebrowser/index.html', {
'dir': dir_name,
'query': query,
'settings_var': _get_settings_var(request.META['HTTP_HOST'], path),
'breadcrumbs': _get_breadcrumbs(_get_query(request.GET), dir_name, ''),
'root_path': URL_HOME,
}, context_instance=Context(request))
makethumb = staff_member_required(never_cache(makethumb))
def delete(request, dir_name=None):
"""
Delete existing File/Directory.
If file is an Image, also delete thumbnail.
When trying to delete a directory, the directory has to be empty.
"""
path = _get_path(dir_name)
query = _get_query(request.GET)
msg = ""
if request.GET:
if request.GET.get('type') != "Folder":
server_path = os.path.join(PATH_SERVER, path, request.GET.get('filename'))
try:
# DELETE FILE
os.unlink(server_path)
# TRY DELETING THUMBNAIL
path_thumb = os.path.join(PATH_SERVER, path, THUMB_PREFIX + request.GET.get('filename'))
try:
os.unlink(path_thumb)
except OSError:
pass
# TRY DELETING IMAGE_VERSIONS
versions_path = os.path.join(PATH_SERVER, path, request.GET.get('filename').replace(".", "_").lower() + IMAGE_GENERATOR_DIRECTORY)
try:
dir_list = os.listdir(versions_path)
for file in dir_list:
file_path = os.path.join(versions_path, file)
os.unlink(file_path)
os.rmdir(versions_path)
except OSError:
pass
# MESSAGE & REDIRECT
msg = _('The file %s was successfully deleted.') % (request.GET.get('filename').lower())
request.user.message_set.create(message=msg)
return HttpResponseRedirect(URL_ADMIN + path + query['query_nodelete'])
except OSError:
# todo: define error message
msg = OSError
else:
server_path = os.path.join(PATH_SERVER, path, request.GET.get('filename'))
try:
os.rmdir(server_path)
# MESSAGE & REDIRECT
msg = _('The directory %s was successfully deleted.') % (request.GET.get('filename').lower())
request.user.message_set.create(message=msg)
return HttpResponseRedirect(URL_ADMIN + path + query['query_nodelete'])
except OSError:
# todo: define error message
msg = OSError
if msg:
request.user.message_set.create(message=msg)
return render_to_response('filebrowser/index.html', {
'dir': dir_name,
'file': request.GET.get('filename', ''),
'query': query,
'settings_var': _get_settings_var(request.META['HTTP_HOST'], path),
'breadcrumbs': _get_breadcrumbs(_get_query(request.GET), dir_name, ''),
'root_path': URL_HOME,
}, context_instance=Context(request))
delete = staff_member_required(never_cache(delete))
def rename(request, dir_name=None, file_name=None):
"""
Rename existing File/Directory.
"""
path = _get_path(dir_name)
query = _get_query(request.GET)
if os.path.isfile(os.path.join(PATH_SERVER, path, file_name)): # file
file_type = _get_file_type(file_name)
file_extension = os.path.splitext(file_name)[1].lower()
else:
file_extension = ""
file_type = ""
if request.method == 'POST':
form = RenameForm(PATH_SERVER, path, file_extension, request.POST)
if form.is_valid():
old_path = os.path.join(PATH_SERVER, path, file_name)
new_path = os.path.join(PATH_SERVER, path, request.POST.get('name').lower() + file_extension)
try:
os.rename(old_path, new_path)
# RENAME IMAGE_THUMBNAILS
if file_type == 'Image':
old_thumb_path = os.path.join(PATH_SERVER, path, THUMB_PREFIX + file_name)
new_thumb_path = os.path.join(PATH_SERVER, path, THUMB_PREFIX + request.POST.get('name').lower() + file_extension)
try:
os.rename(old_thumb_path, new_thumb_path)
except OSError, (errno, strerror):
form.errors['name'] = forms.util.ErrorList([_('Error renaming Thumbnail.')])
# RENAME IMAGE VERSIONS? TOO MUCH MAGIC?
# MESSAGE & REDIRECT
if not form.errors:
msg = _('Renaming was successful.')
request.user.message_set.create(message=msg)
# on redirect, sort by date desc to see the new stuff on top of the list
return HttpResponseRedirect(URL_ADMIN + path + "?&ot=desc&o=3&" + query['pop'])
except OSError, (errno, strerror):
form.errors['name'] = forms.util.ErrorList([_('Error.')])
else:
form = RenameForm(PATH_SERVER, path, file_extension)
return render_to_response('filebrowser/rename.html', {
'form': form,
'query': query,
'file_extension': file_extension,
'settings_var': _get_settings_var(request.META['HTTP_HOST'], path),
'breadcrumbs': _get_breadcrumbs(_get_query(request.GET), dir_name, 'Rename'),
'title': _('Rename "%s"') % file_name,
'root_path': URL_HOME,
}, context_instance=Context(request))
rename = staff_member_required(never_cache(rename))
def generateimages(request, dir_name=None, file_name=None):
"""
Generate Image Versions for an existing single Image or a whole Directory.
This is useful if someone uploads images via FTP, not using the
upload functionality of the FileBrowser.
"""
path = _get_path(dir_name)
query = _get_query(request.GET)
if file_name:
# GENERATE IMAGES
if IMAGE_GENERATOR_LANDSCAPE != "" or IMAGE_GENERATOR_PORTRAIT != "":
_image_generator(PATH_SERVER, path, file_name)
# GENERATE CROPPED/RECTANGULAR IMAGE
if IMAGE_CROP_GENERATOR != "":
_image_crop_generator(PATH_SERVER, path, file_name)
else:
# GENERATE IMAGES FOR WHOLE DIRECTORY
dir_path = os.path.join(PATH_SERVER, path)
dir_list = os.listdir(dir_path)
for file in dir_list:
if os.path.isfile(os.path.join(PATH_SERVER, path, file)) and not re.compile(THUMB_PREFIX, re.M).search(file) and _get_file_type(file) == "Image":
# GENERATE IMAGES
if IMAGE_GENERATOR_LANDSCAPE != "" or IMAGE_GENERATOR_PORTRAIT != "":
_image_generator(PATH_SERVER, path, file)
# GENERATE CROPPED/RECTANGULAR IMAGE
if IMAGE_CROP_GENERATOR != "":
_image_crop_generator(PATH_SERVER, path, file)
# MESSAGE & REDIRECT
msg = _('Successfully generated Images.')
request.user.message_set.create(message=msg)
return HttpResponseRedirect(URL_ADMIN + path + query['query_str_total'])
return render_to_response('filebrowser/index.html', {
'dir': dir_name,
'query': query,
'settings_var': _get_settings_var(request.META['HTTP_HOST'], path),
'breadcrumbs': _get_breadcrumbs(_get_query(request.GET), dir_name, ''),
'root_path': URL_HOME,
}, context_instance=Context(request))
generateimages = staff_member_required(never_cache(generateimages))
|
bsd-3-clause
| -4,157,568,425,170,416,600
| 43.714912
| 309
| 0.582688
| false
| 3.776625
| false
| false
| false
|
Dev-Cloud-Platform/Dev-Cloud
|
dev_cloud/cc1/src/clm/utils/tokens.py
|
1
|
2164
|
# -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
"""@package src.clm.utils.tokens
@author Piotr Wójcik
@date 21.09.2010
"""
from django.utils.http import int_to_base36, base36_to_int
class PasswordResetTokenGenerator(object):
"""
Class for generating tokens during password reset.
"""
def make_token(self, user):
"""
@parameter{user,User} instance of the User for whom the Token should
be generated
@returns{string} Token generated for the specified User
"""
import hashlib
h = hashlib.sha1(user.password +
unicode(user.last_login_date) +
unicode(user.id)).hexdigest()[::2]
return "%s-%s" % (int_to_base36(user.id), h)
def check_token(self, user, token):
"""
@parameter{user,User} instance of the User whose Token should be
checked.
@parameter{token,string} Token to check
@returns{bool} @val{true} for right Token, @val{false} for wrong Token
"""
try:
ts_b36 = token.split("-")[0]
except ValueError:
return False
try:
uid = base36_to_int(ts_b36)
except ValueError:
return False
# Check that the uid has not been tampered with
if uid != user.id:
return False
if self.make_token(user) != token:
return False
return True
default_token_generator = PasswordResetTokenGenerator()
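# Illustrative usage (assumes a Django user object exposing the id, password
# and last_login_date attributes referenced by make_token above):
#
#   token = default_token_generator.make_token(user)
#   # later, e.g. when the reset link is visited:
#   assert default_token_generator.check_token(user, token)
#
# For user.id == 42 the token starts with "16-" (42 in base 36), followed by
# every other character of a SHA-1 hex digest.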
|
apache-2.0
| -4,509,081,930,120,977,400
| 27.84
| 78
| 0.626445
| false
| 3.939891
| false
| false
| false
|
bioconda/bioconda-utils
|
bioconda_utils/lint/check_noarch.py
|
1
|
4912
|
"""Use of ``noarch`` and ``skip``
When to use ``noarch`` and when to use ``skip`` or pin the interpreter
is non-intuitive and idiosynractic due to ``conda`` legacy
behavior. These checks aim at getting the right settings.
"""
import re
from . import LintCheck, ERROR, WARNING, INFO
# Noarch or not checks:
#
# - Python packages that use no compiler should be
# a) Marked ``noarch: python``
# b) Not use ``skip: True # [...]`` except for osx/linux,
# but use ``- python [<>]3``
# - Python packages that use a compiler should be
# a) NOT marked ``noarch: python``
# b) Not use ``- python [<>]3``,
# but use ``skip: True # [py[23]k]``
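# Illustrative meta.yaml fragments matching the two cases above (editorial
# examples, not taken from any particular recipe):
#
#   pure Python, no compiler -> noarch:
#     build:
#       noarch: python
#     requirements:
#       host:
#         - python >3
#
#   uses a compiler -> per-platform build, python-version skips allowed:
#     build:
#       skip: True  # [py2k]
#     requirements:
#       build:
#         - {{ compiler('c') }}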
class should_be_noarch_python(LintCheck):
"""The recipe should be build as ``noarch``
Please add::
build:
noarch: python
Python packages that don't require a compiler to build are
normally architecture independent and go into the ``noarch``
subset of packages.
"""
def check_deps(self, deps):
if 'python' not in deps:
return # not a python package
if all('build' not in loc for loc in deps['python']):
return # only uses python in run/host
if any(dep.startswith('compiler_') for dep in deps):
return # not compiled
if self.recipe.get('build/noarch', None) == 'python':
return # already marked noarch: python
self.message(section='build', data=True)
def fix(self, _message, _data):
self.recipe.set('build/noarch', 'python')
return True
class should_be_noarch_generic(LintCheck):
"""The recipe should be build as ``noarch``
Please add::
build:
noarch: generic
Packages that don't require a compiler to build are normally
architecture independent and go into the ``noarch`` subset of
packages.
"""
requires = ['should_be_noarch_python']
def check_deps(self, deps):
if any(dep.startswith('compiler_') for dep in deps):
return # not compiled
if self.recipe.get('build/noarch', None):
return # already marked noarch
self.message(section='build', data=True)
def fix(self, _message, _data):
self.recipe.set('build/noarch', 'generic')
return True
class should_not_be_noarch_compiler(LintCheck):
"""The recipe uses a compiler but is marked noarch
Recipes using a compiler should not be marked noarch.
Please remove the ``build: noarch:`` section.
"""
def check_deps(self, deps):
if not any(dep.startswith('compiler_') for dep in deps):
return # not compiled
if self.recipe.get('build/noarch', False) is False:
return # no noarch, or noarch=False
self.message(section='build/noarch')
class should_not_be_noarch_skip(LintCheck):
"""The recipe uses ``skip: True`` but is marked noarch
Recipes marked as ``noarch`` cannot use skip.
"""
def check_recipe(self, recipe):
if self.recipe.get('build/noarch', False) is False:
return # no noarch, or noarch=False
if self.recipe.get('build/skip', False) is False:
return # no skip or skip=False
self.message(section='build/noarch')
class should_not_use_skip_python(LintCheck):
"""The recipe should be noarch and not use python based skipping
Please use::
requirements:
build:
- python >3 # or <3
run:
- python >3 # or <3
The ``build: skip: True`` feature only works as expected for
packages built specifically for each "platform" (i.e. Python
version and OS). This package should be ``noarch`` and not use
skips.
"""
bad_skip_terms = ('py2k', 'py3k', 'python')
def check_deps(self, deps):
if 'python' not in deps:
return # not a python package
if any(dep.startswith('compiler_') for dep in deps):
return # not compiled
if self.recipe.get('build/skip', None) is None:
return # no build: skip: section
skip_line = self.recipe.get_raw('build/skip')
if not any(term in skip_line for term in self.bad_skip_terms):
return # no offending skip terms
self.message(section='build/skip')
class should_not_be_noarch_source(LintCheck):
"""The recipe uses per platform sources and cannot be noarch
You are downloading different upstream sources for each
platform. Remove the noarch section or use just one source for all
platforms.
"""
_pat = re.compile(r'# +\[.*\]')
def check_source(self, source, section):
if self.recipe.get('build/noarch', False) is False:
return # no noarch, or noarch=False
# just search the entire source entry for a comment
if self._pat.search(self.recipe.get_raw(f"{section}")):
self.message(section)
|
mit
| 7,150,494,295,863,050,000
| 30.487179
| 70
| 0.617875
| false
| 3.787201
| false
| false
| false
|
normcyr/sopel-modules
|
strava.py
|
1
|
1773
|
#!/usr/bin/python3
'''
strava.py - strava activity module
author: Norm1 <normand.cyr@gmail.com>
found here: https://github.com/normcyr/sopel-modules
'''
import requests
from bs4 import BeautifulSoup
from sopel.module import commands, example
def fetch_new_activity(url):
r = requests.get(url)
if r.status_code == 200:
return(r)
else:
print('URL error')
def make_soup(r):
soup = BeautifulSoup(r.text, 'html.parser')
return(soup)
def retreive_activity_info(soup):
athlete_name = soup.find('h2', {'class': 'bottomless'}).text.strip()
activity_title = soup.find('div', {'class': 'hgroup'}).text.strip()
activity_type = soup.find('div', {'class': 'activity-type-date'}).find('strong').text.strip()
activity_distance = soup.find('li', {'class': 'distance'}).find('strong').text.strip()
activity_info = {'Name': athlete_name, 'Title': activity_title, 'Type': activity_type, 'Distance': activity_distance}
return(activity_info)
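# Note: the class names used above ('bottomless', 'hgroup', 'activity-type-date',
# 'distance') are assumed to match the markup of Strava's public activity page
# at the time of writing; if Strava changes its HTML these selectors will need
# updating. A rough sanity check (output values are purely illustrative):
#
#   soup = make_soup(fetch_new_activity('https://www.strava.com/activities/1474462480'))
#   retreive_activity_info(soup)
#   # -> {'Name': '...', 'Title': '...', 'Type': 'Ride', 'Distance': '42.0 km'}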
@commands('strava')
@example('.strava https://www.strava.com/activities/1474462480')
def strava(bot, trigger):
'''.strava <activity_url> - Retrieve the Strava data from an activity. This assumes that the activity is public.'''
url = trigger.group(2)
#url = 'https://www.strava.com/activities/1474462480'
try:
r = fetch_new_activity(url)
soup = make_soup(r)
activity_info = retreive_activity_info(soup)
bot.say('{} just did a {} {}.'.format(activity_info['Name'], activity_info['Distance'], activity_info['Type']))
#print('{} just did a {} {}.'.format(activity_info['Name'], activity_info['Distance'], activity_info['Type']))
except Exception:
return bot.say("Could not retrieve the activity data; check that a public Strava activity URL was given")
#if __name__ == '__main__':
#strava()
|
gpl-3.0
| -6,875,788,661,379,818,000
| 29.568966
| 121
| 0.64467
| false
| 3.271218
| false
| false
| false
|
andrewyoung1991/supriya
|
supriya/tools/ugentools/Lag3UD.py
|
1
|
4787
|
# -*- encoding: utf-8 -*-
from supriya.tools.synthdeftools.CalculationRate import CalculationRate
from supriya.tools.ugentools.Filter import Filter
class Lag3UD(Filter):
r'''An up/down exponential lag generator.
::
>>> source = ugentools.In.ar(bus=0)
>>> lag_3_ud = ugentools.Lag3UD.ar(
... lag_time_d=0.1,
... lag_time_u=0.1,
... source=source,
... )
>>> lag_3_ud
Lag3UD.ar()
'''
### CLASS VARIABLES ###
__documentation_section__ = 'Filter UGens'
__slots__ = ()
_ordered_input_names = (
'source',
'lag_time_u',
'lag_time_d',
)
_valid_rates = (
CalculationRate.AUDIO,
CalculationRate.CONTROL,
)
### INITIALIZER ###
def __init__(
self,
calculation_rate=None,
lag_time_d=0.1,
lag_time_u=0.1,
source=None,
):
Filter.__init__(
self,
calculation_rate=calculation_rate,
lag_time_d=lag_time_d,
lag_time_u=lag_time_u,
source=source,
)
### PUBLIC METHODS ###
@classmethod
def ar(
cls,
lag_time_d=0.1,
lag_time_u=0.1,
source=None,
):
r'''Constructs an audio-rate Lag3UD.
::
>>> source = ugentools.In.ar(bus=0)
>>> lag_3_ud = ugentools.Lag3UD.ar(
... lag_time_d=0.1,
... lag_time_u=0.1,
... source=source,
... )
>>> lag_3_ud
Lag3UD.ar()
Returns ugen graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.AUDIO
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
lag_time_d=lag_time_d,
lag_time_u=lag_time_u,
source=source,
)
return ugen
# def coeffs(): ...
@classmethod
def kr(
cls,
lag_time_d=0.1,
lag_time_u=0.1,
source=None,
):
r'''Constructs a control-rate Lag3UD.
::
>>> source = ugentools.In.ar(bus=0)
>>> lag_3_ud = ugentools.Lag3UD.kr(
... lag_time_d=0.1,
... lag_time_u=0.1,
... source=source,
... )
>>> lag_3_ud
Lag3UD.kr()
Returns ugen graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.CONTROL
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
lag_time_d=lag_time_d,
lag_time_u=lag_time_u,
source=source,
)
return ugen
# def magResponse(): ...
# def magResponse2(): ...
# def magResponse5(): ...
# def magResponseN(): ...
# def scopeResponse(): ...
### PUBLIC PROPERTIES ###
@property
def lag_time_d(self):
r'''Gets `lag_time_d` input of Lag3UD.
::
>>> source = ugentools.In.ar(bus=0)
>>> lag_3_ud = ugentools.Lag3UD.ar(
... lag_time_d=0.1,
... lag_time_u=0.1,
... source=source,
... )
>>> lag_3_ud.lag_time_d
0.1
Returns ugen input.
'''
index = self._ordered_input_names.index('lag_time_d')
return self._inputs[index]
@property
def lag_time_u(self):
r'''Gets `lag_time_u` input of Lag3UD.
::
>>> source = ugentools.In.ar(bus=0)
>>> lag_3_ud = ugentools.Lag3UD.ar(
... lag_time_d=0.1,
... lag_time_u=0.1,
... source=source,
... )
>>> lag_3_ud.lag_time_u
0.1
Returns ugen input.
'''
index = self._ordered_input_names.index('lag_time_u')
return self._inputs[index]
@property
def source(self):
r'''Gets `source` input of Lag3UD.
::
>>> source = ugentools.In.ar(bus=0)
>>> lag_3_ud = ugentools.Lag3UD.ar(
... lag_time_d=0.1,
... lag_time_u=0.1,
... source=source,
... )
>>> lag_3_ud.source
OutputProxy(
source=In(
bus=0.0,
calculation_rate=CalculationRate.AUDIO,
channel_count=1
),
output_index=0
)
Returns ugen input.
'''
index = self._ordered_input_names.index('source')
return self._inputs[index]
|
mit
| 4,332,414,997,754,665,500
| 22.820896
| 71
| 0.448715
| false
| 3.545926
| false
| false
| false
|
rwg0/ironlab
|
ILabPythonLib/ironplot/ironplot_mscharts.py
|
1
|
1682
|
from ironplot_windows import *
import clr
import System
import System.Windows.Controls
from System.Windows.Controls import *
clr.AddReferenceByPartialName("System.Windows.Forms.DataVisualization")
clr.AddReferenceByPartialName("System.Drawing")
clr.AddReferenceToFile("IronPlot.dll")
import System.Windows.Forms.DataVisualization as dv
import System.Drawing as dr
from System.Windows import Thickness, Visibility
from IronPlot import *
from IronPlot.Plotting3D import Plot3D
floatarray = System.Array[float]
numpyAvailable = True
try:
import numpy as np
except ImportError:
numpyAvailable = False
def radial(theta, r, **kwargs):
""" Create a radial plot (or overwite current plot if hold is set)
"""
if len(theta) != len(r):
raise ValueError('Arrays must be of the same length.')
if PlotContext.CurrentWindowIndex == None:
PlotContext.OpenNextWindow()
if (PlotContext.CurrentPlot == None) or (PlotContext.HoldState == False):
# New plot or overwrite plot
host = MSChartHost()
chart = host.Chart
chartArea = dv.Charting.ChartArea(Name = "Default")
chart.ChartAreas.Add(chartArea)
PlotContext.AddPlot(host)
else:
# Add to current plot
chart = PlotContext.CurrentPlot.Chart
seriesName = "Series" + str(chart.Series.Count)
series = dv.Charting.Series(ChartType = dv.Charting.SeriesChartType.Polar, Name = seriesName)
chart.Series.Add(series)
for a, b in zip(theta, r):
chart.Series[seriesName].Points.AddXY(float(a), float(b))
# Apply kwargs
setprops(series, **kwargs)
return series
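# A minimal usage sketch (assumes an IronPython session with ironplot loaded;
# 'Color' is only an illustrative series property forwarded to setprops):
#
#   theta = [i * 0.1 for i in range(200)]
#   r = list(theta) # Archimedean spiral
#   radial(theta, r, Color=dr.Color.Red)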
|
lgpl-3.0
| -5,491,246,122,984,367,000
| 31.68
| 96
| 0.708086
| false
| 3.571125
| false
| false
| false
|
DaveBerkeley/lasercut
|
laser/gears.py
|
1
|
3508
|
#!/usr/bin/python
import sys
import math
from laser import Polygon, Circle, Collection, Config
from laser import radians, rotate_2d
from render import DXF as dxf
# Involute gears, see :
# http://www.cartertools.com/involute.html
#
#
def circle_intersect(v, r):
# see http://mathworld.wolfram.com/Circle-LineIntersection.html
x1, y1 = v.points[-2]
x2, y2 = v.points[-1]
dx = x1 - x2
dy = y1 - y2
dr = math.sqrt((dx * dx) + (dy * dy))
D = (x1 * y2) - (x2 * y1)
def sgn(a):
return -1
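# Note: unlike the textbook sgn() in the referenced Wolfram formula (-1 for
# negative values, +1 otherwise), this helper always returns -1, which amounts
# to always selecting the same one of the two line/circle intersection points.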
x = -((D * dy) - (sgn(dy)*dx*math.sqrt(((r*r)*(dr*dr))-(D*D)))) / (dr*dr)
y = -((-D*dx) - (abs(dy)* math.sqrt(((r*r)*(dr*dr))-(D*D)))) / (dr*dr)
# truncate the last line segment to fit the radius
v.points[-1] = x, y
#
#
def make_involute(pitch_dia, N, PA=20.0, teeth=None):
m = float(pitch_dia) / N
P = 1.0 / m
D = N / P # Pitch Diameter
R = D / 2.0 # Pitch Radius
DB = D * math.cos(radians(PA)) # Base Circle Diameter
RB = DB / 2.0 # Base Circle Radius
a = 1.0 / P # Addendum
d = 1.157 / P # Dedendum
DO = D + (2 * a) # Outside Diameter
RO = DO / 2.0 # Outside Radius
DR = D - (2 * d) # Root Diameter
RR = DR / 2.0 # Root Radius
CB = math.pi * DB # Circumference of Base Circle
fcb = RB / 20.0
ncb = CB / fcb
acb = 360 / ncb
gt = 360.0 / N # Gear Tooth Spacing
info = {
"outside_dia" : DO,
"pitch_dia" : D,
"root_dia" : DR,
}
v = Polygon()
v.add(0, RR)
# generate involute curve points where
# radius is with the D and DO circles
first = None
for i in range(20):
x, y = i * RB / 20.0, RB
x, y = rotate_2d(radians(i * acb), x, y)
r = abs(complex(x, y))
if r < R:
first = x, y
continue
if first:
v.add(*first)
first = None
v.add(x, y)
if r > RO:
break
# need to trim last involute line segment
# so it doesn't exceed the outside_radius
circle_intersect(v, RO)
# rotate back 1/4 tooth
v.rotate(-gt / 4.0)
# add reflection to itself
w = v.copy()
w.reflect_v()
# make sure the halves are joined correctly
w.points.reverse()
v.add_poly(w)
work = Polygon()
work.info = info
# add all the teeth to the work
for i in range(teeth or N):
c = v.copy()
c.rotate(gt * i)
work.add_poly(c)
# join the ends together
if teeth is None:
work.close()
return work
#
#
if __name__ == "__main__":
x_margin = 10
y_margin = 20
draw = False
if len(sys.argv) > 1:
draw = True
def commit(work):
#work.translate(x_margin, y_margin)
work.draw(drawing, config.cut())
config = Config()
drawing = dxf.drawing("test.dxf")
N = 20
PA = 20.0
pitch_dia = 20
nteeth = None # 6 # set if only some teeth required
work = make_involute(pitch_dia, N, PA, teeth=nteeth)
if nteeth:
work.add(0, 0)
work.close()
if draw:
for label in [ "outside_dia", "root_dia", "pitch_dia" ]:
d = work.info[label]
c = Circle((0, 0), d / 2.0, colour=Config.draw_colour)
work.add(c)
commit(work)
drawing.save()
# FIN
|
gpl-2.0
| -6,214,801,681,637,845,000
| 22.702703
| 77
| 0.498575
| false
| 3.029361
| false
| false
| false
|
spjmurray/openstack-sentinel
|
sentinel/conf/opts.py
|
1
|
1449
|
# Copyright 2017 DataCentred Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
from oslo_config import cfg
MODULES = [
'sentinel.clients',
'sentinel.log',
'sentinel.tests.functional.base',
'sentinel.whitelist',
]
def list_opts():
opts = []
for module_name in MODULES:
module = importlib.import_module(module_name)
group = None
if module.OPTS_GROUP:
group = module.OPTS_GROUP.name
opts.append((group, module.OPTS))
return opts
def configure():
conf = cfg.ConfigOpts()
for module_name in MODULES:
module = importlib.import_module(module_name)
group = None
if module.OPTS_GROUP:
group = module.OPTS_GROUP.name
conf.register_group(module.OPTS_GROUP)
conf.register_opts(module.OPTS, group=group)
conf([], project='sentinel')
return conf
# vi: ts=4 et:
|
apache-2.0
| 1,883,553,916,626,965,200
| 25.345455
| 78
| 0.661836
| false
| 3.9375
| false
| false
| false
|
zestrada/nova-cs498cc
|
nova/exception.py
|
1
|
34128
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Nova base exception handling.
Includes decorator for re-raising Nova-type exceptions.
SHOULD include dedicated exception logging.
"""
import functools
from oslo.config import cfg
import webob.exc
from nova.openstack.common import excutils
from nova.openstack.common import log as logging
from nova import safe_utils
LOG = logging.getLogger(__name__)
exc_log_opts = [
cfg.BoolOpt('fatal_exception_format_errors',
default=False,
help='make exception message format errors fatal'),
]
CONF = cfg.CONF
CONF.register_opts(exc_log_opts)
class ConvertedException(webob.exc.WSGIHTTPException):
def __init__(self, code=0, title="", explanation=""):
self.code = code
self.title = title
self.explanation = explanation
super(ConvertedException, self).__init__()
class ProcessExecutionError(IOError):
def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None,
description=None):
self.exit_code = exit_code
self.stderr = stderr
self.stdout = stdout
self.cmd = cmd
self.description = description
if description is None:
description = _('Unexpected error while running command.')
if exit_code is None:
exit_code = '-'
message = _('%(description)s\nCommand: %(cmd)s\n'
'Exit code: %(exit_code)s\nStdout: %(stdout)r\n'
'Stderr: %(stderr)r') % locals()
IOError.__init__(self, message)
def _cleanse_dict(original):
"""Strip all admin_password, new_pass, rescue_pass keys from a dict."""
return dict((k, v) for k, v in original.iteritems() if "_pass" not in k)
def wrap_exception(notifier=None, publisher_id=None, event_type=None,
level=None):
"""This decorator wraps a method to catch any exceptions that may
get thrown. It logs the exception as well as optionally sending
it to the notification system.
"""
# TODO(sandy): Find a way to import nova.notifier.api so we don't have
# to pass it in as a parameter. Otherwise we get a cyclic import of
# nova.notifier.api -> nova.utils -> nova.exception :(
def inner(f):
def wrapped(self, context, *args, **kw):
# Don't store self or context in the payload, it now seems to
# contain confidential information.
try:
return f(self, context, *args, **kw)
except Exception, e:
with excutils.save_and_reraise_exception():
if notifier:
payload = dict(exception=e)
call_dict = safe_utils.getcallargs(f, *args, **kw)
cleansed = _cleanse_dict(call_dict)
payload.update({'args': cleansed})
# Use temp vars so we don't shadow
# our outer definitions.
temp_level = level
if not temp_level:
temp_level = notifier.ERROR
temp_type = event_type
if not temp_type:
# If f has multiple decorators, they must use
# functools.wraps to ensure the name is
# propagated.
temp_type = f.__name__
notifier.notify(context, publisher_id, temp_type,
temp_level, payload)
return functools.wraps(f)(wrapped)
return inner
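# A usage sketch for the decorator above (names are illustrative, not defined
# in this module): a manager method with the (self, context, ...) signature can
# be wrapped so unexpected failures are logged, notified, and re-raised.
#
#   @wrap_exception(notifier=notifier_api, publisher_id='compute.host1')
#   def reboot_instance(self, context, instance_uuid):
#       ...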
class NovaException(Exception):
"""Base Nova Exception
To correctly use this class, inherit from it and define
a 'message' property. That message will get printf'd
with the keyword arguments provided to the constructor.
"""
message = _("An unknown exception occurred.")
code = 500
headers = {}
safe = False
def __init__(self, message=None, **kwargs):
self.kwargs = kwargs
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
if not message:
try:
message = self.message % kwargs
except Exception as e:
# kwargs doesn't match a variable in the message
# log the issue and the kwargs
LOG.exception(_('Exception in string format operation'))
for name, value in kwargs.iteritems():
LOG.error("%s: %s" % (name, value))
if CONF.fatal_exception_format_errors:
raise e
else:
# at least get the core message out if something happened
message = self.message
super(NovaException, self).__init__(message)
def format_message(self):
if self.__class__.__name__.endswith('_Remote'):
return self.args[0]
else:
return unicode(self)
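# To illustrate the pattern described in the NovaException docstring (a sketch,
# not an exception defined in this module): subclasses only supply a 'message'
# format string, and keyword arguments fill it in when the exception is raised.
#
#   class WidgetNotFound(NovaException):
#       message = _("Widget %(widget_id)s could not be found.")
#
#   raise WidgetNotFound(widget_id='w-42') # -> "Widget w-42 could not be found."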
class EC2APIError(NovaException):
message = _("Unknown")
def __init__(self, message=None, code=None):
self.msg = message
self.code = code
outstr = '%s' % message
super(EC2APIError, self).__init__(outstr)
class EncryptionFailure(NovaException):
message = _("Failed to encrypt text: %(reason)s")
class DecryptionFailure(NovaException):
message = _("Failed to decrypt text: %(reason)s")
class VirtualInterfaceCreateException(NovaException):
message = _("Virtual Interface creation failed")
class VirtualInterfaceMacAddressException(NovaException):
message = _("5 attempts to create virtual interface"
"with unique mac address failed")
class GlanceConnectionFailed(NovaException):
message = _("Connection to glance host %(host)s:%(port)s failed: "
"%(reason)s")
class NotAuthorized(NovaException):
message = _("Not authorized.")
code = 403
class AdminRequired(NotAuthorized):
message = _("User does not have admin privileges")
class PolicyNotAuthorized(NotAuthorized):
message = _("Policy doesn't allow %(action)s to be performed.")
class ImageNotActive(NovaException):
message = _("Image %(image_id)s is not active.")
class ImageNotAuthorized(NovaException):
message = _("Not authorized for image %(image_id)s.")
class Invalid(NovaException):
message = _("Unacceptable parameters.")
code = 400
class InvalidBDM(Invalid):
message = _("Block Device Mapping is Invalid.")
class InvalidBDMSnapshot(InvalidBDM):
message = _("Block Device Mapping is Invalid: "
"failed to get snapshot %(id)s.")
class InvalidBDMVolume(InvalidBDM):
message = _("Block Device Mapping is Invalid: "
"failed to get volume %(id)s.")
class VolumeUnattached(Invalid):
message = _("Volume %(volume_id)s is not attached to anything")
class VolumeNotCreated(NovaException):
message = _("Volume %(volume_id)s did not finish being created"
" even after we waited %(seconds)s seconds or %(attempts)s"
" attempts.")
class InvalidKeypair(Invalid):
message = _("Keypair data is invalid")
class InvalidRequest(Invalid):
message = _("The request is invalid.")
class InvalidInput(Invalid):
message = _("Invalid input received") + ": %(reason)s"
class InvalidVolume(Invalid):
message = _("Invalid volume") + ": %(reason)s"
class InvalidMetadata(Invalid):
message = _("Invalid metadata") + ": %(reason)s"
class InvalidMetadataSize(Invalid):
message = _("Invalid metadata size") + ": %(reason)s"
class InvalidPortRange(Invalid):
message = _("Invalid port range %(from_port)s:%(to_port)s. %(msg)s")
class InvalidIpProtocol(Invalid):
message = _("Invalid IP protocol %(protocol)s.")
class InvalidContentType(Invalid):
message = _("Invalid content type %(content_type)s.")
class InvalidCidr(Invalid):
message = _("Invalid cidr %(cidr)s.")
class InvalidUnicodeParameter(Invalid):
message = _("Invalid Parameter: "
"Unicode is not supported by the current database.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
message = _("%(err)s")
class InvalidAggregateAction(Invalid):
message = _("Cannot perform action '%(action)s' on aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidGroup(Invalid):
message = _("Group not valid. Reason: %(reason)s")
class InvalidSortKey(Invalid):
message = _("Sort key supplied was not valid.")
class InstanceInvalidState(Invalid):
message = _("Instance %(instance_uuid)s in %(attr)s %(state)s. Cannot "
"%(method)s while the instance is in this state.")
class InstanceNotRunning(Invalid):
message = _("Instance %(instance_id)s is not running.")
class InstanceNotInRescueMode(Invalid):
message = _("Instance %(instance_id)s is not in rescue mode")
class InstanceNotRescuable(Invalid):
message = _("Instance %(instance_id)s cannot be rescued: %(reason)s")
class InstanceNotReady(Invalid):
message = _("Instance %(instance_id)s is not ready")
class InstanceSuspendFailure(Invalid):
message = _("Failed to suspend instance") + ": %(reason)s"
class InstanceResumeFailure(Invalid):
message = _("Failed to resume instance: %(reason)s.")
class InstancePowerOnFailure(Invalid):
message = _("Failed to power on instance: %(reason)s.")
class InstancePowerOffFailure(Invalid):
message = _("Failed to power off instance: %(reason)s.")
class InstanceRebootFailure(Invalid):
message = _("Failed to reboot instance") + ": %(reason)s"
class InstanceTerminationFailure(Invalid):
message = _("Failed to terminate instance") + ": %(reason)s"
class InstanceDeployFailure(Invalid):
message = _("Failed to deploy instance") + ": %(reason)s"
class ServiceUnavailable(Invalid):
message = _("Service is unavailable at this time.")
class ComputeResourcesUnavailable(ServiceUnavailable):
message = _("Insufficient compute resources.")
class ComputeServiceUnavailable(ServiceUnavailable):
message = _("Compute service of %(host)s is unavailable at this time.")
class UnableToMigrateToSelf(Invalid):
message = _("Unable to migrate instance (%(instance_id)s) "
"to current host (%(host)s).")
class InvalidHypervisorType(Invalid):
message = _("The supplied hypervisor type of is invalid.")
class DestinationHypervisorTooOld(Invalid):
message = _("The instance requires a newer hypervisor version than "
"has been provided.")
class DestinationDiskExists(Invalid):
message = _("The supplied disk path (%(path)s) already exists, "
"it is expected not to exist.")
class InvalidDevicePath(Invalid):
message = _("The supplied device path (%(path)s) is invalid.")
class DevicePathInUse(Invalid):
message = _("The supplied device path (%(path)s) is in use.")
code = 409
class DeviceIsBusy(Invalid):
message = _("The supplied device (%(device)s) is busy.")
class InvalidCPUInfo(Invalid):
message = _("Unacceptable CPU info") + ": %(reason)s"
class InvalidIpAddressError(Invalid):
message = _("%(address)s is not a valid IP v4/6 address.")
class InvalidVLANTag(Invalid):
message = _("VLAN tag is not appropriate for the port group "
"%(bridge)s. Expected VLAN tag is %(tag)s, "
"but the one associated with the port group is %(pgroup)s.")
class InvalidVLANPortGroup(Invalid):
message = _("vSwitch which contains the port group %(bridge)s is "
"not associated with the desired physical adapter. "
"Expected vSwitch is %(expected)s, but the one associated "
"is %(actual)s.")
class InvalidDiskFormat(Invalid):
message = _("Disk format %(disk_format)s is not acceptable")
class ImageUnacceptable(Invalid):
message = _("Image %(image_id)s is unacceptable: %(reason)s")
class InstanceUnacceptable(Invalid):
message = _("Instance %(instance_id)s is unacceptable: %(reason)s")
class InvalidEc2Id(Invalid):
message = _("Ec2 id %(ec2_id)s is unacceptable.")
class InvalidUUID(Invalid):
message = _("Expected a uuid but received %(uuid)s.")
class InvalidID(Invalid):
message = _("Invalid ID received %(id)s.")
class InvalidPeriodicTaskArg(Invalid):
message = _("Unexpected argument for periodic task creation: %(arg)s.")
class ConstraintNotMet(NovaException):
message = _("Constraint not met.")
code = 412
class NotFound(NovaException):
message = _("Resource could not be found.")
code = 404
class AgentBuildNotFound(NotFound):
message = _("No agent-build associated with id %(id)s.")
class VolumeNotFound(NotFound):
message = _("Volume %(volume_id)s could not be found.")
class SnapshotNotFound(NotFound):
message = _("Snapshot %(snapshot_id)s could not be found.")
class ISCSITargetNotFoundForVolume(NotFound):
message = _("No target id found for volume %(volume_id)s.")
class DiskNotFound(NotFound):
message = _("No disk at %(location)s")
class VolumeDriverNotFound(NotFound):
message = _("Could not find a handler for %(driver_type)s volume.")
class InvalidImageRef(Invalid):
message = _("Invalid image href %(image_href)s.")
class ImageNotFound(NotFound):
message = _("Image %(image_id)s could not be found.")
class ImageNotFoundEC2(ImageNotFound):
message = _("Image %(image_id)s could not be found. The nova EC2 API "
"assigns image ids dynamically when they are listed for the "
"first time. Have you listed image ids since adding this "
"image?")
class ProjectNotFound(NotFound):
message = _("Project %(project_id)s could not be found.")
class StorageRepositoryNotFound(NotFound):
message = _("Cannot find SR to read/write VDI.")
class NetworkDuplicated(NovaException):
message = _("Network %(network_id)s is duplicated.")
class NetworkInUse(NovaException):
message = _("Network %(network_id)s is still in use.")
class NetworkNotCreated(NovaException):
message = _("%(req)s is required to create a network.")
class NetworkNotFound(NotFound):
message = _("Network %(network_id)s could not be found.")
class PortNotFound(NotFound):
message = _("Port id %(port_id)s could not be found.")
class NetworkNotFoundForBridge(NetworkNotFound):
message = _("Network could not be found for bridge %(bridge)s")
class NetworkNotFoundForUUID(NetworkNotFound):
message = _("Network could not be found for uuid %(uuid)s")
class NetworkNotFoundForCidr(NetworkNotFound):
message = _("Network could not be found with cidr %(cidr)s.")
class NetworkNotFoundForInstance(NetworkNotFound):
message = _("Network could not be found for instance %(instance_id)s.")
class NoNetworksFound(NotFound):
message = _("No networks defined.")
class NetworkNotFoundForProject(NotFound):
message = _("Either Network uuid %(network_uuid)s is not present or "
"is not assigned to the project %(project_id)s.")
class DatastoreNotFound(NotFound):
message = _("Could not find the datastore reference(s) which the VM uses.")
class PortInUse(NovaException):
message = _("Port %(port_id)s is still in use.")
class PortNotUsable(NovaException):
message = _("Port %(port_id)s not usable for instance %(instance)s.")
class PortNotFree(NovaException):
message = _("No free port available for instance %(instance)s.")
class FixedIpNotFound(NotFound):
message = _("No fixed IP associated with id %(id)s.")
class FixedIpNotFoundForAddress(FixedIpNotFound):
message = _("Fixed ip not found for address %(address)s.")
class FixedIpNotFoundForInstance(FixedIpNotFound):
message = _("Instance %(instance_uuid)s has zero fixed ips.")
class FixedIpNotFoundForNetworkHost(FixedIpNotFound):
message = _("Network host %(host)s has zero fixed ips "
"in network %(network_id)s.")
class FixedIpNotFoundForSpecificInstance(FixedIpNotFound):
message = _("Instance %(instance_uuid)s doesn't have fixed ip '%(ip)s'.")
class FixedIpNotFoundForNetwork(FixedIpNotFound):
message = _("Fixed IP address (%(address)s) does not exist in "
"network (%(network_uuid)s).")
class FixedIpAlreadyInUse(NovaException):
message = _("Fixed IP address %(address)s is already in use on instance "
"%(instance_uuid)s.")
class FixedIpAssociatedWithMultipleInstances(NovaException):
message = _("More than one instance is associated with fixed ip address "
"'%(address)s'.")
class FixedIpInvalid(Invalid):
message = _("Fixed IP address %(address)s is invalid.")
class NoMoreFixedIps(NovaException):
message = _("Zero fixed ips available.")
class NoFixedIpsDefined(NotFound):
message = _("Zero fixed ips could be found.")
#TODO(bcwaldon): EOL this exception!
class Duplicate(NovaException):
pass
class FloatingIpExists(Duplicate):
message = _("Floating ip %(address)s already exists.")
class FloatingIpNotFound(NotFound):
message = _("Floating ip not found for id %(id)s.")
class FloatingIpDNSExists(Invalid):
message = _("The DNS entry %(name)s already exists in domain %(domain)s.")
class FloatingIpNotFoundForAddress(FloatingIpNotFound):
message = _("Floating ip not found for address %(address)s.")
class FloatingIpNotFoundForHost(FloatingIpNotFound):
message = _("Floating ip not found for host %(host)s.")
class FloatingIpMultipleFoundForAddress(NovaException):
message = _("Multiple floating ips are found for address %(address)s.")
class FloatingIpPoolNotFound(NotFound):
message = _("Floating ip pool not found.")
safe = True
class NoMoreFloatingIps(FloatingIpNotFound):
message = _("Zero floating ips available.")
safe = True
class FloatingIpAssociated(NovaException):
message = _("Floating ip %(address)s is associated.")
class FloatingIpNotAssociated(NovaException):
message = _("Floating ip %(address)s is not associated.")
class NoFloatingIpsDefined(NotFound):
message = _("Zero floating ips exist.")
class NoFloatingIpInterface(NotFound):
message = _("Interface %(interface)s not found.")
class CannotDisassociateAutoAssignedFloatingIP(NovaException):
message = _("Cannot disassociate auto assigined floating ip")
class KeypairNotFound(NotFound):
message = _("Keypair %(name)s not found for user %(user_id)s")
class CertificateNotFound(NotFound):
message = _("Certificate %(certificate_id)s not found.")
class ServiceNotFound(NotFound):
message = _("Service %(service_id)s could not be found.")
class HostNotFound(NotFound):
message = _("Host %(host)s could not be found.")
class ComputeHostNotFound(HostNotFound):
message = _("Compute host %(host)s could not be found.")
class HostBinaryNotFound(NotFound):
message = _("Could not find binary %(binary)s on host %(host)s.")
class InvalidReservationExpiration(Invalid):
message = _("Invalid reservation expiration %(expire)s.")
class InvalidQuotaValue(Invalid):
message = _("Change would make usage less than 0 for the following "
"resources: %(unders)s")
class QuotaNotFound(NotFound):
message = _("Quota could not be found")
class QuotaResourceUnknown(QuotaNotFound):
message = _("Unknown quota resources %(unknown)s.")
class ProjectQuotaNotFound(QuotaNotFound):
message = _("Quota for project %(project_id)s could not be found.")
class QuotaClassNotFound(QuotaNotFound):
message = _("Quota class %(class_name)s could not be found.")
class QuotaUsageNotFound(QuotaNotFound):
message = _("Quota usage for project %(project_id)s could not be found.")
class ReservationNotFound(QuotaNotFound):
message = _("Quota reservation %(uuid)s could not be found.")
class OverQuota(NovaException):
message = _("Quota exceeded for resources: %(overs)s")
class SecurityGroupNotFound(NotFound):
message = _("Security group %(security_group_id)s not found.")
class SecurityGroupNotFoundForProject(SecurityGroupNotFound):
message = _("Security group %(security_group_id)s not found "
"for project %(project_id)s.")
class SecurityGroupNotFoundForRule(SecurityGroupNotFound):
message = _("Security group with rule %(rule_id)s not found.")
class SecurityGroupExistsForInstance(Invalid):
message = _("Security group %(security_group_id)s is already associated"
" with the instance %(instance_id)s")
class SecurityGroupNotExistsForInstance(Invalid):
message = _("Security group %(security_group_id)s is not associated with"
" the instance %(instance_id)s")
class SecurityGroupDefaultRuleNotFound(Invalid):
message = _("Security group default rule (%rule_id)s not found.")
class SecurityGroupCannotBeApplied(Invalid):
message = _("Network requires port_security_enabled and subnet associated"
" in order to apply security groups.")
class NoUniqueMatch(NovaException):
message = _("No Unique Match Found.")
code = 409
class MigrationNotFound(NotFound):
message = _("Migration %(migration_id)s could not be found.")
class MigrationNotFoundByStatus(MigrationNotFound):
message = _("Migration not found for instance %(instance_id)s "
"with status %(status)s.")
class ConsolePoolNotFound(NotFound):
message = _("Console pool %(pool_id)s could not be found.")
class ConsolePoolNotFoundForHostType(NotFound):
message = _("Console pool of type %(console_type)s "
"for compute host %(compute_host)s "
"on proxy host %(host)s not found.")
class ConsoleNotFound(NotFound):
message = _("Console %(console_id)s could not be found.")
class ConsoleNotFoundForInstance(ConsoleNotFound):
message = _("Console for instance %(instance_uuid)s could not be found.")
class ConsoleNotFoundInPoolForInstance(ConsoleNotFound):
message = _("Console for instance %(instance_uuid)s "
"in pool %(pool_id)s could not be found.")
class ConsoleTypeInvalid(Invalid):
message = _("Invalid console type %(console_type)s")
class InstanceTypeNotFound(NotFound):
message = _("Instance type %(instance_type_id)s could not be found.")
class InstanceTypeNotFoundByName(InstanceTypeNotFound):
message = _("Instance type with name %(instance_type_name)s "
"could not be found.")
class FlavorNotFound(NotFound):
message = _("Flavor %(flavor_id)s could not be found.")
class FlavorAccessNotFound(NotFound):
message = _("Flavor access not found for %(flavor_id)s / "
"%(project_id)s combination.")
class CellNotFound(NotFound):
message = _("Cell %(cell_name)s doesn't exist.")
class CellRoutingInconsistency(NovaException):
message = _("Inconsistency in cell routing: %(reason)s")
class CellServiceAPIMethodNotFound(NotFound):
message = _("Service API method not found: %(detail)s")
class CellTimeout(NotFound):
message = _("Timeout waiting for response from cell")
class CellMaxHopCountReached(NovaException):
message = _("Cell message has reached maximum hop count: %(hop_count)s")
class NoCellsAvailable(NovaException):
message = _("No cells available matching scheduling criteria.")
class CellError(NovaException):
message = _("Exception received during cell processing: %(exc_name)s.")
class InstanceUnknownCell(NotFound):
message = _("Cell is not known for instance %(instance_uuid)s")
class SchedulerHostFilterNotFound(NotFound):
message = _("Scheduler Host Filter %(filter_name)s could not be found.")
class InstanceMetadataNotFound(NotFound):
message = _("Instance %(instance_uuid)s has no metadata with "
"key %(metadata_key)s.")
class InstanceSystemMetadataNotFound(NotFound):
message = _("Instance %(instance_uuid)s has no system metadata with "
"key %(metadata_key)s.")
class InstanceTypeExtraSpecsNotFound(NotFound):
message = _("Instance Type %(instance_type_id)s has no extra specs with "
"key %(extra_specs_key)s.")
class FileNotFound(NotFound):
message = _("File %(file_path)s could not be found.")
class NoFilesFound(NotFound):
message = _("Zero files could be found.")
class SwitchNotFoundForNetworkAdapter(NotFound):
message = _("Virtual switch associated with the "
"network adapter %(adapter)s not found.")
class NetworkAdapterNotFound(NotFound):
message = _("Network adapter %(adapter)s could not be found.")
class ClassNotFound(NotFound):
message = _("Class %(class_name)s could not be found: %(exception)s")
class NotAllowed(NovaException):
message = _("Action not allowed.")
class ImageRotationNotAllowed(NovaException):
message = _("Rotation is not allowed for snapshots")
class RotationRequiredForBackup(NovaException):
message = _("Rotation param is required for backup image_type")
class KeyPairExists(Duplicate):
message = _("Key pair %(key_name)s already exists.")
class InstanceExists(Duplicate):
message = _("Instance %(name)s already exists.")
class InstanceTypeExists(Duplicate):
message = _("Instance Type with name %(name)s already exists.")
class InstanceTypeIdExists(Duplicate):
message = _("Instance Type with ID %(flavor_id)s already exists.")
class FlavorAccessExists(Duplicate):
message = _("Flavor access alreay exists for flavor %(flavor_id)s "
"and project %(project_id)s combination.")
class InvalidSharedStorage(NovaException):
message = _("%(path)s is not on shared storage: %(reason)s")
class InvalidLocalStorage(NovaException):
message = _("%(path)s is not on local storage: %(reason)s")
class MigrationError(NovaException):
message = _("Migration error") + ": %(reason)s"
class MalformedRequestBody(NovaException):
message = _("Malformed message body: %(reason)s")
# NOTE(johannes): NotFound should only be used when a 404 error is
# appropriate to be returned
class ConfigNotFound(NovaException):
message = _("Could not find config at %(path)s")
class PasteAppNotFound(NovaException):
message = _("Could not load paste app '%(name)s' from %(path)s")
class CannotResizeToSameFlavor(NovaException):
message = _("When resizing, instances must change flavor!")
class ResizeError(NovaException):
message = _("Resize error: %(reason)s")
class ImageTooLarge(NovaException):
message = _("Image is larger than instance type allows")
class InstanceTypeMemoryTooSmall(NovaException):
message = _("Instance type's memory is too small for requested image.")
class InstanceTypeDiskTooSmall(NovaException):
message = _("Instance type's disk is too small for requested image.")
class InsufficientFreeMemory(NovaException):
message = _("Insufficient free memory on compute node to start %(uuid)s.")
class CouldNotFetchMetrics(NovaException):
message = _("Could not fetch bandwidth/cpu/disk metrics for this host.")
class NoValidHost(NovaException):
message = _("No valid host was found. %(reason)s")
class QuotaError(NovaException):
message = _("Quota exceeded") + ": code=%(code)s"
code = 413
headers = {'Retry-After': 0}
safe = True
class TooManyInstances(QuotaError):
message = _("Quota exceeded for %(overs)s: Requested %(req)s,"
" but already used %(used)d of %(allowed)d %(resource)s")
class FloatingIpLimitExceeded(QuotaError):
message = _("Maximum number of floating ips exceeded")
class FixedIpLimitExceeded(QuotaError):
message = _("Maximum number of fixed ips exceeded")
class MetadataLimitExceeded(QuotaError):
message = _("Maximum number of metadata items exceeds %(allowed)d")
class OnsetFileLimitExceeded(QuotaError):
message = _("Personality file limit exceeded")
class OnsetFilePathLimitExceeded(QuotaError):
message = _("Personality file path too long")
class OnsetFileContentLimitExceeded(QuotaError):
message = _("Personality file content too long")
class KeypairLimitExceeded(QuotaError):
message = _("Maximum number of key pairs exceeded")
class SecurityGroupLimitExceeded(QuotaError):
message = _("Maximum number of security groups or rules exceeded")
class AggregateError(NovaException):
message = _("Aggregate %(aggregate_id)s: action '%(action)s' "
"caused an error: %(reason)s.")
class AggregateNotFound(NotFound):
message = _("Aggregate %(aggregate_id)s could not be found.")
class AggregateNameExists(Duplicate):
message = _("Aggregate %(aggregate_name)s already exists.")
class AggregateHostNotFound(NotFound):
message = _("Aggregate %(aggregate_id)s has no host %(host)s.")
class AggregateMetadataNotFound(NotFound):
message = _("Aggregate %(aggregate_id)s has no metadata with "
"key %(metadata_key)s.")
class AggregateHostExists(Duplicate):
message = _("Aggregate %(aggregate_id)s already has host %(host)s.")
class InstanceTypeCreateFailed(NovaException):
message = _("Unable to create instance type")
class InstancePasswordSetFailed(NovaException):
message = _("Failed to set admin password on %(instance)s "
"because %(reason)s")
safe = True
class DuplicateVlan(Duplicate):
message = _("Detected existing vlan with id %(vlan)d")
class CidrConflict(NovaException):
message = _("There was a conflict when trying to complete your request.")
code = 409
class InstanceNotFound(NotFound):
message = _("Instance %(instance_id)s could not be found.")
class InstanceInfoCacheNotFound(NotFound):
message = _("Info cache for instance %(instance_uuid)s could not be "
"found.")
class NodeNotFound(NotFound):
message = _("Node %(node_id)s could not be found.")
class NodeNotFoundByUUID(NotFound):
message = _("Node with UUID %(node_uuid)s could not be found.")
class MarkerNotFound(NotFound):
message = _("Marker %(marker)s could not be found.")
class InvalidInstanceIDMalformed(Invalid):
message = _("Invalid id: %(val)s (expecting \"i-...\").")
class CouldNotFetchImage(NovaException):
message = _("Could not fetch image %(image_id)s")
class CouldNotUploadImage(NovaException):
message = _("Could not upload image %(image_id)s")
class TaskAlreadyRunning(NovaException):
message = _("Task %(task_name)s is already running on host %(host)s")
class TaskNotRunning(NovaException):
message = _("Task %(task_name)s is not running on host %(host)s")
class InstanceIsLocked(InstanceInvalidState):
message = _("Instance %(instance_uuid)s is locked")
class ConfigDriveMountFailed(NovaException):
message = _("Could not mount vfat config drive. %(operation)s failed. "
"Error: %(error)s")
class ConfigDriveUnknownFormat(NovaException):
message = _("Unknown config drive format %(format)s. Select one of "
"iso9660 or vfat.")
class InterfaceAttachFailed(Invalid):
message = _("Failed to attach network adapter device to %(instance)s")
class InterfaceDetachFailed(Invalid):
message = _("Failed to detach network adapter device from %(instance)s")
class InstanceUserDataTooLarge(NovaException):
message = _("User data too large. User data must be no larger than "
"%(maxsize)s bytes once base64 encoded. Your data is "
"%(length)d bytes")
class InstanceUserDataMalformed(NovaException):
message = _("User data needs to be valid base 64.")
class UnexpectedTaskStateError(NovaException):
message = _("unexpected task state: expecting %(expected)s but "
"the actual state is %(actual)s")
class InstanceActionNotFound(NovaException):
message = _("Action for request_id %(request_id)s on instance"
" %(instance_uuid)s not found")
class InstanceActionEventNotFound(NovaException):
message = _("Event %(event)s not found for action id %(action_id)s")
class CryptoCAFileNotFound(FileNotFound):
message = _("The CA file for %(project)s could not be found")
class CryptoCRLFileNotFound(FileNotFound):
message = _("The CRL file for %(project)s could not be found")
class InstanceRecreateNotSupported(Invalid):
message = _('Instance recreate is not implemented by this virt driver.')
class ServiceGroupUnavailable(NovaException):
message = _("The service from servicegroup driver %(driver) is "
"temporarily unavailable.")
class DBNotAllowed(NovaException):
message = _('%(binary)s attempted direct database access which is '
'not allowed by policy')
class UnsupportedVirtType(Invalid):
message = _("Virtualization type '%(virt)s' is not supported by "
"this compute driver")
class UnsupportedHardware(Invalid):
message = _("Requested hardware '%(model)s' is not supported by "
"the '%(virt)s' virt driver")
class Base64Exception(NovaException):
message = _("Invalid Base 64 data for file %(path)s")
|
apache-2.0
| 1,228,007,779,956,419,300
| 27.251656
| 79
| 0.67241
| false
| 4.234243
| false
| false
| false
|
rishig/zulip
|
zproject/backends.py
|
1
|
46330
|
# Documentation for Zulip's authentication backends is split across a few places:
#
# * https://zulip.readthedocs.io/en/latest/production/authentication-methods.html and
# zproject/prod_settings_template.py have user-level configuration documentation.
# * https://zulip.readthedocs.io/en/latest/subsystems/auth.html has developer-level
# documentation, especially on testing authentication backends in the Zulip
# development environment.
#
# Django upstream's documentation for authentication backends is also
# helpful background. The most important detail to understand for
# reading this file is that the Django authenticate() function will
# call the authenticate methods of all backends registered in
# settings.AUTHENTICATION_BACKENDS that have a function signature
# matching the args/kwargs passed in the authenticate() call.
import copy
import logging
import magic
from typing import Any, Dict, List, Optional, Set, Tuple, Union
from django_auth_ldap.backend import LDAPBackend, _LDAPUser
from django.contrib.auth import get_backends
from django.contrib.auth.backends import RemoteUserBackend
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from requests import HTTPError
from social_core.backends.github import GithubOAuth2, GithubOrganizationOAuth2, \
GithubTeamOAuth2
from social_core.backends.azuread import AzureADOAuth2
from social_core.backends.base import BaseAuth
from social_core.backends.oauth import BaseOAuth2
from social_core.pipeline.partial import partial
from social_core.exceptions import AuthFailed, SocialAuthBaseException
from zerver.lib.actions import do_create_user, do_reactivate_user, do_deactivate_user, \
do_update_user_custom_profile_data, validate_email_for_realm
from zerver.lib.avatar import is_avatar_new
from zerver.lib.avatar_hash import user_avatar_content_hash
from zerver.lib.dev_ldap_directory import init_fakeldap
from zerver.lib.request import JsonableError
from zerver.lib.users import check_full_name, validate_user_custom_profile_field
from zerver.models import CustomProfileField, DisposableEmailError, DomainNotAllowedForRealmError, \
EmailContainsPlusError, PreregistrationUser, UserProfile, Realm, custom_profile_fields_for_realm, \
email_allowed_for_realm, get_default_stream_groups, get_user_profile_by_id, remote_user_to_email, \
email_to_username, get_realm, get_user_by_delivery_email, supported_auth_backends
# This first batch of methods is used by other code in Zulip to check
# whether a given authentication backend is enabled for a given realm.
# In each case, we both needs to check at the server level (via
# `settings.AUTHENTICATION_BACKENDS`, queried via
# `django.contrib.auth.get_backends`) and at the realm level (via the
# `Realm.authentication_methods` BitField).
def pad_method_dict(method_dict: Dict[str, bool]) -> Dict[str, bool]:
"""Pads an authentication methods dict to contain all auth backends
supported by the software, regardless of whether they are
configured on this server"""
for key in AUTH_BACKEND_NAME_MAP:
if key not in method_dict:
method_dict[key] = False
return method_dict
def auth_enabled_helper(backends_to_check: List[str], realm: Optional[Realm]) -> bool:
if realm is not None:
enabled_method_dict = realm.authentication_methods_dict()
pad_method_dict(enabled_method_dict)
else:
enabled_method_dict = dict((method, True) for method in Realm.AUTHENTICATION_FLAGS)
pad_method_dict(enabled_method_dict)
for supported_backend in supported_auth_backends():
for backend_name in backends_to_check:
backend = AUTH_BACKEND_NAME_MAP[backend_name]
if enabled_method_dict[backend_name] and isinstance(supported_backend, backend):
return True
return False
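# For example (an illustrative call, mirroring the helpers below):
# auth_enabled_helper(['Email', 'LDAP'], realm) returns True only if at least
# one of those backends is both installed in settings.AUTHENTICATION_BACKENDS
# and enabled in the realm's authentication_methods bitfield (or realm is None).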
def ldap_auth_enabled(realm: Optional[Realm]=None) -> bool:
return auth_enabled_helper(['LDAP'], realm)
def email_auth_enabled(realm: Optional[Realm]=None) -> bool:
return auth_enabled_helper(['Email'], realm)
def password_auth_enabled(realm: Optional[Realm]=None) -> bool:
return ldap_auth_enabled(realm) or email_auth_enabled(realm)
def dev_auth_enabled(realm: Optional[Realm]=None) -> bool:
return auth_enabled_helper(['Dev'], realm)
def google_auth_enabled(realm: Optional[Realm]=None) -> bool:
return auth_enabled_helper(['Google'], realm)
def github_auth_enabled(realm: Optional[Realm]=None) -> bool:
return auth_enabled_helper(['GitHub'], realm)
def any_oauth_backend_enabled(realm: Optional[Realm]=None) -> bool:
"""Used by the login page process to determine whether to show the
'OR' for login with Google"""
return auth_enabled_helper(OAUTH_BACKEND_NAMES, realm)
def require_email_format_usernames(realm: Optional[Realm]=None) -> bool:
if ldap_auth_enabled(realm):
if settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN:
return False
return True
def common_get_active_user(email: str, realm: Realm,
return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
"""This is the core common function used by essentially all
authentication backends to check if there's an active user account
with a given email address in the organization, handling both
user-level and realm-level deactivation correctly.
"""
try:
user_profile = get_user_by_delivery_email(email, realm)
except UserProfile.DoesNotExist:
# If the user doesn't have an account in the target realm, we
# check whether they might have an account in another realm,
# and if so, provide a helpful error message via
# `invalid_subdomain`.
if not UserProfile.objects.filter(delivery_email__iexact=email).exists():
return None
if return_data is not None:
return_data['invalid_subdomain'] = True
return None
if not user_profile.is_active:
if return_data is not None:
if user_profile.is_mirror_dummy:
# Record whether it's a mirror dummy account
return_data['is_mirror_dummy'] = True
return_data['inactive_user'] = True
return None
if user_profile.realm.deactivated:
if return_data is not None:
return_data['inactive_realm'] = True
return None
return user_profile
class ZulipAuthMixin:
"""This common mixin is used to override Django's default behavior for
looking up a logged-in user by ID to use a version that fetches
from memcached before checking the database (avoiding a database
query in most cases).
"""
def get_user(self, user_profile_id: int) -> Optional[UserProfile]:
"""Override the Django method for getting a UserProfile object from
the user_profile_id."""
try:
return get_user_profile_by_id(user_profile_id)
except UserProfile.DoesNotExist:
return None
class ZulipDummyBackend(ZulipAuthMixin):
"""Used when we want to log you in without checking any
authentication (i.e. new user registration or when otherwise
authentication has already been checked earlier in the process).
We ensure that this backend only ever successfully authenticates
when explicitly requested by including the use_dummy_backend kwarg.
"""
def authenticate(self, *, username: str, realm: Realm,
use_dummy_backend: bool=False,
return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
if use_dummy_backend:
return common_get_active_user(username, realm, return_data)
return None
class EmailAuthBackend(ZulipAuthMixin):
"""
Email+Password Authentication Backend (the default).
Allows a user to sign in using an email/password pair.
"""
def authenticate(self, *, username: str, password: str,
realm: Realm,
return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
""" Authenticate a user based on email address as the user name. """
if not password_auth_enabled(realm):
if return_data is not None:
return_data['password_auth_disabled'] = True
return None
if not email_auth_enabled(realm):
if return_data is not None:
return_data['email_auth_disabled'] = True
return None
user_profile = common_get_active_user(username, realm, return_data=return_data)
if user_profile is None:
return None
if user_profile.check_password(password):
return user_profile
return None
class GoogleMobileOauth2Backend(ZulipAuthMixin):
"""
Google Apps authentication for the legacy Android app.
DummyAuthBackend is what's actually used for our modern Google auth,
both for web and mobile (the latter via the mobile_flow_otp feature).
Allows a user to sign in using a Google-issued OAuth2 token.
Ref:
https://developers.google.com/+/mobile/android/sign-in#server-side_access_for_your_app
https://developers.google.com/accounts/docs/CrossClientAuth#offlineAccess
"""
def authenticate(self, *, google_oauth2_token: str, realm: Realm,
return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
# We lazily import apiclient as part of optimizing the base
# import time for a Zulip management command, since it's only
# used in this one code path and takes 30-50ms to import.
from apiclient.sample_tools import client as googleapiclient
from oauth2client.crypt import AppIdentityError
if return_data is None:
return_data = {}
if not google_auth_enabled(realm=realm):
return_data["google_auth_disabled"] = True
return None
try:
token_payload = googleapiclient.verify_id_token(google_oauth2_token, settings.GOOGLE_CLIENT_ID)
except AppIdentityError:
return None
if token_payload["email_verified"] not in (True, "true"):
return_data["valid_attestation"] = False
return None
return_data["valid_attestation"] = True
return common_get_active_user(token_payload["email"], realm, return_data)
class ZulipRemoteUserBackend(RemoteUserBackend):
"""Authentication backend that reads the Apache REMOTE_USER variable.
Used primarily in enterprise environments with an SSO solution
that has an Apache REMOTE_USER integration. For manual testing, see
https://zulip.readthedocs.io/en/latest/production/authentication-methods.html
See also remote_user_sso in zerver/views/auth.py.
"""
create_unknown_user = False
def authenticate(self, *, remote_user: str, realm: Realm,
return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
if not auth_enabled_helper(["RemoteUser"], realm):
return None
email = remote_user_to_email(remote_user)
return common_get_active_user(email, realm, return_data=return_data)
def is_valid_email(email: str) -> bool:
try:
validate_email(email)
except ValidationError:
return False
return True
def email_belongs_to_ldap(realm: Realm, email: str) -> bool:
"""Used to make determinations on whether a user's email address is
managed by LDAP. For environments using both LDAP and
Email+Password authentication, we do not allow EmailAuthBackend
authentication for email addresses managed by LDAP (to avoid a
security issue where one could create separate credentials for an LDAP
user), and this function is used to enforce that rule.
"""
if not ldap_auth_enabled(realm):
return False
# If we don't have an LDAP domain, it's impossible to tell which
# accounts are LDAP accounts, so treat all of them as LDAP
# accounts
if not settings.LDAP_APPEND_DOMAIN:
return True
# Otherwise, check if the email ends with LDAP_APPEND_DOMAIN
return email.strip().lower().endswith("@" + settings.LDAP_APPEND_DOMAIN)
class ZulipLDAPException(_LDAPUser.AuthenticationFailed):
"""Since this inherits from _LDAPUser.AuthenticationFailed, these will
be caught and logged at debug level inside django-auth-ldap's authenticate()"""
pass
class ZulipLDAPExceptionOutsideDomain(ZulipLDAPException):
pass
class ZulipLDAPConfigurationError(Exception):
pass
LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK = 2
class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
"""Common code between LDAP authentication (ZulipLDAPAuthBackend) and
using LDAP just to sync user data (ZulipLDAPUserPopulator).
To fully understand our LDAP backend, you may want to skim
django_auth_ldap/backend.py from the upstream django-auth-ldap
library. It's not a lot of code, and searching around in that
file makes the flow for LDAP authentication clear.
"""
def __init__(self) -> None:
# Used to initialize a fake LDAP directly for both manual
# and automated testing in a development environment where
# there is no actual LDAP server.
if settings.DEVELOPMENT and settings.FAKE_LDAP_MODE: # nocoverage
init_fakeldap()
# Disable django-auth-ldap's permissions functions -- we don't use
# the standard Django user/group permissions system because they
# are prone to performance issues.
def has_perm(self, user: Optional[UserProfile], perm: Any, obj: Any=None) -> bool:
return False
def has_module_perms(self, user: Optional[UserProfile], app_label: Optional[str]) -> bool:
return False
def get_all_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]:
return set()
def get_group_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]:
return set()
def django_to_ldap_username(self, username: str) -> str:
if settings.LDAP_APPEND_DOMAIN:
if is_valid_email(username):
if not username.endswith("@" + settings.LDAP_APPEND_DOMAIN):
raise ZulipLDAPExceptionOutsideDomain("Email %s does not match LDAP domain %s." % (
username, settings.LDAP_APPEND_DOMAIN))
return email_to_username(username)
return username
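# For example (illustrative values): with LDAP_APPEND_DOMAIN set to
# "example.com", "alice@example.com" maps to the LDAP username "alice",
# while "alice@other.com" raises ZulipLDAPExceptionOutsideDomain; with
# LDAP_APPEND_DOMAIN unset, the username is passed through unchanged.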
def ldap_to_django_username(self, username: str) -> str:
if settings.LDAP_APPEND_DOMAIN:
return "@".join((username, settings.LDAP_APPEND_DOMAIN))
return username
def sync_avatar_from_ldap(self, user: UserProfile, ldap_user: _LDAPUser) -> None:
if 'avatar' in settings.AUTH_LDAP_USER_ATTR_MAP:
# We do local imports here to avoid import loops
from zerver.lib.upload import upload_avatar_image
from zerver.lib.actions import do_change_avatar_fields
from io import BytesIO
avatar_attr_name = settings.AUTH_LDAP_USER_ATTR_MAP['avatar']
if avatar_attr_name not in ldap_user.attrs: # nocoverage
# If this specific user doesn't have e.g. a
# thumbnailPhoto set in LDAP, just skip that user.
return
ldap_avatar = ldap_user.attrs[avatar_attr_name][0]
avatar_changed = is_avatar_new(ldap_avatar, user)
if not avatar_changed:
# Don't do work to replace the avatar with itself.
return
io = BytesIO(ldap_avatar)
# Structurally, to make the S3 backend happy, we need to
# provide a Content-Type; since that isn't specified in
# any metadata, we auto-detect it.
content_type = magic.from_buffer(copy.deepcopy(io).read()[0:1024], mime=True)
if content_type.startswith("image/"):
upload_avatar_image(io, user, user, content_type=content_type)
do_change_avatar_fields(user, UserProfile.AVATAR_FROM_USER)
# Update avatar hash.
user.avatar_hash = user_avatar_content_hash(ldap_avatar)
user.save(update_fields=["avatar_hash"])
else:
logging.warning("Could not parse %s field for user %s" %
(avatar_attr_name, user.id))
def is_account_control_disabled_user(self, ldap_user: _LDAPUser) -> bool:
"""Implements the userAccountControl check for whether a user has been
disabled in an Active Directory server being integrated with
Zulip via LDAP."""
account_control_value = ldap_user.attrs[settings.AUTH_LDAP_USER_ATTR_MAP['userAccountControl']][0]
ldap_disabled = bool(int(account_control_value) & LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK)
return ldap_disabled
@classmethod
def get_mapped_name(cls, ldap_user: _LDAPUser) -> Tuple[str, str]:
"""Constructs the user's Zulip full_name and short_name fields from
the LDAP data"""
if "full_name" in settings.AUTH_LDAP_USER_ATTR_MAP:
full_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["full_name"]
short_name = full_name = ldap_user.attrs[full_name_attr][0]
elif all(key in settings.AUTH_LDAP_USER_ATTR_MAP for key in {"first_name", "last_name"}):
first_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["first_name"]
last_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["last_name"]
short_name = ldap_user.attrs[first_name_attr][0]
full_name = short_name + ' ' + ldap_user.attrs[last_name_attr][0]
else:
raise ZulipLDAPException("Missing required mapping for user's full name")
if "short_name" in settings.AUTH_LDAP_USER_ATTR_MAP:
short_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["short_name"]
short_name = ldap_user.attrs[short_name_attr][0]
return full_name, short_name
def sync_full_name_from_ldap(self, user_profile: UserProfile,
ldap_user: _LDAPUser) -> None:
from zerver.lib.actions import do_change_full_name
full_name, _ = self.get_mapped_name(ldap_user)
if full_name != user_profile.full_name:
try:
full_name = check_full_name(full_name)
except JsonableError as e:
raise ZulipLDAPException(e.msg)
do_change_full_name(user_profile, full_name, None)
def sync_custom_profile_fields_from_ldap(self, user_profile: UserProfile,
ldap_user: _LDAPUser) -> None:
values_by_var_name = {} # type: Dict[str, Union[int, str, List[int]]]
for attr, ldap_attr in settings.AUTH_LDAP_USER_ATTR_MAP.items():
if not attr.startswith('custom_profile_field__'):
continue
var_name = attr.split('custom_profile_field__')[1]
try:
value = ldap_user.attrs[ldap_attr][0]
except KeyError:
# If this user doesn't have this field set then ignore this
# field and continue syncing other fields. `django-auth-ldap`
# automatically logs error about missing field.
continue
values_by_var_name[var_name] = value
fields_by_var_name = {} # type: Dict[str, CustomProfileField]
custom_profile_fields = custom_profile_fields_for_realm(user_profile.realm.id)
for field in custom_profile_fields:
var_name = '_'.join(field.name.lower().split(' '))
fields_by_var_name[var_name] = field
existing_values = {}
for data in user_profile.profile_data:
var_name = '_'.join(data['name'].lower().split(' ')) # type: ignore # data field values can also be int
existing_values[var_name] = data['value']
profile_data = [] # type: List[Dict[str, Union[int, str, List[int]]]]
for var_name, value in values_by_var_name.items():
try:
field = fields_by_var_name[var_name]
except KeyError:
raise ZulipLDAPException('Custom profile field with name %s not found.' % (var_name,))
if existing_values.get(var_name) == value:
continue
result = validate_user_custom_profile_field(user_profile.realm.id, field, value)
if result is not None:
raise ZulipLDAPException('Invalid data for %s field: %s' % (var_name, result))
profile_data.append({
'id': field.id,
'value': value,
})
do_update_user_custom_profile_data(user_profile, profile_data)
def get_or_build_user(self, username: str,
ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]:
"""This is used only in non-authentication contexts such as:
./manage.py sync_ldap_user_data
        In authentication contexts, this is overridden in ZulipLDAPAuthBackend.
"""
(user, built) = super().get_or_build_user(username, ldap_user)
self.sync_avatar_from_ldap(user, ldap_user)
self.sync_full_name_from_ldap(user, ldap_user)
self.sync_custom_profile_fields_from_ldap(user, ldap_user)
if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP:
user_disabled_in_ldap = self.is_account_control_disabled_user(ldap_user)
if user_disabled_in_ldap and user.is_active:
logging.info("Deactivating user %s because they are disabled in LDAP." %
(user.email,))
do_deactivate_user(user)
return (user, built)
if not user_disabled_in_ldap and not user.is_active:
logging.info("Reactivating user %s because they are not disabled in LDAP." %
(user.email,))
do_reactivate_user(user)
return (user, built)
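# Illustrative sketch (not part of the original backend): a stand-alone analogue
# of get_mapped_name above, where attr_map plays the role of
# settings.AUTH_LDAP_USER_ATTR_MAP and attrs the role of ldap_user.attrs. The
# LDAP attribute names in the usage comment are hypothetical.
def _example_mapped_name(attr_map: Dict[str, str],
                         attrs: Dict[str, List[str]]) -> Tuple[str, str]:
    if "full_name" in attr_map:
        short_name = full_name = attrs[attr_map["full_name"]][0]
    elif "first_name" in attr_map and "last_name" in attr_map:
        short_name = attrs[attr_map["first_name"]][0]
        full_name = short_name + ' ' + attrs[attr_map["last_name"]][0]
    else:
        raise ValueError("Missing required mapping for user's full name")
    if "short_name" in attr_map:
        short_name = attrs[attr_map["short_name"]][0]
    return full_name, short_name

# _example_mapped_name({"first_name": "givenName", "last_name": "sn"},
#                      {"givenName": ["Jane"], "sn": ["Doe"]}) == ("Jane Doe", "Jane")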
class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase):
REALM_IS_NONE_ERROR = 1
def authenticate(self, *, username: str, password: str, realm: Realm,
prereg_user: Optional[PreregistrationUser]=None,
return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
self._realm = realm
self._prereg_user = prereg_user
if not ldap_auth_enabled(realm):
return None
try:
username = self.django_to_ldap_username(username)
except ZulipLDAPExceptionOutsideDomain:
if return_data is not None:
return_data['outside_ldap_domain'] = True
return None
# Call into (ultimately) the django-auth-ldap authenticate
# function. This will check the username/password pair
# against the LDAP database, and assuming those are correct,
# end up calling `self.get_or_build_user` with the
# authenticated user's data from LDAP.
return ZulipLDAPAuthBackendBase.authenticate(self,
request=None,
username=username,
password=password)
def get_or_build_user(self, username: str, ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]:
"""The main function of our authentication backend extension of
django-auth-ldap. When this is called (from `authenticate`),
django-auth-ldap will already have verified that the provided
username and password match those in the LDAP database.
This function's responsibility is to check (1) whether the
email address for this user obtained from LDAP has an active
account in this Zulip realm. If so, it will log them in.
Otherwise, to provide a seamless Single Sign-On experience
with LDAP, this function can automatically create a new Zulip
user account in the realm (assuming the realm is configured to
allow that email address to sign up).
"""
return_data = {} # type: Dict[str, Any]
if settings.LDAP_EMAIL_ATTR is not None:
# Get email from ldap attributes.
if settings.LDAP_EMAIL_ATTR not in ldap_user.attrs:
return_data["ldap_missing_attribute"] = settings.LDAP_EMAIL_ATTR
raise ZulipLDAPException("LDAP user doesn't have the needed %s attribute" % (
settings.LDAP_EMAIL_ATTR,))
username = ldap_user.attrs[settings.LDAP_EMAIL_ATTR][0]
if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP: # nocoverage
ldap_disabled = self.is_account_control_disabled_user(ldap_user)
if ldap_disabled:
# Treat disabled users as deactivated in Zulip.
return_data["inactive_user"] = True
raise ZulipLDAPException("User has been deactivated")
user_profile = common_get_active_user(username, self._realm, return_data)
if user_profile is not None:
# An existing user, successfully authed; return it.
return user_profile, False
if return_data.get("inactive_realm"):
# This happens if there is a user account in a deactivated realm
raise ZulipLDAPException("Realm has been deactivated")
if return_data.get("inactive_user"):
raise ZulipLDAPException("User has been deactivated")
# An invalid_subdomain `return_data` value here is ignored,
# since that just means we're trying to create an account in a
# second realm on the server (`ldap_auth_enabled(realm)` would
# have been false if this user wasn't meant to have an account
# in this second realm).
if self._realm.deactivated:
# This happens if no account exists, but the realm is
# deactivated, so we shouldn't create a new user account
raise ZulipLDAPException("Realm has been deactivated")
        # Makes sure that email domain hasn't been restricted for this
# realm. The main thing here is email_allowed_for_realm; but
# we also call validate_email_for_realm just for consistency,
# even though its checks were already done above.
try:
email_allowed_for_realm(username, self._realm)
validate_email_for_realm(self._realm, username)
except DomainNotAllowedForRealmError:
raise ZulipLDAPException("This email domain isn't allowed in this organization.")
except (DisposableEmailError, EmailContainsPlusError):
raise ZulipLDAPException("Email validation failed.")
# We have valid LDAP credentials; time to create an account.
full_name, short_name = self.get_mapped_name(ldap_user)
try:
full_name = check_full_name(full_name)
except JsonableError as e:
raise ZulipLDAPException(e.msg)
opts = {} # type: Dict[str, Any]
if self._prereg_user:
invited_as = self._prereg_user.invited_as
opts['prereg_user'] = self._prereg_user
opts['is_realm_admin'] = invited_as == PreregistrationUser.INVITE_AS['REALM_ADMIN']
opts['is_guest'] = invited_as == PreregistrationUser.INVITE_AS['GUEST_USER']
opts['default_stream_groups'] = get_default_stream_groups(self._realm)
user_profile = do_create_user(username, None, self._realm, full_name, short_name, **opts)
self.sync_avatar_from_ldap(user_profile, ldap_user)
self.sync_custom_profile_fields_from_ldap(user_profile, ldap_user)
return user_profile, True
class ZulipLDAPUserPopulator(ZulipLDAPAuthBackendBase):
"""Just like ZulipLDAPAuthBackend, but doesn't let you log in. Used
for syncing data like names, avatars, and custom profile fields
from LDAP in `manage.py sync_ldap_user_data` as well as in
registration for organizations that use a different SSO solution
for managing login (often via RemoteUserBackend).
"""
def authenticate(self, *, username: str, password: str, realm: Realm,
return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
return None
def sync_user_from_ldap(user_profile: UserProfile) -> bool:
backend = ZulipLDAPUserPopulator()
updated_user = backend.populate_user(backend.django_to_ldap_username(user_profile.email))
if not updated_user:
if settings.LDAP_DEACTIVATE_NON_MATCHING_USERS:
do_deactivate_user(user_profile)
return False
return True
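# Illustrative sketch (assumption): roughly how a caller such as the
# `manage.py sync_ldap_user_data` command uses sync_user_from_ldap; the exact
# queryset filter below is hypothetical.
def _example_sync_all_ldap_users() -> None:
    for user_profile in UserProfile.objects.filter(is_active=True, is_bot=False):
        if not sync_user_from_ldap(user_profile):
            logging.warning("Did not find %s in LDAP." % (user_profile.email,))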
# Quick tool to test whether you're correctly authenticating to LDAP
def query_ldap(email: str) -> List[str]:
values = []
backend = next((backend for backend in get_backends() if isinstance(backend, LDAPBackend)), None)
if backend is not None:
ldap_attrs = _LDAPUser(backend, backend.django_to_ldap_username(email)).attrs
if ldap_attrs is None:
values.append("No such user found")
else:
for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items():
value = ldap_attrs.get(ldap_field, ["LDAP field not present", ])[0]
if django_field == "avatar":
if isinstance(value, bytes):
value = "(An avatar image file)"
values.append("%s: %s" % (django_field, value))
if settings.LDAP_EMAIL_ATTR is not None:
values.append("%s: %s" % ('email', ldap_attrs[settings.LDAP_EMAIL_ATTR][0]))
else:
values.append("LDAP backend not configured on this server.")
return values
class DevAuthBackend(ZulipAuthMixin):
"""Allow logging in as any user without a password. This is used for
convenience when developing Zulip, and is disabled in production."""
def authenticate(self, *, dev_auth_username: str, realm: Realm,
return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
if not dev_auth_enabled(realm):
return None
return common_get_active_user(dev_auth_username, realm, return_data=return_data)
def redirect_deactivated_user_to_login() -> HttpResponseRedirect:
# Specifying the template name makes sure that the user is not redirected to dev_login in case of
# a deactivated account on a test server.
login_url = reverse('zerver.views.auth.login_page', kwargs = {'template_name': 'zerver/login.html'})
redirect_url = login_url + '?is_deactivated=true'
return HttpResponseRedirect(redirect_url)
def social_associate_user_helper(backend: BaseAuth, return_data: Dict[str, Any],
*args: Any, **kwargs: Any) -> Optional[UserProfile]:
"""Responsible for doing the Zulip-account lookup and validation parts
of the Zulip Social auth pipeline (similar to the authenticate()
methods in most other auth backends in this file).
Returns a UserProfile object for successful authentication, and None otherwise.
"""
subdomain = backend.strategy.session_get('subdomain')
try:
realm = get_realm(subdomain)
except Realm.DoesNotExist:
return_data["invalid_realm"] = True
return None
return_data["realm_id"] = realm.id
if not auth_enabled_helper([backend.auth_backend_name], realm):
return_data["auth_backend_disabled"] = True
return None
if 'auth_failed_reason' in kwargs.get('response', {}):
return_data["social_auth_failed_reason"] = kwargs['response']["auth_failed_reason"]
return None
elif hasattr(backend, 'get_verified_emails'):
# Some social backends, like GitHubAuthBackend, don't
# guarantee that the `details` data is validated (i.e., it's
# possible users can put any string they want in the "email"
# field of the `details` object). For those backends, we have
# custom per-backend code to properly fetch only verified
# email addresses from the appropriate third-party API.
verified_emails = backend.get_verified_emails(*args, **kwargs)
verified_emails_length = len(verified_emails)
if verified_emails_length == 0:
# TODO: Provide a nice error message screen to the user
# for this case, rather than just logging a warning.
logging.warning("Social auth (%s) failed because user has no verified emails" %
(backend.auth_backend_name,))
return_data["email_not_verified"] = True
return None
if verified_emails_length == 1:
chosen_email = verified_emails[0]
else:
chosen_email = backend.strategy.request_data().get('email')
if not chosen_email:
return render(backend.strategy.request, 'zerver/social_auth_select_email.html', context = {
'primary_email': verified_emails[0],
'verified_non_primary_emails': verified_emails[1:],
'backend': 'github'
})
try:
validate_email(chosen_email)
except ValidationError:
return_data['invalid_email'] = True
return None
if chosen_email not in verified_emails:
# If a user edits the submit value for the choose email form, we might
# end up with a wrong email associated with the account. The below code
# takes care of that.
logging.warning("Social auth (%s) failed because user has no verified"
" emails associated with the account" %
(backend.auth_backend_name,))
return_data["email_not_associated"] = True
return None
validated_email = chosen_email
else: # nocoverage
# This code path isn't used by GitHubAuthBackend
validated_email = kwargs["details"].get("email")
if not validated_email: # nocoverage
# This code path isn't used with GitHubAuthBackend, but may be relevant for other
# social auth backends.
return_data['invalid_email'] = True
return None
return_data["valid_attestation"] = True
return_data['validated_email'] = validated_email
user_profile = common_get_active_user(validated_email, realm, return_data)
if 'fullname' in kwargs["details"]:
return_data["full_name"] = kwargs["details"]["fullname"]
else:
# If we add support for any of the social auth backends that
# don't provide this feature, we'll need to add code here.
raise AssertionError("Social auth backend doesn't provide fullname")
return user_profile
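# For reference, derived from the helper above: realm_id is recorded as soon as
# the realm resolves; on failure exactly one of the flags below is set and the
# helper returns None (or an HttpResponse for the email-selection page); on
# success valid_attestation, validated_email and full_name are set, and the
# returned UserProfile may still be None when no Zulip account exists yet.
#
#   return_data = {}  # type: Dict[str, Any]
#   user_profile = social_associate_user_helper(backend, return_data)
#   # possible failure flags: invalid_realm, auth_backend_disabled,
#   # social_auth_failed_reason, email_not_verified, invalid_email,
#   # email_not_associated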
@partial
def social_auth_associate_user(
backend: BaseAuth,
*args: Any,
**kwargs: Any) -> Union[HttpResponse, Dict[str, Any]]:
"""A simple wrapper function to reformat the return data from
social_associate_user_helper as a dictionary. The
python-social-auth infrastructure will then pass those values into
later stages of settings.SOCIAL_AUTH_PIPELINE, such as
social_auth_finish, as kwargs.
"""
partial_token = backend.strategy.request_data().get('partial_token')
return_data = {} # type: Dict[str, Any]
user_profile = social_associate_user_helper(
backend, return_data, *args, **kwargs)
if type(user_profile) == HttpResponse:
return user_profile
else:
return {'user_profile': user_profile,
'return_data': return_data,
'partial_token': partial_token,
'partial_backend_name': backend}
def social_auth_finish(backend: Any,
details: Dict[str, Any],
response: HttpResponse,
*args: Any,
**kwargs: Any) -> Optional[UserProfile]:
"""Given the determination in social_auth_associate_user for whether
the user should be authenticated, this takes care of actually
logging in the user (if appropriate) and redirecting the browser
to the appropriate next page depending on the situation. Read the
comments below as well as login_or_register_remote_user in
`zerver/views/auth.py` for the details on how that dispatch works.
"""
from zerver.views.auth import (login_or_register_remote_user,
redirect_and_log_into_subdomain)
user_profile = kwargs['user_profile']
return_data = kwargs['return_data']
no_verified_email = return_data.get("email_not_verified")
auth_backend_disabled = return_data.get('auth_backend_disabled')
inactive_user = return_data.get('inactive_user')
inactive_realm = return_data.get('inactive_realm')
invalid_realm = return_data.get('invalid_realm')
invalid_email = return_data.get('invalid_email')
auth_failed_reason = return_data.get("social_auth_failed_reason")
email_not_associated = return_data.get("email_not_associated")
if invalid_realm:
from zerver.views.auth import redirect_to_subdomain_login_url
return redirect_to_subdomain_login_url()
if inactive_user:
return redirect_deactivated_user_to_login()
if auth_backend_disabled or inactive_realm or no_verified_email or email_not_associated:
        # We can't send the user to the registration workflow with these
        # errors, so just redirect back to the login page.
return None
if invalid_email:
# In case of invalid email, we will end up on registration page.
# This seems better than redirecting to login page.
logging.warning(
"{} got invalid email argument.".format(backend.auth_backend_name)
)
return None
if auth_failed_reason:
logging.info(auth_failed_reason)
return None
# Structurally, all the cases where we don't have an authenticated
# email for the user should be handled above; this assertion helps
# prevent any violations of that contract from resulting in a user
# being incorrectly authenticated.
assert return_data.get('valid_attestation') is True
strategy = backend.strategy # type: ignore # This comes from Python Social Auth.
email_address = return_data['validated_email']
full_name = return_data['full_name']
is_signup = strategy.session_get('is_signup') == '1'
redirect_to = strategy.session_get('next')
realm = Realm.objects.get(id=return_data["realm_id"])
multiuse_object_key = strategy.session_get('multiuse_object_key', '')
mobile_flow_otp = strategy.session_get('mobile_flow_otp')
# At this point, we have now confirmed that the user has
# demonstrated control over the target email address.
#
# The next step is to call login_or_register_remote_user, but
# there are two code paths here because of an optimization to save
# a redirect on mobile.
if mobile_flow_otp is not None:
# For mobile app authentication, login_or_register_remote_user
# will redirect to a special zulip:// URL that is handled by
# the app after a successful authentication; so we can
# redirect directly from here, saving a round trip over what
# we need to do to create session cookies on the right domain
# in the web login flow (below).
return login_or_register_remote_user(strategy.request, email_address,
user_profile, full_name,
mobile_flow_otp=mobile_flow_otp,
is_signup=is_signup,
redirect_to=redirect_to)
# If this authentication code were executing on
# subdomain.zulip.example.com, we would just call
# login_or_register_remote_user as in the mobile code path.
# However, because third-party SSO providers generally don't allow
# wildcard addresses in their redirect URLs, for multi-realm
# servers, we will have just completed authentication on e.g.
# auth.zulip.example.com (depending on
# settings.SOCIAL_AUTH_SUBDOMAIN), which cannot store cookies on
# the subdomain.zulip.example.com domain. So instead we serve a
# redirect (encoding the authentication result data in a
# cryptographically signed token) to a route on
# subdomain.zulip.example.com that will verify the signature and
# then call login_or_register_remote_user.
return redirect_and_log_into_subdomain(realm, full_name, email_address,
is_signup=is_signup,
redirect_to=redirect_to,
multiuse_object_key=multiuse_object_key)
class SocialAuthMixin(ZulipAuthMixin):
auth_backend_name = "undeclared"
    # Used to determine how to order buttons on the login form; backends with a
    # higher sort order are displayed first.
sort_order = 0
def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
"""This is a small wrapper around the core `auth_complete` method of
python-social-auth, designed primarily to prevent 500s for
exceptions in the social auth code from situations that are
really user errors. Returning `None` from this function will
redirect the browser to the login page.
"""
try:
# Call the auth_complete method of social_core.backends.oauth.BaseOAuth2
return super().auth_complete(*args, **kwargs) # type: ignore # monkey-patching
except AuthFailed as e:
# When a user's social authentication fails (e.g. because
# they did something funny with reloading in the middle of
# the flow), don't throw a 500, just send them back to the
# login page and record the event at the info log level.
logging.info(str(e))
return None
except SocialAuthBaseException as e:
# Other python-social-auth exceptions are likely
# interesting enough that we should log a warning.
logging.warning(str(e))
return None
class GitHubAuthBackend(SocialAuthMixin, GithubOAuth2):
auth_backend_name = "GitHub"
sort_order = 100
def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]:
access_token = kwargs["response"]["access_token"]
try:
emails = self._user_data(access_token, '/emails')
except (HTTPError, ValueError, TypeError): # nocoverage
# We don't really need an explicit test for this code
# path, since the outcome will be the same as any other
# case without any verified emails
emails = []
verified_emails = [] # type: List[str]
for email_obj in self.filter_usable_emails(emails):
# social_associate_user_helper assumes that the first email in
# verified_emails is primary.
if email_obj.get("primary"):
verified_emails.insert(0, email_obj["email"])
else:
verified_emails.append(email_obj["email"])
return verified_emails
def filter_usable_emails(self, emails: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
# We only let users login using email addresses that are verified
# by GitHub, because the whole point is for the user to
# demonstrate that they control the target email address. We also
# disallow the @noreply.github.com email addresses, because
# structurally, we only want to allow email addresses that can
# receive emails, and those cannot.
return [
email for email in emails
if email.get('verified') and not email["email"].endswith("@noreply.github.com")
]
def user_data(self, access_token: str, *args: Any, **kwargs: Any) -> Dict[str, str]:
"""This patched user_data function lets us combine together the 3
social auth backends into a single Zulip backend for GitHub Oauth2"""
team_id = settings.SOCIAL_AUTH_GITHUB_TEAM_ID
org_name = settings.SOCIAL_AUTH_GITHUB_ORG_NAME
if team_id is None and org_name is None:
# I believe this can't raise AuthFailed, so we don't try to catch it here.
return super().user_data(
access_token, *args, **kwargs
)
elif team_id is not None:
backend = GithubTeamOAuth2(self.strategy, self.redirect_uri)
try:
return backend.user_data(access_token, *args, **kwargs)
except AuthFailed:
return dict(auth_failed_reason="GitHub user is not member of required team")
elif org_name is not None:
backend = GithubOrganizationOAuth2(self.strategy, self.redirect_uri)
try:
return backend.user_data(access_token, *args, **kwargs)
except AuthFailed:
return dict(auth_failed_reason="GitHub user is not member of required organization")
raise AssertionError("Invalid configuration")
class AzureADAuthBackend(SocialAuthMixin, AzureADOAuth2):
sort_order = 50
auth_backend_name = "AzureAD"
AUTH_BACKEND_NAME_MAP = {
'Dev': DevAuthBackend,
'Email': EmailAuthBackend,
'Google': GoogleMobileOauth2Backend,
'LDAP': ZulipLDAPAuthBackend,
'RemoteUser': ZulipRemoteUserBackend,
} # type: Dict[str, Any]
OAUTH_BACKEND_NAMES = ["Google"] # type: List[str]
SOCIAL_AUTH_BACKENDS = [] # type: List[BaseOAuth2]
# Automatically add all of our social auth backends to relevant data structures.
for social_auth_subclass in SocialAuthMixin.__subclasses__():
AUTH_BACKEND_NAME_MAP[social_auth_subclass.auth_backend_name] = social_auth_subclass
if issubclass(social_auth_subclass, BaseOAuth2):
OAUTH_BACKEND_NAMES.append(social_auth_subclass.auth_backend_name)
SOCIAL_AUTH_BACKENDS.append(social_auth_subclass)
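# Worked example of the loop above (subclass definition order assumed): with the
# two SocialAuthMixin subclasses in this module it leaves
#
#   AUTH_BACKEND_NAME_MAP["GitHub"]  is GitHubAuthBackend
#   AUTH_BACKEND_NAME_MAP["AzureAD"] is AzureADAuthBackend
#   OAUTH_BACKEND_NAMES == ["Google", "GitHub", "AzureAD"]
#   SOCIAL_AUTH_BACKENDS == [GitHubAuthBackend, AzureADAuthBackend]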
|
apache-2.0
| -2,213,531,157,359,025,700
| 46.083333
| 118
| 0.651349
| false
| 4.183673
| false
| false
| false
|
NitishT/minio-py
|
tests/unit/minio_mocks.py
|
1
|
2552
|
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015,2016 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from minio.compat import _is_py3
from minio.fold_case_dict import FoldCaseDict
if _is_py3:
import http.client as httplib
else:
import httplib
from nose.tools import eq_
class MockResponse(object):
def __init__(self, method, url, headers, status_code, response_headers=None,
content=None):
self.method = method
self.url = url
self.request_headers = FoldCaseDict()
for header in headers:
self.request_headers[header] = headers[header]
self.status = status_code
self.headers = response_headers
self.data = content
if content is None:
self.reason = httplib.responses[status_code]
# noinspection PyUnusedLocal
def read(self, amt=1024):
return self.data
def mock_verify(self, method, url, headers):
eq_(self.method, method)
eq_(self.url, url)
for header in headers:
eq_(self.request_headers[header], headers[header])
# noinspection PyUnusedLocal
def stream(self, chunk_size=1, decode_unicode=False):
if self.data is not None:
return iter(bytearray(self.data, 'utf-8'))
return iter([])
# dummy release connection call.
def release_conn(self):
return
class MockConnection(object):
def __init__(self):
self.requests = []
def mock_add_request(self, request):
self.requests.append(request)
# noinspection PyUnusedLocal
def request(self, method, url, headers, redirect=False):
return_request = self.requests.pop(0)
return_request.mock_verify(method, url, headers)
return return_request
# noinspection PyRedeclaration,PyUnusedLocal,PyUnusedLocal
def urlopen(self, method, url, headers, preload_content=False, body=None,
redirect=False):
return self.request(method, url, headers)
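# Illustrative usage sketch (not part of the original test helpers): a test
# queues an expected request/response pair, then the code under test issues the
# request and MockConnection verifies method, URL and headers before replying.
def _example_mock_usage():
    conn = MockConnection()
    conn.mock_add_request(MockResponse('GET', 'http://localhost:9000/', {}, 200,
                                       content='ok'))
    response = conn.urlopen('GET', 'http://localhost:9000/', {})
    eq_(response.status, 200)
    eq_(response.read(), 'ok')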
|
apache-2.0
| -269,605,990,221,122,240
| 32.142857
| 80
| 0.668887
| false
| 3.981279
| false
| false
| false
|
sternshus/arelle2.7
|
svr-2.7/arelle/plugin/cdrFormula/cdrContext.py
|
1
|
19169
|
'''
cdrContext provides the validation and execution context for CDR Formula language expressions.
(c) Copyright 2013 Mark V Systems Limited, California US, All rights reserved.
'''
from collections import OrderedDict
from .cdrParser import astNode
from arelle.ModelFormulaObject import aspectModels, Aspect, aspectStr
from arelle.ModelInstanceObject import ModelFact, ModelDimensionValue
from arelle.FormulaEvaluator import implicitFilter, aspectsMatch
from arelle.ModelValue import QName
from arelle.ModelXbrl import DEFAULT, NONDEFAULT
from arelle import XmlUtil
class CdrContext:
def __init__(self, cdrFormulas, orderedFormulaQnames, modelXbrl=None):
self.modelXbrl = modelXbrl # the DTS and input instance (if any)
self.cdrFormulas = cdrFormulas
self.orderedFormulaQnames = orderedFormulaQnames
        self.evaluationBindings = None
if modelXbrl is not None:
self.formulaOptions = modelXbrl.modelManager.formulaOptions
self.defaultDimensionAspects = set(modelXbrl.qnameDimensionDefaults.keys())
def close(self):
# dereference grammar
        for modelFormula in self.cdrFormulas.values():
for node in modelFormula.prog:
if isinstance(node, astNode):
node.clear()
modelFormula.prog.clear()
self.__dict__.clear() # delete local attributes
class EvaluationBindings:
def __init__(self, cdrContext):
self.sCtx = cdrContext
        self.parentEvaluationBindings = cdrContext.evaluationBindings
cdrContext.evaluationBindings = self
self.evaluationBindings = []
self.nodeBindings = {}
self.withRestrictionBindings = []
self.aspectBoundFacts = {}
self.aggregationNode = None
self.isValuesIteration = False
def close(self):
if self.sCtx.evaluationBindings is self:
self.sCtx.evaluationBindings = self.parentEvaluationBindings
for binding in self.evaluationBindings:
binding.close()
self.__dict__.clear() # dereference
def nodeBinding(self, node, isWithRestrictionNode=False):
if node in self.nodeBindings:
return self.nodeBindings[node]
if self.aggregationNode and self.aggregationNode not in self.nodeBindings and not self.isValuesIteration:
agrgBalNode = EvaluationBinding(self, node, isBalancingBinding=True)
self.nodeBindings[self.aggregationNode] = agrgBalNode
nodeBinding = EvaluationBinding(self, node, isWithRestrictionNode=isWithRestrictionNode)
self.nodeBindings[node] = nodeBinding
self.evaluationBindings.append(nodeBinding)
return nodeBinding
def forBinding(self, node):
if node in self.nodeBindings:
return self.nodeBindings[node]
nodeBinding = ForBinding(self, node)
self.nodeBindings[node] = nodeBinding
self.evaluationBindings.append(nodeBinding)
return nodeBinding
def next(self, iterateAbove=-1, bindingsLen=-1):
# iterate evaluation bindings
if not self.evaluationBindings:
raise StopIteration
evaluationBindingsToReset = []
if bindingsLen == -1:
bindingsLen = len(self.evaluationBindings)
for iB in range(bindingsLen - 1, iterateAbove, -1):
evalBinding = self.evaluationBindings[iB]
try:
evalBinding.next()
for evalBinding in evaluationBindingsToReset:
evalBinding.reset()
                return # this binding has another value to return
except StopIteration:
evaluationBindingsToReset.insert(0, evalBinding) # reset after outer iterator advanced
raise StopIteration # no more outermost loop of iteration
@property
def boundFacts(self):
return [binding.yieldedFact
for binding in self.evaluationBindings
if isinstance(binding, EvaluationBinding) and
not binding.fallenBack and binding.yieldedFact is not None]
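# Illustrative sketch (not part of cdrFormula): EvaluationBindings.next() above
# works like an odometer -- it advances the innermost binding first and, once an
# outer binding advances, resets every binding inside it. A simplified
# stand-alone analogue over plain position/limit lists:
def _example_odometer_next(positions, limits):
    # Returns True while there is another combination, False when exhausted.
    for i in range(len(positions) - 1, -1, -1):
        positions[i] += 1
        if positions[i] < limits[i]:
            for j in range(i + 1, len(positions)):
                positions[j] = 0  # reset the "inner" bindings
            return True
        # this binding is exhausted; carry into the next outer binding
    return False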
class EvaluationBinding:
def __init__(self, evaluationBindings, node, fallback=False, isWithRestrictionNode=False, isBalancingBinding=False):
self.evaluationBindings = evaluationBindings
self.sCtx = evaluationBindings.sCtx
self.node = node
self.isWithRestrictionNode = isWithRestrictionNode
self.isBalancingBinding = isBalancingBinding
self.isValuesIteration = evaluationBindings.isValuesIteration
self.fallback = fallback
self.aspectsQualified = set()
self.aspectsDefined = set(aspectModels["dimensional"])
if evaluationBindings.withRestrictionBindings:
withAspectsQualified = evaluationBindings.withRestrictionBindings[-1].aspectsQualified
else:
withAspectsQualified = set()
# axes from macros need to be expanded
self.aspectAxisTuples = []
self.axesAspects = set()
for hsAxis in node.axes:
if hsAxis.aspect: # no aspect if just a where clause
aspect = evaluate(hsAxis.aspect, self.sCtx, value=True)
if aspect not in self.aspectsDefined and not isinstance(aspect, QName):
raise CdrException(node, "cdr:aspectValue",
_("Evaluation aspect indeterminate %(aspect)s"),
aspect=aspect)
if isinstance(aspect, QName):
if aspect not in self.sCtx.dimensionIsExplicit: # probably dynamic macro aspect
concept = self.sCtx.modelXbrl.qnameConcepts.get(aspect)
if concept is None or not concept.isDimensionItem:
raise CdrException(node, "cdrBinding:axisNotDimension",
_("Axis aspect is not a dimension in the DTS %(aspect)s"),
aspect=aspect)
self.sCtx.dimensionIsExplicit[aspect] = concept.isExplicitDimension
self.axesAspects.add(aspect) # resolved aspect value
self.aspectAxisTuples.append( (aspect, hsAxis) )
self.aspectsQualified = self.axesAspects | withAspectsQualified
self.reset() # will raise StopIteration if no facts or fallback
def close(self):
self.__dict__.clear() # dereference
@property
def value(self):
if self.fallenBack:
return None
if self.yieldedFact is not None:
return self.yieldedFact.xValue
return None
@property
def var(self): # used in implicitFilter winnowing trace
return []
@property
def qname(self): # used in implicitFilter winnowing trace
return ''
def __repr__(self):
if self.fallenBack:
return "fallen-back"
if self.yieldedFact is not None:
return self.yieldedFact.__repr__()
return "none"
def reset(self):
# start with all facts
if self.evaluationBindings.withRestrictionBindings:
facts = self.evaluationBindings.withRestrictionBindings[-1].yieldedFactsPartition
else:
facts = self.sCtx.modelXbrl.nonNilFactsInInstance
if self.sCtx.formulaOptions.traceVariableFilterWinnowing:
self.sCtx.modelXbrl.info("cdr:trace",
_("Evaluation %(variable)s binding: start with %(factCount)s facts"),
sourceFileLine=self.node.sourceFileLine, variable=str(self.node), factCount=len(facts))
# filter by binding aspects
facts = self.filterFacts(facts)
for fact in facts:
if fact.isItem:
self.aspectsDefined |= fact.context.dimAspects(self.sCtx.defaultDimensionAspects)
self.unQualifiedAspects = self.aspectsDefined - self.aspectsQualified - {Aspect.DIMENSIONS}
# implicitly filter by prior uncoveredAspectFacts
        if self.evaluationBindings.aspectBoundFacts and not self.isValuesIteration:
            facts = implicitFilter(self.sCtx, self, facts, self.unQualifiedAspects, self.evaluationBindings.aspectBoundFacts)
if self.sCtx.formulaOptions.traceVariableFiltersResult:
self.sCtx.modelXbrl.info("cdr:trace",
_("Evaluation %(variable)s binding: filters result %(factCount)s facts"),
sourceFileLine=self.node.sourceFileLine, variable=str(self.node), factCount=len(facts))
if self.isWithRestrictionNode: # if withNode, combine facts into partitions by qualified aspects
factsPartitions = []
for fact in facts:
matched = False
for partition in factsPartitions:
if aspectsMatch(self.sCtx, fact, partition[0], self.aspectsQualified):
partition.append(fact)
matched = True
break
if not matched:
factsPartitions.append([fact,])
self.factIter = iter([set(p) for p in factsPartitions]) # must be sets
self.yieldedFactsPartition = []
else: # just a hyperspaceExpression node
self.factIter = iter(facts)
self.yieldedFact = None
self.fallenBack = False
self.next()
def next(self): # will raise StopIteration if no (more) facts or fallback
uncoveredAspectFacts = self.evaluationBindings.aspectBoundFacts
if self.yieldedFact is not None and self.evaluationBindings.aggregationNode is None:
for aspect, priorFact in self.evaluationContributedUncoveredAspects.items():
if priorFact == "none":
del uncoveredAspectFacts[aspect]
else:
uncoveredAspectFacts[aspect] = priorFact
self.evaluationContributedUncoveredAspects.clear()
try:
if self.isWithRestrictionNode:
self.yieldedFactsPartition = next(self.factIter)
for self.yieldedFact in self.yieldedFactsPartition:
break
else:
self.yieldedFact = next(self.factIter)
self.evaluationContributedUncoveredAspects = {}
if not self.isValuesIteration:
for aspect in self.unQualifiedAspects: # covered aspects may not be defined e.g., test 12062 v11, undefined aspect is a complemented aspect
if uncoveredAspectFacts.get(aspect) is None:
self.evaluationContributedUncoveredAspects[aspect] = uncoveredAspectFacts.get(aspect,"none")
uncoveredAspectFacts[aspect] = None if aspect in self.axesAspects else self.yieldedFact
if self.sCtx.formulaOptions.traceVariableFiltersResult:
self.sCtx.modelXbrl.info("cdr:trace",
_("Evaluation %(variable)s: bound value %(result)s"),
sourceFileLine=self.node.sourceFileLine, variable=str(self.node), result=str(self.yieldedFact))
except StopIteration:
self.yieldedFact = None
if self.isWithRestrictionNode:
self.yieldedFactsPartition = []
if self.fallback and not self.fallenBack:
self.fallenBack = True
if self.sCtx.formulaOptions.traceVariableExpressionResult:
self.sCtx.modelXbrl.info("cdr:trace",
_("Evaluation %(variable)s: fallbackValue result %(result)s"),
sourceFileLine=self.node.sourceFileLine, variable=str(self.node), result=0)
else:
raise StopIteration
def filterFacts(self, facts):
modelXbrl = self.sCtx.modelXbrl
# process with bindings and this node
for i, aspectAxis in enumerate(self.aspectAxisTuples):
aspect, hsAxis = aspectAxis
# value is an astHyperspaceAxis
if hsAxis.restriction:
restriction = evaluate(hsAxis.restriction, self.sCtx, value=True)
if aspect == Aspect.CONCEPT:
aspectQualifiedFacts = [modelXbrl.factsByQname[qn]
for qn in restriction
if isinstance(qn, QName)]
facts = facts & set.union(*aspectQualifiedFacts) if aspectQualifiedFacts else set()
elif aspect == Aspect.PERIOD:
facts = set(f for f in facts if isPeriodEqualTo(f, restriction))
elif aspect == Aspect.ENTITY_IDENTIFIER:
facts = set(f for f in facts if isEntityIdentifierEqualTo(f, restriction))
elif isinstance(aspect, QName):
if self.sCtx.dimensionIsExplicit.get(aspect):
# explicit dim facts (value None will match the default member)
aspectQualifiedFacts = []
for qn in restriction:
if self.isBalancingBinding: # complement dimension for aggregation balancing binding
if isinstance(qn, QName) or qn is NONDEFAULT:
qn = DEFAULT
else:
qn = NONDEFAULT
if qn is NONE:
qn = DEFAULT
elif not (isinstance(qn, QName) or qn is DEFAULT or qn is NONDEFAULT):
continue
aspectQualifiedFacts.append(modelXbrl.factsByDimMemQname(aspect, qn))
facts = facts & set.union(*aspectQualifiedFacts) if aspectQualifiedFacts else set()
else:
facts = facts & set(fact
for fact in facts
for typedDimValue in hsAxis.restriction
if typedDimTest(aspect, typedDimValue, fact))
if hsAxis.whereExpr and facts: # process where against facts passing restriction
whereMatchedFacts = set()
asVars = set()
for fact in facts:
for asAspectAxis in self.aspectAxisTuples[0:i+1]:
asAspect, asHsAxis = asAspectAxis
if asHsAxis.asVariableName:
self.sCtx.localVariables[asHsAxis.asVariableName] = factAspectValue(fact, asAspect)
asVars.add(asHsAxis.asVariableName)
self.sCtx.localVariables["item"] = fact
if evaluate(hsAxis.whereExpr, self.sCtx) ^ self.isBalancingBinding:
whereMatchedFacts.add(fact)
del self.sCtx.localVariables["item"]
for asVar in asVars:
del self.sCtx.localVariables[asVar]
facts = whereMatchedFacts
if self.sCtx.formulaOptions.traceVariableFilterWinnowing:
self.sCtx.modelXbrl.info("cdr:trace",
_("Evaluation %(variable)s: %(filter)s filter passes %(factCount)s facts"),
sourceFileLine=self.node.sourceFileLine, variable=str(self.node), filter=aspectStr(aspect), factCount=len(facts))
if self.node.isClosed: # winnow out non-qualified dimension breakdowns
facts = facts - set(fact
for fact in facts
if fact.dimAspects - self.aspectsQualified )
if self.sCtx.formulaOptions.traceVariableFilterWinnowing:
self.sCtx.modelXbrl.info("cdr:trace",
_("Evaluation %(variable)s: closed selection filter passes %(factCount)s facts"),
sourceFileLine=self.node.sourceFileLine, variable=str(self.node), factCount=len(facts))
return facts
def isPeriodEqualTo(fact, periodRestriction):
context = fact.context
if context is not None:
for period in periodRestriction:
if ((context.isInstantPeriod and context.instantDatetime == period) or
(context.isStartEndPeriod and (context.startDatetime, context.endDatetime) == period) or
(context.isForeverPeriod and period == (None, None))):
return True
return False
def isEntityIdentifierEqualTo(fact, entityIdentifierRestriction):
context = fact.context
if context is not None:
for entityIdentifier in entityIdentifierRestriction:
if context.entityIdentifier == entityIdentifier:
return True
return False
def typedDimTest(aspect, value, fact):
if fact.context is None:
return False
modelDim = fact.context.dimValue(aspect)
if isinstance(modelDim, ModelDimensionValue):
memElt = modelDim.typedMember
if memElt is None or memElt.get("{http://www.w3.org/2001/XMLSchema-instance}nil") == "true":
return value is NONE or value is DEFAULT
if value is NONDEFAULT:
return True
return memElt.textValue == value
else:
return value is NONE or value is DEFAULT
class ForBinding:
def __init__(self, hyperspaceBindings, node):
self.hyperspaceBindings = hyperspaceBindings
self.sCtx = hyperspaceBindings.sCtx
self.node = node
self.collection = evaluate(node.collectionExpr, self.sCtx)
self.reset() # will raise StopIteration if no for items
def close(self):
self.__dict__.clear() # dereference
@property
def value(self):
if self.yieldedValue is not None:
return self.yieldedValue
return None
def __repr__(self):
if self.yieldedValue is not None:
return self.yieldedValue.__repr__()
return "none"
def reset(self):
self.forIter = iter(self.collection)
self.yieldedValue = None
self.next()
def next(self): # will raise StopIteration if no (more) facts or fallback
try:
self.yieldedValue = next(self.forIter)
            # set next value here as well as in the for node, because it may be cleared above the context of the for node
self.sCtx.localVariables[self.node.name] = self.yieldedValue
if self.sCtx.formulaOptions.traceVariableFiltersResult:
self.sCtx.modelXbrl.info("sphinx:trace",
_("For loop %(variable)s: bound value %(result)s"),
sourceFileLine=self.node.sourceFileLine, variable=str(self.node.name), result=str(self.yieldedValue))
except StopIteration:
if self.yieldedValue is not None:
del self.sCtx.localVariables[self.node.name]
self.yieldedValue = None
raise StopIteration
from .cdrEvaluator import evaluate, factAspectValue, CdrException, NONE
|
apache-2.0
| -6,810,582,852,387,637,000
| 48.5323
| 156
| 0.608952
| false
| 4.49449
| false
| false
| false
|
zerosum0x0/koadic
|
modules/implant/inject/shellcode_dynwrapx.py
|
1
|
2827
|
import core.implant
import core.job
import string
import uuid
class DynWrapXShellcodeJob(core.job.Job):
def create(self):
self.fork32Bit = True
self.options.set("DLLUUID", uuid.uuid4().hex)
self.options.set("MANIFESTUUID", uuid.uuid4().hex)
self.options.set("SHELLCODEDECCSV", self.convert_shellcode(shellcode))
self.options.set("DIRECTORY", self.options.get('DIRECTORY').replace("\\", "\\\\").replace('"', '\\"'))
def report(self, handler, data, sanitize = False):
task = handler.get_header(self.options.get("UUIDHEADER"), False)
if task == self.options.get("DLLUUID"):
handler.send_file(self.options.get("DYNWRAPXDLL"))
return
if task == self.options.get("MANIFESTUUID"):
handler.send_file(self.options.get("DYNWRAPXMANIFEST"))
return
super(DynWrapXShellcodeJob, self).report(handler, data)
def done(self):
self.results = "Cpmpleted"
self.display()
def display(self):
pass
#self.shell.print_plain(str(self.errno))
class DynWrapXShellcodeImplant(core.implant.Implant):
NAME = "Shellcode via Dynamic Wrapper X"
DESCRIPTION = "Executes arbitrary shellcode using the Dynamic Wrapper X COM object"
AUTHORS = ["zerosum0x0"]
STATE = "implant/inject/shellcode_dynwrapx"
def load(self):
self.options.register("DIRECTORY", "%TEMP%", "writeable directory on zombie", required=False)
self.options.register("SHELLCODE", "90c3", "in ASCII hex format (e.g.: 31c0c3)", required=True)
self.options.register("SHELLCODEDECCSV", "", "decimal CSV shellcode", hidden=True)
self.options.register("DYNWRAPXDLL", "data/bin/dynwrapx.dll", "relative path to dynwrapx.dll", required=True, advanced=True)
self.options.register("DYNWRAPXMANIFEST", "data/bin/dynwrapx.manifest", "relative path to dynwrapx.manifest", required=True, advanced=True)
self.options.register("UUIDHEADER", "ETag", "HTTP header for UUID", advanced=True)
self.options.register("DLLUUID", "ETag", "HTTP header for UUID", hidden=True)
self.options.register("MANIFESTUUID", "ETag", "HTTP header for UUID", hidden=True)
def job(self):
return DynWrapXShellcodeJob
def run(self):
shellcode = self.options.get("SHELLCODE")
if not self.validate_shellcode(shellcode):
self.shell.print_error("SHELLCODE option is an invalid hex string.")
return
#vba = self.loader.load_script("data/implant/inject/shellcode.vba", self.options)
#vba = vba.decode().replace("\n", "\\n")
#self.options.set("VBACODE", vba)
workloads = {}
workloads["js"] = "data/implant/inject/shellcode_dynwrapx.js"
self.dispatch(workloads, self.job)
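# Illustrative sketch (assumption): convert_shellcode() comes from the base job
# class and is not shown in this file; turning ASCII-hex shellcode into the
# decimal CSV consumed by the stager would look roughly like this.
def _example_hex_to_dec_csv(hex_shellcode):
    # "31c0c3" -> "49,192,195"
    pairs = [hex_shellcode[i:i + 2] for i in range(0, len(hex_shellcode), 2)]
    return ",".join(str(int(pair, 16)) for pair in pairs)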
|
apache-2.0
| 7,122,812,421,590,287,000
| 37.202703
| 147
| 0.655111
| false
| 3.619718
| false
| false
| false
|
0mu-Project/Gensokyo-Server-Platform
|
Yakumo/Yakumo-Instance/muMDAU_app/docker.py
|
1
|
2456
|
# -*- coding: utf-8 -*-
# muMDAU_app main / first page
from muMDAU_app import app, socketio
from threading import Thread
from flask import render_template, url_for, redirect, session, request
from docker import Client
pullthread = None
# index page main route page
@app.route('/project')
def pview():
return render_template('project.html')
@app.route('/docker')
def dockerview():
if 'username' in session:
cli = Client(base_url='tcp://'+ session['username'] +'.docker:14438')
c = cli.containers(all=True)
images = cli.images()
return render_template('docker.html', **locals())
else:
return redirect(url_for('main.index'))
@app.route('/docker/run')
def run():
if 'username' in session:
clir = Client(base_url='tcp://'+ session['username'] +'.docker:14438')
clirt = clir.create_container(tty=True, detach=True, image='0muproject/0mu-flask', name='0mu-Flask-06', ports=['8510', '22'], host_config=clir.create_host_config(port_bindings={8510: 8510, 22: 2222}))
clir.start(clirt.get('Id'))
return redirect(url_for('dockerview'))
else:
return redirect(url_for('main.index'))
@app.route('/docker/stop/<Name>')
def dockerstop(Name):
if 'username' in session:
cli = Client(base_url='tcp://172.17.0.2:14458')
cli.stop(container=Name)
return redirect(url_for('dockerview'))
else:
return redirect(url_for('main.index'))
@app.route('/docker/start/<Name>')
def dockerstart(Name):
if 'username' in session:
return redirect(url_for('dockerview'))
else:
return redirect(url_for('main.index'))
@app.route('/docker/pull/<Name>', methods=['GET', 'POST'])
def dockerpull(Name):
if request.method == 'POST':
global pullthread
if 'username' in session:
pullthread = Thread(target=pull_connect(Name))
pullthread.daemon = True
pullthread.start()
            return 'Starting pull'
else:
return redirect(url_for('main.index'))
def pull_connect(Name):
cli = Client(base_url='tcp://172.17.0.2:14458')
for line in cli.pull(Name, stream=True):
socketio.emit('pull', {'info': eval(line.decode('utf-8')).get('status') + '</br>' +str(eval(line.decode('utf-8')).get('progress',''))}, namespace='/pull/info')
socketio.emit('pull', {'info': "[Pull-Done] 請重新整理 Hakurei-Docker 界面"}, namespace='/pull/info')
|
gpl-3.0
| 3,742,627,619,181,106,700
| 35.878788
| 208
| 0.628595
| false
| 3.302578
| false
| false
| false
|
bast/gitink
|
gitink/color.py
|
1
|
1192
|
def whiter_shade_of_pale(hex_color):
'''
This function pales the color a bit for the interior
of the boxes.
'''
pale_shift = 70
# separate the red, green, blue parts
r_hex = hex_color[1:3]
g_hex = hex_color[3:5]
b_hex = hex_color[5:7]
# convert from hex to dec
r_dec = int(r_hex, 16)
g_dec = int(g_hex, 16)
b_dec = int(b_hex, 16)
# make the color paler but make sure we do not go
# beyond 255 or ff
r_dec = min(255, r_dec + pale_shift)
g_dec = min(255, g_dec + pale_shift)
b_dec = min(255, b_dec + pale_shift)
# convert from dec to hex
r_hex = format(r_dec, '02x')
g_hex = format(g_dec, '02x')
b_hex = format(b_dec, '02x')
# stitch them again together
return '#{0}{1}{2}'.format(r_hex, g_hex, b_hex)
def get_color(text):
# this is the deep palette of https://seaborn.pydata.org/
palette = ['#4C72B0',
'#55A868',
'#C44E52',
'#8172B2',
'#CCB974',
'#64B5CD']
position = ord(text[0]) % len(palette)
color = palette[position]
pale_color = whiter_shade_of_pale(color)
return color, pale_color
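# Illustrative usage: the colour only depends on the first character of the
# text, so the same name always maps to the same palette entry.
def _example_get_color_usage():
    color, pale_color = get_color("master")  # ord('m') % 6 == 1 -> palette[1]
    assert color == '#55A868'
    assert pale_color == '#9beeae'           # each channel shifted up by 70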
|
mpl-2.0
| 8,822,537,647,600,122,000
| 24.361702
| 61
| 0.551174
| false
| 2.865385
| false
| false
| false
|
analogue/mythbox
|
resources/src/mythbox/mythtv/protocol.py
|
1
|
13858
|
#
# MythBox for XBMC - http://mythbox.googlecode.com
# Copyright (C) 2012 analogue@yahoo.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
from mythbox.mythtv.enums import TVState, TVState44, TVState58
# MythTV Protocol Constants
initVersion = 8
initToken = ''
separator = u'[]:[]'
serverVersion = None
class ProtocolException(Exception):
'''
    Thrown on protocol version mismatch between frontend and backend or
general protocol related errors.
'''
pass
class BaseProtocol(object):
def recordSize(self):
return len(self.recordFields())
def emptyRecordFields(self):
return ['episode','inetref','season']
def protocolToken(self):
return ""
class Protocol40(BaseProtocol):
def version(self):
return 40
def mythVersion(self):
return '0.21'
def recordFields(self):
        # Based on https://github.com/MythTV/mythtv/blob/v0.23.1/mythtv/bindings/python/MythTV/MythData.py
return [ 'title', 'subtitle', 'description',
'category', 'chanid', 'channum',
'callsign', 'channame', 'filename',
'fs_high', 'fs_low', 'starttime',
'endtime', 'duplicate', 'shareable',
'findid', 'hostname', 'sourceid',
'cardid', 'inputid', 'recpriority',
'recstatus', 'recordid', 'rectype',
'dupin', 'dupmethod', 'recstartts',
'recendts', 'repeat', 'programflags',
'recgroup', 'commfree', 'outputfilters',
'seriesid', 'programid', 'lastmodified',
'stars', 'airdate', 'hasairdate',
'playgroup', 'recpriority2', 'parentid',
'storagegroup', 'audio_props', 'video_props',
'subtitle_type']
def hasOriginalAirDate(self, program):
return int(program._data['hasairdate']) == 1
def tvState(self):
return TVState
def buildAnnounceFileTransferCommand(self, hostname, filePath):
return ["ANN FileTransfer %s" % hostname, filePath]
def buildRescheduleRequest(self, scheduleId):
return ['RESCHEDULE_RECORDINGS %s' % scheduleId]
def getLiveTvBrain(self, settings, translator):
from mythbox.ui.livetv import MythLiveTvBrain
return MythLiveTvBrain(settings, translator)
def getFileSize(self, program):
return self.decodeLongLong(int(program._data['fs_low']), int(program._data['fs_high'])) / 1024.0
def genPixMapCommand(self):
return ['QUERY_GENPIXMAP']
def genQueryRecordingsCommand(self):
return ['QUERY_RECORDINGS Play']
def genPixMapPreviewFilename(self, program):
return program.getBareFilename() + '.640x360.png'
def supportsStreaming(self, platform):
return True
def readLong(self, reply, remove=False):
d = self.decodeLongLong(int(reply[1]), int(reply[0]))
if remove:
reply.pop(0)
reply.pop(0)
return d
def writeLong(self, d, request):
low, high = self.encodeLongLong(d)
request.append('%d' % high)
request.append('%d' % low)
def decodeLongLong(self, low32Bits, high32Bits):
"""
@type low32Bits: int or str
@type high32Bits: int or str
@return: Decodes two 32bit ints to a 64bit long
@rtype: long
"""
if isinstance(low32Bits, basestring):
low32Bits = long(low32Bits)
if isinstance(high32Bits, basestring):
high32Bits = long(high32Bits)
return low32Bits & 0xffffffffL | (high32Bits << 32)
def encodeLongLong(self, long64Bits):
"""
@rtype: (low32Bits, high32Bits)
@return: Encodes 64bit long into pair of 32 bit ints
"""
return long64Bits & 0xffffffffL, long64Bits >> 32
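# Illustrative round trip (not part of the original module): older protocols
# ship a recording's file size as two 32-bit words (fs_low/fs_high), which the
# two helpers above split and recombine.
def _example_long_round_trip():
    p = Protocol40()
    low, high = p.encodeLongLong(5368709120)  # 5 GiB -> (1073741824, 1)
    assert (low, high) == (1073741824, 1)
    assert p.decodeLongLong(low, high) == 5368709120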
class Protocol41(Protocol40):
def version(self):
return 41
class Protocol42(Protocol41):
def version(self):
return 42
class Protocol43(Protocol42):
def version(self):
return 43
def recordFields(self):
# Copied from https://github.com/MythTV/mythtv/blob/v0.23.1/mythtv/bindings/python/MythTV/MythData.py
return [ 'title', 'subtitle', 'description',
'category', 'chanid', 'channum',
'callsign', 'channame', 'filename',
'fs_high', 'fs_low', 'starttime',
'endtime', 'duplicate', 'shareable',
'findid', 'hostname', 'sourceid',
'cardid', 'inputid', 'recpriority',
'recstatus', 'recordid', 'rectype',
'dupin', 'dupmethod', 'recstartts',
'recendts', 'repeat', 'programflags',
'recgroup', 'commfree', 'outputfilters',
'seriesid', 'programid', 'lastmodified',
'stars', 'airdate', 'hasairdate',
'playgroup', 'recpriority2', 'parentid',
'storagegroup', 'audio_props', 'video_props',
'subtitle_type','year']
class Protocol44(Protocol43):
def version(self):
return 44
def tvState(self):
return TVState44
class Protocol45(Protocol44):
def version(self):
return 45
def buildAnnounceFileTransferCommand(self, hostname, filePath):
# TODO: Storage group should be non-empty for recordings
storageGroup = ''
return ['ANN FileTransfer %s' % hostname, filePath, storageGroup]
class Protocol46(Protocol45):
def version(self):
return 46
class Protocol47(Protocol46):
def version(self):
return 47
class Protocol48(Protocol47):
def version(self):
return 48
class Protocol49(Protocol48):
def version(self):
return 49
class Protocol50(Protocol49):
def version(self):
return 50
def mythVersion(self):
return '0.22'
class Protocol56(Protocol50):
def version(self):
return 56
def mythVersion(self):
return '0.23'
class Protocol23056(Protocol56):
def version(self):
return 23056
def mythVersion(self):
return '0.23.1'
class Protocol57(Protocol56):
def version(self):
return 57
def mythVersion(self):
return '0.24'
    def recordFields(self):
return ['title','subtitle','description',
'category','chanid','channum',
'callsign','channame','filename',
'filesize','starttime','endtime',
'findid','hostname','sourceid',
'cardid','inputid','recpriority',
'recstatus','recordid','rectype',
'dupin','dupmethod','recstartts',
'recendts','programflags','recgroup',
'outputfilters','seriesid','programid',
'lastmodified','stars','airdate',
'playgroup','recpriority2','parentid',
'storagegroup','audio_props','video_props',
'subtitle_type','year']
def hasOriginalAirDate(self, program):
d = program.originalAirDate()
return d and '-' in d
def buildAnnounceFileTransferCommand(self, hostname, filePath):
return ["ANN FileTransfer %s 0" % hostname, filePath, 'Default']
def getFileSize(self, program):
return int(program._data['filesize']) / 1024.0
def supportsStreaming(self, platform):
# Eden and up
return platform.xbmcVersion() >= 11.0
class Protocol58(Protocol57):
def tvState(self):
return TVState58
def version(self):
return 58
class Protocol59(Protocol58):
def version(self):
return 59
class Protocol60(Protocol59):
def version(self):
return 60
def buildAnnounceFileTransferCommand(self, hostname, filePath):
return ["ANN FileTransfer %s 0 1 10000" % hostname, filePath, 'Default']
def genPixMapCommand(self):
return ['QUERY_GENPIXMAP2', 'do_not_care']
def genPixMapPreviewFilename(self, program):
return '<EMPTY>'
class Protocol61(Protocol60):
def version(self):
return 61
class Protocol62(Protocol61):
def version(self):
return 62
def protocolToken(self):
return "78B5631E"
class Protocol63(Protocol62):
def version(self):
return 63
def protocolToken(self):
return "3875641D"
class Protocol64(Protocol63):
def version(self):
return 64
def protocolToken(self):
return "8675309J"
class Protocol65(Protocol64):
def version(self):
return 65
def protocolToken(self):
return "D2BB94C2"
def genQueryRecordingsCommand(self):
# technically the old query recs command works but actually causes sorting which would be redundant and may be removed in the future
return ['QUERY_RECORDINGS Unsorted']
class Protocol66(Protocol65):
def version(self):
return 66
def protocolToken(self):
return "0C0FFEE0"
def readLong(self, reply, remove=False):
d = long(reply[0])
if remove:
reply.pop(0)
return d
def writeLong(self, d, request):
request.append('%d' % long(d))
class Protocol67(Protocol66):
def version(self):
return 67
def protocolToken(self):
return "0G0G0G0"
def recordFields(self):
# Copied from mythtv/mythtv/bindings/python/MythTV/mythproto.py
return ['title', 'subtitle', 'description',
'season', 'episode', 'category',
'chanid', 'channum', 'callsign',
'channame', 'filename', 'filesize',
'starttime', 'endtime', 'findid',
'hostname', 'sourceid', 'cardid',
'inputid', 'recpriority', 'recstatus',
'recordid', 'rectype', 'dupin',
'dupmethod', 'recstartts', 'recendts',
'programflags', 'recgroup', 'outputfilters',
'seriesid', 'programid', 'inetref',
'lastmodified', 'stars', 'airdate',
'playgroup', 'recpriority2', 'parentid',
'storagegroup', 'audio_props', 'video_props',
'subtitle_type','year']
class Protocol68(Protocol67):
def version(self):
return 68
def protocolToken(self):
return "90094EAD"
class Protocol69(Protocol68):
def version(self):
return 69
def protocolToken(self):
return "63835135"
class Protocol70(Protocol69):
def version(self):
return 70
def protocolToken(self):
return "53153836"
class Protocol71(Protocol70):
def version(self):
return 71
def protocolToken(self):
return "05e82186"
class Protocol72(Protocol71):
def version(self):
return 72
def protocolToken(self):
return "D78EFD6F"
class Protocol73(Protocol72):
def version(self):
return 73
def protocolToken(self):
return "D7FE8D6F"
def buildRescheduleRequest(self, scheduleId):
if scheduleId == 0:
return ['RESCHEDULE_RECORDINGS CHECK 0 0 0 MythBoxFrontend **any**']
else:
if scheduleId == -1:
scheduleId = 0
return ['RESCHEDULE_RECORDINGS MATCH %s 0 0 - MythBoxFrontend' % scheduleId]
class Protocol74(Protocol73):
def version(self):
return 74
def protocolToken(self):
return "SingingPotato"
# Current rev in mythversion.h
protocols = {
40: Protocol40(), # 0.21
41: Protocol41(),
42: Protocol42(),
43: Protocol43(),
44: Protocol44(),
45: Protocol45(),
46: Protocol46(),
47: Protocol47(),
48: Protocol48(),
49: Protocol49(),
50: Protocol50(), # 0.22
56: Protocol56(), # 0.23
23056: Protocol23056(), # 0.23.1 - mythbuntu weirdness
57: Protocol57(), # 0.24
58: Protocol58(), # 0.24
59: Protocol59(), # 0.24
60: Protocol60(), # 0.24
61: Protocol61(), # 0.24
62: Protocol62(), # 0.24
63: Protocol63(), # 0.24
64: Protocol64(), # 0.25
65: Protocol65(), # 0.25
66: Protocol66(), # 0.25
67: Protocol67(), # 0.25
68: Protocol68(), # 0.25 - VIDEO_LIST_UPDATE
69: Protocol69(), # 0.25 - QUERY_FILE_HASH
70: Protocol70(), # 0.25 - REOPEN
71: Protocol71(), # 0.25 - ASK_RECORDING GET_FREE_INPUTS
72: Protocol72(), # 0.25 - QUERY_ACTIVE_BACKENDS
73: Protocol73(), # 0.26 - RESCHEDULE_RECORDINGS
74: Protocol74() # 0.26
}
|
gpl-2.0
| -5,766,141,552,974,468,000
| 26.119374
| 140
| 0.571655
| false
| 3.894885
| false
| false
| false
|
jffm/pyanalyzer
|
core/rule.py
|
1
|
2373
|
# Copyright (c) 2008-2009 Junior (Frederic) FLEURIAL MONFILS
#
# This file is part of PyAnalyzer.
#
# PyAnalyzer is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# or see <http://www.opensource.org/licenses/gpl-3.0.html>
#
# Contact:
# Junior FLEURIAL MONFILS <frederic dot fleurialmonfils at cetic dot be>
__author__ = "Frederic F. MONFILS"
__version__ = "$Revision: $".split()[1]
__revision__ = __version__
# $Source: $
__date__ = "$Date: $"
__copyright__ = "Copyright (c) 2008-2009 Junior (Frederic) FLEURIAL MONFILS"
__license__ = "GPLv3"
__contact__ = "ffm at cetic.be"
"""This module implements a Rule
"""
import sys
import compiler
from core.metric import Metric
from core.writer.sqlwriter import SqlWriter
from core.writer.textwriter import TextWriter
class Rule(Metric):
"""A Rule is a Metric that is directly printed to stderr
"""
class config:
severity = None
code = 0
message = None
def __init__(self, writer):
self.writer = writer
self.row = dict(
(key,value)
for (key,value) in self.config.__dict__.items()
if not key.startswith("__"))
self.row.update(
code="%s%04d" % (self.config.severity[0].upper(), self.config.code),
message=self.__doc__.split("\n")[0]
)
def report(self, node, mapping):
self.row.update(
kind=node.__class__.__name__,
filename=self.filename,
name=getattr(node, "name", ""),
lineno=node.lineno,
message=self.row["message"] % mapping)
self.writer.writerow(self.row)
def visitModule(self, node, *args):
self.filename = node.filename
self.default(node, *args)
|
gpl-3.0
| -3,664,105,495,476,861,400
| 33.41791
| 80
| 0.626633
| false
| 3.50517
| false
| false
| false
|
leppa/home-assistant
|
homeassistant/components/xiaomi_miio/vacuum.py
|
1
|
15770
|
"""Support for the Xiaomi vacuum cleaner robot."""
import asyncio
from functools import partial
import logging
from miio import DeviceException, Vacuum # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.vacuum import (
ATTR_CLEANED_AREA,
PLATFORM_SCHEMA,
STATE_CLEANING,
STATE_DOCKED,
STATE_ERROR,
STATE_IDLE,
STATE_PAUSED,
STATE_RETURNING,
SUPPORT_BATTERY,
SUPPORT_CLEAN_SPOT,
SUPPORT_FAN_SPEED,
SUPPORT_LOCATE,
SUPPORT_PAUSE,
SUPPORT_RETURN_HOME,
SUPPORT_SEND_COMMAND,
SUPPORT_START,
SUPPORT_STATE,
SUPPORT_STOP,
StateVacuumDevice,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
CONF_NAME,
CONF_TOKEN,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from .const import (
DOMAIN,
SERVICE_CLEAN_ZONE,
SERVICE_MOVE_REMOTE_CONTROL,
SERVICE_MOVE_REMOTE_CONTROL_STEP,
SERVICE_START_REMOTE_CONTROL,
SERVICE_STOP_REMOTE_CONTROL,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Xiaomi Vacuum cleaner"
DATA_KEY = "vacuum.xiaomi_miio"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_TOKEN): vol.All(str, vol.Length(min=32, max=32)),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
},
extra=vol.ALLOW_EXTRA,
)
FAN_SPEEDS = {"Quiet": 38, "Balanced": 60, "Turbo": 77, "Max": 90, "Gentle": 105}
ATTR_CLEAN_START = "clean_start"
ATTR_CLEAN_STOP = "clean_stop"
ATTR_CLEANING_TIME = "cleaning_time"
ATTR_DO_NOT_DISTURB = "do_not_disturb"
ATTR_DO_NOT_DISTURB_START = "do_not_disturb_start"
ATTR_DO_NOT_DISTURB_END = "do_not_disturb_end"
ATTR_MAIN_BRUSH_LEFT = "main_brush_left"
ATTR_SIDE_BRUSH_LEFT = "side_brush_left"
ATTR_FILTER_LEFT = "filter_left"
ATTR_SENSOR_DIRTY_LEFT = "sensor_dirty_left"
ATTR_CLEANING_COUNT = "cleaning_count"
ATTR_CLEANED_TOTAL_AREA = "total_cleaned_area"
ATTR_CLEANING_TOTAL_TIME = "total_cleaning_time"
ATTR_ERROR = "error"
ATTR_RC_DURATION = "duration"
ATTR_RC_ROTATION = "rotation"
ATTR_RC_VELOCITY = "velocity"
ATTR_STATUS = "status"
ATTR_ZONE_ARRAY = "zone"
ATTR_ZONE_REPEATER = "repeats"
VACUUM_SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids})
SERVICE_SCHEMA_REMOTE_CONTROL = VACUUM_SERVICE_SCHEMA.extend(
{
vol.Optional(ATTR_RC_VELOCITY): vol.All(
vol.Coerce(float), vol.Clamp(min=-0.29, max=0.29)
),
vol.Optional(ATTR_RC_ROTATION): vol.All(
vol.Coerce(int), vol.Clamp(min=-179, max=179)
),
vol.Optional(ATTR_RC_DURATION): cv.positive_int,
}
)
SERVICE_SCHEMA_CLEAN_ZONE = VACUUM_SERVICE_SCHEMA.extend(
{
vol.Required(ATTR_ZONE_ARRAY): vol.All(
list,
[
vol.ExactSequence(
[vol.Coerce(int), vol.Coerce(int), vol.Coerce(int), vol.Coerce(int)]
)
],
),
vol.Required(ATTR_ZONE_REPEATER): vol.All(
vol.Coerce(int), vol.Clamp(min=1, max=3)
),
}
)
SERVICE_TO_METHOD = {
SERVICE_START_REMOTE_CONTROL: {"method": "async_remote_control_start"},
SERVICE_STOP_REMOTE_CONTROL: {"method": "async_remote_control_stop"},
SERVICE_MOVE_REMOTE_CONTROL: {
"method": "async_remote_control_move",
"schema": SERVICE_SCHEMA_REMOTE_CONTROL,
},
SERVICE_MOVE_REMOTE_CONTROL_STEP: {
"method": "async_remote_control_move_step",
"schema": SERVICE_SCHEMA_REMOTE_CONTROL,
},
SERVICE_CLEAN_ZONE: {
"method": "async_clean_zone",
"schema": SERVICE_SCHEMA_CLEAN_ZONE,
},
}
SUPPORT_XIAOMI = (
SUPPORT_STATE
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_RETURN_HOME
| SUPPORT_FAN_SPEED
| SUPPORT_SEND_COMMAND
| SUPPORT_LOCATE
| SUPPORT_BATTERY
| SUPPORT_CLEAN_SPOT
| SUPPORT_START
)
STATE_CODE_TO_STATE = {
2: STATE_IDLE,
3: STATE_IDLE,
5: STATE_CLEANING,
6: STATE_RETURNING,
7: STATE_CLEANING,
8: STATE_DOCKED,
9: STATE_ERROR,
10: STATE_PAUSED,
11: STATE_CLEANING,
12: STATE_ERROR,
15: STATE_RETURNING,
16: STATE_CLEANING,
17: STATE_CLEANING,
18: STATE_CLEANING,
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Xiaomi vacuum cleaner robot platform."""
if DATA_KEY not in hass.data:
hass.data[DATA_KEY] = {}
host = config[CONF_HOST]
token = config[CONF_TOKEN]
name = config[CONF_NAME]
# Create handler
_LOGGER.info("Initializing with host %s (token %s...)", host, token[:5])
vacuum = Vacuum(host, token)
mirobo = MiroboVacuum(name, vacuum)
hass.data[DATA_KEY][host] = mirobo
async_add_entities([mirobo], update_before_add=True)
async def async_service_handler(service):
"""Map services to methods on MiroboVacuum."""
method = SERVICE_TO_METHOD.get(service.service)
params = {
key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID
}
entity_ids = service.data.get(ATTR_ENTITY_ID)
if entity_ids:
target_vacuums = [
vac
for vac in hass.data[DATA_KEY].values()
if vac.entity_id in entity_ids
]
else:
target_vacuums = hass.data[DATA_KEY].values()
update_tasks = []
for vacuum in target_vacuums:
await getattr(vacuum, method["method"])(**params)
for vacuum in target_vacuums:
update_coro = vacuum.async_update_ha_state(True)
update_tasks.append(update_coro)
if update_tasks:
await asyncio.wait(update_tasks)
for vacuum_service in SERVICE_TO_METHOD:
schema = SERVICE_TO_METHOD[vacuum_service].get("schema", VACUUM_SERVICE_SCHEMA)
hass.services.async_register(
DOMAIN, vacuum_service, async_service_handler, schema=schema
)
class MiroboVacuum(StateVacuumDevice):
"""Representation of a Xiaomi Vacuum cleaner robot."""
def __init__(self, name, vacuum):
"""Initialize the Xiaomi vacuum cleaner robot handler."""
self._name = name
self._vacuum = vacuum
self.vacuum_state = None
self._available = False
self.consumable_state = None
self.clean_history = None
self.dnd_state = None
self.last_clean = None
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the status of the vacuum cleaner."""
if self.vacuum_state is not None:
# The vacuum reverts back to an idle state after erroring out.
# We want to keep returning an error until it has been cleared.
if self.vacuum_state.got_error:
return STATE_ERROR
try:
return STATE_CODE_TO_STATE[int(self.vacuum_state.state_code)]
except KeyError:
_LOGGER.error(
"STATE not supported: %s, state_code: %s",
self.vacuum_state.state,
self.vacuum_state.state_code,
)
return None
@property
def battery_level(self):
"""Return the battery level of the vacuum cleaner."""
if self.vacuum_state is not None:
return self.vacuum_state.battery
@property
def fan_speed(self):
"""Return the fan speed of the vacuum cleaner."""
if self.vacuum_state is not None:
speed = self.vacuum_state.fanspeed
if speed in FAN_SPEEDS.values():
return [key for key, value in FAN_SPEEDS.items() if value == speed][0]
return speed
@property
def fan_speed_list(self):
"""Get the list of available fan speed steps of the vacuum cleaner."""
return list(sorted(FAN_SPEEDS.keys(), key=lambda s: FAN_SPEEDS[s]))
@property
def device_state_attributes(self):
"""Return the specific state attributes of this vacuum cleaner."""
attrs = {}
if self.vacuum_state is not None:
attrs.update(
{
ATTR_DO_NOT_DISTURB: STATE_ON
if self.dnd_state.enabled
else STATE_OFF,
ATTR_DO_NOT_DISTURB_START: str(self.dnd_state.start),
ATTR_DO_NOT_DISTURB_END: str(self.dnd_state.end),
# Not working --> 'Cleaning mode':
# STATE_ON if self.vacuum_state.in_cleaning else STATE_OFF,
ATTR_CLEANING_TIME: int(
self.vacuum_state.clean_time.total_seconds() / 60
),
ATTR_CLEANED_AREA: int(self.vacuum_state.clean_area),
ATTR_CLEANING_COUNT: int(self.clean_history.count),
ATTR_CLEANED_TOTAL_AREA: int(self.clean_history.total_area),
ATTR_CLEANING_TOTAL_TIME: int(
self.clean_history.total_duration.total_seconds() / 60
),
ATTR_MAIN_BRUSH_LEFT: int(
self.consumable_state.main_brush_left.total_seconds() / 3600
),
ATTR_SIDE_BRUSH_LEFT: int(
self.consumable_state.side_brush_left.total_seconds() / 3600
),
ATTR_FILTER_LEFT: int(
self.consumable_state.filter_left.total_seconds() / 3600
),
ATTR_SENSOR_DIRTY_LEFT: int(
self.consumable_state.sensor_dirty_left.total_seconds() / 3600
),
ATTR_STATUS: str(self.vacuum_state.state),
}
)
if self.last_clean:
attrs[ATTR_CLEAN_START] = self.last_clean.start
attrs[ATTR_CLEAN_STOP] = self.last_clean.end
if self.vacuum_state.got_error:
attrs[ATTR_ERROR] = self.vacuum_state.error
return attrs
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def supported_features(self):
"""Flag vacuum cleaner robot features that are supported."""
return SUPPORT_XIAOMI
async def _try_command(self, mask_error, func, *args, **kwargs):
"""Call a vacuum command handling error messages."""
try:
await self.hass.async_add_executor_job(partial(func, *args, **kwargs))
return True
except DeviceException as exc:
_LOGGER.error(mask_error, exc)
return False
async def async_start(self):
"""Start or resume the cleaning task."""
await self._try_command(
"Unable to start the vacuum: %s", self._vacuum.resume_or_start
)
async def async_pause(self):
"""Pause the cleaning task."""
await self._try_command("Unable to set start/pause: %s", self._vacuum.pause)
async def async_stop(self, **kwargs):
"""Stop the vacuum cleaner."""
await self._try_command("Unable to stop: %s", self._vacuum.stop)
async def async_set_fan_speed(self, fan_speed, **kwargs):
"""Set fan speed."""
if fan_speed.capitalize() in FAN_SPEEDS:
fan_speed = FAN_SPEEDS[fan_speed.capitalize()]
else:
try:
fan_speed = int(fan_speed)
except ValueError as exc:
_LOGGER.error(
"Fan speed step not recognized (%s). " "Valid speeds are: %s",
exc,
self.fan_speed_list,
)
return
await self._try_command(
"Unable to set fan speed: %s", self._vacuum.set_fan_speed, fan_speed
)
async def async_return_to_base(self, **kwargs):
"""Set the vacuum cleaner to return to the dock."""
await self._try_command("Unable to return home: %s", self._vacuum.home)
async def async_clean_spot(self, **kwargs):
"""Perform a spot clean-up."""
await self._try_command(
"Unable to start the vacuum for a spot clean-up: %s", self._vacuum.spot
)
async def async_locate(self, **kwargs):
"""Locate the vacuum cleaner."""
await self._try_command("Unable to locate the botvac: %s", self._vacuum.find)
async def async_send_command(self, command, params=None, **kwargs):
"""Send raw command."""
await self._try_command(
"Unable to send command to the vacuum: %s",
self._vacuum.raw_command,
command,
params,
)
async def async_remote_control_start(self):
"""Start remote control mode."""
await self._try_command(
"Unable to start remote control the vacuum: %s", self._vacuum.manual_start
)
async def async_remote_control_stop(self):
"""Stop remote control mode."""
await self._try_command(
"Unable to stop remote control the vacuum: %s", self._vacuum.manual_stop
)
async def async_remote_control_move(
self, rotation: int = 0, velocity: float = 0.3, duration: int = 1500
):
"""Move vacuum with remote control mode."""
await self._try_command(
"Unable to move with remote control the vacuum: %s",
self._vacuum.manual_control,
velocity=velocity,
rotation=rotation,
duration=duration,
)
async def async_remote_control_move_step(
self, rotation: int = 0, velocity: float = 0.2, duration: int = 1500
):
"""Move vacuum one step with remote control mode."""
await self._try_command(
"Unable to remote control the vacuum: %s",
self._vacuum.manual_control_once,
velocity=velocity,
rotation=rotation,
duration=duration,
)
def update(self):
"""Fetch state from the device."""
try:
state = self._vacuum.status()
self.vacuum_state = state
self.consumable_state = self._vacuum.consumable_status()
self.clean_history = self._vacuum.clean_history()
self.last_clean = self._vacuum.last_clean_details()
self.dnd_state = self._vacuum.dnd_status()
self._available = True
except OSError as exc:
_LOGGER.error("Got OSError while fetching the state: %s", exc)
except DeviceException as exc:
_LOGGER.warning("Got exception while fetching the state: %s", exc)
async def async_clean_zone(self, zone, repeats=1):
"""Clean selected area for the number of repeats indicated."""
for _zone in zone:
_zone.append(repeats)
_LOGGER.debug("Zone with repeats: %s", zone)
try:
await self.hass.async_add_executor_job(self._vacuum.zoned_clean, zone)
except (OSError, DeviceException) as exc:
_LOGGER.error("Unable to send zoned_clean command to the vacuum: %s", exc)
|
apache-2.0
| 5,748,690,026,502,680,000
| 32.2
| 88
| 0.58104
| false
| 3.694939
| false
| false
| false
|
themattrix/bashup
|
bashup/test/test_bashup.py
|
1
|
6069
|
import subprocess
import os
import itertools
import textwrap
import pathlib2 as pathlib
import pytest
import temporary
from .. import test
# Compile some bashup and run it against multiple versions of bash. The versions are expected to be found in
# $BASH_VERSIONS_DIR. If none are found, or the environment variable is not set, the tests are skipped.
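# For example (the directory layout and version names here are assumptions, not part of this repo):
#   export BASH_VERSIONS_DIR=~/bash-builds    # containing e.g. binaries named bash-4.3, bash-5.0
# __find_bash_binaries() below simply globs that directory for file names starting with "bash".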
def test_compiled_bash(): # pragma: no cover
bash_binaries = __find_bash_binaries()
if not bash_binaries:
pytest.skip('bash executable not found')
for bash_binary in bash_binaries:
yield __assert_compiled_bash, bash_binary, __BASHUP_STR, __EXPECTED_OUTPUT, 55
# Compile some bashup and run it! This will only work if bash exists on the system. Otherwise the test is skipped.
def test_direct_run(): # pragma: no cover
if not __is_bash_in_path():
pytest.skip('bash executable not found')
if not __is_bashup_in_path():
pytest.skip('bashup executable not found')
with temporary.temp_file(__BASHUP_STR) as in_file:
p = subprocess.Popen(
args=('bashup', '--run', str(in_file)),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = [o.decode('utf-8').strip() for o in p.communicate()]
test.assert_eq(stdout, __EXPECTED_OUTPUT)
assert p.returncode == 55
def test_docopt(): # pragma: no cover
bash_binaries = __find_bash_binaries()
if not bash_binaries:
pytest.skip('bash executable not found')
docopt_str = textwrap.dedent("""
#!/bin/bash
#
# Naval Fate.
#
# Usage:
# naval_fate ship new <name>...
# naval_fate ship <name> move <x> <y> [--speed=<kn>]
# naval_fate ship shoot <x> <y>
# naval_fate mine (set|remove) <x> <y> [--moored|--drifting]
# naval_fate -h | --help
# naval_fate --version
#
# Options:
# -h --help Show this screen.
# --version Show version.
# --speed=<kn> Speed in knots [default: 10].
# --moored Moored (anchored) mine.
# --drifting Drifting mine.
#
# Version:
# Naval Fate 2.0
args=("${@}")
printf '%s\n' 'args=('
for i in "${!args[@]}"; do
printf ' [%q]=%q\n' "${i}" "${args[${i}]}"
done
printf ')\n'
""").strip()
expected_return_code = 0
# @fn main {
# @echo @args
# }
#
# @sourced || {
# @docopt
# main
# }
args_and_expected_output = (
(('ship', 'new', ' ship name'),
textwrap.dedent("""
args=(
[0]=ship
[1]=new
[2]=\\ \\ ship\\ \\ name
)
""").strip()),
(('ship', ' ship name', 'move', '-100', '200', '--speed=5.5'),
textwrap.dedent("""
args=(
[0]=ship
[1]=\\ \\ ship\\ \\ name
[2]=move
[3]=-100
[4]=200
[5]=--speed=5.5
)
""").strip()),
)
parameters = itertools.product(bash_binaries, args_and_expected_output)
for bash_binary, (script_args, expected_output) in parameters:
yield __assert_compiled_bash, bash_binary, docopt_str, expected_output, expected_return_code, script_args
#
# Test Helpers
#
__BASHUP_STR = textwrap.dedent("""
#!/bin/bash
@fn hi greeting='Hello', target='World' {
echo "${greeting}, ${target}!$@"
}
# We could do this with grep, but this way is pure bash.
@fn filter regex {
while read line; do
if [[ ${line} =~ ${regex} ]]; then
echo "${line}"
fi
done
}
# Ensure that default parameters work and can be overridden.
hi
hi --target="Human"
hi --greeting="Greetings"
hi --greeting="Greetings" --target="Human"
hi --greeting="Greetings" --target="Human" " Have" "fun!"
# Ensure that piping between fns works.
{
hi --greeting="What now" --target="Human?"
hi --greeting="Welcome" --target="Cyborg"
hi --greeting="Hi" --target="human"
} | filter --regex="[Hh]uman"
exit 55
""").strip()
__EXPECTED_OUTPUT = '\n'.join((
'Hello, World!',
'Hello, Human!',
'Greetings, World!',
'Greetings, Human!',
'Greetings, Human! Have fun!',
'What now, Human?!',
'Hi, human!'))
def __find_bash_binaries():
try:
return tuple((str(p) for p in pathlib.Path(os.environ['BASH_VERSIONS_DIR']).glob('bash*')))
except KeyError: # pragma: no cover
return () # pragma: no cover
def __is_bash_in_path():
try:
subprocess.check_call(('bash', '-c', ':'))
return True # pragma: no cover
except (subprocess.CalledProcessError, OSError): # pragma: no cover
return False # pragma: no cover
def __is_bashup_in_path():
try:
subprocess.check_call(('bashup', '--version'))
return True # pragma: no cover
except (subprocess.CalledProcessError, OSError): # pragma: no cover
return False # pragma: no cover
@temporary.in_temp_dir()
def __assert_compiled_bash(
bash_binary,
bashup_str,
expected_output,
expected_return_code,
script_args=()): # pragma: no cover
with temporary.temp_file(bashup_str) as in_file:
subprocess.check_call(args=(
'bashup',
'--in', str(in_file),
'--out', 'out.sh'))
p = subprocess.Popen(
args=(bash_binary, 'out.sh') + tuple(script_args),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = [o.decode('UTF-8').strip() for o in p.communicate()]
test.assert_eq(stdout, expected_output)
assert p.returncode == expected_return_code
|
mit
| 6,559,511,140,477,054,000
| 27.097222
| 114
| 0.526116
| false
| 3.680412
| true
| false
| false
|
vpelletier/neoppod
|
neo/admin/handler.py
|
1
|
5095
|
#
# Copyright (C) 2009-2016 Nexedi SA
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from neo.lib import logging, protocol
from neo.lib.handler import EventHandler
from neo.lib.protocol import uuid_str, Packets
from neo.lib.exception import PrimaryFailure
def check_primary_master(func):
def wrapper(self, *args, **kw):
if self.app.bootstrapped:
return func(self, *args, **kw)
raise protocol.NotReadyError('Not connected to a primary master.')
return wrapper
def forward_ask(klass):
return check_primary_master(lambda self, conn, *args, **kw:
self.app.master_conn.ask(klass(*args, **kw),
conn=conn, msg_id=conn.getPeerId()))
class AdminEventHandler(EventHandler):
"""This class deals with events for administrating cluster."""
@check_primary_master
def askPartitionList(self, conn, min_offset, max_offset, uuid):
logging.info("ask partition list from %s to %s for %s",
min_offset, max_offset, uuid_str(uuid))
self.app.sendPartitionTable(conn, min_offset, max_offset, uuid)
@check_primary_master
def askNodeList(self, conn, node_type):
if node_type is None:
node_type = 'all'
node_filter = None
else:
node_filter = lambda n: n.getType() is node_type
logging.info("ask list of %s nodes", node_type)
node_list = self.app.nm.getList(node_filter)
node_information_list = [node.asTuple() for node in node_list ]
p = Packets.AnswerNodeList(node_information_list)
conn.answer(p)
@check_primary_master
def askClusterState(self, conn):
conn.answer(Packets.AnswerClusterState(self.app.cluster_state))
@check_primary_master
def askPrimary(self, conn):
master_node = self.app.master_node
conn.answer(Packets.AnswerPrimary(master_node.getUUID()))
askLastIDs = forward_ask(Packets.AskLastIDs)
askLastTransaction = forward_ask(Packets.AskLastTransaction)
addPendingNodes = forward_ask(Packets.AddPendingNodes)
askRecovery = forward_ask(Packets.AskRecovery)
tweakPartitionTable = forward_ask(Packets.TweakPartitionTable)
setClusterState = forward_ask(Packets.SetClusterState)
setNodeState = forward_ask(Packets.SetNodeState)
checkReplicas = forward_ask(Packets.CheckReplicas)
truncate = forward_ask(Packets.Truncate)
class MasterEventHandler(EventHandler):
""" This class is just used to dispacth message to right handler"""
def _connectionLost(self, conn):
app = self.app
if app.listening_conn: # if running
assert app.master_conn in (conn, None)
conn.cancelRequests("connection to master lost")
app.reset()
app.uuid = None
raise PrimaryFailure
def connectionFailed(self, conn):
self._connectionLost(conn)
def connectionClosed(self, conn):
self._connectionLost(conn)
def dispatch(self, conn, packet, kw={}):
if 'conn' in kw:
# expected answer
if packet.isResponse():
packet.setId(kw['msg_id'])
kw['conn'].answer(packet)
else:
self.app.request_handler.dispatch(conn, packet, kw)
else:
# unexpected answers and notifications
super(MasterEventHandler, self).dispatch(conn, packet, kw)
def answerClusterState(self, conn, state):
self.app.cluster_state = state
def answerNodeInformation(self, conn):
        # XXX: This will no longer exist once the initialization module is
        # implemented to factor out code (as done for bootstrap)
logging.debug("answerNodeInformation")
def notifyPartitionChanges(self, conn, ptid, cell_list):
self.app.pt.update(ptid, cell_list, self.app.nm)
def answerPartitionTable(self, conn, ptid, row_list):
self.app.pt.load(ptid, row_list, self.app.nm)
self.app.bootstrapped = True
def sendPartitionTable(self, conn, ptid, row_list):
if self.app.bootstrapped:
self.app.pt.load(ptid, row_list, self.app.nm)
def notifyClusterInformation(self, conn, cluster_state):
self.app.cluster_state = cluster_state
def notifyNodeInformation(self, conn, node_list):
self.app.nm.update(node_list)
class MasterRequestEventHandler(EventHandler):
""" This class handle all answer from primary master node"""
# XXX: to be deleted ?
|
gpl-2.0
| 7,400,558,522,298,591,000
| 37.308271
| 78
| 0.671443
| false
| 3.901225
| false
| false
| false
|
Maslor/freshman-berries
|
FreshmanClientV8.py
|
1
|
1046
|
# -*- encoding: latin1 -*-
#FRESHMAN BERRIES
#Client
#Version: 8.1
#Author: NEETI
import socket
def enviar( mensagem ):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
servidor=('neetiproj.tagus.ist.utl.pt', 4000)
sock.connect( servidor )
mensagens = []
try:
msg = mensagem.encode('latin1')
sock.sendall( msg )
if ( mensagem[:2] == "/r" ):
while True:
data = sock.recv(2048)
data = data.decode('latin1')
if ( data is not None ):
mensagens.append(data)
break;
finally:
sock.close()
return mensagens
def menu():
a = None
    while ( a != "/x" ):
a = str(input(": "))
d = enviar(a)
if ( d is not None ):
for m in d:
print(m)
''' try:
menu()
except Exception as ex:
print (ex)
input() '''
|
gpl-2.0
| -5,482,627,804,460,246,000
| 21.288889
| 60
| 0.441683
| false
| 3.619377
| false
| false
| false
|
prasannav7/ggrc-core
|
test/integration/ggrc_workflows/notifications/test_recurring_cycles.py
|
1
|
3523
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
from integration.ggrc import TestCase
from freezegun import freeze_time
from mock import patch
from ggrc.notifications import common
from ggrc.models import Person
from integration.ggrc_workflows.generator import WorkflowsGenerator
from integration.ggrc.api_helper import Api
from integration.ggrc.generator import ObjectGenerator
class TestRecurringCycleNotifications(TestCase):
def setUp(self):
TestCase.setUp(self)
self.api = Api()
self.generator = WorkflowsGenerator()
self.object_generator = ObjectGenerator()
_, self.assignee = self.object_generator.generate_person(
user_role="gGRC Admin")
self.create_test_cases()
def tearDown(self):
pass
def test_cycle_starts_in_less_than_X_days(self):
with freeze_time("2015-02-01"):
_, wf = self.generator.generate_workflow(self.quarterly_wf_1)
response, wf = self.generator.activate_workflow(wf)
self.assert200(response)
assignee = Person.query.get(self.assignee.id)
with freeze_time("2015-01-01"):
_, notif_data = common.get_todays_notifications()
self.assertNotIn(assignee.email, notif_data)
with freeze_time("2015-01-29"):
_, notif_data = common.get_todays_notifications()
self.assertIn(assignee.email, notif_data)
with freeze_time("2015-02-01"):
_, notif_data = common.get_todays_notifications()
self.assertIn(assignee.email, notif_data)
# TODO: this should mock google email api.
@patch("ggrc.notifications.common.send_email")
def test_marking_sent_notifications(self, mail_mock):
mail_mock.return_value = True
with freeze_time("2015-02-01"):
_, wf = self.generator.generate_workflow(self.quarterly_wf_1)
response, wf = self.generator.activate_workflow(wf)
self.assert200(response)
assignee = Person.query.get(self.assignee.id)
with freeze_time("2015-01-01"):
_, notif_data = common.get_todays_notifications()
self.assertNotIn(assignee.email, notif_data)
with freeze_time("2015-01-29"):
common.send_todays_digest_notifications()
_, notif_data = common.get_todays_notifications()
self.assertNotIn(assignee.email, notif_data)
with freeze_time("2015-02-01"):
_, notif_data = common.get_todays_notifications()
self.assertNotIn(assignee.email, notif_data)
def create_test_cases(self):
def person_dict(person_id):
return {
"href": "/api/people/%d" % person_id,
"id": person_id,
"type": "Person"
}
self.quarterly_wf_1 = {
"title": "quarterly wf 1",
"description": "",
"owners": [person_dict(self.assignee.id)],
"frequency": "quarterly",
"notify_on_change": True,
"task_groups": [{
"title": "tg_1",
"contact": person_dict(self.assignee.id),
"task_group_tasks": [{
"contact": person_dict(self.assignee.id),
"description": self.generator.random_str(100),
"relative_start_day": 5,
"relative_start_month": 2,
"relative_end_day": 25,
"relative_end_month": 2,
},
],
},
]
}
self.all_workflows = [
self.quarterly_wf_1,
]
|
apache-2.0
| -6,311,678,311,958,132,000
| 29.903509
| 78
| 0.642066
| false
| 3.587576
| true
| false
| false
|
christopherjbly/tasks-indicator
|
src/googletasksapi.py
|
1
|
18600
|
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-
#
#
# googletasksapi.py
#
# Copyright (C) 2011 Lorenzo Carbonell
# lorenzo.carbonell.cerezo@gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
#
from services import GoogleService
from logindialog import LoginDialog
from urllib.parse import urlencode, quote
import os
import json
import io
import comun
import datetime
import time
import uuid
import rfc3339
'''
Dependencies:
python-gflags
'''
OAUTH2_URL = 'https://accounts.google.com/o/oauth2/'
AUTH_URL = 'https://accounts.google.com/o/oauth2/auth'
TOKEN_URL = 'https://accounts.google.com/o/oauth2/token'
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
REDIRECT_URI = 'http://localhost'
APIKEY = 'AIzaSyDZjnvnk8IBZMUvleSSfGWNnktdKLiKlL0'
CLIENT_ID='197445608333-fd998ofp2ivpj090oputel25imtp7ptk.apps.googleusercontent.com'
CLIENT_SECRET='5So18nKZnWZsKGzOG0pmJUWh'
SCOPE='https://www.googleapis.com/auth/tasks'
class Task(dict):
def __init__(self,entry=None):
thetime = datetime.datetime.now()
position = str(int(time.mktime(thetime.timetuple())))
if len(position)<20:
position = '0'*(20-len(position))+position
self['kind'] = "tasks#task"
self['id'] = str(uuid.uuid4())
self['title'] = None
self['updated'] = rfc3339.rfc3339(thetime)
self['selfLink'] = None
self['parent'] = None
self['position'] = position
self['notes'] = None
self['status'] = 'needsAction'
self['due'] = None
self['completed'] = None
self['deleted'] = False
self['hidden'] = False
self['links'] = []
self['tasklist_id'] = None
self['sync'] = False
self.set_from_entry(entry)
def set_due(self,due):
self['due'] = rfc3339.rfc3339(due)
def get_completed(self):
return (self['status'] == 'completed')
def set_completed(self,iscompleted = True):
if iscompleted:
self['status'] = 'completed'
self['completed'] = rfc3339.rfc3339(datetime.datetime.now())
else:
self['status'] = 'needsAction'
self['completed'] = None
def set_from_entry(self,entry):
if entry is not None:
self.update(entry)
def __str__(self):
ans = ''
for key in self.keys():
ans += '%s: %s\n'%(key,self[key])
return ans
def get_position(self):
if 'position' in self.keys():
return(self['position'])
return None
def __eq__(self,other):
for key in self.keys():
if key is not None and other is not None and key in other.keys():
if self[key] != other[key]:
return False
else:
return False
return True
def __ne__(self,other):
return not self.__eq__(other)
def __lt__(self,other):
return self.get_position() < other.get_position()
def __le__(self,other):
return self.get_position() <= other.get_position()
def __gt__(self,other):
return self.get_position() > other.get_position()
def __ge__(self,other):
return self.get_position() >= other.get_position()
class TaskList(dict):
def __init__(self,entry=None):
self['kind'] = "tasks#taskList"
self['id'] = str(uuid.uuid4())
self['title'] = None
self['updated'] = rfc3339.rfc3339(datetime.datetime.now())
self['selfLink'] = None
self['tasks'] = {}
self.set_from_entry(entry)
def set_from_entry(self,entry):
if entry is not None:
self['kind'] = entry['kind'] if 'kind' in entry.keys() else None
self['id'] = entry['id'] if 'id' in entry.keys() else None
self['title'] = entry['title'] if 'title' in entry.keys() else None
self['updated'] = entry['updated'] if 'updated' in entry.keys() else None
self['selfLink'] = entry['selfLink'] if 'selfLink' in entry.keys() else None
self['tasks'] = {}
print('aqui')
if 'tasks' in entry.keys():
for atask_value in entry['tasks'].values():
atask = Task(atask_value)
self['tasks'][atask['id']] = atask
def set_tasks(self,tasks):
self['tasks'] = tasks
def __str__(self):
ans = ''
for key in self.keys():
ans += '%s: %s\n'%(key,self[key])
return ans
class TaskAlone(object):
def __init__(self):
self.tasklists = {}
def backup(self):
f = open(comun.BACKUP_FILE,'w')
f.write(json.dumps(self.tasklists, sort_keys = True, indent = 4))
f.close()
def create_tasklist(self,title):
tasklist = TaskList()
tasklist['title'] = title
self.tasklists[tasklist['id']] = tasklist
return tasklist
def edit_tasklist(self,tasklist):
self.tasklists[tasklist['id']] = tasklist
return tasklist
def remove_tasklist(self,tasklist):
del self.tasklists[tasklist['id']]
def create_task(self,atasklist,title):
atask = Task()
atask['title'] = title
atask['tasklist_id'] = atasklist['id']
self.tasklists[atasklist['id']]['tasks'][atask['id']] = atask
return atask
def edit_task(self,task):
self.tasklists[task['tasklist_id']]['tasks'][task['id']] = task
return task
def remove_task(self,task):
del self.tasklists[task['tasklist_id']]['tasks'][task['id']]
def move_tasks(self,first_task,last_task):
temporal_position = first_task['position']
first_task['position'] = last_task['position']
last_task['position'] = temporal_position
def move_task_first(self,atask,tasklist_id=None):
tasks = self.get_tasks(tasklist_id)
if len(tasks)>0:
self.move_tasks(atask,tasks[0])
def get_tasklists(self):
return self.tasklists.values()
def get_tasks(self,tasklist_id = None):
tasks = []
if tasklist_id is None:
for tasklist in self.tasklists.values():
tasks.extend(tasklist['tasks'].values())
else:
if tasklist_id in self.tasklists.keys():
tasks = self.tasklists[tasklist_id]['tasks'].values()
return sorted(tasks)
def clear_completed_tasks(self,tasklist_id = None):
for task in self.get_tasks(tasklist_id = tasklist_id):
if task['status'] == 'completed':
self.remove_task(task)
def restore(self):
if os.path.exists(comun.BACKUP_FILE):
f = open(comun.BACKUP_FILE,'r')
data = f.read()
f.close()
midata = json.loads(data)
self.tasklists = {}
for tasklist_value in midata.values():
atasklist = TaskList(tasklist_value)
self.tasklists[atasklist['id']] = atasklist
else:
self.tasklists = {}
class GTAService(GoogleService):
def __init__(self,token_file):
GoogleService.__init__(self,auth_url=AUTH_URL,token_url=TOKEN_URL,redirect_uri=REDIRECT_URI,scope=SCOPE,client_id=CLIENT_ID,client_secret=CLIENT_SECRET,token_file=comun.TOKEN_FILE)
self.tasklists = {}
def read(self):
for atasklist in self._get_tasklists().values():
atasklist['tasks'] = self._get_tasks(atasklist['id'])
self.tasklists[atasklist['id']] = atasklist
def backup(self):
f = open(comun.BACKUP_FILE,'w')
f.write(json.dumps(self.tasklists, sort_keys = True, indent = 4))
f.close()
def restore(self):
f = open(comun.BACKUP_FILE,'r')
data = f.read()
f.close()
midata = json.loads(data)
self.tasklists = {}
for tasklist_value in midata.values():
atasklist = TaskList(tasklist_value)
tasks = {}
for task_value in atasklist['tasks'].values():
atask = Task(task_value)
tasks[atask['id']] = atask
atasklist['tasks'] = tasks
self.tasklists[atasklist['id']] = atasklist
def __do_request(self,method,url,addheaders=None,data=None,params=None,first=True):
headers ={'Authorization':'OAuth %s'%self.access_token}
if addheaders:
headers.update(addheaders)
print(headers)
if data:
if params:
response = self.session.request(method,url,data=data,headers=headers,params=params)
else:
response = self.session.request(method,url,data=data,headers=headers)
else:
if params:
response = self.session.request(method,url,headers=headers,params=params)
else:
response = self.session.request(method,url,headers=headers)
print(response)
if response.status_code == 200 or response.status_code == 201 or response.status_code == 204:
return response
elif (response.status_code == 401 or response.status_code == 403) and first:
ans = self.do_refresh_authorization()
print(ans)
if ans:
return self.__do_request(method,url,addheaders,data,params,first=False)
return None
def _get_tasklists(self):
tasklists = {}
params = {'maxResults':1000000}
response = self.__do_request('GET','https://www.googleapis.com/tasks/v1/users/@me/lists',params=params)
if response and response.text:
try:
answer = json.loads(response.text)
if 'items' in answer.keys():
for item in answer['items']:
atasklist = TaskList(item)
tasklists[atasklist['id']] = atasklist
except:
pass
return tasklists
def _add_tasklist(self,title):
url = 'https://www.googleapis.com/tasks/v1/users/@me/lists'
data = {'kind': 'tasks#taskList','title':title}
body = json.dumps(data).encode('utf-8')
addheaders={'Content-type':'application/json'}
response = self.__do_request('POST',url,addheaders=addheaders,data = body)
if response and response.text:
try:
ans = json.loads(response.text)
print(ans)
return TaskList(ans)
except Exception as e:
print(e)
return None
def _edit_tasklist(self,tasklist_id, title):
params = {'tasklist':tasklist_id}
url = 'https://www.googleapis.com/tasks/v1/users/@me/lists/%s'%(tasklist_id)
data = {
'title':title
}
body = json.dumps(data).encode('utf-8')
addheaders={'Content-type':'application/json'}
response = self.__do_request('PATCH',url,addheaders=addheaders,params=params,data = body)
if response and response.text:
try:
				return TaskList(json.loads(response.text))
except Exception as e:
print(e)
return None
def _delete_tasklist(self,tasklist):
url = 'https://www.googleapis.com/tasks/v1/users/@me/lists/%s'%(tasklist['id'])
params = {'tasklist':tasklist['id']}
response = self.__do_request('DELETE',url,params = params)
if response and response.text:
try:
return True
except Exception as e:
print(e)
return False
def _get_tasks(self,tasklist_id = '@default'):
tasks = {}
params = {'tasklist':tasklist_id,'maxResults':1000000}
url = 'https://www.googleapis.com/tasks/v1/lists/%s/tasks'%(tasklist_id)
response = self.__do_request('GET',url,params=params)
if response and response.text:
try:
answer = json.loads(response.text)
if 'items' in answer.keys():
for item in answer['items']:
atask = Task(item)
atask['tasklist_id'] = tasklist_id
tasks[atask['id']] = atask
except:
pass
return tasks
def _clear_completed_tasks(self,tasklist_id = '@default'):
params = {'tasklist':tasklist_id}
url = 'https://www.googleapis.com/tasks/v1/lists/%s/clear'%(tasklist_id)
addheaders={'Content-Length':'0'}
response = self.__do_request('POST',url,params=params,addheaders=addheaders)
if response is not None:
try:
return True
except Exception as e:
print(e)
return False
def _delete_task(self,tasklist_id,task_id):
params = {'tasklist':tasklist_id,'task':task_id}
url = 'https://www.googleapis.com/tasks/v1/lists/%s/tasks/%s'%(tasklist_id,task_id)
response = self.__do_request('DELETE',url,params=params)
if response and response.text:
try:
return True
except Exception as e:
print(e)
return False
def _edit_task(self,tasklist_id,task_id, title,notes=None, iscompleted=False, due=None, data_completed=None,deleted=False):
params = {'tasklist':tasklist_id,'task':task_id}
url = 'https://www.googleapis.com/tasks/v1/lists/%s/tasks/%s'%(tasklist_id,task_id)
data = {
'kind': 'tasks#task',
'title':title,
'deleted':deleted
}
if notes is not None:
data['notes'] = notes
if iscompleted:
data['status'] = 'completed'
if data_completed is not None:
data['completed'] = rfc3339.rfc3339(data_completed)
else:
data['completed'] = rfc3339.rfc3339(datetime.datetime.now())
else:
data['status'] = 'needsAction'
data['completed'] = None
if due is not None:
data['due'] = rfc3339.rfc3339(due)
body = json.dumps(data).encode('utf-8')
addheaders={'Content-type':'application/json'}
response = self.__do_request('PATCH',url,addheaders=addheaders,params=params,data = body)
if response and response.text:
try:
atask = Task(json.loads(response.text))
atask['tasklist_id'] = tasklist_id
return atask
except Exception as e:
print(e)
return None
def _move_task(self,tasklist_id,task_id,parent_id=None,previous_id=None):
params = {'tasklist':tasklist_id,'task':task_id}
if parent_id is not None:
params['parent'] = parent_id
if previous_id is not None:
params['previous'] = previous_id
addheaders={'Content-Length':'0'}
url = 'https://www.googleapis.com/tasks/v1/lists/%s/tasks/%s/move'%(tasklist_id,task_id)
response = self.__do_request('POST',url,params=params,addheaders=addheaders)
if response and response.text:
try:
atask = Task(json.loads(response.text))
atask['tasklist_id'] = tasklist_id
return atask
except Exception as e:
print(e)
return None
def _add_task(self,tasklist_id,title,notes=None, iscompleted=False, due=None, data_completed=None,deleted=False):
params = {'tasklist':tasklist_id}
url = 'https://www.googleapis.com/tasks/v1/lists/%s/tasks'%(tasklist_id)
data = {
'kind': 'tasks#task',
'title':title,
'deleted':deleted
}
if notes is not None:
data['notes'] = notes
if iscompleted:
data['status'] = 'completed'
if data_completed is not None:
data['completed'] = rfc3339.rfc3339(data_completed)
else:
data['completed'] = rfc3339.rfc3339(datetime.datetime.now())
else:
data['status'] = 'needsAction'
data['completed'] = None
if due is not None:
data['due'] = rfc3339.rfc3339(due)
body = json.dumps(data).encode('utf-8')
addheaders={'Content-type':'application/json'}
response = self.__do_request('POST',url,addheaders=addheaders,params=params,data = body)
if response and response.text:
try:
atask = Task(json.loads(response.text))
atask['tasklist_id'] = tasklist_id
return atask
except Exception as e:
print(e)
return None
def get_tasklists(self):
tasklists = self._get_tasklists()
return tasklists
def create_tasklist(self,title):
return self._add_tasklist(title)
def update_tasklist(self, tasklist):
		return self._edit_tasklist(tasklist['id'], tasklist['title'])
def delete_tasklist(self,tasklist):
return self._delete_tasklist(tasklist)
def clear_completed_tasks(self,tasklist_id = '@default'):
return self._clear_completed_tasks(tasklist_id = tasklist_id)
def get_tasks(self, tasklist_id = '@default'):
tasks = {}
if tasklist_id is None:
for atasklist in self._get_tasklists().values():
for task in self._get_tasks(atasklist['id']).values():
tasks[task['id']] = task
else:
tasks = self._get_tasks(tasklist_id)
return tasks
def create_task(self, tasklist_id = '@default', title = '', notes=None, iscompleted=False, due=None, data_completed=None,deleted=False):
atask = self._add_task(tasklist_id,title,notes=notes, iscompleted=iscompleted, due=due, data_completed=data_completed,deleted=deleted)
return atask
def move_task(self, task_id, previous_task_id,tasklist_id = '@default'):
return self._move_task(tasklist_id,task_id,previous_id=previous_task_id)
def move_task_first(self,task_id, tasklist_id = '@default'):
return self._move_task(tasklist_id,task_id)
def edit_tasklist(self, tasklist_id, title):
return self._edit_tasklist(tasklist_id,title)
def edit_task(self, task_id, tasklist_id = '@default', title = None, notes = None, iscompleted = False, due = None):
		return self._edit_task(tasklist_id,task_id,title,notes,iscompleted,due)
def delete_task(self, task_id, tasklist_id = '@default'):
return self._delete_task(tasklist_id,task_id)
if __name__ == '__main__':
ta = TaskAlone()
ta.restore()
print(ta.tasklists)
#tasklist = ta.tasklists['398cecc5-a699-4b4d-94da-5c856244d04c']
#task = ta.create_task(tasklist,'otra prueba')
'''
print(ta.tasklists)
tasklist = ta.tasklists['398cecc5-a699-4b4d-94da-5c856244d04c']
tasklist = ta.create_tasklist('lista de prueba')
print(tasklist)
task = ta.create_task(tasklist,'prueba')
print(task)
print(tasklist)
print(ta.tasklists)
'''
'''
tasklist = ta.create_tasklist('prueba')
print(tasklist)
task = ta.create_task(tasklist,'prueba')
print(task)
print(tasklist)
task['title'] = 'La tarea de la lista'
print(tasklist)
'''
ta.backup()
'''
gta = GTAService(token_file = comun.TOKEN_FILE)
#gc = GoogleCalendar(token_file = comun.TOKEN_FILE)
print(gta.do_refresh_authorization())
if gta.access_token is None or gta.refresh_token is None:
authorize_url = gta.get_authorize_url()
print(authorize_url)
ld = LoginDialog(authorize_url)
ld.run()
temporary_token = ld.code
ld.destroy()
print(temporary_token)
print(gta.get_authorization(temporary_token))
print(gta.get_tasklists())
#print(gta.create_tasklist('Una lista de ejemplo'))
#print(gta.get_tasks())
print'#############################################################'
print(gta.clear_completed_tasks('@default'))
print'#############################################################'
atask = (gta.create_task(tasklist_id='MDU4MDg5OTIxODI5ODgyMTE0MTg6MTA2NTc3MDc0Mzow',title='prueba'))
print'#############################################################'
print(atask)
print'#############################################################'
gta.move_task_first(atask['id'],atask['tasklist_id'])
gta.read()
atask = gta.edit_task(atask['id'],atask['tasklist_id'],title='otra prueba')
print(atask)
'''
'''
for tasklist in gta.get_tasklists():
print '########################################################'
print tasklist
for task in gta.get_tasks(tasklist_id = tasklist['id']):
print task
'''
'''
for tasklist in gta.get_tasklists():
print tasklist
#print gta.create_tasklist('desde ubuntu')
#print gta.get_tasklist('MDU4MDg5OTIxODI5ODgyMTE0MTg6MDow')
print gta.get_tasks()
for task in gta.get_tasks():
print '%s -> %s'%(task['title'],task['id'])
#print gta.create_task(title = 'prueba2 desde ubuntu',notes = 'primera prueba')
gta.move_task_first('MDU4MDg5OTIxODI5ODgyMTE0MTg6MDoy')
'''
|
gpl-3.0
| -4,239,862,703,708,010,500
| 30.260504
| 182
| 0.671075
| false
| 2.894942
| false
| false
| false
|
MKLab-ITI/DanceAnno
|
DanceAnno_Loader.py
|
1
|
20273
|
# This class loads the data
# 1. Skeleton (.skel or .mat)
# 2. Video (a folder with frames XXXXXX_[index].png, .jpg, or .jpeg;
#    if you have an actual video file, you can use ffmpeg to split it into frames,
#    see the example command below)
# 3. Choreography (.svl)
# 4. Music beats (.txt)
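# A hypothetical ffmpeg invocation for item 2 (the input file name, frame rate and output
# folder are illustrative assumptions, not something this project ships):
#   ffmpeg -i recording.mp4 -vf fps=30 frames/recording_%06d.png
# This writes frames/recording_000001.png, frames/recording_000002.png, ... which
# match the XXXXXX_[index] naming pattern parsed in loadFramesByDirectory below.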
import os
import sys
import DanceAnno_Application
__author__ = 'DIMITRIOS'
from tkinter import *
from tkinter import ttk # ttk is a little more beautiful than tk.
from tkinter.filedialog import askopenfilename, askdirectory
from tkinter.messagebox import showerror
from tkinter import messagebox
sys.path.append( os.path.join('.', 'Utils' ))
import readsvl # function to read svls
import readtxt # function to read txts
import readskel # function to read body skeleton trajectories
# if PyCharm underlines them with red, just ignore (alt+enter -> ignore)
class Loader:
def __init__(self):
        # This variable automatically drives the folder selection and saves you some clicks
self.debug_FLAG = False
self.db = 'salsa' # salsa or calus
self.debug_fastaccess = 'bertrand_c3_t1'
# Are the data loaded ?
self.skeletonLoadedFlag = False
self.videoLoadedFlag = False
self.choreoLoadedFlag = False
# GUI init
self.root = Tk()
self.root.configure(background='#000')
self.root.title("Dance Annotator")
# ask for permission to close window
self.root.protocol("WM_DELETE_WINDOW", self.close_window)
# Window initial dimensions
w = 900 # The value of the width
h = 300 # The value of the height of the window
# Your screen width and height
ws = self.root.winfo_screenwidth()
hs = self.root.winfo_screenheight()
# Top left corner of the window
x = (ws/2) - (w/2)
y = (hs/2) - (h/2)
self.root.geometry('%dx%d+%d+%d' % (w, h, x, y))
# Data
# Sampling rate of Kinect
self.Fs = 0
# Length of the Kinect signals
self.length_signal_samples = 0
# Number of music beats (optional)
self.nBeats = 0
# Store the index of Video frames (XXXXXXX_[index].jpg)
self.indexFrames = []
self.dname = "" # directory where video frames are located
self.prefixname = "" # the part before underscore of XXXXX_[index].jpg
self.annotationSecs = [] # First level annotation
self.labels = {}
self.annotationSecsB = [] # Second level annotation
self.labelsB = {}
self.beats = {} # Beats indicators
# Vars to indicate the parsing status of each file
self.skeletonStatusSTR = StringVar()
self.skeletonStatusSTR.set("Empty")
self.videoStatusSTR = StringVar()
self.videoStatusSTR.set("Empty")
self.choreoStatusSTR = StringVar()
self.choreoStatusSTR.set("Empty")
self.mbeatsStatusSTR = StringVar()
self.mbeatsStatusSTR.set("Empty")
# Start the GUI design
# Coloring style for ttk
style = ttk.Style()
style.configure("BW.TFrame", foreground="black", background="white")
style.configure("BW.TLabel", foreground="black", background="white")
style.configure("BW.TCheckbutton", foreground="black", background="white")
# Frame containing the loading functionalities
self.fr_filedialog = ttk.Frame(self.root, style="BW.TFrame")
# Frame containing the GUI navigation processes (Continue or Exit)
self.fr_exitcontinue = ttk.Frame(self.root, style="BW.TFrame")
# Just some text to explain what we are doing
self.lbl_explain = ttk.Label(self.fr_filedialog, text="Select the resources to annotate", style="BW.TLabel")
# --- FILE SELECTION WIDGETS ----
# 1 SKELETON
self.lbl_namelbl_mat_skeleton = ttk.Label(self.fr_filedialog, text="Skeleton Data", style="BW.TLabel")
self.entry_name_mat = Entry(self.fr_filedialog)
self.bt_mat_load = Button(self.fr_filedialog, text="...", command=self.loadSkeletonData)
self.lbl_namelbl_hint_skeleton = ttk.Label(self.fr_filedialog, text=".mat or .skel", style="BW.TLabel")
self.lbl_namelbl_status_skeleton = ttk.Label(self.fr_filedialog, textvariable=self.skeletonStatusSTR, style="BW.TLabel")
#self.separatorBtSkel = ttk.Separator(self.fr_filedialog,orient=VERTICAL)
# 2 VIDEO FRAMES
self.lbl_namelbl_frames_video = ttk.Label(self.fr_filedialog, text="Folder with frame data", style="BW.TLabel")
self.entry_name_frames = Entry(self.fr_filedialog)
self.bt_frames = Button(self.fr_filedialog, text="...", command= self.loadFramesByDirectory)
self.lbl_namelbl_hint_video = ttk.Label(self.fr_filedialog, text="A folder with jpeg, jpg, or png files", style="BW.TLabel")
self.lbl_namelbl_status_video = ttk.Label(self.fr_filedialog, textvariable=self.videoStatusSTR, style="BW.TLabel")
#self.separatorFramesVideo = ttk.Separator(self.fr_filedialog,orient=VERTICAL)
# 3 CHOREOGRAPHY
self.lbl_load_choreo = ttk.Label(self.fr_filedialog, text="Load existing choreography (Optional)", style="BW.TLabel")
self.entry_name_choreo = Entry(self.fr_filedialog)
self.bt_load_ch = Button(self.fr_filedialog, text="...", command= self.loadChoreography)
self.lbl_namelbl_hint_choreo = ttk.Label(self.fr_filedialog, text="Provide an existing .txt otherwise a new one will be created", style="BW.TLabel" )
self.lbl_namelbl_status_choreo = ttk.Label(self.fr_filedialog, textvariable=self.choreoStatusSTR, style="BW.TLabel")
# 4 Music beats
self.lbl_load_mbeats = ttk.Label(self.fr_filedialog, text="Load music beats (Optional)", style="BW.TLabel")
self.entry_name_mbeats = Entry(self.fr_filedialog)
self.bt_load_mbeats = Button(self.fr_filedialog, text="...", command= self.loadMusicBeats)
self.lbl_namelbl_hint_mbeats = ttk.Label(self.fr_filedialog, text="Music beats in .txt format", style="BW.TLabel")
self.lbl_namelbl_status_mbeats = ttk.Label(self.fr_filedialog, textvariable=self.mbeatsStatusSTR, style="BW.TLabel")
self.bt_continue = Button(self.fr_exitcontinue, text="Continue", command=self.StartAnno, state = DISABLED)
self.bt_exit = Button(self.fr_exitcontinue, text="Exit", command=self.close_window)
# --- PLACEMENT OF WIDGETs IN THE ROOT WINDOW -------
self.fr_filedialog.grid(row=0, column=0, columnspan=4, sticky=(N, S, E, W), padx=5)
self.fr_exitcontinue.grid(row=1, column=0, columnspan=4, sticky=(E), ipadx=50, padx=5)
# Explanation
self.lbl_explain.grid(row=0, column=0, columnspan=4, rowspan=1, sticky=(E,W), padx=5)
# Labels
self.lbl_namelbl_mat_skeleton.grid(column=0, sticky=(W), row=1, columnspan=1, rowspan=1, pady=5, padx=5)
self.entry_name_mat.grid(column=1, sticky=(N, S, E, W), row=1, columnspan=1, rowspan=1, pady=5, padx=5)
self.bt_mat_load.grid(column=2, sticky=(N, S, E, W), row=1, columnspan=1, rowspan=1, pady=5, padx=5)
self.lbl_namelbl_hint_skeleton.grid(column=3, sticky=(W), row=1, columnspan=1, rowspan=1, padx=5)
self.lbl_namelbl_status_skeleton.grid(column=4, sticky=(W), row=1, columnspan=1, rowspan=1, padx=5)
#self.separatorBtSkel.pack(side="left", fill=Y, padx=5)
self.lbl_namelbl_frames_video.grid(row=2, column=0, columnspan=1, rowspan=1, sticky=(W), padx=5)
self.entry_name_frames.grid(row=2, column=1, columnspan=1, rowspan=1, sticky=(N, S, E, W), pady=5, padx=5)
self.bt_frames.grid(row=2, column=2, columnspan=1, rowspan=1, sticky=(N, S, E, W), pady=5, padx=5)
self.lbl_namelbl_hint_video.grid(row=2, column=3, columnspan=1, rowspan=1, sticky=(W), padx=5)
self.lbl_namelbl_status_video.grid(row=2, column=4, columnspan=1, rowspan=1, sticky=(W), padx=5)
#self.separatorFramesVideo.pack(side="left", fill=Y, padx=5)
self.lbl_load_choreo.grid(row=3, column=0, columnspan=1, rowspan=1, sticky=(W), padx=5)
self.entry_name_choreo.grid(row=3, column=1, columnspan=1, rowspan=1, sticky=(N, S, E, W), pady=5, padx=5)
self.bt_load_ch.grid(row=3, column=2, columnspan=1, rowspan=1, sticky=(N, S, E, W), pady=5, padx=5)
self.lbl_namelbl_hint_choreo.grid(row=3, column=3, columnspan=1, rowspan=1, sticky=(W), padx=5)
self.lbl_namelbl_status_choreo.grid(row=3, column=4, columnspan=1, rowspan=1, sticky=(W), padx=5)
self.lbl_load_mbeats.grid(row=4, column=0, columnspan=1, rowspan=1, sticky=(W), padx=5)
self.entry_name_mbeats.grid(row=4, column=1, columnspan=1, rowspan=1, sticky=(N, S, E, W), pady=5, padx=5)
self.bt_load_mbeats.grid(row=4, column=2, columnspan=1, rowspan=1, sticky=(N, S, E, W), pady=5, padx=5)
self.lbl_namelbl_hint_mbeats.grid(row=4, column=3, columnspan=1, rowspan=1, sticky=(W), padx=5)
self.lbl_namelbl_status_mbeats.grid(row=4, column=4, columnspan=1, rowspan=1, sticky=(W), padx=5)
self.bt_exit.grid(row = 0, column = 3, sticky = (E), pady = 5, padx = 15, ipadx=25)
self.bt_continue.grid(row = 0, column = 4, sticky = (W), pady = 5, padx = 15, ipadx = 15)
ttk.Sizegrip().grid(row=6, column=3, sticky=(E))
#--------------------
self.root.columnconfigure(0, weight=1)
self.root.rowconfigure(0, weight=1)
self.fr_filedialog.columnconfigure(0, weight=1)
self.fr_filedialog.columnconfigure(1, weight=1)
self.fr_filedialog.columnconfigure(2, weight=1, minsize=30)
self.fr_filedialog.columnconfigure(3, weight=1)
self.fr_filedialog.columnconfigure(4, weight=1, minsize=100)
# for i in range(4):
# self.fr_filedialog.rowconfigure(i, weight=1)
self.root.resizable(True, True)
# If in debugging mode then load automatically the files
if self.debug_FLAG:
self.loadSkeletonData()
self.loadFramesByDirectory()
self.loadChoreography()
self.loadMusicBeats()
self.root.after(1000, self.StartAnno)
# Ignite GUI
self.root.mainloop()
return
# --- SKELETON DATA -------
def loadSkeletonData(self):
if self.debug_FLAG:
if self.db == 'salsa':
fname = 'Data\\Salsa\\performance-trajectories\\' + self.debug_fastaccess + '_kinect_1.mat'
elif self.db == 'calus':
fname = 'Data\\Calus\\rec.skel'
else:
if self.db == 'salsa':
fname = askopenfilename(initialdir='Data\\Salsa\\performance-trajectories',
filetypes=(("mat file", "*.mat"),("skel file", "*.skel"), ("All files", "*.*") ))
elif self.db == 'calus':
fname = askopenfilename(initialdir='Data\\Calus',
filetypes=(("skel file", "*.skel"), ("mat file", "*.mat"), ("All files", "*.*") )) #performance-trajectories
if fname:
try:
self.entry_name_mat.insert(0, "..." + fname[-30:])
dummy, fextension = os.path.splitext(fname)
# ------- load skeleton trajectories -----------------------
if fextension=='.mat':
self.signals_wrapper, self.Fs = readskel.readmatlab_wrapper(fname)
else: # .skel
self.signals_wrapper, self.Fs = readskel.skelparser(fname)
nJoints = len(self.signals_wrapper)
sigA = next(iter(self.signals_wrapper.values()))
nTrajects = len(sigA[0])
self.skeletonStatusSTR.set(str(nTrajects) + " trajects")
self.skeletonLoadedFlag = True
self.checkContinueEnable()
# global Fs, length_signal_samples
self.length_signal_samples = nTrajects
# put a separation line
separatorBtsA = ttk.Separator(self.fr_filedialog, orient=HORIZONTAL)
separatorBtsA.grid(row=5, column=0, columnspan=5, sticky="WE")
# show available joints
self.signalsSelected = {}
self.chb_joint = {}
i = 0
for key,v in sorted(self.signals_wrapper.items()):
self.signalsSelected[key] = IntVar()
if key in ('Left foot', 'Right foot'):
self.signalsSelected[key].set(1)
self.chb_joint[key] = ttk.Checkbutton(self.fr_filedialog, text = key, variable = self.signalsSelected[key], style="BW.TCheckbutton")
self.chb_joint[key].grid(row=6 + i % 10, column=1+i//10, columnspan=1, rowspan=1, sticky=(W))
i += 1
#make my screen dimensions work
w = 900 #The value of the width
h = 300 + 12*22 #The value of the height of the window
ws = self.root.winfo_screenwidth()#This value is the width of the screen
hs = self.root.winfo_screenheight()#This is the height of the screen
# calculate position x, y
x = (ws/2) - (w/2)
y = (hs/2) - (h/2)
self.root.geometry('%dx%d+%d+%d' % (w, h, x, y))
#self.root.mainloop()
            except Exception as e:
self.skeletonLoadedFlag = False
self.checkContinueEnable()
print(e)
showerror("Open Source File", "Failed to read file\n'%s'\n'%s'" % (fname, e))
return
return
#=========== Load directory of frames ======================================
def loadFramesByDirectory(self):
if self.debug_FLAG:
if self.db == 'salsa':
self.dname = "Data\\Salsa\\Videos\\" + self.debug_fastaccess + "_kinect_1"
elif self.db == 'calus':
self.dname = "Data\\Calus\\frames"
else:
if self.db == 'salsa':
self.dname = askdirectory(initialdir='Data\\Salsa\\Videos')
elif self.db == 'calus':
self.dname = askdirectory(initialdir='Data\\Calus')
if self.dname:
try:
self.entry_name_frames.insert(0,"..." + self.dname[-30:])
self.indexFrames = []
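                # Frame filenames are expected to end in "_<index>" or "_f<index>"
                # before the extension (e.g. "clip_0012.png" or "clip_f0012.png";
                # these names are illustrative only); the loop below extracts the
                # frame index and the common filename prefix.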
for file in os.listdir(self.dname):
dum, self.checkvideof_ext = os.path.splitext(file)
if self.checkvideof_ext in ('.jpeg', '.JPG', '.JPEG', '.png', '.bmp', '.PNG', '.BMP'):
dum, self.videof_ext = os.path.splitext(file)
k = file.rfind("_")
l = file.rfind(".")
iFrame = file[k+1:l]
if iFrame[0] == 'f':
iFrame = iFrame[1:]
self.indexFrames.append(int(iFrame))
self.prefixname = file[:k+2]
else:
self.indexFrames.append(int(iFrame))
self.prefixname = file[:k+1]
self.indexFrames = sorted(self.indexFrames)
self.videoStatusSTR.set( str(len(self.indexFrames)) + " Frames" )
self.videoLoadedFlag = True
elif file in ('Thumbs.db'):
continue
else:
showerror("Fail", "Only jpeg, jpg, JPG, bmp, BMP, png, PNG frames are supported")
self.videoLoadedFlag = False
return
self.checkContinueEnable()
            except Exception as e:
self.videoLoadedFlag = False
self.checkContinueEnable()
showerror("Error", ("Open Source File\n'%s'" % e) + "\n" + ("Failed to open directory\n'%s'" % self.dname))
return
return
# =========== LOAD SVL CHOREOGRAPHY ===============================
def loadChoreography(self):
if self.debug_FLAG:
if self.db == 'salsa':
tempf =self.debug_fastaccess
tempf = list(tempf)
tempf[0] = tempf[0].upper()
tempf = ''.join(tempf)
fname = "Data\\Salsa\\SVL\\" + tempf + "_DanceAnnotationTool.svl"
elif self.db == 'calus':
fname = "Data\\Calus\\DanceAnnotationTool.txt"
else:
if self.db == 'salsa':
fname = askopenfilename(initialdir='Data\\Salsa\\SVL', filetypes=(("svl file", "*.svl"), ("txt file", "*.txt"), ("All files", "*.*") ))
elif self.db == 'calus':
fname = askopenfilename(initialdir='Data\\Calus', filetypes=(("txt file", "*.txt"), ("svl file", "*.svl"), ("All files", "*.*") ))
dummy, fextension = os.path.splitext(fname)
if fname:
try:
if fextension == '.svl':
params, self.annotationSecs, self.labels = readsvl.extractSvlAnnotRegionFile(fname)
self.entry_name_choreo.insert(0,"..." + fname[-30:])
self.choreoStatusSTR.set(str(len(self.labels)) + " labels")
self.choreoLoadedFlag = True
self.checkContinueEnable()
elif fextension == '.txt':
self.annotationSecs, self.labels, self.annotationSecsB, self.labelsB = readtxt.parse(fname)
self.entry_name_choreo.insert(0,"..." + fname[-30:])
self.choreoStatusSTR.set(str(len(self.labels)) + " labels")
self.choreoLoadedFlag = True
self.checkContinueEnable()
else:
showerror("Waring", "Parser does not exists for such a file, only svl or txt are supported")
except Exception as e:
self.choreoLoadedFlag = False
self.checkContinueEnable()
msg = "There was a problem in loading!\n'%s'" % e
if messagebox.askyesno("Error", msg + "\n" + "Do you want to choose another file?"):
self.loadChoreography()
else:
return
return
#=================== Music beats ========================================
def loadMusicBeats(self):
if self.debug_FLAG:
if self.db=='salsa':
fname = 'Data\\Salsa\\MusicBeats\\' + self.debug_fastaccess + '_feetcam-beats.txt'
else:
fname = None
else:
fname = askopenfilename(initialdir='Data\\Salsa\\MusicBeats',
filetypes=(("beats file", "*.txt"), ("All files", "*.*") )) #performance-trajectories
if fname:
try:
self.entry_name_mbeats.insert(0, "..." + fname[-30:])
dummy, fextension = os.path.splitext(fname)
# ------- load skeleton trajectories -----------------------
if fextension=='.txt':
self.beats = readtxt.parse_mbeats(fname)
else:
showerror("Error","Only txt file extension is supported")
return
self.nBeats = len(self.beats)
self.mbeatsStatusSTR.set(str(self.nBeats) + " Beats")
            except Exception as e:
showerror("Open Source File", "Failed to read file\n'%s'\n'%s'" % (fname, e))
return
return
def close_window(self):
#if messagebox.askokcancel("Exit", "Are you sure?"):
self.root.destroy()
def StartAnno(self):
self.root.destroy()
DanceAnno_Application.Application.StartAnnotating(self)
def checkContinueEnable(self):
if self.skeletonLoadedFlag and self.videoLoadedFlag: # and self.choreoLoadedFlag:
self.bt_continue.config(state = NORMAL)
|
apache-2.0
| 6,038,724,737,106,642,000
| 43.656388
| 157
| 0.565432
| false
| 3.611794
| true
| false
| false
|
wantee/pocolm
|
scripts/validate_int_dir.py
|
2
|
5948
|
#!/usr/bin/env python
# we're using python 3.x style print but want it to work in python 2.x,
from __future__ import print_function
import os
import argparse
import sys
import subprocess
parser = argparse.ArgumentParser(description="Validates directory containing integerized "
"text data, as produced by prepare_int_data.py",
epilog="E.g. validate_int_dir.py data/int.100k",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("int_dir",
help="Directory in which to find the data")
args = parser.parse_args()
os.environ['PATH'] = (os.environ['PATH'] + os.pathsep +
os.path.abspath(os.path.dirname(sys.argv[0])))
if not os.path.exists(args.int_dir):
sys.exit("validate_int_dir.py: Expected directory {0} to exist".format(args.int_dir))
if not os.path.exists("{0}/dev.txt.gz".format(args.int_dir)):
sys.exit("validate_int_dir.py: Expected file {0}/dev.txt.gz to exist".format(args.int_dir))
if not os.path.exists("{0}/num_train_sets".format(args.int_dir)):
sys.exit("validate_int_dir.py: Expected file {0}/num_train_sets to exist".format(args.int_dir))
# the following code validates the num_train_sets file and reads its value
# into the num_train_sets variable.
f = open("{0}/num_train_sets".format(args.int_dir))
line = f.readline()
try:
num_train_sets = int(line)
assert num_train_sets > 0 and len(line.split()) == 1
assert f.readline() == ''
except Exception as e:
sys.exit("validate_int_dir.py: Expected file {0}/num_train_sets to contain "
"an integer >0: {1}".format(args.int_dir, str(e)))
f.close()
# the following code checks num_words.
f = open("{0}/num_words".format(args.int_dir))
line = f.readline()
try:
num_words = int(line)
assert num_words > 0 and len(line.split()) == 1
assert f.readline() == ''
except Exception as e:
sys.exit("validate_int_dir.py: Expected file {0}/num_words to contain "
"an integer >0: {1}".format(args.int_dir, str(e)))
f.close()
# call validate_vocab.py to check the vocab.
if os.system("validate_vocab.py --num-words={0} {1}/words.txt".format(
num_words, args.int_dir)) != 0:
sys.exit(1)
num_words = subprocess.check_output("cat {0}/words.txt | wc -l".format(args.int_dir), shell=True)
try:
num_words = int(num_words) + 1
except Exception:
sys.exit("validate_int_dir.py: error getting number of words from {0}/words.txt".format(
args.int_dir))
names = set()
# check the 'names' file; it should have lines like:
# 1 switchboard
# 2 fisher
# etc.
f = open("{0}/names".format(args.int_dir))
for n in range(1, num_train_sets + 1):
line = f.readline()
try:
[m, name] = line.split()
if name in names:
sys.exit("validate_int_dir.py: repeated name {0} in {1}/names".format(
name, args.int_dir))
names.add(name)
assert int(m) == n
    except Exception:
sys.exit("validate_int_dir.py: bad {0}'th line of {1}/names: '{2}'".format(
n, args.int_dir, line[0:-1]))
f.close()
# validate the 'unigram_weights' file, if it exists. the 'unigram_weights' file
# is an optional part of the directory format; we put it here so it can be used
# to initialize the metaparameters in a reasonable way.
if os.path.exists("{0}/unigram_weights".format(args.int_dir)):
f = open("{0}/unigram_weights".format(args.int_dir))
names_with_weights = set()
while True:
line = f.readline()
if line == '':
break
try:
[name, weight] = line.split()
weight = float(weight)
assert weight >= 0.0 and weight <= 1.0
if name not in names:
sys.exit("validate_int_dir.py: bad line '{0}' in file {1}/unigram_weights: "
"name {2} does not appear in {1}/names".format(
line[:-1], args.int_dir, name))
if name in names_with_weights:
sys.exit("validate_int_dir.py: bad line '{0}' in file {1}/unigram_weights: "
"name {2} appears twice".format(
line[:-1], args.int_dir, name))
names_with_weights.add(name)
except Exception as e:
sys.exit("validate_int_dir.py: bad line '{0}' in file {1}/unigram_weights: {2}".format(
line[:-1], args.int_dir, str(e)))
for name in names:
if name not in names_with_weights:
sys.exit("validate_int_dir.py: expected the name {0} to appear in "
"{1}/unigram_weights".format(name, args.int_dir))
f.close()
names = ['dev']
for n in range(1, num_train_sets + 1):
names.append(str(n))
for name in names:
p = subprocess.Popen("gunzip -c {0}/{1}.txt.gz 2>/dev/null".format(args.int_dir, name),
stdout=subprocess.PIPE, shell=True)
num_ints = 0
for l in range(10):
line = p.stdout.readline()
        if not line:
break
try:
ints = [int(x) for x in line.split()]
num_ints += len(ints)
for i in ints:
if i < 3 or i > num_words:
sys.exit("validate_int_dir.py: value {0} out of range in file {1}/{2}.txt.gz".format(
i, args.int_dir, name))
        except Exception:
sys.exit("validate_int_dir.py: bad line {0} in file {1}/{2}.txt.gz".format(
line.strip('\n'), args.int_dir, name))
if num_ints == 0:
        # in theory it's possible that a file whose first 10 lines are empty
        # could be valid, as there is nothing wrong in principle with modeling
# empty sequences. But it's very odd.
sys.exit("validate_int_dir.py: did not see any data in file {0}/{1}.txt.gz".format(
args.int_dir, name))
p.terminate()
|
apache-2.0
| -8,972,085,041,014,795,000
| 38.390728
| 105
| 0.583053
| false
| 3.352875
| false
| false
| false
|
haphaeu/yoshimi
|
GumbleBootstrap/matrix.py
|
1
|
2495
|
'''
matrix.py
Basic operations with matrices:
- multiply
- transpose
- invert
And a simple linear least squares solver,
performing a linear fit between two vectors
yi = a+b.xi
Revision History
rev Date Description
0.1 2013.02.13 first issue, basic insanity check
Rafael Rossi
RaRossi@external.technip.com
rossirafael@yahoo.com
'''
#importing deepcopy to copy list and make sure the
#original lists are not altered
from copy import deepcopy
'''
matrix A with m rows and n columns
matrix B with o rows and p columns
AB = A.B with m rows and p columns
constraint: n==o
matrices are stored as lists of columns, i.e. A[column][row]
'''
def mmult(A,B):
n=len(A)
m=len(A[0])
p=len(B)
o=len(B[0])
if not n==o: return 0
AB=[[0.0 for i in range(m)] for j in range(p)]
for i in range(m):
for j in range(p):
AB[j][i]=0.0
for k in range(n):
AB[j][i]+=A[k][i]*B[j][k]
return AB
'''
returns the transpose of a matrix
matrix A with m rows and n columns
'''
def transpose(A):
n=len(A)
m=len(A[0])
B=[[0.0 for i in range(n)] for j in range(m)]
for i in range(m):
for j in range(n):
B[i][j]=A[j][i]
return B
'''
returns the inverse of a *square* matrix
'''
def minverse(Ao):
A=deepcopy(Ao)
m = len(A)
if not m==len(A[0]): return 0
#create zero matrix
AI=[[0.0 for i in range(m)] for j in range(m)]
#fill identity matrix
for i in range(m): AI[i][i]=1.0
#invert - Gaussian elimination
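    # Note: plain Gauss-Jordan elimination without pivoting, so a zero pivot
    # (e.g. a singular matrix) raises ZeroDivisionError.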
for k in range(m):
for i in range(k,m):
tmp = 1.0 * A[k][i]
for j in range(k,m):
A[j][i] /= tmp
for j in range(m):
AI[j][i] /= tmp
for i in range(k+1,m):
for j in range(k,m):
A[j][i]-= A[j][k]
for j in range(m):
AI[j][i] -= AI[j][k]
for i in range(m-2, -1, -1):
for j in range(m-1, i, -1):
for k in range(m):
AI[k][i] -= A[j][i] * AI[k][j]
for k in range(m):
A[k][i] -= A[j][i]*A[k][j]
return AI
'''
perform linear least squares fit between
2 vectors xo and yo.
returns coefficients a and b such that
yoi = a+b.xoi
constraints: both xo and yo must be flat lists of numbers,
e.g. xo=[x1, x2, ..., xn], and have the same size.
'''
def leastsquares(xo,yo):
n=len(xo)
if not n==len(yo): return 0
y=[deepcopy(yo)]
x=[[1]*n,deepcopy(xo)]
return mmult(mmult(minverse(mmult(transpose(x),x)),transpose(x)),y)[0]
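# Minimal usage sketch of the least-squares fit (the values below are made up):
if __name__ == '__main__':
    xs = [1.0, 2.0, 3.0, 4.0]
    ys = [3.1, 4.9, 7.2, 8.8]  # roughly y = 1 + 2x
    a, b = leastsquares(xs, ys)
    print('intercept a = %.3f, slope b = %.3f' % (a, b))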
|
lgpl-3.0
| 1,635,034,303,182,393,000
| 22.317757
| 74
| 0.550701
| false
| 2.867816
| false
| false
| false
|
ryanpetrello/cleaver
|
cleaver/backend/db/__init__.py
|
1
|
8256
|
from datetime import datetime
from . import model
from .session import session_for
from cleaver.experiment import Experiment as CleaverExperiment
from cleaver.backend import CleaverBackend
def _sqlalchemy_installed():
try:
import sqlalchemy
except ImportError: # pragma: nocover
raise ImportError(
'The database backend requires SQLAlchemy to be installed. '
'See http://pypi.python.org/pypi/SQLAlchemy'
)
return sqlalchemy
_sqlalchemy_installed()
from sqlalchemy import and_ # noqa
class SQLAlchemyBackend(CleaverBackend):
"""
Provides an interface for persisting and retrieving A/B test results
to a SQLAlchemy-supported database.
"""
def __init__(self, dburi='sqlite://', engine_options={}):
self.dburi = dburi
self.engine_options = engine_options
self.Session = session_for(
dburi=self.dburi,
**self.engine_options
)
def experiment_factory(self, experiment):
if experiment is None:
return None
return CleaverExperiment(
backend=self,
name=experiment.name,
started_on=experiment.started_on,
variants=tuple(v.name for v in experiment.variants)
)
def all_experiments(self):
"""
Retrieve every available experiment.
Returns a list of ``cleaver.experiment.Experiment``s
"""
try:
return [
self.experiment_factory(e)
for e in model.Experiment.query.all()
]
finally:
self.Session.close()
def get_experiment(self, name, variants):
"""
Retrieve an experiment by its name and variants (assuming it exists).
:param name a unique string name for the experiment
:param variants a list of strings, each with a unique variant name
Returns a ``cleaver.experiment.Experiment`` or ``None``
"""
try:
return self.experiment_factory(model.Experiment.get_by(name=name))
finally:
self.Session.close()
def save_experiment(self, name, variants):
"""
Persist an experiment and its variants (unless they already exist).
:param name a unique string name for the experiment
:param variants a list of strings, each with a unique variant name
"""
try:
model.Experiment(
name=name,
started_on=datetime.utcnow(),
variants=[
model.Variant(name=v, order=i)
for i, v in enumerate(variants)
]
)
self.Session.commit()
finally:
self.Session.close()
def is_verified_human(self, identity):
try:
return model.VerifiedHuman.get_by(identity=identity) is not None
finally:
self.Session.close()
def mark_human(self, identity):
try:
if model.VerifiedHuman.get_by(identity=identity) is None:
model.VerifiedHuman(identity=identity)
self.Session.commit()
finally:
self.Session.close()
def get_variant(self, identity, experiment_name):
"""
Retrieve the variant for a specific user and experiment (if it exists).
:param identity a unique user identifier
:param experiment_name the string name of the experiment
Returns a ``String`` or `None`
"""
try:
match = model.Participant.query.join(
model.Experiment
).filter(and_(
model.Participant.identity == identity,
model.Experiment.name == experiment_name
)).first()
return match.variant.name if match else None
finally:
self.Session.close()
def set_variant(self, identity, experiment_name, variant_name):
"""
Set the variant for a specific user.
:param identity a unique user identifier
:param experiment_name the string name of the experiment
:param variant_name the string name of the variant
"""
try:
experiment = model.Experiment.get_by(name=experiment_name)
variant = model.Variant.get_by(name=variant_name)
if experiment and variant and model.Participant.query.filter(and_(
model.Participant.identity == identity,
model.Participant.experiment_id == experiment.id,
model.Participant.variant_id == variant.id
)).count() == 0:
model.Participant(
identity=identity,
experiment=experiment,
variant=variant
)
self.Session.commit()
finally:
self.Session.close()
def _mark_event(self, type, experiment_name, variant_name):
try:
experiment = model.Experiment.get_by(name=experiment_name)
variant = model.Variant.get_by(name=variant_name)
if experiment and variant and model.TrackedEvent.query.filter(and_(
model.TrackedEvent.type == type,
model.TrackedEvent.experiment_id == experiment.id,
model.TrackedEvent.variant_id == variant.id
)).first() is None:
model.TrackedEvent(
type=type,
experiment=experiment,
variant=variant
)
self.Session.commit()
finally:
self.Session.close()
try:
experiment = model.Experiment.get_by(name=experiment_name)
variant = model.Variant.get_by(name=variant_name)
if experiment and variant:
self.Session.execute(
'UPDATE %s SET total = total + 1 '
'WHERE experiment_id = :experiment_id '
'AND variant_id = :variant_id '
'AND `type` = :type' % (
model.TrackedEvent.__tablename__
),
{
'experiment_id': experiment.id,
'variant_id': variant.id,
'type': type
}
)
self.Session.commit()
finally:
self.Session.close()
def mark_participant(self, experiment_name, variant):
"""
Mark a participation for a specific experiment variant.
:param experiment_name the string name of the experiment
:param variant the string name of the variant
"""
self._mark_event('PARTICIPANT', experiment_name, variant)
def mark_conversion(self, experiment_name, variant):
"""
Mark a conversion for a specific experiment variant.
:param experiment_name the string name of the experiment
:param variant the string name of the variant
"""
self._mark_event('CONVERSION', experiment_name, variant)
def _total_events(self, type, experiment_name, variant):
try:
row = model.TrackedEvent.query.join(
model.Experiment
).join(
model.Variant
).filter(and_(
model.TrackedEvent.type == type,
model.TrackedEvent.experiment_id == model.Experiment.id,
model.TrackedEvent.variant_id == model.Variant.id,
model.Experiment.name == experiment_name,
model.Variant.name == variant
)).first()
return row.total if row else 0
finally:
self.Session.close()
def participants(self, experiment_name, variant):
"""
The number of participants for a certain variant.
Returns an integer.
"""
return self._total_events('PARTICIPANT', experiment_name, variant)
def conversions(self, experiment_name, variant):
"""
The number of conversions for a certain variant.
Returns an integer.
"""
return self._total_events('CONVERSION', experiment_name, variant)
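# Illustrative usage sketch (the dburi, experiment and variant names below are
# only examples, not requirements of the backend):
#   backend = SQLAlchemyBackend(dburi='sqlite:////tmp/cleaver.db')
#   backend.save_experiment('show_promo', ('True', 'False'))
#   backend.set_variant('some-user-id', 'show_promo', 'True')
#   backend.mark_participant('show_promo', 'True')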
|
bsd-3-clause
| 4,451,674,343,953,962,000
| 32.975309
| 79
| 0.562379
| false
| 4.791642
| false
| false
| false
|
stackforge/wsme
|
wsme/api.py
|
1
|
7477
|
import traceback
import functools
import inspect
import logging
import six
import wsme.exc
import wsme.types
from wsme import utils
log = logging.getLogger(__name__)
def iswsmefunction(f):
return hasattr(f, '_wsme_definition')
def wrapfunc(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
return f(*args, **kwargs)
wrapper._wsme_original_func = f
return wrapper
def getargspec(f):
f = getattr(f, '_wsme_original_func', f)
return inspect.getargspec(f)
class FunctionArgument(object):
"""
An argument definition of an api entry
"""
def __init__(self, name, datatype, mandatory, default):
#: argument name
self.name = name
#: Data type
self.datatype = datatype
#: True if the argument is mandatory
self.mandatory = mandatory
#: Default value if argument is omitted
self.default = default
def resolve_type(self, registry):
self.datatype = registry.resolve_type(self.datatype)
class FunctionDefinition(object):
"""
An api entry definition
"""
def __init__(self, func):
#: Function name
self.name = func.__name__
#: Function documentation
self.doc = func.__doc__
#: Return type
self.return_type = None
#: The function arguments (list of :class:`FunctionArgument`)
self.arguments = []
        #: If the body carries the data of a single argument, its type
self.body_type = None
#: Status code
self.status_code = 200
#: True if extra arguments should be ignored, NOT inserted in
#: the kwargs of the function and not raise UnknownArgument
#: exceptions
self.ignore_extra_args = False
#: name of the function argument to pass the host request object.
#: Should be set by using the :class:`wsme.types.HostRequest` type
#: in the function @\ :function:`signature`
self.pass_request = False
        #: Dictionary of protocol-specific options.
self.extra_options = None
@staticmethod
def get(func):
"""
Returns the :class:`FunctionDefinition` of a method.
"""
if not hasattr(func, '_wsme_definition'):
fd = FunctionDefinition(func)
func._wsme_definition = fd
return func._wsme_definition
def get_arg(self, name):
"""
Returns a :class:`FunctionArgument` from its name
"""
for arg in self.arguments:
if arg.name == name:
return arg
return None
def resolve_types(self, registry):
self.return_type = registry.resolve_type(self.return_type)
self.body_type = registry.resolve_type(self.body_type)
for arg in self.arguments:
arg.resolve_type(registry)
def set_options(self, body=None, ignore_extra_args=False, status_code=200,
rest_content_types=('json', 'xml'), **extra_options):
self.body_type = body
self.status_code = status_code
self.ignore_extra_args = ignore_extra_args
self.rest_content_types = rest_content_types
self.extra_options = extra_options
def set_arg_types(self, argspec, arg_types):
args, varargs, keywords, defaults = argspec
if args[0] == 'self':
args = args[1:]
arg_types = list(arg_types)
if self.body_type is not None:
arg_types.append(self.body_type)
for i, argname in enumerate(args):
datatype = arg_types[i]
mandatory = defaults is None or i < (len(args) - len(defaults))
default = None
if not mandatory:
default = defaults[i - (len(args) - len(defaults))]
if datatype is wsme.types.HostRequest:
self.pass_request = argname
else:
self.arguments.append(FunctionArgument(argname, datatype,
mandatory, default))
class signature(object):
"""Decorator that specify the argument types of an exposed function.
:param return_type: Type of the value returned by the function
:param argN: Type of the Nth argument
:param body: If the function takes a final argument that is supposed to be
the request body by itself, its type.
:param status_code: HTTP return status code of the function.
    :param ignore_extra_args: Allow extra/unknown arguments (defaults to False)
Most of the time this decorator is not supposed to be used directly,
unless you are not using WSME on top of another framework.
    If an adapter is used, it will provide either a specialised version of this
    decorator, or a new decorator named @wsexpose that takes the same
    parameters (it will in addition expose the function, hence its name).
"""
def __init__(self, *types, **options):
self.return_type = types[0] if types else None
self.arg_types = []
if len(types) > 1:
self.arg_types.extend(types[1:])
if 'body' in options:
self.arg_types.append(options['body'])
self.wrap = options.pop('wrap', False)
self.options = options
def __call__(self, func):
argspec = getargspec(func)
if self.wrap:
func = wrapfunc(func)
fd = FunctionDefinition.get(func)
if fd.extra_options is not None:
raise ValueError("This function is already exposed")
fd.return_type = self.return_type
fd.set_options(**self.options)
if self.arg_types:
fd.set_arg_types(argspec, self.arg_types)
return func
sig = signature
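# Illustrative use of the signature decorator (sketch only; "Person" stands for
# a hypothetical user-defined wsme.types complex type):
#
#     @signature(Person, int, body=Person, status_code=201)
#     def update_person(self, person_id, data):
#         ...
#
# declares a function returning a Person, taking an int argument plus a Person
# decoded from the request body, and answering with HTTP status 201.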
class Response(object):
"""
Object to hold the "response" from a view function
"""
def __init__(self, obj, status_code=None, error=None,
return_type=wsme.types.Unset):
#: Store the result object from the view
self.obj = obj
#: Store an optional status_code
self.status_code = status_code
#: Return error details
        #: Must be a dictionary with the following keys: faultcode,
#: faultstring and an optional debuginfo
self.error = error
#: Return type
#: Type of the value returned by the function
#: If the return type is wsme.types.Unset it will be ignored
#: and the default return type will prevail.
self.return_type = return_type
def format_exception(excinfo, debug=False):
"""Extract informations that can be sent to the client."""
error = excinfo[1]
code = getattr(error, 'code', None)
if code and utils.is_valid_code(code) and utils.is_client_error(code):
faultstring = (error.faultstring if hasattr(error, 'faultstring')
else six.text_type(error))
r = dict(faultcode="Client",
faultstring=faultstring)
log.debug("Client-side error: %s" % r['faultstring'])
r['debuginfo'] = None
return r
else:
faultstring = six.text_type(error)
debuginfo = "\n".join(traceback.format_exception(*excinfo))
log.error('Server-side error: "%s". Detail: \n%s' % (
faultstring, debuginfo))
r = dict(faultcode="Server", faultstring=faultstring)
if debug:
r['debuginfo'] = debuginfo
else:
r['debuginfo'] = None
return r
|
mit
| 1,039,123,132,913,184,800
| 30.548523
| 79
| 0.604521
| false
| 4.193494
| false
| false
| false
|
session-id/pineapple-ai
|
policies.py
|
1
|
15317
|
from collections import defaultdict
import math
import numpy as np
import random
import feature_extractors
import game as g
import hand_optimizer
class BasePolicy(object):
'''
Base class for all policies.
'''
def __init__(self, game, args=None):
self.game = game
# Must return the optimal action as determined by the policy for the given state
def get_action(self, state):
raise NotImplementedError
class HumanPolicy(BasePolicy):
'''
A policy that asks for human input for every move.
'''
def get_action(self, state):
while True:
self.game.print_state(state)
try:
# Action input format is Pos1 Pos2 ... PosN
# Example: 0 0 1 2 0
inp = raw_input("Card placements (space separated, x for discard): ").upper()
inp = inp.split(' ')
draw = sorted(state.draw)
action = tuple(sorted((draw[i], int(inp[i])) for i in range(len(inp)) if inp[i] != 'X'))
new_state = self.game.get_random_outcome(state, action) # Check if valid
return action
except Exception as e:
print 'Invalid action: {}'.format(e)
class RandomPolicy(BasePolicy):
'''
Policy that chooses an action uniformly randomly from all possible actions.
'''
def get_action(self, state):
actions = self.game.actions(state)
return random.sample(actions, 1)[0]
class BaselinePolicy(BasePolicy):
'''
Baseline policy as described in project proposal.
Starts off by placing cards at or below top_cutoff on top row, those
at or below mid_cutoff in mid row, and the rest in the bottom row.
Then, for every 3 card draw, takes the 2 largest cards and slots them according
to the same rule when possible, otherwise slotting them as low as possible.
'''
def __init__(self, game, args):
super(BaselinePolicy, self).__init__(game, args)
self.top_cutoff = 4
self.mid_cutoff = 9
def value_to_slot(self, value):
if value <= self.top_cutoff:
return 0
elif value <= self.mid_cutoff:
return 1
else:
return 2
def get_action(self, state):
remaining_capacities = self.game.get_remaining_capacities(state)
# Sort in decreasing order
draw = sorted(state.draw, lambda x, y: g.card_value(y) - g.card_value(x))
assert len(draw) == 5 or len(draw) == 3
# Always take highest 2 cards
if len(draw) == 3:
draw = draw[:-1]
values = [g.card_value(card) for card in draw]
action = []
for i in range(len(values)):
desired_row = self.value_to_slot(values[i])
slotted = False
# Search downwards first for spots
for j in range(desired_row, 3):
if remaining_capacities[j] > 0:
action += [(draw[i], j)]
remaining_capacities[j] -= 1
slotted = True
break
if not slotted:
# Then search upwards
for j in range(desired_row-1, -1, -1):
if remaining_capacities[j] > 0:
action += [(draw[i], j)]
remaining_capacities[j] -= 1
slotted = True
break
if not slotted:
self.game.print_state(state)
raise RuntimeError("Couldn't slot card anywhere!")
return tuple(action)
class NeverBustPolicy(BasePolicy):
'''
A policy that never plays a move that makes the current hierarchy of cards a bust. The policy
randomly samples from all viable moves.
'''
def get_action(self, state):
actions = self.game.actions(state)
def eval_action(action):
outcome = self.game.sim_place_cards(state, action)
hands = [g.compute_hand(row) for row in outcome.rows]
return g.compare_hands(hands[1], hands[0]) >= 0 and g.compare_hands(hands[2], hands[1]) >= 0
evals = [(eval_action(action), action) for action in actions]
viable = [y for x, y in evals if x == max(evals)[0]]
return random.sample(viable, 1)[0]
class HeuristicNeverBustPolicy(BasePolicy):
'''
A policy that never plays a move that makes the current hierarchy of cards a bust. Within viable
moves, it attempts to greedily form hands to maximize the total sum of hand values as denoted by
a heuristic table.
Afterwards, it tries to maximize the flexibility of the playable hand, which is the sum of the
number of remaining slots per row raised to a preset power.
'''
def get_action(self, state):
actions = self.game.actions(state)
# Heuristic hand values
self.hand_values = {
'1': 0,
'2': 1,
'2+2': 2,
'3': 4,
'St': 8,
'Fl': 8,
'3+2': 12,
'4': 20,
'StFl': 30,
'RoFl': 50
}
def eval_action(action):
outcome = self.game.sim_place_cards(state, action)
hands = [g.compute_hand(row) for row in outcome.rows]
total_value = sum(self.hand_values[hand[0]] for hand in hands)
flexibility = sum([x ** 0.3 for x in self.game.get_remaining_capacities(outcome)])
return (g.compare_hands(hands[1], hands[0]) >= 0 and g.compare_hands(hands[2], hands[1]) >= 0,
total_value, flexibility)
evals = [(eval_action(action), action) for action in actions]
viable = [y for x, y in evals if x == max(evals)[0]]
return random.sample(viable, 1)[0]
class RLPolicy(BasePolicy):
'''
Base class for all RL policies with incorporate_feedback.
'''
def incorporate_feedback(self, state, action, new_state):
raise NotImplementedError
class QLearningPolicy(RLPolicy):
'''
A class that uses linear approximations of Q values built off of features to guide actions taken while
learning optimal linear weights through feedback incorporation.
'''
def __init__(self, game, args):
'''
Input:
game: Pineapple game instance
      feature_extractor: a function that extracts features from a given row. See feature_extractors.py for the interface.
exploration_prob: initial probability of exploration
'''
# Initialize step size, weight vector, etc
# Add field to indicate whether training - this determines whether epsilon greedy policy is used
super(QLearningPolicy, self).__init__(game, args)
self.feature_extractor = feature_extractors.name_to_extractor(args.feature_extractor)
self.distinguish_draws = args.distinguish_draws
self.exploration_prob = args.exploration_prob
self.train = True
self.step_size = args.step_size
self.weights = defaultdict(float)
feature_extractors.parse_probability_files()
def get_step_size(self):
return self.step_size
def get_features(self, state, action):
state = self.game.sim_place_cards(state, action)
num_to_draw = self.game.num_to_draw(state)
features = {}
for row_num, cards in enumerate(state.rows):
for k, v in self.feature_extractor(row_num, cards, state.remaining, num_to_draw).iteritems():
if self.distinguish_draws:
features[(num_to_draw, row_num, k)] = v
else:
features[(row_num, k)] = v
return features
def get_q(self, state, action):
# Find exact solution if about to finish
final_state = self.game.sim_place_cards(state, action)
if self.game.is_end(final_state):
return self.game.utility(final_state)
# Otherwise use linear approximation
features = self.get_features(state, action)
return sum(self.weights[key] * features[key] for key in features)
def get_action(self, state):
actions = self.game.actions(state)
if self.train and random.random() < self.exploration_prob:
return random.choice(actions)
return max((self.get_q(state, action), action) for action in actions)[1]
def incorporate_feedback(self, state, action, new_state):
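        # Linear function-approximation Q-learning update:
        #   w <- w - step_size * (Q_hat(s, a) - max_a' Q_hat(s', a')) * phi(s, a)
        # Intermediate rewards are implicitly zero; terminal new_states skip the
        # update (their value is computed exactly in get_q).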
if not self.train:
return
if self.game.is_end(new_state):
return
else:
prediction = self.get_q(state, action)
V_opt = max(self.get_q(new_state, a) for a in self.game.actions(new_state))
features = self.get_features(state, action)
deviation = prediction - V_opt
total_update = 0
for (name, value) in features.iteritems():
self.weights[name] -= self.get_step_size() * deviation * value
total_update += abs(self.get_step_size() * deviation * value)
# print "Total update:", total_update, "Deviation:", deviation, "len(features):", len(features) #,
class QLearningPolicy2(QLearningPolicy):
'''
A version of QLearningPolicy above that uses feature extractors that work on generic state, action
pairs.
'''
def __init__(self, game, args):
super(QLearningPolicy2, self).__init__(game, args)
self.feature_extractor = self.feature_extractor(self.game)
self.weights = self.feature_extractor.default_weights()
def get_features(self, state, action):
return self.feature_extractor.extract(state, action)
class OracleEvalPolicy(BasePolicy):
'''
A policy that uses the oracle best case royalties averaged over several draws to optimize the
current action.
'''
def __init__(self, game, args):
super(OracleEvalPolicy, self).__init__(game, args)
self.num_sims = args.num_oracle_sims
self.alpha = args.oracle_outcome_weighting
def get_action(self, state):
actions = self.game.actions(state)
def eval_action(action):
outcome = self.game.sim_place_cards(state, action)
values = []
if self.game.num_to_draw(outcome) == 0:
return self.game.utility(outcome)
num_to_draw_map = {12: 8, 9: 6, 6: 5, 3: 3}
# num_to_draw = int(math.ceil(self.game.num_to_draw(outcome) * 0.7))
num_to_draw = num_to_draw_map[self.game.num_to_draw(outcome)]
num_sims = self.num_sims
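            # The action value is estimated as a signed power mean of the
            # simulated best-case hand values; alpha controls how heavily
            # extreme outcomes are weighted (alpha = 1 is the plain mean).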
            for _ in xrange(self.num_sims):
                draw = random.sample(outcome.remaining, num_to_draw)
                values += [hand_optimizer.optimize_hand(outcome.rows, draw)]
            values = np.array(values)
            return (np.mean(np.sign(values) * np.abs(values) ** self.alpha)) ** (1. / self.alpha)
eval_actions = [(eval_action(action), action) for action in actions]
# print "Estimated value: {}".format(max(eval_actions)[0])
return max(eval_actions)[1]
class VarSimOracleEvalPolicy(BasePolicy):
'''
OracleEvalPolicy with a different exploration policy that explores the best actions in greater depth.
'''
def __init__(self, game, args):
super(VarSimOracleEvalPolicy, self).__init__(game, args)
self.num_sims = args.num_oracle_sims
def get_action(self, state):
actions = self.game.actions(state)
outcomes = [(self.game.sim_place_cards(state, action), action) for action in actions]
num_to_draw_map = {12: 8, 9: 6, 6: 5, 3: 3, 0: 0}
def interpolate_action(prev, outcome, num_sims, round_num):
values = []
num_to_draw = num_to_draw_map[self.game.num_to_draw(outcome)]
for _ in xrange(num_sims):
draw = random.sample(outcome.remaining, num_to_draw)
values += [hand_optimizer.optimize_hand(outcome.rows, draw)]
values = np.array(values)
return prev * (1 - 1. / round_num) + np.mean(values) / round_num
outcomes_with_histories = [(0., outcome, action) for outcome, action in outcomes]
round_num = 1.
while len(outcomes_with_histories) > 1:
outcomes_with_histories = [(interpolate_action(prev, outcome, self.num_sims, round_num), outcome, action)
for prev, outcome, action in outcomes_with_histories]
outcomes_with_histories.sort()
outcomes_with_histories = outcomes_with_histories[len(outcomes_with_histories) / 2:]
round_num += 1
return outcomes_with_histories[0][2]
class TDLearningPolicy(RLPolicy):
'''
A class that uses linear approximations of Value functions built off of features to guide actions taken while
learning optimal linear weights through feedback incorporation.
'''
def __init__(self, game, args):
'''
Input:
game: Pineapple game instance
      feature_extractor: a function that extracts features from a given row. See feature_extractors.py for the interface.
exploration_prob: initial probability of exploration
'''
# Initialize step size, weight vector, etc
# Add field to indicate whether training - this determines whether epsilon greedy policy is used
super(TDLearningPolicy, self).__init__(game, args)
self.feature_extractor = feature_extractors.name_to_extractor(args.feature_extractor)
self.exploration_prob = args.exploration_prob
self.train = True
self.step_size = args.step_size
self.weights = defaultdict(float)
feature_extractors.parse_probability_files()
def get_step_size(self):
return self.step_size
def get_features(self, state, action):
pass
def get_q(self, state, action):
pass
def get_action(self, state):
pass
def incorporate_feedback(self, state, action, new_state):
pass
'''
Adversarial capable policies below
'''
class AdvVarSimOracleEvalPolicy(BasePolicy):
'''
Adversarial version of VarSimOracleEvalPolicy
'''
def __init__(self, game, args):
super(AdvVarSimOracleEvalPolicy, self).__init__(game, args)
self.num_sims = args.num_oracle_sims
self.num_opp_sims = args.num_opp_sims
def get_action(self, state):
actions = self.game.actions(state)
outcomes = [(self.game.sim_place_cards(state, action), action) for action in actions]
num_to_draw = self.game.num_to_draw(outcomes[0][0])
table = {0: 17, 5: 12, 7: 9, 9: 6, 11: 3, 13: 0}
opp_num_to_draw = table[sum(len(x) for x in state.opp_rows)]
opp_rows = state.opp_rows
# TODO: Better adversarial fantasyland bonus calculation
opp_num_to_draw_map = {12: 8, 9: 6, 6: 5, 3: 3, 0: 0}
if opp_num_to_draw <= 9:
opp_combos = []
if opp_num_to_draw > 0:
num_to_draw_sim = opp_num_to_draw_map[opp_num_to_draw]
for _ in xrange(self.num_opp_sims):
# state.remaining and outcome.remaining for any outcome should be equal
draw = random.sample(state.remaining, num_to_draw_sim)
# Assume opponent just plays to maximize their royalties
value, combo = hand_optimizer.optimize_hand(opp_rows, draw, return_combo=True)
opp_combos += [combo]
else:
opp_combos = [[g.compute_hand(cards) for cards in opp_rows]]
value_fn = lambda rows, draw: hand_optimizer.optimize_hand_adv(rows, draw, opp_combos)
else:
value_fn = lambda rows, draw: hand_optimizer.optimize_hand(rows, draw)
num_to_draw_map = {12: 8, 9: 6, 6: 5, 3: 3, 0: 0}
def interpolate_action(prev, outcome, num_sims, round_num):
values = []
num_to_draw_sim = num_to_draw_map[num_to_draw]
for _ in xrange(num_sims):
draw = random.sample(outcome.remaining, num_to_draw_sim)
values += [value_fn(outcome.rows, draw)]
values = np.array(values)
return prev * (1 - 1. / round_num) + np.mean(values) / round_num
outcomes_with_histories = [(0., outcome, action) for outcome, action in outcomes]
round_num = 1.
while len(outcomes_with_histories) > 1:
outcomes_with_histories = [(interpolate_action(prev, outcome, self.num_sims, round_num), outcome, action)
for prev, outcome, action in outcomes_with_histories]
outcomes_with_histories.sort()
outcomes_with_histories = outcomes_with_histories[len(outcomes_with_histories) / 2:]
round_num += 1
return outcomes_with_histories[0][2]
|
mit
| -9,055,318,519,636,846,000
| 35.731415
| 116
| 0.65757
| false
| 3.466953
| false
| false
| false
|
Passtechsoft/TPEAlpGen
|
blender/release/scripts/modules/bpy_types.py
|
1
|
24184
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
from _bpy import types as bpy_types
import _bpy
StructRNA = bpy_types.bpy_struct
StructMetaPropGroup = bpy_types.bpy_struct_meta_idprop
# StructRNA = bpy_types.Struct
bpy_types.BlendDataLibraries.load = _bpy._library_load
class Context(StructRNA):
__slots__ = ()
def copy(self):
from types import BuiltinMethodType
new_context = {}
generic_attrs = (
*StructRNA.__dict__.keys(),
"bl_rna", "rna_type", "copy",
)
for attr in dir(self):
if not (attr.startswith("_") or attr in generic_attrs):
value = getattr(self, attr)
if type(value) != BuiltinMethodType:
new_context[attr] = value
return new_context
class Library(bpy_types.ID):
__slots__ = ()
@property
def users_id(self):
"""ID data blocks which use this library"""
import bpy
# See: readblenentry.c, IDTYPE_FLAGS_ISLINKABLE,
# we could make this an attribute in rna.
attr_links = ("actions", "armatures", "brushes", "cameras",
"curves", "grease_pencil", "groups", "images",
"lamps", "lattices", "materials", "metaballs",
"meshes", "node_groups", "objects", "scenes",
"sounds", "speakers", "textures", "texts",
"fonts", "worlds")
return tuple(id_block
for attr in attr_links
for id_block in getattr(bpy.data, attr)
if id_block.library == self)
class Texture(bpy_types.ID):
__slots__ = ()
@property
def users_material(self):
"""Materials that use this texture"""
import bpy
return tuple(mat for mat in bpy.data.materials
if self in [slot.texture
for slot in mat.texture_slots
if slot]
)
@property
def users_object_modifier(self):
"""Object modifiers that use this texture"""
import bpy
return tuple(obj for obj in bpy.data.objects if
self in [mod.texture
for mod in obj.modifiers
if mod.type == 'DISPLACE']
)
class Group(bpy_types.ID):
__slots__ = ()
@property
def users_dupli_group(self):
"""The dupli group this group is used in"""
import bpy
return tuple(obj for obj in bpy.data.objects
if self == obj.dupli_group)
class Object(bpy_types.ID):
__slots__ = ()
@property
def children(self):
"""All the children of this object"""
import bpy
return tuple(child for child in bpy.data.objects
if child.parent == self)
@property
def users_group(self):
"""The groups this object is in"""
import bpy
return tuple(group for group in bpy.data.groups
if self in group.objects[:])
@property
def users_scene(self):
"""The scenes this object is in"""
import bpy
return tuple(scene for scene in bpy.data.scenes
if self in scene.objects[:])
class WindowManager(bpy_types.ID):
__slots__ = ()
def popup_menu(self, draw_func, title="", icon='NONE'):
import bpy
popup = self.pupmenu_begin__internal(title, icon)
try:
draw_func(popup, bpy.context)
finally:
self.pupmenu_end__internal(popup)
def popup_menu_pie(self, event, draw_func, title="", icon='NONE'):
import bpy
pie = self.piemenu_begin__internal(title, icon, event)
if pie:
try:
draw_func(pie, bpy.context)
finally:
self.piemenu_end__internal(pie)
class _GenericBone:
"""
functions for bones, common between Armature/Pose/Edit bones.
internal subclassing use only.
"""
__slots__ = ()
def translate(self, vec):
"""Utility function to add *vec* to the head and tail of this bone"""
self.head += vec
self.tail += vec
def parent_index(self, parent_test):
"""
The same as 'bone in other_bone.parent_recursive'
        but saves generating a list.
"""
# use the name so different types can be tested.
name = parent_test.name
parent = self.parent
i = 1
while parent:
if parent.name == name:
return i
parent = parent.parent
i += 1
return 0
@property
def x_axis(self):
""" Vector pointing down the x-axis of the bone.
"""
from mathutils import Vector
return self.matrix.to_3x3() * Vector((1.0, 0.0, 0.0))
@property
def y_axis(self):
""" Vector pointing down the y-axis of the bone.
"""
from mathutils import Vector
return self.matrix.to_3x3() * Vector((0.0, 1.0, 0.0))
@property
def z_axis(self):
""" Vector pointing down the z-axis of the bone.
"""
from mathutils import Vector
return self.matrix.to_3x3() * Vector((0.0, 0.0, 1.0))
@property
def basename(self):
"""The name of this bone before any '.' character"""
#return self.name.rsplit(".", 1)[0]
return self.name.split(".")[0]
@property
def parent_recursive(self):
"""A list of parents, starting with the immediate parent"""
parent_list = []
parent = self.parent
while parent:
if parent:
parent_list.append(parent)
parent = parent.parent
return parent_list
@property
def center(self):
"""The midpoint between the head and the tail."""
return (self.head + self.tail) * 0.5
@property
def length(self):
"""
The distance from head to tail,
when set the head is moved to fit the length.
"""
return self.vector.length
@length.setter
def length(self, value):
self.tail = self.head + ((self.tail - self.head).normalized() * value)
@property
def vector(self):
"""
The direction this bone is pointing.
Utility function for (tail - head)
"""
return (self.tail - self.head)
@property
def children(self):
"""A list of all the bones children."""
return [child for child in self._other_bones if child.parent == self]
@property
def children_recursive(self):
"""A list of all children from this bone."""
bones_children = []
for bone in self._other_bones:
index = bone.parent_index(self)
if index:
bones_children.append((index, bone))
# sort by distance to parent
bones_children.sort(key=lambda bone_pair: bone_pair[0])
return [bone for index, bone in bones_children]
@property
def children_recursive_basename(self):
"""
Returns a chain of children with the same base name as this bone.
Only direct chains are supported, forks caused by multiple children
with matching base names will terminate the function
and not be returned.
"""
basename = self.basename
chain = []
child = self
while True:
children = child.children
children_basename = []
for child in children:
if basename == child.basename:
children_basename.append(child)
if len(children_basename) == 1:
child = children_basename[0]
chain.append(child)
else:
if children_basename:
print("multiple basenames found, "
"this is probably not what you want!",
self.name, children_basename)
break
return chain
@property
def _other_bones(self):
id_data = self.id_data
id_data_type = type(id_data)
if id_data_type == bpy_types.Object:
bones = id_data.pose.bones
elif id_data_type == bpy_types.Armature:
bones = id_data.edit_bones
if not bones: # not in edit mode
bones = id_data.bones
return bones
class PoseBone(StructRNA, _GenericBone, metaclass=StructMetaPropGroup):
__slots__ = ()
@property
def children(self):
obj = self.id_data
pbones = obj.pose.bones
self_bone = self.bone
return tuple(pbones[bone.name] for bone in obj.data.bones
if bone.parent == self_bone)
class Bone(StructRNA, _GenericBone, metaclass=StructMetaPropGroup):
__slots__ = ()
class EditBone(StructRNA, _GenericBone, metaclass=StructMetaPropGroup):
__slots__ = ()
def align_orientation(self, other):
"""
        Align this bone to another by moving its tail and setting its roll;
        the length of the other bone is not used.
"""
vec = other.vector.normalized() * self.length
self.tail = self.head + vec
self.roll = other.roll
def transform(self, matrix, scale=True, roll=True):
"""
        Transform the bone's head, tail, roll and envelope
(when the matrix has a scale component).
:arg matrix: 3x3 or 4x4 transformation matrix.
:type matrix: :class:`mathutils.Matrix`
:arg scale: Scale the bone envelope by the matrix.
:type scale: bool
:arg roll:
Correct the roll to point in the same relative
direction to the head and tail.
:type roll: bool
"""
from mathutils import Vector
z_vec = self.matrix.to_3x3() * Vector((0.0, 0.0, 1.0))
self.tail = matrix * self.tail
self.head = matrix * self.head
if scale:
scalar = matrix.median_scale
self.head_radius *= scalar
self.tail_radius *= scalar
if roll:
self.align_roll(matrix * z_vec)
def ord_ind(i1, i2):
if i1 < i2:
return i1, i2
return i2, i1
class Mesh(bpy_types.ID):
__slots__ = ()
def from_pydata(self, vertices, edges, faces):
"""
Make a mesh from a list of vertices/edges/faces
Until we have a nicer way to make geometry, use this.
:arg vertices:
float triplets each representing (X, Y, Z)
eg: [(0.0, 1.0, 0.5), ...].
:type vertices: iterable object
:arg edges:
int pairs, each pair contains two indices to the
*vertices* argument. eg: [(1, 2), ...]
:type edges: iterable object
:arg faces:
            iterator of faces, each face contains three or more indices to
the *vertices* argument. eg: [(5, 6, 8, 9), (1, 2, 3), ...]
:type faces: iterable object
"""
self.vertices.add(len(vertices))
self.edges.add(len(edges))
self.loops.add(sum((len(f) for f in faces)))
self.polygons.add(len(faces))
vertices_flat = [f for v in vertices for f in v]
self.vertices.foreach_set("co", vertices_flat)
del vertices_flat
edges_flat = [i for e in edges for i in e]
self.edges.foreach_set("vertices", edges_flat)
del edges_flat
# this is different in bmesh
loop_index = 0
for i, p in enumerate(self.polygons):
f = faces[i]
loop_len = len(f)
p.loop_start = loop_index
p.loop_total = loop_len
p.vertices = f
loop_index += loop_len
# if no edges - calculate them
if faces and (not edges):
self.update(calc_edges=True)
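    # Illustrative from_pydata sketch (the mesh name is hypothetical):
    #   mesh = bpy.data.meshes.new("triangle")
    #   mesh.from_pydata([(0, 0, 0), (1, 0, 0), (1, 1, 0)], [], [(0, 1, 2)])
    #   mesh.update()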
@property
def edge_keys(self):
return [ed.key for ed in self.edges]
class MeshEdge(StructRNA):
__slots__ = ()
@property
def key(self):
return ord_ind(*tuple(self.vertices))
class MeshTessFace(StructRNA):
__slots__ = ()
@property
def center(self):
"""The midpoint of the face."""
face_verts = self.vertices[:]
mesh_verts = self.id_data.vertices
if len(face_verts) == 3:
return (mesh_verts[face_verts[0]].co +
mesh_verts[face_verts[1]].co +
mesh_verts[face_verts[2]].co
) / 3.0
else:
return (mesh_verts[face_verts[0]].co +
mesh_verts[face_verts[1]].co +
mesh_verts[face_verts[2]].co +
mesh_verts[face_verts[3]].co
) / 4.0
@property
def edge_keys(self):
verts = self.vertices[:]
if len(verts) == 3:
return (ord_ind(verts[0], verts[1]),
ord_ind(verts[1], verts[2]),
ord_ind(verts[2], verts[0]),
)
else:
return (ord_ind(verts[0], verts[1]),
ord_ind(verts[1], verts[2]),
ord_ind(verts[2], verts[3]),
ord_ind(verts[3], verts[0]),
)
class MeshPolygon(StructRNA):
__slots__ = ()
@property
def edge_keys(self):
verts = self.vertices[:]
vlen = len(self.vertices)
return [ord_ind(verts[i], verts[(i + 1) % vlen]) for i in range(vlen)]
@property
def loop_indices(self):
start = self.loop_start
end = start + self.loop_total
return range(start, end)
class Text(bpy_types.ID):
__slots__ = ()
def as_string(self):
"""Return the text as a string."""
return "\n".join(line.body for line in self.lines)
def from_string(self, string):
"""Replace text with this string."""
self.clear()
self.write(string)
@property
def users_logic(self):
"""Logic bricks that use this text"""
import bpy
return tuple(obj for obj in bpy.data.objects
if self in [cont.text for cont in obj.game.controllers
if cont.type == 'PYTHON']
)
# values are module: [(cls, path, line), ...]
TypeMap = {}
class Sound(bpy_types.ID):
__slots__ = ()
@property
def factory(self):
"""The aud.Factory object of the sound."""
import aud
return aud._sound_from_pointer(self.as_pointer())
class RNAMeta(type):
def __new__(cls, name, bases, classdict, **args):
result = type.__new__(cls, name, bases, classdict)
if bases and bases[0] is not StructRNA:
from _weakref import ref as ref
module = result.__module__
# first part of packages only
if "." in module:
module = module[:module.index(".")]
TypeMap.setdefault(module, []).append(ref(result))
return result
@property
def is_registered(cls):
return "bl_rna" in cls.__dict__
class OrderedDictMini(dict):
def __init__(self, *args):
self.order = []
dict.__init__(self, args)
def __setitem__(self, key, val):
dict.__setitem__(self, key, val)
if key not in self.order:
self.order.append(key)
def __delitem__(self, key):
dict.__delitem__(self, key)
self.order.remove(key)
class RNAMetaPropGroup(StructMetaPropGroup, RNAMeta):
pass
class OrderedMeta(RNAMeta):
def __init__(cls, name, bases, attributes):
if attributes.__class__ is OrderedDictMini:
cls.order = attributes.order
def __prepare__(name, bases, **kwargs):
return OrderedDictMini() # collections.OrderedDict()
# Only defined so operator members can be used by accessing self.order;
# with doc generation 'self.properties.bl_rna.properties' can fail
class Operator(StructRNA, metaclass=OrderedMeta):
__slots__ = ()
def __getattribute__(self, attr):
properties = StructRNA.path_resolve(self, "properties")
bl_rna = getattr(properties, "bl_rna", None)
if (bl_rna is not None) and (attr in bl_rna.properties):
return getattr(properties, attr)
return super().__getattribute__(attr)
def __setattr__(self, attr, value):
properties = StructRNA.path_resolve(self, "properties")
bl_rna = getattr(properties, "bl_rna", None)
if (bl_rna is not None) and (attr in bl_rna.properties):
return setattr(properties, attr, value)
return super().__setattr__(attr, value)
def __delattr__(self, attr):
properties = StructRNA.path_resolve(self, "properties")
bl_rna = getattr(properties, "bl_rna", None)
if (bl_rna is not None) and (attr in bl_rna.properties):
return delattr(properties, attr)
return super().__delattr__(attr)
def as_keywords(self, ignore=()):
"""Return a copy of the properties as a dictionary"""
ignore = ignore + ("rna_type",)
return {attr: getattr(self, attr)
for attr in self.properties.rna_type.properties.keys()
if attr not in ignore}
class Macro(StructRNA, metaclass=OrderedMeta):
# bpy_types is imported before ops is defined
# so we have to do a local import on each run
__slots__ = ()
@classmethod
def define(self, opname):
from _bpy import ops
return ops.macro_define(self, opname)
class PropertyGroup(StructRNA, metaclass=RNAMetaPropGroup):
__slots__ = ()
class RenderEngine(StructRNA, metaclass=RNAMeta):
__slots__ = ()
class KeyingSetInfo(StructRNA, metaclass=RNAMeta):
__slots__ = ()
class AddonPreferences(StructRNA, metaclass=RNAMeta):
__slots__ = ()
class _GenericUI:
__slots__ = ()
@classmethod
def _dyn_ui_initialize(cls):
draw_funcs = getattr(cls.draw, "_draw_funcs", None)
if draw_funcs is None:
def draw_ls(self, context):
# ensure menus always get default context
operator_context_default = self.layout.operator_context
for func in draw_ls._draw_funcs:
# so bad menu functions don't stop
# the entire menu from drawing
try:
func(self, context)
except:
import traceback
traceback.print_exc()
self.layout.operator_context = operator_context_default
draw_funcs = draw_ls._draw_funcs = [cls.draw]
cls.draw = draw_ls
return draw_funcs
@classmethod
def append(cls, draw_func):
"""
Append a draw function to this menu,
takes the same arguments as the menus draw function
"""
draw_funcs = cls._dyn_ui_initialize()
draw_funcs.append(draw_func)
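    # Typical usage sketch (menu class and function names are only examples):
    #   def draw_extra(self, context):
    #       self.layout.operator("wm.quit_blender")
    #   bpy.types.INFO_MT_file.append(draw_extra)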
@classmethod
def prepend(cls, draw_func):
"""
Prepend a draw function to this menu, takes the same arguments as
the menus draw function
"""
draw_funcs = cls._dyn_ui_initialize()
draw_funcs.insert(0, draw_func)
@classmethod
def remove(cls, draw_func):
"""Remove a draw function that has been added to this menu"""
draw_funcs = cls._dyn_ui_initialize()
try:
draw_funcs.remove(draw_func)
except ValueError:
pass
class Panel(StructRNA, _GenericUI, metaclass=RNAMeta):
__slots__ = ()
class UIList(StructRNA, _GenericUI, metaclass=RNAMeta):
__slots__ = ()
class Header(StructRNA, _GenericUI, metaclass=RNAMeta):
__slots__ = ()
class Menu(StructRNA, _GenericUI, metaclass=RNAMeta):
__slots__ = ()
def path_menu(self, searchpaths, operator,
props_default=None, filter_ext=None):
layout = self.layout
# hard coded to set the operator's 'filepath' to the filename.
import os
import bpy.utils
if not searchpaths:
layout.label("* Missing Paths *")
# collect paths
files = []
for directory in searchpaths:
files.extend([(f, os.path.join(directory, f))
for f in os.listdir(directory)
if (not f.startswith("."))
if ((filter_ext is None) or
(filter_ext(os.path.splitext(f)[1])))
])
files.sort()
for f, filepath in files:
props = layout.operator(operator,
text=bpy.path.display_name(f),
translate=False)
if props_default is not None:
for attr, value in props_default.items():
setattr(props, attr, value)
props.filepath = filepath
if operator == "script.execute_preset":
props.menu_idname = self.bl_idname
def draw_preset(self, context):
"""
Define these on the subclass:
- preset_operator (string)
- preset_subdir (string)
Optionally:
- preset_extensions (set of strings)
- preset_operator_defaults (dict of keyword args)
"""
import bpy
ext_valid = getattr(self, "preset_extensions", {".py", ".xml"})
props_default = getattr(self, "preset_operator_defaults", None)
self.path_menu(bpy.utils.preset_paths(self.preset_subdir),
self.preset_operator,
props_default=props_default,
filter_ext=lambda ext: ext.lower() in ext_valid)
@classmethod
def draw_collapsible(cls, context, layout):
# helper function for (optionally) collapsed header menus
# only usable within headers
if context.area.show_menus:
cls.draw_menus(layout, context)
else:
layout.menu(cls.__name__, icon='COLLAPSEMENU')
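# Hedged illustration (editor's addition): the subclass contract described in
# Menu.draw_preset above, with placeholder identifiers and preset paths.
#
#     class EXAMPLE_MT_presets(Menu):
#         bl_label = "Example Presets"
#         preset_subdir = "example/presets"          # searched via bpy.utils.preset_paths()
#         preset_operator = "script.execute_preset"  # operator run on the chosen file
#         draw = Menu.draw_preset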
class NodeTree(bpy_types.ID, metaclass=RNAMetaPropGroup):
__slots__ = ()
class Node(StructRNA, metaclass=RNAMetaPropGroup):
__slots__ = ()
@classmethod
def poll(cls, ntree):
return True
class NodeInternal(Node):
__slots__ = ()
class NodeSocket(StructRNA, metaclass=RNAMetaPropGroup):
__slots__ = ()
@property
def links(self):
"""List of node links from or to this socket"""
return tuple(link for link in self.id_data.links
if (link.from_socket == self or
link.to_socket == self))
class NodeSocketInterface(StructRNA, metaclass=RNAMetaPropGroup):
__slots__ = ()
# These are intermediate subclasses, need a bpy type too
class CompositorNode(NodeInternal):
__slots__ = ()
@classmethod
def poll(cls, ntree):
return ntree.bl_idname == 'CompositorNodeTree'
def update(self):
self.tag_need_exec()
class ShaderNode(NodeInternal):
__slots__ = ()
@classmethod
def poll(cls, ntree):
return ntree.bl_idname == 'ShaderNodeTree'
class TextureNode(NodeInternal):
__slots__ = ()
@classmethod
def poll(cls, ntree):
return ntree.bl_idname == 'TextureNodeTree'
|
gpl-3.0
| 5,162,792,521,147,929,000
| 27.72209
| 78
| 0.556732
| false
| 4.050243
| false
| false
| false
|
arnavd96/Cinemiezer
|
myvenv/lib/python3.4/site-packages/music21/analysis/metrical.py
|
1
|
6733
|
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: metrical.py
# Purpose: Tools for metrical analysis
#
# Authors: Christopher Ariza
# Michael Scott Cuthbert
#
# Copyright: Copyright © 2009-2012 Michael Scott Cuthbert and the music21 Project
# License: LGPL or BSD, see license.txt
#-------------------------------------------------------------------------------
'''
Various tools and utilities for doing metrical or rhythmic analysis.
See the chapter :ref:`User's Guide Chapter 14: Time Signatures <usersGuide_14_timeSignatures>`
for more information on defining
metrical structures in music21.
'''
from music21 import stream
import copy
import unittest
from music21 import environment
_MOD = "analysis.metrical.py"
environLocal = environment.Environment(_MOD)
def labelBeatDepth(streamIn):
r'''
Modify a Stream in place by annotating metrical analysis symbols.
This assumes that the Stream is already partitioned into Measures.
>>> s = stream.Stream()
>>> ts = meter.TimeSignature('4/4')
>>> s.insert(0, ts)
>>> n = note.Note(type='eighth')
>>> s.repeatAppend(n, 8)
>>> s.makeMeasures(inPlace = True)
>>> post = analysis.metrical.labelBeatDepth(s)
>>> sOut = []
>>> for n in s.flat.notes:
... stars = "".join([l.text for l in n.lyrics])
... sOut.append("{0:8s} {1}".format(n.beatStr, stars))
>>> print("\n".join(sOut))
1        ****
1 1/2    *
2        **
2 1/2    *
3        ***
3 1/2    *
4        **
4 1/2    *
'''
for m in streamIn.getElementsByClass(stream.Measure):
# this will search contexts
ts = m.getTimeSignatures(sortByCreationTime=False)[0]
# need to make a copy otherwise the .beat/.beatStr values will be messed up (1/4 the normal)
tsTemp = copy.deepcopy(ts)
tsTemp.beatSequence.subdivideNestedHierarchy(depth=3)
for n in m.notesAndRests:
if n.tie != None:
environLocal.printDebug(['note, tie', n, n.tie, n.tie.type])
if n.tie.type == 'stop':
continue
for unused_i in range(tsTemp.getBeatDepth(n.offset)):
n.addLyric('*')
return streamIn
def thomassenMelodicAccent(streamIn):
'''
adds an attribute melodicAccent to each note of a :class:`~music21.stream.Stream` object
according to the method postulated in Joseph M. Thomassen, "Melodic accent: Experiments and
a tentative model," ''Journal of the Acoustical Society of America'', Vol. 71, No. 6 (1982) pp.
1598-1605; with, Erratum, ''Journal of the Acoustical Society of America'', Vol. 73,
No. 1 (1983) p.373, and in David Huron and Matthew Royal,
"What is melodic accent? Converging evidence
from musical practice." ''Music Perception'', Vol. 13, No. 4 (1996) pp. 489-516.
Similar to the humdrum melac_ tool.
.. _melac: http://www.music-cog.ohio-state.edu/Humdrum/commands/melac.html
Takes in a Stream of :class:`~music21.note.Note` objects (use `.flat.notes` to get it, or
better `.flat.getElementsByClass('Note')` to filter out chords) and adds the attribute to
each. Note that Huron and Royal's work suggests that melodic accent has a correlation
with metrical accent only for solo works/passages; even treble passages do not have a
strong correlation. (Gregorian chants were found to have a strong ''negative'' correlation
between melodic accent and syllable onsets)
Following Huron's lead, we assign a `melodicAccent` of 1.0 to the first note in a piece
and take the accent marker of the first interval alone to be the accent of the second note and
of the last interval alone to be the accent of the last note.
Example from Thomassen, figure 5:
>>> s = converter.parse('tinynotation: 7/4 c4 c c d e d d')
>>> analysis.metrical.thomassenMelodicAccent(s.flat.notes)
>>> for n in s.flat.notes:
... (n.pitch.nameWithOctave, n.melodicAccent)
('C4', 1.0)
('C4', 0.0)
('C4', 0.0)
('D4', 0.33)
('E4', 0.5561)
('D4', 0.17)
('D4', 0.0)
'''
# we use .ps instead of Intervals for speed, since
# we just need perceived contours
maxNotes = len(streamIn) - 1
p2Accent = 1.0
for i,n in enumerate(streamIn):
if i == 0:
n.melodicAccent = 1.0
continue
elif i == maxNotes:
n.melodicAccent = p2Accent
continue
lastPs = streamIn[i-1].pitch.ps
thisPs = n.pitch.ps
nextPs = streamIn[i+1].pitch.ps
if lastPs == thisPs and thisPs == nextPs:
thisAccent = 0.0
nextAccent = 0.0
elif lastPs != thisPs and thisPs == nextPs:
thisAccent = 1.0
nextAccent = 0.0
elif lastPs == thisPs and thisPs != nextPs:
thisAccent = 0.0
nextAccent = 1.0
elif lastPs < thisPs and thisPs > nextPs:
thisAccent = 0.83
nextAccent = 0.17
elif lastPs > thisPs and thisPs < nextPs:
thisAccent = 0.71
nextAccent = 0.29
elif lastPs < thisPs and thisPs < nextPs:
thisAccent = 0.33
nextAccent = 0.67
elif lastPs > thisPs and thisPs > nextPs:
thisAccent = 0.5
nextAccent = 0.5
n.melodicAccent = thisAccent * p2Accent
p2Accent = nextAccent
#-------------------------------------------------------------------------------
class TestExternal(unittest.TestCase):
def runTest(self):
pass
def testSingle(self):
'''Need to test direct meter creation w/o stream
'''
from music21 import note, meter
s = stream.Stream()
ts = meter.TimeSignature('4/4')
s.append(ts)
n = note.Note()
n.quarterLength = 1
s.repeatAppend(n, 4)
n = note.Note()
n.quarterLength = .5
s.repeatAppend(n, 8)
s = s.makeMeasures()
s = labelBeatDepth(s)
s.show()
class Test(unittest.TestCase):
'''Unit tests
'''
def runTest(self):
pass
def setUp(self):
pass
def testDoNothing(self):
pass
#-------------------------------------------------------------------------------
# define presented order in documentation
_DOC_ORDER = [labelBeatDepth]
if __name__ == "__main__":
import music21
music21.mainTest(Test) #, TestExternal)
#------------------------------------------------------------------------------
# eof
|
mit
| 3,405,661,291,353,366,000
| 30.457944
| 100
| 0.559863
| false
| 3.638919
| true
| false
| false
|
frutiger/pygit2
|
docs/conf.py
|
1
|
7971
|
# -*- coding: utf-8 -*-
#
# pygit2 documentation build configuration file, created by
# sphinx-quickstart on Sun Jan 6 09:55:26 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os, platform
from string import digits
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
version_string = sys.platform.rstrip(digits) + "-" + os.uname()[4] + "-" + ".".join(platform.python_version_tuple()[0:2])
sys.path.insert(0, os.path.abspath('../build/lib.' + version_string))
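# Hedged illustration (editor's addition): on a 64-bit Linux machine running
# CPython 3.4, version_string resolves to something like 'linux-x86_64-3.4',
# so the line above effectively does
#   sys.path.insert(0, os.path.abspath('../build/lib.linux-x86_64-3.4'))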
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pygit2'
copyright = u'2010-2014 The pygit2 contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.22'
# The full version, including alpha/beta/rc tags.
release = '0.22.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_themes", ]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pygit2doc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'pygit2.tex', u'pygit2 Documentation',
u'J. David Ibáñez', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pygit2', u'pygit2 Documentation',
[u'J. David Ibáñez'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pygit2', u'pygit2 Documentation',
u'J. David Ibáñez', 'pygit2', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
gpl-2.0
| -3,619,021,428,859,344,400
| 31.643443
| 121
| 0.702825
| false
| 3.721963
| true
| false
| false
|
AlienCowEatCake/ImageViewer
|
src/ThirdParty/Exiv2/exiv2-0.27.3-Source/tests/bugfixes/github/test_CVE_2018_11531.py
|
3
|
1094
|
# -*- coding: utf-8 -*-
from system_tests import CaseMeta, path, check_no_ASAN_UBSAN_errors
class TestCvePoC(metaclass=CaseMeta):
"""
Regression test for the bug described in:
https://github.com/Exiv2/exiv2/issues/283
"""
url = "https://github.com/Exiv2/exiv2/issues/283"
cve_url = "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-11531"
def check_no_ASAN_UBSAN_errors(self, i, command, got_stderr, expected_stderr):
""" Override of system_tests.check_no_ASAN_UBSAN_errors for this particular test case.
Here we also want to check that the last two lines of got_stderr match the expected_stderr
"""
check_no_ASAN_UBSAN_errors(self, i, command, got_stderr, expected_stderr)
self.assertListEqual(expected_stderr.splitlines(), got_stderr.splitlines()[-2:])
filename = path("$data_path/pocIssue283.jpg")
commands = ["$exiv2 $filename"]
stdout = [""]
stderr = [
"""$exiv2_exception_message $filename:
$kerCorruptedMetadata
"""]
compare_stderr = check_no_ASAN_UBSAN_errors
retval = [1]
|
gpl-3.0
| 534,201,225,946,510,660
| 34.290323
| 97
| 0.669104
| false
| 3.180233
| false
| false
| false
|
litecoinz-project/litecoinz
|
qa/rpc-tests/test_framework/script.py
|
2
|
23865
|
#
# script.py
#
# This file is modified from python-bitcoinlib.
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Scripts
Functionality to build scripts, as well as SignatureHash().
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from test_framework.mininode import CTransaction, CTxOut, hash256
import sys
bchr = chr
bord = ord
if sys.version > '3':
long = int
bchr = lambda x: bytes([x])
bord = lambda x: x
import struct
import binascii
from test_framework import bignum
MAX_SCRIPT_SIZE = 10000
MAX_SCRIPT_ELEMENT_SIZE = 520
MAX_SCRIPT_OPCODES = 201
OPCODE_NAMES = {}
_opcode_instances = []
class CScriptOp(int):
"""A single script opcode"""
__slots__ = []
@staticmethod
def encode_op_pushdata(d):
"""Encode a PUSHDATA op, returning bytes"""
if len(d) < 0x4c:
return b'' + bchr(len(d)) + d # OP_PUSHDATA
elif len(d) <= 0xff:
return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1
elif len(d) <= 0xffff:
return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
elif len(d) <= 0xffffffff:
return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
else:
raise ValueError("Data too long to encode in a PUSHDATA op")
@staticmethod
def encode_op_n(n):
"""Encode a small integer op, returning an opcode"""
if not (0 <= n <= 16):
raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
if n == 0:
return OP_0
else:
return CScriptOp(OP_1 + n-1)
def decode_op_n(self):
"""Decode a small integer opcode, returning an integer"""
if self == OP_0:
return 0
if not (self == OP_0 or OP_1 <= self <= OP_16):
raise ValueError('op %r is not an OP_N' % self)
return int(self - OP_1+1)
def is_small_int(self):
"""Return true if the op pushes a small integer to the stack"""
if 0x51 <= self <= 0x60 or self == 0:
return True
else:
return False
def __str__(self):
return repr(self)
def __repr__(self):
if self in OPCODE_NAMES:
return OPCODE_NAMES[self]
else:
return 'CScriptOp(0x%x)' % self
def __new__(cls, n):
try:
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
_opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
for n in range(0xff+1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE=OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_NOP2 = CScriptOp(0xb1)
OP_NOP3 = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# template matching params
OP_SMALLINTEGER = CScriptOp(0xfa)
OP_PUBKEYS = CScriptOp(0xfb)
OP_PUBKEYHASH = CScriptOp(0xfd)
OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
VALID_OPCODES = {
OP_1NEGATE,
OP_RESERVED,
OP_1,
OP_2,
OP_3,
OP_4,
OP_5,
OP_6,
OP_7,
OP_8,
OP_9,
OP_10,
OP_11,
OP_12,
OP_13,
OP_14,
OP_15,
OP_16,
OP_NOP,
OP_VER,
OP_IF,
OP_NOTIF,
OP_VERIF,
OP_VERNOTIF,
OP_ELSE,
OP_ENDIF,
OP_VERIFY,
OP_RETURN,
OP_TOALTSTACK,
OP_FROMALTSTACK,
OP_2DROP,
OP_2DUP,
OP_3DUP,
OP_2OVER,
OP_2ROT,
OP_2SWAP,
OP_IFDUP,
OP_DEPTH,
OP_DROP,
OP_DUP,
OP_NIP,
OP_OVER,
OP_PICK,
OP_ROLL,
OP_ROT,
OP_SWAP,
OP_TUCK,
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_SIZE,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_EQUAL,
OP_EQUALVERIFY,
OP_RESERVED1,
OP_RESERVED2,
OP_1ADD,
OP_1SUB,
OP_2MUL,
OP_2DIV,
OP_NEGATE,
OP_ABS,
OP_NOT,
OP_0NOTEQUAL,
OP_ADD,
OP_SUB,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT,
OP_BOOLAND,
OP_BOOLOR,
OP_NUMEQUAL,
OP_NUMEQUALVERIFY,
OP_NUMNOTEQUAL,
OP_LESSTHAN,
OP_GREATERTHAN,
OP_LESSTHANOREQUAL,
OP_GREATERTHANOREQUAL,
OP_MIN,
OP_MAX,
OP_WITHIN,
OP_RIPEMD160,
OP_SHA1,
OP_SHA256,
OP_HASH160,
OP_HASH256,
OP_CODESEPARATOR,
OP_CHECKSIG,
OP_CHECKSIGVERIFY,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_NOP1,
OP_NOP2,
OP_NOP3,
OP_NOP4,
OP_NOP5,
OP_NOP6,
OP_NOP7,
OP_NOP8,
OP_NOP9,
OP_NOP10,
OP_SMALLINTEGER,
OP_PUBKEYS,
OP_PUBKEYHASH,
OP_PUBKEY,
}
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
OP_PUSHDATA2 : 'OP_PUSHDATA2',
OP_PUSHDATA4 : 'OP_PUSHDATA4',
OP_1NEGATE : 'OP_1NEGATE',
OP_RESERVED : 'OP_RESERVED',
OP_1 : 'OP_1',
OP_2 : 'OP_2',
OP_3 : 'OP_3',
OP_4 : 'OP_4',
OP_5 : 'OP_5',
OP_6 : 'OP_6',
OP_7 : 'OP_7',
OP_8 : 'OP_8',
OP_9 : 'OP_9',
OP_10 : 'OP_10',
OP_11 : 'OP_11',
OP_12 : 'OP_12',
OP_13 : 'OP_13',
OP_14 : 'OP_14',
OP_15 : 'OP_15',
OP_16 : 'OP_16',
OP_NOP : 'OP_NOP',
OP_VER : 'OP_VER',
OP_IF : 'OP_IF',
OP_NOTIF : 'OP_NOTIF',
OP_VERIF : 'OP_VERIF',
OP_VERNOTIF : 'OP_VERNOTIF',
OP_ELSE : 'OP_ELSE',
OP_ENDIF : 'OP_ENDIF',
OP_VERIFY : 'OP_VERIFY',
OP_RETURN : 'OP_RETURN',
OP_TOALTSTACK : 'OP_TOALTSTACK',
OP_FROMALTSTACK : 'OP_FROMALTSTACK',
OP_2DROP : 'OP_2DROP',
OP_2DUP : 'OP_2DUP',
OP_3DUP : 'OP_3DUP',
OP_2OVER : 'OP_2OVER',
OP_2ROT : 'OP_2ROT',
OP_2SWAP : 'OP_2SWAP',
OP_IFDUP : 'OP_IFDUP',
OP_DEPTH : 'OP_DEPTH',
OP_DROP : 'OP_DROP',
OP_DUP : 'OP_DUP',
OP_NIP : 'OP_NIP',
OP_OVER : 'OP_OVER',
OP_PICK : 'OP_PICK',
OP_ROLL : 'OP_ROLL',
OP_ROT : 'OP_ROT',
OP_SWAP : 'OP_SWAP',
OP_TUCK : 'OP_TUCK',
OP_CAT : 'OP_CAT',
OP_SUBSTR : 'OP_SUBSTR',
OP_LEFT : 'OP_LEFT',
OP_RIGHT : 'OP_RIGHT',
OP_SIZE : 'OP_SIZE',
OP_INVERT : 'OP_INVERT',
OP_AND : 'OP_AND',
OP_OR : 'OP_OR',
OP_XOR : 'OP_XOR',
OP_EQUAL : 'OP_EQUAL',
OP_EQUALVERIFY : 'OP_EQUALVERIFY',
OP_RESERVED1 : 'OP_RESERVED1',
OP_RESERVED2 : 'OP_RESERVED2',
OP_1ADD : 'OP_1ADD',
OP_1SUB : 'OP_1SUB',
OP_2MUL : 'OP_2MUL',
OP_2DIV : 'OP_2DIV',
OP_NEGATE : 'OP_NEGATE',
OP_ABS : 'OP_ABS',
OP_NOT : 'OP_NOT',
OP_0NOTEQUAL : 'OP_0NOTEQUAL',
OP_ADD : 'OP_ADD',
OP_SUB : 'OP_SUB',
OP_MUL : 'OP_MUL',
OP_DIV : 'OP_DIV',
OP_MOD : 'OP_MOD',
OP_LSHIFT : 'OP_LSHIFT',
OP_RSHIFT : 'OP_RSHIFT',
OP_BOOLAND : 'OP_BOOLAND',
OP_BOOLOR : 'OP_BOOLOR',
OP_NUMEQUAL : 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
OP_LESSTHAN : 'OP_LESSTHAN',
OP_GREATERTHAN : 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
OP_MIN : 'OP_MIN',
OP_MAX : 'OP_MAX',
OP_WITHIN : 'OP_WITHIN',
OP_RIPEMD160 : 'OP_RIPEMD160',
OP_SHA1 : 'OP_SHA1',
OP_SHA256 : 'OP_SHA256',
OP_HASH160 : 'OP_HASH160',
OP_HASH256 : 'OP_HASH256',
OP_CODESEPARATOR : 'OP_CODESEPARATOR',
OP_CHECKSIG : 'OP_CHECKSIG',
OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
OP_NOP2 : 'OP_NOP2',
OP_NOP3 : 'OP_NOP3',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
OP_NOP6 : 'OP_NOP6',
OP_NOP7 : 'OP_NOP7',
OP_NOP8 : 'OP_NOP8',
OP_NOP9 : 'OP_NOP9',
OP_NOP10 : 'OP_NOP10',
OP_SMALLINTEGER : 'OP_SMALLINTEGER',
OP_PUBKEYS : 'OP_PUBKEYS',
OP_PUBKEYHASH : 'OP_PUBKEYHASH',
OP_PUBKEY : 'OP_PUBKEY',
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
OPCODES_BY_NAME = {
'OP_0' : OP_0,
'OP_PUSHDATA1' : OP_PUSHDATA1,
'OP_PUSHDATA2' : OP_PUSHDATA2,
'OP_PUSHDATA4' : OP_PUSHDATA4,
'OP_1NEGATE' : OP_1NEGATE,
'OP_RESERVED' : OP_RESERVED,
'OP_1' : OP_1,
'OP_2' : OP_2,
'OP_3' : OP_3,
'OP_4' : OP_4,
'OP_5' : OP_5,
'OP_6' : OP_6,
'OP_7' : OP_7,
'OP_8' : OP_8,
'OP_9' : OP_9,
'OP_10' : OP_10,
'OP_11' : OP_11,
'OP_12' : OP_12,
'OP_13' : OP_13,
'OP_14' : OP_14,
'OP_15' : OP_15,
'OP_16' : OP_16,
'OP_NOP' : OP_NOP,
'OP_VER' : OP_VER,
'OP_IF' : OP_IF,
'OP_NOTIF' : OP_NOTIF,
'OP_VERIF' : OP_VERIF,
'OP_VERNOTIF' : OP_VERNOTIF,
'OP_ELSE' : OP_ELSE,
'OP_ENDIF' : OP_ENDIF,
'OP_VERIFY' : OP_VERIFY,
'OP_RETURN' : OP_RETURN,
'OP_TOALTSTACK' : OP_TOALTSTACK,
'OP_FROMALTSTACK' : OP_FROMALTSTACK,
'OP_2DROP' : OP_2DROP,
'OP_2DUP' : OP_2DUP,
'OP_3DUP' : OP_3DUP,
'OP_2OVER' : OP_2OVER,
'OP_2ROT' : OP_2ROT,
'OP_2SWAP' : OP_2SWAP,
'OP_IFDUP' : OP_IFDUP,
'OP_DEPTH' : OP_DEPTH,
'OP_DROP' : OP_DROP,
'OP_DUP' : OP_DUP,
'OP_NIP' : OP_NIP,
'OP_OVER' : OP_OVER,
'OP_PICK' : OP_PICK,
'OP_ROLL' : OP_ROLL,
'OP_ROT' : OP_ROT,
'OP_SWAP' : OP_SWAP,
'OP_TUCK' : OP_TUCK,
'OP_CAT' : OP_CAT,
'OP_SUBSTR' : OP_SUBSTR,
'OP_LEFT' : OP_LEFT,
'OP_RIGHT' : OP_RIGHT,
'OP_SIZE' : OP_SIZE,
'OP_INVERT' : OP_INVERT,
'OP_AND' : OP_AND,
'OP_OR' : OP_OR,
'OP_XOR' : OP_XOR,
'OP_EQUAL' : OP_EQUAL,
'OP_EQUALVERIFY' : OP_EQUALVERIFY,
'OP_RESERVED1' : OP_RESERVED1,
'OP_RESERVED2' : OP_RESERVED2,
'OP_1ADD' : OP_1ADD,
'OP_1SUB' : OP_1SUB,
'OP_2MUL' : OP_2MUL,
'OP_2DIV' : OP_2DIV,
'OP_NEGATE' : OP_NEGATE,
'OP_ABS' : OP_ABS,
'OP_NOT' : OP_NOT,
'OP_0NOTEQUAL' : OP_0NOTEQUAL,
'OP_ADD' : OP_ADD,
'OP_SUB' : OP_SUB,
'OP_MUL' : OP_MUL,
'OP_DIV' : OP_DIV,
'OP_MOD' : OP_MOD,
'OP_LSHIFT' : OP_LSHIFT,
'OP_RSHIFT' : OP_RSHIFT,
'OP_BOOLAND' : OP_BOOLAND,
'OP_BOOLOR' : OP_BOOLOR,
'OP_NUMEQUAL' : OP_NUMEQUAL,
'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY,
'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL,
'OP_LESSTHAN' : OP_LESSTHAN,
'OP_GREATERTHAN' : OP_GREATERTHAN,
'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL,
'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL,
'OP_MIN' : OP_MIN,
'OP_MAX' : OP_MAX,
'OP_WITHIN' : OP_WITHIN,
'OP_RIPEMD160' : OP_RIPEMD160,
'OP_SHA1' : OP_SHA1,
'OP_SHA256' : OP_SHA256,
'OP_HASH160' : OP_HASH160,
'OP_HASH256' : OP_HASH256,
'OP_CODESEPARATOR' : OP_CODESEPARATOR,
'OP_CHECKSIG' : OP_CHECKSIG,
'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY,
'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
'OP_NOP1' : OP_NOP1,
'OP_NOP2' : OP_NOP2,
'OP_NOP3' : OP_NOP3,
'OP_NOP4' : OP_NOP4,
'OP_NOP5' : OP_NOP5,
'OP_NOP6' : OP_NOP6,
'OP_NOP7' : OP_NOP7,
'OP_NOP8' : OP_NOP8,
'OP_NOP9' : OP_NOP9,
'OP_NOP10' : OP_NOP10,
'OP_SMALLINTEGER' : OP_SMALLINTEGER,
'OP_PUBKEYS' : OP_PUBKEYS,
'OP_PUBKEYHASH' : OP_PUBKEYHASH,
'OP_PUBKEY' : OP_PUBKEY,
}
class CScriptInvalidError(Exception):
"""Base class for CScript exceptions"""
pass
class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
super(CScriptTruncatedPushDataError, self).__init__(msg)
# This is used, e.g., for block heights in coinbase scripts (BIP34)
class CScriptNum(object):
def __init__(self, d=0):
self.value = d
@staticmethod
def encode(obj):
r = bytearray(0)
if obj.value == 0:
return bytes(r)
neg = obj.value < 0
absvalue = -obj.value if neg else obj.value
while (absvalue):
r.append(absvalue & 0xff)  # plain int so bytearray.append works on Python 3 as well as Python 2
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return bytes(bchr(len(r)) + r)
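# Hedged illustration (editor's addition, not part of the upstream framework):
# sample minimal encodings produced by CScriptNum.encode above (values chosen
# purely for illustration): 0 -> b'', 1 -> b'\x01\x01', -1 -> b'\x01\x81',
# 128 -> b'\x02\x80\x00'.
def _example_cscriptnum_encodings():
    """Return length-prefixed encodings for a few illustrative values."""
    return [CScriptNum.encode(CScriptNum(v)) for v in (0, 1, -1, 128)]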
class CScript(bytes):
"""Serialized script
A bytes subclass, so you can use this directly whenever bytes are accepted.
Note that this means that indexing does *not* work - you'll get an index by
byte rather than opcode. This format was chosen for efficiency so that the
general case would not require creating a lot of little CScriptOP objects.
iter(script) however does iterate by opcode.
"""
@classmethod
def __coerce_instance(cls, other):
# Coerce other into bytes
if isinstance(other, CScriptOp):
other = bchr(other)
elif isinstance(other, CScriptNum):
if (other.value == 0):
other = bchr(CScriptOp(OP_0))
else:
other = CScriptNum.encode(other)
elif isinstance(other, (int, long)):
if 0 <= other <= 16:
other = bytes(bchr(CScriptOp.encode_op_n(other)))
elif other == -1:
other = bytes(bchr(OP_1NEGATE))
else:
other = CScriptOp.encode_op_pushdata(bignum.bn2vch(other))
elif isinstance(other, (bytes, bytearray)):
other = bytes(CScriptOp.encode_op_pushdata(other))
return other
def __add__(self, other):
# Do the coercion outside of the try block so that errors in it are
# noticed.
other = self.__coerce_instance(other)
try:
# bytes.__add__ always returns bytes instances unfortunately
return CScript(super(CScript, self).__add__(other))
except TypeError:
raise TypeError('Can not add a %r instance to a CScript' % other.__class__)
def join(self, iterable):
# join makes no sense for a CScript()
raise NotImplementedError
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
return super(CScript, cls).__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
Yields tuples of (opcode, data, sop_idx) so that the different possible
PUSHDATA encodings can be accurately distinguished, as well as
determining the exact opcode byte indexes. (sop_idx)
"""
i = 0
while i < len(self):
sop_idx = i
opcode = bord(self[i])
i += 1
if opcode > OP_PUSHDATA4:
yield (opcode, None, sop_idx)
else:
datasize = None
pushdata_type = None
if opcode < OP_PUSHDATA1:
pushdata_type = 'PUSHDATA(%d)' % opcode
datasize = opcode
elif opcode == OP_PUSHDATA1:
pushdata_type = 'PUSHDATA1'
if i >= len(self):
raise CScriptInvalidError('PUSHDATA1: missing data length')
datasize = bord(self[i])
i += 1
elif opcode == OP_PUSHDATA2:
pushdata_type = 'PUSHDATA2'
if i + 1 >= len(self):
raise CScriptInvalidError('PUSHDATA2: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8)
i += 2
elif opcode == OP_PUSHDATA4:
pushdata_type = 'PUSHDATA4'
if i + 3 >= len(self):
raise CScriptInvalidError('PUSHDATA4: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
i += 4
else:
assert False # shouldn't happen
data = bytes(self[i:i+datasize])
# Check for truncation
if len(data) < datasize:
raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
i += datasize
yield (opcode, data, sop_idx)
def __iter__(self):
"""'Cooked' iteration
Returns either a CScriptOP instance, an integer, or bytes, as
appropriate.
See raw_iter() if you need to distinguish the different possible
PUSHDATA encodings.
"""
for (opcode, data, sop_idx) in self.raw_iter():
if data is not None:
yield data
else:
opcode = CScriptOp(opcode)
if opcode.is_small_int():
yield opcode.decode_op_n()
else:
yield CScriptOp(opcode)
def __repr__(self):
# For Python3 compatibility add b before strings so testcases don't
# need to change
def _repr(o):
if isinstance(o, bytes):
return "x('%s')" % binascii.hexlify(o).decode('utf8')
else:
return repr(o)
ops = []
i = iter(self)
while True:
op = None
try:
op = _repr(next(i))
except CScriptTruncatedPushDataError as err:
op = '%s...<ERROR: %s>' % (_repr(err.data), err)
break
except CScriptInvalidError as err:
op = '<ERROR: %s>' % err
break
except StopIteration:
break
finally:
if op is not None:
ops.append(op)
return "CScript([%s])" % ', '.join(ops)
def GetSigOpCount(self, fAccurate):
"""Get the SigOp count.
fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.
Note that this is consensus-critical.
"""
n = 0
lastOpcode = OP_INVALIDOPCODE
for (opcode, data, sop_idx) in self.raw_iter():
if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
n += 1
elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
if fAccurate and (OP_1 <= lastOpcode <= OP_16):
n += opcode.decode_op_n()
else:
n += 20
lastOpcode = opcode
return n
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
"""Consensus critical, see FindAndDelete() in Satoshi codebase"""
r = b''
last_sop_idx = sop_idx = 0
skip = True
for (opcode, data, sop_idx) in script.raw_iter():
if not skip:
r += script[last_sop_idx:sop_idx]
last_sop_idx = sop_idx
if script[sop_idx:sop_idx + len(sig)] == sig:
skip = True
else:
skip = False
if not skip:
r += script[last_sop_idx:]
return CScript(r)
def SignatureHash(script, txTo, inIdx, hashtype):
"""Consensus-correct SignatureHash
Returns (hash, err) to precisely match the consensus-critical behavior of
the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
"""
HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
if inIdx >= len(txTo.vin):
return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
txtmp = CTransaction(txTo)
for txin in txtmp.vin:
txin.scriptSig = b''
txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
if (hashtype & 0x1f) == SIGHASH_NONE:
txtmp.vout = []
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
elif (hashtype & 0x1f) == SIGHASH_SINGLE:
outIdx = inIdx
if outIdx >= len(txtmp.vout):
return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
tmp = txtmp.vout[outIdx]
txtmp.vout = []
for i in range(outIdx):
txtmp.vout.append(CTxOut())
txtmp.vout.append(tmp)
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
if hashtype & SIGHASH_ANYONECANPAY:
tmp = txtmp.vin[inIdx]
txtmp.vin = []
txtmp.vin.append(tmp)
s = txtmp.serialize()
s += struct.pack(b"<I", hashtype)
hash = hash256(s)
return (hash, None)
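# Hedged illustration (editor's addition, not part of the upstream framework):
# composing a script from opcodes and data pushes with CScript, then counting
# its signature operations. The 20-byte value stands in for a public-key hash
# and is purely illustrative.
def _example_p2pkh_script():
    dummy_pubkey_hash = b'\x00' * 20
    script = CScript([OP_DUP, OP_HASH160, dummy_pubkey_hash,
                      OP_EQUALVERIFY, OP_CHECKSIG])
    return script, script.GetSigOpCount(False)  # -> (script, 1)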
|
mit
| -3,195,067,513,813,755,400
| 25.635045
| 146
| 0.566646
| false
| 2.816594
| false
| false
| false
|
mopidy/pyspotify
|
examples/shell.py
|
1
|
6169
|
#!/usr/bin/env python
"""
This is an example of a simple command line client for Spotify using pyspotify.
You can run this file directly::
python shell.py
Then run the ``help`` command on the ``spotify>`` prompt to view all available
commands.
"""
from __future__ import print_function, unicode_literals
import cmd
import logging
import threading
import spotify
class Commander(cmd.Cmd):
doc_header = 'Commands'
prompt = 'spotify> '
logger = logging.getLogger('shell.commander')
def __init__(self):
cmd.Cmd.__init__(self)
self.logged_in = threading.Event()
self.logged_out = threading.Event()
self.logged_out.set()
self.session = spotify.Session()
self.session.on(
spotify.SessionEvent.CONNECTION_STATE_UPDATED,
self.on_connection_state_changed,
)
self.session.on(spotify.SessionEvent.END_OF_TRACK, self.on_end_of_track)
try:
self.audio_driver = spotify.AlsaSink(self.session)
except ImportError:
self.logger.warning(
'No audio sink found; audio playback unavailable.'
)
self.event_loop = spotify.EventLoop(self.session)
self.event_loop.start()
def on_connection_state_changed(self, session):
if session.connection.state is spotify.ConnectionState.LOGGED_IN:
self.logged_in.set()
self.logged_out.clear()
elif session.connection.state is spotify.ConnectionState.LOGGED_OUT:
self.logged_in.clear()
self.logged_out.set()
def on_end_of_track(self, session):
self.session.player.play(False)
def precmd(self, line):
if line:
self.logger.debug('New command: %s', line)
return line
def emptyline(self):
pass
def do_debug(self, line):
"Show more logging output"
print('Logging at DEBUG level')
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
def do_info(self, line):
"Show normal logging output"
print('Logging at INFO level')
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def do_warning(self, line):
"Show less logging output"
print('Logging at WARNING level')
logger = logging.getLogger()
logger.setLevel(logging.WARNING)
def do_EOF(self, line):
"Exit"
if self.logged_in.is_set():
print('Logging out...')
self.session.logout()
self.logged_out.wait()
self.event_loop.stop()
print('')
return True
def do_login(self, line):
"login <username> <password>"
tokens = line.split(' ', 1)
if len(tokens) != 2:
self.logger.warning("Wrong number of arguments")
return
username, password = tokens
self.session.login(username, password, remember_me=True)
self.logged_in.wait()
def do_relogin(self, line):
"relogin -- login as the previous logged in user"
try:
self.session.relogin()
self.logged_in.wait()
except spotify.Error as e:
self.logger.error(e)
def do_forget_me(self, line):
"forget_me -- forget the previous logged in user"
self.session.forget_me()
def do_logout(self, line):
"logout"
self.session.logout()
self.logged_out.wait()
def do_whoami(self, line):
"whoami"
if self.logged_in.is_set():
self.logger.info(
'I am %s aka %s. You can find me at %s',
self.session.user.canonical_name,
self.session.user.display_name,
self.session.user.link,
)
else:
self.logger.info(
'I am not logged in, but I may be %s',
self.session.remembered_user,
)
def do_play_uri(self, line):
"play <spotify track uri>"
if not self.logged_in.is_set():
self.logger.warning('You must be logged in to play')
return
try:
track = self.session.get_track(line)
track.load()
except (ValueError, spotify.Error) as e:
self.logger.warning(e)
return
self.logger.info('Loading track into player')
self.session.player.load(track)
self.logger.info('Playing track')
self.session.player.play()
def do_pause(self, line):
self.logger.info('Pausing track')
self.session.player.play(False)
def do_resume(self, line):
self.logger.info('Resuming track')
self.session.player.play()
def do_stop(self, line):
self.logger.info('Stopping track')
self.session.player.play(False)
self.session.player.unload()
def do_seek(self, seconds):
"seek <seconds>"
if not self.logged_in.is_set():
self.logger.warning('You must be logged in to seek')
return
if self.session.player.state is spotify.PlayerState.UNLOADED:
self.logger.warning('A track must be loaded before seeking')
return
self.session.player.seek(int(seconds) * 1000)
def do_search(self, query):
"search <query>"
if not self.logged_in.is_set():
self.logger.warning('You must be logged in to search')
return
try:
result = self.session.search(query)
result.load()
except spotify.Error as e:
self.logger.warning(e)
return
self.logger.info(
'%d tracks, %d albums, %d artists, and %d playlists found.',
result.track_total,
result.album_total,
result.artist_total,
result.playlist_total,
)
self.logger.info('Top tracks:')
for track in result.tracks:
self.logger.info(
'[%s] %s - %s', track.link, track.artists[0].name, track.name
)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
Commander().cmdloop()
|
apache-2.0
| 3,352,281,336,376,357,000
| 28.516746
| 80
| 0.573999
| false
| 3.9243
| false
| false
| false
|
kevinlee12/oppia
|
core/controllers/admin_test.py
|
1
|
85129
|
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the admin page."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import logging
from constants import constants
from core import jobs
from core import jobs_registry
from core import jobs_test
from core.domain import collection_services
from core.domain import config_domain
from core.domain import config_services
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import opportunity_services
from core.domain import question_fetchers
from core.domain import recommendations_services
from core.domain import rights_manager
from core.domain import search_services
from core.domain import skill_services
from core.domain import stats_domain
from core.domain import stats_services
from core.domain import story_domain
from core.domain import story_fetchers
from core.domain import story_services
from core.domain import topic_domain
from core.domain import topic_fetchers
from core.domain import topic_services
from core.domain import user_services
from core.platform import models
from core.platform.taskqueue import gae_taskqueue_services as taskqueue_services
from core.tests import test_utils
import feconf
import utils
(exp_models, job_models, opportunity_models, audit_models) = (
models.Registry.import_models(
[models.NAMES.exploration, models.NAMES.job, models.NAMES.opportunity,
models.NAMES.audit]))
BOTH_MODERATOR_AND_ADMIN_EMAIL = 'moderator.and.admin@example.com'
BOTH_MODERATOR_AND_ADMIN_USERNAME = 'moderatorandadm1n'
class SampleMapReduceJobManager(jobs.BaseMapReduceOneOffJobManager):
"""Test job that counts the total number of explorations."""
@classmethod
def entity_classes_to_map_over(cls):
return [exp_models.ExplorationModel]
@staticmethod
def map(item):
yield ('sum', 1)
@staticmethod
def reduce(key, values):
yield (key, sum([int(value) for value in values]))
class AdminIntegrationTest(test_utils.GenericTestBase):
"""Server integration tests for operations on the admin page."""
def setUp(self):
"""Complete the signup process for self.ADMIN_EMAIL."""
super(AdminIntegrationTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
def test_admin_page_rights(self):
"""Test access rights to the admin page."""
self.get_html_response('/admin', expected_status_int=302)
# Login as a non-admin.
self.login(self.EDITOR_EMAIL)
self.get_html_response('/admin', expected_status_int=401)
self.logout()
# Login as an admin.
self.login(self.ADMIN_EMAIL, is_super_admin=True)
self.get_html_response('/admin')
self.logout()
def test_change_configuration_property(self):
"""Test that configuration properties can be changed."""
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
new_config_value = False
response_dict = self.get_json('/adminhandler')
response_config_properties = response_dict['config_properties']
self.assertDictContainsSubset({
'value': False,
}, response_config_properties[
config_domain.IS_IMPROVEMENTS_TAB_ENABLED.name])
payload = {
'action': 'save_config_properties',
'new_config_property_values': {
config_domain.IS_IMPROVEMENTS_TAB_ENABLED.name: (
new_config_value),
}
}
self.post_json('/adminhandler', payload, csrf_token=csrf_token)
response_dict = self.get_json('/adminhandler')
response_config_properties = response_dict['config_properties']
self.assertDictContainsSubset({
'value': new_config_value,
}, response_config_properties[
config_domain.IS_IMPROVEMENTS_TAB_ENABLED.name])
self.logout()
def test_cannot_reload_exploration_in_production_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot reload an exploration in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'reload_exploration',
'exploration_id': '2'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_load_new_structures_data_in_production_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot load new structures data in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_structures_data'
}, csrf_token=csrf_token)
self.logout()
def test_non_admins_cannot_load_new_structures_data(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
assert_raises_regexp = self.assertRaisesRegexp(
Exception, 'User does not have enough rights to generate data.')
with assert_raises_regexp:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_structures_data'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_generate_dummy_skill_data_in_production_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot generate dummy skills in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_skill_data'
}, csrf_token=csrf_token)
self.logout()
def test_non_admins_cannot_generate_dummy_skill_data(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
assert_raises_regexp = self.assertRaisesRegexp(
Exception, 'User does not have enough rights to generate data.')
with assert_raises_regexp:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_skill_data'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_reload_collection_in_production_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot reload a collection in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'reload_collection',
'collection_id': '2'
}, csrf_token=csrf_token)
self.logout()
def test_reload_collection(self):
observed_log_messages = []
def _mock_logging_function(msg, *args):
"""Mocks logging.info()."""
observed_log_messages.append(msg % args)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
collection_services.load_demo('0')
collection_rights = rights_manager.get_collection_rights('0')
self.assertFalse(collection_rights.community_owned)
with self.swap(logging, 'info', _mock_logging_function):
self.post_json(
'/adminhandler', {
'action': 'reload_collection',
'collection_id': '0'
}, csrf_token=csrf_token)
collection_rights = rights_manager.get_collection_rights('0')
self.assertTrue(collection_rights.community_owned)
self.assertEqual(
observed_log_messages,
[
'[ADMIN] %s reloaded collection 0' % self.admin_id,
'Collection with id 0 was loaded.'
]
)
self.logout()
def test_load_new_structures_data(self):
self.set_admins([self.ADMIN_USERNAME])
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_structures_data'
}, csrf_token=csrf_token)
topic_summaries = topic_services.get_all_topic_summaries()
self.assertEqual(len(topic_summaries), 2)
for summary in topic_summaries:
if summary.name == 'Dummy Topic 1':
topic_id = summary.id
story_id = (
topic_fetchers.get_topic_by_id(
topic_id).canonical_story_references[0].story_id)
self.assertIsNotNone(
story_fetchers.get_story_by_id(story_id, strict=False))
skill_summaries = skill_services.get_all_skill_summaries()
self.assertEqual(len(skill_summaries), 3)
questions, _, _ = (
question_fetchers.get_questions_and_skill_descriptions_by_skill_ids(
10, [
skill_summaries[0].id, skill_summaries[1].id,
skill_summaries[2].id], '')
)
self.assertEqual(len(questions), 3)
# Testing that there are 3 Hindi translation opportunities
# available on the Contributor Dashboard. Hindi was picked arbitrarily;
# any language code other than English (what the dummy explorations
# were written in) can be tested here.
translation_opportunities, _, _ = (
opportunity_services.get_translation_opportunities('hi', None))
self.assertEqual(len(translation_opportunities), 3)
self.logout()
def test_generate_dummy_skill_and_questions_data(self):
self.set_admins([self.ADMIN_USERNAME])
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_skill_data'
}, csrf_token=csrf_token)
skill_summaries = skill_services.get_all_skill_summaries()
self.assertEqual(len(skill_summaries), 1)
questions, _, _ = (
question_fetchers.get_questions_and_skill_descriptions_by_skill_ids(
20, [skill_summaries[0].id], '')
)
self.assertEqual(len(questions), 15)
self.logout()
def test_flush_migration_bot_contributions_action(self):
created_exploration_ids = ['exp_1', 'exp_2']
edited_exploration_ids = ['exp_3', 'exp_4']
user_services.create_user_contributions(
feconf.MIGRATION_BOT_USER_ID, created_exploration_ids,
edited_exploration_ids)
migration_bot_contributions_model = (
user_services.get_user_contributions(feconf.MIGRATION_BOT_USER_ID))
self.assertEqual(
migration_bot_contributions_model.created_exploration_ids,
created_exploration_ids)
self.assertEqual(
migration_bot_contributions_model.edited_exploration_ids,
edited_exploration_ids)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'flush_migration_bot_contribution_data'
}, csrf_token=csrf_token)
migration_bot_contributions_model = (
user_services.get_user_contributions(feconf.MIGRATION_BOT_USER_ID))
self.assertEqual(
migration_bot_contributions_model.created_exploration_ids, [])
self.assertEqual(
migration_bot_contributions_model.edited_exploration_ids, [])
def test_regenerate_topic_related_opportunities_action(self):
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
topic_id = 'topic'
story_id = 'story'
self.save_new_valid_exploration(
'0', owner_id, title='title',
end_state_name='End State')
self.publish_exploration(owner_id, '0')
topic = topic_domain.Topic.create_default_topic(
topic_id, 'topic', 'abbrev', 'description')
topic_services.save_new_topic(owner_id, topic)
story = story_domain.Story.create_default_story(
story_id, 'A story', 'Description', topic_id, 'story')
story_services.save_new_story(owner_id, story)
topic_services.add_canonical_story(
owner_id, topic_id, story_id)
story_services.update_story(
owner_id, story_id, [story_domain.StoryChange({
'cmd': 'add_story_node',
'node_id': 'node_1',
'title': 'Node1',
}), story_domain.StoryChange({
'cmd': 'update_story_node_property',
'property_name': 'exploration_id',
'node_id': 'node_1',
'old_value': None,
'new_value': '0'
})], 'Changes.')
all_opportunity_models = list(
opportunity_models.ExplorationOpportunitySummaryModel.get_all())
self.assertEqual(len(all_opportunity_models), 1)
old_creation_time = all_opportunity_models[0].created_on
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
result = self.post_json(
'/adminhandler', {
'action': 'regenerate_topic_related_opportunities',
'topic_id': 'topic'
}, csrf_token=csrf_token)
self.assertEqual(
result, {
'opportunities_count': 1
})
all_opportunity_models = list(
opportunity_models.ExplorationOpportunitySummaryModel.get_all())
self.assertEqual(len(all_opportunity_models), 1)
new_creation_time = all_opportunity_models[0].created_on
self.assertLess(old_creation_time, new_creation_time)
def test_admin_topics_csv_download_handler(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response = self.get_custom_response(
'/admintopicscsvdownloadhandler', 'text/csv')
self.assertEqual(
response.headers['Content-Disposition'],
'attachment; filename=topic_similarities.csv')
self.assertIn(
'Architecture,Art,Biology,Business,Chemistry,Computing,Economics,'
'Education,Engineering,Environment,Geography,Government,Hobbies,'
'Languages,Law,Life Skills,Mathematics,Medicine,Music,Philosophy,'
'Physics,Programming,Psychology,Puzzles,Reading,Religion,Sport,'
'Statistics,Welcome',
response.body)
self.logout()
def test_admin_job_output_handler(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
self.save_new_valid_exploration('exp_id', self.admin_id)
job_id = SampleMapReduceJobManager.create_new()
SampleMapReduceJobManager.enqueue(job_id)
self.assertEqual(
self.count_jobs_in_taskqueue(
taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
response = self.get_json('/adminjoboutput', params={'job_id': job_id})
self.assertIsNone(response['output'])
self.process_and_flush_pending_tasks()
response = self.get_json('/adminjoboutput', params={'job_id': job_id})
self.assertEqual(
SampleMapReduceJobManager.get_status_code(job_id),
jobs.STATUS_CODE_COMPLETED)
self.assertEqual(response['output'], ['[u\'sum\', 1]'])
self.logout()
def test_revert_config_property(self):
observed_log_messages = []
def _mock_logging_function(msg, *args):
"""Mocks logging.info()."""
observed_log_messages.append(msg % args)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
config_services.set_property(self.admin_id, 'promo_bar_enabled', True)
self.assertTrue(config_domain.PROMO_BAR_ENABLED.value)
with self.swap(logging, 'info', _mock_logging_function):
self.post_json(
'/adminhandler', {
'action': 'revert_config_property',
'config_property_id': 'promo_bar_enabled'
}, csrf_token=csrf_token)
self.assertFalse(config_domain.PROMO_BAR_ENABLED.value)
self.assertEqual(
observed_log_messages,
['[ADMIN] %s reverted config property: promo_bar_enabled'
% self.admin_id])
self.logout()
def test_start_new_one_off_job(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
self.assertEqual(
self.count_jobs_in_taskqueue(
taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 0)
with self.swap(
jobs_registry, 'ONE_OFF_JOB_MANAGERS', [SampleMapReduceJobManager]):
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'start_new_job',
'job_type': 'SampleMapReduceJobManager'
}, csrf_token=csrf_token)
self.assertEqual(
self.count_jobs_in_taskqueue(
taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
self.logout()
def test_cancel_one_off_job(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
job_id = SampleMapReduceJobManager.create_new()
SampleMapReduceJobManager.enqueue(job_id)
self.run_but_do_not_flush_pending_tasks()
status = SampleMapReduceJobManager.get_status_code(job_id)
self.assertEqual(status, job_models.STATUS_CODE_STARTED)
with self.swap(
jobs_registry, 'ONE_OFF_JOB_MANAGERS', [SampleMapReduceJobManager]):
self.get_json('/adminhandler')
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'cancel_job',
'job_id': job_id,
'job_type': 'SampleMapReduceJobManager'
}, csrf_token=csrf_token)
status = SampleMapReduceJobManager.get_status_code(job_id)
self.assertEqual(status, job_models.STATUS_CODE_CANCELED)
self.logout()
def test_start_computation(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
exploration = exp_domain.Exploration.create_default_exploration(
'exp_id')
exp_services.save_new_exploration('owner_id', exploration)
self.assertEqual(
jobs_test.StartExplorationEventCounter.get_count('exp_id'), 0)
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
with self.swap(
jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
[jobs_test.StartExplorationEventCounter]):
self.get_json('/adminhandler')
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'start_computation',
'computation_type': 'StartExplorationEventCounter'
}, csrf_token=csrf_token)
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_RUNNING)
self.logout()
def test_stop_computation_with_running_jobs(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
exploration = exp_domain.Exploration.create_default_exploration(
'exp_id')
exp_services.save_new_exploration('owner_id', exploration)
self.assertEqual(
jobs_test.StartExplorationEventCounter.get_count('exp_id'), 0)
jobs_test.StartExplorationEventCounter.start_computation()
self.run_but_do_not_flush_pending_tasks()
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_RUNNING)
with self.swap(
jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
[jobs_test.StartExplorationEventCounter]):
self.get_json('/adminhandler')
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'stop_computation',
'computation_type': 'StartExplorationEventCounter'
}, csrf_token=csrf_token)
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
self.logout()
def test_stop_computation_with_finished_jobs(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
exploration = exp_domain.Exploration.create_default_exploration(
'exp_id')
exp_services.save_new_exploration('owner_id', exploration)
self.assertEqual(
jobs_test.StartExplorationEventCounter.get_count('exp_id'), 0)
jobs_test.StartExplorationEventCounter.start_computation()
self.process_and_flush_pending_tasks()
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_RUNNING)
with self.swap(
jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
[jobs_test.StartExplorationEventCounter]):
self.get_json('/adminhandler')
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'stop_computation',
'computation_type': 'StartExplorationEventCounter'
}, csrf_token=csrf_token)
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
self.logout()
def test_stop_computation_with_stopped_jobs(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
exploration = exp_domain.Exploration.create_default_exploration(
'exp_id')
exp_services.save_new_exploration('owner_id', exploration)
self.assertEqual(
jobs_test.StartExplorationEventCounter.get_count('exp_id'), 0)
jobs_test.StartExplorationEventCounter.start_computation()
self.run_but_do_not_flush_pending_tasks()
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_RUNNING)
jobs_test.StartExplorationEventCounter.stop_computation(self.admin_id)
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
with self.swap(
jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
[jobs_test.StartExplorationEventCounter]):
self.get_json('/adminhandler')
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'stop_computation',
'computation_type': 'StartExplorationEventCounter'
}, csrf_token=csrf_token)
status = jobs_test.StartExplorationEventCounter.get_status_code()
self.assertEqual(
status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
self.logout()
def test_upload_topic_similarities(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.assertEqual(recommendations_services.get_topic_similarity(
'Art', 'Biology'), 0.1)
self.assertEqual(recommendations_services.get_topic_similarity(
'Art', 'Art'), feconf.SAME_TOPIC_SIMILARITY)
self.assertEqual(recommendations_services.get_topic_similarity(
'Topic 1', 'Topic 2'), feconf.DEFAULT_TOPIC_SIMILARITY)
self.assertEqual(recommendations_services.get_topic_similarity(
'Topic', 'Topic'), feconf.SAME_TOPIC_SIMILARITY)
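# The uploaded topic-similarity data below is a square CSV matrix: the header row
# lists the topic names, each subsequent row gives that topic's similarity to every
# topic in the same order, and the diagonal entries are 1.0 (a topic compared with
# itself).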
topic_similarities_data = (
'Art,Biology,Chemistry\n'
'1.0,0.2,0.1\n'
'0.2,1.0,0.8\n'
'0.1,0.8,1.0'
)
self.post_json(
'/adminhandler', {
'action': 'upload_topic_similarities',
'data': topic_similarities_data
}, csrf_token=csrf_token)
self.assertEqual(recommendations_services.get_topic_similarity(
'Art', 'Biology'), 0.2)
self.logout()
class GenerateDummyExplorationsTest(test_utils.GenericTestBase):
"""Test the conditions for generation of dummy explorations."""
def setUp(self):
super(GenerateDummyExplorationsTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
def test_generate_count_greater_than_publish_count(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_explorations',
'num_dummy_exps_to_generate': 10,
'num_dummy_exps_to_publish': 3
}, csrf_token=csrf_token)
generated_exps = exp_services.get_all_exploration_summaries()
published_exps = exp_services.get_recently_published_exp_summaries(5)
self.assertEqual(len(generated_exps), 10)
self.assertEqual(len(published_exps), 3)
def test_generate_count_equal_to_publish_count(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_explorations',
'num_dummy_exps_to_generate': 2,
'num_dummy_exps_to_publish': 2
}, csrf_token=csrf_token)
generated_exps = exp_services.get_all_exploration_summaries()
published_exps = exp_services.get_recently_published_exp_summaries(5)
self.assertEqual(len(generated_exps), 2)
self.assertEqual(len(published_exps), 2)
def test_generate_count_less_than_publish_count(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
generated_exps_response = self.post_json(
'/adminhandler', {
'action': 'generate_dummy_explorations',
'num_dummy_exps_to_generate': 2,
'num_dummy_exps_to_publish': 5
},
csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(generated_exps_response['status_code'], 400)
generated_exps = exp_services.get_all_exploration_summaries()
published_exps = exp_services.get_recently_published_exp_summaries(5)
self.assertEqual(len(generated_exps), 0)
self.assertEqual(len(published_exps), 0)
def test_handler_raises_error_with_non_int_num_dummy_exps_to_generate(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
with self.assertRaisesRegexp(
Exception, 'invalid_type is not a number'):
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_explorations',
'num_dummy_exps_to_publish': 1,
'num_dummy_exps_to_generate': 'invalid_type'
}, csrf_token=csrf_token)
generated_exps = exp_services.get_all_exploration_summaries()
published_exps = exp_services.get_recently_published_exp_summaries(5)
self.assertEqual(generated_exps, {})
self.assertEqual(published_exps, {})
self.logout()
def test_handler_raises_error_with_non_int_num_dummy_exps_to_publish(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
with self.assertRaisesRegexp(
Exception, 'invalid_type is not a number'):
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_explorations',
'num_dummy_exps_to_publish': 'invalid_type',
'num_dummy_exps_to_generate': 1
}, csrf_token=csrf_token)
generated_exps = exp_services.get_all_exploration_summaries()
published_exps = exp_services.get_recently_published_exp_summaries(5)
self.assertEqual(generated_exps, {})
self.assertEqual(published_exps, {})
self.logout()
def test_cannot_generate_dummy_explorations_in_prod_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot generate dummy explorations in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_explorations',
'num_dummy_exps_to_generate': 10,
'num_dummy_exps_to_publish': 3
}, csrf_token=csrf_token)
generated_exps = exp_services.get_all_exploration_summaries()
published_exps = exp_services.get_recently_published_exp_summaries(5)
self.assertEqual(generated_exps, {})
self.assertEqual(published_exps, {})
self.logout()
class AdminRoleHandlerTest(test_utils.GenericTestBase):
"""Checks the user role handling on the admin page."""
def setUp(self):
"""Complete the signup process for self.ADMIN_EMAIL."""
super(AdminRoleHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.set_admins([self.ADMIN_USERNAME])
def test_view_and_update_role(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
# Check normal user has expected role. Viewing by username.
response_dict = self.get_json(
feconf.ADMIN_ROLE_HANDLER_URL,
params={'filter_criterion': 'username', 'username': 'user1'})
self.assertEqual(
response_dict, {'user1': feconf.ROLE_ID_EXPLORATION_EDITOR})
# Check role correctly gets updated.
csrf_token = self.get_new_csrf_token()
response_dict = self.post_json(
feconf.ADMIN_ROLE_HANDLER_URL,
{'role': feconf.ROLE_ID_MODERATOR, 'username': username},
csrf_token=csrf_token,
expected_status_int=200)
self.assertEqual(response_dict, {})
# Viewing by role.
response_dict = self.get_json(
feconf.ADMIN_ROLE_HANDLER_URL,
params={
'filter_criterion': 'role',
'role': feconf.ROLE_ID_MODERATOR
})
self.assertEqual(response_dict, {'user1': feconf.ROLE_ID_MODERATOR})
self.logout()
def test_invalid_username_in_filter_criterion_and_update_role(self):
username = 'myinvaliduser'
self.login(self.ADMIN_EMAIL, is_super_admin=True)
# Trying to view role of non-existent user.
self.get_json(
feconf.ADMIN_ROLE_HANDLER_URL,
params={'filter_criterion': 'username', 'username': username},
expected_status_int=400)
# Trying to update role of non-existent user.
csrf_token = self.get_new_csrf_token()
self.post_json(
feconf.ADMIN_ROLE_HANDLER_URL,
{'role': feconf.ROLE_ID_MODERATOR, 'username': username},
csrf_token=csrf_token,
expected_status_int=400)
def test_cannot_view_role_with_invalid_view_filter_criterion(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response = self.get_json(
feconf.ADMIN_ROLE_HANDLER_URL,
params={'filter_criterion': 'invalid', 'username': 'user1'},
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid filter criterion to view roles.')
def test_changing_user_role_from_topic_manager_to_moderator(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
self.set_topic_managers([username])
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response_dict = self.get_json(
feconf.ADMIN_ROLE_HANDLER_URL,
params={'filter_criterion': 'username', 'username': username})
self.assertEqual(
response_dict, {username: feconf.ROLE_ID_TOPIC_MANAGER})
# Check role correctly gets updated.
csrf_token = self.get_new_csrf_token()
response_dict = self.post_json(
feconf.ADMIN_ROLE_HANDLER_URL,
{'role': feconf.ROLE_ID_MODERATOR, 'username': username},
csrf_token=csrf_token)
self.assertEqual(response_dict, {})
response_dict = self.get_json(
feconf.ADMIN_ROLE_HANDLER_URL,
params={'filter_criterion': 'username', 'username': username})
self.assertEqual(response_dict, {username: feconf.ROLE_ID_MODERATOR})
self.logout()
def test_changing_user_role_from_exploration_editor_to_topic_manager(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
user_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
topic_id = topic_services.get_new_topic_id()
self.save_new_topic(
topic_id, user_id, name='Name',
abbreviated_name='abbrev', url_fragment='url-fragment',
description='Description', canonical_story_ids=[],
additional_story_ids=[], uncategorized_skill_ids=[],
subtopics=[], next_subtopic_id=1)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response_dict = self.get_json(
feconf.ADMIN_ROLE_HANDLER_URL,
params={'filter_criterion': 'username', 'username': username})
self.assertEqual(
response_dict, {username: feconf.ROLE_ID_EXPLORATION_EDITOR})
# Check role correctly gets updated.
csrf_token = self.get_new_csrf_token()
response_dict = self.post_json(
feconf.ADMIN_ROLE_HANDLER_URL,
{'role': feconf.ROLE_ID_TOPIC_MANAGER, 'username': username,
'topic_id': topic_id}, csrf_token=csrf_token)
self.assertEqual(response_dict, {})
response_dict = self.get_json(
feconf.ADMIN_ROLE_HANDLER_URL,
params={'filter_criterion': 'username', 'username': username})
self.assertEqual(
response_dict, {username: feconf.ROLE_ID_TOPIC_MANAGER})
self.logout()
class ExplorationsLatexSvgHandlerTest(test_utils.GenericTestBase):
"""Tests for Saving Math SVGs in explorations."""
def setUp(self):
"""Complete the signup process for self.ADMIN_EMAIL."""
super(ExplorationsLatexSvgHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.set_admins([self.ADMIN_USERNAME])
def test_get_latex_to_svg_mapping(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
multiple_explorations_math_rich_text_info = []
math_rich_text_info1 = (
exp_domain.ExplorationMathRichTextInfo(
'exp_id1', True, ['abc1', 'xyz1']))
multiple_explorations_math_rich_text_info.append(math_rich_text_info1)
math_rich_text_info2 = (
exp_domain.ExplorationMathRichTextInfo(
'exp_id2', True, ['abc2', 'xyz2']))
multiple_explorations_math_rich_text_info.append(math_rich_text_info2)
math_rich_text_info3 = (
exp_domain.ExplorationMathRichTextInfo(
'exp_id3', True, ['abc3', 'xyz3']))
multiple_explorations_math_rich_text_info.append(math_rich_text_info3)
exp_services.save_multi_exploration_math_rich_text_info_model(
multiple_explorations_math_rich_text_info)
response_dict = self.get_json(
feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
params={'item_to_fetch': 'exp_id_to_latex_mapping'})
expected_response = {
'exp_id1': ['abc1', 'xyz1'],
'exp_id2': ['abc2', 'xyz2']
}
self.assertEqual(
response_dict,
{'latex_strings_to_exp_id_mapping': expected_response})
def test_get_when_invalid_item_to_fetch_item_given(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response_dict = self.get_json(
feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
params={'item_to_fetch': 'invalid'},
expected_status_int=400)
self.assertIn(
'Please specify a valid type of item to fetch.',
response_dict['error'])
def test_get_number_explorations_left_to_update(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
multiple_explorations_math_rich_text_info = []
math_rich_text_info1 = (
exp_domain.ExplorationMathRichTextInfo(
'exp_id1', True, ['abc1', 'xyz1']))
multiple_explorations_math_rich_text_info.append(math_rich_text_info1)
math_rich_text_info2 = (
exp_domain.ExplorationMathRichTextInfo(
'exp_id2', True, ['abc2', 'xyz2']))
multiple_explorations_math_rich_text_info.append(math_rich_text_info2)
math_rich_text_info3 = (
exp_domain.ExplorationMathRichTextInfo(
'exp_id3', True, ['abc3', 'xyz3']))
multiple_explorations_math_rich_text_info.append(math_rich_text_info3)
exp_services.save_multi_exploration_math_rich_text_info_model(
multiple_explorations_math_rich_text_info)
response_dict = self.get_json(
feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
params={'item_to_fetch': 'number_of_explorations_left_to_update'})
self.assertEqual(
response_dict,
{'number_of_explorations_left_to_update': '3'})
def test_post_svgs_when_all_values_are_valid(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
editor_id = self.get_user_id_from_email(user_email)
post_data = {
'exp_id1': {
'+,+,+,+': {
'latexId': 'latex_id1',
'dimensions': {
'encoded_height_string': '1d429',
'encoded_width_string': '1d33',
'encoded_vertical_padding_string': '0d241'
}
},
'\\frac{x}{y}': {
'latexId': 'latex_id2',
'dimensions': {
'encoded_height_string': '1d525',
'encoded_width_string': '3d33',
'encoded_vertical_padding_string': '0d241'
}
}
}
}
csrf_token = self.get_new_csrf_token()
svg_file_1 = (
'<svg xmlns="http://www.w3.org/2000/svg" width="1.33ex" height="1.4'
'29ex" viewBox="0 -511.5 572.5 615.4" focusable="false" style="vert'
'ical-align: -0.241ex;"><g stroke="currentColor" fill="currentColo'
'r" stroke-width="0" transform="matrix(1 0 0 -1 0 0)"><path stroke'
'-width="1" d="M52 289Q59 331 106 386T222 442Q257 442 2864Q412 404'
' 406 402Q368 386 350 336Q290 115 290 78Q290 50 306 38T341 26Q37'
'8 26 414 59T463 140Q466 150 469 151T485 153H489Q504 153 504 145284'
' 52 289Z"/></g></svg>'
)
svg_file_2 = (
'<svg xmlns="http://www.w3.org/2000/svg" width="3.33ex" height="1.5'
'25ex" viewBox="0 -511.5 572.5 615.4" focusable="false" style="vert'
'ical-align: -0.241ex;"><g stroke="currentColor" fill="currentColo'
'r" stroke-width="0" transform="matrix(1 0 0 -1 0 0)"><path stroke'
'-width="1" d="M52 289Q59 331 106 386T222 442Q257 442 2864Q412 404'
' 406 402Q368 386 350 336Q290 115 290 78Q290 50 306 38T341 26Q37'
'8 26 414 59T463 140Q466 150 469 151T485 153H489Q504 153 504 145284'
' 52 289Z"/></g></svg>'
)
exploration1 = exp_domain.Exploration.create_default_exploration(
'exp_id1', title='title1', category='category')
exp_services.save_new_exploration(editor_id, exploration1)
exp_models.ExplorationMathRichTextInfoModel(
id='exp_id1',
math_images_generation_required=True,
latex_strings_without_svg=['+,+,+,+', '\\frac{x}{y}'],
estimated_max_size_of_images_in_bytes=20000).put()
response_dict = self.post_json(
feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
{'latexMapping': post_data},
csrf_token=csrf_token,
upload_files=(
('latex_id1', 'latex_id1', svg_file_1),
('latex_id2', 'latex_id2', svg_file_2), ),
expected_status_int=200)
self.assertEqual(
response_dict,
{
'number_of_explorations_updated': '1',
'number_of_explorations_left_to_update': '0'
})
self.logout()
def test_post_svgs_when_some_images_are_not_supplied(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
editor_id = self.get_user_id_from_email(user_email)
post_data = {
'exp_id1': {
'+,+,+,+': {
'latexId': 'latex_id1',
'dimensions': {
'encoded_height_string': '1d429',
'encoded_width_string': '1d33',
'encoded_vertical_padding_string': '0d241'
}
},
'\\frac{x}{y}': {
'latexId': 'latex_id2',
'dimensions': {
'encoded_height_string': '1d525',
'encoded_width_string': '3d33',
'encoded_vertical_padding_string': '0d241'
}
}
}
}
# Obtain a CSRF token for the SVG-upload request.
csrf_token = self.get_new_csrf_token()
svg_file_1 = (
'<svg xmlns="http://www.w3.org/2000/svg" width="1.33ex" height="1.4'
'29ex" viewBox="0 -511.5 572.5 615.4" focusable="false" style="vert'
'ical-align: -0.241ex;"><g stroke="currentColor" fill="currentColo'
'r" stroke-width="0" transform="matrix(1 0 0 -1 0 0)"><path stroke'
'-width="1" d="M52 289Q59 331 106 386T222 442Q257 442 2864Q412 404'
' 406 402Q368 386 350 336Q290 115 290 78Q290 50 306 38T341 26Q37'
'8 26 414 59T463 140Q466 150 469 151T485 153H489Q504 153 504 145284'
' 52 289Z"/></g></svg>'
)
exploration1 = exp_domain.Exploration.create_default_exploration(
'exp_id1', title='title1', category='category')
exp_services.save_new_exploration(editor_id, exploration1)
response_dict = self.post_json(
feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
{'latexMapping': post_data},
csrf_token=csrf_token,
upload_files=(
('latex_id1', 'latex_id1', svg_file_1),),
expected_status_int=400)
self.assertIn(
'SVG for LaTeX string \\frac{x}{y} in exploration exp_id1 is not '
'supplied.', response_dict['error'])
self.logout()
class DataExtractionQueryHandlerTests(test_utils.GenericTestBase):
"""Tests for data extraction handler."""
EXP_ID = 'exp'
def setUp(self):
"""Complete the signup process for self.ADMIN_EMAIL."""
super(DataExtractionQueryHandlerTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.exploration = self.save_new_valid_exploration(
self.EXP_ID, self.editor_id, end_state_name='End')
stats_services.record_answer(
self.EXP_ID, self.exploration.version,
self.exploration.init_state_name, 'TextInput',
stats_domain.SubmittedAnswer(
'first answer', 'TextInput', 0,
0, exp_domain.EXPLICIT_CLASSIFICATION, {},
'a_session_id_val', 1.0))
stats_services.record_answer(
self.EXP_ID, self.exploration.version,
self.exploration.init_state_name, 'TextInput',
stats_domain.SubmittedAnswer(
'second answer', 'TextInput', 0,
0, exp_domain.EXPLICIT_CLASSIFICATION, {},
'a_session_id_val', 1.0))
def test_data_extraction_handler(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
# Test that it returns all answers when 'num_answers' is 0.
payload = {
'exp_id': self.EXP_ID,
'exp_version': self.exploration.version,
'state_name': self.exploration.init_state_name,
'num_answers': 0
}
response = self.get_json(
'/explorationdataextractionhandler', params=payload)
extracted_answers = response['data']
self.assertEqual(len(extracted_answers), 2)
self.assertEqual(extracted_answers[0]['answer'], 'first answer')
self.assertEqual(extracted_answers[1]['answer'], 'second answer')
# Make sure that it returns only 'num_answers' number of answers.
payload = {
'exp_id': self.EXP_ID,
'exp_version': self.exploration.version,
'state_name': self.exploration.init_state_name,
'num_answers': 1
}
response = self.get_json(
'/explorationdataextractionhandler', params=payload)
extracted_answers = response['data']
self.assertEqual(len(extracted_answers), 1)
self.assertEqual(extracted_answers[0]['answer'], 'first answer')
def test_that_handler_raises_exception(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
payload = {
'exp_id': self.EXP_ID,
'exp_version': self.exploration.version,
'state_name': 'state name',
'num_answers': 0
}
response = self.get_json(
'/explorationdataextractionhandler', params=payload,
expected_status_int=400)
self.assertEqual(
response['error'],
'Exploration \'exp\' does not have \'state name\' state.')
def test_handler_raises_error_with_invalid_exploration_id(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
payload = {
'exp_id': 'invalid_exp_id',
'state_name': 'state name',
'exp_version': 1,
'num_answers': 0
}
response = self.get_json(
'/explorationdataextractionhandler', params=payload,
expected_status_int=400)
self.assertEqual(
response['error'],
'Entity for exploration with id invalid_exp_id and version 1 not '
'found.')
def test_handler_raises_error_with_invalid_exploration_version(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
payload = {
'exp_id': self.EXP_ID,
'state_name': 'state name',
'exp_version': 10,
'num_answers': 0
}
response = self.get_json(
'/explorationdataextractionhandler', params=payload,
expected_status_int=400)
self.assertEqual(
response['error'],
'Entity for exploration with id %s and version 10 not found.'
% self.EXP_ID)
class ClearSearchIndexTest(test_utils.GenericTestBase):
"""Tests that search index gets cleared."""
def test_clear_search_index(self):
exp_services.load_demo('0')
result_explorations = search_services.search_explorations(
'Welcome', 2)[0]
self.assertEqual(result_explorations, ['0'])
collection_services.load_demo('0')
result_collections = search_services.search_collections('Welcome', 2)[0]
self.assertEqual(result_collections, ['0'])
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
generated_exps_response = self.post_json(
'/adminhandler', {
'action': 'clear_search_index'
},
csrf_token=csrf_token)
self.assertEqual(generated_exps_response, {})
result_explorations = search_services.search_explorations(
'Welcome', 2)[0]
self.assertEqual(result_explorations, [])
result_collections = search_services.search_collections('Welcome', 2)[0]
self.assertEqual(result_collections, [])
class SendDummyMailTest(test_utils.GenericTestBase):
""""Tests for sending test mails to admin."""
def setUp(self):
super(SendDummyMailTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
def test_send_dummy_mail(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
with self.swap(feconf, 'CAN_SEND_EMAILS', True):
generated_response = self.post_json(
'/senddummymailtoadminhandler', {},
csrf_token=csrf_token, expected_status_int=200)
self.assertEqual(generated_response, {})
with self.swap(feconf, 'CAN_SEND_EMAILS', False):
generated_response = self.post_json(
'/senddummymailtoadminhandler', {},
csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
generated_response['error'], 'This app cannot send emails.')
class UpdateUsernameHandlerTest(test_utils.GenericTestBase):
"""Tests for updating usernames."""
OLD_USERNAME = 'oldUsername'
NEW_USERNAME = 'newUsername'
def setUp(self):
super(UpdateUsernameHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.OLD_USERNAME)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
def test_update_username_with_none_new_username(self):
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/updateusernamehandler',
{
'old_username': self.OLD_USERNAME,
'new_username': None},
csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid request: A new username must be '
'specified.')
def test_update_username_with_none_old_username(self):
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/updateusernamehandler',
{
'old_username': None,
'new_username': self.NEW_USERNAME},
csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid request: The old username must be '
'specified.')
def test_update_username_with_non_string_new_username(self):
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/updateusernamehandler',
{
'old_username': self.OLD_USERNAME,
'new_username': 123},
csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Expected new username to be a unicode '
'string, received 123')
def test_update_username_with_non_string_old_username(self):
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/updateusernamehandler',
{
'old_username': 123,
'new_username': self.NEW_USERNAME},
csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Expected old username to be a unicode '
'string, received 123')
def test_update_username_with_long_new_username(self):
long_username = 'a' * (constants.MAX_USERNAME_LENGTH + 1)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/updateusernamehandler',
{
'old_username': self.OLD_USERNAME,
'new_username': long_username},
csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Expected new username to be less than %s '
'characters, received %s' % (
constants.MAX_USERNAME_LENGTH,
long_username))
def test_update_username_with_nonexistent_old_username(self):
non_existent_username = 'invalid'
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/updateusernamehandler',
{
'old_username': non_existent_username,
'new_username': self.NEW_USERNAME},
csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(response['error'], 'Invalid username: invalid')
def test_update_username_with_new_username_already_taken(self):
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/updateusernamehandler',
{
'old_username': self.OLD_USERNAME,
'new_username': self.OLD_USERNAME},
csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(response['error'], 'Username already taken.')
def test_update_username(self):
user_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json(
'/updateusernamehandler',
{
'old_username': self.OLD_USERNAME,
'new_username': self.NEW_USERNAME},
csrf_token=csrf_token)
self.assertEqual(user_services.get_username(user_id), self.NEW_USERNAME)
def test_update_username_creates_audit_model(self):
user_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
creation_time_in_millisecs = utils.get_current_time_in_millisecs()
mock_get_current_time_in_millisecs = lambda: creation_time_in_millisecs
# The UsernameChangeAuditModel's ID is formed from the user ID and a
# millisecond timestamp, so get_current_time_in_millisecs must return the same
# value that we saved in creation_time_in_millisecs. Without forcing that value
# via swap, the test can flake: time keeps advancing, so the milliseconds
# embedded in the model's ID would differ from the value we use to look it up.
with self.swap(
utils, 'get_current_time_in_millisecs',
mock_get_current_time_in_millisecs):
self.put_json(
'/updateusernamehandler',
{
'old_username': self.OLD_USERNAME,
'new_username': self.NEW_USERNAME},
csrf_token=csrf_token)
self.assertTrue(
audit_models.UsernameChangeAuditModel.has_reference_to_user_id(
user_id))
model_id = '%s.%d' % (user_id, creation_time_in_millisecs)
username_change_audit_model = (
audit_models.UsernameChangeAuditModel.get(model_id))
self.assertEqual(username_change_audit_model.committer_id, user_id)
self.assertEqual(
username_change_audit_model.old_username, self.OLD_USERNAME)
self.assertEqual(
username_change_audit_model.new_username, self.NEW_USERNAME)
class AddContributionReviewerHandlerTest(test_utils.GenericTestBase):
"""Tests related to add reviewers for contributor's
suggestion/application.
"""
TRANSLATION_REVIEWER_EMAIL = 'translationreviewer@example.com'
VOICEOVER_REVIEWER_EMAIL = 'voiceoverreviewer@example.com'
QUESTION_REVIEWER_EMAIL = 'questionreviewer@example.com'
def setUp(self):
super(AddContributionReviewerHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.TRANSLATION_REVIEWER_EMAIL, 'translator')
self.signup(self.VOICEOVER_REVIEWER_EMAIL, 'voiceartist')
self.signup(self.QUESTION_REVIEWER_EMAIL, 'question')
self.translation_reviewer_id = self.get_user_id_from_email(
self.TRANSLATION_REVIEWER_EMAIL)
self.voiceover_reviewer_id = self.get_user_id_from_email(
self.VOICEOVER_REVIEWER_EMAIL)
self.question_reviewer_id = self.get_user_id_from_email(
self.QUESTION_REVIEWER_EMAIL)
def test_add_reviewer_with_invalid_username_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'invalid',
'review_category': 'translation',
'language_code': 'en'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid username: invalid')
def test_add_translation_reviewer(self):
self.assertFalse(
user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/addcontributionreviewerhandler', {
'username': 'translator',
'review_category': 'translation',
'language_code': 'hi'
}, csrf_token=csrf_token)
self.assertTrue(user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
def test_add_translation_reviewer_in_invalid_language_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'translator',
'review_category': 'translation',
'language_code': 'invalid'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid language_code: invalid')
def test_assigning_same_language_for_translation_review_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
self.assertFalse(
user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
csrf_token = self.get_new_csrf_token()
self.post_json(
'/addcontributionreviewerhandler', {
'username': 'translator',
'review_category': 'translation',
'language_code': 'hi'
}, csrf_token=csrf_token)
self.assertTrue(
user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'translator',
'review_category': 'translation',
'language_code': 'hi'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'User translator already has rights to review translation in '
'language code hi')
def test_add_voiceover_reviewer(self):
self.assertFalse(
user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/addcontributionreviewerhandler', {
'username': 'voiceartist',
'review_category': 'voiceover',
'language_code': 'hi'
}, csrf_token=csrf_token)
self.assertTrue(user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
def test_add_voiceover_reviewer_in_invalid_language(self):
self.assertFalse(
user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'voiceartist',
'review_category': 'voiceover',
'language_code': 'invalid'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid language_code: invalid')
self.assertFalse(
user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
def test_assigning_same_language_for_voiceover_review_raise_error(self):
self.assertFalse(
user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'voiceartist',
'review_category': 'voiceover',
'language_code': 'hi'
}, csrf_token=csrf_token)
self.assertTrue(
user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'voiceartist',
'review_category': 'voiceover',
'language_code': 'hi'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'User voiceartist already has rights to review voiceover in '
'language code hi')
def test_add_question_reviewer(self):
self.assertFalse(user_services.can_review_question_suggestions(
self.question_reviewer_id))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/addcontributionreviewerhandler', {
'username': 'question',
'review_category': 'question'
}, csrf_token=csrf_token)
self.assertTrue(user_services.can_review_question_suggestions(
self.question_reviewer_id))
def test_assigning_same_user_as_question_reviewer_raise_error(self):
self.assertFalse(user_services.can_review_question_suggestions(
self.question_reviewer_id))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'question',
'review_category': 'question'
}, csrf_token=csrf_token)
self.assertTrue(user_services.can_review_question_suggestions(
self.question_reviewer_id))
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'question',
'review_category': 'question'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'User question already has rights to review question.')
def test_add_reviewer_for_invalid_review_category_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'/addcontributionreviewerhandler', {
'username': 'question',
'review_category': 'invalid'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid review_category: invalid')
class RemoveContributionReviewerHandlerTest(test_utils.GenericTestBase):
"""Tests related to remove reviewers from contributor dashboard page."""
TRANSLATION_REVIEWER_EMAIL = 'translationreviewer@example.com'
VOICEOVER_REVIEWER_EMAIL = 'voiceoverreviewer@example.com'
QUESTION_REVIEWER_EMAIL = 'questionreviewer@example.com'
def setUp(self):
super(RemoveContributionReviewerHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.TRANSLATION_REVIEWER_EMAIL, 'translator')
self.signup(self.VOICEOVER_REVIEWER_EMAIL, 'voiceartist')
self.signup(self.QUESTION_REVIEWER_EMAIL, 'question')
self.translation_reviewer_id = self.get_user_id_from_email(
self.TRANSLATION_REVIEWER_EMAIL)
self.voiceover_reviewer_id = self.get_user_id_from_email(
self.VOICEOVER_REVIEWER_EMAIL)
self.question_reviewer_id = self.get_user_id_from_email(
self.QUESTION_REVIEWER_EMAIL)
def test_add_reviewer_without_username_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'removal_type': 'all'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(response['error'], 'Missing username param')
def test_add_reviewer_with_invalid_username_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'username': 'invalid',
'removal_type': 'all'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid username: invalid')
def test_remove_translation_reviewer(self):
self.assertFalse(
user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
user_services.allow_user_to_review_translation_in_language(
self.translation_reviewer_id, 'hi')
self.assertTrue(
user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.put_json(
'/removecontributionreviewerhandler', {
'username': 'translator',
'removal_type': 'specific',
'review_category': 'translation',
'language_code': 'hi'
}, csrf_token=csrf_token)
self.assertFalse(user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
def test_remove_translation_reviewer_in_invalid_language_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'username': 'translator',
'removal_type': 'specific',
'review_category': 'translation',
'language_code': 'invalid'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid language_code: invalid')
def test_remove_unassigned_translation_reviewer_raise_error(self):
self.assertFalse(
user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'username': 'translator',
'removal_type': 'specific',
'review_category': 'translation',
'language_code': 'hi'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'translator does not have rights to review translation in language '
'hi.')
def test_remove_voiceover_reviewer(self):
self.assertFalse(
user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
user_services.allow_user_to_review_voiceover_in_language(
self.voiceover_reviewer_id, 'hi')
self.assertTrue(
user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.put_json(
'/removecontributionreviewerhandler', {
'username': 'voiceartist',
'removal_type': 'specific',
'review_category': 'voiceover',
'language_code': 'hi'
}, csrf_token=csrf_token)
self.assertFalse(user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
def test_remove_voiceover_reviewer_in_invalid_language_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'username': 'voiceartist',
'removal_type': 'specific',
'review_category': 'voiceover',
'language_code': 'invalid'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid language_code: invalid')
def test_remove_unassigned_voiceover_reviewer_raise_error(self):
self.assertFalse(
user_services.can_review_voiceover_applications(
self.voiceover_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'username': 'voiceartist',
'removal_type': 'specific',
'review_category': 'voiceover',
'language_code': 'hi'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'voiceartist does not have rights to review voiceover in language '
'hi.')
def test_remove_question_reviewer(self):
user_services.allow_user_to_review_question(self.question_reviewer_id)
self.assertTrue(user_services.can_review_question_suggestions(
self.question_reviewer_id))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.put_json(
'/removecontributionreviewerhandler', {
'username': 'question',
'removal_type': 'specific',
'review_category': 'question'
}, csrf_token=csrf_token)
self.assertFalse(user_services.can_review_question_suggestions(
self.question_reviewer_id))
def test_removing_unassigned_question_reviewer_raise_error(self):
self.assertFalse(user_services.can_review_question_suggestions(
self.question_reviewer_id))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'username': 'question',
'removal_type': 'specific',
'review_category': 'question'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'question does not have rights to review question.')
def test_remove_reviewer_for_invalid_review_category_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'username': 'question',
'removal_type': 'specific',
'review_category': 'invalid'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid review_category: invalid')
def test_remove_reviewer_for_invalid_removal_type_raise_error(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'/removecontributionreviewerhandler', {
'username': 'question',
'removal_type': 'invalid'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid removal_type: invalid')
def test_remove_reviewer_from_all_reviewable_items(self):
user_services.allow_user_to_review_question(
self.translation_reviewer_id)
self.assertTrue(user_services.can_review_question_suggestions(
self.translation_reviewer_id))
user_services.allow_user_to_review_voiceover_in_language(
self.translation_reviewer_id, 'hi')
self.assertTrue(
user_services.can_review_voiceover_applications(
self.translation_reviewer_id, language_code='hi'))
user_services.allow_user_to_review_translation_in_language(
self.translation_reviewer_id, 'hi')
self.assertTrue(
user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.put_json(
'/removecontributionreviewerhandler', {
'username': 'translator',
'removal_type': 'all'
}, csrf_token=csrf_token)
self.assertFalse(user_services.can_review_question_suggestions(
self.translation_reviewer_id))
self.assertFalse(
user_services.can_review_voiceover_applications(
self.translation_reviewer_id, language_code='hi'))
self.assertFalse(
user_services.can_review_translation_suggestions(
self.translation_reviewer_id, language_code='hi'))
class ContributionReviewersListHandlerTest(test_utils.GenericTestBase):
"""Tests ContributionReviewersListHandler."""
TRANSLATION_REVIEWER_EMAIL = 'translationreviewer@example.com'
VOICEOVER_REVIEWER_EMAIL = 'voiceoverreviewer@example.com'
QUESTION_REVIEWER_EMAIL = 'questionreviewer@example.com'
def setUp(self):
super(ContributionReviewersListHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.TRANSLATION_REVIEWER_EMAIL, 'translator')
self.signup(self.VOICEOVER_REVIEWER_EMAIL, 'voiceartist')
self.signup(self.QUESTION_REVIEWER_EMAIL, 'question')
self.translation_reviewer_id = self.get_user_id_from_email(
self.TRANSLATION_REVIEWER_EMAIL)
self.voiceover_reviewer_id = self.get_user_id_from_email(
self.VOICEOVER_REVIEWER_EMAIL)
self.question_reviewer_id = self.get_user_id_from_email(
self.QUESTION_REVIEWER_EMAIL)
def test_check_contribution_reviewer_by_translation_reviewer_role(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
user_services.allow_user_to_review_translation_in_language(
self.translation_reviewer_id, 'hi')
user_services.allow_user_to_review_translation_in_language(
self.voiceover_reviewer_id, 'hi')
response = self.get_json(
'/getcontributionreviewershandler', params={
'review_category': 'translation',
'language_code': 'hi'
})
self.assertEqual(len(response['usernames']), 2)
self.assertTrue('translator' in response['usernames'])
self.assertTrue('voiceartist' in response['usernames'])
def test_check_contribution_reviewer_by_voiceover_reviewer_role(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
user_services.allow_user_to_review_voiceover_in_language(
self.translation_reviewer_id, 'hi')
user_services.allow_user_to_review_voiceover_in_language(
self.voiceover_reviewer_id, 'hi')
response = self.get_json(
'/getcontributionreviewershandler', params={
'review_category': 'voiceover',
'language_code': 'hi'
})
self.assertEqual(len(response['usernames']), 2)
self.assertTrue('translator' in response['usernames'])
self.assertTrue('voiceartist' in response['usernames'])
def test_check_contribution_reviewer_by_question_reviewer_role(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
user_services.allow_user_to_review_question(self.question_reviewer_id)
user_services.allow_user_to_review_question(self.voiceover_reviewer_id)
response = self.get_json(
'/getcontributionreviewershandler', params={
'review_category': 'question'
})
self.assertEqual(len(response['usernames']), 2)
self.assertTrue('question' in response['usernames'])
self.assertTrue('voiceartist' in response['usernames'])
def test_check_contribution_reviewer_with_invalid_language_code_raise_error(
self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response = self.get_json(
'/getcontributionreviewershandler', params={
'review_category': 'voiceover',
'language_code': 'invalid'
}, expected_status_int=400)
self.assertEqual(response['error'], 'Invalid language_code: invalid')
self.logout()
def test_check_contribution_reviewer_with_invalid_review_category_raise_error( # pylint: disable=line-too-long
self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response = self.get_json(
'/getcontributionreviewershandler', params={
'review_category': 'invalid',
'language_code': 'hi'
}, expected_status_int=400)
self.assertEqual(response['error'], 'Invalid review_category: invalid')
self.logout()
class ContributionReviewerRightsDataHandlerTest(test_utils.GenericTestBase):
"""Tests ContributionReviewerRightsDataHandler."""
REVIEWER_EMAIL = 'reviewer@example.com'
def setUp(self):
super(ContributionReviewerRightsDataHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
def test_check_contribution_reviewer_rights(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response = self.get_json(
'/contributionreviewerrightsdatahandler', params={
'username': 'reviewer'
})
self.assertEqual(
response['can_review_translation_for_language_codes'], [])
self.assertEqual(
response['can_review_voiceover_for_language_codes'], [])
self.assertEqual(response['can_review_questions'], False)
user_services.allow_user_to_review_question(self.reviewer_id)
user_services.allow_user_to_review_voiceover_in_language(
self.reviewer_id, 'hi')
user_services.allow_user_to_review_translation_in_language(
self.reviewer_id, 'hi')
response = self.get_json(
'/contributionreviewerrightsdatahandler', params={
'username': 'reviewer'
})
self.assertEqual(
response['can_review_translation_for_language_codes'], ['hi'])
self.assertEqual(
response['can_review_voiceover_for_language_codes'], ['hi'])
self.assertEqual(response['can_review_questions'], True)
def test_check_contribution_reviewer_rights_invalid_username(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response = self.get_json(
'/contributionreviewerrightsdatahandler', params={
'username': 'invalid'
}, expected_status_int=400)
self.assertEqual(response['error'], 'Invalid username: invalid')
self.logout()
def test_check_contribution_reviewer_rights_without_username(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response = self.get_json(
'/contributionreviewerrightsdatahandler', params={},
expected_status_int=400)
self.assertEqual(response['error'], 'Missing username param')
self.logout()
|
apache-2.0
| -4,891,581,027,496,295,000
| 38.854401
| 114
| 0.606186
| false
| 3.8909
| true
| false
| false
|
Tust-Celitea/celitea_portal_ng
|
app/auth/forms.py
|
1
|
3777
|
import flask_wtf
import wtforms
from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo
from ..models import User
class LoginForm(flask_wtf.FlaskForm):
email = wtforms.StringField('电子邮件地址', validators=[DataRequired(), Length(1, 64),
Email()])
password = wtforms.PasswordField('密码', validators=[DataRequired()])
remember_me = wtforms.BooleanField('在本次会话中保存登录状态')
submit = wtforms.SubmitField('登录')
class RegistrationForm(flask_wtf.FlaskForm):
email = wtforms.StringField('电子邮件地址', validators=[DataRequired(), Length(1, 64),
Email()])
username = wtforms.StringField('用户名', validators=[
DataRequired(), Length(1, 64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
' (╯・∧・)╯ ┻━┻ 用户名只能包含字母,数字和下划线。 ')])
password = wtforms.PasswordField('密码', validators=[
DataRequired(), EqualTo('password2', message='(╯=﹁"﹁=)╯ ┻━┻ 两次输入的密码不一样')])
password2 = wtforms.PasswordField('重复密码', validators=[DataRequired()])
submit = wtforms.SubmitField('注册')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise wtforms.ValidationError('(ノ`Д´)ノ┻━┻ 这个邮箱注册过啦~<br />或许汝需要试试 <a href="/auth/login">登录</a>?')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise wtforms.ValidationError('(ノ`Д´)ノ┻━┻ 这个用户名注册过啦~')
class ChangePasswordForm(flask_wtf.FlaskForm):
old_password = wtforms.PasswordField('旧密码', validators=[DataRequired()])
password = wtforms.PasswordField('新密码', validators=[
DataRequired(), EqualTo('password2', message='(╯=﹁"﹁=)╯ ┻━┻ 两次输入的密码不一样')])
password2 = wtforms.PasswordField('重复一遍新密码', validators=[DataRequired()])
submit = wtforms.SubmitField('更改密码 | ω・`)')
class PasswordResetRequestForm(flask_wtf.FlaskForm):
email = wtforms.StringField('邮件地址', validators=[DataRequired(), Length(1, 64),
Email()])
submit = wtforms.SubmitField('发送密码重置邮件,Biu~')
class PasswordResetForm(flask_wtf.FlaskForm):
email = wtforms.StringField('邮件地址', validators=[DataRequired(), Length(1, 64),
Email()])
password = wtforms.PasswordField('新密码', validators=[
DataRequired(), EqualTo('password2', message='(╯=﹁"﹁=)╯ ┻━┻ 两次输入的密码不一样')])
password2 = wtforms.PasswordField('重复一遍新密码', validators=[DataRequired()])
submit = wtforms.SubmitField('更改密码 | ω・`)')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first() is None:
raise wtforms.ValidationError('咦?这个邮件地址咱好像不认识 😂 ')
class ChangeEmailForm(flask_wtf.FlaskForm):
email = wtforms.StringField('新的邮件地址', validators=[DataRequired(), Length(1, 64),
Email()])
password = wtforms.PasswordField('密码', validators=[DataRequired()])
submit = wtforms.SubmitField('更改邮件地址| ω・`)')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise wtforms.ValidationError('(ノ`Д´)ノ┻━┻ 这个邮箱注册过啦~')
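# Illustrative usage sketch (not part of the original file; the blueprint object
# `auth`, the `User.verify_password` helper, and the template path are assumptions):
# flask_wtf forms such as LoginForm above are typically consumed in a view roughly
# as follows, where validate_on_submit() runs both the field validators and the
# custom validate_<field> hooks defined on the form class.
#
# from flask import flash, redirect, render_template, request, url_for
# from flask_login import login_user
#
# @auth.route('/login', methods=['GET', 'POST'])
# def login():
#     form = LoginForm()
#     if form.validate_on_submit():
#         user = User.query.filter_by(email=form.email.data).first()
#         if user is not None and user.verify_password(form.password.data):
#             login_user(user, form.remember_me.data)
#             return redirect(request.args.get('next') or url_for('main.index'))
#         flash('Invalid email or password.')
#     return render_template('auth/login.html', form=form)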
|
gpl-3.0
| -548,536,487,658,854,460
| 45.814286
| 108
| 0.611535
| false
| 2.812876
| false
| false
| false
|
capitalone/cloud-custodian
|
tests/test_batch.py
|
1
|
2707
|
# Copyright 2018 Capital One Services, LLC
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from .common import BaseTest
class TestBatchComputeEnvironment(BaseTest):
def test_batch_compute_update(self):
session_factory = self.replay_flight_data("test_batch_compute_update")
p = self.load_policy(
{
"name": "batch-compute",
"resource": "batch-compute",
"filters": [{"computeResources.desiredvCpus": 0}, {"state": "ENABLED"}],
"actions": [{"type": "update-environment", "state": "DISABLED"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client("batch")
envs = client.describe_compute_environments(
computeEnvironments=[resources[0]["computeEnvironmentName"]]
)[
"computeEnvironments"
]
self.assertEqual(envs[0]["state"], "DISABLED")
def test_batch_compute_delete(self):
session_factory = self.replay_flight_data("test_batch_compute_delete")
p = self.load_policy(
{
"name": "batch-compute",
"resource": "batch-compute",
"filters": [{"computeResources.desiredvCpus": 0}],
"actions": [{"type": "delete"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client("batch")
envs = client.describe_compute_environments(
computeEnvironments=[resources[0]['computeEnvironmentName']]
)['computeEnvironments']
self.assertEqual(envs[0]['status'], 'DELETING')
class TestBatchDefinition(BaseTest):
def test_definition_deregister(self):
def_name = 'c7n_batch'
session_factory = self.replay_flight_data(
'test_batch_definition_deregister')
p = self.load_policy({
'name': 'batch-definition',
'resource': 'batch-definition',
'filters': [
{'containerProperties.image': 'amazonlinux'}],
'actions': [{'type': 'deregister'}]
}, session_factory=session_factory)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]['jobDefinitionName'], 'c7n_batch')
client = session_factory(region='us-east-1').client('batch')
defs = client.describe_job_definitions(
jobDefinitionName=def_name)['jobDefinitions']
self.assertEqual(defs[0]['status'], 'INACTIVE')
|
apache-2.0
| 7,499,708,579,604,423,000
| 38.231884
| 88
| 0.58478
| false
| 4.236307
| true
| false
| false
|
HaprianVlad/TensorFlowProjects
|
LinearModels/dataHandler.py
|
1
|
5358
|
import tempfile
import urllib.request
import pandas as pd
import os
import tensorflow as tf
# DATA LABELS
LABEL_COLUMN = "label"
CATEGORICAL_COLUMNS = ["workclass", "education", "marital_status", "occupation",
"relationship", "race", "gender", "native_country"]
CONTINUOUS_COLUMNS = ["age", "education_num", "capital_gain", "capital_loss", "hours_per_week"]
COLUMNS = ["age", "workclass", "fnlwgt", "education", "education_num",
"marital_status", "occupation", "relationship", "race", "gender",
"capital_gain", "capital_loss", "hours_per_week", "native_country",
"income_bracket"]
def read_data():
print("Data is loading ...")
data_dir = "data"
train_file_name = data_dir + "/train_file.dat"
test_file_name = data_dir + "/test_file.dat"
if not os.path.exists(data_dir):
os.makedirs(data_dir)
if os.path.exists(train_file_name):
train_file = open(train_file_name, "r")
else:
train_file = open(train_file_name, "w+")
urllib.request.urlretrieve("https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.data", train_file.name)
if os.path.exists(test_file_name):
test_file = open(test_file_name, "r")
else:
test_file = open(test_file_name, "w+")
        urllib.request.urlretrieve("https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.test", test_file.name)
df_train = pd.read_csv(train_file, names=COLUMNS, skipinitialspace=True)
df_test = pd.read_csv(test_file, names=COLUMNS, skipinitialspace=True, skiprows=1)
df_train[LABEL_COLUMN] = (df_train["income_bracket"].apply(lambda x: ">50K" in x)).astype(int)
df_test[LABEL_COLUMN] = (df_test["income_bracket"].apply(lambda x: ">50K" in x)).astype(int)
print("Data loading done!")
    train_file.close()
    test_file.close()
return [df_train, df_test]
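# Illustrative usage of read_data() (a sketch; assumes this module is imported
# as `dataHandler` and that the UCI download succeeds):
#   df_train, df_test = dataHandler.read_data()
#   features, label = dataHandler.train_input_fn(df_train)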
# CREATE A TENSOR MODEL. This is represented as a dictionary: feature_name -> feature_tensor
def input_fn(df):
# Creates a dictionary mapping from each continuous feature column name (k) to
# the values of that column stored in a constant Tensor.
continuous_cols = {k: tf.constant(df[k].values) for k in CONTINUOUS_COLUMNS}
# Creates a dictionary mapping from each categorical feature column name (k)
# to the values of that column stored in a tf.SparseTensor.
categorical_cols = {k: tf.SparseTensor(
indices=[[i, 0] for i in range(df[k].size)],
values=df[k].values,
shape=[df[k].size, 1])
for k in CATEGORICAL_COLUMNS
}
# Merges the two dictionaries into one.
feature_cols = dict(continuous_cols)
feature_cols.update(categorical_cols.items())
# Converts the label column into a constant Tensor.
label = tf.constant(df[LABEL_COLUMN].values)
# Returns the feature columns (data matrix X) and the label(y) all represented as tensors.
return feature_cols, label
def train_input_fn(df_train):
return input_fn(df_train)
def eval_input_fn(df_test):
return input_fn(df_test)
# DEFINES THE TRANSFORMATIONS EACH FEATURE_TENSOR WILL SUPPORT.
def feature_transformations():
## CATEGORICAL FEATURES
gender = tf.contrib.layers.sparse_column_with_keys(column_name="gender", keys=["Female", "Male"])
race = tf.contrib.layers.sparse_column_with_keys(column_name="race", keys=["White", "Black"])
education = tf.contrib.layers.sparse_column_with_hash_bucket("education", hash_bucket_size=1000)
relationship = tf.contrib.layers.sparse_column_with_hash_bucket("relationship", hash_bucket_size=100)
workclass = tf.contrib.layers.sparse_column_with_hash_bucket("workclass", hash_bucket_size=100)
occupation = tf.contrib.layers.sparse_column_with_hash_bucket("occupation", hash_bucket_size=1000)
native_country = tf.contrib.layers.sparse_column_with_hash_bucket("native_country", hash_bucket_size=1000)
## CONTINUOS FEATURES
age = tf.contrib.layers.real_valued_column("age")
education_num = tf.contrib.layers.real_valued_column("education_num")
capital_gain = tf.contrib.layers.real_valued_column("capital_gain")
capital_loss = tf.contrib.layers.real_valued_column("capital_loss")
hours_per_week = tf.contrib.layers.real_valued_column("hours_per_week")
## TRANSFORMATIONS
### BUCKETIZATION OF CONTINOUS FEATURES
age_buckets = tf.contrib.layers.bucketized_column(age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
## DIFFERENT FEATURE SETS
wide_columns = [gender, native_country, education, occupation, workclass,
relationship, age_buckets,
tf.contrib.layers.crossed_column([education, occupation],
hash_bucket_size=int(1e4)),
tf.contrib.layers.crossed_column(
[age_buckets, education, occupation],
hash_bucket_size=int(1e6)),
tf.contrib.layers.crossed_column([native_country, occupation],
hash_bucket_size=int(1e4))]
deep_columns = [
tf.contrib.layers.embedding_column(workclass, dimension=8),
tf.contrib.layers.embedding_column(education, dimension=8),
tf.contrib.layers.embedding_column(gender, dimension=8),
tf.contrib.layers.embedding_column(relationship, dimension=8),
tf.contrib.layers.embedding_column(native_country,
dimension=8),
tf.contrib.layers.embedding_column(occupation, dimension=8),
age,
education_num,
capital_gain,
capital_loss,
hours_per_week]
return [wide_columns, deep_columns]
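# Sketch of how the two column sets could feed a wide & deep estimator from the
# old tf.contrib.learn API (hypothetical wiring, not part of this module):
#   wide_columns, deep_columns = feature_transformations()
#   model = tf.contrib.learn.DNNLinearCombinedClassifier(
#       linear_feature_columns=wide_columns,
#       dnn_feature_columns=deep_columns,
#       dnn_hidden_units=[100, 50])
#   model.fit(input_fn=lambda: train_input_fn(df_train), steps=200)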
|
apache-2.0
| 1,279,183,477,228,595,000
| 35.44898
| 123
| 0.703434
| false
| 3.091748
| true
| false
| false
|
wger-project/wger
|
wger/exercises/api/serializers.py
|
1
|
4625
|
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Third Party
from rest_framework import serializers
# wger
from wger.exercises.models import (
Equipment,
Exercise,
ExerciseBase,
ExerciseCategory,
ExerciseComment,
ExerciseImage,
Muscle,
)
class ExerciseBaseSerializer(serializers.ModelSerializer):
"""
Exercise serializer
"""
class Meta:
model = ExerciseBase
fields = [
'id',
'uuid',
'category',
'muscles',
'muscles_secondary',
'equipment',
'creation_date',
]
class EquipmentSerializer(serializers.ModelSerializer):
"""
Equipment serializer
"""
class Meta:
model = Equipment
fields = [
'id',
'name',
]
class ExerciseImageSerializer(serializers.ModelSerializer):
"""
ExerciseImage serializer
"""
class Meta:
model = ExerciseImage
fields = [
'id',
'uuid',
'exercise_base',
'image',
'is_main',
'status',
]
class ExerciseCommentSerializer(serializers.ModelSerializer):
"""
ExerciseComment serializer
"""
class Meta:
model = ExerciseComment
fields = [
'id',
'exercise',
'comment',
]
class ExerciseCategorySerializer(serializers.ModelSerializer):
"""
ExerciseCategory serializer
"""
class Meta:
model = ExerciseCategory
fields = ['id', 'name']
class MuscleSerializer(serializers.ModelSerializer):
"""
Muscle serializer
"""
class Meta:
model = Muscle
fields = [
'id',
'name',
'is_front',
'image_url_main',
'image_url_secondary',
]
class ExerciseSerializer(serializers.ModelSerializer):
"""
Exercise serializer
The fields from the new ExerciseBase are retrieved here as to retain
compatibility with the old model where all the fields where in Exercise.
"""
category = serializers.PrimaryKeyRelatedField(read_only=True)
muscles = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
muscles_secondary = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
equipment = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
variations = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class Meta:
model = Exercise
fields = (
"id",
"uuid",
"name",
"exercise_base",
"status",
"description",
"creation_date",
"category",
"muscles",
"muscles_secondary",
"equipment",
"language",
"license",
"license_author",
"variations",
)
class ExerciseInfoSerializer(serializers.ModelSerializer):
"""
Exercise info serializer
"""
images = ExerciseImageSerializer(many=True, read_only=True)
comments = ExerciseCommentSerializer(source='exercisecomment_set', many=True, read_only=True)
category = ExerciseCategorySerializer(read_only=True)
muscles = MuscleSerializer(many=True, read_only=True)
muscles_secondary = MuscleSerializer(many=True, read_only=True)
equipment = EquipmentSerializer(many=True, read_only=True)
variations = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
class Meta:
model = Exercise
depth = 1
fields = [
"id",
"name",
"uuid",
"description",
"creation_date",
"category",
"muscles",
"muscles_secondary",
"equipment",
"language",
"license",
"license_author",
"images",
"comments",
"variations",
]
|
agpl-3.0
| 7,936,270,238,942,832,000
| 23.865591
| 97
| 0.588541
| false
| 4.310345
| false
| false
| false
|
josegonzalez/chef-solo-cup
|
chef_solo_cup/helpers.py
|
1
|
15230
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import base64
import collections
import itertools
import json
import os
import re
import sys
import unidecode
import urllib2
from boto.ec2 import connect_to_region
from boto.exception import EC2ResponseError
import boto.ec2.autoscale
from fabric.api import run, sudo
from fabric.contrib.project import rsync_project
from chef_solo_cup.log import setup_custom_logger
def get_hosts(args, logger=None):
dna_path = os.path.join(os.path.realpath(os.getcwd()), 'dna')
includes = []
if args['dna_patterns']:
includes = map(lambda x: re.compile(x, re.I), args['dna_patterns'])
excludes = []
if args['exclude']:
excludes = map(lambda x: re.compile(x, re.I), args['exclude'])
all_hosts = itertools.chain(
get_filesystem_hosts(args, dna_path, logger=logger),
get_asg_hosts(args, dna_path, logger=logger),
)
hosts = _collect_valid_hosts(
all_hosts,
excludes,
includes,
args,
logger=logger
)
hosts = filter_hosts(args, hosts, logger=logger)
hosts = collections.OrderedDict(sorted(hosts.items()))
if args['quantity'] is not None:
x = itertools.islice(hosts.items(), 0, int(args['quantity']))
hosts = {}
for key, value in x:
hosts[key] = value
hosts = collections.OrderedDict(sorted(hosts.items()))
return hosts
def _collect_valid_hosts(all_hosts, excludes, includes, args, logger=None):
hosts = {}
for host, data in all_hosts:
if _skip_host(data, excludes, includes, args, logger=logger):
continue
if 'public_ip' in data and not data['public_ip']:
del data['public_ip']
if 'private_ip' in data and not data['private_ip']:
del data['private_ip']
data['host'] = host
valid_hosts = [data.get('public_ip'), data.get('private_ip'), host]
for hostname in valid_hosts:
if hostname:
data['host'] = hostname
break
hosts[host] = data
return hosts
def _skip_host(data, excludes, includes, args, logger=None):
f = data.get('file', '')
for key, value in _resolve_tags(args).iteritems():
if value != data.get('tags', {}).get(key, None):
logger.debug('Skipping {0} because tags dont match'.format(f))
return True
if len(excludes):
skip = map(lambda regex: regex.search(f), excludes)
skip = reduce(lambda x, y: x or y, skip)
if skip:
logger.debug('Skipping {0} because exclusion rule'.format(f))
return True
if len(includes):
skip = map(lambda regex: regex.search(f), includes)
skip = reduce(lambda x, y: x or y, skip)
if skip is None:
logger.debug('Skipping {0} because inclusion rule'.format(f))
return True
if args['regions'] and data.get('region') not in args['regions']:
logger.debug('Skipping {0} because regions dont match'.format(f))
return True
if args['providers'] and data.get('provider') not in args['providers']:
logger.debug('Skipping {0} because providers dont match'.format(f))
return True
if args['services'] and data.get('service') not in args['services']:
logger.debug('Skipping {0} because services dont match'.format(f))
return True
return False
def _resolve_tags(args):
if not args.get('tags', None):
return {}
tags = {}
for tag in args.get('tags', {}):
key, value = tag.split('=')
tags[key] = value
return tags
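# Example (illustrative): args['tags'] == ['env=prod', 'role=web'] yields
# {'env': 'prod', 'role': 'web'}; a tag without an '=' would raise ValueError.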
def get_filesystem_hosts(args, dna_path, logger=None):
for root, sub_folders, files in os.walk(dna_path):
files = filter(lambda f: ".json" in f, files)
for f in files:
path = root.split("/")
region = path.pop()
provider = path.pop()
service = path.pop()
host = f.replace(".json", "")
if host in ["all", "default"]:
continue
yield host, {
'file': f,
'path': os.path.join(root, f),
'root': root,
'region': region,
'provider': provider,
'service': service,
'tags': {},
'dna_path': "dna/{0}/{1}/{2}/{3}".format(
service,
provider,
region,
f
)
}
def get_asg_hosts(args, dna_path, logger=None):
if not args['regions']:
return
if not args['aws_access_key_id'] or not args['aws_secret_access_key']:
return
cwd = os.path.realpath(os.getcwd())
asg_path = os.path.join(cwd, args['asg_dna_path'])
asg_dna_files = []
for f in os.listdir(asg_path):
if os.path.isfile(os.path.join(asg_path, f)):
asg_dna_files.append(f)
response = _get_api_response(args, region=None, logger=logger)
if response:
for region in args['regions']:
groups = _group_from_region(response, region)
for group, instances in groups.items():
group_name = group.strip()
if args['use_alternate_databag']:
group_dna_file = _get_group_dna_file(
args['use_alternate_databag'],
asg_dna_files)
else:
group_dna_file = _get_group_dna_file(
group_name,
asg_dna_files)
logger.debug('== [group:{0}] [use_alternate_databag:{1}] [databag:{2}]'.format(
group,
args['use_alternate_databag'],
group_dna_file))
for name, instance in instances.items():
yield name, {
'file': slugify(name.strip()),
'region': region,
'provider': 'AWS',
'private_ip': instance['private_ip_address'],
'public_ip': instance['ip_address'],
'group_name': instance['tags']['aws:autoscaling:groupName'], # noqa
'tags': instance['tags'],
'dna_path': os.path.join(
args['asg_dna_path'],
dna_file_name_from_tags(
args,
group_dna_file.strip(),
instance['tags'])
),
}
else:
for region in args['regions']:
auto_scale_conn = _connection_autoscale(args, region)
conn = _connection_ec2(args, region)
for group in auto_scale_conn.get_all_groups():
instance_ids = [i.instance_id for i in group.instances]
if not instance_ids:
continue
try:
reservations = conn.get_all_instances(instance_ids)
except EC2ResponseError:
continue
group_name = group.name.strip()
if args['use_alternate_databag']:
group_dna_file = _get_group_dna_file(
args['use_alternate_databag'],
asg_dna_files)
else:
group_dna_file = _get_group_dna_file(
group_name,
asg_dna_files)
instances = [i for r in reservations for i in r.instances]
for instance in instances:
name = '{0}-{1}'.format(group_name, instance.id)
yield name, {
'file': slugify(name.strip()),
'region': region,
'provider': 'AWS',
'public_ip': instance.ip_address,
'private_ip': instance['private_ip_address'],
'group_name': instance['tags']['aws:autoscaling:groupName'], # noqa
'tags': instance['tags'],
'dna_path': os.path.join(
args['asg_dna_path'],
dna_file_name_from_tags(
args,
group_dna_file.strip(),
instance['tags'])
),
}
def _group_from_region(response, region):
groups = {}
for group, instances in response.items():
in_region = False
for name, instance in instances.items():
in_region = instance['region'] == region
break
if not in_region:
continue
groups[group] = {}
for name, instance in instances.items():
groups[group][name] = instance
return groups
def _connection_autoscale(args, region):
return boto.ec2.autoscale.connect_to_region(
region,
aws_access_key_id=args['aws_access_key_id'],
aws_secret_access_key=args['aws_secret_access_key'],
)
def _connection_ec2(args, region):
return connect_to_region(
region,
aws_access_key_id=args['aws_access_key_id'],
aws_secret_access_key=args['aws_secret_access_key'],
)
def _get_api_response(args, region=None, logger=None):
if logger is None:
logger = setup_custom_logger('chef-solo-cup', args)
if not args['api_url']:
return None
request_url = '{0}/nodes/group?status={1}'.format(
args['api_url'],
'running'
)
if region is not None:
        request_url = '{0}&region={1}'.format(request_url, region)
request = urllib2.Request(request_url)
has_username = 'api_username' in args
has_password = 'api_password' in args
if has_username and has_password:
base64string = base64.encodestring('{0}:{1}'.format(
args['api_username'], args['api_password']
)).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
result = urllib2.urlopen(request)
if int(result.getcode()) not in [200, 201, 204]:
error = 'Bad response from api'
try:
data = json.loads(result.read())
error = data.get('message', 'Bad response from api')
except:
pass
logger.error(error)
sys.exit(1)
response = None
try:
response = json.loads(result.read())
except ValueError:
logger.error('Invalid json response from api')
sys.exit(1)
groups = response['groups']
if 'None' in groups:
del groups['None']
return groups
def _get_group_dna_file(group_name, asg_dna_files):
group_name = slugify(rchop(group_name, '.json'))
group_dna_file = None
for asg_dna_file in asg_dna_files:
if asg_dna_file == group_name:
group_dna_file = asg_dna_file
break
group_name_json = group_name + '.json'
if asg_dna_file == group_name_json:
group_dna_file = asg_dna_file
break
if not group_dna_file:
for asg_dna_file in asg_dna_files:
if group_name.startswith(asg_dna_file):
group_dna_file = asg_dna_file
break
stripped_asg_dna_file = asg_dna_file.replace('.json', '')
if group_name.startswith(stripped_asg_dna_file):
group_dna_file = asg_dna_file
break
if not group_dna_file:
group_dna_file = group_name
return group_dna_file
def dna_file_name_from_tags(args, dna_file_name, tags):
env_tag = args['environment_tag']
strip_env = args['strip_environment_from_dna_file_run_tag']
tag = args['dna_file_tag']
if not args['use_alternate_databag'] and args['dna_file_tag'] and tags.get(args['dna_file_tag'], None):
dna_file_name = tags.get(tag, None)
if strip_env and env_tag and tags.get(env_tag, None):
environment = tags.get(env_tag, None)
dna_file_name = strip_left(dna_file_name, environment)
dna_file_name = strip_right(dna_file_name, environment)
dna_file_name = dna_file_name.strip('_-')
dna_file_name = rchop(dna_file_name, '.json') + '.json'
return dna_file_name
def strip_right(text, suffix):
if not text.endswith(suffix):
return text
return text[:len(text)-len(suffix)]
def strip_left(text, prefix):
if not text.startswith(prefix):
return text
return text[len(prefix):]
def rsync_project_dry(args, logger=None, **kwargs):
if logger is None:
logger = setup_custom_logger('chef-solo-cup', args)
if args['dry_run']:
logger.info("[RSYNC_PROJECT] From {0} to {1} with opts='{2}' excluding='{3}'".format(kwargs.get('local_dir'), kwargs.get('remote_dir'), kwargs.get('extra_opts'), kwargs.get('exclude'))) # noqa
else:
out = rsync_project(**kwargs)
if out.return_code != 0:
logger.info("[RSYNC_PROJECT] Failed command with status code {0}, please run `chef-solo-cup clean` against this node".format(out.return_code)) # noqa
sys.exit(0)
def run_dry(cmd, args, logger=None):
if logger is None:
logger = setup_custom_logger('chef-solo-cup', args)
if args['dry_run']:
logger.info("[RUN] {0}".format(cmd))
else:
return run(cmd)
def sudo_dry(cmd, args, logger=None):
if logger is None:
logger = setup_custom_logger('chef-solo-cup', args)
if args['dry_run']:
logger.info("[SUDO] {0}".format(cmd))
else:
return sudo(cmd)
def add_line_if_not_present_dry(args, filename, line, run_f=run, logger=None):
if logger is None:
logger = setup_custom_logger('chef-solo-cup', args)
cmd = "grep -q -e '{0}' {1} || echo '{0}' >> {1}".format(line, filename)
if args['dry_run']:
logger.info("[SUDO] {0}".format(cmd))
else:
run_f(cmd)
def filter_hosts(args, hosts, logger=None):
rules = args['blacklist_rules'].get(args['command'])
if not rules:
return hosts
excludes = []
for rule in rules:
if rule.startswith('/') and rule.endswith('/'):
pattern = rule[1:-1]
if not pattern:
continue
excludes.append(pattern)
else:
excludes.append(rule)
excludes = map(lambda x: re.compile(x), excludes)
new_hosts = {}
for host, config in hosts.items():
skip = map(lambda regex: regex.search(host), excludes)
skip = reduce(lambda x, y: x or y, skip)
if skip:
continue
new_hosts[host] = config
return new_hosts
def slugify(text):
if type(text) == unicode:
text = unidecode.unidecode(text)
text = text.strip()
text = text.lower()
text = re.sub(r'[^a-z0-9_-]+', '-', text)
text = re.sub(r'-{2,}', '-', text)
return text
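# Example (illustrative): slugify('My ASG Group.Name') -> 'my-asg-group-name';
# unicode input is transliterated via unidecode before slugging.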
def rchop(s, ending):
if s.endswith(ending):
return s[:-len(ending)]
return s
|
mit
| -8,145,522,986,752,087,000
| 30.273101
| 201
| 0.535259
| false
| 3.862541
| false
| false
| false
|
fishilico/shared
|
python/network/udp_multihome.py
|
1
|
7006
|
#!/usr/bin/env python3
# -*- coding:UTF-8 -*-
# Copyright (c) 2014 Nicolas Iooss
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""This program show how it is possible to run an UDP server with multihoming.
The main issue is to reply to incoming requests with the right source address,
when several ones are available. This is done by using recvmsg/sendmsg
functions instead of recvfrom/sendto which only control the remote address.
This use-case is called "multihoming".
This program has been insipred by OpenVPN source code (src/openvpn/socket.c)
@author: Nicolas Iooss
@license: MIT
"""
import argparse
import logging
import os
import socket
import struct
import sys
logger = logging.getLogger(__name__)
# Check feature availability (need python>=3.3)
if not hasattr(socket.socket, 'recvmsg'):
raise NotImplementedError("socket.recvmsg() not found (need Python >= 3.3)")
# Define some system-specific constants
if sys.platform.startswith('linux'):
if not hasattr(socket, 'IP_PKTINFO'):
socket.IP_PKTINFO = 8
if not hasattr(socket, 'IPV6_RECVPKTINFO'):
socket.IPV6_RECVPKTINFO = 49
if not hasattr(socket, 'IPV6_PKTINFO'):
socket.IPV6_PKTINFO = 50
if not hasattr(socket, 'SO_BINDTODEVICE'):
socket.SO_BINDTODEVICE = 25
elif os.name == 'nt':
if not hasattr(socket, 'IP_RECVDSTADDR'):
socket.IP_RECVDSTADDR = 25
if not hasattr(socket, 'IPV6_RECVDSTADDR'):
socket.IPV6_RECVDSTADDR = 25
else:
raise Exception("Unsupported system")
def main(argv=None):
parser = argparse.ArgumentParser(description="Simple multihomed UDP server")
parser.add_argument('-p', '--port', type=int, default=4242,
help="UDP port to be used (default: 4242)")
parser.add_argument('-w', '--wait', action='store_true',
help="wait for connections instead of creating one")
group = parser.add_mutually_exclusive_group()
group.add_argument('-4', '--ipv4', action='store_true',
help="create an IPv4-only socket")
group.add_argument('-6', '--ipv6', action='store_true',
help="create an IPv6-only socket")
args = parser.parse_args(argv)
# Compute local variables
af = socket.AF_INET if args.ipv4 else socket.AF_INET6
localaddr = '127.0.0.1' if args.ipv4 else '::1'
anyaddr = '0.0.0.0' if args.ipv4 else '::'
port = args.port if args.port > 0 else 4242
# Create and configure socket for multihoming
skserver = socket.socket(af, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
if not args.ipv6:
if hasattr(socket, 'IP_PKTINFO'):
skserver.setsockopt(socket.SOL_IP, socket.IP_PKTINFO, 1)
elif hasattr(socket, 'IP_RECVDSTADDR'):
skserver.setsockopt(socket.IPPROTO_IP, socket.IP_RECVDSTADDR, 1)
if not args.ipv4:
if hasattr(socket, 'IPV6_RECVPKTINFO'):
skserver.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_RECVPKTINFO, 1)
elif hasattr(socket, 'IPV6_RECVDSTADDR'):
skserver.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_RECVDSTADDR, 1)
if not args.ipv4:
skserver.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, args.ipv6)
# Listen
if args.wait:
listenaddr = anyaddr
elif args.ipv6 or args.ipv4:
listenaddr = localaddr
else:
# To protect dual-stack listen, bind the socket to the loopback interface
listenaddr = anyaddr
try:
skserver.setsockopt(socket.SOL_SOCKET, socket.SO_BINDTODEVICE, b'lo\0')
except PermissionError as exc:
logger.warning("Unable to bind to loopback interface: %s", exc)
ainfos = socket.getaddrinfo(listenaddr, port, af, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
skserver.bind(ainfos[0][4])
if args.wait:
logger.info("Waiting for a connection on UDP port %d.", port)
else:
# Create a client socket, which uses IPv4-in-IPv6 if enabled
clientaf = socket.AF_INET if not args.ipv6 else socket.AF_INET6
clientdstaddr = '127.0.0.1' if not args.ipv6 else '::1'
skclient = socket.socket(clientaf, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
skclient.sendto(b'Hello, world!', (clientdstaddr, port))
# Receive an incoming packet
(msg, ancdata, _, clientaddrport) = skserver.recvmsg(1024, socket.CMSG_SPACE(100))
assert args.wait or msg == b'Hello, world!' # Check the socket channel
dst_addr = None
ifindex = None
for cmsg_level, cmsg_type, cmsg_data in ancdata:
if cmsg_level == socket.SOL_IP and hasattr(socket, 'IP_PKTINFO') and cmsg_type == socket.IP_PKTINFO:
# struct in_pktinfo { int ipi_ifindex; struct in_addr ipi_spec_dst, ipi_addr; };
assert len(cmsg_data) == 12
dst_addr = socket.inet_ntop(socket.AF_INET, cmsg_data[4:8])
ifindex = struct.unpack('I', cmsg_data[:4])[0]
elif cmsg_level == socket.IPPROTO_IPV6 and hasattr(socket, 'IPV6_PKTINFO') and cmsg_type == socket.IPV6_PKTINFO:
# struct in6_pktinfo { struct in6_addr ipi6_addr; int ipi_ifindex; };
assert len(cmsg_data) == 20
dst_addr = socket.inet_ntop(socket.AF_INET6, cmsg_data[:16])
ifindex = struct.unpack('I', cmsg_data[16:20])[0]
else:
logger.warning("Unknown anciliary data: %s, %s, %r", cmsg_level, cmsg_type, cmsg_data)
# TODO: decode IP_RECVDSTADDR/IPV6_RECVDSTADDR
text = "Received UDP packet from {0[0]} port {0[1]}".format(clientaddrport)
if dst_addr is not None:
text += " to {} port {} interface {}".format(dst_addr, port, ifindex)
logger.info(text)
# Send back a reply with the same ancillary data
skserver.sendmsg([b'Bye!\n'], ancdata, 0, clientaddrport)
skserver.close()
if not args.wait:
skclient.close()
return 0
if __name__ == '__main__':
logging.basicConfig(format='[%(levelname)s] %(message)s', level=logging.DEBUG)
sys.exit(main())
|
mit
| -7,359,975,875,040,569,000
| 42.246914
| 120
| 0.672566
| false
| 3.51002
| false
| false
| false
|
Durandaul/BounceBack_Script
|
prev_versions/bouncev1.py
|
1
|
2827
|
import imaplib
import re
import email as emlib
import json
import csv
mail = imaplib.IMAP4_SSL('imap.gmail.com')
regexMessage = b'Delivery to the following recipient failed permanently:\s{1,}.+\s'
find_bounce_back_message = re.compile(regexMessage)
regexEmail = b'.{1,}@.+'
find_email = re.compile(regexEmail)
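# Illustrative matches (hypothetical message text): the first pattern grabs a
# block like "Delivery to the following recipient failed permanently:\n
# user@example.com\n" and the second pulls the "user@example.com" line from it.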
def multipart_detector(maintype):
if maintype == 'multipart':
for part in email_message_instance.get_payload():
if part.get_content_maintype() == 'text':
return part.get_payload()
elif maintype == 'text':
return email_message_instance.get_payload()
def ok_detector(result, data):
if result == 'OK':
_data = data[0].split()
try:
rfc822 = [mail.uid('fetch', uid, '(RFC822)') for uid in _data]
print "Retrieved UIDs"
_data = [uid[1] for uid in rfc822]
return _data
except Exception as e:
print "Error Occured"
print e
def main(password):
_login = "max.humphrey@gmail.com"
mail.login(_login,password)
mail.select("BounceBack")
_result, _data = mail.uid('search', None, "ALL")
_data = ok_detector(_result, _data)
#_sender_message = [] Fix so that it's the message and the email
    _results = []
_errors, _success = 0,0
for email in _data:
_email_response = emlib.message_from_string(email[0][1])
_email_response = str(_email_response)
try:
_find_delivery_failed_message = find_bounce_back_message.search(_email_response)
_delivery_failed = str(_find_delivery_failed_message.group())
print "Found Match"
try:
_email_address = find_email.search(_delivery_failed)
_email_address = _email_address.group()
_email_address_stringified =str(_email_address)
_email_address_stringified = _email_address_stringified.strip()
print _email_address_stringified
_results.append(_email_address_stringified)
_success += 1
except AttributeError as e:
print "Couldn't find Email in string"
except AttributeError as e :
pass
if _results != None:
_results_size = len(_results)
with open('BounceBackNames.csv', 'wb') as csvfile:
output = csv.writer(csvfile, delimiter=' ')
output.writerow('Email Address:')
output.writerows(_results)
else:
print " Uh... Talk to Max I guess?"
if __name__ == '__main__':
with open('mySecret.json', 'rb') as jsonInFile:
try:
password =json.load(jsonInFile)['password']
print "Password Retrievel successful"
except Exception as e:
print e
main(password)
|
unlicense
| -7,139,973,591,972,969,000
| 29.73913
| 92
| 0.585426
| false
| 3.835821
| false
| false
| false
|
analyst-collective/dbt
|
plugins/postgres/dbt/adapters/postgres/connections.py
|
1
|
5388
|
from contextlib import contextmanager
import psycopg2
import dbt.exceptions
from dbt.adapters.base import Credentials
from dbt.adapters.sql import SQLConnectionManager
from dbt.contracts.connection import AdapterResponse
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.helper_types import Port
from dataclasses import dataclass
from typing import Optional
@dataclass
class PostgresCredentials(Credentials):
host: str
user: str
port: Port
password: str # on postgres the password is mandatory
role: Optional[str] = None
search_path: Optional[str] = None
keepalives_idle: int = 0 # 0 means to use the default value
sslmode: Optional[str] = None
_ALIASES = {
'dbname': 'database',
'pass': 'password'
}
@property
def type(self):
return 'postgres'
def _connection_keys(self):
return ('host', 'port', 'user', 'database', 'schema', 'search_path',
'keepalives_idle', 'sslmode')
class PostgresConnectionManager(SQLConnectionManager):
TYPE = 'postgres'
@contextmanager
def exception_handler(self, sql):
try:
yield
except psycopg2.DatabaseError as e:
logger.debug('Postgres error: {}'.format(str(e)))
try:
self.rollback_if_open()
except psycopg2.Error:
logger.debug("Failed to release connection!")
pass
raise dbt.exceptions.DatabaseException(str(e).strip()) from e
except Exception as e:
logger.debug("Error running SQL: {}", sql)
logger.debug("Rolling back transaction.")
self.rollback_if_open()
if isinstance(e, dbt.exceptions.RuntimeException):
# during a sql query, an internal to dbt exception was raised.
# this sounds a lot like a signal handler and probably has
# useful information, so raise it without modification.
raise
raise dbt.exceptions.RuntimeException(e) from e
@classmethod
def open(cls, connection):
if connection.state == 'open':
logger.debug('Connection is already open, skipping open.')
return connection
credentials = cls.get_credentials(connection.credentials)
kwargs = {}
# we don't want to pass 0 along to connect() as postgres will try to
# call an invalid setsockopt() call (contrary to the docs).
if credentials.keepalives_idle:
kwargs['keepalives_idle'] = credentials.keepalives_idle
# psycopg2 doesn't support search_path officially,
# see https://github.com/psycopg/psycopg2/issues/465
search_path = credentials.search_path
if search_path is not None and search_path != '':
# see https://postgresql.org/docs/9.5/libpq-connect.html
kwargs['options'] = '-c search_path={}'.format(
search_path.replace(' ', '\\ '))
if credentials.sslmode:
kwargs['sslmode'] = credentials.sslmode
try:
handle = psycopg2.connect(
dbname=credentials.database,
user=credentials.user,
host=credentials.host,
password=credentials.password,
port=credentials.port,
connect_timeout=10,
**kwargs)
if credentials.role:
handle.cursor().execute('set role {}'.format(credentials.role))
connection.handle = handle
connection.state = 'open'
except psycopg2.Error as e:
logger.debug("Got an error when attempting to open a postgres "
"connection: '{}'"
.format(e))
connection.handle = None
connection.state = 'fail'
raise dbt.exceptions.FailedToConnectException(str(e))
return connection
def cancel(self, connection):
connection_name = connection.name
try:
pid = connection.handle.get_backend_pid()
except psycopg2.InterfaceError as exc:
# if the connection is already closed, not much to cancel!
if 'already closed' in str(exc):
logger.debug(
f'Connection {connection_name} was already closed'
)
return
# probably bad, re-raise it
raise
sql = "select pg_terminate_backend({})".format(pid)
logger.debug("Cancelling query '{}' ({})".format(connection_name, pid))
_, cursor = self.add_query(sql)
res = cursor.fetchone()
logger.debug("Cancel query '{}': {}".format(connection_name, res))
@classmethod
def get_credentials(cls, credentials):
return credentials
@classmethod
def get_response(cls, cursor) -> AdapterResponse:
message = str(cursor.statusmessage)
rows = cursor.rowcount
status_message_parts = message.split() if message is not None else []
        status_message_strings = [
            part
            for part in status_message_parts
            if not part.isdigit()
        ]
        code = ' '.join(status_message_strings)
return AdapterResponse(
_message=message,
code=code,
rows_affected=rows
)
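# Illustrative example of what get_response() produces (values are hypothetical):
# a cursor with statusmessage 'INSERT 0 7' and rowcount 7 would yield
# AdapterResponse(_message='INSERT 0 7', code='INSERT', rows_affected=7).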
|
apache-2.0
| -9,168,342,729,262,479,000
| 31.654545
| 79
| 0.591128
| false
| 4.573854
| false
| false
| false
|
onia/pygobject
|
demos/gtk-demo/demos/Icon View/iconviewbasics.py
|
1
|
7312
|
#!/usr/bin/env python
# -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
#
# Copyright (C) 2010 Red Hat, Inc., John (J5) Palmieri <johnp@redhat.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
title = "Icon View Basics"
description = """The GtkIconView widget is used to display and manipulate
icons. It uses a GtkTreeModel for data storage, so the list store example might
be helpful. We also use the Gio.File API to get the icons for each file type.
"""
import os
from gi.repository import GLib, Gio, GdkPixbuf, Gtk
class IconViewApp:
(COL_PATH,
COL_DISPLAY_NAME,
COL_PIXBUF,
COL_IS_DIRECTORY,
NUM_COLS) = list(range(5))
def __init__(self, demoapp):
self.pixbuf_lookup = {}
self.demoapp = demoapp
self.window = Gtk.Window()
self.window.set_title('Gtk.IconView demo')
self.window.set_default_size(650, 400)
self.window.connect('destroy', Gtk.main_quit)
vbox = Gtk.VBox()
self.window.add(vbox)
tool_bar = Gtk.Toolbar()
vbox.pack_start(tool_bar, False, False, 0)
up_button = Gtk.ToolButton(stock_id=Gtk.STOCK_GO_UP)
up_button.set_is_important(True)
up_button.set_sensitive(False)
tool_bar.insert(up_button, -1)
home_button = Gtk.ToolButton(stock_id=Gtk.STOCK_HOME)
home_button.set_is_important(True)
tool_bar.insert(home_button, -1)
sw = Gtk.ScrolledWindow()
sw.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
sw.set_policy(Gtk.PolicyType.AUTOMATIC,
Gtk.PolicyType.AUTOMATIC)
vbox.pack_start(sw, True, True, 0)
# create the store and fill it with content
self.parent_dir = '/'
store = self.create_store()
self.fill_store(store)
icon_view = Gtk.IconView(model=store)
icon_view.set_selection_mode(Gtk.SelectionMode.MULTIPLE)
sw.add(icon_view)
# connect to the 'clicked' signal of the "Up" tool button
up_button.connect('clicked', self.up_clicked, store)
# connect to the 'clicked' signal of the "home" tool button
home_button.connect('clicked', self.home_clicked, store)
self.up_button = up_button
self.home_button = home_button
# we now set which model columns that correspond to the text
# and pixbuf of each item
icon_view.set_text_column(self.COL_DISPLAY_NAME)
icon_view.set_pixbuf_column(self.COL_PIXBUF)
# connect to the "item-activated" signal
icon_view.connect('item-activated', self.item_activated, store)
icon_view.grab_focus()
self.window.show_all()
def sort_func(self, store, a_iter, b_iter, user_data):
(a_name, a_is_dir) = store.get(a_iter,
self.COL_DISPLAY_NAME,
self.COL_IS_DIRECTORY)
(b_name, b_is_dir) = store.get(b_iter,
self.COL_DISPLAY_NAME,
self.COL_IS_DIRECTORY)
if a_name is None:
a_name = ''
if b_name is None:
b_name = ''
if (not a_is_dir) and b_is_dir:
return 1
elif a_is_dir and (not b_is_dir):
return -1
elif a_name > b_name:
return 1
elif a_name < b_name:
return -1
else:
return 0
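    # In effect sort_func() lists directories ahead of plain files and sorts
    # each group alphabetically by display name (illustrative summary).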
def up_clicked(self, item, store):
self.parent_dir = os.path.split(self.parent_dir)[0]
self.fill_store(store)
# de-sensitize the up button if we are at the root
self.up_button.set_sensitive(self.parent_dir != '/')
def home_clicked(self, item, store):
self.parent_dir = GLib.get_home_dir()
self.fill_store(store)
# Sensitize the up button
self.up_button.set_sensitive(True)
def item_activated(self, icon_view, tree_path, store):
iter_ = store.get_iter(tree_path)
(path, is_dir) = store.get(iter_, self.COL_PATH, self.COL_IS_DIRECTORY)
if not is_dir:
return
self.parent_dir = path
self.fill_store(store)
self.up_button.set_sensitive(True)
def create_store(self):
store = Gtk.ListStore(str, str, GdkPixbuf.Pixbuf, bool)
# set sort column and function
store.set_default_sort_func(self.sort_func)
store.set_sort_column_id(-1, Gtk.SortType.ASCENDING)
return store
def file_to_icon_pixbuf(self, path):
pixbuf = None
# get the theme icon
f = Gio.file_new_for_path(path)
info = f.query_info(Gio.FILE_ATTRIBUTE_STANDARD_ICON,
Gio.FileQueryInfoFlags.NONE,
None)
gicon = info.get_icon()
# check to see if it is an image format we support
for GdkPixbufFormat in GdkPixbuf.Pixbuf.get_formats():
for mime_type in GdkPixbufFormat.get_mime_types():
content_type = Gio.content_type_from_mime_type(mime_type)
if content_type is not None:
break
#TODO: Here 'content_type' could be None, need to fix
try:
format_gicon = Gio.content_type_get_icon(content_type)
if format_gicon.equal(gicon):
gicon = f.icon_new()
break
except:
pass
if gicon in self.pixbuf_lookup:
return self.pixbuf_lookup[gicon]
if isinstance(gicon, Gio.ThemedIcon):
names = gicon.get_names()
icon_theme = Gtk.IconTheme.get_default()
for name in names:
try:
pixbuf = icon_theme.load_icon(name, 64, 0)
break
except GLib.GError:
pass
self.pixbuf_lookup[gicon] = pixbuf
elif isinstance(gicon, Gio.FileIcon):
icon_file = gicon.get_file()
path = icon_file.get_path()
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(path, 72, 72)
self.pixbuf_lookup[gicon] = pixbuf
return pixbuf
def fill_store(self, store):
store.clear()
for name in os.listdir(self.parent_dir):
path = os.path.join(self.parent_dir, name)
is_dir = os.path.isdir(path)
pixbuf = self.file_to_icon_pixbuf(path)
store.append((path, name, pixbuf, is_dir))
def main(demoapp=None):
IconViewApp(demoapp)
Gtk.main()
if __name__ == '__main__':
main()
|
lgpl-2.1
| -6,643,193,018,444,488,000
| 31.789238
| 79
| 0.58657
| false
| 3.630586
| false
| false
| false
|
henry-ngo/VIP
|
vip_hci/preproc/subsampling.py
|
1
|
4966
|
#! /usr/bin/env python
"""
Module with pixel and frame subsampling functions.
"""
from __future__ import division
from __future__ import print_function
__author__ = 'C. Gomez @ ULg'
__all__ = ['cube_collapse',
'cube_subsample',
'cube_subsample_trimmean']
import numpy as np
def cube_collapse(cube, mode='median', n=50):
""" Collapses a cube into a frame (3d array to 2d array) depending on the
parameter *mode*. It's possible to perform a trimmed mean combination of the
frames based on description in Brandt+ 2012.
Parameters
----------
cube : array_like
Cube.
mode : {'median', 'mean', 'sum', 'trimmean'}, str optional
Sets the way of collapsing the images in the cube.
n : int, optional
Sets the discarded values at high and low ends. When n = N is the same
as taking the mean, when n = 1 is like taking the median.
Returns
-------
frame : array_like
Output array, cube combined.
"""
arr = cube
if not arr.ndim == 3:
raise TypeError('The input array is not a cube or 3d array.')
if mode=='mean':
frame = np.mean(arr, axis=0)
elif mode=='median':
frame = np.median(arr, axis=0)
elif mode=='sum':
frame = np.sum(arr, axis=0)
elif mode=='trimmean':
N = arr.shape[0]
if N % 2 == 0:
k = (N - n)//2
else:
k = (N - n)/2
frame = np.empty_like(arr[0])
for index, _ in np.ndenumerate(arr[0]):
sort = np.sort(arr[:,index[0],index[1]])
frame[index] = np.mean(sort[k:N-k])
return frame
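# Illustrative usage (shapes and mode are hypothetical):
#   cube = np.random.rand(20, 64, 64)
#   frame = cube_collapse(cube, mode='trimmean', n=10)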
def cube_subsample(array, n, mode="mean", parallactic=None, verbose=True):
"""Mean/Median combines frames in cube with window n.
Parameters
----------
n : int
Window for mean/median.
array : array_like
Input 3d array, cube.
mode : {'mean','median'}
Switch for choosing mean or median.
parallactic: array_like
List of corresponding parallactic angles.
Returns
-------
arr_view : array_like
Resulting array.
angles : array_like
Parallactic angles.
"""
if not array.ndim == 3:
raise TypeError('The input array is not a cube or 3d array.')
m = int(array.shape[0]/n)
resid = array.shape[0]%n
y = array.shape[1]
x = array.shape[2]
arr = np.empty([m, y, x])
if parallactic is not None:
angles = np.zeros(m)
if mode == 'median': func = np.median
elif mode=='mean': func = np.mean
else:
raise ValueError('Mode should be either Mean or Median.')
for i in range(m):
arr[i, :, :] = func(array[:n, :, :], axis=0)
if parallactic is not None: angles[i] = func(parallactic[:n])
if i >= 1:
arr[i, :, :] = func(array[n*i:n*i+n, :, :], axis=0)
if parallactic is not None:
angles[i] = func(parallactic[n*i:n*i+n])
if verbose:
print("Datacube subsampled by taking the {:} of {:} frames".format(mode ,n))
if resid > 0:
msg = "Initial # of frames and window are not multiples ({:} frames were dropped)"
print(msg.format(resid))
print("New cube contains {:} frames".format(m))
if parallactic is not None:
return arr, angles
else:
return arr
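# Illustrative usage (window size is hypothetical): collapsing every 5 frames
# with the median and averaging the matching angles:
#   sub_cube, sub_angles = cube_subsample(cube, 5, mode='median', parallactic=angles)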
def cube_subsample_trimmean(arr, n, m):
"""Performs a trimmed mean combination every m frames in a cube. Based on
description in Brandt+ 2012.
Parameters
----------
arr : array_like
Cube.
n : int
Sets the discarded values at high and low ends. When n = N is the same
as taking the mean, when n = 1 is like taking the median.
m : int
Window from the trimmed mean.
Returns
-------
arr_view : array_like
Output array, cube combined.
"""
if not arr.ndim == 3:
raise TypeError('The input array is not a cube or 3d array.')
num = int(arr.shape[0]/m)
res = int(arr.shape[0]%m)
y = arr.shape[1]
x = arr.shape[2]
arr2 = np.empty([num+2, y, x])
for i in range(num):
arr2[0] = cube_collapse(arr[:m, :, :], 'trimmean', n)
if i > 0:
arr2[i] = cube_collapse(arr[m*i:m*i+m, :, :], 'trimmean', n)
arr2[num] = cube_collapse(arr[-res:, :, :], 'trimmean', n)
arr_view = arr2[:num+1] # slicing until m+1 - last index not included
print("\nDone trimmed mean over FITS-Cube with window m=" + str(m))
return arr_view
|
mit
| -7,150,221,523,624,467,000
| 31.03871
| 126
| 0.517519
| false
| 3.745098
| false
| false
| false
|
Marto32/pyetherscan
|
tests/test_response.py
|
1
|
1328
|
"""
Tests related to response objects.
"""
import unittest
import requests
from pyetherscan import client, response, error
class FakeResponse(requests.Response):
"""Fake instance of a Response object"""
def __init__(self, status_code, text):
requests.Response.__init__(self)
self.status_code = status_code
self._text = text
@property
def text(self):
return self._text
class BaseResponseTestCase(unittest.TestCase):
def setUp(self):
self.client = client.Client()
def base_request_error(self, code, text):
"""Abstract testing for request errors"""
resp = FakeResponse(code, text)
with self.assertRaises(error.EtherscanRequestError):
response.SingleAddressBalanceResponse(resp)
class TestInitializationResponses(BaseResponseTestCase):
def test_rate_limit_error(self):
self.base_request_error(403, '')
def test_invalid_request(self):
self.base_request_error(200, '')
def test_bad_code_error(self):
self.base_request_error(405, '')
def test_data_error(self):
text = "{\"message\":\"NOTOK\", \"result\":\"Error!\"}"
resp = FakeResponse(200, text)
with self.assertRaises(error.EtherscanDataError):
response.SingleAddressBalanceResponse(resp)
|
mit
| 777,112,246,740,399,900
| 24.538462
| 63
| 0.655873
| false
| 4.061162
| true
| false
| false
|
DREAM-ODA-OS/tools
|
imgproc/img/geotiff.py
|
1
|
3410
|
#!/usr/bin/env python
#-------------------------------------------------------------------------------
#
# GeoTIFF creation subroutine
#
# Author: Martin Paces <martin.paces@eox.at>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2016 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------
from osgeo import gdal; gdal.UseExceptions() #pylint: disable=multiple-statements
from .file_io import ImageFileWriter, DT2GDT
from .util import FormatOptions
# default GeoTIFF file-options
DEF_GEOTIFF_FOPT = {
"TILED": "YES",
"BLOCKXSIZE": "256",
"BLOCKYSIZE": "256",
"COMPRESS": "NONE",
}
def make_gcp(x, y, z, p, l, id_, info=""):
""" Construct GDAL Ground Control Point. """
#pylint: disable=invalid-name, too-many-arguments
return gdal.GCP(x, y, z, p, l, info, str(id_))
def clone_gcp(gcp):
""" Clone GCP object. """
return gdal.GCP(
gcp.GCPX, gcp.GCPY, gcp.GCPZ, gcp.GCPPixel, gcp.GCPLine, gcp.Info, gcp.Id
)
def create_geotiff(path, dtype, nrow, ncol, nband=1, proj=None,
geotrn=None, gcps=None, nodata=None, options=None):
""" Create a GeoTIFF image and return an instance of the ImageFileWriter
class to access this file.
"""
#pylint: disable=too-many-arguments, too-many-locals
# sanitize the inputs
nrow = max(0, int(nrow))
ncol = max(0, int(ncol))
nband = max(1, int(nband))
if options is None:
options = FormatOptions(DEF_GEOTIFF_FOPT).options
# convert type to gdal type
try:
gdal_dtype = DT2GDT[dtype]
except KeyError:
raise ValueError("Unsupported data type! %r" % dtype)
# get GDAL Driver
driver = gdal.GetDriverByName("GTiff")
# create TIFF image
dataset = driver.Create(path, ncol, nrow, nband, gdal_dtype, options)
if proj and geotrn:
# set geo-transformation
dataset.SetProjection(proj)
dataset.SetGeoTransform(geotrn)
elif proj and gcps:
# copy ground control points (a.k.a. tie-points)
dataset.SetGCPs([clone_gcp(gcp) for gcp in gcps], proj)
# create image object
writer = ImageFileWriter(dataset)
#copy no-data value(s)
if nodata is not None:
writer.nodata = nodata
return writer
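# Illustrative usage (path, dtype key and geo-transform are hypothetical; the
# accepted dtype strings depend on the DT2GDT mapping in .file_io):
#   writer = create_geotiff('/tmp/out.tif', 'uint16', nrow=1024, ncol=1024,
#                           nband=3, proj=wkt, geotrn=(0.0, 1.0, 0.0, 0.0, 0.0, -1.0))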
|
mit
| 48,940,018,558,685,450
| 33.795918
| 81
| 0.642522
| false
| 3.82287
| false
| false
| false
|
mathiasertl/django-xmpp-server-list
|
account/auth_urls.py
|
1
|
1456
|
# This file is part of django-xmpp-server-list
# (https://github.com/mathiasertl/django-xmpp-server-list)
#
# django-xmpp-server-list is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# xmppllist is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with django-xmpp-server-list. If not, see <http://www.gnu.org/licenses/>.
from django.urls import path
from . import views
# auth views are included in a separate urls file to exclude the namespace (the url names are referenced
# throughout Django's handy predefined templates).
urlpatterns = [
path('password_change/', views.PasswordChangeView.as_view(), name='password_change'),
path('password_reset/', views.PasswordResetView.as_view(), name='password_reset'),
path('password_reset/done/', views.PasswordResetDoneView.as_view(), name='password_reset_done'),
path('reset/<uidb64>/<token>/', views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'),
path('reset/done/', views.PasswordResetCompleteView.as_view(), name='password_reset_complete'),
]
|
gpl-3.0
| -7,956,175,443,058,019,000
| 49.206897
| 109
| 0.752747
| false
| 3.841689
| false
| false
| false
|
rsmuc/health_monitoring_plugins
|
health_monitoring_plugins/trustedfilter.py
|
1
|
5728
|
# Copyright (C) 2017-2019 rsmuc <rsmuc@sec-dev.de>
# This file is part of "Health Monitoring Plugins".
# "Health Monitoring Plugins" is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# "Health Monitoring Plugins" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with "Health Monitoring Plugins". If not, see <https://www.gnu.org/licenses/>.
from pynag.Plugins import PluginHelper,ok,warning,critical,unknown
import health_monitoring_plugins
# these dicts / definitions we need to get human readable values
states = {
1: 'ok',
2: 'failed',
3: 'absent',
4: 'AC off'
}
activity = {
1: 'standby',
2: 'active',
3: 'error'
}
# OIDs
activity_oid = '.1.3.6.1.4.1.2566.107.41.1.0' # tfDeviceActivityState
logfill_oid = '.1.3.6.1.4.1.2566.107.31.2.1.0' # slIpStatusLogFillLevel
ps1_oid = '.1.3.6.1.4.1.2566.107.31.2.2.0' # slIpStatusPowerSupplyUnit1
ps2_oid = '.1.3.6.1.4.1.2566.107.31.2.3.0' # slIpStatusPowerSupplyUnit2
fan1_oid = '.1.3.6.1.4.1.2566.107.31.2.4.0' # slIpStatusPowerFanUnit1
fan2_oid = '.1.3.6.1.4.1.2566.107.31.2.5.0' # slIpStatusPowerFanUnit2
bat_oid = '.1.3.6.1.4.1.2566.107.31.2.7.0' # slIpStatusInternalVoltage
temp_oid = '.1.3.6.1.4.1.2566.107.31.2.8.0' # slIpStatusInternalTemperature
activity_oid = '.1.3.6.1.4.1.2566.107.41.1.0' # tfDeviceActivityState
class TrustedFilter(object):
def __init__(self, helper, snmp1, snmp2=None):
self.helper = helper
self.snmp1 = snmp1
self.snmp2 = snmp2
def get_snmp_from_host1(self):
"""
Get SNMP values from 1st host.
"""
response = self.snmp1.get_oids(ps1_oid, ps2_oid, fan1_oid, fan2_oid, bat_oid, temp_oid, activity_oid, logfill_oid)
self.ps1_value = states[int(response[0])]
self.ps2_value = states[int(response[1])]
self.fan1_value = states[int(response[2])]
self.fan2_value = states[int(response[3])]
self.bat_value = states[int(response[4])]
self.temp_value = states[int(response[5])]
self.activity_value1 = activity[int(response[6])]
self.logfill_value = str(response[7])
def get_snmp_from_host2(self):
"""
Get SNMP values from 2nd host.
"""
if not self.snmp2:
self.activity_value2 = None
else:
response = self.snmp2.get_oids(activity_oid)
self.activity_value2 = activity[int(response[0])]
def check(self):
"""
Evaluate health status from device parameters.
"""
try:
self.get_snmp_from_host1()
self.get_snmp_from_host2()
except (health_monitoring_plugins.SnmpException, TypeError, KeyError):
self.helper.status(unknown)
self.helper.add_summary("SNMP response incomplete or invalid")
return
self.helper.add_summary("Filter Status")
self.helper.add_long_output("Power Supply 1: %s" % self.ps1_value)
if self.ps1_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Power Supply 1: %s" % self.ps1_value)
self.helper.add_long_output("Power Supply 2: %s" % self.ps2_value)
if self.ps2_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Power Supply 2: %s" % self.ps2_value)
self.helper.add_long_output("Fan 1: %s" % self.fan1_value)
if self.fan1_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Fan 1: %s" % self.fan1_value)
self.helper.add_long_output("Fan 2: %s" % self.fan2_value)
if self.fan2_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Fan 2: %s" % self.fan2_value)
self.helper.add_long_output("Battery: %s" % self.bat_value)
if self.bat_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Battery: %s" % self.bat_value)
self.helper.add_long_output("Temperature: %s" % self.temp_value)
if self.temp_value != "ok":
self.helper.status(critical)
self.helper.add_summary("Temperature: %s" % self.temp_value)
self.helper.add_metric(label='logfill',value=self.logfill_value, uom="%%")
self.helper.add_long_output("Fill Level internal log: %s%%" % self.logfill_value)
self.helper.add_long_output("Activity State: %s" % self.activity_value1)
if self.activity_value1 == "error":
self.helper.status(critical)
self.helper.add_summary("Activity State: %s" % self.activity_value1)
if self.activity_value2:
self.helper.add_long_output("Activity State 2: %s" % self.activity_value2)
if self.activity_value1 == "active" and self.activity_value2 == "active":
self.helper.status(critical)
self.helper.add_summary("Filter 1 and Filter 2 active!")
if self.activity_value1 == "standby" and self.activity_value2 == "standby":
self.helper.status(critical)
self.helper.add_summary("Filter 1 and Filter 2 standby!")
self.helper.check_all_metrics()
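# Illustrative wiring from a calling plugin script (the helper and SNMP session
# objects are assumed to be created elsewhere):
#   tf = TrustedFilter(helper, snmp_session_primary, snmp_session_secondary)
#   tf.check()
#   helper.exit()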
|
gpl-2.0
| 8,863,428,745,276,231,000
| 40.507246
| 122
| 0.615922
| false
| 3.225225
| false
| false
| false
|
OmnesRes/onco_lnc
|
mrna/cox/STAD/cox_regression.py
|
1
|
10031
|
## A script for finding every cox coefficient and pvalue for every mRNA in STAD Tier 3 data downloaded Jan. 5th, 2016
from rpy2 import robjects as ro
import numpy as np
import os
ro.r('library(survival)')
import re
##This call will only work if you are running python from the command line.
##If you are not running from the command line manually type in your paths.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
## Read the follow up data
## A patient can be listed multiple times in the file. The most recent listing (furthest down in the file), contains the most recent
## follow up data. This code checks if the patient has already been loaded into the list, and if so, takes the more recent data.
## This required an empty value in the list initialization.
## Data is: [[Patient ID, time(days), Vital status],[Patient ID, time(days), Vital status],...]
f=open(os.path.join(BASE_DIR,'tcga_data','STAD','clinical','nationwidechildrens.org_clinical_follow_up_v1.0_stad.txt'))
##get the column indexes needed
columns=f.readline().split('\t')
patient_column=columns.index('bcr_patient_barcode')
alive_column=columns.index('last_contact_days_to')
death_column=columns.index('death_days_to')
f.readline()
f.readline()
data=[i.split('\t') for i in f]
clinical1=[['','','']]
for i in data:
if clinical1[-1][0]==i[patient_column]:
if re.search('^[0-9]+$',i[death_column]):
clinical1[-1]=[i[patient_column],int(i[death_column]),'Dead']
elif re.search('^[0-9]+$',i[alive_column]):
clinical1[-1]=[i[patient_column],int(i[alive_column]),'Alive']
else:
pass
else:
if re.search('^[0-9]+$',i[death_column]):
clinical1.append([i[patient_column],int(i[death_column]),'Dead'])
elif re.search('^[0-9]+$',i[alive_column]):
clinical1.append([i[patient_column],int(i[alive_column]),'Alive'])
else:
pass
## Removing the empty value.
clinical=clinical1[1:]
## Grade, sex and age information were taken from the "clinical_patient" file. A dictionary was created for grade and sex.
more_clinical={}
grade_dict={}
grade_dict['G1']=1
grade_dict['G2']=2
grade_dict['G3']=3
sex_dict={}
sex_dict['MALE']=0
sex_dict['FEMALE']=1
## The "clinical_patient" file can also contain patients not listed in the follow_up files.
## In these cases the clinical data for these patients gets appended to a new clinical list.
f=open(os.path.join(BASE_DIR,'tcga_data','STAD','clinical','nationwidechildrens.org_clinical_patient_stad.txt'))
##get the column indexes needed
columns=f.readline().split('\t')
grade_column=columns.index('tumor_grade')
sex_column=columns.index('gender')
age_column=columns.index('age_at_initial_pathologic_diagnosis')
patient_column=columns.index('bcr_patient_barcode')
alive_column=columns.index('last_contact_days_to')
death_column=columns.index('death_days_to')
f.readline()
f.readline()
data=[i.split('\t') for i in f]
clinical4=[]
for i in data:
try:
more_clinical[i[patient_column]]=[grade_dict[i[grade_column]],sex_dict[i[sex_column]],int(i[age_column])]
if re.search('^[0-9]+$',i[death_column]):
clinical4.append([i[patient_column],int(i[death_column]),'Dead'])
elif re.search('^[0-9]+$',i[alive_column]):
clinical4.append([i[patient_column],int(i[alive_column]),'Alive'])
else:
pass
except:
pass
new_clinical=[]
##It is possible that the clinical data in the clinical_patient file is more up to date than the follow_up files
##All the clinical data is merged checking which data is the most up to date
for i in clinical4:
if i[0] not in [j[0] for j in clinical]:
new_clinical.append(i)
else:
if i[1]<=clinical[[j[0] for j in clinical].index(i[0])][1]:
new_clinical.append(clinical[[j[0] for j in clinical].index(i[0])])
else:
new_clinical.append(i)
##also do the reverse since clinical can contain patients not included in clinical4
for i in clinical:
if i[0] not in [j[0] for j in new_clinical]:
new_clinical.append(i)
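## A hedged example of the merge above (times are hypothetical): if the follow_up file
## recorded a patient at 200 days and the clinical_patient file recorded the same patient
## at 150 days, the 200 day record is the one appended to new_clinical, i.e. whichever
## source reports the longer follow up time wins.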
## only patients who had a follow up time greater than 0 days are included in the analysis
clinical=[i for i in new_clinical if i[1]>0]
final_clinical=[]
## A new list containing both follow up times and grade, sex, and age is constructed.
## Only patients with grade, sex, and age information are included.
## Data is [[Patient ID, time (days), vital status, grade, sex, age at diagnosis],...]
for i in clinical:
if i[0] in more_clinical:
final_clinical.append(i+more_clinical[i[0]])
## 01 indicates a primary tumor, and only primary tumors are included in this analysis
f=open(os.path.join(BASE_DIR,'tcga_data','STAD','FILE_SAMPLE_MAP_mrna.txt'))
f.readline()
data=[i.strip().split() for i in f if i!='\n']
## 01 indicates a primary tumor, and only primary tumors are included in this analysis
TCGA_to_mrna={}
for i in data:
## The normalized data files are used
if 'genes.normalized_results' in i[0]:
if i[1].split('-')[3][:-1]=='01':
x=''.join([k+j for k,j in zip(['','-','-'],i[1].split('-')[:3])])
TCGA_to_mrna[x]=TCGA_to_mrna.get(x,[])+[i[0]]
clinical_and_files=[]
## I only care about patients that contained complete clinical information
for i in final_clinical:
    if i[0] in TCGA_to_mrna:
## The mRNA files are added to the clinical list
## Data structure: [[Patient ID, time (days), vital status, grade, sex, age at diagnosis,[mRNA files]],...]
clinical_and_files.append(i+[TCGA_to_mrna[i[0]]])
else:
pass
## A list of lists of genes is constructed, the order of gene lists is same as the clinical_and_files data
## Data structure: [[genes for patient 1], [genes for patient 2], ....]
genes=[]
for i in clinical_and_files:
temp=[]
for j in i[-1]:
f=open(os.path.join(BASE_DIR,'tcga_data','STAD','mrna',j))
f.readline()
temp.append([[i.split('|')[1].split()[0],float(i.strip().split()[-1])] for i in f])
## In the case that the patient only contained 1 primary tumor mRNA file.
if len(temp)==1:
genes.append(temp[0])
## If the patient contained more than 1 primary tumor mRNA file
## this list comprehension will average the files for any number of files.
else:
values=[]
for k in temp:
values.append([kk[1] for kk in k])
genes.append(zip([z[0] for z in temp[0]],list(sum([np.array(kkk) for kkk in values])/float(len(temp)))))
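## A hedged sketch of the averaging above (gene name and numbers are hypothetical): a patient
## with two normalized files reporting TP53 as 10.0 and 14.0 would contribute the single
## averaged entry ('TP53', 12.0) to genes.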
## Only want genes that meet an expression cutoff
## A cutoff of 1 RSEM and no more than a fourth of the patients containing no expression was chosen
final_genes=[[]]*len(genes)
for i in range(len(genes[0])):
temp=[]
for j in genes:
temp.append(j[i])
count=0
for k in temp:
if k[1]==0:
count+=1
median=np.median([ii[1] for ii in temp])
if count<len(genes)/4.0 and median>1:
for index, kk in enumerate(temp):
final_genes[index]=final_genes[index]+[kk]
## This will write the final genes to a large (100-300 MB) file, which could be useful for further analyses; this step can be skipped.
f=open(os.path.join(BASE_DIR,'mrna','cox','STAD','final_genes.txt'),'w')
for i in final_genes:
f.write(str(i))
f.write('\n')
f.close()
##Performing Cox regression on all of the genes in final_genes
death_dic={}
death_dic['Alive']=0
death_dic['Dead']=1
coeffs=[]
pvalues=[]
genes=[] ##This list tracks the gene names
for i in range(len(final_genes[0])):
kaplan=[]
genes.append(final_genes[0][i][0])
for k,j in zip(clinical_and_files,final_genes): ## These lists contain the clinical information and mRNA data in the same order.
kaplan.append([k[1],k[2],k[3],k[4],k[5],j[i][1]])
data=[ii[-1] for ii in kaplan] ## Grabbing all the gene values for the current gene being analyzed
ro.globalenv['expression']=ro.FloatVector(data)
res=ro.r('round(qnorm((rank(expression, na.last="keep")-0.5)/sum(!is.na(expression))), digit=5)') ## Perform inverse normal transformation
inverse_norm=list(res) ## Convert robject to python list
## Prepare the variables for rpy2
ro.globalenv['gene']=ro.FloatVector(inverse_norm)
ro.globalenv['times']=ro.IntVector([ii[0] for ii in kaplan])
ro.globalenv['died']=ro.IntVector([death_dic[ii[1]] for ii in kaplan])
##grade1
grade1=[]
for ii in kaplan:
if ii[2]==1:
grade1.append(1)
else:
grade1.append(0)
##grade2
grade2=[]
for ii in kaplan:
if ii[2]==2:
grade2.append(1)
else:
grade2.append(0)
##grade3
grade3=[]
for ii in kaplan:
if ii[2]==3:
grade3.append(1)
else:
grade3.append(0)
ro.globalenv['grade1']=ro.IntVector(grade1)
ro.globalenv['grade2']=ro.IntVector(grade2)
ro.globalenv['grade3']=ro.IntVector(grade3)
ro.globalenv['sex']=ro.IntVector([ii[3] for ii in kaplan])
ro.globalenv['age']=ro.IntVector([ii[4] for ii in kaplan])
res=ro.r('coxph(Surv(times,died) ~ gene + sex + grade1 + grade2 + grade3 + age)') ## Perform Cox regression
## Parse the string of the result with python for the gene coefficient and pvalue
for entry in str(res).split('\n'):
try:
if entry.split()[0]=='gene':
coeff=entry.split()[1]
pvalue=entry.split()[-1]
break
except:
pass
coeffs.append(coeff)
pvalues.append(pvalue)
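## A hedged example of the summary line parsed above (exact columns can vary with the
## survival package version): the printed coxph result includes a row such as
##   gene  0.1234  1.1313  0.0456  2.71  0.0068
## from which the second field is stored as the coefficient and the last field as the pvalue.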
## This will write the results to a tab delimited file with gene name, cox coefficient, and pvalue.
f=open(os.path.join(BASE_DIR,'mrna','cox','STAD','coeffs_pvalues.txt'),'w')
for i,j,k in zip(genes,coeffs,pvalues):
f.write(i)
f.write('\t')
f.write(j)
f.write('\t')
f.write(k)
f.write('\n')
f.close()
|
mit
| -539,384,491,624,888,770
| 32.888514
| 142
| 0.64181
| false
| 3.101732
| false
| false
| false
|
google/retrieval-qa-eval
|
nq_to_squad.py
|
1
|
5989
|
#!/usr/bin/python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Convert the Natural Questions dataset into SQuAD JSON format.
To use this utility, first follow the directions at the URL below to download
the complete training dataset.
https://ai.google.com/research/NaturalQuestions/download
Next, run this program, specifying the data you wish to convert. For instance,
the invocation:
python nq_to_squad.py\
--data_pattern=/usr/local/data/tnq/v1.0/train/*.gz\
--output_file=/usr/local/data/tnq/v1.0/train.json
will process all training data and write the results into `train.json`. This
file can, in turn, be provided to squad_eval.py using the --squad argument.
"""
import argparse
import glob
import gzip
import json
import logging
import os
import re
def clean_text(start_token, end_token, doc_tokens, doc_bytes,
ignore_final_whitespace=True):
"""Remove HTML tags from a text span and reconstruct proper spacing."""
text = ""
for index in range(start_token, end_token):
token = doc_tokens[index]
if token["html_token"]:
continue
text += token["token"]
# Add a single space between two tokens iff there is at least one
# whitespace character between them (outside of an HTML tag). For example:
#
# token1 token2 ==> Add space.
# token1</B> <B>token2 ==> Add space.
# token1</A>token2 ==> No space.
# token1<A href="..." title="...">token2 ==> No space.
# token1<SUP>2</SUP>token2 ==> No space.
next_token = token
last_index = end_token if ignore_final_whitespace else end_token + 1
for next_token in doc_tokens[index + 1:last_index]:
if not next_token["html_token"]:
break
chars = (doc_bytes[token["end_byte"]:next_token["start_byte"]]
.decode("utf-8"))
# Since some HTML tags are missing from the token list, we count '<' and
# '>' to detect if we're inside a tag.
unclosed_brackets = 0
for char in chars:
if char == "<":
unclosed_brackets += 1
elif char == ">":
unclosed_brackets -= 1
elif unclosed_brackets == 0 and re.match(r"\s", char):
# Add a single space after this token.
text += " "
break
return text
def nq_to_squad(record):
"""Convert a Natural Questions record to SQuAD format."""
doc_bytes = record["document_html"].encode("utf-8")
doc_tokens = record["document_tokens"]
# NQ training data has one annotation per JSON record.
annotation = record["annotations"][0]
short_answers = annotation["short_answers"]
# Skip examples that don't have exactly one short answer.
# Note: Consider including multi-span short answers.
if len(short_answers) != 1:
return
short_answer = short_answers[0]
long_answer = annotation["long_answer"]
# Skip examples where annotator found no long answer.
if long_answer["start_token"] == -1:
return
# Skip examples corresponding to HTML blocks other than <P>.
long_answer_html_tag = doc_tokens[long_answer["start_token"]]["token"]
if long_answer_html_tag != "<P>":
return
paragraph = clean_text(
long_answer["start_token"], long_answer["end_token"], doc_tokens,
doc_bytes)
answer = clean_text(
short_answer["start_token"], short_answer["end_token"], doc_tokens,
doc_bytes)
before_answer = clean_text(
long_answer["start_token"], short_answer["start_token"], doc_tokens,
doc_bytes, ignore_final_whitespace=False)
return {"title": record["document_title"],
"paragraphs":
[{"context": paragraph,
"qas": [{"answers": [{"answer_start": len(before_answer),
"text": answer}],
"id": record["example_id"],
"question": record["question_text"]}]}]}
def main():
parser = argparse.ArgumentParser(
description="Convert the Natural Questions to SQuAD JSON format.")
parser.add_argument("--data_pattern", dest="data_pattern",
help=("A file pattern to match the Natural Questions "
"dataset."),
metavar="PATTERN", required=True)
parser.add_argument("--version", dest="version",
help="The version label in the output file.",
metavar="LABEL", default="nq-train")
parser.add_argument("--output_file", dest="output_file",
help="The name of the SQuAD JSON formatted output file.",
metavar="FILE", default="nq_as_squad.json")
args = parser.parse_args()
root = logging.getLogger()
root.setLevel(logging.DEBUG)
records = 0
nq_as_squad = {"version": args.version, "data": []}
for file in sorted(glob.iglob(args.data_pattern)):
logging.info("opening %s", file)
with gzip.GzipFile(file, "r") as f:
for line in f:
records += 1
nq_record = json.loads(line)
squad_record = nq_to_squad(nq_record)
if squad_record:
nq_as_squad["data"].append(squad_record)
if records % 1000 == 0:
logging.info("processed %s records", records)
print("Converted %s NQ records into %s SQuAD records." %
(records, len(nq_as_squad["data"])))
with open(args.output_file, "w") as f:
json.dump(nq_as_squad, f)
if __name__ == "__main__":
main()
|
apache-2.0
| -2,737,648,638,829,636,000
| 35.29697
| 79
| 0.625313
| false
| 3.788109
| false
| false
| false
|
TadLeonard/enumap
|
enumap.py
|
1
|
14184
|
import enum
from collections import namedtuple, OrderedDict
from itertools import zip_longest
__version__ = "1.5.0"
class EnumapMeta(enum.EnumMeta):
"""An EnumMeta for friendlier, more informative REPL behavior"""
def _iter_fmt_parts(cls):
names = cls.names()
types = cls.types()
for name in names:
type_ = types.get(name)
type_info = f": {type_.__name__}" if type_ is not None else ""
yield f"{name}{type_info}"
def __repr__(cls):
lines = cls._iter_fmt_parts()
indented_lines = (" " + l for l in lines)
return f"{cls.__name__}(\n" + ",\n".join(indented_lines) + "\n)"
def __str__(cls):
parts = cls._iter_fmt_parts()
return f"{cls.__name__}(" + ", ".join(parts) + ")"
class Enumap(enum.Enum, metaclass=EnumapMeta):
"""An Enum that maps data to its ordered, named members.
Produces OrderedDicts and namedtuples while ensuring that the
keys/fields match the names of the Enum members."""
@classmethod
def names(cls):
try:
names = cls.__names
except AttributeError:
names = cls.__names = tuple(cls.__members__)
return names
@classmethod
def map(cls, *values, **named_values):
"""Returns an OrderedDict from `values` & `named_values`, whose
keys match this Enum's members and their ordering
>>> Fruit = Enumap("Fruit", names="apple orange papaya")
>>> Fruit.map("heart-shaped", "spherical", papaya="ellipsoid")
OrderedDict([('apple', 'heart-shaped'), ('orange', ...), ...])
"""
mapping = cls._make_checked_mapping(*values, **named_values)
return OrderedDict(((k, mapping[k]) for k in cls.names()))
@classmethod
def map_casted(cls, *values, **named_values):
"""Like `map`, but values are converted with the `types`
mapping. Useful for deserializing ordered and named values.
>>> class Order(str, Enumap):
... index: int = "Order ID"
... cost: Decimal = "Total pretax cost"
... due_on: arrow.get = "Delivery date"
...
>>> Order.map_casted("342 32342.23 2017-09-01".split())
OrderedDict(('index', 342), ('cost', Decimal("3242.23")), ...)
"""
mapping = cls._make_casted_mapping(*values, **named_values)
return OrderedDict(((k, mapping[k]) for k in cls.names()))
@classmethod
def tuple(cls, *values, **named_values):
"""Returns a namedtuple from `values` & `named_values`, whose
fields match this Enum's members and their ordering
>>> Tool = Enumap("Tool", names="hammer mallet forehead")
>>> Tool.tuple("small", "heavy", forehead="unwieldy")
Tool_tuple(hammer='small', mallet='heavy', forehead='unwieldy')
"""
tuple_class = cls.tuple_class()
try:
return tuple_class(*values, **named_values)
except TypeError:
mapping = cls._make_checked_mapping(*values, **named_values)
return tuple_class(**mapping)
@classmethod
def tuple_casted(cls, *values, **named_values):
"""Like `tuple`, but values are converted with the `types`
mapping. Useful for deserializing ordered and named values."""
mapping = cls._make_casted_mapping(*values, **named_values)
return cls.tuple_class()(**mapping)
@classmethod
def tuple_class(cls):
"""`namedtuple` class with fields that match this Enum's
members and their ordering"""
try:
tuple_class = cls.__tuple_class
except AttributeError:
tuple_class = namedtuple(cls.__name__ + "_tuple", cls.names())
cls.__tuple_class = tuple_class
return tuple_class
@classmethod
def set_types(cls, *types, **named_types):
"""Set `types` mapping for `map/tuple_casted` methods.
>>> Pastry = Enumap("Pastry", names="croissant donut muffin")
>>> Pastry.set_types(int, int, int, donut=float)
>>> Pastry.types() # donut kwarg overrides donut arg
{'croissant': int, 'donut': float, 'muffin': int}
"""
# type mappings are allowed to be a subset of the member keys
# in other words, not all members have to have a type
sparse_types = SparseEnumap("temporary_types", cls.names())
sparse_type_map = sparse_types.map(*types, **named_types)
non_null_types = {k: v for k, v in sparse_type_map.items()
if v is not None}
type_subset = Enumap(f"{cls.__name__}_types",
tuple(non_null_types.keys()))
cls.__member_types = type_subset.map(*types, **named_types)
@classmethod
def types(cls):
"""Mapping like `{member_name: callable}` for `map/tuple_casted`.
This can either come from type annotations or `set_types`."""
try:
return cls.__member_types
except AttributeError:
types = dict(getattr(cls, "__annotations__", {}))
cls.__member_types = types
return cls.__member_types
@classmethod
def _make_checked_mapping(cls, *values, **named_values):
"""Generate key-value pairs where keys are strictly the names
of the members of this Enum. Raises `KeyError` for both
missing and invalid keys."""
names = cls.names()
mapping = dict(zip(names, values), **named_values)
if set(mapping) == set(names) and len(values) <= len(names):
return mapping
else:
cls._raise_invalid_args(values, mapping, names)
@classmethod
def _make_casted_mapping(cls, *values, **named_values):
"""Like `_make_checked_mapping`, but values are casted based
on the `types()` mapping"""
mapping = cls._make_checked_mapping(*values, **named_values)
mapping.update(_type_cast_items(mapping, cls.types()))
return mapping
@classmethod
def _raise_invalid_args(cls, values, mapping, names):
missing = (set(names) - set(mapping)) or {}
invalid = (set(mapping) - set(names)) or {}
if len(values) > len(names):
n_args = len(values)
n_expected = len(names)
raise KeyError(
f"{cls.__name__} requires keys {names}; "
f"expected {n_expected} arguments, got {n_args}")
else:
raise KeyError(
f"{cls.__name__} requires keys {names}; "
f"missing keys {missing}; invalid keys {invalid}")
def _type_cast_items(mapping, types):
"""Generates key/value pairs for which each
value is casted with the callable in the `types` mapping.
"""
key = None
try:
for key, type_callable in types.items():
yield key, type_callable(mapping[key])
except Exception as e:
value = mapping.get(key)
value_type = type(value).__name__
raise TypeCastError(f"Key '{key}' got invalid value '{value}' "
f"of type {value_type} (error: '{e}')", key)
class TypeCastError(TypeError):
"""Raised when an Enumap field raises an exception
during type casting for Enumap.tuple_casted or Enumap.map_casted
Attributes
key: key or field name for which a value could not be
successfully type casted
"""
def __init__(self, message, key):
super().__init__(message)
self.key = key
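# A hedged usage sketch (the "Order" subclass is the hypothetical one from the
# map_casted docstring above; the values are illustrative): the key attribute tells
# the caller which field failed to cast.
#
#     >>> try:
#     ...     Order.map_casted("not-a-number", "9.99", "2017-09-01")
#     ... except TypeCastError as exc:
#     ...     print(exc.key)
#     index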
class default(enum.auto):
"""A subclass of enum.auto that
    1. behaves as a unique enum member because enum members that aren't unique
effectively become aliases
2. gives the user a way of signaling that an enum value should be used as
a default in the collections created by SparseEnumap.map() or .tuple()
Sample usage:
>>> class Pets(SparseEnumap):
... dogs: int = default(3)
... cats: int = default(44)
... squirrels: float = 3 # this isn't a default at all
>>> Pets.tuple()
Pets_tuple(dogs=3, cats=44, squirrels=None)
"""
def __init__(self, default_value=None):
self._value = (enum.auto.value, default_value)
@property
def value(self):
return self
@value.setter
def value(self, new_value):
actual_default = self._value[-1]
self._value = (new_value, actual_default)
@property
def default(self):
return self._value[1]
def _iter_member_defaults(members):
"""Iterates through Enum members and teases out the default value
the user selected with `default(<user's special value>)` from the
`default` object.
"""
for k, v in members.items():
if isinstance(v.value, default):
yield k, v.value.default
# By not yielding k, v for non-default() objects, we avoid using
# things like auto() as defaults in our .tuple()/.map() collections.
# This makes it explicit when a user is using an enum value
# as a default while ALSO allowing SparseEnumaps to adhere to the
# rules of Enums. Each value of an Enum must be unique, and those that
# aren't are basically just aliases
class SparseEnumapMeta(EnumapMeta):
"""An EnumMeta for friendlier, more informative REPL behavior"""
def _iter_fmt_parts(cls):
# None defaults are not explicitly shown for readability
names = cls.names()
types = cls.types()
defaults = cls.defaults()
for name in names:
type_ = types.get(name)
default = defaults.get(name)
type_info = f": {type_.__name__}" if type_ is not None else ""
default_info = f" = {default!r}" if default is not None else ""
yield f"{name}{type_info}{default_info}"
class SparseEnumap(Enumap, metaclass=SparseEnumapMeta):
"""A less strict Enumap that provides default values
for unspecified keys. Invalid keys are still prohibited."""
@classmethod
def set_defaults(cls, *values, **named_values):
cls.__member_defaults = cls.map(*values, **named_values)
@classmethod
def defaults(cls):
try:
return cls.__member_defaults
except AttributeError:
members = cls.__members__
defaults_spec = Enumap("_Defaults", cls.names())
declared_defaults = dict(_iter_member_defaults(members))
member_defaults = defaults_spec.map(
*[None] * len(cls), **declared_defaults)
cls.__member_defaults = member_defaults
return cls.__member_defaults
@classmethod
def _make_checked_mapping(cls, *values, **named_values):
"""Generate key-value pairs where keys are strictly the names
of the members of this Enum. Raises `KeyError` for both
missing and invalid keys."""
names = cls.names()
names_set = set(names)
defaults = cls.defaults()
# Create a mapping which will be a subset of the final,
# sparse mapping. As we go, record which values are present
# in the mapping and which are missing.
if defaults:
mapping = dict(zip(names, values), **named_values)
missing = names_set - set(mapping)
mapping.update(((k, defaults[k]) for k in missing))
else:
mapping = dict(zip_longest(names, values), **named_values)
# If we haven't been passed invalid keys and we haven't been
# passed too many positional arguments, return the mapping
if set(mapping) == names_set and len(values) <= len(names):
return mapping
else:
cls._raise_invalid_args(values, mapping, names)
@classmethod
def _make_casted_mapping(cls, *values, **named_values):
"""Like `_make_checked_mapping`, but values are casted based
on the `types()` mapping"""
names = cls.names()
names_set = set(names)
defaults = cls.defaults()
# Create a mapping which will be a subset of the final,
# sparse mapping. As we go, record which values are present
# in the mapping and which are missing.
if defaults:
mapping = dict(zip(names, values), **named_values)
present = set(mapping)
missing = names_set - present
mapping.update(((k, defaults[k]) for k in missing))
else:
mapping = dict(zip(names, values), **named_values)
present = set(mapping)
missing = names_set - present
# Cast the values of our mapping with the the type function
# corresponding to their keys. We use the `missing` set of keys
# as a guide here because we don't want to cast missing or default
# values.
types = cls.types()
if types:
present_typed = present & set(types)
relevant_types = {key: types[key] for key in present_typed}
mapping.update(_type_cast_items(mapping, relevant_types))
# Handle default values to create a sparse mapping.
# Missing values will either be filled in with what's in the
# `defaults` mapping or with None if the user hasn't set defaults.
temp = dict(defaults) or {}.fromkeys(names)
temp.update(mapping)
mapping = temp
# If we haven't been passed invalid keys and we haven't been
# passed too many positional arguments, return the mapping
if not present - names_set and len(values) <= len(names):
return mapping
else:
cls._raise_invalid_args(values, mapping, names)
@classmethod
def _raise_invalid_args(cls, values, mapping, names):
if len(values) > len(names):
n_args = len(values)
n_expected = len(names)
raise KeyError(
f"{cls.__name__} requires keys {names}; "
f"expected {n_expected} arguments, got {n_args}")
else:
invalid = set(mapping) - set(names)
raise KeyError(f"{cls.__name__} requires keys {names}; "
f"invalid keys {invalid}")
|
mit
| -2,633,654,742,041,224,000
| 37.02681
| 78
| 0.590031
| false
| 4.061856
| false
| false
| false
|
atodorov/anaconda
|
pyanaconda/ui/gui/spokes/lib/accordion.py
|
1
|
21359
|
# vim: set fileencoding=utf-8
# Mountpoint selector accordion and page classes
#
# Copyright (C) 2012-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from pyanaconda.anaconda_loggers import get_module_logger
from pyanaconda.core.constants import DEFAULT_AUTOPART_TYPE
from pyanaconda.core.i18n import _, C_
from pyanaconda.product import productName, productVersion
from pyanaconda.core.storage import get_supported_autopart_choices
from pyanaconda.ui.gui.utils import escape_markup, really_hide, really_show
import gi
gi.require_version("AnacondaWidgets", "3.3")
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk, AnacondaWidgets
log = get_module_logger(__name__)
__all__ = ["MountPointSelector", "Accordion", "Page", "UnknownPage", "CreateNewPage"]
DATA_DEVICE = 0
SYSTEM_DEVICE = 1
SYSTEM_MOUNT_POINTS = [
"/", "/boot", "/boot/efi", "/tmp", "/usr",
"/var", "swap", "PPC PReP Boot", "BIOS Boot"
]
class MountPointSelector(AnacondaWidgets.MountpointSelector):
"""The mount point selector."""
def __init__(self):
super().__init__()
self.root_name = ""
@property
def device_name(self):
return self.get_property("name")
@property
def mount_point(self):
return self.get_property("mountpoint")
@property
def mount_point_type(self):
if not self.mount_point or self.mount_point in SYSTEM_MOUNT_POINTS:
return SYSTEM_DEVICE
else:
return DATA_DEVICE
class Accordion(Gtk.Box):
""" An Accordion is a box that goes on the left side of the custom partitioning spoke.
It stores multiple expanders which are here called Pages. These Pages correspond to
individual installed OSes on the system plus some special ones. When one Page is
expanded, all others are collapsed.
"""
def __init__(self):
super().__init__(orientation=Gtk.Orientation.VERTICAL, spacing=12)
self._expanders = []
self._active_selectors = []
self._current_selector = None
self._last_selected = None
def find_page_by_title(self, title):
for e in self._expanders:
if e.get_child().page_title == title:
return e.get_child()
return None
def _on_expanded(self, obj, cb=None):
# Get the content of the expander.
child = obj.get_child()
if child:
# The expander is not expanded yet.
is_expanded = not obj.get_expanded()
# Show or hide the child.
# We need to set this manually because of a gtk bug:
# https://bugzilla.gnome.org/show_bug.cgi?id=776937
child.set_visible(is_expanded)
if cb:
cb(child)
def _activate_selector(self, selector, activate, show_arrow):
selector.set_chosen(activate)
selector.props.show_arrow = show_arrow
selector.get_page().mark_selection(selector)
def add_page(self, contents, cb):
label = Gtk.Label(label="""<span size='large' weight='bold' fgcolor='black'>%s</span>""" %
escape_markup(contents.page_title), use_markup=True,
xalign=0, yalign=0.5, wrap=True)
expander = Gtk.Expander()
expander.set_label_widget(label)
expander.add(contents)
self.add(expander)
self._expanders.append(expander)
expander.connect("activate", self._on_expanded, cb)
expander.show_all()
def unselect(self):
""" Unselect all items and clear current_selector.
"""
for s in self._active_selectors:
self._activate_selector(s, False, False)
self._active_selectors.clear()
self._current_selector = None
log.debug("Unselecting all items.")
def select(self, selector):
""" Select one item. Remove selection from all other items
and clear ``current_selector`` if set. Add new selector and
append it to selected items. Also select the new item.
:param selector: Selector which we want to select.
"""
self.unselect()
self._active_selectors.append(selector)
self._current_selector = selector
self._last_selected = selector
self._activate_selector(selector, activate=True, show_arrow=True)
log.debug("Select device: %s", selector.device_name)
def _select_with_shift(self, clicked_selector):
# No items selected, only select this one
if not self._last_selected or self._last_selected is clicked_selector:
self.select(clicked_selector)
return
select_items = []
start_selection = False
for s in self.all_selectors:
if s is clicked_selector or s is self._last_selected:
if start_selection:
select_items.append(s) # append last item too
break
else:
start_selection = True
if start_selection:
select_items.append(s)
self.unselect()
self.append_selection(select_items)
def append_selection(self, selectors):
""" Append new selectors to the actual selection. This takes
list of selectors.
If more than 1 item is selected remove the ``current_selector``.
No current selection is allowed in multiselection.
:param list selectors: List of selectors which will be
appended to current selection.
"""
if not selectors:
return
# If multiselection is already active it will be active even after the new selection.
multiselection = ((self.is_multiselection or len(selectors) > 1) or
# Multiselection will be active also when there is one item already
# selected and it's not the same which is in selectors array
(self._current_selector and self._current_selector not in selectors))
# Hide arrow from current selected item if there will be multiselection.
if not self.is_multiselection and multiselection and self._current_selector:
self._current_selector.props.show_arrow = False
for s in selectors:
self._active_selectors.append(s)
if multiselection:
self._activate_selector(s, activate=True, show_arrow=False)
else:
self._activate_selector(s, activate=True, show_arrow=True)
log.debug("Append device %s to the selection.", s.device_name)
if len(selectors) == 1:
self._last_selected = selectors[-1]
if multiselection:
self._current_selector = None
else:
self._current_selector = self._active_selectors[0]
log.debug("Selected items %s; added items %s",
len(self._active_selectors), len(selectors))
def remove_selection(self, selectors):
""" Remove :param:`selectors` from current selection. If only
one item is selected after this operation it's set as
``current_selector``.
Items which are not selected are ignored.
:param list selectors: List of selectors which will be
removed from current selection.
"""
for s in selectors:
if s in self._active_selectors:
self._activate_selector(s, activate=False, show_arrow=False)
self._active_selectors.remove(s)
log.debug("Device %s is removed from the selection.", s)
if len(self._active_selectors) == 1:
self._current_selector = self._active_selectors[0]
self._current_selector.props.show_arrow = True
else:
self._current_selector = None
log.debug("Selected items %s; removed items %s",
len(self._active_selectors), len(selectors))
@property
def current_page(self):
""" The current page is really a function of the current selector.
Whatever selector on the LHS is selected, the current page is the
page containing that selector.
"""
if not self.current_selector:
return None
for page in self.all_pages:
if self.current_selector in page.members:
return page
return None
@property
def current_selector(self):
return self._current_selector
@property
def all_pages(self):
return [e.get_child() for e in self._expanders]
@property
def all_selectors(self):
return [s for p in self.all_pages for s in p.members]
@property
def all_members(self):
for page in self.all_pages:
for member in page.members:
yield (page, member)
@property
def is_multiselection(self):
return len(self._active_selectors) > 1
@property
def is_current_selected(self):
if self.current_selector:
return True
return False
@property
def selected_items(self):
return self._active_selectors
def page_for_selector(self, selector):
""" Return page for given selector. """
for page in self.all_pages:
for s in page.members:
if s is selector:
return page
def expand_page(self, page_title):
page = self.find_page_by_title(page_title)
expander = page.get_parent()
if not expander:
raise LookupError()
if not expander.get_expanded():
expander.emit("activate")
def remove_page(self, page_title):
# First, remove the expander from the list of expanders we maintain.
target = self.find_page_by_title(page_title)
if not target:
return
self._expanders.remove(target.get_parent())
for s in target.members:
if s in self._active_selectors:
self._active_selectors.remove(s)
# Then, remove it from the box.
self.remove(target.get_parent())
def remove_all_pages(self):
for e in self._expanders:
self.remove(e)
self._expanders = []
self._active_selectors = []
self._current_selector = None
def clear_current_selector(self):
""" If current selector is selected, deselect it
"""
if self._current_selector:
if self._current_selector in self._active_selectors:
self._active_selectors.remove(self._current_selector)
self._activate_selector(self._current_selector, activate=False, show_arrow=False)
self._current_selector = None
def process_event(self, selector, event, cb):
""" Process events from selectors and select items as result.
Call cb after selection is done with old selector and new selector
as arguments.
:param selector: Clicked selector
:param event: Gtk event object
:param cb: Callback which will be called after selection is done.
This callback is setup in :meth:`Page.add_selector` method.
"""
gi.require_version("Gdk", "3.0")
from gi.repository import Gdk
if event:
if event.type not in [Gdk.EventType.BUTTON_PRESS, Gdk.EventType.KEY_RELEASE,
Gdk.EventType.FOCUS_CHANGE]:
return
if event.type == Gdk.EventType.KEY_RELEASE and \
event.keyval not in [Gdk.KEY_space, Gdk.KEY_Return, Gdk.KEY_ISO_Enter, Gdk.KEY_KP_Enter, Gdk.KEY_KP_Space]:
return
old_selector = self.current_selector
# deal with multiselection
state = event.get_state()
if state & Gdk.ModifierType.CONTROL_MASK: # holding CTRL
if selector in self._active_selectors:
self.remove_selection([selector])
else:
self.append_selection([selector])
elif state & Gdk.ModifierType.SHIFT_MASK: # holding SHIFT
self._select_with_shift(selector)
else:
self.select(selector)
# Then, this callback will set up the right hand side of the screen to
# show the details for the newly selected object.
cb(old_selector, selector)
class BasePage(Gtk.Box):
""" Base class for all Pages. It implements most methods which is used
all kind of Page classes.
.. NOTE::
You should not instantiate this class. Please create a subclass
and use the subclass instead.
"""
def __init__(self, title):
super().__init__(orientation=Gtk.Orientation.VERTICAL, spacing=6)
self.members = []
self.page_title = title
self._selected_members = set()
self._data_box = None
self._system_box = None
@property
def selected_members(self):
return self._selected_members
def _get_accordion(self):
return self.get_ancestor(Accordion)
def _make_category_label(self, name):
label = Gtk.Label()
label.set_markup("""<span fgcolor='dark grey' size='large' weight='bold'>%s</span>""" %
escape_markup(name))
label.set_halign(Gtk.Align.START)
label.set_margin_start(24)
return label
def mark_selection(self, selector):
if selector.get_chosen():
self._selected_members.add(selector)
else:
self._selected_members.discard(selector)
def add_selector(self, selector, cb):
accordion = self._get_accordion()
selector.set_page(self)
selector.connect("button-press-event", accordion.process_event, cb)
selector.connect("key-release-event", accordion.process_event, cb)
selector.connect("focus-in-event", self._on_selector_focus_in, cb)
selector.set_margin_bottom(6)
self.members.append(selector)
# pylint: disable=no-member
if selector.mount_point_type == DATA_DEVICE:
self._data_box.add(selector)
else:
self._system_box.add(selector)
def _on_selector_focus_in(self, selector, event, cb):
accordion = self._get_accordion()
cb(accordion.current_selector, selector)
def _on_selector_added(self, container, widget, label):
really_show(label)
def _on_selector_removed(self, container, widget, label):
# This runs before widget is removed from container, so if it's the last
# item then the container will still not be empty.
if len(container.get_children()) == 1:
really_hide(label)
class Page(BasePage):
""" A Page is a box that is stored in an Accordion. It breaks down all the filesystems that
comprise a single installed OS into two categories - Data filesystems and System filesystems.
Each filesystem is described by a single MountpointSelector.
"""
def __init__(self, title):
super().__init__(title)
# Create the Data label and a box to store all its members in.
self._data_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self._data_label = self._make_category_label(_("DATA"))
really_hide(self._data_label)
self._data_box.add(self._data_label)
self._data_box.connect("add", self._on_selector_added, self._data_label)
self._data_box.connect("remove", self._on_selector_removed, self._data_label)
self.add(self._data_box)
# Create the System label and a box to store all its members in.
self._system_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self._system_label = self._make_category_label(_("SYSTEM"))
really_hide(self._system_label)
self._system_box.add(self._system_label)
self._system_box.connect("add", self._on_selector_added, self._system_label)
self._system_box.connect("remove", self._on_selector_removed, self._system_label)
self.add(self._system_box)
class UnknownPage(BasePage):
def add_selector(self, selector, cb):
accordion = self._get_accordion()
selector.set_page(self)
selector.connect("button-press-event", accordion.process_event, cb)
selector.connect("key-release-event", accordion.process_event, cb)
self.members.append(selector)
self.add(selector)
class CreateNewPage(BasePage):
""" This is a special Page that is displayed when no new installation
has been automatically created, and shows the user how to go about
doing that. The intention is that an instance of this class will be
packed into the Accordion first and then when the new installation
is created, it will be removed and replaced with a Page for it.
"""
def __init__(self, title, create_clicked_cb, autopart_type_changed_cb,
partitions_to_reuse=True):
super().__init__(title)
# Create a box where we store the "Here's how you create a new blah" info.
self._createBox = Gtk.Grid()
self._createBox.set_row_spacing(6)
self._createBox.set_column_spacing(6)
self._createBox.set_margin_start(16)
label = Gtk.Label(label=_("You haven't created any mount points for your "
"%(product)s %(version)s installation yet. "
"You can:") % {"product" : productName, "version" : productVersion},
wrap=True, xalign=0, yalign=0.5)
self._createBox.attach(label, 0, 0, 2, 1)
dot = Gtk.Label(label="•", xalign=0.5, yalign=0.4, hexpand=False)
self._createBox.attach(dot, 0, 1, 1, 1)
self._createNewButton = Gtk.LinkButton(uri="",
label=C_("GUI|Custom Partitioning|Autopart Page", "_Click here to create them automatically."))
label = self._createNewButton.get_children()[0]
label.set_xalign(0)
label.set_yalign(0.5)
label.set_hexpand(True)
label.set_line_wrap(True)
label.set_use_underline(True)
# Create this now to pass into the callback. It will be populated later
# on in this method.
store = Gtk.ListStore(str, int)
combo = Gtk.ComboBox(model=store)
cellrendr = Gtk.CellRendererText()
combo.pack_start(cellrendr, True)
combo.add_attribute(cellrendr, "text", 0)
combo.connect("changed", autopart_type_changed_cb)
self._createNewButton.set_has_tooltip(False)
self._createNewButton.set_halign(Gtk.Align.START)
self._createNewButton.connect("clicked", create_clicked_cb, combo)
self._createNewButton.connect("activate-link", lambda *args: Gtk.true())
self._createBox.attach(self._createNewButton, 1, 1, 1, 1)
dot = Gtk.Label(label="•", xalign=0.5, yalign=0, hexpand=False)
self._createBox.attach(dot, 0, 2, 1, 1)
label = Gtk.Label(label=_("Create new mount points by clicking the '+' button."),
xalign=0, yalign=0.5, hexpand=True, wrap=True)
self._createBox.attach(label, 1, 2, 1, 1)
if partitions_to_reuse:
dot = Gtk.Label(label="•", xalign=0.5, yalign=0, hexpand=False)
self._createBox.attach(dot, 0, 3, 1, 1)
label = Gtk.Label(label=_("Or, assign new mount points to existing "
"partitions after selecting them below."),
xalign=0, yalign=0.5, hexpand=True, wrap=True)
self._createBox.attach(label, 1, 3, 1, 1)
label = Gtk.Label(label=C_("GUI|Custom Partitioning|Autopart Page", "_New mount points will use the following partitioning scheme:"),
xalign=0, yalign=0.5, wrap=True, use_underline=True)
self._createBox.attach(label, 0, 4, 2, 1)
label.set_mnemonic_widget(combo)
default = None
for name, code in get_supported_autopart_choices():
itr = store.append([_(name), code])
if code == DEFAULT_AUTOPART_TYPE:
default = itr
combo.set_margin_start(18)
combo.set_margin_end(18)
combo.set_hexpand(False)
combo.set_active_iter(default or store.get_iter_first())
self._createBox.attach(combo, 0, 5, 2, 1)
self.add(self._createBox)
|
gpl-2.0
| -7,701,931,762,083,401,000
| 37.894353
| 141
| 0.614527
| false
| 4.024312
| false
| false
| false
|
michaelnetbiz/mistt-solution
|
app/controllers/interviews.py
|
1
|
2627
|
from app import db
from app.models.interviews import InterviewSevenDays, InterviewNinetyDays
from flask import Blueprint, make_response, jsonify
from flask_cors import cross_origin
from flask_login import login_required
# instantiate the module's blueprint
interviews = Blueprint('interviews', __name__, template_folder='interviews', url_prefix='/interviews')
@cross_origin()
@interviews.route('/seven/', methods=['GET', 'POST'])
@login_required
def get_interviews_7_days():
return make_response(jsonify([service_plan for service_plan in db.InterviewSevenDays.find()]), 200)
@cross_origin()
@interviews.route('/seven/descriptives/', methods=['GET', 'POST'])
@login_required
def get_interviews_7_days_descriptives():
return make_response(jsonify([service_plan for service_plan in db.InterviewSevenDays.find()]), 200)
@cross_origin()
@interviews.route('/ninety/', methods=['GET', 'POST'])
@login_required
def get_interviews_90_days():
return make_response(jsonify([service_plan for service_plan in db.InterviewNinetyDays.find()]), 200)
@cross_origin()
@interviews.route('/ninety/descriptives/', methods=['GET', 'POST'])
@login_required
def get_interviews_90_days_descriptives():
return make_response(jsonify([service_plan for service_plan in db.InterviewNinetyDays.find()]), 200)
@cross_origin()
@interviews.route('/seven/scores/<string:_id>/', methods=['GET', 'POST'])
@login_required
def get_interview_7_days_scores(_id):
return make_response(jsonify(db.InterviewSevenDays.find_one_or_404({'_id': _id})), 200)
@cross_origin()
@interviews.route('/ninety/scores/<string:_id>/', methods=['GET', 'POST'])
@login_required
def get_interview_90_days_scores(_id):
return make_response(jsonify(db.InterviewNinetyDays.find_one_or_404({'_id': _id})), 200)
@cross_origin()
@interviews.route('/seven/<string:_id>/', methods=['GET', 'POST'])
@login_required
def get_interview_7_days(_id):
    return make_response(jsonify(db.InterviewSevenDays.find_one_or_404({'_id': _id})), 200)
@cross_origin()
@interviews.route('/ninety/<string:_id>/', methods=['GET', 'POST'])
@login_required
def get_interview_90_days(_id):
return make_response(jsonify(db.InterviewNinetyDays.find_one_or_404({'_id': _id})), 200)
@cross_origin()
@interviews.route('/seven/count/', methods=['GET', 'POST'])
@login_required
def get_interviews_7_days_count():
return make_response(jsonify(db['interview_7_days'].count()))
@cross_origin()
@interviews.route('/ninety/count/', methods=['GET', 'POST'])
@login_required
def get_interviews_90_days_count():
return make_response(jsonify(db['interview_90_days'].count()))
|
mit
| -7,827,549,883,789,478,000
| 32.679487
| 104
| 0.719833
| false
| 3.086957
| false
| false
| false
|
hobarrera/khal
|
khal/__init__.py
|
1
|
1725
|
# Copyright (c) 2013-2017 Christian Geier et al.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
try:
from khal.version import version
except ImportError:
import sys
sys.exit('Failed to find (autogenerated) version.py. This might be due to '
'using GitHub\'s tarballs or svn access. Either clone '
'from GitHub via git or get a tarball from PyPI.')
__productname__ = 'khal'
__version__ = version
__author__ = 'Christian Geier'
__copyright__ = 'Copyright (c) 2013-2017 Christian Geier et al.'
__author_email__ = 'khal@lostpackets.de'
__description__ = 'A standards based terminal calendar'
__license__ = 'Expat/MIT, see COPYING'
__homepage__ = 'https://lostpackets.de/khal/'
|
mit
| 2,818,969,302,320,337,000
| 45.621622
| 79
| 0.738551
| false
| 4.049296
| false
| false
| false
|
meng89/epubuilder
|
epubaker/metas/dcterms.py
|
1
|
2746
|
# coding=utf-8
"""Dublin Core Metadata Initiative, see http://dublincore.org/documents/dcmi-terms/"""
from epubaker.metas.attrs import Attrs, AltScript, Dir, FileAs, Id, Role, Lang
from epubaker.xl import Element, URI_XML
def always_true(*args, **kwargs):
pass
l = [
'abstract', 'accessRights', 'accrualMethod', 'accrualPeriodicity', 'accrualPolicy', 'alternative', 'audience',
'available',
'bibliographicCitation',
'conformsTo', 'contributor', 'coverage', 'created', 'creator',
'date', 'dateAccepted', 'dateCopyrighted', 'dateSubmitted', 'description',
'educationLevel', 'extent',
'format',
'hasFormat', 'hasPart', 'hasVersion',
'identifier', 'instructionalMethod',
'isFormatOf', 'isPartOf', 'isReferencedBy', 'isReplacedBy', 'isRequiredBy', 'issued', 'isVersionOf',
'language', 'license',
'mediator', 'medium', 'modified',
'provenance', 'publisher',
'references', 'relation', 'replaces', 'requires', 'rights', 'rightsHolder',
'source', 'spatial', 'subject',
'tableOfContents', 'temporal', 'title', 'type',
'valid'
]
check_funcs = {
'modified': always_true,
}
check_funcs.update(dict((one, always_true) for one in l if one not in check_funcs.keys()))
_attr_check_funcs = {
# identifier only
'opf:scheme': always_true,
'opf:alt-script': always_true,
'dir': always_true,
'opf:file-as': always_true,
'id': always_true,
'opf:role': always_true,
'xml:lang': always_true,
# subject only
'opf:authority': always_true,
# Meta only
'scheme': always_true
}
URI_DC = 'http://purl.org/dc/elements/1.1/'
URI_OPF = 'http://www.idpf.org/2007/opf'
namespace_map = {
'dc': URI_DC,
'opf': URI_OPF,
'xml': URI_XML
}
class _Base(Attrs):
def __init__(self, text):
check_funcs[self.__class__.__name__](text)
Attrs.__init__(self)
self._text = text
@property
def text(self):
return self._text
def to_element(self):
e = Element((None, 'meta'))
e.attributes[(None, 'property')] = 'dcterms:{}'.format(self.__class__.__name__)
for attr_name, value in self._attrs.items():
uri = None
if ':' in attr_name:
prefix, attr = attr_name.split(':')
uri = namespace_map[prefix]
e.prefixes[uri] = prefix
else:
attr = attr_name
e.attributes[(uri, attr)] = value
e.children.append(self.text)
return e
_classes = {}
for k, v in check_funcs.items():
_classes[k] = type(k, (_Base, AltScript, Dir, FileAs, Id, Role, Lang), {})
def get_dcterm(name):
"""get a term class by term name"""
return _classes[name]
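# A hedged usage sketch (the exact element/attribute layout depends on epubaker.xl's
# Element implementation; 'title' is one of the term names listed in `l` above):
#
#     >>> Title = get_dcterm('title')
#     >>> meta = Title('A Hypothetical Book')
#     >>> element = meta.to_element()  # a <meta property="dcterms:title"> element
#     >>> element.children
#     ['A Hypothetical Book']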
|
mit
| -3,042,853,000,001,800,700
| 23.300885
| 114
| 0.59177
| false
| 3.288623
| false
| false
| false
|
pygeo/pycmbs
|
pycmbs/icon.py
|
1
|
3243
|
# -*- coding: utf-8 -*-
"""
This file is part of pyCMBS.
(c) 2012- Alexander Loew
For COPYING and LICENSE details, please refer to the LICENSE file
"""
from pycmbs.data import Data
import os
from pycmbs.netcdf import *
import numpy as np
class Icon(Data):
"""
Main class for ICON data handling
"""
def __init__(self, filename, gridfile, varname, read=False, **kwargs):
"""
Parameters
----------
filename : str
filename of data file
gridfile : str
filename of grid definition file
varname : str
name of variable to handle
read : bool
specify if data should be read immediately
"""
Data.__init__(self, filename, varname, **kwargs)
self.gridfile = gridfile
self.gridtype = 'unstructured'
#---
def read(self, time_var='time'):
"""
This is a special routine for reading data from ICON structure
a bit redundant to Data.read()
Parameters
----------
time_var : str
name of time variable (default='time')
"""
print('Reading ICON data ...')
if not os.path.exists(self.filename):
raise ValueError('File not existing: %s' % self.filename)
if not os.path.exists(self.gridfile):
raise ValueError('File not existing: %s' % self.gridfile)
#--- time variable
self.time_var = time_var
#--- data field
# [time,ncell]
self.data = self.read_netcdf(self.varname)
nt, ncell = self.data.shape
# reshape so we have a common 3D structure like always in pyCMBS
self.data = self.data.reshape((nt, 1, ncell))
if self.data is None:
raise ValueError('The data in the file %s is not existing. \
This must not happen!' % self.filename)
if self.scale_factor is None:
raise ValueError('The scale_factor for file %s is NONE, \
this must not happen!' % self.filename)
self.data *= self.scale_factor
#--- read lat/lon
File = NetCDFHandler()
File.open_file(self.gridfile, 'r')
# grid cell center coordinates
self.lon = File.get_variable('clon') * 180. / np.pi
self.lat = File.get_variable('clat') * 180. / np.pi
self.ncell = len(self.lon)
self.vlon = File.get_variable('clon_vertices') * 180. / np.pi
self.vlat = File.get_variable('clat_vertices') * 180. / np.pi
File.close()
#--- read time variable
if self.time_var is not None:
# returns either None or a masked array
self.time = self.read_netcdf(self.time_var)
if hasattr(self.time, 'mask'):
self.time = self.time.data
else:
            self.time = None
if self.time is not None:
if self.time.ndim != 1:
# remove singletone dimensions
self.time = self.time.flatten()
else:
self.time = None
#--- determine time --> convert to python timestep
if self.time is not None:
self.set_time()
|
mit
| 4,772,429,266,076,823,000
| 28.752294
| 74
| 0.547641
| false
| 4.094697
| false
| false
| false
|
paopao74cn/noworkflow
|
capture/noworkflow/now/models/history.py
|
1
|
5563
|
# Copyright (c) 2015 Universidade Federal Fluminense (UFF)
# Copyright (c) 2015 Polytechnic Institute of New York University.
# This file is part of noWorkflow.
# Please, consult the license terms in the LICENSE file.
from __future__ import (absolute_import, print_function,
division, unicode_literals)
from collections import defaultdict, OrderedDict
from .model import Model
from ..graphs.history_graph import HistoryGraph
from ..utils import calculate_duration, FORMAT
from ..persistence import row_to_dict
from ..persistence import persistence as pers
class History(Model):
""" This model represents the workflow evolution history
It is possible to filter the evolution history by selecting the script:
history.script = "script1.py"
The list of scripts can be accessed by:
history.scripts()
It is also possible to filter the evolution history by selecting the
trial status:
history.execution = "finished"
The list of status are:
finished: show only finished trials
unfinished: show only unfinished trials
backup: show only backup trials
The default option for both filters is "*", which means that all trials
appear in the history
history.script = "*"
history.execution = "*"
You can change the graph width and height by the variables:
history.graph.width = 600
history.graph.height = 200
"""
DEFAULT = {
'graph.width': 700,
'graph.height': 300,
'script': '*',
'execution': '*',
'data': {},
}
REPLACE = {
'graph_width': 'graph.width',
'graph_height': 'graph.height',
}
def __init__(self, **kwargs):
super(History, self).__init__(**kwargs)
self.graph = HistoryGraph()
self.initialize_default(kwargs)
self.execution_options = ["*", "finished", "unfinished", "backup"]
def scripts(self):
""" Returns the list of scripts used for trials """
return {s[0].rsplit('/', 1)[-1] for s in pers.distinct_scripts()}
def graph_data(self, script="*", execution="*"):
""" Prepares evolution history as a dict """
if self.script != "*" and script == "*":
script = self.script
if self.execution != "*" and execution == "*":
execution = self.execution
key = (script, execution)
if key in self.data:
return self.data[key]
nodes, edges = [], []
result = {'nodes': nodes, 'edges': edges}
id_map, children = {}, defaultdict(list)
scripts, order = defaultdict(list), OrderedDict()
# Filter nodes and adds to dicts
tid = 0
for trial in map(row_to_dict, pers.load('trial', order="start")):
different_script = (trial['script'] != script)
finished = trial['finish']
unfinished = not finished and trial['run']
backup = not finished and not trial['run']
if script != '*' and different_script:
continue
if execution == 'finished' and not finished:
continue
if execution == 'unfinished' and not unfinished:
continue
if execution == 'backup' and not backup:
continue
trial_id = trial["id"]
trial["level"] = 0
trial["status"] = "Finished" if trial["finish"] else "Unfinished"
if not trial['run']:
trial["status"] = "Backup"
trial["tooltip"] = """
<b>{script}</b><br>
{status}<br>
Start: {start}<br>
Finish: {finish}
""".format(**trial)
if trial['finish']:
duration = calculate_duration(trial)
trial["tooltip"] += """
<br>
Duration: {duration}ns
""".format(duration=duration)
trial['duration'] = duration
id_map[trial_id] = tid
scripts[trial['script']].append(trial)
nodes.append(trial)
tid += 1
# Create edges
for trial in reversed(nodes):
trial_id, parent_id = trial["id"], trial["parent_id"]
if parent_id and parent_id in id_map:
edges.append({
'source': id_map[trial_id],
'target': id_map[parent_id],
'right': 1,
'level': 0
})
children[parent_id].append(trial_id)
order[trial['script']] = 1
# Set position
level = 0
for script in order:
last = level
for trial in scripts[script]:
trial_id, parent_id = trial["id"], trial["parent_id"]
if parent_id and parent_id in id_map:
parent = nodes[id_map[parent_id]]
if children[parent_id].index(trial_id) > 0:
trial["level"] = last
last += 1
else:
trial["level"] = parent["level"]
level = max(level, trial["level"] + 1)
else:
trial["level"] = level
level += 1
last += 1
self.data[key] = result
return result
def _repr_html_(self):
""" Display d3 graph on ipython notebook """
return self.graph._repr_html_(history=self)
|
mit
| -6,656,497,510,584,656,000
| 32.926829
| 77
| 0.528132
| false
| 4.544935
| false
| false
| false
|
openstack/manila
|
releasenotes/source/conf.py
|
1
|
8885
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Manila Release Notes documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 3 17:40:50 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'reno.sphinxext',
    'openstackdocstheme',
]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/manila'
openstackdocs_bug_project = 'manila'
openstackdocs_bug_tag = 'release notes'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = u'2015, Manila Developers'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = [openstackdocstheme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ManilaReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'ManilaReleaseNotes.tex', u'Manila Release Notes Documentation',
     u'Manila Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'manilareleasenotes', u'Manila Release Notes Documentation',
     [u'Manila Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'ManilaReleaseNotes', u'Manila Release Notes Documentation',
     u'Manila Developers', 'ManilaReleaseNotes',
     'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
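
A hedged sketch of how this configuration might be exercised directly from
Python, assuming sphinx-build is on the PATH and the repository layout implied
by the file path above (releasenotes/source/conf.py). The output directory is
an assumption, and in practice OpenStack projects usually drive this build
through a tox environment instead of calling Sphinx by hand.

# Hypothetical invocation; the paths are assumptions based on this file's location.
import subprocess

subprocess.run(
    [
        "sphinx-build",
        "-b", "html",               # HTML builder using html_theme = 'openstackdocs'
        "releasenotes/source",      # directory containing this conf.py
        "releasenotes/build/html",  # assumed output directory
    ],
    check=True,                     # raise if the build fails
)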
|
apache-2.0
| 3,138,875,757,549,318,700
| 32.152985
| 79
| 0.706697
| false
| 3.824796
| false
| false
| false
|