| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
from __future__ import with_statement
import tempfile
import shutil
import os
import sys
from cStringIO import StringIO
from akara import commandline, read_config
class Config(object):
def __init__(self, server_root):
self.config_filename = os.path.join(server_root, "pid_test.ini")
self.pid_filename = os.path.join(server_root, "pid.txt")
def save_pid(self, text):
f = open(self.pid_filename, "w")
try:
f.write(text)
finally:
f.close()
# 'akara stop' and 'akara restart' essentially just call get_pid()
# plus do one extra call. The os.kill calls are tested manually.
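# Editor-added illustrative sketch (not part of the original test module): per
# the note above, an 'akara stop'-style helper is assumed to look roughly like
# this -- get_pid() plus a single os.kill() call; the real commandline.stop()
# may differ.
def _example_stop_sketch(config):
    import signal
    pid = commandline.get_pid(config)  # exits via SystemExit if the PID file is bad
    os.kill(pid, signal.SIGTERM)       # the os.kill part is what gets tested manually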
def tmpdir(func):
def wrapper():
dirname = tempfile.mkdtemp(prefix="akara_test_")
try:
func(dirname)
finally:
shutil.rmtree(dirname)
wrapper.__name__ = func.__name__
return wrapper
@tmpdir
def test_get_pid(server_root):
config = Config(server_root)
f = open(config.config_filename, "w")
try:
f.write("class Akara:\n PidFile = %r\nMODULES=[]\n" % config.pid_filename)
finally:
f.close()
try:
commandline.get_pid(config)
raise AssertionError("But the file does not exist!")
except SystemExit, err:
assert "Could not open Akara PID file" in str(err), err
assert config.pid_filename in str(err), err
config.save_pid("")
try:
commandline.get_pid(config)
except SystemExit, err:
assert "Empty Akara PID file" in str(err), err
assert config.pid_filename in str(err), err
config.save_pid("hello\n")
try:
commandline.get_pid(config)
except SystemExit, err:
assert "does not contain a PID" in str(err), err
assert config.pid_filename in str(err), err
config.save_pid("123\n")
pid = commandline.get_pid(config)
assert pid == 123
class CaptureStdout(object):
def __init__(self):
self.io = StringIO()
def __enter__(self):
self.io.reset()
self.io.truncate()
self.stdout = sys.stdout
sys.stdout = self.io
def __exit__(self, *args):
sys.stdout = self.stdout
self.content = self.io.getvalue()
@tmpdir
def test_status(server_root):
config = Config(server_root)
capture = CaptureStdout()
with capture:
try:
commandline.status(config)
raise AssertionError("should not get here")
except SystemExit:
pass
assert "Could not open Akara configuration file" in capture.content
assert config.config_filename in capture.content
assert "Error log file" not in capture.content
f = open(config.config_filename, "w")
f.write("class Akara: PidFile = %r\n" % config.pid_filename)
f.close()
with capture:
commandline.status(config)
assert "PID file" in capture.content
assert "PID file does not exist" in capture.content
assert "Akara is not running" in capture.content
assert "Cannot open PID file" not in capture.content
config.save_pid("Scobby-doo!\n")
with capture:
try:
commandline.status(config)
raise AssertionError("where was the exit? %r" % capture.io.getvalue())
except SystemExit:
pass
assert "Unable to parse the PID" in capture.content
assert "Scobby-doo" in capture.content
os.chmod(config.pid_filename, 0)
with capture:
try:
commandline.status(config)
raise AssertionError("where was the exit? %r" % capture.io.getvalue())
except SystemExit:
pass
assert "*** Cannot open PID file" in capture.content
os.chmod(config.pid_filename, 0644)
my_pid = str(os.getpid())
config.save_pid(my_pid)
with capture:
commandline.status(config)
assert ("PID is %s and there is a process" % my_pid) in capture.content
assert "Akara is running" in capture.content
# I can't think of a good way to test for a PID which does not exist.
# That test is done manually.
@tmpdir
def test_setup_config_file(server_root):
config_file = os.path.join(server_root, "blah_subdir", "test_config.ini")
assert not os.path.exists(config_file)
capture = CaptureStdout()
with capture:
commandline._setup_config_file(config_file)
assert "Copying reference configuration file" in capture.content
assert "Creating directory" in capture.content
assert "blah_subdir" in capture.content
assert os.path.exists(config_file)
s = open(config_file).read()
assert "class Akara" in s
assert " Listen" in s
with capture:
commandline._setup_config_file(config_file)
assert "Configuration file already exists" in capture.content
@tmpdir
def test_setup(server_root):
config = Config(server_root)
capture = CaptureStdout()
f = open(config.config_filename, "w")
f.write("class Akara: ServerRoot = %r\n" % server_root)
f.close()
with capture:
commandline.setup(config)
assert "Created error log directory" in capture.content
assert "Access log directory exists" in capture.content
assert "PID file directory exists" in capture.content, capture.content
assert "Created extension modules directory" in capture.content
assert os.path.exists(os.path.join(server_root, "logs"))
assert os.path.exists(os.path.join(server_root, "modules"))
@tmpdir
def test_log_rotate(server_root):
config = Config(server_root)
capture = CaptureStdout()
error_log_filename = os.path.join(server_root, "testing.log")
def find_backup_logs():
return [name for name in os.listdir(server_root)
if name.startswith("testing.log.")]
with open(config.config_filename, "w") as f:
f.write("class Akara:\n ServerRoot = %r\n ErrorLog=%r\n" %
(server_root, error_log_filename))
# No log file present
with capture:
commandline.main(["akara", "-f", config.config_filename, "rotate"])
assert "No log file"
MESSAGE = "It was the best of times it was the worst of times.\n"
with open(error_log_filename, "w") as f:
f.write(MESSAGE)
# Existing log file is present. Rotate
with capture:
commandline.main(["akara", "-f", config.config_filename, "rotate"])
assert "testing.log.2" in capture.content, capture.content
assert "Rotated log" in capture.content, capture.content
filenames = find_backup_logs()
assert len(filenames) == 1, ("should have one backup", filenames)
# Check that the content rotated
content = open(os.path.join(server_root, filenames[0])).read()
assert content == MESSAGE, (content, MESSAGE)
# The log file should not be present
assert not os.path.exists(error_log_filename)
MESSAGE = "When shall we three meet again?\n"
with open(error_log_filename, "w") as f:
f.write(MESSAGE)
# And rotate again. Should now have two backups
commandline.main(["akara", "-f", config.config_filename, "rotate"])
filenames = find_backup_logs()
assert len(filenames) == 2, ("should have two backups", filenames)
assert not os.path.exists(error_log_filename)
|
uogbuji/akara
|
test/test_commandline.py
|
Python
|
apache-2.0
| 7,233
|
# Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import time
import urllib
import six # noqa
from tempest_lib.common import rest_client # noqa
from tempest_lib.common.utils import data_utils # noqa
from tempest_lib import exceptions as lib_exc # noqa
from tempest import config_share as config
from tempest import exceptions
from tempest import share_exceptions
CONF = config.CONF
class SharesClient(rest_client.RestClient):
"""Tempest REST client for Manila.
    It handles shares and access to them in OpenStack; see the illustrative
    lifecycle sketch at the end of this module.
"""
def __init__(self, auth_provider):
super(SharesClient, self).__init__(
auth_provider,
CONF.share.catalog_type,
CONF.share.region or CONF.identity.region,
endpoint_type=CONF.share.endpoint_type)
self.share_protocol = None
if CONF.share.enable_protocols:
self.share_protocol = CONF.share.enable_protocols[0]
self.share_network_id = CONF.share.share_network_id
self.build_interval = CONF.share.build_interval
self.build_timeout = CONF.share.build_timeout
def create_share(self, share_protocol=None, size=1,
name=None, snapshot_id=None, description=None,
metadata=None, share_network_id=None,
share_type_id=None, is_public=False):
metadata = metadata or {}
if name is None:
name = data_utils.rand_name("tempest-created-share")
if description is None:
description = data_utils.rand_name("tempest-created-share-desc")
if share_protocol is None:
share_protocol = self.share_protocol
if share_protocol is None:
raise share_exceptions.ShareProtocolNotSpecified()
post_body = {
"share": {
"share_proto": share_protocol,
"description": description,
"snapshot_id": snapshot_id,
"name": name,
"size": size,
"metadata": metadata,
"is_public": is_public,
}
}
if share_network_id:
post_body["share"]["share_network_id"] = share_network_id
if share_type_id:
post_body["share"]["share_type"] = share_type_id
body = json.dumps(post_body)
resp, body = self.post("shares", body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def delete_share(self, share_id):
resp, body = self.delete("shares/%s" % share_id)
self.expected_success(202, resp.status)
return body
def manage_share(self, service_host, protocol, export_path,
share_type_id, name=None, description=None):
post_body = {
"share": {
"export_path": export_path,
"service_host": service_host,
"protocol": protocol,
"share_type": share_type_id,
"name": name,
"description": description,
}
}
body = json.dumps(post_body)
resp, body = self.post("os-share-manage", body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def unmanage_share(self, share_id):
resp, body = self.post(
"os-share-unmanage/%s/unmanage" % share_id, None)
self.expected_success(202, resp.status)
return body
def list_shares(self, detailed=False, params=None):
"""Get list of shares w/o filters."""
uri = 'shares/detail' if detailed else 'shares'
uri += '?%s' % urllib.urlencode(params) if params else ''
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def list_shares_with_detail(self, params=None):
"""Get detailed list of shares w/o filters."""
return self.list_shares(detailed=True, params=params)
def get_share(self, share_id):
resp, body = self.get("shares/%s" % share_id)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def create_access_rule(self, share_id, access_type="ip",
access_to="0.0.0.0", access_level=None):
post_body = {
"os-allow_access": {
"access_type": access_type,
"access_to": access_to,
"access_level": access_level,
}
}
body = json.dumps(post_body)
resp, body = self.post("shares/%s/action" % share_id, body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def list_access_rules(self, share_id):
body = {"os-access_list": None}
resp, body = self.post("shares/%s/action" % share_id, json.dumps(body))
self.expected_success(200, resp.status)
return self._parse_resp(body)
def delete_access_rule(self, share_id, rule_id):
post_body = {
"os-deny_access": {
"access_id": rule_id,
}
}
body = json.dumps(post_body)
resp, body = self.post("shares/%s/action" % share_id, body)
self.expected_success(202, resp.status)
return body
def extend_share(self, share_id, new_size):
post_body = {
"os-extend": {
"new_size": new_size,
}
}
body = json.dumps(post_body)
resp, body = self.post("shares/%s/action" % share_id, body)
self.expected_success(202, resp.status)
return body
def shrink_share(self, share_id, new_size):
post_body = {
"os-shrink": {
"new_size": new_size,
}
}
body = json.dumps(post_body)
resp, body = self.post("shares/%s/action" % share_id, body)
self.expected_success(202, resp.status)
return body
def create_snapshot(self, share_id, name=None, description=None,
force=False):
if name is None:
name = data_utils.rand_name("tempest-created-share-snap")
if description is None:
description = data_utils.rand_name(
"tempest-created-share-snap-desc")
post_body = {
"snapshot": {
"name": name,
"force": force,
"description": description,
"share_id": share_id,
}
}
body = json.dumps(post_body)
resp, body = self.post("snapshots", body)
self.expected_success(202, resp.status)
return self._parse_resp(body)
def get_snapshot(self, snapshot_id):
resp, body = self.get("snapshots/%s" % snapshot_id)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def list_snapshots(self, detailed=False, params=None):
"""Get list of share snapshots w/o filters."""
uri = 'snapshots/detail' if detailed else 'snapshots'
uri += '?%s' % urllib.urlencode(params) if params else ''
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def list_snapshots_with_detail(self, params=None):
"""Get detailed list of share snapshots w/o filters."""
return self.list_snapshots(detailed=True, params=params)
def delete_snapshot(self, snap_id):
resp, body = self.delete("snapshots/%s" % snap_id)
self.expected_success(202, resp.status)
return body
def wait_for_share_status(self, share_id, status):
"""Waits for a share to reach a given status."""
body = self.get_share(share_id)
share_name = body['name']
share_status = body['status']
start = int(time.time())
while share_status != status:
time.sleep(self.build_interval)
body = self.get_share(share_id)
share_status = body['status']
if share_status == status:
return
elif 'error' in share_status.lower():
raise share_exceptions.\
ShareBuildErrorException(share_id=share_id)
if int(time.time()) - start >= self.build_timeout:
message = ('Share %s failed to reach %s status within '
'the required time (%s s).' %
(share_name, status, self.build_timeout))
raise exceptions.TimeoutException(message)
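    # Editor's illustrative note (not in the original client): callers typically
    # poll right after creation, e.g.
    #     self.wait_for_share_status(share["id"], "available")
    # where "available" is assumed here to be the usual Manila 'ready' status.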
def wait_for_snapshot_status(self, snapshot_id, status):
"""Waits for a snapshot to reach a given status."""
body = self.get_snapshot(snapshot_id)
snapshot_name = body['name']
snapshot_status = body['status']
start = int(time.time())
while snapshot_status != status:
time.sleep(self.build_interval)
body = self.get_snapshot(snapshot_id)
snapshot_status = body['status']
if 'error' in snapshot_status:
raise exceptions.\
SnapshotBuildErrorException(snapshot_id=snapshot_id)
if int(time.time()) - start >= self.build_timeout:
message = ('Share Snapshot %s failed to reach %s status '
'within the required time (%s s).' %
(snapshot_name, status, self.build_timeout))
raise exceptions.TimeoutException(message)
def wait_for_access_rule_status(self, share_id, rule_id, status):
"""Waits for an access rule to reach a given status."""
rule_status = "new"
start = int(time.time())
while rule_status != status:
time.sleep(self.build_interval)
rules = self.list_access_rules(share_id)
for rule in rules:
if rule["id"] in rule_id:
rule_status = rule['state']
break
if 'error' in rule_status:
raise share_exceptions.\
AccessRuleBuildErrorException(rule_id=rule_id)
if int(time.time()) - start >= self.build_timeout:
message = ('Share Access Rule %s failed to reach %s status '
'within the required time (%s s).' %
(rule_id, status, self.build_timeout))
raise exceptions.TimeoutException(message)
def default_quotas(self, tenant_id):
resp, body = self.get("os-quota-sets/%s/defaults" % tenant_id)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def show_quotas(self, tenant_id, user_id=None):
uri = "os-quota-sets/%s" % tenant_id
if user_id is not None:
uri += "?user_id=%s" % user_id
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def reset_quotas(self, tenant_id, user_id=None):
uri = "os-quota-sets/%s" % tenant_id
if user_id is not None:
uri += "?user_id=%s" % user_id
resp, body = self.delete(uri)
self.expected_success(202, resp.status)
return body
def update_quotas(self, tenant_id, user_id=None, shares=None,
snapshots=None, gigabytes=None, snapshot_gigabytes=None,
share_networks=None, force=True):
uri = "os-quota-sets/%s" % tenant_id
if user_id is not None:
uri += "?user_id=%s" % user_id
put_body = {"tenant_id": tenant_id}
if force:
put_body["force"] = "true"
if shares is not None:
put_body["shares"] = shares
if snapshots is not None:
put_body["snapshots"] = snapshots
if gigabytes is not None:
put_body["gigabytes"] = gigabytes
if snapshot_gigabytes is not None:
put_body["snapshot_gigabytes"] = snapshot_gigabytes
if share_networks is not None:
put_body["share_networks"] = share_networks
put_body = json.dumps({"quota_set": put_body})
resp, body = self.put(uri, put_body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def get_limits(self):
resp, body = self.get("limits")
self.expected_success(200, resp.status)
return self._parse_resp(body)
def is_resource_deleted(self, *args, **kwargs):
"""Verifies whether provided resource deleted or not.
:param kwargs: dict with expected keys 'share_id', 'snapshot_id',
:param kwargs: 'sn_id', 'ss_id', 'vt_id' and 'server_id'
:raises share_exceptions.InvalidResource
"""
if "share_id" in kwargs:
return self._is_resource_deleted(
self.get_share, kwargs.get("share_id"))
elif "snapshot_id" in kwargs:
return self._is_resource_deleted(
self.get_snapshot, kwargs.get("snapshot_id"))
elif "sn_id" in kwargs:
return self._is_resource_deleted(
self.get_share_network, kwargs.get("sn_id"))
elif "ss_id" in kwargs:
return self._is_resource_deleted(
self.get_security_service, kwargs.get("ss_id"))
elif "vt_id" in kwargs:
return self._is_resource_deleted(
self.get_volume_type, kwargs.get("vt_id"))
elif "st_id" in kwargs:
return self._is_resource_deleted(
self.get_share_type, kwargs.get("st_id"))
elif "server_id" in kwargs:
return self._is_resource_deleted(
self.show_share_server, kwargs.get("server_id"))
else:
raise share_exceptions.InvalidResource(
message=six.text_type(kwargs))
def _is_resource_deleted(self, func, res_id):
try:
res = func(res_id)
except lib_exc.NotFound:
return True
if res.get('status') == 'error_deleting':
# Resource has "error_deleting" status and can not be deleted.
resource_type = func.__name__.split('_', 1)[-1]
raise share_exceptions.ResourceReleaseFailed(
res_type=resource_type, res_id=res_id)
return False
def wait_for_resource_deletion(self, *args, **kwargs):
"""Waits for a resource to be deleted."""
start_time = int(time.time())
while True:
if self.is_resource_deleted(*args, **kwargs):
return
if int(time.time()) - start_time >= self.build_timeout:
raise exceptions.TimeoutException
time.sleep(self.build_interval)
def list_extensions(self):
resp, extensions = self.get("extensions")
self.expected_success(200, resp.status)
return self._parse_resp(extensions)
def update_share(self, share_id, name=None, desc=None, is_public=None):
body = {"share": {}}
if name is not None:
body["share"].update({"display_name": name})
if desc is not None:
body["share"].update({"display_description": desc})
if is_public is not None:
body["share"].update({"is_public": is_public})
body = json.dumps(body)
resp, body = self.put("shares/%s" % share_id, body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def rename_snapshot(self, snapshot_id, name, desc=None):
body = {"snapshot": {"display_name": name}}
if desc is not None:
body["snapshot"].update({"display_description": desc})
body = json.dumps(body)
resp, body = self.put("snapshots/%s" % snapshot_id, body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def reset_state(self, s_id, status="error", s_type="shares"):
"""Resets the state of a share or a snapshot.
status: available, error, creating, deleting, error_deleting
s_type: shares, snapshots
"""
body = {"os-reset_status": {"status": status}}
body = json.dumps(body)
resp, body = self.post("%s/%s/action" % (s_type, s_id), body)
self.expected_success(202, resp.status)
return body
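    # Editor's illustrative note (not in the original client): example calls,
    # using the statuses and s_type values documented in the docstring above:
    #     self.reset_state(share["id"], status="error")
    #     self.reset_state(snap["id"], status="available", s_type="snapshots")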
def force_delete(self, s_id, s_type="shares"):
"""Force delete share or snapshot.
s_type: shares, snapshots
"""
body = {"os-force_delete": None}
body = json.dumps(body)
resp, body = self.post("%s/%s/action" % (s_type, s_id), body)
self.expected_success(202, resp.status)
return body
###############
def list_services(self, params=None):
"""List services."""
uri = 'os-services'
if params:
uri += '?%s' % urllib.urlencode(params)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
###############
def _update_metadata(self, share_id, metadata=None, method="post"):
uri = "shares/%s/metadata" % share_id
if metadata is None:
metadata = {}
post_body = {"metadata": metadata}
body = json.dumps(post_body)
if method is "post":
resp, metadata = self.post(uri, body)
if method is "put":
resp, metadata = self.put(uri, body)
self.expected_success(200, resp.status)
return self._parse_resp(metadata)
def set_metadata(self, share_id, metadata=None):
return self._update_metadata(share_id, metadata)
def update_all_metadata(self, share_id, metadata=None):
return self._update_metadata(share_id, metadata, method="put")
def delete_metadata(self, share_id, key):
resp, body = self.delete("shares/%s/metadata/%s" % (share_id, key))
self.expected_success(200, resp.status)
return body
def get_metadata(self, share_id):
resp, body = self.get("shares/%s/metadata" % share_id)
self.expected_success(200, resp.status)
return self._parse_resp(body)
###############
def create_security_service(self, ss_type="ldap", **kwargs):
# ss_type: ldap, kerberos, active_directory
# kwargs: name, description, dns_ip, server, domain, user, password
post_body = {"type": ss_type}
post_body.update(kwargs)
body = json.dumps({"security_service": post_body})
resp, body = self.post("security-services", body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
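    # Editor's illustrative note (not in the original client); values below are
    # placeholders, the kwargs are those listed in the comment above:
    #     self.create_security_service(
    #         "ldap", name="ss1", dns_ip="10.0.0.1", server="fake-server",
    #         domain="fake-domain", user="fake-user", password="pass")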
def update_security_service(self, ss_id, **kwargs):
# ss_id - id of security-service entity
# kwargs: dns_ip, server, domain, user, password, name, description
# for 'active' status can be changed
# only 'name' and 'description' fields
body = json.dumps({"security_service": kwargs})
resp, body = self.put("security-services/%s" % ss_id, body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def get_security_service(self, ss_id):
resp, body = self.get("security-services/%s" % ss_id)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def list_security_services(self, detailed=False, params=None):
uri = "security-services"
if detailed:
uri += '/detail'
if params:
uri += "?%s" % urllib.urlencode(params)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def delete_security_service(self, ss_id):
resp, body = self.delete("security-services/%s" % ss_id)
self.expected_success(202, resp.status)
return body
###############
def create_share_network(self, **kwargs):
# kwargs: name, description
# + for neutron: neutron_net_id, neutron_subnet_id
body = json.dumps({"share_network": kwargs})
resp, body = self.post("share-networks", body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
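    # Editor's illustrative note (not in the original client); ids below are
    # placeholders for the neutron-specific kwargs mentioned above:
    #     self.create_share_network(
    #         name="sn1", description="test share network",
    #         neutron_net_id="<net-id>", neutron_subnet_id="<subnet-id>")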
def update_share_network(self, sn_id, **kwargs):
# kwargs: name, description
# + for neutron: neutron_net_id, neutron_subnet_id
body = json.dumps({"share_network": kwargs})
resp, body = self.put("share-networks/%s" % sn_id, body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def get_share_network(self, sn_id):
resp, body = self.get("share-networks/%s" % sn_id)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def list_share_networks(self):
resp, body = self.get("share-networks")
self.expected_success(200, resp.status)
return self._parse_resp(body)
def list_share_networks_with_detail(self, params=None):
"""List the details of all shares."""
uri = "share-networks/detail"
if params:
uri += "?%s" % urllib.urlencode(params)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def delete_share_network(self, sn_id):
resp, body = self.delete("share-networks/%s" % sn_id)
self.expected_success(202, resp.status)
return body
###############
def _map_security_service_and_share_network(self, sn_id, ss_id,
action="add"):
# sn_id: id of share_network_entity
# ss_id: id of security service entity
# action: add, remove
data = {
"%s_security_service" % action: {
"security_service_id": ss_id,
}
}
body = json.dumps(data)
resp, body = self.post("share-networks/%s/action" % sn_id, body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def add_sec_service_to_share_network(self, sn_id, ss_id):
body = self._map_security_service_and_share_network(sn_id, ss_id)
return body
def remove_sec_service_from_share_network(self, sn_id, ss_id):
body = self._map_security_service_and_share_network(
sn_id, ss_id, "remove")
return body
def list_sec_services_for_share_network(self, sn_id):
resp, body = self.get("security-services?share_network_id=%s" % sn_id)
self.expected_success(200, resp.status)
return self._parse_resp(body)
###############
def list_share_types(self, params=None):
uri = 'types'
if params is not None:
uri += '?%s' % urllib.urlencode(params)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def create_share_type(self, name, is_public=True, **kwargs):
post_body = {
'name': name,
'extra_specs': kwargs.get('extra_specs'),
'os-share-type-access:is_public': is_public,
}
post_body = json.dumps({'share_type': post_body})
resp, body = self.post('types', post_body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def delete_share_type(self, share_type_id):
resp, body = self.delete("types/%s" % share_type_id)
self.expected_success(202, resp.status)
return body
def get_share_type(self, share_type_id):
resp, body = self.get("types/%s" % share_type_id)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def add_access_to_share_type(self, share_type_id, project_id):
uri = 'types/%s/action' % share_type_id
post_body = {'project': project_id}
post_body = json.dumps({'addProjectAccess': post_body})
resp, body = self.post(uri, post_body)
self.expected_success(202, resp.status)
return body
def remove_access_from_share_type(self, share_type_id, project_id):
uri = 'types/%s/action' % share_type_id
post_body = {'project': project_id}
post_body = json.dumps({'removeProjectAccess': post_body})
resp, body = self.post(uri, post_body)
self.expected_success(202, resp.status)
return body
def list_access_to_share_type(self, share_type_id):
uri = 'types/%s/os-share-type-access' % share_type_id
resp, body = self.get(uri)
# [{"share_type_id": "%st_id%", "project_id": "%project_id%"}, ]
self.expected_success(200, resp.status)
return self._parse_resp(body)
###############
def create_share_type_extra_specs(self, share_type_id, extra_specs):
url = "types/%s/extra_specs" % share_type_id
post_body = json.dumps({'extra_specs': extra_specs})
resp, body = self.post(url, post_body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def get_share_type_extra_spec(self, share_type_id, extra_spec_name):
uri = "types/%s/extra_specs/%s" % (share_type_id, extra_spec_name)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def get_share_type_extra_specs(self, share_type_id, params=None):
uri = "types/%s/extra_specs" % share_type_id
if params is not None:
uri += '?%s' % urllib.urlencode(params)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def update_share_type_extra_spec(self, share_type_id, spec_name,
spec_value):
uri = "types/%s/extra_specs/%s" % (share_type_id, spec_name)
extra_spec = {spec_name: spec_value}
post_body = json.dumps(extra_spec)
resp, body = self.put(uri, post_body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def update_share_type_extra_specs(self, share_type_id, extra_specs):
uri = "types/%s/extra_specs" % share_type_id
extra_specs = {"extra_specs": extra_specs}
post_body = json.dumps(extra_specs)
resp, body = self.post(uri, post_body)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def delete_share_type_extra_spec(self, share_type_id, extra_spec_name):
uri = "types/%s/extra_specs/%s" % (share_type_id, extra_spec_name)
resp, body = self.delete(uri)
self.expected_success(202, resp.status)
return body
###############
def list_share_servers(self, search_opts=None):
"""Get list of share servers."""
uri = "share-servers"
if search_opts:
uri += "?%s" % urllib.urlencode(search_opts)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def delete_share_server(self, share_server_id):
"""Delete share server by its ID."""
uri = "share-servers/%s" % share_server_id
resp, body = self.delete(uri)
self.expected_success(202, resp.status)
return body
def show_share_server(self, share_server_id):
"""Get share server info."""
uri = "share-servers/%s" % share_server_id
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
def show_share_server_details(self, share_server_id):
"""Get share server details only."""
uri = "share-servers/%s/details" % share_server_id
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return self._parse_resp(body)
###############
def list_pools(self, detail=False, search_opts=None):
"""Get list of scheduler pools."""
uri = 'scheduler-stats/pools'
if detail:
uri += '/detail'
if search_opts:
uri += "?%s" % urllib.urlencode(search_opts)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
return json.loads(body)
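# Editor's illustrative sketch (not part of the original client): a typical
# share lifecycle using only methods defined above. The "available" status and
# the dict keys assume the usual Manila API responses.
def _example_share_lifecycle(client):
    share = client.create_share(size=1)
    client.wait_for_share_status(share["id"], "available")
    client.delete_share(share["id"])
    client.wait_for_resource_deletion(share_id=share["id"])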
|
sajuptpm/manila
|
contrib/tempest/tempest/services/share/json/shares_client.py
|
Python
|
apache-2.0
| 28,667
|
from .attributes import Attribute, MapItem, KeyedListItem
from .attribute_collection import (AttributeCollection, MapAttributeCollection,
KeyedListAttributeCollection)
from .maps import Map, KeyedList
from .objects import Object
from .sequences import Sequence, IntList, FloatList, StrList
from .yamlizable import Dynamic, Yamlizable, Typed
from .yamlizing_error import YamlizingError
|
SimplyKnownAsG/yamlize
|
yamlize/__init__.py
|
Python
|
apache-2.0
| 421
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for MarkRecommendationSucceeded
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-recommender
# [START recommender_v1beta1_generated_Recommender_MarkRecommendationSucceeded_sync]
from google.cloud import recommender_v1beta1
def sample_mark_recommendation_succeeded():
# Create a client
client = recommender_v1beta1.RecommenderClient()
# Initialize request argument(s)
request = recommender_v1beta1.MarkRecommendationSucceededRequest(
name="name_value",
etag="etag_value",
)
# Make the request
response = client.mark_recommendation_succeeded(request=request)
# Handle the response
print(response)
# [END recommender_v1beta1_generated_Recommender_MarkRecommendationSucceeded_sync]
|
googleapis/python-recommender
|
samples/generated_samples/recommender_v1beta1_generated_recommender_mark_recommendation_succeeded_sync.py
|
Python
|
apache-2.0
| 1,596
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Quantum Subnets.
"""
import logging
from django.core.urlresolvers import reverse_lazy, reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from horizon import workflows
from openstack_dashboard import api
from .tabs import SubnetDetailTabs
from .workflows import CreateSubnet, UpdateSubnet
LOG = logging.getLogger(__name__)
class CreateView(workflows.WorkflowView):
workflow_class = CreateSubnet
template_name = 'admin/networks/subnets/create.html'
def get_object(self):
if not hasattr(self, "_object"):
try:
network_id = self.kwargs["network_id"]
self._object = api.quantum.network_get(self.request,
network_id)
self._object.set_id_as_name_if_empty()
            except Exception:
redirect = reverse('horizon:project:networks:index')
msg = _("Unable to retrieve network.")
exceptions.handle(self.request, msg, redirect=redirect)
return self._object
def get_initial(self):
network = self.get_object()
return {"network_id": self.kwargs['network_id'],
"network_name": network.name}
class UpdateView(workflows.WorkflowView):
workflow_class = UpdateSubnet
template_name = 'project/networks/subnets/update.html'
def _get_object(self, *args, **kwargs):
if not hasattr(self, "_object"):
subnet_id = self.kwargs['subnet_id']
try:
self._object = api.quantum.subnet_get(self.request, subnet_id)
            except Exception:
redirect = reverse("horizon:project:networks:index")
msg = _('Unable to retrieve subnet details')
exceptions.handle(self.request, msg, redirect=redirect)
return self._object
def get_initial(self):
initial = super(UpdateView, self).get_initial()
subnet = self._get_object()
initial['network_id'] = self.kwargs['network_id']
initial['subnet_id'] = subnet['id']
initial['subnet_name'] = subnet['name']
for key in ('cidr', 'ip_version', 'enable_dhcp'):
initial[key] = subnet[key]
initial['gateway_ip'] = subnet['gateway_ip'] or ''
initial['no_gateway'] = (subnet['gateway_ip'] is None)
initial['dns_nameservers'] = '\n'.join(subnet['dns_nameservers'])
pools = ['%s,%s' % (p['start'], p['end'])
for p in subnet['allocation_pools']]
initial['allocation_pools'] = '\n'.join(pools)
routes = ['%s,%s' % (r['destination'], r['nexthop'])
for r in subnet['host_routes']]
initial['host_routes'] = '\n'.join(routes)
return initial
class DetailView(tabs.TabView):
tab_group_class = SubnetDetailTabs
template_name = 'project/networks/subnets/detail.html'
|
MKTCloud/MKTCloud
|
openstack_dashboard/dashboards/admin/networks/subnets/views.py
|
Python
|
apache-2.0
| 3,612
|
# Copyright 2016 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from lcm.pub.database.models import FPInstModel
from lcm.pub.msapi import extsys
from lcm.pub.msapi import sdncdriver
logger = logging.getLogger(__name__)
class CreatePortChain(object):
def __init__(self, data):
self.fp_inst_id = data["fpinstid"]
self.ns_model_info = data["ns_model_data"]
self.sdnControllerId = ""
self.symmetric = ""
self.port_pair_groups_ids = []
self.flow_classifier_ids = []
def do_biz(self):
logger.info("CreatePortChain start:")
self.init_data()
self.create_sfc()
logger.info("CreatePortChain end:")
def init_data(self):
fp_inst_info = FPInstModel.objects.filter(fpinstid=self.fp_inst_id).get()
self.sdnControllerId = fp_inst_info.sdncontrollerid
self.symmetric = "true" if fp_inst_info.symmetric == 1 else "false"
        flow_classifier_str = fp_inst_info.flowclassifiers
        self.flow_classifier_ids = [flow_classifier_str]
portpairgroup_ids = []
for portpairgroup in json.loads(fp_inst_info.portpairgroups):
portpairgroup_ids.append(portpairgroup["groupid"])
self.port_pair_groups_ids = portpairgroup_ids
def create_sfc(self):
data = {
"sdnControllerId": self.sdnControllerId,
"url": extsys.get_sdn_controller_by_id(self.sdnControllerId)["url"],
"flowClassifiers": self.flow_classifier_ids,
"portPairGroups": self.port_pair_groups_ids,
"symmetric": self.symmetric
}
# url = "/openoapi/sdncdriver/v1.0/createchain"
# req_param = json.JSONEncoder.encoding(data)
# ret = req_by_msb(url, "POST", req_param)
# ret = req_by_msb("OPENAPI_CREATE_SERVICE_PORT_CHAIN",data)
# if ret[0] > 0:
# logger.error('Send SFC Create request to Driver failed.')
# sfc_inst_failed_handle( "Send SFC Create request to Driver failed.")
# raise NSLCMException('Send SFC Create request to Driver failed.')
# resp_body = json.loads(ret[1])
# sfc_id = resp_body["id"]
sfc_id = sdncdriver.create_port_chain(data)
FPInstModel.objects.filter(fpinstid=self.fp_inst_id).update(sfcid=sfc_id)
# def get_url_by_sdncontrollerid(self):
# try:
# logger.warn("query sdncontroller by id begins:")
#
# url = "/openoapi/extsys/v1/sdncontrollers/%s" % (self.sdnControllerId)
# ret = req_by_msb(url, "GET")
# if ret[0] > 0:
# logger.error('query sdncontroller failed.')
# raise VnfoException('query sdncontroller failed.')
# resp_body = json.JSONDecoder().decode(ret[1])
# logger.warn("query sdncontroller by id ends:")
# except:
# if ret[0] > 0:
# logger.error('Send Flow Classifier request to Driver failed.')
# self.sfc_inst_failed_handle(self.fp_inst_id, "Send Flow Classifier request to Driver failed.")
# raise VnfoException('Send Flow Classifier request to Driver failed.')
#
# return resp_body('url')
# def sfc_inst_failed_handle(fp_inst_id, error_msg):
# logger.error('create sfc failed, detail message: %s' % error_msg)
# FPInstModel.objects.filter(fpid=fp_inst_id).update(status="disabled").get()
|
open-o/nfvo
|
lcm/lcm/ns/sfcs/create_port_chain.py
|
Python
|
apache-2.0
| 4,061
|
#!/usr/bin/env python
import httplib2
import mock
from six.moves import urllib
import unittest
import ee
from ee import apitestcase
class DataTest(unittest.TestCase):
def testGetTaskList(self):
def Request(unused_self, url, method, body, headers):
_ = method, body, headers # Unused kwargs.
parse_result = urllib.parse.urlparse(url)
if parse_result.path != '/api/tasklist':
return httplib2.Response({'status': 404}), 'not found'
resp_body = '{}'
query_args = urllib.parse.parse_qs(parse_result.query)
if query_args == {'pagesize': ['500']}:
resp_body = ('{"data": {"tasks": [{"id": "1"}],'
' "next_page_token": "foo"}}')
elif query_args == {'pagesize': ['500'], 'pagetoken': ['foo']}:
resp_body = '{"data": {"tasks": [{"id": "2"}]}}'
response = httplib2.Response({
'status': 200,
'content-type': 'application/json',
})
return response, resp_body
with mock.patch('httplib2.Http.request', new=Request):
self.assertEqual([{'id': '1'}, {'id': '2'}], ee.data.getTaskList())
def testListOperations(self):
mock_http = mock.MagicMock(httplib2.Http)
# Return in three groups.
mock_http.request.side_effect = [
(httplib2.Response({
'status': 200
}), b'{"operations": [{"name": "name1"}], "nextPageToken": "t1"}'),
(httplib2.Response({
'status': 200
}), b'{"operations": [{"name": "name2"}], "nextPageToken": "t2"}'),
(httplib2.Response({
'status': 200
}), b'{"operations": [{"name": "name3"}]}'),
]
with apitestcase.UsingCloudApi(mock_http=mock_http):
self.assertEqual([{
'name': 'name1'
}, {
'name': 'name2'
}, {
'name': 'name3'
}], ee.data.listOperations())
def testListOperationsEmptyList(self):
# Empty lists don't appear at all in the result.
mock_http = mock.MagicMock(httplib2.Http)
mock_http.request.return_value = (httplib2.Response({'status': 200}), b'{}')
with apitestcase.UsingCloudApi(mock_http=mock_http):
self.assertEqual([], ee.data.listOperations())
@mock.patch('time.sleep')
def testSuccess(self, mock_sleep):
with DoStubHttp(200, 'application/json', '{"data": "bar"}'):
self.assertEqual('bar', ee.data.send_('/foo', {}))
self.assertEqual(False, mock_sleep.called)
@mock.patch('time.sleep')
def testRetry(self, mock_sleep):
with DoStubHttp(429, 'application/json', '{"data": "bar"}'):
with self.assertRaises(ee.ee_exception.EEException):
ee.data.send_('/foo', {})
self.assertEqual(5, mock_sleep.call_count)
def testNon200Success(self):
with DoStubHttp(202, 'application/json', '{"data": "bar"}'):
self.assertEqual('bar', ee.data.send_('/foo', {}))
def testJsonSyntaxError(self):
with DoStubHttp(200, 'application/json', '{"data"}'):
with self.assertRaises(ee.ee_exception.EEException) as cm:
ee.data.send_('/foo', {})
self.assertEqual('Invalid JSON: {"data"}', str(cm.exception))
def testJsonStructureError(self):
with DoStubHttp(200, 'application/json', '{}'):
with self.assertRaises(ee.ee_exception.EEException) as cm:
ee.data.send_('/foo', {})
self.assertEqual('Malformed response: {}', str(cm.exception))
def testUnexpectedStatus(self):
with DoStubHttp(418, 'text/html', '<html>'):
with self.assertRaises(ee.ee_exception.EEException) as cm:
ee.data.send_('/foo', {})
self.assertEqual('Server returned HTTP code: 418', str(cm.exception))
def testJson200Error(self):
with DoStubHttp(200, 'application/json',
'{"error": {"code": 500, "message": "bar"}}'):
with self.assertRaises(ee.ee_exception.EEException) as cm:
ee.data.send_('/foo', {})
self.assertEqual(u'bar', str(cm.exception))
def testJsonNon2xxError(self):
with DoStubHttp(400, 'application/json',
'{"error": {"code": 400, "message": "bar"}}'):
with self.assertRaises(ee.ee_exception.EEException) as cm:
ee.data.send_('/foo', {})
self.assertEqual(u'bar', str(cm.exception))
def testWrongContentType(self):
with DoStubHttp(200, 'text/html', '{"data": "bar"}'):
with self.assertRaises(ee.ee_exception.EEException) as cm:
ee.data.send_('/foo', {})
self.assertEqual(u'Response was unexpectedly not JSON, but text/html',
str(cm.exception))
def testNoContentType(self):
with DoStubHttp(200, None, '{"data": "bar"}'):
self.assertEqual('bar', ee.data.send_('/foo', {}))
def testContentTypeParameterAllowed(self):
with DoStubHttp(200, 'application/json; charset=utf-8', '{"data": ""}'):
self.assertEqual('', ee.data.send_('/foo', {}))
def testRawSuccess(self):
with DoStubHttp(200, 'image/png', 'FAKEDATA'):
self.assertEqual('FAKEDATA', ee.data.send_('/foo', {}, opt_raw=True))
def testRawError(self):
with DoStubHttp(400, 'application/json',
'{"error": {"code": 400, "message": "bar"}}'):
with self.assertRaises(ee.ee_exception.EEException) as cm:
ee.data.send_('/foo', {}, opt_raw=True)
self.assertEqual(u'Server returned HTTP code: 400', str(cm.exception))
def testRaw200Error(self):
"""Raw shouldn't be parsed, so the error-in-200 shouldn't be noticed.
(This is an edge case we do not expect to see.)
"""
with DoStubHttp(200, 'application/json',
'{"error": {"code": 400, "message": "bar"}}'):
self.assertEqual('{"error": {"code": 400, "message": "bar"}}',
ee.data.send_('/foo', {}, opt_raw=True))
def testNotProfiling(self):
# Test that we do not request profiling.
with DoProfileStubHttp(self, False):
ee.data.send_('/foo', {})
def testProfiling(self):
with DoProfileStubHttp(self, True):
seen = []
def ProfileHook(profile_id):
seen.append(profile_id)
with ee.data.profiling(ProfileHook):
ee.data.send_('/foo', {})
self.assertEqual(['someProfileId'], seen)
def testProfilingCleanup(self):
with DoProfileStubHttp(self, True):
try:
with ee.data.profiling(lambda _: None):
raise ExceptionForTest()
except ExceptionForTest:
pass
# Should not have profiling enabled after exiting the context by raising.
with DoProfileStubHttp(self, False):
ee.data.send_('/foo', {})
def testListAssets(self):
cloud_api_resource = mock.MagicMock()
with apitestcase.UsingCloudApi(cloud_api_resource=cloud_api_resource):
mock_result = {'assets': [{'path': 'id1', 'type': 'type1'}]}
cloud_api_resource.projects().assets().listAssets(
).execute.return_value = mock_result
cloud_api_resource.projects().assets().listAssets_next.return_value = None
actual_result = ee.data.listAssets({'p': 'q'})
cloud_api_resource.projects().assets().listAssets().\
execute.assert_called_once()
self.assertEqual(mock_result, actual_result)
def testListImages(self):
cloud_api_resource = mock.MagicMock()
with apitestcase.UsingCloudApi(cloud_api_resource=cloud_api_resource):
mock_result = {'images': [{'path': 'id1', 'type': 'type1'}]}
cloud_api_resource.projects().assets().listImages(
).execute.return_value = mock_result
cloud_api_resource.projects().assets().listImages_next.return_value = None
actual_result = ee.data.listImages({'p': 'q'})
cloud_api_resource.projects().assets().listImages(
).execute.assert_called_once()
self.assertEqual(mock_result, actual_result)
def testListBuckets(self):
cloud_api_resource = mock.MagicMock()
with apitestcase.UsingCloudApi(cloud_api_resource=cloud_api_resource):
mock_result = {'assets': [{'name': 'id1', 'type': 'FOLDER'}]}
cloud_api_resource.projects().listAssets(
).execute.return_value = mock_result
actual_result = ee.data.listBuckets()
cloud_api_resource.projects().listAssets(
).execute.assert_called_once()
self.assertEqual(mock_result, actual_result)
def testSimpleGetListViaCloudApi(self):
cloud_api_resource = mock.MagicMock()
with apitestcase.UsingCloudApi(cloud_api_resource=cloud_api_resource):
mock_result = {'assets': [{'name': 'id1', 'type': 'IMAGE_COLLECTION'}]}
cloud_api_resource.projects().assets().listAssets(
).execute.return_value = mock_result
actual_result = ee.data.getList({'id': 'glam', 'num': 3})
expected_params = {
'parent': 'projects/earthengine-public/assets/glam',
'pageSize': 3
}
expected_result = [{'id': 'id1', 'type': 'ImageCollection'}]
cloud_api_resource.projects().assets().listAssets.assert_called_with(
**expected_params)
self.assertEqual(expected_result, actual_result)
def testComplexGetListViaCloudApi(self):
cloud_api_resource = mock.MagicMock()
with apitestcase.UsingCloudApi(cloud_api_resource=cloud_api_resource):
mock_result = {
'images': [{
'name': 'id1',
'size_bytes': 1234
}]
}
cloud_api_resource.projects().assets().listImages(
).execute.return_value = mock_result
actual_result = ee.data.getList({
'id': 'glam',
'num': 3,
'starttime': 3612345
})
expected_params = {
'parent': 'projects/earthengine-public/assets/glam',
'pageSize': 3,
'startTime': '1970-01-01T01:00:12.345000Z',
'fields': 'images(name)'
}
expected_result = [{'id': 'id1', 'type': 'Image'}]
cloud_api_resource.projects().assets().listImages.assert_called_with(
**expected_params)
self.assertEqual(expected_result, actual_result)
def testCloudProfilingEnabled(self):
seen = []
def ProfileHook(profile_id):
seen.append(profile_id)
with ee.data.profiling(ProfileHook):
with apitestcase.UsingCloudApi(), DoCloudProfileStubHttp(self, True):
ee.data.listImages({'parent': 'projects/earthengine-public/assets/q'})
self.assertEqual(['someProfileId'], seen)
def testCloudProfilingDisabled(self):
with apitestcase.UsingCloudApi(), DoCloudProfileStubHttp(self, False):
ee.data.listImages({'parent': 'projects/earthengine-public/assets/q'})
def testCloudErrorTranslation(self):
mock_http = mock.MagicMock(httplib2.Http)
mock_http.request.return_value = (httplib2.Response({'status': 400}),
b'{"error": {"message": "errorly"} }')
with apitestcase.UsingCloudApi(mock_http=mock_http):
with self.assertRaisesRegexp(ee.ee_exception.EEException, '^errorly$'):
ee.data.listImages({'parent': 'projects/earthengine-public/assets/q'})
def DoStubHttp(status, mime, resp_body):
"""Context manager for temporarily overriding Http."""
def Request(unused_self, unused_url, method, body, headers):
_ = method, body, headers # Unused kwargs.
response = httplib2.Response({
'status': status,
'content-type': mime,
})
return response, resp_body
return mock.patch('httplib2.Http.request', new=Request)
def DoProfileStubHttp(test, expect_profiling):
def Request(unused_self, unused_url, method, body, headers):
_ = method, headers # Unused kwargs.
test.assertEqual(expect_profiling, 'profiling=1' in body, msg=body)
response_dict = {
'status': 200,
'content-type': 'application/json'
}
if expect_profiling:
response_dict[
ee.data._PROFILE_RESPONSE_HEADER_LOWERCASE] = 'someProfileId'
response = httplib2.Response(response_dict)
return response, '{"data": "dummy_data"}'
return mock.patch('httplib2.Http.request', new=Request)
def DoCloudProfileStubHttp(test, expect_profiling):
def Request(unused_self, unused_url, method, body, headers):
_ = method, body # Unused kwargs.
test.assertEqual(expect_profiling,
ee.data._PROFILE_REQUEST_HEADER in headers)
response_dict = {
'status': 200,
'content-type': 'application/json'
}
if expect_profiling:
response_dict[
ee.data._PROFILE_RESPONSE_HEADER_LOWERCASE] = 'someProfileId'
response = httplib2.Response(response_dict)
return response, '{"data": "dummy_data"}'
return mock.patch('httplib2.Http.request', new=Request)
class ExceptionForTest(Exception):
pass
if __name__ == '__main__':
unittest.main()
|
tylere/earthengine-api
|
python/ee/tests/data_test.py
|
Python
|
apache-2.0
| 12,603
|
# coding:utf-8
from tasks.workers import app
from page_get import user as user_get
from db.seed_ids import get_seed_ids, get_seed_by_id, insert_seeds, set_seed_other_crawled
@app.task(ignore_result=True)
def crawl_follower_fans(uid):
seed = get_seed_by_id(uid)
if seed.other_crawled == 0:
rs = user_get.get_fans_or_followers_ids(uid, 1)
rs.extend(user_get.get_fans_or_followers_ids(uid, 2))
datas = set(rs)
        # duplicate records are skipped when inserting
if datas:
insert_seeds(datas)
set_seed_other_crawled(uid)
@app.task(ignore_result=True)
def crawl_person_infos(uid):
"""
    Crawl the user's profile info plus their follower and fan counts by user id
    (due to Weibo server-side limits, only the first five pages are crawled by
    default, and the followers/fans of enterprise accounts cannot be viewed).
    :param uid: user id
:return:
"""
if not uid:
return
    # this data table is shared with other tasks, so check whether the user info is already in the database before crawling
user = user_get.get_profile(uid)
    # do not crawl enterprise (corporate verified) accounts
if user.verify_type == 2:
set_seed_other_crawled(uid)
return
app.send_task('tasks.user.crawl_follower_fans', args=(uid,), queue='fans_followers',
routing_key='for_fans_followers')
@app.task(ignore_result=True)
def excute_user_task():
seeds = get_seed_ids()
if seeds:
for seed in seeds:
app.send_task('tasks.user.crawl_person_infos', args=(seed.uid,), queue='user_crawler',
routing_key='for_user_info')
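# Editor's illustrative note (not in the original module): the task chain is
# excute_user_task -> crawl_person_infos(uid) -> crawl_follower_fans(uid),
# dispatched through app.send_task with the queues and routing keys shown above.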
|
Danceiny/HackGirlfriend
|
Spider/WeiboSpider/tasks/user.py
|
Python
|
apache-2.0
| 1,574
|
#!/usr/bin/env python
"""Classes for exporting network-related data."""
from typing import Iterator, List
from grr_response_core.lib.rdfvalues import client_network as rdf_client_network
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_proto import export_pb2
from grr_response_server.export_converters import base
class ExportedNetworkConnection(rdf_structs.RDFProtoStruct):
protobuf = export_pb2.ExportedNetworkConnection
rdf_deps = [
base.ExportedMetadata,
rdf_client_network.NetworkEndpoint,
]
class ExportedDNSClientConfiguration(rdf_structs.RDFProtoStruct):
protobuf = export_pb2.ExportedDNSClientConfiguration
rdf_deps = [
base.ExportedMetadata,
]
class ExportedNetworkInterface(rdf_structs.RDFProtoStruct):
protobuf = export_pb2.ExportedNetworkInterface
rdf_deps = [
base.ExportedMetadata,
]
class NetworkConnectionToExportedNetworkConnectionConverter(
base.ExportConverter):
"""Converts NetworkConnection to ExportedNetworkConnection."""
input_rdf_type = rdf_client_network.NetworkConnection
def Convert(
self, metadata: base.ExportedMetadata,
conn: rdf_client_network.NetworkConnection
) -> List[ExportedNetworkConnection]:
"""Converts a NetworkConnection into a ExportedNetworkConnection.
Args:
metadata: ExportedMetadata to be added to the ExportedNetworkConnection.
conn: NetworkConnection to be converted.
Returns:
A list with a single ExportedNetworkConnection containing the converted
NetworkConnection.
"""
result = ExportedNetworkConnection(
metadata=metadata,
family=conn.family,
type=conn.type,
local_address=conn.local_address,
remote_address=conn.remote_address,
state=conn.state,
pid=conn.pid,
ctime=conn.ctime)
return [result]
class InterfaceToExportedNetworkInterfaceConverter(base.ExportConverter):
"""Converts Interface to ExportedNetworkInterface."""
input_rdf_type = rdf_client_network.Interface
def Convert(
self, metadata: base.ExportedMetadata,
interface: rdf_client_network.Interface
) -> Iterator[ExportedNetworkInterface]:
"""Converts a Interface into ExportedNetworkInterfaces.
Args:
metadata: ExportedMetadata to be added to the ExportedNetworkInterface.
interface: (Network) Interface to be converted.
Yields:
An ExportedNetworkInterface containing the converted Interface.
"""
ip4_addresses = []
ip6_addresses = []
for addr in interface.addresses:
if addr.address_type == addr.Family.INET:
ip4_addresses.append(addr.human_readable_address)
elif addr.address_type == addr.Family.INET6:
ip6_addresses.append(addr.human_readable_address)
else:
raise ValueError("Invalid address type: %s" % addr.address_type)
result = ExportedNetworkInterface(
metadata=metadata,
ifname=interface.ifname,
ip4_addresses=" ".join(ip4_addresses),
ip6_addresses=" ".join(ip6_addresses))
if interface.mac_address:
result.mac_address = interface.mac_address.human_readable_address
yield result
class DNSClientConfigurationToExportedDNSClientConfiguration(
base.ExportConverter):
"""Converts DNSClientConfiguration to ExportedDNSClientConfiguration."""
input_rdf_type = rdf_client_network.DNSClientConfiguration
def Convert(
self, metadata: base.ExportedMetadata,
config: rdf_client_network.DNSClientConfiguration
) -> Iterator[ExportedDNSClientConfiguration]:
"""Converts a DNSClientConfiguration into a ExportedDNSClientConfiguration.
Args:
metadata: ExportedMetadata to be added to the
ExportedDNSClientConfiguration.
config: DNSClientConfiguration to be converted.
Yields:
An ExportedDNSClientConfiguration containing the DNSClientConfiguration.
"""
result = ExportedDNSClientConfiguration(
metadata=metadata,
dns_servers=" ".join(config.dns_server),
dns_suffixes=" ".join(config.dns_suffix))
yield result
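# Editor's illustrative note (not in the original module): each converter is
# driven by passing an ExportedMetadata plus the matching RDF value to Convert(),
# e.g. InterfaceToExportedNetworkInterfaceConverter().Convert(metadata, interface),
# which returns or yields the Exported* values defined above.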
|
google/grr
|
grr/server/grr_response_server/export_converters/network.py
|
Python
|
apache-2.0
| 4,124
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutronclient.v2_0 import client
from oslotest import base
from ceilometer import neutron_client
class TestNeutronClientLBaaSV2(base.BaseTestCase):
def setUp(self):
super(TestNeutronClientLBaaSV2, self).setUp()
self.nc = neutron_client.Client()
@staticmethod
def fake_list_lbaas_pools():
return {
'pools': [{
'lb_algorithm': 'ROUND_ROBIN',
'protocol': 'HTTP',
'description': 'simple pool',
'admin_state_up': True,
'tenant_id': '1a3e005cf9ce40308c900bcb08e5320c',
'healthmonitor_id': None,
'listeners': [{
'id': "35cb8516-1173-4035-8dae-0dae3453f37f"
}
],
'members': [{
'id': 'fcf23bde-8cf9-4616-883f-208cebcbf858'}
],
'id': '4c0a0a5f-cf8f-44b7-b912-957daa8ce5e5',
'name': 'pool1'
}]
}
@staticmethod
def fake_list_lbaas_members():
return {
'members': [{
'weight': 1,
'admin_state_up': True,
'subnet_id': '013d3059-87a4-45a5-91e9-d721068ae0b2',
'tenant_id': '1a3e005cf9ce40308c900bcb08e5320c',
'address': '10.0.0.8',
'protocol_port': 80,
'id': 'fcf23bde-8cf9-4616-883f-208cebcbf858'
}]
}
@staticmethod
def fake_list_lbaas_healthmonitors():
return {
'healthmonitors': [{
'admin_state_up': True,
'tenant_id': '6f3584d5754048a18e30685362b88411',
'delay': 1,
'expected_codes': '200,201,202',
'max_retries': 5,
'http_method': 'GET',
'timeout': 1,
'pools': [{
'id': '74aa2010-a59f-4d35-a436-60a6da882819'
}],
'url_path': '/index.html',
'type': 'HTTP',
'id': '0a9ac99d-0a09-4b18-8499-a0796850279a'
}]
}
@staticmethod
def fake_show_listener():
return {
'listener': {
'default_pool_id': None,
'protocol': 'HTTP',
'description': '',
'admin_state_up': True,
'loadbalancers': [{
'id': 'a9729389-6147-41a3-ab22-a24aed8692b2'
}],
'tenant_id': '3e4d8bec50a845fcb09e03a4375c691d',
'connection_limit': 100,
'protocol_port': 80,
'id': '35cb8516-1173-4035-8dae-0dae3453f37f',
'name': ''
}
}
@staticmethod
def fake_retrieve_loadbalancer_status():
return {
'statuses': {
'loadbalancer': {
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE',
'listeners': [{
'id': '35cb8516-1173-4035-8dae-0dae3453f37f',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE',
'pools': [{
'id': '4c0a0a5f-cf8f-44b7-b912-957daa8ce5e5',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE',
'members': [{
'id': 'fcf23bde-8cf9-4616-883f-208cebcbf858',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE'
}],
'healthmonitor': {
'id': '785131d2-8f7b-4fee-a7e7-3196e11b4518',
'provisioning_status': 'ACTIVE'
}
}]
}]
}
}
}
@staticmethod
def fake_retrieve_loadbalancer_status_complex():
return {
'statuses': {
'loadbalancer': {
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE',
'listeners': [{
'id': '35cb8516-1173-4035-8dae-0dae3453f37f',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE',
'pools': [{
'id': '4c0a0a5f-cf8f-44b7-b912-957daa8ce5e5',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE',
'members': [{
'id': 'fcf23bde-8cf9-4616-883f-208cebcbf858',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE'
},
{
'id': 'fcf23bde-8cf9-4616-883f-208cebcbf969',
'operating_status': 'OFFLINE',
'provisioning_status': 'ACTIVE'
}],
'healthmonitor': {
'id': '785131d2-8f7b-4fee-a7e7-3196e11b4518',
'provisioning_status': 'ACTIVE'
}
},
{
'id': '4c0a0a5f-cf8f-44b7-b912-957daa8ce6f6',
'operating_status': 'OFFLINE',
'provisioning_status': 'ACTIVE',
'members': [{
'id': 'fcf23bde-8cf9-4616-883f-208cebcbfa7a',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE'
}],
'healthmonitor': {
'id': '785131d2-8f7b-4fee-a7e7-3196e11b4629',
'provisioning_status': 'ACTIVE'
}
}]
},
{
'id': '35cb8516-1173-4035-8dae-0dae3453f48e',
'operating_status': 'OFFLINE',
'provisioning_status': 'ACTIVE',
'pools': [{
'id': '4c0a0a5f-cf8f-44b7-b912-957daa8ce7g7',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE',
'members': [{
'id': 'fcf23bde-8cf9-4616-883f-208cebcbfb8b',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE'
}],
'healthmonitor': {
'id': '785131d2-8f7b-4fee-a7e7-3196e11b473a',
'provisioning_status': 'ACTIVE'
}
}]
}]
}
}
}
@staticmethod
def fake_list_lbaas_listeners():
return {
'listeners': [{
'default_pool_id': None,
'protocol': 'HTTP',
'description': '',
'admin_state_up': True,
'loadbalancers': [{
'id': 'a9729389-6147-41a3-ab22-a24aed8692b2'
}],
'tenant_id': '3e4d8bec50a845fcb09e03a4375c691d',
'connection_limit': 100,
'protocol_port': 80,
'id': '35cb8516-1173-4035-8dae-0dae3453f37f',
'name': 'listener_one'
}]}
@mock.patch.object(client.Client,
'list_lbaas_pools')
@mock.patch.object(client.Client,
'show_listener')
@mock.patch.object(neutron_client.Client,
'_retrieve_loadbalancer_status_tree')
def test_list_pools_v2(self, mock_status, mock_show, mock_list):
mock_status.return_value = self.fake_retrieve_loadbalancer_status()
mock_show.return_value = self.fake_show_listener()
mock_list.return_value = self.fake_list_lbaas_pools()
pools = self.nc.list_pools_v2()
self.assertEqual(1, len(pools))
for pool in pools:
self.assertEqual('ONLINE', pool['status'])
self.assertEqual('ROUND_ROBIN', pool['lb_method'])
@mock.patch.object(client.Client,
'list_lbaas_pools')
@mock.patch.object(client.Client,
'list_lbaas_members')
@mock.patch.object(client.Client,
'show_listener')
@mock.patch.object(neutron_client.Client,
'_retrieve_loadbalancer_status_tree')
def test_list_members_v2(self, mock_status, mock_show, mock_list_members,
mock_list_pools):
mock_status.return_value = self.fake_retrieve_loadbalancer_status()
mock_show.return_value = self.fake_show_listener()
mock_list_pools.return_value = self.fake_list_lbaas_pools()
mock_list_members.return_value = self.fake_list_lbaas_members()
members = self.nc.list_members_v2()
self.assertEqual(1, len(members))
for member in members:
self.assertEqual('ONLINE', member['status'])
self.assertEqual('4c0a0a5f-cf8f-44b7-b912-957daa8ce5e5',
member['pool_id'])
@mock.patch.object(client.Client,
'list_lbaas_healthmonitors')
def test_list_health_monitors_v2(self, mock_list_healthmonitors):
mock_list_healthmonitors.return_value = (
self.fake_list_lbaas_healthmonitors())
healthmonitors = self.nc.list_health_monitors_v2()
self.assertEqual(1, len(healthmonitors))
for healthmonitor in healthmonitors:
self.assertEqual(5, healthmonitor['max_retries'])
@mock.patch.object(neutron_client.Client,
'_retrieve_loadbalancer_status_tree')
def test_get_member_status(self, mock_status):
mock_status.return_value = (
self.fake_retrieve_loadbalancer_status_complex())
loadbalancer_id = '5b1b1b6e-cf8f-44b7-b912-957daa8ce5e5'
listener_id = '35cb8516-1173-4035-8dae-0dae3453f37f'
pool_id = '4c0a0a5f-cf8f-44b7-b912-957daa8ce5e5'
parent_id = [listener_id, pool_id]
result_status = self.nc._get_member_status(loadbalancer_id,
parent_id)
expected_keys = ['fcf23bde-8cf9-4616-883f-208cebcbf858',
'fcf23bde-8cf9-4616-883f-208cebcbf969']
        expected_status = {
            'fcf23bde-8cf9-4616-883f-208cebcbf858': 'ONLINE',
            'fcf23bde-8cf9-4616-883f-208cebcbf969': 'OFFLINE'}
        for key in result_status.keys():
            self.assertIn(key, expected_keys)
            self.assertEqual(expected_status[key], result_status[key])
@mock.patch.object(neutron_client.Client,
'_retrieve_loadbalancer_status_tree')
def test_get_pool_status(self, mock_status):
mock_status.return_value = (
self.fake_retrieve_loadbalancer_status_complex())
loadbalancer_id = '5b1b1b6e-cf8f-44b7-b912-957daa8ce5e5'
parent_id = '35cb8516-1173-4035-8dae-0dae3453f37f'
result_status = self.nc._get_pool_status(loadbalancer_id,
parent_id)
expected_keys = ['4c0a0a5f-cf8f-44b7-b912-957daa8ce5e5',
'4c0a0a5f-cf8f-44b7-b912-957daa8ce6f6']
        expected_status = {
            '4c0a0a5f-cf8f-44b7-b912-957daa8ce5e5': 'ONLINE',
            '4c0a0a5f-cf8f-44b7-b912-957daa8ce6f6': 'OFFLINE'}
        for key in result_status.keys():
            self.assertIn(key, expected_keys)
            self.assertEqual(expected_status[key], result_status[key])
@mock.patch.object(neutron_client.Client,
'_retrieve_loadbalancer_status_tree')
def test_get_listener_status(self, mock_status):
mock_status.return_value = (
self.fake_retrieve_loadbalancer_status_complex())
loadbalancer_id = '5b1b1b6e-cf8f-44b7-b912-957daa8ce5e5'
result_status = self.nc._get_listener_status(loadbalancer_id)
expected_keys = ['35cb8516-1173-4035-8dae-0dae3453f37f',
'35cb8516-1173-4035-8dae-0dae3453f48e']
        expected_status = {
            '35cb8516-1173-4035-8dae-0dae3453f37f': 'ONLINE',
            '35cb8516-1173-4035-8dae-0dae3453f48e': 'OFFLINE'}
        for key in result_status.keys():
            self.assertIn(key, expected_keys)
            self.assertEqual(expected_status[key], result_status[key])
@mock.patch.object(client.Client,
'list_listeners')
@mock.patch.object(neutron_client.Client,
'_retrieve_loadbalancer_status_tree')
def test_list_listener(self, mock_status, mock_list_listeners):
mock_list_listeners.return_value = (
self.fake_list_lbaas_listeners())
mock_status.return_value = (
self.fake_retrieve_loadbalancer_status())
listeners = self.nc.list_listener()
expected_key = '35cb8516-1173-4035-8dae-0dae3453f37f'
expected_status = 'ONLINE'
self.assertEqual(1, len(listeners))
self.assertEqual(expected_key, listeners[0]['id'])
self.assertEqual(expected_status, listeners[0]['operating_status'])
| idegtiarov/ceilometer | ceilometer/tests/unit/test_neutronclient_lbaas_v2.py | Python | apache-2.0 | 14,539 |
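A standalone illustration, not taken from the ceilometer tests above, of the mock.patch.object decorator stacking those tests rely on: stacked decorators are applied bottom-up, so the patch closest to the test method supplies the first mock argument after self (which is why mock_status comes first in test_list_pools_v2). The sketch uses the standard-library unittest.mock; the external mock package behaves the same way.
import unittest
from unittest import mock
class Service:
    def ping(self):
        return "real ping"
    def pong(self):
        return "real pong"
class TestPatchOrdering(unittest.TestCase):
    @mock.patch.object(Service, "ping")  # outermost decorator -> last mock argument
    @mock.patch.object(Service, "pong")  # innermost decorator -> first mock argument
    def test_order(self, mock_pong, mock_ping):
        mock_pong.return_value = "fake pong"
        mock_ping.return_value = "fake ping"
        svc = Service()
        self.assertEqual("fake pong", svc.pong())
        self.assertEqual("fake ping", svc.ping())
if __name__ == "__main__":
    unittest.main()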
import importlib
import pytest
from unittest import mock
from google.cloud import aiplatform
from google.cloud.aiplatform import base
from google.cloud.aiplatform import datasets
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform import schema
from google.cloud.aiplatform import training_jobs
from google.cloud.aiplatform_v1.services.model_service import (
client as model_service_client,
)
from google.cloud.aiplatform_v1.services.pipeline_service import (
client as pipeline_service_client,
)
from google.cloud.aiplatform_v1.types import (
dataset as gca_dataset,
encryption_spec as gca_encryption_spec,
model as gca_model,
pipeline_state as gca_pipeline_state,
training_pipeline as gca_training_pipeline,
)
from google.protobuf import json_format
from google.protobuf import struct_pb2
_TEST_BUCKET_NAME = "test-bucket"
_TEST_GCS_PATH_WITHOUT_BUCKET = "path/to/folder"
_TEST_GCS_PATH = f"{_TEST_BUCKET_NAME}/{_TEST_GCS_PATH_WITHOUT_BUCKET}"
_TEST_GCS_PATH_WITH_TRAILING_SLASH = f"{_TEST_GCS_PATH}/"
_TEST_PROJECT = "test-project"
_TEST_DATASET_DISPLAY_NAME = "test-dataset-display-name"
_TEST_DATASET_NAME = "test-dataset-name"
_TEST_DISPLAY_NAME = "test-display-name"
_TEST_METADATA_SCHEMA_URI_TABULAR = schema.dataset.metadata.tabular
_TEST_METADATA_SCHEMA_URI_NONTABULAR = schema.dataset.metadata.image
_TEST_TRAINING_COLUMN_NAMES = [
"sepal_width",
"sepal_length",
"petal_length",
"petal_width",
"target",
]
_TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE = [
"apple",
"banana",
"coconut",
"target",
]
_TEST_TRAINING_COLUMN_TRANSFORMATIONS = [
{"auto": {"column_name": "sepal_width"}},
{"auto": {"column_name": "sepal_length"}},
{"auto": {"column_name": "petal_length"}},
{"auto": {"column_name": "petal_width"}},
]
_TEST_TRAINING_COLUMN_SPECS = {
"apple": "auto",
"banana": "auto",
"coconut": "auto",
}
_TEST_TRAINING_COLUMN_TRANSFORMATIONS_ALTERNATIVE = [
{"auto": {"column_name": "apple"}},
{"auto": {"column_name": "banana"}},
{"auto": {"column_name": "coconut"}},
]
_TEST_TRAINING_COLUMN_TRANSFORMATIONS_ALTERNATIVE_NOT_AUTO = [
{"numeric": {"column_name": "apple"}},
{"categorical": {"column_name": "banana"}},
{"text": {"column_name": "coconut"}},
]
_TEST_TRAINING_TARGET_COLUMN = "target"
_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS = 1000
_TEST_TRAINING_WEIGHT_COLUMN = "weight"
_TEST_TRAINING_DISABLE_EARLY_STOPPING = True
_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME = "minimize-log-loss"
_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE = "classification"
_TEST_TRAINING_EXPORT_EVALUATED_DATA_ITEMS = True
_TEST_TRAINING_EXPORT_EVALUATED_DATA_ITEMS_BIGQUERY_DESTINATION_URI = (
"bq://path.to.table"
)
_TEST_TRAINING_EXPORT_EVALUATED_DATA_ITEMS_OVERRIDE_DESTINATION = False
_TEST_ADDITIONAL_EXPERIMENTS = ["exp1", "exp2"]
_TEST_TRAINING_TASK_INPUTS_DICT = {
# required inputs
"targetColumn": _TEST_TRAINING_TARGET_COLUMN,
"transformations": _TEST_TRAINING_COLUMN_TRANSFORMATIONS,
"trainBudgetMilliNodeHours": _TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
# optional inputs
"weightColumnName": _TEST_TRAINING_WEIGHT_COLUMN,
"disableEarlyStopping": _TEST_TRAINING_DISABLE_EARLY_STOPPING,
"predictionType": _TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
"optimizationObjective": _TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
"optimizationObjectiveRecallValue": None,
"optimizationObjectivePrecisionValue": None,
}
_TEST_TRAINING_TASK_INPUTS = json_format.ParseDict(
_TEST_TRAINING_TASK_INPUTS_DICT, struct_pb2.Value(),
)
_TEST_TRAINING_TASK_INPUTS_WITH_ADDITIONAL_EXPERIMENTS = json_format.ParseDict(
{
**_TEST_TRAINING_TASK_INPUTS_DICT,
"additionalExperiments": _TEST_ADDITIONAL_EXPERIMENTS,
},
struct_pb2.Value(),
)
_TEST_TRAINING_TASK_INPUTS_ALTERNATIVE = json_format.ParseDict(
{
**_TEST_TRAINING_TASK_INPUTS_DICT,
"transformations": _TEST_TRAINING_COLUMN_TRANSFORMATIONS_ALTERNATIVE,
},
struct_pb2.Value(),
)
_TEST_TRAINING_TASK_INPUTS_ALTERNATIVE_NOT_AUTO = json_format.ParseDict(
{
**_TEST_TRAINING_TASK_INPUTS_DICT,
"transformations": _TEST_TRAINING_COLUMN_TRANSFORMATIONS_ALTERNATIVE_NOT_AUTO,
},
struct_pb2.Value(),
)
_TEST_TRAINING_TASK_INPUTS_WITH_EXPORT_EVAL_DATA_ITEMS = json_format.ParseDict(
{
**_TEST_TRAINING_TASK_INPUTS_DICT,
"exportEvaluatedDataItemsConfig": {
"destinationBigqueryUri": _TEST_TRAINING_EXPORT_EVALUATED_DATA_ITEMS_BIGQUERY_DESTINATION_URI,
"overrideExistingTable": _TEST_TRAINING_EXPORT_EVALUATED_DATA_ITEMS_OVERRIDE_DESTINATION,
},
},
struct_pb2.Value(),
)
_TEST_DATASET_NAME = "test-dataset-name"
_TEST_MODEL_DISPLAY_NAME = "model-display-name"
_TEST_LABELS = {"key": "value"}
_TEST_MODEL_LABELS = {"model_key": "model_value"}
_TEST_FRACTION_SPLIT_TRAINING = 0.6
_TEST_FRACTION_SPLIT_VALIDATION = 0.2
_TEST_FRACTION_SPLIT_TEST = 0.2
_TEST_SPLIT_PREDEFINED_COLUMN_NAME = "split"
_TEST_SPLIT_TIMESTAMP_COLUMN_NAME = "timestamp"
_TEST_OUTPUT_PYTHON_PACKAGE_PATH = "gs://test/output/python/trainer.tar.gz"
_TEST_MODEL_NAME = "projects/my-project/locations/us-central1/models/12345"
_TEST_PIPELINE_RESOURCE_NAME = (
"projects/my-project/locations/us-central1/trainingPipelines/12345"
)
# CMEK encryption
_TEST_DEFAULT_ENCRYPTION_KEY_NAME = "key_default"
_TEST_DEFAULT_ENCRYPTION_SPEC = gca_encryption_spec.EncryptionSpec(
kms_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME
)
_TEST_PIPELINE_ENCRYPTION_KEY_NAME = "key_pipeline"
_TEST_PIPELINE_ENCRYPTION_SPEC = gca_encryption_spec.EncryptionSpec(
kms_key_name=_TEST_PIPELINE_ENCRYPTION_KEY_NAME
)
_TEST_MODEL_ENCRYPTION_KEY_NAME = "key_model"
_TEST_MODEL_ENCRYPTION_SPEC = gca_encryption_spec.EncryptionSpec(
kms_key_name=_TEST_MODEL_ENCRYPTION_KEY_NAME
)
@pytest.fixture
def mock_pipeline_service_create():
with mock.patch.object(
pipeline_service_client.PipelineServiceClient, "create_training_pipeline"
) as mock_create_training_pipeline:
mock_create_training_pipeline.return_value = gca_training_pipeline.TrainingPipeline(
name=_TEST_PIPELINE_RESOURCE_NAME,
state=gca_pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED,
model_to_upload=gca_model.Model(name=_TEST_MODEL_NAME),
)
yield mock_create_training_pipeline
@pytest.fixture
def mock_pipeline_service_create_fail():
with mock.patch.object(
pipeline_service_client.PipelineServiceClient, "create_training_pipeline"
) as mock_create_training_pipeline:
mock_create_training_pipeline.side_effect = RuntimeError("Mock fail")
yield mock_create_training_pipeline
@pytest.fixture
def mock_pipeline_service_get():
with mock.patch.object(
pipeline_service_client.PipelineServiceClient, "get_training_pipeline"
) as mock_get_training_pipeline:
mock_get_training_pipeline.return_value = gca_training_pipeline.TrainingPipeline(
name=_TEST_PIPELINE_RESOURCE_NAME,
state=gca_pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED,
model_to_upload=gca_model.Model(name=_TEST_MODEL_NAME),
)
yield mock_get_training_pipeline
@pytest.fixture
def mock_pipeline_service_create_and_get_with_fail():
with mock.patch.object(
pipeline_service_client.PipelineServiceClient, "create_training_pipeline"
) as mock_create_training_pipeline:
mock_create_training_pipeline.return_value = gca_training_pipeline.TrainingPipeline(
name=_TEST_PIPELINE_RESOURCE_NAME,
state=gca_pipeline_state.PipelineState.PIPELINE_STATE_RUNNING,
)
with mock.patch.object(
pipeline_service_client.PipelineServiceClient, "get_training_pipeline"
) as mock_get_training_pipeline:
mock_get_training_pipeline.return_value = gca_training_pipeline.TrainingPipeline(
name=_TEST_PIPELINE_RESOURCE_NAME,
state=gca_pipeline_state.PipelineState.PIPELINE_STATE_FAILED,
)
yield mock_create_training_pipeline, mock_get_training_pipeline
@pytest.fixture
def mock_model_service_get():
with mock.patch.object(
model_service_client.ModelServiceClient, "get_model"
) as mock_get_model:
mock_get_model.return_value = gca_model.Model()
yield mock_get_model
@pytest.fixture
def mock_dataset_tabular():
ds = mock.MagicMock(datasets.TabularDataset)
ds.name = _TEST_DATASET_NAME
ds._latest_future = None
ds._exception = None
ds._gca_resource = gca_dataset.Dataset(
display_name=_TEST_DATASET_DISPLAY_NAME,
metadata_schema_uri=_TEST_METADATA_SCHEMA_URI_TABULAR,
labels={},
name=_TEST_DATASET_NAME,
metadata={},
)
ds.column_names = _TEST_TRAINING_COLUMN_NAMES
yield ds
@pytest.fixture
def mock_dataset_tabular_alternative():
ds = mock.MagicMock(datasets.TabularDataset)
ds.name = _TEST_DATASET_NAME
ds._latest_future = None
ds._exception = None
ds._gca_resource = gca_dataset.Dataset(
display_name=_TEST_DATASET_DISPLAY_NAME,
metadata_schema_uri=_TEST_METADATA_SCHEMA_URI_TABULAR,
labels={},
name=_TEST_DATASET_NAME,
metadata={},
)
ds.column_names = _TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE
yield ds
@pytest.fixture
def mock_dataset_nontabular():
ds = mock.MagicMock(datasets.ImageDataset)
ds.name = _TEST_DATASET_NAME
ds._latest_future = None
ds._exception = None
ds._gca_resource = gca_dataset.Dataset(
display_name=_TEST_DATASET_DISPLAY_NAME,
metadata_schema_uri=_TEST_METADATA_SCHEMA_URI_NONTABULAR,
labels={},
name=_TEST_DATASET_NAME,
metadata={},
)
return ds
class TestAutoMLTabularTrainingJob:
def setup_method(self):
importlib.reload(initializer)
importlib.reload(aiplatform)
def teardown_method(self):
initializer.global_pool.shutdown(wait=True)
@pytest.mark.parametrize("sync", [True, False])
def test_run_call_pipeline_service_create(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
aiplatform.init(
project=_TEST_PROJECT,
staging_bucket=_TEST_BUCKET_NAME,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
labels=_TEST_LABELS,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
model_labels=_TEST_MODEL_LABELS,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
additional_experiments=_TEST_ADDITIONAL_EXPERIMENTS,
sync=sync,
)
job.wait_for_resource_creation()
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
if not sync:
model_from_job.wait()
true_managed_model = gca_model.Model(
display_name=_TEST_MODEL_DISPLAY_NAME,
labels=_TEST_MODEL_LABELS,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
labels=_TEST_LABELS,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS_WITH_ADDITIONAL_EXPERIMENTS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
assert job._gca_resource is mock_pipeline_service_get.return_value
mock_model_service_get.assert_called_once_with(
name=_TEST_MODEL_NAME, retry=base._DEFAULT_RETRY
)
assert model_from_job._gca_resource is mock_model_service_get.return_value
assert job.get_model()._gca_resource is mock_model_service_get.return_value
assert not job.has_failed
assert job.state == gca_pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED
@pytest.mark.parametrize("sync", [True, False])
def test_run_call_pipeline_service_create_with_export_eval_data_items(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
aiplatform.init(
project=_TEST_PROJECT,
staging_bucket=_TEST_BUCKET_NAME,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
export_evaluated_data_items=_TEST_TRAINING_EXPORT_EVALUATED_DATA_ITEMS,
export_evaluated_data_items_bigquery_destination_uri=_TEST_TRAINING_EXPORT_EVALUATED_DATA_ITEMS_BIGQUERY_DESTINATION_URI,
export_evaluated_data_items_override_destination=_TEST_TRAINING_EXPORT_EVALUATED_DATA_ITEMS_OVERRIDE_DESTINATION,
sync=sync,
)
job.wait_for_resource_creation()
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
if not sync:
model_from_job.wait()
true_managed_model = gca_model.Model(
display_name=_TEST_MODEL_DISPLAY_NAME,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS_WITH_EXPORT_EVAL_DATA_ITEMS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
assert job._gca_resource is mock_pipeline_service_get.return_value
mock_model_service_get.assert_called_once_with(
name=_TEST_MODEL_NAME, retry=base._DEFAULT_RETRY
)
assert model_from_job._gca_resource is mock_model_service_get.return_value
assert job.get_model()._gca_resource is mock_model_service_get.return_value
assert not job.has_failed
assert job.state == gca_pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED
@pytest.mark.usefixtures("mock_pipeline_service_get")
@pytest.mark.parametrize("sync", [True, False])
def test_run_call_pipeline_if_no_model_display_name_nor_model_labels(
self,
mock_pipeline_service_create,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
labels=_TEST_LABELS,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
training_encryption_spec_key_name=_TEST_PIPELINE_ENCRYPTION_KEY_NAME,
model_encryption_spec_key_name=_TEST_MODEL_ENCRYPTION_KEY_NAME,
)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
)
job.wait_for_resource_creation()
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
if not sync:
model_from_job.wait()
        # Test that it defaults to the job display name
true_managed_model = gca_model.Model(
display_name=_TEST_DISPLAY_NAME,
labels=_TEST_LABELS,
encryption_spec=_TEST_MODEL_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
labels=_TEST_LABELS,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_PIPELINE_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
@pytest.mark.parametrize("sync", [True, False])
    # This test checks that default transformations are used if no column transformations are provided
def test_run_call_pipeline_service_create_if_no_column_transformations(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
aiplatform.init(
project=_TEST_PROJECT,
staging_bucket=_TEST_BUCKET_NAME,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
column_transformations=None,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
sync=sync,
)
job.wait_for_resource_creation()
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
if not sync:
model_from_job.wait()
true_managed_model = gca_model.Model(
display_name=_TEST_MODEL_DISPLAY_NAME,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
@pytest.mark.parametrize("sync", [True, False])
    # This test checks that additional experiments set via _add_additional_experiments are included in the training task inputs
def test_run_call_pipeline_service_create_if_set_additional_experiments(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
aiplatform.init(
project=_TEST_PROJECT,
staging_bucket=_TEST_BUCKET_NAME,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
column_transformations=None,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
job._add_additional_experiments(_TEST_ADDITIONAL_EXPERIMENTS)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
sync=sync,
)
job.wait_for_resource_creation()
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
if not sync:
model_from_job.wait()
true_managed_model = gca_model.Model(
display_name=_TEST_MODEL_DISPLAY_NAME,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS_WITH_ADDITIONAL_EXPERIMENTS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
@pytest.mark.parametrize("sync", [True, False])
def test_run_call_pipeline_service_create_with_column_specs(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular_alternative,
mock_model_service_get,
sync,
):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
column_specs = training_jobs.AutoMLTabularTrainingJob.get_auto_column_specs(
dataset=mock_dataset_tabular_alternative,
target_column=_TEST_TRAINING_TARGET_COLUMN,
)
assert column_specs == _TEST_TRAINING_COLUMN_SPECS
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
column_specs=column_specs,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular_alternative,
target_column=_TEST_TRAINING_TARGET_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
sync=sync,
)
if not sync:
model_from_job.wait()
true_managed_model = gca_model.Model(display_name=_TEST_MODEL_DISPLAY_NAME)
true_input_data_config = gca_training_pipeline.InputDataConfig(
dataset_id=mock_dataset_tabular_alternative.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS_ALTERNATIVE,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
@pytest.mark.parametrize("sync", [True, False])
def test_call_pipeline_service_create_with_column_specs_and_transformations_raises(
self, mock_dataset_tabular_alternative, sync,
):
aiplatform.init()
column_specs = training_jobs.AutoMLTabularTrainingJob.get_auto_column_specs(
dataset=mock_dataset_tabular_alternative,
target_column=_TEST_TRAINING_TARGET_COLUMN,
)
assert column_specs == _TEST_TRAINING_COLUMN_SPECS
with pytest.raises(ValueError):
training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
column_specs=column_specs,
)
@pytest.mark.parametrize("sync", [True, False])
def test_get_column_specs_no_target_raises(
self, mock_dataset_tabular_alternative, sync,
):
aiplatform.init()
with pytest.raises(TypeError):
training_jobs.AutoMLTabularTrainingJob.get_auto_column_specs(
dataset=mock_dataset_tabular_alternative
)
@pytest.mark.parametrize("sync", [True, False])
def test_run_call_pipeline_service_create_with_column_specs_not_auto(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular_alternative,
mock_model_service_get,
sync,
):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
column_specs = training_jobs.AutoMLTabularTrainingJob.get_auto_column_specs(
dataset=mock_dataset_tabular_alternative,
target_column=_TEST_TRAINING_TARGET_COLUMN,
)
column_specs[
_TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[0]
] = training_jobs.AutoMLTabularTrainingJob.column_data_types.NUMERIC
column_specs[
_TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[1]
] = training_jobs.AutoMLTabularTrainingJob.column_data_types.CATEGORICAL
column_specs[
_TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[2]
] = training_jobs.AutoMLTabularTrainingJob.column_data_types.TEXT
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
column_specs=column_specs,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular_alternative,
target_column=_TEST_TRAINING_TARGET_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
budget_milli_node_hours=_TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
sync=sync,
)
if not sync:
model_from_job.wait()
true_managed_model = gca_model.Model(display_name=_TEST_MODEL_DISPLAY_NAME)
true_input_data_config = gca_training_pipeline.InputDataConfig(
dataset_id=mock_dataset_tabular_alternative.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS_ALTERNATIVE_NOT_AUTO,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
@pytest.mark.usefixtures(
"mock_pipeline_service_create",
"mock_pipeline_service_get",
"mock_model_service_get",
)
@pytest.mark.parametrize("sync", [True, False])
# Also acts as a custom column_transformations test as it should not error during first call
def test_run_called_twice_raises(self, mock_dataset_tabular, sync):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
sync=sync,
)
job.wait_for_resource_creation()
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
with pytest.raises(RuntimeError):
job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
sync=sync,
)
@pytest.mark.parametrize("sync", [True, False])
def test_run_raises_if_pipeline_fails(
self, mock_pipeline_service_create_and_get_with_fail, mock_dataset_tabular, sync
):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
with pytest.raises(RuntimeError):
job.run(
model_display_name=_TEST_MODEL_DISPLAY_NAME,
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
sync=sync,
)
if not sync:
job.wait()
with pytest.raises(RuntimeError):
job.get_model()
def test_wait_for_resource_creation_does_not_fail_if_creation_does_not_fail(
self, mock_pipeline_service_create_and_get_with_fail, mock_dataset_tabular
):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
job.run(
model_display_name=_TEST_MODEL_DISPLAY_NAME,
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
sync=False,
)
job.wait_for_resource_creation()
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
with pytest.raises(RuntimeError):
job.wait()
with pytest.raises(RuntimeError):
job.get_model()
@pytest.mark.usefixtures("mock_pipeline_service_create_fail")
@pytest.mark.parametrize("sync", [True, False])
def test_create_fails(self, mock_dataset_tabular, sync):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
if sync:
with pytest.raises(RuntimeError) as e:
job.run(
model_display_name=_TEST_MODEL_DISPLAY_NAME,
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
sync=sync,
)
assert e.match("Mock fail")
with pytest.raises(RuntimeError) as e:
job.wait_for_resource_creation()
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource is not scheduled to be created."
)
with pytest.raises(RuntimeError) as e:
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created."
)
job.wait()
with pytest.raises(RuntimeError) as e:
job.get_model()
e.match(
regexp="TrainingPipeline has not been launched. You must run this TrainingPipeline using TrainingPipeline.run."
)
else:
job.run(
model_display_name=_TEST_MODEL_DISPLAY_NAME,
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
sync=sync,
)
with pytest.raises(RuntimeError) as e:
job.wait_for_resource_creation()
assert e.match(regexp=r"Mock fail")
with pytest.raises(RuntimeError) as e:
assert job.resource_name == _TEST_PIPELINE_RESOURCE_NAME
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created. Resource failed with: Mock fail"
)
with pytest.raises(RuntimeError):
job.wait()
with pytest.raises(RuntimeError):
job.get_model()
def test_raises_before_run_is_called(self, mock_pipeline_service_create):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
with pytest.raises(RuntimeError):
job.get_model()
with pytest.raises(RuntimeError):
job.has_failed
with pytest.raises(RuntimeError):
job.state
with pytest.raises(RuntimeError) as e:
job.wait_for_resource_creation()
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource is not scheduled to be created."
)
def test_properties_throw_if_not_available(self):
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
)
with pytest.raises(RuntimeError) as e:
job.name
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created"
)
with pytest.raises(RuntimeError) as e:
job.resource_name
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created"
)
with pytest.raises(RuntimeError) as e:
job.display_name
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created"
)
with pytest.raises(RuntimeError) as e:
job.create_time
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created"
)
with pytest.raises(RuntimeError) as e:
job.encryption_spec
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created"
)
with pytest.raises(RuntimeError) as e:
job.labels
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created"
)
with pytest.raises(RuntimeError) as e:
job.gca_resource
assert e.match(
regexp=r"AutoMLTabularTrainingJob resource has not been created"
)
@pytest.mark.parametrize("sync", [True, False])
def test_splits_fraction(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
"""
        Initialize aiplatform with an encryption key name.
        Create and run an AutoML Tabular training job, then verify the calls and return value.
"""
aiplatform.init(
project=_TEST_PROJECT,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
training_fraction_split=_TEST_FRACTION_SPLIT_TRAINING,
validation_fraction_split=_TEST_FRACTION_SPLIT_VALIDATION,
test_fraction_split=_TEST_FRACTION_SPLIT_TEST,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
sync=sync,
)
if not sync:
model_from_job.wait()
true_fraction_split = gca_training_pipeline.FractionSplit(
training_fraction=_TEST_FRACTION_SPLIT_TRAINING,
validation_fraction=_TEST_FRACTION_SPLIT_VALIDATION,
test_fraction=_TEST_FRACTION_SPLIT_TEST,
)
true_managed_model = gca_model.Model(
display_name=_TEST_MODEL_DISPLAY_NAME,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
fraction_split=true_fraction_split, dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
@pytest.mark.parametrize("sync", [True, False])
def test_splits_timestamp(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
"""
        Initialize aiplatform with an encryption key name.
        Create and run an AutoML Tabular training job, then verify the calls and return value.
"""
aiplatform.init(
project=_TEST_PROJECT,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
training_fraction_split=_TEST_FRACTION_SPLIT_TRAINING,
validation_fraction_split=_TEST_FRACTION_SPLIT_VALIDATION,
test_fraction_split=_TEST_FRACTION_SPLIT_TEST,
timestamp_split_column_name=_TEST_SPLIT_TIMESTAMP_COLUMN_NAME,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
sync=sync,
)
if not sync:
model_from_job.wait()
true_split = gca_training_pipeline.TimestampSplit(
training_fraction=_TEST_FRACTION_SPLIT_TRAINING,
validation_fraction=_TEST_FRACTION_SPLIT_VALIDATION,
test_fraction=_TEST_FRACTION_SPLIT_TEST,
key=_TEST_SPLIT_TIMESTAMP_COLUMN_NAME,
)
true_managed_model = gca_model.Model(
display_name=_TEST_MODEL_DISPLAY_NAME,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
timestamp_split=true_split, dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
@pytest.mark.parametrize("sync", [True, False])
def test_splits_predefined(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
"""
        Initialize aiplatform with an encryption key name.
        Create and run an AutoML Tabular training job, then verify the calls and return value.
"""
aiplatform.init(
project=_TEST_PROJECT,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
predefined_split_column_name=_TEST_SPLIT_PREDEFINED_COLUMN_NAME,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
sync=sync,
)
if not sync:
model_from_job.wait()
true_split = gca_training_pipeline.PredefinedSplit(
key=_TEST_SPLIT_PREDEFINED_COLUMN_NAME
)
true_managed_model = gca_model.Model(
display_name=_TEST_MODEL_DISPLAY_NAME,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
predefined_split=true_split, dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
@pytest.mark.parametrize("sync", [True, False])
def test_splits_default(
self,
mock_pipeline_service_create,
mock_pipeline_service_get,
mock_dataset_tabular,
mock_model_service_get,
sync,
):
"""
        Initialize aiplatform with an encryption key name.
        Create and run an AutoML Tabular training job, then verify the calls and return value.
"""
aiplatform.init(
project=_TEST_PROJECT,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
optimization_objective_recall_value=None,
optimization_objective_precision_value=None,
)
model_from_job = job.run(
dataset=mock_dataset_tabular,
target_column=_TEST_TRAINING_TARGET_COLUMN,
weight_column=_TEST_TRAINING_WEIGHT_COLUMN,
model_display_name=_TEST_MODEL_DISPLAY_NAME,
disable_early_stopping=_TEST_TRAINING_DISABLE_EARLY_STOPPING,
sync=sync,
)
if not sync:
model_from_job.wait()
true_managed_model = gca_model.Model(
display_name=_TEST_MODEL_DISPLAY_NAME,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
true_input_data_config = gca_training_pipeline.InputDataConfig(
dataset_id=mock_dataset_tabular.name,
)
true_training_pipeline = gca_training_pipeline.TrainingPipeline(
display_name=_TEST_DISPLAY_NAME,
training_task_definition=schema.training_job.definition.automl_tabular,
training_task_inputs=_TEST_TRAINING_TASK_INPUTS,
model_to_upload=true_managed_model,
input_data_config=true_input_data_config,
encryption_spec=_TEST_DEFAULT_ENCRYPTION_SPEC,
)
mock_pipeline_service_create.assert_called_once_with(
parent=initializer.global_config.common_location_path(),
training_pipeline=true_training_pipeline,
)
| googleapis/python-aiplatform | tests/unit/aiplatform/test_automl_tabular_training_jobs.py | Python | apache-2.0 | 50,875 |
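A standalone sketch, not taken from the tests above, of the json_format.ParseDict pattern used to build the _TEST_TRAINING_TASK_INPUTS* constants: a plain dict is packed into a protobuf struct_pb2.Value and can be round-tripped back with MessageToDict.
from google.protobuf import json_format
from google.protobuf import struct_pb2
inputs_dict = {"targetColumn": "target", "trainBudgetMilliNodeHours": 1000}
inputs_value = json_format.ParseDict(inputs_dict, struct_pb2.Value())
round_tripped = json_format.MessageToDict(inputs_value)
assert round_tripped["targetColumn"] == "target"
# JSON numbers round-trip through Value as floats, so compare numerically.
assert round_tripped["trainBudgetMilliNodeHours"] == 1000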
#
# Created as part of the StratusLab project (http://stratuslab.eu),
# co-funded by the European Commission under the Grant Agreement
# INFSO-RI-261552."
#
# Copyright (c) 2011, SixSq Sarl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
class FileAppender(object):
def __init__(self, filename):
self.filename = filename
self.lines = []
self.newLines = []
self.foundExit = False
def insertAtTheEnd(self, newLine):
        ''' Append newLine at the end of the file (e.g. rc.local),
            unless the file ends with 'exit', in which case insert it
            just before that line.'''
self._backupIfNecessary()
self._reverseLines()
newLine = newLine + '\n'
self.foundExit = False
for line in self.lines:
if line.strip() == '':
self._appendNewLine(line)
continue
if self._containsExit(line):
self._insertLines(line, newLine)
continue
if self.foundExit:
self._appendNewLine(line)
continue
self._insertLines(newLine, line)
self.newLines.reverse()
self._writeAndClose()
def _backupIfNecessary(self):
originalFilename = self.filename + '.orig'
if not os.path.exists(originalFilename):
shutil.copyfile(self.filename, originalFilename)
def _reverseLines(self):
file = open(self.filename)
self.lines = file.readlines()
self.lines.reverse()
def _appendNewLine(self, line):
self.newLines.append(line)
def _containsExit(self, line):
return line.strip().startswith('exit')
def _insertLines(self, first, second):
self.foundExit = True
self._appendNewLine(first)
self._appendNewLine(second)
def _writeAndClose(self):
newfile = open(self.filename, 'w')
newfile.writelines(self.newLines)
newfile.close()
os.chmod(self.filename, 0755)
| StratusLab/client | api/code/src/main/python/stratuslab/FileAppender.py | Python | apache-2.0 | 2,638 |
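A standalone sketch, not taken from the StratusLab code above, of the behaviour documented by FileAppender.insertAtTheEnd: append a line to an rc.local-style script, placing it just before a trailing 'exit' line when one is present (backup and chmod handling are omitted).
def insert_before_exit(lines, new_line):
    # Walk backwards past blank lines; insert before a trailing 'exit',
    # otherwise insert right after the last non-blank line.
    out = list(lines)
    for i in range(len(out) - 1, -1, -1):
        stripped = out[i].strip()
        if not stripped:
            continue
        if stripped.startswith("exit"):
            out.insert(i, new_line + "\n")
        else:
            out.insert(i + 1, new_line + "\n")
        return out
    out.append(new_line + "\n")
    return out
rc_local = ["#!/bin/sh\n", "touch /tmp/booted\n", "exit 0\n"]
assert insert_before_exit(rc_local, "echo configured") == [
    "#!/bin/sh\n", "touch /tmp/booted\n", "echo configured\n", "exit 0\n"]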
# Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import string
import os
import logging
import re
import itertools
from collections import defaultdict
from collections import OrderedDict
# bsd licensed - pip install jinja2
from jinja2 import Environment, FileSystemLoader
# fsutils, , misc filesystem utils, internal
import fsutils
# validate, , validate various things, internal
import validate
Template_Dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'templates')
logger = logging.getLogger('cmakegen')
Ignore_Subdirs = set(('build','yotta_modules', 'yotta_targets', 'CMake'))
jinja_environment = Environment(loader=FileSystemLoader(Template_Dir), trim_blocks=True, lstrip_blocks=True)
def replaceBackslashes(s):
return s.replace('\\', '/')
def sanitizePreprocessorSymbol(sym):
return re.sub('[^a-zA-Z0-9]', '_', str(sym)).upper()
def sanitizeSymbol(sym):
return re.sub('[^a-zA-Z0-9]', '_', str(sym))
jinja_environment.filters['replaceBackslashes'] = replaceBackslashes
jinja_environment.filters['sanitizePreprocessorSymbol'] = sanitizePreprocessorSymbol
jinja_environment.globals['list'] = list
class SourceFile(object):
def __init__(self, fullpath, relpath, lang):
super(SourceFile, self).__init__()
self.fullpath = fullpath
self.relpath = relpath
self.lang = lang
def __repr__(self):
return self.fullpath
class CMakeGen(object):
def __init__(self, directory, target):
super(CMakeGen, self).__init__()
self.buildroot = directory
logger.info("generate for target: %s" % target)
self.target = target
self.config_include_file = None
self.build_info_include_file = None
self.build_uuid = None
def _writeFile(self, path, contents):
dirname = os.path.dirname(path)
fsutils.mkDirP(dirname)
self.writeIfDifferent(path, contents)
def generateRecursive(self, component, all_components, builddir=None, modbuilddir=None, processed_components=None, application=None):
''' generate top-level CMakeLists for this component and its
dependencies: the CMakeLists are all generated in self.buildroot,
which MUST be out-of-source
!!! NOTE: experimenting with a slightly different way of doing
things here, this function is a generator that yields any errors
produced, so the correct use is:
for error in gen.generateRecursive(...):
print(error)
'''
if builddir is None:
builddir = self.buildroot
if modbuilddir is None:
modbuilddir = os.path.join(builddir, 'ym')
if processed_components is None:
processed_components = dict()
if not self.target:
yield 'Target "%s" is not a valid build target' % self.target
toplevel = not len(processed_components)
logger.debug('generate build files: %s (target=%s)' % (component, self.target))
# because of the way c-family language includes work we need to put the
# public header directories of all components that this component
# depends on (directly OR indirectly) into the search path, which means
# we need to first enumerate all the direct and indirect dependencies
recursive_deps = component.getDependenciesRecursive(
available_components = all_components,
target = self.target,
available_only = True,
test = True
)
dependencies = component.getDependencies(
all_components,
target = self.target,
available_only = True,
test = True
)
for name, dep in dependencies.items():
# if dep is a test dependency, then it might not be required (if
# we're not building tests). We don't actually know at this point
if not dep:
if dep.isTestDependency():
logger.debug('Test dependency "%s" of "%s" is not installed.' % (name, component))
else:
yield 'Required dependency "%s" of "%s" is not installed.' % (name, component)
# ensure this component is assumed to have been installed before we
# check for its dependencies, in case it has a circular dependency on
# itself
processed_components[component.getName()] = component
new_dependencies = OrderedDict([(name,c) for name,c in dependencies.items() if c and not name in processed_components])
self.generate(builddir, modbuilddir, component, new_dependencies, dependencies, recursive_deps, application, toplevel)
logger.debug('recursive deps of %s:' % component)
for d in recursive_deps.values():
logger.debug(' %s' % d)
processed_components.update(new_dependencies)
for name, c in new_dependencies.items():
for error in self.generateRecursive(
c, all_components, os.path.join(modbuilddir, name), modbuilddir, processed_components, application=application
):
yield error
def checkStandardSourceDir(self, dirname, component):
err = validate.sourceDirValidationError(dirname, component.getName())
if err:
logger.warn(err)
def _listSubDirectories(self, component):
''' return: {
manual: [list of subdirectories with manual CMakeLists],
auto: [list of pairs: (subdirectories name to autogenerate, a list of source files in that dir)],
bin: {dictionary of subdirectory name to binary name},
test: [list of directories that build tests]
resource: [list of directories that contain resources]
}
'''
manual_subdirs = []
auto_subdirs = []
        bin_subdirs = {os.path.normpath(x): y for x, y in component.getBinaries().items()}
test_subdirs = []
resource_subdirs = []
for f in sorted(os.listdir(component.path)):
if f in Ignore_Subdirs or f.startswith('.') or f.startswith('_'):
continue
if os.path.isfile(os.path.join(component.path, f, 'CMakeLists.txt')):
self.checkStandardSourceDir(f, component)
# if the subdirectory has a CMakeLists.txt in it, then use that
manual_subdirs.append(f)
# tests only supported in the `test` directory for now
if f in ('test',):
test_subdirs.append(f)
elif f in ('source', 'test') or os.path.normpath(f) in bin_subdirs:
# otherwise, if the directory has source files, generate a
# CMakeLists in the corresponding temporary directory, and add
# that.
# For now we only do this for the source and test directories -
# in theory we could do others
sources = self.containsSourceFiles(os.path.join(component.path, f), component)
if sources:
auto_subdirs.append((f, sources))
# tests only supported in the `test` directory for now
if f in ('test',):
test_subdirs.append(f)
            elif f in ('resource',):
resource_subdirs.append(os.path.join(component.path, f))
elif f.lower() in ('source', 'src', 'test', 'resource'):
self.checkStandardSourceDir(f, component)
return {
"manual": manual_subdirs,
"auto": auto_subdirs,
"bin": bin_subdirs,
"test": test_subdirs,
"resource": resource_subdirs
}
def _definitionsForConfig(self, config, key_path=None):
if key_path is None:
key_path = list()
key_prefix = '_'.join([sanitizePreprocessorSymbol(x) for x in key_path])
r = []
if len(key_prefix):
r.append((key_prefix,None))
for (k, v) in config.items():
if isinstance(v, dict):
r += self._definitionsForConfig(v, key_path + [k])
else:
# Don't validate the value here (we wouldn't know where an
# invalid value came from, so the error message would be
# unhelpful) - the target schema should validate values, or if
# that isn't possible then the target should check when loading
if isinstance(v, bool):
# convert bool to 1/0, since we can't know the availability
# of a C bool type
v = 1 if v else 0
r.append(('%s_%s' % (key_prefix, sanitizePreprocessorSymbol(k)), v))
return r
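# Illustrative example only (hypothetical config, not from the original module): given
#   {'mbed': {'stdio': {'baud': 9600, 'convert': True}}}
# and key_path ['YOTTA', 'CFG'], _definitionsForConfig yields definitions such as
#   ('YOTTA_CFG', None), ('YOTTA_CFG_MBED', None), ('YOTTA_CFG_MBED_STDIO', None),
#   ('YOTTA_CFG_MBED_STDIO_BAUD', 9600), ('YOTTA_CFG_MBED_STDIO_CONVERT', 1)
# i.e. each dictionary level gets a bare marker define and booleans become 1/0.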
def getConfigData(self, all_dependencies, component, builddir, build_info_header_path):
''' returns (path_to_config_header, cmake_set_definitions) '''
add_defs_header = ''
set_definitions = ''
# !!! backwards-compatible "TARGET_LIKE" definitions for the top-level
# of the config. NB: THESE WILL GO AWAY
definitions = []
definitions.append(('TARGET', sanitizePreprocessorSymbol(self.target.getName())))
definitions.append(('TARGET_LIKE_%s' % sanitizePreprocessorSymbol(self.target.getName()),None))
# make the path to the build-info header available both to CMake and
# in the preprocessor:
full_build_info_header_path = replaceBackslashes(os.path.abspath(build_info_header_path))
logger.debug('build info header include path: "%s"', full_build_info_header_path)
definitions.append(('YOTTA_BUILD_INFO_HEADER', '"'+full_build_info_header_path+'"'))
for target in self.target.getSimilarTo_Deprecated():
if '*' not in target:
definitions.append(('TARGET_LIKE_%s' % sanitizePreprocessorSymbol(target),None))
logger.debug('target configuration data: %s', self.target.getMergedConfig())
definitions += self._definitionsForConfig(self.target.getMergedConfig(), ['YOTTA', 'CFG'])
add_defs_header += '// yotta config data (including backwards-compatible definitions)\n'
for k, v in definitions:
if v is not None:
add_defs_header += '#define %s %s\n' % (k, v)
set_definitions += 'set(%s %s)\n' % (k, v)
else:
add_defs_header += '#define %s\n' % k
set_definitions += 'set(%s TRUE)\n' % k
add_defs_header += '\n// version definitions\n'
for dep in list(all_dependencies.values()) + [component]:
add_defs_header += "#define YOTTA_%s_VERSION_STRING \"%s\"\n" % (sanitizePreprocessorSymbol(dep.getName()), str(dep.getVersion()))
add_defs_header += "#define YOTTA_%s_VERSION_MAJOR %d\n" % (sanitizePreprocessorSymbol(dep.getName()), dep.getVersion().major())
add_defs_header += "#define YOTTA_%s_VERSION_MINOR %d\n" % (sanitizePreprocessorSymbol(dep.getName()), dep.getVersion().minor())
add_defs_header += "#define YOTTA_%s_VERSION_PATCH %d\n" % (sanitizePreprocessorSymbol(dep.getName()), dep.getVersion().patch())
# use -include <definitions header> instead of lots of separate
# defines... this is compiler specific, but currently testing it
# out for gcc-compatible compilers only:
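# For illustration only (assumed compiler invocation, not generated by this code): a
# gcc-compatible compiler would force-include the generated header on every translation
# unit with something like:
#   gcc -include yotta_config.h -c source/foo.c -o foo.o
# so sources see the config definitions without #including the header themselves.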
config_include_file = os.path.join(builddir, 'yotta_config.h')
self._writeFile(
config_include_file,
'#ifndef __YOTTA_CONFIG_H__\n'+
'#define __YOTTA_CONFIG_H__\n'+
add_defs_header+
'#endif // ndef __YOTTA_CONFIG_H__\n'
)
return (config_include_file, set_definitions)
def getBuildInfo(self, sourcedir, builddir):
''' Write the build info header file, and return (path_to_written_header, set_cmake_definitions) '''
cmake_defs = ''
preproc_defs = '// yotta build info, #include YOTTA_BUILD_INFO_HEADER to access\n'
# standard library modules
import datetime
# vcs, , represent version controlled directories, internal
import vcs
now = datetime.datetime.utcnow()
vcs = vcs.getVCS(sourcedir)
if self.build_uuid is None:
import uuid
self.build_uuid = uuid.uuid4()
definitions = [
('YOTTA_BUILD_YEAR', now.year, 'UTC year'),
('YOTTA_BUILD_MONTH', now.month, 'UTC month 1-12'),
('YOTTA_BUILD_DAY', now.day, 'UTC day 1-31'),
('YOTTA_BUILD_HOUR', now.hour, 'UTC hour 0-23'),
('YOTTA_BUILD_MINUTE', now.minute, 'UTC minute 0-59'),
('YOTTA_BUILD_SECOND', now.second, 'UTC second 0-61'),
('YOTTA_BUILD_UUID', self.build_uuid, 'unique random UUID for each build'),
]
if vcs is not None:
definitions += [
('YOTTA_BUILD_VCS_ID', vcs.getCommitId(), 'git or mercurial hash'),
# assumes the internal vcs module exposes isClean() for the working-copy state:
('YOTTA_BUILD_VCS_CLEAN', int(vcs.isClean()), 'evaluates true if the version control system was clean, otherwise false')
]
for d in definitions:
preproc_defs += '#define %s %s // %s\n' % d
cmake_defs += 'set(%s "%s") # %s\n' % d
buildinfo_include_file = os.path.join(builddir, 'yotta_build_info.h')
self._writeFile(
buildinfo_include_file,
'#ifndef __YOTTA_BUILD_INFO_H__\n'+
'#define __YOTTA_BUILD_INFO_H__\n'+
preproc_defs+
'#endif // ndef __YOTTA_BUILD_INFO_H__\n'
)
return (buildinfo_include_file, cmake_defs)
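# Illustrative only (hypothetical values): the generated yotta_build_info.h contains
# lines of the form
#   #define YOTTA_BUILD_YEAR 2015 // UTC year
#   #define YOTTA_BUILD_UUID 1b4e28ba-2fa1-11d2-883f-0016d3cca427 // unique random UUID for each build
# mirroring the cmake set() definitions returned alongside it.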
def generate(
self, builddir, modbuilddir, component, active_dependencies, immediate_dependencies, all_dependencies, application, toplevel
):
''' active_dependencies is the dictionary of components that need to be
built for this component, but will not already have been built for
another component.
'''
set_definitions = ''
if self.build_info_include_file is None:
assert(toplevel)
self.build_info_include_file, build_info_definitions = self.getBuildInfo(component.path, builddir)
set_definitions += build_info_definitions
if self.config_include_file is None:
self.config_include_file, config_definitions = self.getConfigData(all_dependencies, component, builddir, self.build_info_include_file)
set_definitions += config_definitions
include_root_dirs = ''
if application is not None and component is not application:
include_root_dirs += 'include_directories("%s")\n' % replaceBackslashes(application.path)
include_sys_dirs = ''
include_other_dirs = ''
for name, c in itertools.chain(((component.getName(), component),), all_dependencies.items()):
if c is not component and c.isTestDependency():
continue
include_root_dirs += 'include_directories("%s")\n' % replaceBackslashes(c.path)
dep_sys_include_dirs = c.getExtraSysIncludes()
for d in dep_sys_include_dirs:
include_sys_dirs += 'include_directories(SYSTEM "%s")\n' % replaceBackslashes(os.path.join(c.path, d))
dep_extra_include_dirs = c.getExtraIncludes()
for d in dep_extra_include_dirs:
include_other_dirs += 'include_directories("%s")\n' % replaceBackslashes(os.path.join(c.path, d))
add_depend_subdirs = ''
for name, c in active_dependencies.items():
depend_subdir = replaceBackslashes(os.path.join(modbuilddir, name))
add_depend_subdirs += 'add_subdirectory("%s" "%s")\n' % (
depend_subdir, depend_subdir
)
delegate_to_existing = None
delegate_build_dir = None
if os.path.isfile(os.path.join(component.path, 'CMakeLists.txt')):
delegate_to_existing = component.path
add_own_subdirs = []
logger.debug("delegate to build dir: %s", builddir)
delegate_build_dir = os.path.join(builddir, 'existing')
else:
subdirs = self._listSubDirectories(component)
manual_subdirs = subdirs['manual']
autogen_subdirs = subdirs['auto']
binary_subdirs = subdirs['bin']
test_subdirs = subdirs['test']
resource_subdirs = subdirs['resource']
add_own_subdirs = []
for f in manual_subdirs:
if os.path.isfile(os.path.join(component.path, f, 'CMakeLists.txt')):
add_own_subdirs.append(
(os.path.join(component.path, f), os.path.join(builddir, f))
)
# names of all directories at this level with stuff in: used to figure
# out what to link automatically
all_subdirs = manual_subdirs + [x[0] for x in autogen_subdirs]
for f, source_files in autogen_subdirs:
if f in binary_subdirs:
exe_name = binary_subdirs[f]
else:
exe_name = None
if f in test_subdirs:
# if this module is a test dependency, then don't recurse
# to building its own tests.
if component.isTestDependency():
continue
self.generateTestDirList(
builddir, f, source_files, component, immediate_dependencies, toplevel=toplevel
)
else:
self.generateSubDirList(
builddir, f, source_files, component, all_subdirs,
immediate_dependencies, exe_name, resource_subdirs
)
add_own_subdirs.append(
(os.path.join(builddir, f), os.path.join(builddir, f))
)
# from now on, completely forget that this component had any tests
# if it is itself a test dependency:
if component.isTestDependency():
test_subdirs = []
# if we're not building anything other than tests, then we need to
# generate a dummy library so that this component can still be linked
# against
if len(add_own_subdirs) <= len(test_subdirs):
add_own_subdirs.append(self.createDummyLib(
component, builddir, [x[0] for x in immediate_dependencies.items() if not x[1].isTestDependency()]
))
# generate the top-level toolchain file:
template = jinja_environment.get_template('toolchain.cmake')
file_contents = template.render({
# toolchain files are provided in hierarchy
# order, but the template needs them in reverse
# order (base-first):
"toolchain_files": reversed(self.target.getToolchainFiles())
})
toolchain_file_path = os.path.join(builddir, 'toolchain.cmake')
self._writeFile(toolchain_file_path, file_contents)
# generate the top-level CMakeLists.txt
template = jinja_environment.get_template('base_CMakeLists.txt')
file_contents = template.render({
"toplevel": toplevel,
"target_name": self.target.getName(),
"set_definitions": set_definitions,
"toolchain_file": toolchain_file_path,
"component": component,
"include_root_dirs": include_root_dirs,
"include_sys_dirs": include_sys_dirs,
"include_other_dirs": include_other_dirs,
"add_depend_subdirs": add_depend_subdirs,
"add_own_subdirs": add_own_subdirs,
"config_include_file": self.config_include_file,
"delegate_to": delegate_to_existing,
"delegate_build_dir": delegate_build_dir,
"active_dependencies": active_dependencies
})
self._writeFile(os.path.join(builddir, 'CMakeLists.txt'), file_contents)
def createDummyLib(self, component, builddir, link_dependencies):
safe_name = sanitizeSymbol(component.getName())
dummy_dirname = 'yotta_dummy_lib_%s' % safe_name
dummy_cfile_name = 'dummy.c'
logger.debug("create dummy lib: %s, %s, %s" % (safe_name, dummy_dirname, dummy_cfile_name))
dummy_template = jinja_environment.get_template('dummy_CMakeLists.txt')
dummy_cmakelists = dummy_template.render({
"cfile_name": dummy_cfile_name,
"libname": component.getName(),
"link_dependencies": link_dependencies
})
self._writeFile(os.path.join(builddir, dummy_dirname, "CMakeLists.txt"), dummy_cmakelists)
dummy_cfile = "void __yotta_dummy_lib_symbol_%s(){}\n" % safe_name
self._writeFile(os.path.join(builddir, dummy_dirname, dummy_cfile_name), dummy_cfile)
return (os.path.join(builddir, dummy_dirname), os.path.join(builddir, dummy_dirname))
def writeIfDifferent(self, fname, contents):
try:
with open(fname, "r+") as f:
current_contents = f.read()
if current_contents != contents:
f.seek(0)
f.write(contents)
f.truncate()
except IOError:
with open(fname, "w") as f:
f.write(contents)
def generateTestDirList(self, builddir, dirname, source_files, component, immediate_dependencies, toplevel=False):
logger.debug('generate CMakeLists.txt for directory: %s' % os.path.join(component.path, dirname))
link_dependencies = [x for x in immediate_dependencies]
fname = os.path.join(builddir, dirname, 'CMakeLists.txt')
# group the list of source files by subdirectory: generate one test for
# each subdirectory, and one test for each file at the top level
subdirs = defaultdict(list)
toplevel_srcs = []
for f in source_files:
if f.lang in ('c', 'cpp', 'objc', 's'):
subrelpath = os.path.relpath(f.relpath, dirname)
subdir = fsutils.fullySplitPath(subrelpath)[0]
if subdir and subdir != subrelpath:
subdirs[subdir].append(f)
else:
toplevel_srcs.append(f)
tests = []
for f in toplevel_srcs:
object_name = '%s-test-%s' % (
component.getName(), os.path.basename(os.path.splitext(str(f))[0]).lower()
)
tests.append([[str(f)], object_name, [f.lang]])
for subdirname, sources in sorted(subdirs.items(), key=lambda x: x[0]):
object_name = '%s-test-%s' % (
component.getName(), fsutils.fullySplitPath(subdirname)[0].lower()
)
tests.append([[str(f) for f in sources], object_name, [f.lang for f in sources]])
# link tests against the main executable
link_dependencies.append(component.getName())
# Find cmake files
cmake_files = []
for root, dires, files in os.walk(os.path.join(component.path, dirname)):
for f in files:
name, ext = os.path.splitext(f)
if ext.lower() == '.cmake':
cmake_files.append(os.path.join(root, f))
test_template = jinja_environment.get_template('test_CMakeLists.txt')
file_contents = test_template.render({
'source_directory':os.path.join(component.path, dirname),
'tests':tests,
'link_dependencies':link_dependencies,
'cmake_files': cmake_files,
'exclude_from_all': (not toplevel),
'test_dependencies': [x[1] for x in immediate_dependencies.items() if x[1].isTestDependency()]
})
self._writeFile(fname, file_contents)
def generateSubDirList(self, builddir, dirname, source_files, component, all_subdirs, immediate_dependencies, executable_name, resource_subdirs):
logger.debug('generate CMakeLists.txt for directory: %s' % os.path.join(component.path, dirname))
link_dependencies = [x[0] for x in immediate_dependencies.items() if not x[1].isTestDependency()]
fname = os.path.join(builddir, dirname, 'CMakeLists.txt')
if dirname == 'source' or executable_name:
if executable_name:
object_name = executable_name
executable = True
else:
object_name = component.getName()
executable = False
# if we're building the main library, or an executable for this
# component, then we should link against all the other directories
# containing cmakelists:
link_dependencies += [x for x in all_subdirs if x not in ('source', 'test', dirname)]
# Find resource files
resource_files = []
for f in resource_subdirs:
for root, dires, files in os.walk(f):
if root.endswith(".xcassets") or root.endswith(".bundle"):
resource_files.append(root)
del dires[:]
else:
for f in files:
resource_files.append(os.path.join(root, f))
# Find cmake files
cmake_files = []
for root, dires, files in os.walk(os.path.join(component.path, dirname)):
for f in files:
name, ext = os.path.splitext(f)
if ext.lower() == '.cmake':
cmake_files.append(os.path.join(root, f))
subdir_template = jinja_environment.get_template('subdir_CMakeLists.txt')
file_contents = subdir_template.render({
'source_directory': os.path.join(component.path, dirname),
"config_include_file": self.config_include_file,
'executable': executable,
'file_names': [str(f) for f in source_files],
'object_name': object_name,
'link_dependencies': link_dependencies,
'languages': set(f.lang for f in source_files),
'source_files': set((f.fullpath, f.lang) for f in source_files),
'resource_files': resource_files,
'cmake_files': cmake_files
})
else:
raise Exception('auto CMakeLists for non-source/test directories is not supported')
self._writeFile(fname, file_contents)
def containsSourceFiles(self, directory, component):
c_exts = set(('.c',))
cpp_exts = set(('.cpp','.cc','.cxx'))
asm_exts = set(('.s',))
objc_exts = set(('.m', '.mm'))
header_exts = set(('.h',))
sources = []
for root, dires, files in os.walk(directory):
for f in sorted(files):
name, ext = os.path.splitext(f)
ext = ext.lower()
fullpath = os.path.join(root, f)
relpath = os.path.relpath(fullpath, component.path)
if component.ignores(relpath):
continue
if ext in c_exts:
sources.append(SourceFile(fullpath, relpath, 'c'))
elif ext in cpp_exts:
sources.append(SourceFile(fullpath, relpath, 'cpp'))
elif ext in asm_exts:
sources.append(SourceFile(fullpath, relpath, 's'))
elif ext in objc_exts:
sources.append(SourceFile(fullpath, relpath, 'objc'))
elif ext in header_exts:
sources.append(SourceFile(fullpath, relpath, 'header'))
return sources
|
stevenewey/yotta
|
yotta/lib/cmakegen.py
|
Python
|
apache-2.0
| 28,539
|
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Indicators")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *
from QuantConnect.Indicators import *
from QuantConnect.Data.Market import *
from QuantConnect.Data.Consolidators import *
from datetime import timedelta
class DataConsolidationAlgorithm(QCAlgorithm):
'''Example algorithm giving an introduction into using IDataConsolidators.
This is an advanced QC concept and requires a certain level of comfort using C# and its event system.
What is an IDataConsolidator?
IDataConsolidator is a plugin point that can be used to transform your data more easily.
In this example we show one of the simplest consolidators, the TradeBarConsolidator.
This type is capable of taking a timespan to indicate how long each bar should be, or an
integer to indicate how many bars should be aggregated into one.
When a new 'consolidated' piece of data is produced by the IDataConsolidator,
an event is fired with the argument of the new data.
If you are unfamiliar with C# events, or events in general, you may find this useful. This is
Microsoft's overview of events in C#
http://msdn.microsoft.com/en-us/library/aa645739%28v=vs.71%29.aspx'''
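# Illustrative sketch only, using the same API calls as Initialize() below; the handler
# name is hypothetical:
#   consolidator = TradeBarConsolidator(timedelta(minutes=30))
#   consolidator.DataConsolidated += self.SomeBarHandler
#   self.SubscriptionManager.AddConsolidator("SPY", consolidator)
# The handler is then invoked as handler(sender, bar) each time a consolidated bar is emitted.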
def Initialize(self):
'''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must be initialized.'''
self.SetStartDate(DateTime(2013, 10, 7, 9, 30, 0)) #Set Start Date
self.SetEndDate(self.StartDate + timedelta(1)) #Set End Date
# Find more symbols here: http://quantconnect.com/data
self.AddEquity("SPY")
# define our 30 minute trade bar consolidator. we can
# access the 30 minute bar from the DataConsolidated events
thirtyMinuteConsolidator = TradeBarConsolidator(timedelta(minutes=30))
# attach our event handler. the event handler is a function that will
# be called each time we produce a new consolidated piece of data.
thirtyMinuteConsolidator.DataConsolidated += self.ThirtyMinuteBarHandler
# this call adds our 30 minute consolidator to
# the manager to receive updates from the engine
self.SubscriptionManager.AddConsolidator("SPY", thirtyMinuteConsolidator)
# here we'll define a slightly more complex consolidator. what we're trying to produce is
# a 3 day bar. Now we could just use a single TradeBarConsolidator like above and pass in
# TimeSpan.FromDays(3), but in reality that's not what we want. For time spans of longer than
# a day we'll get incorrect results around weekends and such. What we really want are tradeable
# days. So we'll create a daily consolidator, and then wrap it with a 3 count consolidator.
# first define a one day trade bar -- this produces a consolidated piece of data after a day has passed
oneDayConsolidator = TradeBarConsolidator(timedelta(1))
# next define our 3 count trade bar -- this produces a consolidated piece of data after it sees 3 pieces of data
threeCountConsolidator = TradeBarConsolidator(3)
# here we combine them to make a new, 3 day trade bar. The SequentialConsolidator allows composition of
# consolidators. It takes the consolidated output of one consolidator (in this case, the oneDayConsolidator)
# and pipes it through to the threeCountConsolidator. Its output will be a 3 day bar.
three_oneDayBar = SequentialConsolidator(oneDayConsolidator, threeCountConsolidator)
# attach our handler
three_oneDayBar.DataConsolidated += self.ThreeDayBarConsolidatedHandler
# this call adds our 3 day to the manager to receive updates from the engine
self.SubscriptionManager.AddConsolidator("SPY", three_oneDayBar)
self.__last = None
def OnData(self, data):
'''We need to declare this method'''
pass
def OnEndOfDay(self):
# close up shop each day and reset our 'last' value so we start tomorrow fresh
self.Liquidate("SPY")
self.__last = None
def ThirtyMinuteBarHandler(self, sender, bar):
'''This is our event handler for our 30 minute trade bar defined above in Initialize(). So each time the
consolidator produces a new 30 minute bar, this function will be called automatically. The 'sender' parameter
will be the instance of the IDataConsolidator that invoked the event, but you'll almost never need that!'''
if self.__last is not None and bar.Close > self.__last.Close:
self.Log("{0} >> SPY >> LONG >> 100 >> {1}".format(bar.Time, self.Portfolio["SPY"].Quantity))
self.Order("SPY", 100)
elif self.__last is not None and bar.Close < self.__last.Close:
self.Log("{0} >> SPY >> SHORT >> 100 >> {1}".format(bar.Time, self.Portfolio["SPY"].Quantity))
self.Order("SPY", -100)
self.__last = bar
def ThreeDayBarConsolidatedHandler(self, sender, bar):
''' This is our event handler for our 3 day trade bar defined above in Initialize(). So each time the
consolidator produces a new 3 day bar, this function will be called automatically. The 'sender' parameter
will be the instance of the IDataConsolidator that invoked the event, but you'll almost never need that!'''
self.Log("{0} >> Plotting!".format(bar.Time))
self.Plot(bar.Symbol, "3HourBar", bar.Close)
|
Mendelone/forex_trading
|
Algorithm.Python/DataConsolidationAlgorithm.py
|
Python
|
apache-2.0
| 6,399
|
from studio.util import logs
from typing import Dict
from studio.storage.http_storage_handler import HTTPStorageHandler
from studio.storage.local_storage_handler import LocalStorageHandler
from studio.storage.storage_setup import get_storage_verbose_level
from studio.storage.storage_handler import StorageHandler
from studio.storage.storage_type import StorageType
from studio.storage.s3_storage_handler import S3StorageHandler
_storage_factory = None
class StorageHandlerFactory:
def __init__(self):
self.logger = logs.get_logger(self.__class__.__name__)
self.logger.setLevel(get_storage_verbose_level())
self.handlers_cache = dict()
self.cleanup_at_exit: bool = True
@classmethod
def get_factory(cls):
global _storage_factory
if _storage_factory is None:
_storage_factory = StorageHandlerFactory()
return _storage_factory
def set_cleanup_at_exit(self, value: bool):
self.cleanup_at_exit = value
def cleanup(self):
if not self.cleanup_at_exit:
return
for _, handler in self.handlers_cache.items():
if handler is not None:
handler.cleanup()
def get_handler(self, handler_type: StorageType,
config: Dict) -> StorageHandler:
if handler_type == StorageType.storageS3:
handler_id: str = S3StorageHandler.get_id(config)
handler = self.handlers_cache.get(handler_id, None)
if handler is None:
handler = S3StorageHandler(config)
self.handlers_cache[handler_id] = handler
return handler
if handler_type == StorageType.storageHTTP:
handler_id: str = HTTPStorageHandler.get_id(config)
handler = self.handlers_cache.get(handler_id, None)
if handler is None:
handler = HTTPStorageHandler(
config.get('endpoint', None),
config.get('credentials', None))
self.handlers_cache[handler_id] = handler
return handler
if handler_type == StorageType.storageLocal:
handler_id: str = LocalStorageHandler.get_id(config)
handler = self.handlers_cache.get(handler_id, None)
if handler is None:
handler = LocalStorageHandler(config)
self.handlers_cache[handler_id] = handler
return handler
self.logger("FAILED to get storage handler: unsupported type %s",
repr(handler_type))
return None
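# Usage sketch (illustrative, not part of the module; the config keys shown are assumptions):
#   factory = StorageHandlerFactory.get_factory()
#   handler = factory.get_handler(StorageType.storageLocal, {"endpoint": "/tmp/store"})
#   ...
#   factory.cleanup()  # no-op if set_cleanup_at_exit(False) was called
# Handlers are cached per handler id, so repeated calls with the same config reuse one instance.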
|
studioml/studio
|
studio/storage/storage_handler_factory.py
|
Python
|
apache-2.0
| 2,584
|
import sys
from solution import Solution
# from classes import ?
class TestSuite:
def run(self):
self.test000()
self.test001()
self.test002()
# self.test003()
# self.test004()
def test000(self):
print "test 000"
s = '()[]{}'
r = Solution().isValid(s)
print " input:\t", s
print " expect:\t", True
print " output:\t", r
print
def test001(self):
print "test 001"
s = '(]'
r = Solution().isValid(s)
print " input:\t", s
print " expect:\t", False
print " output:\t", r
print
def test002(self):
print "test 002"
s = '([)]'
r = Solution().isValid(s)
print " input:\t", s
print " expect:\t", False
print " output:\t", r
print
def main(argv):
TestSuite().run()
if __name__ == '__main__':
main(sys.argv)
|
54lihaoxin/leetcode_python
|
src/ValidParentheses/test_suite.py
|
Python
|
apache-2.0
| 1,102
|
#!/usr/bin/env python3
import urllib.request, urllib.parse
from xml.dom import minidom
import json
import datetime
import codecs
#import argparse
import logging, os, sys, tempfile
import cgi
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
form = cgi.FieldStorage()
CODE = form.getvalue("code")
# Units should be 'c' or 'f'
UNITS='c'
baseurl = "https://query.yahooapis.com/v1/public/yql?"
yql_query = urllib.parse.quote("select * from weather.forecast where u='" + UNITS + "' and woeid=" + CODE)
yql_url = baseurl + "q=" + yql_query +"&format=json"
logging.debug(yql_url)
weather = urllib.request.urlopen(yql_url)
data = json.loads(codecs.decode(weather.read(), "utf-8"))
logging.debug(data)
forecast = data['query']['results']['channel']['item']['forecast']
logging.debug(forecast)
# Open SVG to process
output = open("icons/template.svg", "r", encoding='utf-8').read()
logging.debug("Forecast:")
logging.debug(forecast)
days = { "Mon": "Monday", "Tue": "Tuesday", "Wed": "Wednesday", "Thu": "Thursday", "Fri": "Friday", "Sat": "Saturday", "Sun": "Sunday" }
output = output.replace('LOCATION', data['query']['results']['channel']['location']['city'])
for i in range(len(forecast)):
day = forecast[i]
day["day"] = days[day["day"]]
logging.debug("Day:")
logging.debug(day)
image_url = 'icons/' + day['code'] + '.svg'
logging.debug("Using icon %s", image_url)
icon = ""
# Read icon (Just the path line)
with codecs.open(image_url ,'r', encoding='utf-8') as f:
for line in f:
if "xml version" in line or "DOCTYPE" in line:
pass
else:
icon = icon + line
day['icon'] = icon
f.close()
for k, v in day.items():
output = output.replace('DAY_%d_%s' % (i, k), v)
# Write output
svg = tempfile.NamedTemporaryFile()
png = tempfile.NamedTemporaryFile()
out = tempfile.NamedTemporaryFile('rb')
svg.write(bytes(output, 'UTF-8'))
# Convert svg to png
os.system("rsvg-convert --background-color=white -o %s %s" % (png.name, svg.name))
# Optimize the image for kindle eink
os.system("pngcrush -s -c 0 %s %s" % (png.name, out.name))
print("Content-Type: image/png\n")
sys.stdout.flush()
sys.stdout.buffer.write(out.read())
|
mpredfearn/kindle-forecast
|
server/cgi-bin/weather.py
|
Python
|
apache-2.0
| 2,183
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
"""Add KL, beta-BBB, just on encoder_w and include a version of vanilla NP.
Based on code by Mingzhang Yin (https://github.com/google-research/google-research/tree/master/meta_learning_without_memorization).
"""
from __future__ import print_function
import functools
import os
import pickle
import time
from absl import app
from absl import flags
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.compat.v1.keras.layers import MaxPooling2D
import tensorflow_probability as tfp
from tensorflow_probability.python.layers import util as tfp_layers_util
# tf.compat.v1.enable_v2_tensorshape()
FLAGS = flags.FLAGS
## Dataset/method options
flags.DEFINE_string('logdir', '/tmp/data',
'directory for summaries and checkpoints.')
flags.DEFINE_string('data_dir', None,
'Directory of data files.')
get_data_dir = lambda: FLAGS.data_dir
flags.DEFINE_list('data', ['train_data_ins.pkl', 'val_data_ins.pkl'],
'data name')
flags.DEFINE_integer('update_batch_size', 15, 'number of context/target')
flags.DEFINE_integer('meta_batch_size', 10, 'number of tasks')
flags.DEFINE_integer('dim_im', 128, 'image size')
flags.DEFINE_integer('dim_y', 1, 'dimension of y')
## Training options
flags.DEFINE_list('n_hidden_units_g', [100, 100],
'number of hidden units in each layer of g.')
flags.DEFINE_list('n_hidden_units_r', [100, 100],
'number of hidden units in each layer of r.')
flags.DEFINE_integer('dim_z', 200, 'dimension of z')
flags.DEFINE_integer('dim_r', 200, 'dimension of r for aggregating')
flags.DEFINE_float('update_lr', 5e-4, 'lr')
flags.DEFINE_integer('num_updates', 100000, 'num_updates')
flags.DEFINE_integer('trial', 1, 'trial number')
flags.DEFINE_integer(
'num_classes', 1,
'number of classes used in classification (e.g. 5-way classification).')
flags.DEFINE_bool('deterministic', True, 'deterministic encoder')
flags.DEFINE_float('beta', 0.001, 'beta for IB')
flags.DEFINE_float('var', -3.0, 'var initial')
flags.DEFINE_integer('dim_w', 20, 'dimension of w')
flags.DEFINE_float('facto', 1.0, 'zero out z to memorize or not')
flags.DEFINE_float('noise_scale', 0, 'add noise to labels')
def get_batch(x, y, is_training):
"""Get data batch."""
xs, ys, xq, yq = [], [], [], []
for _ in range(FLAGS.meta_batch_size):
# sample WAY classes
classes = np.random.choice(
range(np.shape(x)[0]), size=FLAGS.num_classes, replace=False)
support_set = []
query_set = []
support_sety = []
query_sety = []
for k in list(classes):
# sample SHOT and QUERY instances
idx = np.random.choice(
range(np.shape(x)[1]),
size=FLAGS.update_batch_size + FLAGS.update_batch_size,
replace=False)
x_k = x[k][idx]
y_k = y[k][idx].copy()
if FLAGS.noise_scale and is_training:
# Add single scalar to all elements of the inner batch corresponding
# to task idx
scale = FLAGS.noise_scale
low, high = -scale, scale
y_k = (y_k + np.random.uniform(low, high)) % 1.0
support_set.append(x_k[:FLAGS.update_batch_size])
query_set.append(x_k[FLAGS.update_batch_size:])
support_sety.append(y_k[:FLAGS.update_batch_size])
query_sety.append(y_k[FLAGS.update_batch_size:])
xs_k = np.concatenate(support_set, 0)
xq_k = np.concatenate(query_set, 0)
ys_k = np.concatenate(support_sety, 0)
yq_k = np.concatenate(query_sety, 0)
xs.append(xs_k)
xq.append(xq_k)
ys.append(ys_k)
yq.append(yq_k)
xs, ys = np.stack(xs, 0), np.stack(ys, 0)
xq, yq = np.stack(xq, 0), np.stack(yq, 0)
xs = np.reshape(
xs,
[FLAGS.meta_batch_size, FLAGS.update_batch_size * FLAGS.num_classes, -1])
xq = np.reshape(
xq,
[FLAGS.meta_batch_size, FLAGS.update_batch_size * FLAGS.num_classes, -1])
xs = xs.astype(np.float32) / 255.0
xq = xq.astype(np.float32) / 255.0
ys = ys.astype(np.float32) * 10.0
yq = yq.astype(np.float32) * 10.0
return xs, ys, xq, yq
def gen(x, y, is_training):
while True:
yield get_batch(np.array(x), np.array(y), is_training)
def sampling(output):
mu, logstd = tf.split(output, num_or_size_splits=2, axis=-1)
sigma = tf.nn.softplus(logstd)
ws = mu + tf.random_normal(tf.shape(mu)) * sigma
return ws, mu, sigma
def mse(pred, label):
pred = tf.reshape(pred, [-1])
label = tf.reshape(label, [-1])
return tf.reduce_mean(tf.square(pred - label))
def encoder_r(xys):
"""Define encoder."""
with tf.variable_scope('encoder_r', reuse=tf.AUTO_REUSE):
hidden_layer = xys
# First layers are relu
for i, n_hidden_units in enumerate(FLAGS.n_hidden_units_r):
hidden_layer = tf.layers.dense(
hidden_layer,
n_hidden_units,
activation=tf.nn.relu,
name='encoder_r_{}'.format(i),
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
# Last layer is simple linear
i = len(FLAGS.n_hidden_units_r)
r = tf.layers.dense(
hidden_layer,
FLAGS.dim_r,
name='encoder_r_{}'.format(i),
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
return r
def encoder_w(xs, encoder_w0):
"""xs is [n_task, n_im, dim_x]; return [n_task, n_im, dim_w]."""
n_task = tf.shape(xs)[0]
n_im = tf.shape(xs)[1]
xs = tf.reshape(xs, [-1, 128, 128, 1])
ws = encoder_w0(xs)
ws = tf.reshape(ws, [n_task, n_im, FLAGS.dim_w])
return ws
def xy_to_z(xs, ys, encoder_w0):
r"""ws = T0(xs), rs = T1(ws, ys), r = mean(rs), z \sim N(mu(r), sigma(r))."""
with tf.variable_scope(''):
ws = encoder_w(xs, encoder_w0) # (n_task * n_im_per_task) * dim_w
transformed_ys = tf.layers.dense(
ys,
FLAGS.dim_w // 4,
name='lift_y',
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
wys = tf.concat([ws, transformed_ys],
axis=-1) # n_task * n_im_per_task * (dim_w+dim_transy)
rs = encoder_r(wys) # n_task * n_im_per_task * dim_r
r = tf.reduce_mean(rs, axis=1, keepdims=True) # n_task * 1 * dim_r
if FLAGS.deterministic:
z_sample = tf.layers.dense(
r,
FLAGS.dim_z,
name='r2z',
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
else:
z = tf.layers.dense(
r,
FLAGS.dim_z + FLAGS.dim_z,
name='r2z',
reuse=tf.AUTO_REUSE,
kernel_initializer='normal')
z_sample, _, _ = sampling(z)
return tf.tile(z_sample, [1, FLAGS.update_batch_size, 1]) # tile n_targets
def construct_model(input_tensors, encoder_w0, decoder0, prefix=None):
"""Construct model."""
facto = tf.placeholder_with_default(1.0, ())
context_xs = input_tensors['inputa']
context_ys = input_tensors['labela']
target_xs = input_tensors['inputb']
target_ys = input_tensors['labelb']
# sample ws ~ w|(x_all,a), rs = T(ws, ys), r = mean(rs), z = T(r)
# x_all = tf.concat([context_xs, target_xs], axis=1) #n_task * 20 * (128*128)
# y_all = tf.concat([context_ys, target_ys], axis=1)
x_all = context_xs
y_all = context_ys
# n_task * [n_im] * d_z
if 'train' in prefix:
z_samples = xy_to_z(x_all, y_all, encoder_w0) * facto
else:
z_samples = xy_to_z(context_xs, context_ys, encoder_w0) * facto
target_ws = encoder_w(target_xs, encoder_w0)
input_zxs = tf.concat([z_samples, target_ws], axis=-1)
# sample y_hat ~ y|(w,z)
with tf.variable_scope('decoder'):
target_yhat_mu = decoder0(input_zxs) # n_task * n_im * dim_y
# when var of p(y | x,z) is fixed, neg-loglik <=> MSE
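# (illustrative derivation) for a Gaussian p(y | x,z) with fixed sigma:
#   -log p(y|x,z) = (y - mu)^2 / (2 * sigma^2) + log(sigma * sqrt(2 * pi))
# so minimising the negative log-likelihood in mu is the same as minimising the MSE.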
mse_loss = mse(target_yhat_mu, target_ys)
tf.summary.scalar(prefix + 'mse', mse_loss)
optimizer1 = tf.train.AdamOptimizer(FLAGS.update_lr)
optimizer2 = tf.train.AdamOptimizer(FLAGS.update_lr)
if 'train' in prefix:
THETA = ( # pylint: disable=invalid-name
tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='decoder') +
tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='encoder_w'))
all_var = tf.trainable_variables()
PHI = [v for v in all_var if v not in THETA] # pylint: disable=invalid-name
kl_loss = sum(encoder_w0.losses) # +sum(decoder0.losses)
scale_v = [v for v in encoder_w0.trainable_variables if 'scale' in v.name]
scale_norm = [tf.reduce_mean(v) for v in scale_v]
scale_norm = tf.reduce_mean(scale_norm)
loss = mse_loss + FLAGS.beta * kl_loss
gvs_theta = optimizer1.compute_gradients(loss, THETA)
train_theta_op = optimizer1.apply_gradients(gvs_theta)
gvs_phi = optimizer2.compute_gradients(loss, PHI)
train_phi_op = optimizer2.apply_gradients(gvs_phi)
with tf.control_dependencies([train_theta_op, train_phi_op]):
train_op = tf.no_op()
tf.summary.scalar(prefix + 'full_loss', loss)
tf.summary.scalar(prefix + 'regularizer', FLAGS.beta * kl_loss)
tf.summary.scalar(prefix + 'untransformed_scale', scale_norm)
return mse_loss, train_op, facto
else:
return mse_loss
def main(_):
kernel_posterior_fn = tfp_layers_util.default_mean_field_normal_fn(
untransformed_scale_initializer=tf.initializers.random_normal(
mean=FLAGS.var, stddev=0.1))
encoder_w0 = tf.keras.Sequential([
tfp.layers.Convolution2DReparameterization(
filters=32,
kernel_size=3,
strides=(2, 2),
activation='relu',
padding='SAME',
kernel_posterior_fn=kernel_posterior_fn),
tfp.layers.Convolution2DReparameterization(
filters=48,
kernel_size=3,
strides=(2, 2),
activation='relu',
padding='SAME',
kernel_posterior_fn=kernel_posterior_fn),
MaxPooling2D(pool_size=(2, 2)),
tfp.layers.Convolution2DReparameterization(
filters=64,
kernel_size=3,
strides=(2, 2),
activation='relu',
padding='SAME',
kernel_posterior_fn=kernel_posterior_fn),
tf.keras.layers.Flatten(),
tfp.layers.DenseReparameterization(
FLAGS.dim_w, kernel_posterior_fn=kernel_posterior_fn),
])
decoder0 = tf.keras.Sequential([
tf.keras.layers.Dense(100, activation=tf.nn.relu),
tf.keras.layers.Dense(100, activation=tf.nn.relu),
tf.keras.layers.Dense(FLAGS.dim_y),
])
dim_output = FLAGS.dim_y
dim_input = FLAGS.dim_im * FLAGS.dim_im * 1
model_name = 'np_bbb'
if FLAGS.noise_scale:
model_name = 'np_bbb_noise_scale' + str(FLAGS.noise_scale)
exp_name = '%s.beta-%g.update_lr-%g.trial-%d' % (model_name, FLAGS.beta,
FLAGS.update_lr, FLAGS.trial)
checkpoint_dir = os.path.join(FLAGS.logdir, exp_name)
if os.path.exists(checkpoint_dir):
print('checkpoint dir already exists. exiting')
return
x_train, y_train = pickle.load(
tf.io.gfile.GFile(os.path.join(get_data_dir(), FLAGS.data[0]), 'rb'))
x_val, y_val = pickle.load(
tf.io.gfile.GFile(os.path.join(get_data_dir(), FLAGS.data[1]), 'rb'))
x_train, y_train = np.array(x_train), np.array(y_train)
y_train = y_train[:, :, -1, None]
x_val, y_val = np.array(x_val), np.array(y_val)
y_val = y_val[:, :, -1, None]
ds_train = tf.data.Dataset.from_generator(
functools.partial(gen, x_train, y_train, True),
(tf.float32, tf.float32, tf.float32, tf.float32),
(tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_input]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_output]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_input]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_output])))
ds_val = tf.data.Dataset.from_generator(
functools.partial(gen, x_val, y_val, False),
(tf.float32, tf.float32, tf.float32, tf.float32),
(tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_input]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_output]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_input]),
tf.TensorShape(
[None, FLAGS.update_batch_size * FLAGS.num_classes, dim_output])))
inputa, labela, inputb, labelb = ds_train.make_one_shot_iterator().get_next()
input_tensors = {'inputa': inputa,\
'inputb': inputb,\
'labela': labela, 'labelb': labelb}
inputa_val, labela_val, inputb_val, labelb_val = ds_val.make_one_shot_iterator(
).get_next()
metaval_input_tensors = {'inputa': inputa_val,\
'inputb': inputb_val,\
'labela': labela_val, 'labelb': labelb_val}
loss, train_op, facto = construct_model(
input_tensors, encoder_w0, decoder0, prefix='metatrain_')
loss_val = construct_model(
metaval_input_tensors, encoder_w0, decoder0, prefix='metaval_')
###########
summ_op = tf.summary.merge_all()
sess = tf.InteractiveSession()
summary_writer = tf.summary.FileWriter(checkpoint_dir, sess.graph)
tf.global_variables_initializer().run()
PRINT_INTERVAL = 50 # pylint: disable=invalid-name
SUMMARY_INTERVAL = 5 # pylint: disable=invalid-name
prelosses, prelosses_val = [], []
old_time = time.time()
for itr in range(FLAGS.num_updates):
feed_dict = {facto: FLAGS.facto}
if itr % SUMMARY_INTERVAL == 0:
summary, cost, cost_val = sess.run([summ_op, loss, loss_val], feed_dict)
summary_writer.add_summary(summary, itr)
prelosses.append(cost) # 0 step loss on training set
prelosses_val.append(cost_val) # 0 step loss on meta_val training set
sess.run(train_op, feed_dict)
if (itr != 0) and itr % PRINT_INTERVAL == 0:
print('Iteration ' + str(itr) + ': ' + str(np.mean(prelosses)), 'time =',
time.time() - old_time)
prelosses = []
old_time = time.time()
print('Validation results: ' + str(np.mean(prelosses_val)))
prelosses_val = []
# Evaluate final
test_val = []
for _ in range(600):
cost_val = sess.run(loss_val, feed_dict)
test_val.append(cost_val)
with open(os.path.join(checkpoint_dir, 'results.p'), 'wb') as f:
pickle.dump(test_val, f)
if __name__ == '__main__':
app.run(main)
|
google-research/google-research
|
meta_augmentation/pose_regression/np_bbb.py
|
Python
|
apache-2.0
| 14,955
|
# -*- coding: utf-8 -*-
# Copyright 2010-2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
import testtools
from oscdebug.tests import fakes
_TRUE_VALUES = ('true', '1', 'yes')
class TestCase(testtools.TestCase):
"""Test case base class for all unit tests."""
def setUp(self):
"""Run before each test method to initialize test environment."""
super(TestCase, self).setUp()
test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
try:
test_timeout = int(test_timeout)
except ValueError:
# If timeout value is invalid do not set a timeout.
test_timeout = 0
if test_timeout > 0:
self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
self.useFixture(fixtures.NestedTempfile())
self.useFixture(fixtures.TempHomeDir())
if os.environ.get('OS_STDOUT_CAPTURE') in _TRUE_VALUES:
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if os.environ.get('OS_STDERR_CAPTURE') in _TRUE_VALUES:
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.log_fixture = self.useFixture(fixtures.FakeLogger())
class TestCommand(TestCase):
"""Test cliff command classes"""
def setUp(self):
super(TestCommand, self).setUp()
# Build up a fake app
self.app = fakes.FakeApp()
self.app.client_manager = fakes.FakeClientManager()
def check_parser(self, cmd, args, verify_args):
cmd_parser = cmd.get_parser('check_parser')
parsed_args = cmd_parser.parse_args(args)
for av in verify_args:
attr, value = av
if attr:
self.assertIn(attr, parsed_args)
self.assertEqual(getattr(parsed_args, attr), value)
return parsed_args
|
dtroyer/osc-debug
|
oscdebug/tests/base.py
|
Python
|
apache-2.0
| 2,618
|
# Copyright 2011 Andrew Bogott for the Wikimedia Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo_serialization import jsonutils
import six
import webob
from jacket.api.compute.openstack.compute import flavor_access as flavor_access_v21
from jacket.api.compute.openstack.compute import flavor_manage as flavormanage_v21
from jacket.api.compute.openstack.compute.legacy_v2.contrib import flavor_access \
as flavor_access_v2
from jacket.api.compute.openstack.compute.legacy_v2.contrib import flavormanage \
as flavormanage_v2
from jacket.compute.cloud import flavors
from jacket.compute import exception
from jacket.compute import test
from jacket.tests.compute.unit.api.openstack import fakes
def fake_db_flavor(**updates):
db_flavor = {
'root_gb': 1,
'ephemeral_gb': 1,
'name': u'frob',
'deleted': False,
'created_at': datetime.datetime(2012, 1, 19, 18, 49, 30, 877329),
'updated_at': None,
'memory_mb': 256,
'vcpus': 1,
'flavorid': 1,
'swap': 0,
'rxtx_factor': 1.0,
'extra_specs': {},
'deleted_at': None,
'vcpu_weight': None,
'id': 7,
'is_public': True,
'disabled': False,
}
if updates:
db_flavor.update(updates)
return db_flavor
def fake_get_flavor_by_flavor_id(flavorid, ctxt=None, read_deleted='yes'):
if flavorid == 'failtest':
raise exception.FlavorNotFound(flavor_id=flavorid)
elif not str(flavorid) == '1234':
raise Exception("This test expects flavorid 1234, not %s" % flavorid)
if read_deleted != 'no':
raise test.TestingException("Should not be reading deleted")
return fake_db_flavor(flavorid=flavorid)
def fake_destroy(flavorname):
pass
def fake_create(context, kwargs, projects=None):
newflavor = fake_db_flavor()
flavorid = kwargs.get('flavorid')
if flavorid is None:
flavorid = 1234
newflavor['flavorid'] = flavorid
newflavor["name"] = kwargs.get('name')
newflavor["memory_mb"] = int(kwargs.get('memory_mb'))
newflavor["vcpus"] = int(kwargs.get('vcpus'))
newflavor["root_gb"] = int(kwargs.get('root_gb'))
newflavor["ephemeral_gb"] = int(kwargs.get('ephemeral_gb'))
newflavor["swap"] = kwargs.get('swap')
newflavor["rxtx_factor"] = float(kwargs.get('rxtx_factor'))
newflavor["is_public"] = bool(kwargs.get('is_public'))
newflavor["disabled"] = bool(kwargs.get('disabled'))
return newflavor
class FlavorManageTestV21(test.NoDBTestCase):
controller = flavormanage_v21.FlavorManageController()
validation_error = exception.ValidationError
base_url = '/v2/fake/flavors'
def setUp(self):
super(FlavorManageTestV21, self).setUp()
self.stubs.Set(flavors,
"get_flavor_by_flavor_id",
fake_get_flavor_by_flavor_id)
self.stubs.Set(flavors, "destroy", fake_destroy)
self.stub_out("compute.db.flavor_create", fake_create)
self.request_body = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": six.text_type('1234'),
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
self.expected_flavor = self.request_body
def _get_http_request(self, url=''):
return fakes.HTTPRequest.blank(url)
@property
def app(self):
return fakes.wsgi_app_v21(init_only=('os-flavor-manage',
'os-flavor-rxtx',
'os-flavor-access', 'flavors',
'os-flavor-extra-data'))
def test_delete(self):
res = self.controller._delete(self._get_http_request(), 1234)
# NOTE: on v2.1, http status code is set as wsgi_code of API
# method instead of status_int in a response object.
if isinstance(self.controller,
flavormanage_v21.FlavorManageController):
status_int = self.controller._delete.wsgi_code
else:
status_int = res.status_int
self.assertEqual(202, status_int)
# subsequent delete should fail
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._delete, self._get_http_request(),
"failtest")
def _test_create_missing_parameter(self, parameter):
body = {
"flavor": {
"name": "azAZ09. -_",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": six.text_type('1234'),
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
del body['flavor'][parameter]
self.assertRaises(self.validation_error, self.controller._create,
self._get_http_request(), body=body)
def test_create_missing_name(self):
self._test_create_missing_parameter('name')
def test_create_missing_ram(self):
self._test_create_missing_parameter('ram')
def test_create_missing_vcpus(self):
self._test_create_missing_parameter('vcpus')
def test_create_missing_disk(self):
self._test_create_missing_parameter('disk')
def _create_flavor_success_case(self, body, req=None):
req = req if req else self._get_http_request(url=self.base_url)
req.headers['Content-Type'] = 'application/json'
req.method = 'POST'
req.body = jsonutils.dump_as_bytes(body)
res = req.get_response(self.app)
self.assertEqual(200, res.status_code)
return jsonutils.loads(res.body)
def test_create(self):
body = self._create_flavor_success_case(self.request_body)
for key in self.expected_flavor["flavor"]:
self.assertEqual(body["flavor"][key],
self.expected_flavor["flavor"][key])
def test_create_public_default(self):
del self.request_body['flavor']['os-flavor-access:is_public']
body = self._create_flavor_success_case(self.request_body)
for key in self.expected_flavor["flavor"]:
self.assertEqual(body["flavor"][key],
self.expected_flavor["flavor"][key])
def test_create_without_flavorid(self):
del self.request_body['flavor']['id']
body = self._create_flavor_success_case(self.request_body)
for key in self.expected_flavor["flavor"]:
self.assertEqual(body["flavor"][key],
self.expected_flavor["flavor"][key])
def _create_flavor_bad_request_case(self, body):
self.stubs.UnsetAll()
self.assertRaises(self.validation_error, self.controller._create,
self._get_http_request(), body=body)
def test_create_invalid_name(self):
self.request_body['flavor']['name'] = 'bad !@#!$%\x00 name'
self._create_flavor_bad_request_case(self.request_body)
def test_create_flavor_name_is_whitespace(self):
self.request_body['flavor']['name'] = ' '
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_name_too_long(self):
self.request_body['flavor']['name'] = 'a' * 256
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_name_leading_trailing_spaces(self):
self.request_body['flavor']['name'] = ' test '
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_name_leading_trailing_spaces_compat_mode(self):
req = self._get_http_request(url=self.base_url)
req.set_legacy_v2()
self.request_body['flavor']['name'] = ' test '
body = self._create_flavor_success_case(self.request_body, req)
self.assertEqual('test', body['flavor']['name'])
def test_create_without_flavorname(self):
del self.request_body['flavor']['name']
self._create_flavor_bad_request_case(self.request_body)
def test_create_empty_body(self):
body = {
"flavor": {}
}
self._create_flavor_bad_request_case(body)
def test_create_no_body(self):
body = {}
self._create_flavor_bad_request_case(body)
def test_create_invalid_format_body(self):
body = {
"flavor": []
}
self._create_flavor_bad_request_case(body)
def test_create_invalid_flavorid(self):
self.request_body['flavor']['id'] = "!@#!$#!$^#&^$&"
self._create_flavor_bad_request_case(self.request_body)
def test_create_check_flavor_id_length(self):
MAX_LENGTH = 255
self.request_body['flavor']['id'] = "a" * (MAX_LENGTH + 1)
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_leading_trailing_whitespaces_in_flavor_id(self):
self.request_body['flavor']['id'] = " bad_id "
self._create_flavor_bad_request_case(self.request_body)
def test_create_without_ram(self):
del self.request_body['flavor']['ram']
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_0_ram(self):
self.request_body['flavor']['ram'] = 0
self._create_flavor_bad_request_case(self.request_body)
def test_create_without_vcpus(self):
del self.request_body['flavor']['vcpus']
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_0_vcpus(self):
self.request_body['flavor']['vcpus'] = 0
self._create_flavor_bad_request_case(self.request_body)
def test_create_without_disk(self):
del self.request_body['flavor']['disk']
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_minus_disk(self):
self.request_body['flavor']['disk'] = -1
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_minus_ephemeral(self):
self.request_body['flavor']['OS-FLV-EXT-DATA:ephemeral'] = -1
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_minus_swap(self):
self.request_body['flavor']['swap'] = -1
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_minus_rxtx_factor(self):
self.request_body['flavor']['rxtx_factor'] = -1
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_non_boolean_is_public(self):
self.request_body['flavor']['os-flavor-access:is_public'] = 123
self._create_flavor_bad_request_case(self.request_body)
def test_flavor_exists_exception_returns_409(self):
expected = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": 1235,
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
def fake_create(name, memory_mb, vcpus, root_gb, ephemeral_gb,
flavorid, swap, rxtx_factor, is_public):
raise exception.FlavorExists(name=name)
self.stubs.Set(flavors, "create", fake_create)
self.assertRaises(webob.exc.HTTPConflict, self.controller._create,
self._get_http_request(), body=expected)
@mock.patch('compute.compute.flavors.create',
side_effect=exception.FlavorCreateFailed)
def test_flavor_create_db_failed(self, mock_create):
request_dict = {
"flavor": {
"name": "test",
'id': "12345",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
ex = self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller._create,
self._get_http_request(), body=request_dict)
self.assertIn('Unable to create flavor', ex.explanation)
def test_invalid_memory_mb(self):
"""Check negative and decimal number can't be accepted."""
self.stubs.UnsetAll()
self.assertRaises(exception.InvalidInput, flavors.create, "abc",
-512, 2, 1, 1, 1234, 512, 1, True)
self.assertRaises(exception.InvalidInput, flavors.create, "abcd",
512.2, 2, 1, 1, 1234, 512, 1, True)
self.assertRaises(exception.InvalidInput, flavors.create, "abcde",
None, 2, 1, 1, 1234, 512, 1, True)
self.assertRaises(exception.InvalidInput, flavors.create, "abcdef",
512, 2, None, 1, 1234, 512, 1, True)
self.assertRaises(exception.InvalidInput, flavors.create, "abcdef",
"test_memory_mb", 2, None, 1, 1234, 512, 1, True)
class PrivateFlavorManageTestV21(test.TestCase):
controller = flavormanage_v21.FlavorManageController()
base_url = '/v2/fake/flavors'
def setUp(self):
super(PrivateFlavorManageTestV21, self).setUp()
self.flavor_access_controller = (flavor_access_v21.
FlavorAccessController())
self.expected = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"swap": 512,
"rxtx_factor": 1
}
}
@property
def app(self):
return fakes.wsgi_app_v21(init_only=('os-flavor-manage',
'os-flavor-access',
'os-flavor-rxtx', 'flavors',
'os-flavor-extra-data'),
fake_auth_context=self._get_http_request().
environ['compute.context'])
def _get_http_request(self, url=''):
return fakes.HTTPRequest.blank(url)
def _get_response(self):
req = self._get_http_request(self.base_url)
req.headers['Content-Type'] = 'application/json'
req.method = 'POST'
req.body = jsonutils.dump_as_bytes(self.expected)
res = req.get_response(self.app)
return jsonutils.loads(res.body)
def test_create_private_flavor_should_not_grant_flavor_access(self):
self.expected["flavor"]["os-flavor-access:is_public"] = False
body = self._get_response()
for key in self.expected["flavor"]:
self.assertEqual(body["flavor"][key], self.expected["flavor"][key])
# Because for normal user can't access the non-public flavor without
# access. So it need admin context at here.
flavor_access_body = self.flavor_access_controller.index(
fakes.HTTPRequest.blank('', use_admin_context=True),
body["flavor"]["id"])
expected_flavor_access_body = {
"tenant_id": 'fake',
"flavor_id": "%s" % body["flavor"]["id"]
}
self.assertNotIn(expected_flavor_access_body,
flavor_access_body["flavor_access"])
def test_create_public_flavor_should_not_create_flavor_access(self):
self.expected["flavor"]["os-flavor-access:is_public"] = True
body = self._get_response()
for key in self.expected["flavor"]:
self.assertEqual(body["flavor"][key], self.expected["flavor"][key])
class FlavorManageTestV2(FlavorManageTestV21):
controller = flavormanage_v2.FlavorManageController()
validation_error = webob.exc.HTTPBadRequest
def setUp(self):
super(FlavorManageTestV2, self).setUp()
self.flags(
osapi_compute_extension=[
'compute.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Flavormanage', 'Flavorextradata',
'Flavor_access', 'Flavor_rxtx', 'Flavor_swap'])
@property
def app(self):
return fakes.wsgi_app(init_only=('flavors',),
fake_auth_context=self._get_http_request().
environ['compute.context'])
def _get_http_request(self, url=''):
return fakes.HTTPRequest.blank(url, use_admin_context=False)
def test_create_with_name_leading_trailing_spaces(self):
req = self._get_http_request(url=self.base_url)
self.request_body['flavor']['name'] = ' test '
body = self._create_flavor_success_case(self.request_body, req)
self.assertEqual('test', body['flavor']['name'])
def test_create_with_name_leading_trailing_spaces_compat_mode(self):
pass
class PrivateFlavorManageTestV2(PrivateFlavorManageTestV21):
controller = flavormanage_v2.FlavorManageController()
def setUp(self):
super(PrivateFlavorManageTestV2, self).setUp()
self.flags(
osapi_compute_extension=[
'compute.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Flavormanage', 'Flavorextradata',
'Flavor_access', 'Flavor_rxtx', 'Flavor_swap'])
self.flavor_access_controller = (flavor_access_v2.
FlavorAccessController())
@property
def app(self):
return fakes.wsgi_app(init_only=('flavors',),
fake_auth_context=self._get_http_request().
environ['compute.context'])
def _get_http_request(self, url=''):
return fakes.HTTPRequest.blank(url, use_admin_context=False)
class FlavorManagerPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(FlavorManagerPolicyEnforcementV21, self).setUp()
self.controller = flavormanage_v21.FlavorManageController()
def test_create_policy_failed(self):
rule_name = "os_compute_api:os-flavor-manage"
self.policy.set_rules({rule_name: "project:non_fake"})
req = fakes.HTTPRequest.blank('')
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller._create, req,
body={"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"swap": 512,
"rxtx_factor": 1,
}})
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_delete_policy_failed(self):
rule_name = "os_compute_api:os-flavor-manage"
self.policy.set_rules({rule_name: "project:non_fake"})
req = fakes.HTTPRequest.blank('')
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller._delete, req,
fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
|
HybridF5/jacket
|
jacket/tests/compute/unit/api/openstack/compute/test_flavor_manage.py
|
Python
|
apache-2.0
| 20,113
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetNamespace
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-service-directory
# [START servicedirectory_v1_generated_RegistrationService_GetNamespace_async]
from google.cloud import servicedirectory_v1
async def sample_get_namespace():
# Create a client
client = servicedirectory_v1.RegistrationServiceAsyncClient()
# Initialize request argument(s)
request = servicedirectory_v1.GetNamespaceRequest(
name="name_value",
)
# Make the request
response = await client.get_namespace(request=request)
# Handle the response
print(response)
# [END servicedirectory_v1_generated_RegistrationService_GetNamespace_async]
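# Illustrative driver (editor addition, not part of the generated sample):
# the coroutine above can be executed with asyncio, assuming application
# default credentials and a real namespace resource name are configured.
if __name__ == "__main__":
    import asyncio
    asyncio.run(sample_get_namespace())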
|
googleapis/python-service-directory
|
samples/generated_samples/servicedirectory_v1_generated_registration_service_get_namespace_async.py
|
Python
|
apache-2.0
| 1,526
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config
from tempest.openstack.common import log as logging
import tempest.test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class BaseDatabaseTest(tempest.test.BaseTestCase):
"""Base test case class for all Database API tests."""
_interface = 'json'
force_tenant_isolation = False
@classmethod
def setUpClass(cls):
super(BaseDatabaseTest, cls).setUpClass()
if not CONF.service_available.trove:
skip_msg = ("%s skipped as trove is not available" % cls.__name__)
raise cls.skipException(skip_msg)
cls.catalog_type = CONF.database.catalog_type
cls.db_flavor_ref = CONF.database.db_flavor_ref
cls.db_current_version = CONF.database.db_current_version
os = cls.get_client_manager()
cls.os = os
cls.database_flavors_client = cls.os.database_flavors_client
cls.os_flavors_client = cls.os.flavors_client
cls.database_versions_client = cls.os.database_versions_client
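# --- Illustrative sketch (editor addition, not part of tempest) ---
# A concrete test case would subclass BaseDatabaseTest and use the clients
# wired up in setUpClass; the client method name below is an assumption based
# on contemporaneous tempest database clients:
#
#     class DatabaseFlavorsTest(BaseDatabaseTest):
#         def test_list_db_flavors(self):
#             _resp, flavors = self.database_flavors_client.list_db_flavors()
#             self.assertTrue(len(flavors) > 0)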
|
Mirantis/tempest
|
tempest/api/database/base.py
|
Python
|
apache-2.0
| 1,654
|
from io import StringIO
from base import BaseService
class DNSError(Exception):
pass
class DNS(BaseService):
PACKAGE = 'dhcp'
HOSTS = '/tmp/hosts/jumpscale'
ADD_OP = '+'
REM_OP = '-'
REMALL_OP = '--'
def __init__(self, wrt):
super(DNS, self).__init__(wrt)
self._transactions = list()
@property
def domain(self):
"""
Get DNS domain
"""
dnsmasq = self.package.find('dnsmasq')
if not dnsmasq:
return ''
section = dnsmasq[0]
return section['domain']
@domain.setter
def domain(self, value):
"""
Set DNS domain
"""
dnsmasq = self.package.find('dnsmasq')
if not dnsmasq:
section = self._wrt.add('dnsmasq')
else:
section = dnsmasq[0]
section['domain'] = value
section['local'] = '/%s/' % value
@property
def records(self):
"""
Return all custom DNS A records
"""
con = self._wrt.connection
with settings(shell=self._wrt.WRT_SHELL, abort_exception=DNSError):
if not con.file_exists(DNS.HOSTS):
return {}
hosts = {}
# we can't use file_read on open-wrt because it doesn't have
# openssl by default. We use cat instead
hostsstr = con.run('cat %s' % DNS.HOSTS)
for line in hostsstr.splitlines():
line = line.strip()
if line == '' or line.startswith('#'):
continue
ip, name = line.split(' ', 2)
hosts.setdefault(name, list())
hosts[name].append(ip)
return hosts
def _runTransactions(self):
# write hosts
records = self.records
while self._transactions:
trans = self._transactions.pop(0)
op, name, ip = trans
if op == DNS.ADD_OP:
records.setdefault(name, list())
records[name].append(ip)
elif op == DNS.REM_OP:
if name not in records:
continue
if ip is None:
del records[name]
elif ip in records[name]:
records[name].remove(ip)
elif op == DNS.REMALL_OP:
records = {}
return records
def commit(self):
"""
Apply any pending changes and restart DNS
"""
# write main dns uci
self._wrt.commit(self.package)
records = self._runTransactions()
command = StringIO()
command.write('cat > {file} <<HOSTS\n'.format(file=DNS.HOSTS))
for host, ips in records.items():
for ip in ips:
command.write('%s %s\n' % (ip, host))
command.write('\nHOSTS\n')
con = self._wrt.connection
with settings(shell=self._wrt.WRT_SHELL, abort_exception=DNSError):
# write hosts file
con.run(command.getvalue())
# restart dnsmasq
con.run('/etc/init.d/dnsmasq restart')
def addARecord(self, name, ip):
"""
Add A record to DNS
:name: Host name
:ip: Host IP
"""
self._transactions.append((DNS.ADD_OP, name, ip))
def removeARecord(self, name, ip=None):
"""
Remove A record from DNS
:name: Host name
:ip: Host IP, if None, remove all A records for the named host
"""
self._transactions.append((DNS.REM_OP, name, ip))
def erase(self):
"""
Remove all hosts (A records)
"""
self._transactions.append((DNS.REMALL_OP, None, None))
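# --- Illustrative usage sketch (editor addition, not part of JumpScale) ---
# Assumes `wrt` is an initialised OpenWRT wrapper exposing `connection`,
# `add()` and `commit()` as referenced by the class above; the host name and
# IP are made up.
def _example_publish_record(wrt, name='nas', ip='192.168.1.10'):
    """Queue an A record and push it to dnsmasq on the router."""
    dns = DNS(wrt)
    dns.domain = 'lan'        # writes the dnsmasq 'domain' and 'local' options
    dns.addARecord(name, ip)  # queued as an (ADD_OP, name, ip) transaction
    dns.commit()              # rewrites DNS.HOSTS and restarts dnsmasq
    return dns.records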
|
Jumpscale/jumpscale_core8
|
lib/JumpScale/tools/openwrt/dns.py
|
Python
|
apache-2.0
| 3,726
|
# -*- coding: utf-8 -*-
from selenium.webdriver.firefox.webdriver import WebDriver
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from fixture.session import SessionHelper
from fixture.group import GroupHelper
from fixture.contact import ContactHelper
class Application:
def __init__(self):
profile = FirefoxProfile()
profile.set_preference("browser.startup.homepage_override.mstone", "ignore")
self.wd = WebDriver(firefox_profile= profile)
#self.wd = WebDriver()
self.session = SessionHelper(self)
self.group = GroupHelper(self)
self.contact = ContactHelper(self)
def is_valid(self):
try:
self.wd.current_url
return True
except:
return False
def open_home_page(self):
wd = self.wd
if not wd.current_url.endswith("/addressbook/"):
wd.get("http://localhost/addressbook/")
def destroy(self):
self.wd.quit()
|
tzvezda/python_training
|
fixture/application.py
|
Python
|
apache-2.0
| 999
|
# Gambit scripts
#
# Copyright (C) USC Information Sciences Institute
# Author: Nibir Bora <nbora@usc.edu>
# URL: <http://cbg.isi.edu/>
# For license information, see LICENSE
import os
import sys
import csv
import math
import numpy
import anyjson
import psycopg2
import matplotlib
import matplotlib.pyplot as plt
from pylab import *
from PIL import Image
from pprint import pprint
from datetime import datetime
from scipy.stats import ks_2samp
from scipy.optimize import leastsq
from matplotlib.ticker import FuncFormatter
from matplotlib.backends.backend_pdf import PdfPages
import settings as my
sys.path.insert(0, os.path.abspath('..'))
#
# DISPLACEMENT STATISTICS
#
def out_user_disp_plot():
'''Plot charts for daily max displacement'''
with open('data/' + my.DATA_FOLDER + 'displacement/' + 'user_disp.csv', 'rb') as fp2:
csv_reader = csv.reader(fp2, delimiter=',')
x = [int(row[1]) for row in csv_reader]
print '%s daily max displacements read.' % (len(x))
_plot_hist(x, 99, (100,10000), '100m-10km')
a, i = _plot_hist(x, 149, (100,15000), '100m-15km')
print a, ',', i
_plot_hist(x, 199, (100,20000), '100m-20km')
_plot_hist(x, 249, (100,25000), '100m-25km')
def out_user_disp_plot_weekday():
'''Plot charts for daily max displacement (Weekday/Weekend)'''
x_wd = []
x_we = []
with open('data/' + my.DATA_FOLDER + 'displacement/' + 'user_disp.csv', 'rb') as fp2:
csv_reader = csv.reader(fp2, delimiter=',')
for row in csv_reader:
dt = datetime.strptime(row[2], '%Y-%m-%d').date()
if dt.weekday() in [5, 6]:
x_we.append(int(row[1]))
else:
x_wd.append(int(row[1]))
print 'Weekday: %s, Weekend: %s daily max displacements read.' % (len(x_wd), len(x_we))
_plot_hist(x_wd, 99, (100,10000), '100m-10km (weekday)')
_plot_hist(x_we, 99, (100,10000), '100m-10km (weekend)')
a, i = _plot_hist(x_wd, 149, (100,15000), '100m-15km (weekday)')
print a, ',', i
a, i = _plot_hist(x_we, 149, (100,15000), '100m-15km (weekend)')
print a, ',', i
_plot_hist(x_wd, 199, (100,20000), '100m-20km (weekday)')
_plot_hist(x_we, 199, (100,20000), '100m-20km (weekend)')
_plot_hist(x_wd, 249, (100,25000), '100m-25km (weekday)')
_plot_hist(x_we, 249, (100,25000), '100m-25km (weekend)')
def _plot_hist(x, nbins, range, file_name):
title = my.DATA_FOLDER[:-1].upper() + ' ' + file_name
width = (range[1]-range[0])/nbins
nbins_prev, nbins_after = math.ceil((range[0]-min(x))/width), math.ceil((max(x)-range[1])/width)
h, b = numpy.histogram(x, nbins_prev+nbins+nbins_after)
b = b[nbins_prev:-nbins_after]
s = float(sum(h))
h_prev = [round(val/s, 4) for val in h[:nbins_prev]]
h_after = [round(val/s, 4) for val in h[-nbins_after:]]
h = [round(val/s, 4) for val in h[nbins_prev:-nbins_after]]
n_all = len(x)
h_100_2k = round(len([val for val in x if val > 100 and val < 2000]) / float(len(x)), 4)
#print len(x)
#print h_100_2k
#print len(h_prev), sum(h_prev)
#print len(h), sum(h)
#print len(h_after), sum(h_after)
#print len(h_prev)+len(h)+len(h_after)
#print sum(h_prev)*len(x) + sum(h)*len(x) + sum(h_after)*len(x)
fig = plt.figure(figsize=(8,4))
ax = fig.add_subplot(111)
if my.PLOT_LABEL_ABS:
plt.subplots_adjust(left=0.1, right=0.96, top=0.92, bottom=0.08)
else:
plt.subplots_adjust(left=0.075, right=0.96, top=0.92, bottom=0.08)
#ax.set_autoscaley_on(False)
#ax.set_ylim([0,0.1])
ax.set_xlim(0, range[1])
ax.bar(b[:-1], h, width=width, \
color='#377EB8', alpha=0.8, edgecolor='#377EB8')
if my.PLOT_LABEL_ABS:
formatter = FuncFormatter(lambda v, pos: str(int(v*n_all)) + '\n(' + str(round(v*100, 2)) + '%)')
else:
formatter = FuncFormatter(lambda v, pos: str(round(v*100, 2))+'%')
plt.gca().yaxis.set_major_formatter(formatter)
formatter = FuncFormatter(lambda v, pos: '' if v/1000 == 0 else str(int(v/1000))+'km')
plt.gca().xaxis.set_major_formatter(formatter)
if my.PLOT_LABEL_ABS:
matplotlib.rc('ytick', labelsize=10)
# Power law fit
x = numpy.array(b[:-1])
y = numpy.array([val if val != 0.0 else 0.0001 for val in h])
logx = log10(x)
logy = log10(y)
fitfunc = lambda p, x: p[0] + p[1] * x
errfunc = lambda p, x, y: (y - fitfunc(p, x))
pinit = [1.0, -1.0]
out = leastsq(errfunc, pinit, args=(logx, logy), full_output=1)
pfinal = out[0]
covar = out[1]
index = pfinal[1]
amp = 10.0**pfinal[0]
powerlaw = lambda x, amp, index: amp * (x**index)
ax.plot(x, powerlaw(x, amp, index), \
label = 'fit: y = %s x ^%s' % (round(amp,3), round(index,3)), color='#E41A1C')
info = '<{0}m = {1}%\n >{2}km = {3}%\n 100m-2km = {4}%'.format(range[0], sum(h_prev)*100, \
range[1]/1000, sum(h_after)*100, h_100_2k*100) + \
'\nfit: y = {0} x^{1}'.format(round(amp,3), round(index,3))
#KS Statistics
y_ = powerlaw(x, amp, index)
d, p = ks_2samp(y, y_)
#print 'D, p: ', d, p
info += '\nKS statistic: {0}\np: {1}'.format(round(d,5), round(p,5))
ax.text(0.95, 0.95, info, horizontalalignment='right', verticalalignment='top', transform = ax.transAxes, fontsize=10)
ax.set_title(title, fontsize=11)
if not os.path.exists('data/' + my.DATA_FOLDER + 'displacement/' + 'disp_stat/'):
os.makedirs('data/' + my.DATA_FOLDER + 'displacement/' + 'disp_stat/')
plt.savefig('data/' + my.DATA_FOLDER + 'displacement/' + 'disp_stat/' + file_name + '.pdf')
print 'Stored chart: %s' % file_name
return amp, index
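# --- Illustrative driver (editor addition, not part of the original script) ---
# Based on the parsing above, user_disp.csv is assumed to hold one row per
# user per day, with the daily max displacement in metres in column 1 and the
# date (YYYY-MM-DD) in column 2.
if __name__ == '__main__':
    out_user_disp_plot()
    out_user_disp_plot_weekday()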
|
nbir/gambit-scripts
|
scripts/LAnhoodAnalysis/src/disp_stat.py
|
Python
|
apache-2.0
| 5,328
|
#!/usr/bin/env python
"""Contains the Data Model for a cool Resource.
"""
__author__ = "Sanjay Joshi"
__copyright__ = "IBM Copyright 2017"
__credits__ = ["Sanjay Joshi"]
__license__ = "Apache 2.0"
__version__ = "1.0"
__maintainer__ = "Sanjay Joshi"
__email__ = "joshisa@us.ibm.com"
__status__ = "Prototype"
schema = {
'url': 'corpora/ada_diabetes/statistics',
'schema': {
'cloudhost': {
'type': 'string',
'default': 'Powered by IBM Bluemix and Python Eve'
},
'corpus': {
'type': 'string',
'default': '######'
},
'host': {
'type': 'string',
'default': '##-##-##'
},
'stats': {
'type': 'dict',
'schema': {}
}
},
'allow_unknown': True
}
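# Illustrative wiring sketch (editor addition): with Python Eve, a resource
# definition like this one is typically registered under a key of the DOMAIN
# dict in the settings module; the key and import path below are assumptions.
#
#     from mistub.models import corporaadastatistics
#     DOMAIN = {'corpora_ada_diabetes_statistics': corporaadastatistics.schema}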
|
joshisa/mistub
|
mistub/models/corporaadastatistics.py
|
Python
|
apache-2.0
| 812
|
# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Benchmarking support for Apache Spark services.
In order to benchmark Apache Spark services such as Google Cloud
Platform's Dataproc or Amazon's EMR, we create a BaseSparkService
class. Classes to wrap each provider's Apache Spark Service are
in the provider directory as a subclass of BaseSparkService.
Also in this module is a PkbSparkService, which builds a Spark
cluster by creating VMs and installing the necessary software.
For more on Apache Spark: http://spark.apache.org/
"""
import abc
import copy
import datetime
import posixpath
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import hadoop
flags.DEFINE_string('spark_static_cluster_id', None,
'If set, the name of the Spark cluster, assumed to be '
'ready.')
flags.DEFINE_enum('spark_service_log_level', 'INFO', ['DEBUG', 'INFO', 'FATAL'],
'Supported log levels when submitting jobs to spark service'
' clusters.')
# Cloud to use for pkb-created Spark service.
PKB_MANAGED = 'pkb_managed'
PROVIDER_MANAGED = 'managed'
SUCCESS = 'success'
RUNTIME = 'running_time'
WAITING = 'pending_time'
SPARK_JOB_TYPE = 'spark'
PYSPARK_JOB_TYPE = 'pyspark'
HADOOP_JOB_TYPE = 'hadoop'
SPARK_VM_GROUPS = ('master_group', 'worker_group')
# This is used for error messages.
FLAGS = flags.FLAGS
def GetSparkServiceClass(cloud, service_type):
"""Get the Spark class corresponding to 'cloud'."""
if service_type == PKB_MANAGED:
return PkbSparkService
return resource.GetResourceClass(BaseSparkService, CLOUD=cloud)
class BaseSparkService(resource.BaseResource):
"""Object representing a Spark Service."""
RESOURCE_TYPE = 'BaseSparkService'
# Set by derived classes
CLOUD = None
SERVICE_NAME = None
SPARK_SAMPLE_LOCATION = ('file:///usr/lib/spark/examples/jars/'
'spark-examples.jar')
HADOOP_SAMPLE_LOCATION = ('file:///usr/lib/hadoop-mapreduce/'
'hadoop-mapreduce-examples.jar')
def __init__(self, spark_service_spec):
"""Initialize the Apache Spark Service object.
Args:
spark_service_spec: spec of the spark service.
"""
is_user_managed = spark_service_spec.static_cluster_id is not None
super(BaseSparkService, self).__init__(user_managed=is_user_managed)
self.spec = spark_service_spec
# If only the worker group is specified, assume the master group is
# configured the same way.
if spark_service_spec.master_group is None:
self.spec.master_group = copy.copy(self.spec.worker_group)
self.spec.master_group.vm_count = 1
self.cluster_id = spark_service_spec.static_cluster_id
assert (spark_service_spec.master_group.vm_spec.zone ==
spark_service_spec.worker_group.vm_spec.zone)
self.zone = spark_service_spec.master_group.vm_spec.zone
@abc.abstractmethod
def SubmitJob(self, job_jar, class_name,
job_script=None,
job_poll_interval=None,
job_stdout_file=None, job_arguments=None,
job_type=SPARK_JOB_TYPE):
"""Submit a job to the spark service.
Submits a job and waits for it to complete.
Args:
job_jar: Jar file to execute.
class_name: Name of the main class.
job_script: PySpark script to run. job_jar and class_name must be None.
job_poll_interval: integer saying how often to poll for job
completion. Not used by providers for which submit job is a
synchronous operation.
job_stdout_file: String giving the location of the file in
which to put the standard out of the job.
job_arguments: Arguments to pass to class_name. These are
not the arguments passed to the wrapper that submits the
job.
job_type: The type of the job.
Returns:
dictionary, where success is true if the job succeeded,
false otherwise. The dictionary may also contain an entry for
running_time and pending_time if the platform reports those
metrics.
"""
pass
@abc.abstractmethod
def ExecuteOnMaster(self, script_path, script_args):
"""Execute a script on the master node.
Args:
script_path: local path of the script to execute.
script_args: arguments to pass to the script.
"""
pass
@abc.abstractmethod
def CopyFromMaster(self, remote_path, local_path):
"""Copy a file from the master node.
Args:
remote_path: path of the file to copy.
local_path: destination to copy to.
"""
pass
def GetMetadata(self):
"""Return a dictionary of the metadata for this cluster."""
basic_data = {
'spark_service': self.SERVICE_NAME,
'spark_svc_cloud': self.CLOUD,
'spark_cluster_id': self.cluster_id,
'spark_cluster_zone': getattr(self, 'zone', None) or 'unknown'
}
# TODO grab this information for user_managed clusters.
if not self.user_managed:
basic_data.update({'num_workers': str(self.spec.worker_group.vm_count),
'worker_machine_type':
str(self.spec.worker_group.vm_spec.machine_type)})
return basic_data
@classmethod
def GetExampleJar(cls, job_type):
if job_type == SPARK_JOB_TYPE:
return cls.SPARK_SAMPLE_LOCATION
elif job_type == HADOOP_JOB_TYPE:
return cls.HADOOP_SAMPLE_LOCATION
else:
raise NotImplementedError()
class PkbSparkService(BaseSparkService):
"""A Spark service created from vms.
This class will create a Spark service by creating VMs and installing
  the necessary software. (Similar to how the hbase benchmark currently
  runs.) It should work across all or almost all providers.
"""
CLOUD = PKB_MANAGED
SERVICE_NAME = 'pkb-managed'
def __init__(self, spark_service_spec):
super(PkbSparkService, self).__init__(spark_service_spec)
assert self.cluster_id is None
self.vms = {}
# set by _Create
self.leader = None
def _Create(self):
"""Create an Apache Spark cluster."""
# need to fix this to install spark
def InstallHadoop(vm):
vm.Install('hadoop')
if 'worker_group' not in self.vms:
raise errors.Resource.CreationError(
'PkbSparkService requires worker_group VMs.')
vm_util.RunThreaded(InstallHadoop,
self.vms['worker_group'] + self.vms['master_group'])
self.leader = self.vms['master_group'][0]
hadoop.ConfigureAndStart(self.leader,
self.vms['worker_group'])
def _Delete(self):
pass
def SubmitJob(self, jar_file, class_name, job_poll_interval=None,
job_stdout_file=None, job_arguments=None,
job_type=SPARK_JOB_TYPE):
"""Submit the jar file."""
if job_type == SPARK_JOB_TYPE:
raise NotImplementedError()
cmd_list = [posixpath.join(hadoop.HADOOP_BIN, 'hadoop'),
'jar', jar_file]
if class_name:
cmd_list.append(class_name)
if job_arguments:
cmd_list += job_arguments
cmd_string = ' '.join(cmd_list)
start_time = datetime.datetime.now()
stdout, _ = self.leader.RemoteCommand(cmd_string)
end_time = datetime.datetime.now()
if job_stdout_file:
with open(job_stdout_file, 'w') as f:
f.write(stdout)
return {SUCCESS: True,
RUNTIME: (end_time - start_time).total_seconds()}
@classmethod
def GetExampleJar(cls, job_type):
if job_type == HADOOP_JOB_TYPE:
return posixpath.join(
hadoop.HADOOP_DIR, 'share', 'hadoop', 'mapreduce',
'hadoop-mapreduce-examples-{0}.jar'.format(FLAGS.hadoop_version))
else:
raise NotImplementedError()
def ExecuteOnMaster(self, script_path, script_args):
pass
def CopyFromMaster(self, remote_path, local_path):
pass
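# --- Illustrative sketch (editor addition, not part of PerfKitBenchmarker) ---
# Resolves a provider class and runs the bundled Hadoop example through the
# interface defined above; the spec object, cloud name, main class and job
# arguments are assumptions.
def _example_run_hadoop_example(spark_service_spec, cloud='GCP'):
  cls = GetSparkServiceClass(cloud, PROVIDER_MANAGED)
  service = cls(spark_service_spec)
  service.Create()    # BaseResource lifecycle: provisions the cluster
  try:
    jar = cls.GetExampleJar(HADOOP_JOB_TYPE)
    return service.SubmitJob(jar, 'org.apache.hadoop.examples.WordCount',
                             job_type=HADOOP_JOB_TYPE,
                             job_arguments=['input_dir', 'output_dir'])
  finally:
    service.Delete()  # tears the cluster back down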
|
GoogleCloudPlatform/PerfKitBenchmarker
|
perfkitbenchmarker/spark_service.py
|
Python
|
apache-2.0
| 8,537
|
master = "192.168.222.3"
# In production: e.g. 172.16.2.126
|
SP2LC/procon25-main
|
A-star/config.py
|
Python
|
apache-2.0
| 58
|
import tweepy
import json
import couchdb
import jsonpickle
import sys,os
import time
from textblob import TextBlob
import requests
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches
import matplotlib.path as mplPath
import numpy as np
import socket
maxTweets = 10000000 # Some arbitrary large number
tweetsPerQry = 100 # this is the max the API permits
tweetCount = 0
#create socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', 9999))
print "Connect to socket"
print s.recv(1024)
oauth_keys = [["RjG2NW31P2LBR14iBcl2NTtHQ", "j7Z81CvuoxlmnirBaXYvv0ncugs2qqkBhosxomwOEYESJlokO8", "856109659895218178-JwIbrtpRreIXULwSDsgS6Y9vqUjUZSz", "6gc4AOswm7ZqAebJ4VE8KItobluFcM7KZrjNPVltuTt6j"],
["0OfIH1dCTHAuf5B3K8gy9Ni4X", "9BfIHjGoKLQMISkSFdc7L014RYjvQv74JowHeBbiLQ0yhksYVk", "856109659895218178-2ahh1Em96SjiGVKps00GZHy32US0ZVF", "srM8nm9z0y6sYHeeV2fv6Dxom2V60GfrqkmNHOCnn36LV"],
["p6atboVqe2eN5VrXHGeh0YOuF", "UmpTijm7MpXEs9AXTDRznWhPfrhkbtkIDfQiBwnJN9dlBwBJNz", "839300214410207232-p5KRSFi3c7WLlbma9sP3cRpfX1nAoQn", "JZdKxShEDrTfNj7JMMWIR31exHNSoqzLmNzLzscZj03JF"],
["hVC7zPDwRBni2lIfZDQiKdVTb", "fvWPF5tEkMEyZp9e0yQOIi9a3kzl3Bjfa1b1SfYFxjomgxHNQr", "856002238250012672-WOGROUddQLJI92zQYl07xK9zktEHN6a", "ll9lhJwZDuKIuEsviiKdeXPlpuZbyXTAN1ss6QZxiasXl"],
["GuqJrIAlenTWGJoQDXuLI78G6", "X8QGSq5cogeOBMAjrlZOO1Kh1puoIhKXuHhz80PVcq6BxOIkgb", "856109659895218178-1ON36X1qYHe7FgWY9AJOCMQ6yXV8KTq", "DUjbcjp2t5SHTXVnHQj0KLwN9WEYacD1sZCYqHLz8gNDC"]]
auths = []
for consumer_key, consumer_secret, access_key, access_secret in oauth_keys:
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_key, access_secret)
auths.append(auth)
#Pass our consumer key and consumer secret to Tweepy's user authentication handler
#Creating a twitter API wrapper using tweepy
# If results from a specific ID onwards are reqd, set since_id to that ID.
# else default to no lower limit, go as far back as API allows
sinceId = None
# If only results below a specific ID are required, set max_id to that ID.
# else default to no upper limit, start from the most recent tweet matching the search query.
max_id = -1L
switch = 0
while tweetCount < maxTweets:
api = tweepy.API(auths[switch], #monitor_rate_limit=True,
#retry_count=10000, retry_delay=5,
# retry_errors=set([401, 404, 500, 503]),
#wait_on_rate_limit=True,
wait_on_rate_limit_notify=True)
#Error handling
if (not api):
print ("Problem connecting to API")
try:
if max_id <= 0:
if (not sinceId):
new_tweets = api.search(q="place:01e8a1a140ccdc5c", count=tweetsPerQry)
else:
new_tweets = api.search(q="place:01e8a1a140ccdc5c", count=tweetsPerQry,
since_id=sinceId)
else:
if (not sinceId):
new_tweets = api.search(q="place:01e8a1a140ccdc5c", count=tweetsPerQry,
max_id=str(max_id - 1))
else:
new_tweets = api.search(q="place:01e8a1a140ccdc5c", count=tweetsPerQry,
max_id=str(max_id - 1),
since_id=sinceId)
if not new_tweets:
print "No more tweets found"
time.sleep(240)
for tweet in new_tweets:
#Load tweets and send to analysis server
data = json.loads(jsonpickle.encode(tweet._json))
_id = data['id']
text = str(data['text'].encode('ascii','ignore'))
lang = str(data['lang'].encode('ascii'))
created_at = str(data['created_at'].encode('ascii'))
coordinates = "null"
if data['coordinates'] != None:
coordinates = data['coordinates']['coordinates']
else:
print "No coordinate"
place = str(data['place']['full_name'].encode('ascii'))
is_finance = 'false'
send_data = {'id':_id, 'text':text, 'lang':lang, 'created_at':created_at, 'coordinates':coordinates, 'place':place, 'is_finance':is_finance}
send_date_string = json.dumps(send_data)
print "send data"
s.send(send_date_string)
print " Send data success"
tweetCount += len(new_tweets)
try:
max_id = new_tweets[-1].id
except :
continue
print("Downloaded {0} tweets in Adelaide".format(tweetCount))
except tweepy.TweepError as e:
print "switching keys...ade"
switch += 1
if switch > 4:
print "Limit reached"
switch = 0
time.sleep(180)
continue
except StopIteration:
break
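# --- Illustrative counterpart (editor addition) ---
# The harvester above expects an analysis server already listening on
# 127.0.0.1:9999 that greets the client and then consumes the JSON documents
# built into send_date_string; a minimal local stand-in could look like:
#
#     srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#     srv.bind(('127.0.0.1', 9999)); srv.listen(1)
#     conn, addr = srv.accept()
#     conn.send('ready')
#     while True:
#         chunk = conn.recv(65536)
#         if not chunk:
#             break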
|
COMP90024CloudComputing/Submit_Cloud_Computing
|
search/rest_ade.py
|
Python
|
apache-2.0
| 5,015
|
"""
models.py
App Engine datastore models
"""
from google.appengine.ext import ndb
class Problem(ndb.Model):
problemId = ndb.StringProperty(required=True)
problemInd = ndb.StringProperty(required=True)
contestId = ndb.IntegerProperty(required=True)
peopleSolved = ndb.IntegerProperty(required=True)
tags = ndb.StringProperty(repeated=True)
difficulty = ndb.FloatProperty(default=10.0)
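# --- Illustrative usage sketch (editor addition, not part of cf-tracker) ---
# Assumes an active App Engine/NDB context (request handler or testbed); the
# sample values are made up.
def _example_store_and_query():
    Problem(problemId='1A', problemInd='A', contestId=1,
            peopleSolved=12345, tags=['math', 'greedy'], difficulty=2.5).put()
    return Problem.query(Problem.contestId == 1).fetch()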
|
expl0si0nn/cf-tracker
|
src/cftracker/models.py
|
Python
|
apache-2.0
| 411
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 PolyBeacon, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'python-ari'
copyright = u'2013, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
|
kickstandproject/python-ari
|
doc/source/conf.py
|
Python
|
apache-2.0
| 2,478
|
# Copyright 2012 NetApp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Interface for shares extention."""
import collections
import os
import re
import urllib
from manilaclient import base
from manilaclient import exceptions
from manilaclient import utils
class Share(base.Resource):
"""A share is an extra block level storage to the OpenStack instances."""
def __repr__(self):
return "<Share: %s>" % self.id
def delete(self):
"""Delete this share."""
self.manager.delete(self)
def allow(self, access_type, access):
"""Allow access to a share."""
self._validate_access(access_type, access)
return self.manager.allow(self, access_type, access)
def deny(self, id):
"""Deny access from IP to a share."""
return self.manager.deny(self, id)
def access_list(self):
"""Deny access from IP to a share."""
return self.manager.access_list(self)
def _validate_access(self, access_type, access):
if access_type == 'ip':
self._validate_ip_range(access)
elif access_type == 'passwd':
self._validate_username(access)
else:
raise exceptions.CommandError(
'Only ip and passwd type are supported')
@staticmethod
def _validate_username(access):
        valid_username_re = r'\w{4,32}'
        username = access
        if not re.match(valid_username_re, username):
exc_str = _('Invalid user name. Must be alphanum 4-32 chars long')
raise exceptions.CommandError(exc_str)
@staticmethod
def _validate_ip_range(ip_range):
ip_range = ip_range.split('/')
exc_str = ('Supported ip format examples:\n'
'\t10.0.0.2, 10.0.0.*, 10.0.0.0/24')
if len(ip_range) > 2:
raise exceptions.CommandError(exc_str)
allow_asterisk = (len(ip_range) == 1)
ip_range = ip_range[0].split('.')
if len(ip_range) != 4:
raise exceptions.CommandError(exc_str)
for item in ip_range:
try:
if 0 <= int(item) <= 255:
continue
raise ValueError()
except ValueError:
if not (allow_asterisk and item == '*'):
raise exceptions.CommandError(exc_str)
class ShareManager(base.ManagerWithFind):
"""Manage :class:`Share` resources."""
resource_class = Share
def create(self, share_proto, size, snapshot_id=None, name=None,
description=None):
"""Create NAS.
:param size: Size of NAS in GB
:param snapshot_id: ID of the snapshot
:param name: Name of the NAS
:param description: Short description of a share
:param share_proto: Type of NAS (NFS or CIFS)
:rtype: :class:`Share`
"""
body = {'share': {'size': size,
'snapshot_id': snapshot_id,
'name': name,
'description': description,
'share_proto': share_proto}}
return self._create('/shares', body, 'share')
def get(self, share_id):
"""Get a share.
        :param share_id: The ID of the share to get.
:rtype: :class:`Share`
"""
return self._get("/shares/%s" % share_id, "share")
def list(self, detailed=True, search_opts=None):
"""Get a list of all shares.
:rtype: list of :class:`Share`
"""
if search_opts:
query_string = urllib.urlencode([(key, value)
for (key, value)
in search_opts.items()
if value])
if query_string:
query_string = "?%s" % (query_string,)
else:
query_string = ''
if detailed:
path = "/shares/detail%s" % (query_string,)
else:
path = "/shares%s" % (query_string,)
return self._list(path, 'shares')
def delete(self, share):
"""Delete a share.
:param share: The :class:`Share` to delete.
"""
self._delete("/shares/%s" % base.getid(share))
def allow(self, share, access_type, access):
"""Allow access from IP to a shares.
:param share: The :class:`Share` to delete.
:param access_type: string that represents access type ('ip','domain')
:param access: string that represents access ('127.0.0.1')
"""
access = self._action('os-allow_access', share,
{'access_type': access_type,
'access_to': access})[1]["access"]
return access
def deny(self, share, id):
"""Deny access from IP to a shares.
:param share: The :class:`Share` to delete.
:param ip: string that represents ip address
"""
return self._action('os-deny_access', share, {'access_id': id})
def access_list(self, share):
"""Get access list to the share."""
access_list = self._action("os-access_list", share)[1]["access_list"]
if access_list:
t = collections.namedtuple('Access', access_list[0].keys())
return [t(*value.values()) for value in access_list]
else:
return []
def _action(self, action, share, info=None, **kwargs):
"""Perform a share 'action'."""
body = {action: info}
self.run_hooks('modify_body_for_action', body, **kwargs)
url = '/shares/%s/action' % base.getid(share)
return self.api.client.post(url, body=body)
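# --- Illustrative usage sketch (editor addition, not part of manilaclient) ---
# Assumes `shares` is a ShareManager bound to an authenticated API client
# (normally reached as `client.shares`).
def _example_share_lifecycle(shares):
    share = shares.create('NFS', 1, name='demo')   # 1 GB NFS share
    shares.allow(share, 'ip', '10.0.0.0/24')       # grant an IP range
    rules = shares.access_list(share)              # namedtuple rows
    shares.delete(share)
    return rules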
|
bswartz/python-manilaclient
|
manilaclient/v1/shares.py
|
Python
|
apache-2.0
| 6,247
|
#from selenium.webdriver.firefox.webdriver import WebDriver
from selenium import webdriver
from fixture.session import SessionHelper
from fixture.group import GroupHelper
from fixture.contact import ContactHelper
class Application:
def __init__(self, browser, base_url):
# self.wd = WebDriver()
if browser == "firefox":
self.wd = webdriver.Firefox()
elif browser == "crome":
self.wd = webdriver.Chrome()
elif browser == "ie":
self.wd = webdriver.Ie()
else:
raise ValueError("Unrecognized browser %s" % browser)
#self.wd.implicitly_wait(5)
self.session = SessionHelper(self)
self.group = GroupHelper(self)
self.contact = ContactHelper(self)
self.base_url = base_url
# self.username = username
# self.password = password
def is_valid(self):
try:
self.wd.current_url
return True
except:
return False
def open_home_page(self):
wd = self.wd
        wd.get(self.base_url)
def destroy(self):
self.wd.quit()
|
IrishaS/python_training
|
fixture/application.py
|
Python
|
apache-2.0
| 1,136
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import sys
print('Exiting with status 1')
sys.exit(1)
|
tejal29/pants
|
tests/python/pants_test/python/die.py
|
Python
|
apache-2.0
| 356
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import json
import os
import shutil
from collections import defaultdict
from pants.base.address import SyntheticAddress
from pants.base.address_lookup_error import AddressLookupError
from pants.base.exceptions import TaskError
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_mkdir, safe_open
from pants.util.memo import memoized_property
from pants.contrib.go.subsystems.fetchers import Fetchers
from pants.contrib.go.targets.go_remote_library import GoRemoteLibrary
from pants.contrib.go.tasks.go_task import GoTask
class GoFetch(GoTask):
"""Fetches third-party Go libraries."""
@classmethod
def global_subsystems(cls):
return super(GoFetch, cls).global_subsystems() + (Fetchers,)
@classmethod
def product_types(cls):
return ['go_remote_lib_src']
@property
def cache_target_dirs(self):
# TODO(John Sirois): See TODO in _transitive_download_remote_libs, re-consider how artifact
# caching works for fetches.
return True
def execute(self):
self.context.products.safe_create_data('go_remote_lib_src', lambda: defaultdict(str))
go_remote_libs = self.context.targets(self.is_remote_lib)
if not go_remote_libs:
return
undeclared_deps = self._transitive_download_remote_libs(set(go_remote_libs))
if undeclared_deps:
self._log_undeclared_deps(undeclared_deps)
raise TaskError('Failed to resolve transitive Go remote dependencies.')
def _log_undeclared_deps(self, undeclared_deps):
for dependee, deps in undeclared_deps.items():
self.context.log.error('{address} has remote dependencies which require local declaration:'
.format(address=dependee.address.reference()))
for dep_import_path, address in deps:
self.context.log.error('\t--> {import_path} (expected go_remote_library declaration '
'at {address})'.format(import_path=dep_import_path,
address=address.reference()))
def _get_fetcher(self, import_path):
return Fetchers.global_instance().get_fetcher(import_path)
def _transitive_download_remote_libs(self, go_remote_libs, all_known_addresses=None):
"""Recursively attempt to resolve / download all remote transitive deps of go_remote_libs.
Returns a dict<GoRemoteLibrary, set<tuple<str, Address>>>, which maps a go remote library to a
set of unresolved remote dependencies, each dependency expressed as a tuple containing the
the import path of the dependency and the expected target address. If all transitive
dependencies were successfully resolved, returns an empty dict.
Downloads as many invalidated transitive dependencies as possible, and returns as many
undeclared dependencies as possible. However, because the dependencies of a remote library
can only be determined _after_ it has been downloaded, a transitive dependency of an undeclared
remote library will never be detected.
Because go_remote_libraries do not declare dependencies (rather, they are inferred), injects
all successfully resolved transitive dependencies into the build graph.
"""
if not go_remote_libs:
return {}
all_known_addresses = all_known_addresses or set()
all_known_addresses.update(lib.address for lib in go_remote_libs)
resolved_remote_libs = set()
undeclared_deps = defaultdict(set)
go_remote_lib_src = self.context.products.get_data('go_remote_lib_src')
with self.invalidated(go_remote_libs) as invalidation_check:
for vt in invalidation_check.all_vts:
go_remote_lib = vt.target
gopath = vt.results_dir
fetcher = self._get_fetcher(go_remote_lib.import_path)
if not vt.valid:
root = fetcher.root(go_remote_lib.import_path)
fetch_dir = os.path.join(self.workdir, 'fetches')
root_dir = os.path.join(fetch_dir, root)
# Only fetch each remote root once.
if not os.path.exists(root_dir):
with temporary_dir() as tmp_fetch_root:
fetcher.fetch(go_remote_lib.import_path, dest=tmp_fetch_root, rev=go_remote_lib.rev)
safe_mkdir(root_dir)
for path in os.listdir(tmp_fetch_root):
shutil.move(os.path.join(tmp_fetch_root, path), os.path.join(root_dir, path))
          # TODO(John Sirois): Circle back and get rid of this symlink tree.
# GoWorkspaceTask will further symlink a single package from the tree below into a
# target's workspace when it could just be linking from the fetch_dir. The only thing
# standing in the way is a determination of what we want to artifact cache. If we don't
# want to cache fetched zips, linking straight from the fetch_dir works simply. Otherwise
# thought needs to be applied to using the artifact cache directly or synthesizing a
# canonical owner target for the fetched files that 'child' targets (subpackages) can
# depend on and share the fetch from.
dest_dir = os.path.join(gopath, 'src', root)
# We may have been `invalidate`d and not `clean-all`ed so we need a new empty symlink
# chroot to avoid collision; thus `clean=True`.
safe_mkdir(dest_dir, clean=True)
for path in os.listdir(root_dir):
os.symlink(os.path.join(root_dir, path), os.path.join(dest_dir, path))
# Map the fetched remote sources.
pkg = go_remote_lib.import_path
go_remote_lib_src[go_remote_lib] = os.path.join(gopath, 'src', pkg)
for remote_import_path in self._get_remote_import_paths(pkg, gopath=gopath):
fetcher = self._get_fetcher(remote_import_path)
remote_root = fetcher.root(remote_import_path)
spec_path = os.path.join(go_remote_lib.target_base, remote_root)
package_path = GoRemoteLibrary.remote_package_path(remote_root, remote_import_path)
target_name = package_path or os.path.basename(remote_root)
address = SyntheticAddress(spec_path, target_name)
if address not in all_known_addresses:
try:
              # If we've already resolved a package from this remote root, it's ok to define an
# implicit synthetic remote target for all other packages in the same remote root.
implicit_ok = any(spec_path == a.spec_path for a in all_known_addresses)
remote_lib = self._resolve(go_remote_lib, address, package_path, implicit_ok)
resolved_remote_libs.add(remote_lib)
all_known_addresses.add(address)
except self.UndeclaredRemoteLibError as e:
undeclared_deps[go_remote_lib].add((remote_import_path, e.address))
self.context.build_graph.inject_dependency(go_remote_lib.address, address)
# Recurse after the invalidated block, so the libraries we downloaded are now "valid"
# and thus we don't try to download a library twice.
trans_undeclared_deps = self._transitive_download_remote_libs(resolved_remote_libs,
all_known_addresses)
undeclared_deps.update(trans_undeclared_deps)
return undeclared_deps
class UndeclaredRemoteLibError(Exception):
def __init__(self, address):
self.address = address
  def _resolve(self, dependent_remote_lib, address, pkg, implicit_ok):
"""Resolves the GoRemoteLibrary at `address` defining the given `pkg`.
If `implicit_ok` is True, then a GoRemoteLibrary to own `pkg` is always synthesized; otherwise
the address must already exist in the build graph (a BUILD file must exist on disk that owns
the given `pkg` and declares a `rev` for it).
:param dependent_remote_lib: The remote library that depends on the remote `pkg`.
:type: :class:`pants.contrib.go.targets.go_remote_library.GoRemoteLibrary`
:param address: The address of the remote library that should own `pkg`.
:type: :class:`pants.base.Address`
:param string pkg: The remote package path whose owning target needs to be resolved.
    :param bool implicit_ok: `False` if the given `address` must be defined in a BUILD file on disk;
otherwise a remote library to own `pkg` will always be created and
returned.
:returns: The resulting resolved remote library after injecting it in the build graph.
:rtype: :class:`pants.contrib.go.targets.go_remote_library.GoRemoteLibrary`
:raises: :class:`GoFetch.UndeclaredRemoteLibError`: If no BUILD file exists for the remote root
`pkg` lives in.
"""
    if implicit_ok:
self.context.add_new_target(address=address,
target_base=dependent_remote_lib.target_base,
target_type=GoRemoteLibrary,
pkg=pkg)
else:
try:
self.context.build_graph.inject_address_closure(address)
except AddressLookupError:
raise self.UndeclaredRemoteLibError(address)
return self.context.build_graph.get_target(address)
@memoized_property
def go_stdlib(self):
out = self.go_dist.create_go_cmd('list', args=['std']).check_output()
return frozenset(out.strip().split())
def _get_remote_import_paths(self, pkg, gopath=None):
"""Returns the remote import paths declared by the given Go `pkg`."""
out = self.go_dist.create_go_cmd('list', args=['-json', pkg], gopath=gopath).check_output()
try:
data = json.loads(out)
imports = data.get('Imports', [])
imports.extend(data.get('TestImports', []))
return [imp for imp in imports if imp not in self.go_stdlib]
except ValueError as e:
save_file = os.path.join(gopath, '.errors', pkg, 'list.json')
with safe_open(save_file, 'w') as fp:
fp.write(out)
self.context.log.error('Problem determining imports for {}, saved json response to {}'
.format(pkg, save_file))
raise TaskError(e)
|
areitz/pants
|
contrib/go/src/python/pants/contrib/go/tasks/go_fetch.py
|
Python
|
apache-2.0
| 10,376
|
# Decompiled module: toontown.battle.Movie
from panda3d.core import Point3, Vec3
import copy
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import *
from direct.showbase import DirectObject
import random
from BattleBase import *
import BattleExperience
import BattleParticles
import MovieDrop
import MovieFire
import MovieHeal
import MovieLure
import MovieNPCSOS
import MoviePetSOS
import MovieSOS
import MovieSound
import MovieSquirt
import MovieSuitAttacks
import MovieThrow
import MovieToonVictory
import MovieTrap
import MovieUtil
import PlayByPlayText
import RewardPanel
from SuitBattleGlobals import *
from toontown.distributed import DelayDelete
from toontown.toon import NPCToons
from toontown.toon import Toon
from toontown.toonbase import TTLocalizer
from toontown.toonbase import ToontownGlobals
from toontown.toonbase.ToontownBattleGlobals import *
from toontown.toontowngui import TTDialog
from toontown.suit import SuitDNA
from otp.nametag.NametagConstants import *
from otp.nametag.NametagGroup import *
camPos = Point3(14, 0, 10)
camHpr = Vec3(89, -30, 0)
randomBattleTimestamp = config.GetBool('random-battle-timestamp', 0)
class Movie(DirectObject.DirectObject):
notify = DirectNotifyGlobal.directNotify.newCategory('Movie')
def __init__(self, battle):
self.battle = battle
self.track = None
self.rewardPanel = None
self.rewardCallback = None
self.playByPlayText = PlayByPlayText.PlayByPlayText()
self.playByPlayText.hide()
self.renderProps = []
self.hasBeenReset = 0
self.reset()
self.rewardHasBeenReset = 0
self.tutRewardDialog = None
self.tutSequence = None
self.resetReward()
return
def cleanup(self):
self.reset()
self.resetReward()
self.battle = None
if self.playByPlayText != None:
self.playByPlayText.cleanup()
self.playByPlayText = None
if self.rewardPanel != None:
self.rewardPanel.cleanup()
self.cleanupTutRewardDialog()
self.rewardPanel = None
self.rewardCallback = None
return
def needRestoreColor(self):
self.restoreColor = 1
def clearRestoreColor(self):
self.restoreColor = 0
def needRestoreHips(self):
self.restoreHips = 1
def clearRestoreHips(self):
self.restoreHips = 0
def needRestoreHeadScale(self):
self.restoreHeadScale = 1
def clearRestoreHeadScale(self):
self.restoreHeadScale = 0
def needRestoreToonScale(self):
self.restoreToonScale = 1
def clearRestoreToonScale(self):
self.restoreToonScale = 0
def needRestoreParticleEffect(self, effect):
self.specialParticleEffects.append(effect)
def clearRestoreParticleEffect(self, effect):
if effect in self.specialParticleEffects:
self.specialParticleEffects.remove(effect)
def needRestoreRenderProp(self, prop):
self.renderProps.append(prop)
def clearRenderProp(self, prop):
if prop in self.renderProps:
self.renderProps.remove(prop)
def restore(self):
return
for toon in self.battle.activeToons:
toon.loop('neutral')
origPos, origHpr = self.battle.getActorPosHpr(toon)
toon.setPosHpr(self.battle, origPos, origHpr)
hands = [toon.getLeftHand(), toon.getRightHand()]
for hand in hands:
props = hand.getChildren()
for prop in props:
if prop.getName() != 'book':
MovieUtil.removeProp(prop)
if self.restoreColor == 1:
headParts = toon.getHeadParts()
torsoParts = toon.getTorsoParts()
legsParts = toon.getLegsParts()
partsList = [headParts, torsoParts, legsParts]
for parts in partsList:
for partNum in xrange(0, parts.getNumPaths()):
nextPart = parts.getPath(partNum)
nextPart.clearColorScale()
nextPart.clearTransparency()
if self.restoreHips == 1:
parts = toon.getHipsParts()
for partNum in xrange(0, parts.getNumPaths()):
nextPart = parts.getPath(partNum)
props = nextPart.getChildren()
for prop in props:
if prop.getName() == 'redtape-tube.egg':
MovieUtil.removeProp(prop)
if self.restoreHeadScale == 1:
headScale = ToontownGlobals.toonHeadScales[toon.style.getAnimal()]
toon.getPart('head').setScale(headScale)
if self.restoreToonScale == 1:
toon.setScale(1)
headParts = toon.getHeadParts()
for partNum in xrange(0, headParts.getNumPaths()):
part = headParts.getPath(partNum)
part.setHpr(0, 0, 0)
part.setPos(0, 0, 0)
arms = toon.findAllMatches('**/arms')
sleeves = toon.findAllMatches('**/sleeves')
hands = toon.findAllMatches('**/hands')
for partNum in xrange(0, arms.getNumPaths()):
armPart = arms.getPath(partNum)
sleevePart = sleeves.getPath(partNum)
handsPart = hands.getPath(partNum)
armPart.setHpr(0, 0, 0)
sleevePart.setHpr(0, 0, 0)
handsPart.setHpr(0, 0, 0)
for suit in self.battle.activeSuits:
if suit._Actor__animControlDict != None:
suit.loop('neutral')
suit.battleTrapIsFresh = 0
origPos, origHpr = self.battle.getActorPosHpr(suit)
suit.setPosHpr(self.battle, origPos, origHpr)
hands = [suit.getRightHand(), suit.getLeftHand()]
for hand in hands:
props = hand.getChildren()
for prop in props:
MovieUtil.removeProp(prop)
for effect in self.specialParticleEffects:
if effect != None:
effect.cleanup()
self.specialParticleEffects = []
for prop in self.renderProps:
MovieUtil.removeProp(prop)
self.renderProps = []
return
def _deleteTrack(self):
if self.track:
DelayDelete.cleanupDelayDeletes(self.track)
self.track = None
return
def reset(self, finish = 0):
if self.hasBeenReset == 1:
return
self.hasBeenReset = 1
self.stop()
self._deleteTrack()
if finish == 1:
self.restore()
self.toonAttackDicts = []
self.suitAttackDicts = []
self.specialSuitAttackDicts = []
self.restoreColor = 0
self.restoreHips = 0
self.restoreHeadScale = 0
self.restoreToonScale = 0
self.specialParticleEffects = []
for prop in self.renderProps:
MovieUtil.removeProp(prop)
self.renderProps = []
def resetReward(self, finish = 0):
if self.rewardHasBeenReset == 1:
return
else:
self.rewardHasBeenReset = 1
self.stop()
self._deleteTrack()
if finish == 1:
self.restore()
self.toonRewardDicts = []
if self.rewardPanel != None:
self.rewardPanel.destroy()
self.rewardPanel = None
return
def play(self, ts, callback):
self.hasBeenReset = 0
ptrack = Sequence()
camtrack = Sequence()
if random.random() > 0.5:
MovieUtil.shotDirection = 'left'
else:
MovieUtil.shotDirection = 'right'
for s in self.battle.activeSuits:
s.battleTrapIsFresh = 0
spattacks, spcam = self.__doSpecialSuitAttacks()
if spattacks:
ptrack.append(spattacks)
camtrack.append(spcam)
sattacks, scam = self.__doSuitAttacks(True)
if sattacks:
ptrack.append(sattacks)
camtrack.append(scam)
tattacks, tcam = self.__doToonAttacks()
if tattacks:
ptrack.append(tattacks)
camtrack.append(tcam)
sattacks, scam = self.__doSuitAttacks(False)
if sattacks:
ptrack.append(sattacks)
camtrack.append(scam)
ptrack.append(Func(callback))
self._deleteTrack()
self.track = Sequence(ptrack, name='movie-track-%d' % self.battle.doId)
if self.battle.localToonPendingOrActive():
self.track = Parallel(self.track, camtrack, name='movie-track-with-cam-%d' % self.battle.doId)
if randomBattleTimestamp == 1:
randNum = random.randint(0, 99)
dur = self.track.getDuration()
ts = float(randNum) / 100.0 * dur
self.track.delayDeletes = []
for suit in self.battle.suits:
self.track.delayDeletes.append(DelayDelete.DelayDelete(suit, 'Movie.play'))
for toon in self.battle.toons:
self.track.delayDeletes.append(DelayDelete.DelayDelete(toon, 'Movie.play'))
self.track.start(ts)
return None
def finish(self):
self.track.finish()
def playReward(self, ts, name, callback, noSkip = False):
self.rewardHasBeenReset = 0
ptrack = Sequence()
camtrack = Sequence()
self.rewardPanel = RewardPanel.RewardPanel(name)
self.rewardPanel.hide()
victory, camVictory, skipper = MovieToonVictory.doToonVictory(self.battle.localToonActive(), self.battle.activeToons, self.toonRewardIds, self.toonRewardDicts, self.deathList, self.rewardPanel, 1, self.uberList, self.helpfulToons, noSkip=noSkip)
if victory:
skipper.setIvals((ptrack, camtrack), ptrack.getDuration())
ptrack.append(victory)
camtrack.append(camVictory)
ptrack.append(Func(callback))
self._deleteTrack()
self.track = Sequence(ptrack, name='movie-reward-track-%d' % self.battle.doId)
if self.battle.localToonActive():
self.track = Parallel(self.track, camtrack, name='movie-reward-track-with-cam-%d' % self.battle.doId)
self.track.delayDeletes = []
for t in self.battle.activeToons:
self.track.delayDeletes.append(DelayDelete.DelayDelete(t, 'Movie.playReward'))
skipper.setIvals((self.track,), 0.0)
skipper.setBattle(self.battle)
self.track.start(ts)
def cleanupTutRewardDialog(self):
if self.tutRewardDialog:
self.tutRewardDialog.cleanup()
self.tutRewardDialog = None
if self.tutSequence:
self.tutSequence.pause()
self.tutSequence = None
return
def playTutorialReward(self, ts, name, callback):
self.rewardHasBeenReset = 0
self.rewardPanel = RewardPanel.RewardPanel(name)
self.rewardCallback = callback
camera.setPosHpr(0, 8, base.localAvatar.getHeight() * 0.66, 179, 15, 0)
self.rewardPanel.initGagFrame(base.localAvatar, base.localAvatar.experience.experience, [0] * len(SuitDNA.suitDepts), noSkip=True)
earnedExp = self.toonRewardDicts[0]['earnedExp']
if all((not xp for xp in earnedExp)):
self.playTutorialNoGagReward()
else:
self.playTutorialGagReward(None, earnedExp)
return
def playTutorialGagReward(self, dialog, earnedXp, tutTrack = 0):
self.cleanupTutRewardDialog()
if tutTrack >= len(earnedXp):
self.rewardCallback()
return
else:
xp = earnedXp[tutTrack]
if xp:
self.tutRewardDialog = TTDialog.TTDialog(text=TTLocalizer.MovieTutorialMessage % (xp, TTLocalizer.BattleGlobalTracks[tutTrack].capitalize()), command=self.playTutorialGagReward, extraArgs=[earnedXp, tutTrack + 1], style=TTDialog.Acknowledge, fadeScreen=None, pos=(0.65, 0, 0.5), scale=0.8)
self.tutSequence = Sequence()
self.tutSequence += self.rewardPanel.getTrackIntervalList(base.localAvatar, tutTrack, base.localAvatar.experience.getExp(tutTrack), xp, 0)
self.tutSequence.start()
else:
self.playTutorialGagReward(None, earnedXp, tutTrack + 1)
return
def playTutorialNoGagReward(self):
self.tutRewardDialog = TTDialog.TTDialog(text=TTLocalizer.MovieTutorialMessage2, command=self.__callbackAndCleanupTut, style=TTDialog.Acknowledge, fadeScreen=None, pos=(0.65, 0, 0.5), scale=0.8)
return
def __callbackAndCleanupTut(self, dialog = None):
self.cleanupTutRewardDialog()
self.rewardCallback()
def stop(self):
if self.track:
self.track.finish()
self._deleteTrack()
if hasattr(self, 'track1'):
self.track1.finish()
self.track1 = None
if hasattr(self, 'track2'):
self.track2.finish()
self.track2 = None
if hasattr(self, 'track3'):
self.track3.finish()
self.track3 = None
if self.rewardPanel:
self.rewardPanel.hide()
if self.playByPlayText:
self.playByPlayText.hide()
return
def __doToonAttacks(self):
if config.GetBool('want-toon-attack-anims', 1):
track = Sequence(name='toon-attacks')
camTrack = Sequence(name='toon-attacks-cam')
ival, camIval = MovieFire.doFires(self.__findToonAttack(FIRE))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieSOS.doSOSs(self.__findToonAttack(SOS))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieNPCSOS.doNPCSOSs(self.__findToonAttack(NPCSOS))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MoviePetSOS.doPetSOSs(self.__findToonAttack(PETSOS))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieHeal.doHeals(self.__findToonAttack(HEAL), self.battle.getInteractivePropTrackBonus() == HEAL)
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieTrap.doTraps(self.__findToonAttack(TRAP))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieLure.doLures(self.__findToonAttack(LURE))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieSound.doSounds(self.__findToonAttack(SOUND))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieThrow.doThrows(self.__findToonAttack(THROW))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieSquirt.doSquirts(self.__findToonAttack(SQUIRT))
if ival:
track.append(ival)
camTrack.append(camIval)
ival, camIval = MovieDrop.doDrops(self.__findToonAttack(DROP))
if ival:
track.append(ival)
camTrack.append(camIval)
if len(track) == 0:
return (None, None)
else:
return (track, camTrack)
else:
return (None, None)
return None
def genRewardDicts(self, battleExperience):
experiences, self.deathList, self.uberList, self.helpfulToons = battleExperience
self.toonRewardDicts = BattleExperience.genRewardDicts(experiences)
self.toonRewardIds = [ experience[0] for experience in experiences ]
def genAttackDicts(self, activeToons, activeSuits, toonAttacks, suitAttacks, specialSuitAttacks):
if self.track and self.track.isPlaying():
self.notify.warning('genAttackDicts() - track is playing!')
self.__genToonAttackDicts(activeToons, activeSuits, toonAttacks)
self.__genSuitAttackDicts(activeToons, activeSuits, suitAttacks)
self.__genSpecialSuitAttackDicts(activeSuits, specialSuitAttacks)
def __genToonAttackDicts(self, toons, suits, toonAttacks):
for ta in toonAttacks:
targetGone = 0
track = ta[TOON_TRACK_COL]
if track != NO_ATTACK:
adict = {}
toonIndex = ta[TOON_ID_COL]
toonId = toons[toonIndex]
toon = self.battle.findToon(toonId)
if toon == None:
continue
level = ta[TOON_LVL_COL]
adict['toon'] = toon
adict['track'] = track
adict['level'] = level
hps = ta[TOON_HP_COL]
kbbonuses = ta[TOON_KBBONUS_COL]
if track == NPCSOS:
adict['npcId'] = ta[TOON_TGT_COL]
toonId = ta[TOON_TGT_COL]
track, npc_level, npc_hp = NPCToons.getNPCTrackLevelHp(adict['npcId'])
if track == None:
track = NPCSOS
adict['track'] = track
adict['level'] = npc_level
elif track == PETSOS:
petId = ta[TOON_TGT_COL]
adict['toonId'] = toonId
adict['petId'] = petId
if track == SOS:
targetId = ta[TOON_TGT_COL]
if targetId == base.localAvatar.doId:
target = base.localAvatar
adict['targetType'] = 'callee'
elif toon == base.localAvatar:
target = base.cr.identifyAvatar(targetId)
adict['targetType'] = 'caller'
else:
target = None
adict['targetType'] = 'observer'
adict['target'] = target
elif track in (NPCSOS,
PETSOS,
NPC_COGS_MISS,
NPC_TOONS_HIT,
NPC_COGS_POWER_DOWN,
NPC_TOONS_POWER_UP,
NPC_RESTOCK_GAGS):
adict['special'] = 1
toonHandles = []
for t in toons:
if t != -1:
target = self.battle.findToon(t)
if target == None:
continue
if (track == NPC_TOONS_HIT or track == NPC_TOONS_POWER_UP) and t == toonId:
continue
toonHandles.append(target)
adict['toons'] = toonHandles
suitHandles = []
for s in suits:
if s != -1:
target = self.battle.findSuit(s)
if target == None:
continue
suitHandles.append(target)
adict['suits'] = suitHandles
if track == PETSOS:
del adict['special']
targets = []
for t in toons:
if t != -1:
target = self.battle.findToon(t)
if target == None:
continue
tdict = {}
tdict['toon'] = target
tdict['hp'] = hps[toons.index(t)]
self.notify.debug('PETSOS: toon: %d healed for hp: %d' % (target.doId, hps[toons.index(t)]))
targets.append(tdict)
if len(targets) > 0:
adict['target'] = targets
elif track == HEAL:
if levelAffectsGroup(HEAL, level):
targets = []
for t in toons:
if t != toonId and t != -1:
target = self.battle.findToon(t)
if target == None:
continue
tdict = {}
tdict['toon'] = target
tdict['hp'] = hps[toons.index(t)]
self.notify.debug('HEAL: toon: %d healed for hp: %d' % (target.doId, hps[toons.index(t)]))
targets.append(tdict)
if len(targets) > 0:
adict['target'] = targets
else:
targetGone = 1
else:
targetIndex = ta[TOON_TGT_COL]
if targetIndex < 0:
targetGone = 1
else:
targetId = toons[targetIndex]
target = self.battle.findToon(targetId)
if target != None:
tdict = {}
tdict['toon'] = target
tdict['hp'] = hps[targetIndex]
adict['target'] = tdict
else:
targetGone = 1
elif attackAffectsGroup(track, level, ta[TOON_TRACK_COL]):
targets = []
for s in suits:
if s != -1:
target = self.battle.findSuit(s)
if ta[TOON_TRACK_COL] == NPCSOS:
if track == LURE and self.battle.isSuitLured(target):
continue
elif track == TRAP and (self.battle.isSuitLured(target) or target.battleTrap != NO_TRAP):
continue
targetIndex = suits.index(s)
sdict = {}
sdict['suit'] = target
sdict['hp'] = hps[targetIndex]
if ta[TOON_TRACK_COL] == NPCSOS and track == DROP and hps[targetIndex] == 0:
continue
sdict['kbbonus'] = kbbonuses[targetIndex]
sdict['died'] = ta[SUIT_DIED_COL] & 1 << targetIndex
sdict['revived'] = ta[SUIT_REVIVE_COL] & 1 << targetIndex
if sdict['died'] != 0:
pass
sdict['leftSuits'] = []
sdict['rightSuits'] = []
targets.append(sdict)
adict['target'] = targets
else:
targetIndex = ta[TOON_TGT_COL]
if targetIndex < 0:
targetGone = 1
else:
targetId = suits[targetIndex]
target = self.battle.findSuit(targetId)
sdict = {}
sdict['suit'] = target
if target not in self.battle.activeSuits:
targetGone = 1
suitIndex = 0
else:
suitIndex = self.battle.activeSuits.index(target)
leftSuits = []
for si in xrange(0, suitIndex):
asuit = self.battle.activeSuits[si]
if not self.battle.isSuitLured(asuit):
leftSuits.append(asuit)
lenSuits = len(self.battle.activeSuits)
rightSuits = []
if lenSuits > suitIndex + 1:
for si in xrange(suitIndex + 1, lenSuits):
asuit = self.battle.activeSuits[si]
if not self.battle.isSuitLured(asuit):
rightSuits.append(asuit)
sdict['leftSuits'] = leftSuits
sdict['rightSuits'] = rightSuits
sdict['hp'] = hps[targetIndex]
sdict['kbbonus'] = kbbonuses[targetIndex]
sdict['died'] = ta[SUIT_DIED_COL] & 1 << targetIndex
sdict['revived'] = ta[SUIT_REVIVE_COL] & 1 << targetIndex
if sdict['revived'] != 0:
pass
if sdict['died'] != 0:
pass
if track == DROP or track == TRAP:
adict['target'] = [sdict]
else:
adict['target'] = sdict
adict['hpbonus'] = ta[TOON_HPBONUS_COL]
adict['sidestep'] = ta[TOON_ACCBONUS_COL]
if 'npcId' in adict:
adict['sidestep'] = 0
adict['battle'] = self.battle
adict['playByPlayText'] = self.playByPlayText
if targetGone == 0:
self.toonAttackDicts.append(adict)
else:
self.notify.warning('genToonAttackDicts() - target gone!')
def compFunc(a, b):
alevel = a['level']
blevel = b['level']
if alevel > blevel:
return 1
if alevel < blevel:
return -1
return 0
self.toonAttackDicts.sort(compFunc)
return
def __findToonAttack(self, track):
tp = []
for ta in self.toonAttackDicts:
if ta['track'] == track or track == NPCSOS and 'special' in ta:
tp.append(ta)
if track == TRAP:
sortedTraps = []
for attack in tp:
if 'npcId' not in attack:
sortedTraps.append(attack)
for attack in tp:
if 'npcId' in attack:
sortedTraps.append(attack)
tp = sortedTraps
return tp
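    # For TRAP, __findToonAttack reorders the list so toon-thrown traps come
    # before NPC SOS traps, which appears to ensure the regular trap movies
    # are built (and animated) first.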
def __genSpecialSuitAttackDicts(self, suits, suitAttacks):
for sa in suitAttacks:
attack = sa[SUIT_ATK_COL]
if attack == NO_ATTACK:
continue
suitIndex = sa[SUIT_ID_COL]
suitId = suits[suitIndex]
suit = self.battle.findSuit(suitId)
if not suit:
continue
atkDict = {}
atkDict['id'] = attack
atkDict['name'] = SuitAttacks.keys()[attack]
atkDict['suit'] = suit
atkDict['battle'] = self.battle
atkDict['playByPlayText'] = self.playByPlayText
atkDict['taunt'] = sa[SUIT_SPECIAL_TAUNT_COL]
self.specialSuitAttackDicts.append(atkDict)
def __genSuitAttackDicts(self, toons, suits, suitAttacks):
for sa in suitAttacks:
targetGone = 0
attack = sa[SUIT_ATK_COL]
if attack != NO_ATTACK:
suitIndex = sa[SUIT_ID_COL]
suitId = suits[suitIndex]
suit = self.battle.findSuit(suitId)
if suit == None:
self.notify.warning('suit: %d not in battle!' % suitId)
return
adict = getSuitAttack(suit.getStyleName(), suit.getLevel(), attack)
adict['suit'] = suit
adict['battle'] = self.battle
adict['playByPlayText'] = self.playByPlayText
adict['taunt'] = sa[SUIT_TAUNT_COL]
adict['beforeToons'] = sa[SUIT_BEFORE_TOONS_COL]
hps = sa[SUIT_HP_COL]
if adict['group'] == ATK_TGT_GROUP:
targets = []
for t in toons:
if t != -1:
target = self.battle.findToon(t)
if target == None:
continue
targetIndex = toons.index(t)
tdict = {}
tdict['toon'] = target
tdict['hp'] = hps[targetIndex]
self.notify.debug('DAMAGE: toon: %d hit for hp: %d' % (target.doId, hps[targetIndex]))
toonDied = sa[TOON_DIED_COL] & 1 << targetIndex
tdict['died'] = toonDied
targets.append(tdict)
if len(targets) > 0:
adict['target'] = targets
else:
targetGone = 1
elif adict['group'] == ATK_TGT_SINGLE:
targetIndex = sa[SUIT_TGT_COL]
targetId = toons[targetIndex]
target = self.battle.findToon(targetId)
if target == None:
targetGone = 1
break
tdict = {}
tdict['toon'] = target
tdict['hp'] = hps[targetIndex]
self.notify.debug('DAMAGE: toon: %d hit for hp: %d' % (target.doId, hps[targetIndex]))
toonDied = sa[TOON_DIED_COL] & 1 << targetIndex
tdict['died'] = toonDied
toonIndex = self.battle.activeToons.index(target)
rightToons = []
for ti in xrange(0, toonIndex):
rightToons.append(self.battle.activeToons[ti])
lenToons = len(self.battle.activeToons)
leftToons = []
if lenToons > toonIndex + 1:
for ti in xrange(toonIndex + 1, lenToons):
leftToons.append(self.battle.activeToons[ti])
tdict['leftToons'] = leftToons
tdict['rightToons'] = rightToons
adict['target'] = tdict
else:
self.notify.warning('got suit attack not group or single!')
if targetGone == 0:
self.suitAttackDicts.append(adict)
else:
self.notify.warning('genSuitAttackDicts() - target gone!')
return
def __doSpecialSuitAttacks(self):
if not config.GetBool('want-suit-anims', 1):
return (None, None)
track = Sequence(name='special-suit-attacks')
camTrack = Sequence(name='special-suit-attacks-cam')
for attack in self.specialSuitAttackDicts:
ival, camIval = MovieSuitAttacks.doSuitAttack(attack)
if ival:
taunt = getAttackTaunt(attack['name'], attack['taunt'])
suit = attack['suit']
ival = Sequence(Func(suit.setChatAbsolute, taunt, CFSpeech), ival, Func(suit.clearChat))
track.append(ival)
camTrack.append(camIval)
if len(track) == 0:
return (None, None)
else:
return (track, camTrack)
def __doSuitAttacks(self, beforeToons = False):
if config.GetBool('want-suit-anims', 1):
track = Sequence(name='suit-attacks')
camTrack = Sequence(name='suit-attacks-cam')
isLocalToonSad = False
for a in self.suitAttackDicts:
if a['beforeToons'] != beforeToons:
continue
ival, camIval = MovieSuitAttacks.doSuitAttack(a)
if ival:
track.append(ival)
camTrack.append(camIval)
targetField = a.get('target')
if targetField is None:
continue
if a['group'] == ATK_TGT_GROUP:
for target in targetField:
if target['died'] and target['toon'].doId == base.localAvatar.doId:
isLocalToonSad = True
elif a['group'] == ATK_TGT_SINGLE:
if targetField['died'] and targetField['toon'].doId == base.localAvatar.doId:
isLocalToonSad = True
if isLocalToonSad:
break
if len(track) == 0:
return (None, None)
return (track, camTrack)
else:
return (None, None)
return
| DedMemez/ODS-August-2017 | battle/Movie.py | Python | apache-2.0 | 33,920 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Neural GPU."""
import math
import os
import random
import sys
import threading
import time
import numpy as np
import tensorflow as tf
import program_utils
import data_utils as data
import neural_gpu as ngpu
import wmt_utils as wmt
tf.app.flags.DEFINE_float("lr", 0.1, "Learning rate.")
tf.app.flags.DEFINE_float("init_weight", 0.8, "Initial weights deviation.")
tf.app.flags.DEFINE_float("max_grad_norm", 4.0, "Clip gradients to this norm.")
tf.app.flags.DEFINE_float("cutoff", 1.2, "Cutoff at the gates.")
tf.app.flags.DEFINE_float("curriculum_ppx", 9.9, "Move curriculum if ppl < X.")
tf.app.flags.DEFINE_float("curriculum_seq", 0.3, "Move curriculum if seq < X.")
tf.app.flags.DEFINE_float("dropout", 0.1, "Dropout that much.")
tf.app.flags.DEFINE_float("grad_noise_scale", 0.0, "Gradient noise scale.")
tf.app.flags.DEFINE_float("max_sampling_rate", 0.1, "Maximal sampling rate.")
tf.app.flags.DEFINE_float("length_norm", 0.0, "Length normalization.")
tf.app.flags.DEFINE_float("train_beam_freq", 0.0, "Beam-based training.")
tf.app.flags.DEFINE_float("train_beam_anneal", 20000, "How many steps anneal.")
tf.app.flags.DEFINE_integer("eval_beam_steps", 4, "How many beam steps eval.")
tf.app.flags.DEFINE_integer("batch_size", 32, "Batch size.")
tf.app.flags.DEFINE_integer("steps_per_checkpoint", 100, "Steps per epoch.")
tf.app.flags.DEFINE_integer("nmaps", 64, "Number of floats in each cell.")
tf.app.flags.DEFINE_integer("vec_size", 64, "Size of word vectors.")
tf.app.flags.DEFINE_integer("train_data_size", 1000, "Training examples/len.")
tf.app.flags.DEFINE_integer("max_length", 40, "Maximum length.")
tf.app.flags.DEFINE_integer("random_seed", 125459, "Random seed.")
tf.app.flags.DEFINE_integer("nconvs", 2, "How many convolutions / 1 step.")
tf.app.flags.DEFINE_integer("kw", 3, "Kernel width.")
tf.app.flags.DEFINE_integer("kh", 3, "Kernel height.")
tf.app.flags.DEFINE_integer("height", 4, "Height.")
tf.app.flags.DEFINE_integer("mem_size", -1, "Memory size (sqrt)")
tf.app.flags.DEFINE_integer("soft_mem_size", 1024, "Softmax memory this size.")
tf.app.flags.DEFINE_integer("num_gpus", 1, "Number of GPUs to use.")
tf.app.flags.DEFINE_integer("num_replicas", 1, "Number of replicas in use.")
tf.app.flags.DEFINE_integer("beam_size", 1, "Beam size during decoding. "
"If 0, no decoder, the non-extended Neural GPU.")
tf.app.flags.DEFINE_integer("max_target_vocab", 0,
"Maximal size of target vocabulary.")
tf.app.flags.DEFINE_integer("decode_offset", 0, "Offset for decoding.")
tf.app.flags.DEFINE_integer("task", -1, "Task id when running on borg.")
tf.app.flags.DEFINE_integer("nprint", 0, "How many test examples to print out.")
tf.app.flags.DEFINE_integer("eval_bin_print", 3, "How many bins step in eval.")
tf.app.flags.DEFINE_integer("mode", 0, "Mode: 0-train other-decode.")
tf.app.flags.DEFINE_bool("atrous", False, "Whether to use atrous convs.")
tf.app.flags.DEFINE_bool("layer_norm", False, "Do layer normalization.")
tf.app.flags.DEFINE_bool("quantize", False, "Whether to quantize variables.")
tf.app.flags.DEFINE_bool("do_train", True, "If false, only update memory.")
tf.app.flags.DEFINE_bool("rnn_baseline", False, "If true build an RNN instead.")
tf.app.flags.DEFINE_bool("simple_tokenizer", False,
"If true, tokenize on spaces only, digits are 0.")
tf.app.flags.DEFINE_bool("normalize_digits", True,
"Whether to normalize digits with simple tokenizer.")
tf.app.flags.DEFINE_integer("vocab_size", 16, "Joint vocabulary size.")
tf.app.flags.DEFINE_string("data_dir", "/tmp", "Data directory")
tf.app.flags.DEFINE_string("train_dir", "/tmp/", "Directory to store models.")
tf.app.flags.DEFINE_string("test_file_prefix", "", "Files to test (.en,.fr).")
tf.app.flags.DEFINE_integer("max_train_data_size", 0,
"Limit on the size of training data (0: no limit).")
tf.app.flags.DEFINE_string("word_vector_file_en", "",
"Optional file with word vectors to start training.")
tf.app.flags.DEFINE_string("word_vector_file_fr", "",
"Optional file with word vectors to start training.")
tf.app.flags.DEFINE_string("problem", "wmt", "What problem are we solving?.")
tf.app.flags.DEFINE_integer("ps_tasks", 0, "Number of ps tasks used.")
tf.app.flags.DEFINE_string("master", "", "Name of the TensorFlow master.")
FLAGS = tf.app.flags.FLAGS
EXTRA_EVAL = 10
EVAL_LEN_INCR = 8
MAXLEN_F = 2.0
def zero_split(tok_list, append=None):
"""Split tok_list (list of ints) on 0s, append int to all parts if given."""
res, cur, l = [], [], 0
for tok in tok_list:
if tok == 0:
if append is not None:
cur.append(append)
res.append(cur)
l = max(l, len(cur))
cur = []
else:
cur.append(tok)
if append is not None:
cur.append(append)
res.append(cur)
l = max(l, len(cur))
return res, l
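# Illustrative examples of zero_split (values traced from the code above):
#   zero_split([4, 7, 0, 9]) -> ([[4, 7], [9]], 2)
#   zero_split([4, 7, 0, 9], append=2) -> ([[4, 7, 2], [9, 2]], 3)
# i.e. the input is split on the 0 separator token and the second return value
# is the length of the longest piece (after the optional append).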
def read_data(source_path, target_path, buckets, max_size=None, print_out=True):
"""Read data from source and target files and put into buckets.
Args:
source_path: path to the files with token-ids for the source language.
target_path: path to the file with token-ids for the target language;
it must be aligned with the source file: n-th line contains the desired
output for n-th line from the source_path.
buckets: the buckets to use.
max_size: maximum number of lines to read, all other will be ignored;
if 0 or None, data files will be read completely (no limit).
If set to 1, no data will be returned (empty lists of the right form).
print_out: whether to print out status or not.
Returns:
    data_set: a list of length len(buckets); data_set[n] contains a list of
      (source, target) pairs read from the provided data files that fit
      into the n-th bucket, i.e., such that len(source) <= buckets[n] and
      len(target) <= buckets[n]; source and target are lists of token-ids.
"""
data_set = [[] for _ in buckets]
counter = 0
if max_size != 1:
with tf.gfile.GFile(source_path, mode="r") as source_file:
with tf.gfile.GFile(target_path, mode="r") as target_file:
source, target = source_file.readline(), target_file.readline()
while source and target and (not max_size or counter < max_size):
counter += 1
if counter % 100000 == 0 and print_out:
print " reading data line %d" % counter
sys.stdout.flush()
source_ids = [int(x) for x in source.split()]
target_ids = [int(x) for x in target.split()]
source_ids, source_len = zero_split(source_ids)
target_ids, target_len = zero_split(target_ids, append=wmt.EOS_ID)
for bucket_id, size in enumerate(buckets):
if source_len <= size and target_len <= size:
data_set[bucket_id].append([source_ids, target_ids])
break
source, target = source_file.readline(), target_file.readline()
return data_set
global_train_set = {"wmt": []}
train_buckets_scale = {"wmt": []}
def calculate_buckets_scale(data_set, buckets, problem):
"""Calculate buckets scales for the given data set."""
train_bucket_sizes = [len(data_set[b]) for b in xrange(len(buckets))]
train_total_size = max(1, float(sum(train_bucket_sizes)))
# A bucket scale is a list of increasing numbers from 0 to 1 that we'll use
# to select a bucket. Length of [scale[i], scale[i+1]] is proportional to
  # the size of the i-th training bucket, as used later.
if problem not in train_buckets_scale:
train_buckets_scale[problem] = []
train_buckets_scale[problem].append(
[sum(train_bucket_sizes[:i + 1]) / train_total_size
for i in xrange(len(train_bucket_sizes))])
return train_total_size
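# Example of the bucket scale (values follow from the code above): bucket
# sizes [10, 30, 60] give a total of 100 and a cumulative scale of
# [0.1, 0.4, 1.0]; get_bucket_id below draws a uniform number in [0, 1] and
# picks the first bucket whose scale entry exceeds it, so buckets are chosen
# roughly in proportion to how much training data they hold.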
def read_data_into_global(source_path, target_path, buckets,
max_size=None, print_out=True):
"""Read data into the global variables (can be in a separate thread)."""
# pylint: disable=global-variable-not-assigned
global global_train_set, train_buckets_scale
# pylint: enable=global-variable-not-assigned
data_set = read_data(source_path, target_path, buckets, max_size, print_out)
global_train_set["wmt"].append(data_set)
train_total_size = calculate_buckets_scale(data_set, buckets, "wmt")
if print_out:
print " Finished global data reading (%d)." % train_total_size
def initialize(sess=None):
"""Initialize data and model."""
global MAXLEN_F
# Create training directory if it does not exist.
if not tf.gfile.IsDirectory(FLAGS.train_dir):
data.print_out("Creating training directory %s." % FLAGS.train_dir)
tf.gfile.MkDir(FLAGS.train_dir)
decode_suffix = "beam%dln%d" % (FLAGS.beam_size,
int(100 * FLAGS.length_norm))
if FLAGS.mode == 0:
decode_suffix = ""
if FLAGS.task >= 0:
data.log_filename = os.path.join(FLAGS.train_dir,
"log%d%s" % (FLAGS.task, decode_suffix))
else:
data.log_filename = os.path.join(FLAGS.train_dir, "neural_gpu/log")
# Set random seed.
if FLAGS.random_seed > 0:
seed = FLAGS.random_seed + max(0, FLAGS.task)
tf.set_random_seed(seed)
random.seed(seed)
np.random.seed(seed)
# Check data sizes.
assert data.bins
max_length = min(FLAGS.max_length, data.bins[-1])
while len(data.bins) > 1 and data.bins[-2] >= max_length + EXTRA_EVAL:
data.bins = data.bins[:-1]
if sess is None and FLAGS.task == 0 and FLAGS.num_replicas > 1:
if max_length > 60:
max_length = max_length * 1 / 2 # Save memory on chief.
min_length = min(14, max_length - 3) if FLAGS.problem == "wmt" else 3
for p in FLAGS.problem.split("-"):
if p in ["progeval", "progsynth"]:
min_length = max(26, min_length)
assert max_length + 1 > min_length
while len(data.bins) > 1 and data.bins[-2] >= max_length + EXTRA_EVAL:
data.bins = data.bins[:-1]
# Create checkpoint directory if it does not exist.
if FLAGS.mode == 0 or FLAGS.task < 0:
checkpoint_dir = os.path.join(FLAGS.train_dir, "neural_gpu%s"
% ("" if FLAGS.task < 0 else str(FLAGS.task)))
else:
checkpoint_dir = FLAGS.train_dir
if not tf.gfile.IsDirectory(checkpoint_dir):
data.print_out("Creating checkpoint directory %s." % checkpoint_dir)
tf.gfile.MkDir(checkpoint_dir)
# Prepare data.
if FLAGS.problem == "wmt":
# Prepare WMT data.
data.print_out("Preparing WMT data in %s" % FLAGS.data_dir)
if FLAGS.simple_tokenizer:
MAXLEN_F = 3.5
(en_train, fr_train, en_dev, fr_dev,
en_path, fr_path) = wmt.prepare_wmt_data(
FLAGS.data_dir, FLAGS.vocab_size,
tokenizer=wmt.space_tokenizer,
normalize_digits=FLAGS.normalize_digits)
else:
(en_train, fr_train, en_dev, fr_dev,
en_path, fr_path) = wmt.prepare_wmt_data(
FLAGS.data_dir, FLAGS.vocab_size)
# Read data into buckets and compute their sizes.
fr_vocab, rev_fr_vocab = wmt.initialize_vocabulary(fr_path)
data.vocab = fr_vocab
data.rev_vocab = rev_fr_vocab
data.print_out("Reading development and training data (limit: %d)."
% FLAGS.max_train_data_size)
dev_set = {}
dev_set["wmt"] = read_data(en_dev, fr_dev, data.bins)
def data_read(size, print_out):
read_data_into_global(en_train, fr_train, data.bins, size, print_out)
data_read(50000, False)
read_thread_small = threading.Thread(
name="reading-data-small", target=lambda: data_read(900000, False))
read_thread_small.start()
read_thread_full = threading.Thread(
name="reading-data-full",
target=lambda: data_read(FLAGS.max_train_data_size, True))
read_thread_full.start()
data.print_out("Data reading set up.")
else:
# Prepare algorithmic data.
en_path, fr_path = None, None
tasks = FLAGS.problem.split("-")
data_size = FLAGS.train_data_size
for t in tasks:
data.print_out("Generating data for %s." % t)
if t in ["progeval", "progsynth"]:
data.init_data(t, data.bins[-1], 20 * data_size, FLAGS.vocab_size)
if len(program_utils.prog_vocab) > FLAGS.vocab_size - 2:
raise ValueError("Increase vocab_size to %d for prog-tasks."
% (len(program_utils.prog_vocab) + 2))
data.rev_vocab = program_utils.prog_vocab
data.vocab = program_utils.prog_rev_vocab
else:
for l in xrange(max_length + EXTRA_EVAL - 1):
data.init_data(t, l, data_size, FLAGS.vocab_size)
data.init_data(t, data.bins[-2], data_size, FLAGS.vocab_size)
data.init_data(t, data.bins[-1], data_size, FLAGS.vocab_size)
if t not in global_train_set:
global_train_set[t] = []
global_train_set[t].append(data.train_set[t])
calculate_buckets_scale(data.train_set[t], data.bins, t)
dev_set = data.test_set
# Grid-search parameters.
lr = FLAGS.lr
init_weight = FLAGS.init_weight
max_grad_norm = FLAGS.max_grad_norm
if sess is not None and FLAGS.task > -1:
def job_id_factor(step):
"""If jobid / step mod 3 is 0, 1, 2: say 0, 1, -1."""
return ((((FLAGS.task / step) % 3) + 1) % 3) - 1
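    # The factor above maps (task // step) % 3 == 0, 1, 2 to 0, 1, -1, so for
    # example lr is multiplied by 1, 2 or 0.5 depending on the task id; this
    # gives a small hyperparameter grid search spread across replicas.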
lr *= math.pow(2, job_id_factor(1))
init_weight *= math.pow(1.5, job_id_factor(3))
max_grad_norm *= math.pow(2, job_id_factor(9))
# Print out parameters.
curriculum = FLAGS.curriculum_seq
msg1 = ("layers %d kw %d h %d kh %d batch %d noise %.2f"
% (FLAGS.nconvs, FLAGS.kw, FLAGS.height, FLAGS.kh,
FLAGS.batch_size, FLAGS.grad_noise_scale))
msg2 = ("cut %.2f lr %.3f iw %.2f cr %.2f nm %d d%.4f gn %.2f %s"
% (FLAGS.cutoff, lr, init_weight, curriculum, FLAGS.nmaps,
FLAGS.dropout, max_grad_norm, msg1))
data.print_out(msg2)
# Create model and initialize it.
tf.get_variable_scope().set_initializer(
tf.orthogonal_initializer(gain=1.8 * init_weight))
max_sampling_rate = FLAGS.max_sampling_rate if FLAGS.mode == 0 else 0.0
o = FLAGS.vocab_size if FLAGS.max_target_vocab < 1 else FLAGS.max_target_vocab
ngpu.CHOOSE_K = FLAGS.soft_mem_size
do_beam_model = FLAGS.train_beam_freq > 0.0001 and FLAGS.beam_size > 1
beam_size = FLAGS.beam_size if FLAGS.mode > 0 and not do_beam_model else 1
beam_size = min(beam_size, FLAGS.beam_size)
beam_model = None
def make_ngpu(cur_beam_size, back):
return ngpu.NeuralGPU(
FLAGS.nmaps, FLAGS.vec_size, FLAGS.vocab_size, o,
FLAGS.dropout, max_grad_norm, FLAGS.cutoff, FLAGS.nconvs,
FLAGS.kw, FLAGS.kh, FLAGS.height, FLAGS.mem_size,
lr / math.sqrt(FLAGS.num_replicas), min_length + 3, FLAGS.num_gpus,
FLAGS.num_replicas, FLAGS.grad_noise_scale, max_sampling_rate,
atrous=FLAGS.atrous, do_rnn=FLAGS.rnn_baseline,
do_layer_norm=FLAGS.layer_norm, beam_size=cur_beam_size, backward=back)
if sess is None:
with tf.device(tf.train.replica_device_setter(FLAGS.ps_tasks)):
model = make_ngpu(beam_size, True)
if do_beam_model:
tf.get_variable_scope().reuse_variables()
beam_model = make_ngpu(FLAGS.beam_size, False)
else:
model = make_ngpu(beam_size, True)
if do_beam_model:
tf.get_variable_scope().reuse_variables()
beam_model = make_ngpu(FLAGS.beam_size, False)
sv = None
if sess is None:
    # The supervisor configuration has a few overridden options.
sv = tf.train.Supervisor(logdir=checkpoint_dir,
is_chief=(FLAGS.task < 1),
saver=model.saver,
summary_op=None,
save_summaries_secs=60,
save_model_secs=15 * 60,
global_step=model.global_step)
config = tf.ConfigProto(allow_soft_placement=True)
sess = sv.PrepareSession(FLAGS.master, config=config)
data.print_out("Created model. Checkpoint dir %s" % checkpoint_dir)
# Load model from parameters if a checkpoint exists.
ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
if ckpt and tf.gfile.Exists(ckpt.model_checkpoint_path + ".index"):
data.print_out("Reading model parameters from %s"
% ckpt.model_checkpoint_path)
model.saver.restore(sess, ckpt.model_checkpoint_path)
elif sv is None:
sess.run(tf.initialize_all_variables())
data.print_out("Initialized variables (no supervisor mode).")
elif FLAGS.task < 1 and FLAGS.mem_size > 0:
# sess.run(model.mem_norm_op)
data.print_out("Created new model and normalized mem (on chief).")
# Return the model and needed variables.
return (model, beam_model, min_length, max_length, checkpoint_dir,
(global_train_set, dev_set, en_path, fr_path), sv, sess)
def m_step(model, beam_model, sess, batch_size, inp, target, bucket, nsteps, p):
"""Evaluation multi-step for program synthesis."""
state, scores, hist = None, [[-11.0 for _ in xrange(batch_size)]], []
for _ in xrange(nsteps):
# Get the best beam (no training, just forward model).
new_target, new_first, new_inp, new_scores = get_best_beam(
beam_model, sess, inp, target,
batch_size, FLAGS.beam_size, bucket, hist, p, test_mode=True)
hist.append(new_first)
_, _, _, state = model.step(sess, inp, new_target, False, state=state)
inp = new_inp
scores.append([max(scores[-1][i], new_scores[i])
for i in xrange(batch_size)])
# The final step with the true target.
loss, res, _, _ = model.step(sess, inp, target, False, state=state)
return loss, res, new_target, scores[1:]
def single_test(bin_id, model, sess, nprint, batch_size, dev, p, print_out=True,
offset=None, beam_model=None):
"""Test model on test data of length l using the given session."""
if not dev[p][bin_id]:
data.print_out(" bin %d (%d)\t%s\tppl NA errors NA seq-errors NA"
% (bin_id, data.bins[bin_id], p))
return 1.0, 1.0, 0.0
inpt, target = data.get_batch(
bin_id, batch_size, dev[p], FLAGS.height, offset)
if FLAGS.beam_size > 1 and beam_model:
loss, res, new_tgt, scores = m_step(
model, beam_model, sess, batch_size, inpt, target, bin_id,
FLAGS.eval_beam_steps, p)
score_avgs = [sum(s) / float(len(s)) for s in scores]
score_maxs = [max(s) for s in scores]
score_str = ["(%.2f, %.2f)" % (score_avgs[i], score_maxs[i])
for i in xrange(FLAGS.eval_beam_steps)]
data.print_out(" == scores (avg, max): %s" % "; ".join(score_str))
errors, total, seq_err = data.accuracy(inpt, res, target, batch_size,
nprint, new_tgt, scores[-1])
else:
loss, res, _, _ = model.step(sess, inpt, target, False)
errors, total, seq_err = data.accuracy(inpt, res, target, batch_size,
nprint)
seq_err = float(seq_err) / batch_size
if total > 0:
errors = float(errors) / total
if print_out:
data.print_out(" bin %d (%d)\t%s\tppl %.2f errors %.2f seq-errors %.2f"
% (bin_id, data.bins[bin_id], p, data.safe_exp(loss),
100 * errors, 100 * seq_err))
return (errors, seq_err, loss)
def assign_vectors(word_vector_file, embedding_key, vocab_path, sess):
"""Assign the embedding_key variable from the given word vectors file."""
# For words in the word vector file, set their embedding at start.
if not tf.gfile.Exists(word_vector_file):
data.print_out("Word vector file does not exist: %s" % word_vector_file)
sys.exit(1)
vocab, _ = wmt.initialize_vocabulary(vocab_path)
vectors_variable = [v for v in tf.trainable_variables()
if embedding_key == v.name]
if len(vectors_variable) != 1:
data.print_out("Word vector variable not found or too many.")
sys.exit(1)
vectors_variable = vectors_variable[0]
vectors = vectors_variable.eval()
data.print_out("Pre-setting word vectors from %s" % word_vector_file)
with tf.gfile.GFile(word_vector_file, mode="r") as f:
# Lines have format: dog 0.045123 -0.61323 0.413667 ...
for line in f:
line_parts = line.split()
# The first part is the word.
word = line_parts[0]
if word in vocab:
# Remaining parts are components of the vector.
word_vector = np.array(map(float, line_parts[1:]))
if len(word_vector) != FLAGS.vec_size:
data.print_out("Warn: Word '%s', Expecting vector size %d, "
"found %d" % (word, FLAGS.vec_size,
len(word_vector)))
else:
vectors[vocab[word]] = word_vector
# Assign the modified vectors to the vectors_variable in the graph.
sess.run([vectors_variable.initializer],
{vectors_variable.initializer.inputs[1]: vectors})
def print_vectors(embedding_key, vocab_path, word_vector_file):
"""Print vectors from the given variable."""
_, rev_vocab = wmt.initialize_vocabulary(vocab_path)
vectors_variable = [v for v in tf.trainable_variables()
if embedding_key == v.name]
if len(vectors_variable) != 1:
data.print_out("Word vector variable not found or too many.")
sys.exit(1)
vectors_variable = vectors_variable[0]
vectors = vectors_variable.eval()
l, s = vectors.shape[0], vectors.shape[1]
data.print_out("Printing %d word vectors from %s to %s."
% (l, embedding_key, word_vector_file))
with tf.gfile.GFile(word_vector_file, mode="w") as f:
# Lines have format: dog 0.045123 -0.61323 0.413667 ...
for i in xrange(l):
f.write(rev_vocab[i])
for j in xrange(s):
f.write(" %.8f" % vectors[i][j])
f.write("\n")
def get_bucket_id(train_buckets_scale_c, max_cur_length, data_set):
"""Get a random bucket id."""
# Choose a bucket according to data distribution. Pick a random number
# in [0, 1] and use the corresponding interval in train_buckets_scale.
random_number_01 = np.random.random_sample()
bucket_id = min([i for i in xrange(len(train_buckets_scale_c))
if train_buckets_scale_c[i] > random_number_01])
while bucket_id > 0 and not data_set[bucket_id]:
bucket_id -= 1
for _ in xrange(10 if np.random.random_sample() < 0.9 else 1):
if data.bins[bucket_id] > max_cur_length:
random_number_01 = min(random_number_01, np.random.random_sample())
bucket_id = min([i for i in xrange(len(train_buckets_scale_c))
if train_buckets_scale_c[i] > random_number_01])
while bucket_id > 0 and not data_set[bucket_id]:
bucket_id -= 1
return bucket_id
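# In short, get_bucket_id samples a bucket in proportion to its data size,
# then re-draws up to 10 times (only once, about 10% of the time) while the
# sampled bin is longer than max_cur_length, keeping the smaller draw each
# time, which biases training toward lengths allowed by the curriculum.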
def score_beams(beams, target, inp, history, p,
print_out=False, test_mode=False):
"""Score beams."""
if p == "progsynth":
return score_beams_prog(beams, target, inp, history, print_out, test_mode)
elif test_mode:
return beams[0], 10.0 if str(beams[0][:len(target)]) == str(target) else 0.0
else:
history_s = [str(h) for h in history]
best, best_score, tgt, eos_id = None, -1000.0, target, None
if p == "wmt":
eos_id = wmt.EOS_ID
if eos_id and eos_id in target:
tgt = target[:target.index(eos_id)]
for beam in beams:
if eos_id and eos_id in beam:
beam = beam[:beam.index(eos_id)]
l = min(len(tgt), len(beam))
score = len([i for i in xrange(l) if tgt[i] == beam[i]]) / float(len(tgt))
hist_score = 20.0 if str([b for b in beam if b > 0]) in history_s else 0.0
if score < 1.0:
score -= hist_score
if score > best_score:
best = beam
best_score = score
return best, best_score
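# Example of the scoring above: with target [5, 6, 7] and beam [5, 6, 9] the
# per-position match fraction is 2/3; beams already seen in `history` get an
# extra -20 penalty unless their match fraction is already 1.0, which pushes
# the multi-step decoding toward hypotheses it has not tried before.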
def score_beams_prog(beams, target, inp, history, print_out=False,
test_mode=False):
"""Score beams for program synthesis."""
tgt_prog = linearize(target, program_utils.prog_vocab, True, 1)
hist_progs = [linearize(h, program_utils.prog_vocab, True, 1)
for h in history]
tgt_set = set(target)
if print_out:
print "target: ", tgt_prog
inps, tgt_outs = [], []
for i in xrange(3):
ilist = [inp[i + 1, l] for l in xrange(inp.shape[1])]
clist = [program_utils.prog_vocab[x] for x in ilist if x > 0]
olist = clist[clist.index("]") + 1:] # outputs
clist = clist[1:clist.index("]")] # inputs
inps.append([int(x) for x in clist])
if olist[0] == "[": # olist may be [int] or just int
tgt_outs.append(str([int(x) for x in olist[1:-1]]))
else:
if len(olist) == 1:
tgt_outs.append(olist[0])
else:
print [program_utils.prog_vocab[x] for x in ilist if x > 0]
print olist
print tgt_prog
print program_utils.evaluate(tgt_prog, {"a": inps[-1]})
print "AAAAA"
tgt_outs.append(olist[0])
if not test_mode:
for _ in xrange(7):
ilen = np.random.randint(len(target) - 3) + 1
inps.append([random.choice(range(-15, 15)) for _ in range(ilen)])
tgt_outs.extend([program_utils.evaluate(tgt_prog, {"a": inp})
for inp in inps[3:]])
best, best_prog, best_score = None, "", -1000.0
for beam in beams:
b_prog = linearize(beam, program_utils.prog_vocab, True, 1)
b_set = set(beam)
jsim = len(tgt_set & b_set) / float(len(tgt_set | b_set))
b_outs = [program_utils.evaluate(b_prog, {"a": inp}) for inp in inps]
errs = len([x for x in b_outs if x == "ERROR"])
imatches = len([i for i in xrange(3) if b_outs[i] == tgt_outs[i]])
perfect = 10.0 if imatches == 3 else 0.0
hist_score = 20.0 if b_prog in hist_progs else 0.0
if test_mode:
score = perfect - errs
else:
matches = len([i for i in xrange(10) if b_outs[i] == tgt_outs[i]])
score = perfect + matches + jsim - errs
if score < 10.0:
score -= hist_score
# print b_prog
# print "jsim: ", jsim, " errs: ", errs, " mtchs: ", matches, " s: ", score
if score > best_score:
best = beam
best_prog = b_prog
best_score = score
if print_out:
print "best score: ", best_score, " best prog: ", best_prog
return best, best_score
def get_best_beam(beam_model, sess, inp, target, batch_size, beam_size,
bucket, history, p, test_mode=False):
"""Run beam_model, score beams, and return the best as target and in input."""
_, output_logits, _, _ = beam_model.step(
sess, inp, target, None, beam_size=FLAGS.beam_size)
new_targets, new_firsts, scores, new_inp = [], [], [], np.copy(inp)
for b in xrange(batch_size):
outputs = []
history_b = [[h[b, 0, l] for l in xrange(data.bins[bucket])]
for h in history]
for beam_idx in xrange(beam_size):
outputs.append([int(o[beam_idx * batch_size + b])
for o in output_logits])
target_t = [target[b, 0, l] for l in xrange(data.bins[bucket])]
best, best_score = score_beams(
outputs, [t for t in target_t if t > 0], inp[b, :, :],
[[t for t in h if t > 0] for h in history_b], p, test_mode=test_mode)
scores.append(best_score)
if 1 in best: # Only until _EOS.
best = best[:best.index(1) + 1]
best += [0 for _ in xrange(len(target_t) - len(best))]
new_targets.append([best])
first, _ = score_beams(
outputs, [t for t in target_t if t > 0], inp[b, :, :],
[[t for t in h if t > 0] for h in history_b], p, test_mode=True)
if 1 in first: # Only until _EOS.
first = first[:first.index(1) + 1]
first += [0 for _ in xrange(len(target_t) - len(first))]
new_inp[b, 0, :] = np.array(first, dtype=np.int32)
new_firsts.append([first])
# Change target if we found a great answer.
new_target = np.array(new_targets, dtype=np.int32)
for b in xrange(batch_size):
if scores[b] >= 10.0:
target[b, 0, :] = new_target[b, 0, :]
new_first = np.array(new_firsts, dtype=np.int32)
return new_target, new_first, new_inp, scores
def train():
"""Train the model."""
batch_size = FLAGS.batch_size * FLAGS.num_gpus
(model, beam_model, min_length, max_length, checkpoint_dir,
(train_set, dev_set, en_vocab_path, fr_vocab_path), sv, sess) = initialize()
with sess.as_default():
quant_op = model.quantize_op
max_cur_length = min(min_length + 3, max_length)
prev_acc_perp = [1000000 for _ in xrange(5)]
prev_seq_err = 1.0
is_chief = FLAGS.task < 1
do_report = False
    # Main training loop.
while not sv.ShouldStop():
global_step, max_cur_length, learning_rate = sess.run(
[model.global_step, model.cur_length, model.lr])
acc_loss, acc_l1, acc_total, acc_errors, acc_seq_err = 0.0, 0.0, 0, 0, 0
acc_grad_norm, step_count, step_c1, step_time = 0.0, 0, 0, 0.0
# For words in the word vector file, set their embedding at start.
bound1 = FLAGS.steps_per_checkpoint - 1
if FLAGS.word_vector_file_en and global_step < bound1 and is_chief:
assign_vectors(FLAGS.word_vector_file_en, "embedding:0",
en_vocab_path, sess)
if FLAGS.max_target_vocab < 1:
assign_vectors(FLAGS.word_vector_file_en, "target_embedding:0",
en_vocab_path, sess)
if FLAGS.word_vector_file_fr and global_step < bound1 and is_chief:
assign_vectors(FLAGS.word_vector_file_fr, "embedding:0",
fr_vocab_path, sess)
if FLAGS.max_target_vocab < 1:
assign_vectors(FLAGS.word_vector_file_fr, "target_embedding:0",
fr_vocab_path, sess)
for _ in xrange(FLAGS.steps_per_checkpoint):
step_count += 1
step_c1 += 1
global_step = int(model.global_step.eval())
train_beam_anneal = global_step / float(FLAGS.train_beam_anneal)
train_beam_freq = FLAGS.train_beam_freq * min(1.0, train_beam_anneal)
p = random.choice(FLAGS.problem.split("-"))
train_set = global_train_set[p][-1]
bucket_id = get_bucket_id(train_buckets_scale[p][-1], max_cur_length,
train_set)
# Prefer longer stuff 60% of time if not wmt.
if np.random.randint(100) < 60 and FLAGS.problem != "wmt":
bucket1 = get_bucket_id(train_buckets_scale[p][-1], max_cur_length,
train_set)
bucket_id = max(bucket1, bucket_id)
# Run a step and time it.
start_time = time.time()
inp, target = data.get_batch(bucket_id, batch_size, train_set,
FLAGS.height)
noise_param = math.sqrt(math.pow(global_step + 1, -0.55) *
prev_seq_err) * FLAGS.grad_noise_scale
# In multi-step mode, we use best from beam for middle steps.
state, new_target, scores, history = None, None, None, []
while (FLAGS.beam_size > 1 and
train_beam_freq > np.random.random_sample()):
# Get the best beam (no training, just forward model).
new_target, new_first, new_inp, scores = get_best_beam(
beam_model, sess, inp, target,
batch_size, FLAGS.beam_size, bucket_id, history, p)
history.append(new_first)
# Training step with the previous input and the best beam as target.
_, _, _, state = model.step(sess, inp, new_target, FLAGS.do_train,
noise_param, update_mem=True, state=state)
# Change input to the new one for the next step.
inp = new_inp
# If all results are great, stop (todo: not to wait for all?).
if FLAGS.nprint > 1:
print scores
if sum(scores) / float(len(scores)) >= 10.0:
break
# The final step with the true target.
loss, res, gnorm, _ = model.step(
sess, inp, target, FLAGS.do_train, noise_param,
update_mem=True, state=state)
step_time += time.time() - start_time
acc_grad_norm += 0.0 if gnorm is None else float(gnorm)
# Accumulate statistics.
acc_loss += loss
acc_l1 += loss
errors, total, seq_err = data.accuracy(
inp, res, target, batch_size, 0, new_target, scores)
if FLAGS.nprint > 1:
print "seq_err: ", seq_err
acc_total += total
acc_errors += errors
acc_seq_err += seq_err
# Report summary every 10 steps.
if step_count + 3 > FLAGS.steps_per_checkpoint:
          do_report = True  # Don't pollute the plot too early.
if is_chief and step_count % 10 == 1 and do_report:
cur_loss = acc_l1 / float(step_c1)
acc_l1, step_c1 = 0.0, 0
cur_perp = data.safe_exp(cur_loss)
summary = tf.Summary()
summary.value.extend(
[tf.Summary.Value(tag="log_perplexity", simple_value=cur_loss),
tf.Summary.Value(tag="perplexity", simple_value=cur_perp)])
sv.SummaryComputed(sess, summary, global_step)
# Normalize and print out accumulated statistics.
acc_loss /= step_count
step_time /= FLAGS.steps_per_checkpoint
acc_seq_err = float(acc_seq_err) / (step_count * batch_size)
prev_seq_err = max(0.0, acc_seq_err - 0.02) # No noise at error < 2%.
acc_errors = float(acc_errors) / acc_total if acc_total > 0 else 1.0
t_size = float(sum([len(x) for x in train_set])) / float(1000000)
msg = ("step %d step-time %.2f train-size %.3f lr %.6f grad-norm %.4f"
% (global_step + 1, step_time, t_size, learning_rate,
acc_grad_norm / FLAGS.steps_per_checkpoint))
data.print_out("%s len %d ppl %.6f errors %.2f sequence-errors %.2f" %
(msg, max_cur_length, data.safe_exp(acc_loss),
100*acc_errors, 100*acc_seq_err))
# If errors are below the curriculum threshold, move curriculum forward.
is_good = FLAGS.curriculum_ppx > data.safe_exp(acc_loss)
is_good = is_good and FLAGS.curriculum_seq > acc_seq_err
if is_good and is_chief:
if FLAGS.quantize:
# Quantize weights.
data.print_out(" Quantizing parameters.")
sess.run([quant_op])
# Increase current length (until the next with training data).
sess.run(model.cur_length_incr_op)
# Forget last perplexities if we're not yet at the end.
if max_cur_length < max_length:
prev_acc_perp.append(1000000)
# Lower learning rate if we're worse than the last 5 checkpoints.
acc_perp = data.safe_exp(acc_loss)
if acc_perp > max(prev_acc_perp[-5:]) and is_chief:
sess.run(model.lr_decay_op)
prev_acc_perp.append(acc_perp)
# Save checkpoint.
if is_chief:
checkpoint_path = os.path.join(checkpoint_dir, "neural_gpu.ckpt")
model.saver.save(sess, checkpoint_path,
global_step=model.global_step)
# Run evaluation.
bin_bound = 4
for p in FLAGS.problem.split("-"):
total_loss, total_err, tl_counter = 0.0, 0.0, 0
for bin_id in xrange(len(data.bins)):
if bin_id < bin_bound or bin_id % FLAGS.eval_bin_print == 1:
err, _, loss = single_test(bin_id, model, sess, FLAGS.nprint,
batch_size * 4, dev_set, p,
beam_model=beam_model)
if loss > 0.0:
total_loss += loss
total_err += err
tl_counter += 1
test_loss = total_loss / max(1, tl_counter)
test_err = total_err / max(1, tl_counter)
test_perp = data.safe_exp(test_loss)
summary = tf.Summary()
summary.value.extend(
[tf.Summary.Value(tag="test/%s/loss" % p, simple_value=test_loss),
tf.Summary.Value(tag="test/%s/error" % p, simple_value=test_err),
tf.Summary.Value(tag="test/%s/perplexity" % p,
simple_value=test_perp)])
sv.SummaryComputed(sess, summary, global_step)
def linearize(output, rev_fr_vocab, simple_tokenizer=None, eos_id=wmt.EOS_ID):
  # If there is an EOS symbol in the output, cut it at that point (WMT).
if eos_id in output:
output = output[:output.index(eos_id)]
# Print out French sentence corresponding to outputs.
if simple_tokenizer or FLAGS.simple_tokenizer:
vlen = len(rev_fr_vocab)
def vget(o):
if o < vlen:
return rev_fr_vocab[o]
return "UNK"
return " ".join([vget(o) for o in output])
else:
return wmt.basic_detokenizer([rev_fr_vocab[o] for o in output])
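# linearize above maps decoder output ids back to text: it cuts at EOS, looks
# ids up in the reversed target vocabulary (out-of-range ids become "UNK" with
# the simple tokenizer) and either joins on spaces or runs the WMT
# detokenizer; evaluate() and interactive() below rely on it for their output.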
def evaluate():
"""Evaluate an existing model."""
batch_size = FLAGS.batch_size * FLAGS.num_gpus
with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
(model, beam_model, _, _, _,
(_, dev_set, en_vocab_path, fr_vocab_path), _, sess) = initialize(sess)
for p in FLAGS.problem.split("-"):
for bin_id in xrange(len(data.bins)):
if (FLAGS.task >= 0 and bin_id > 4) or (FLAGS.nprint == 0 and
bin_id > 8 and p == "wmt"):
break
single_test(bin_id, model, sess, FLAGS.nprint, batch_size, dev_set, p,
beam_model=beam_model)
path = FLAGS.test_file_prefix
xid = "" if FLAGS.task < 0 else ("%.4d" % (FLAGS.task+FLAGS.decode_offset))
en_path, fr_path = path + ".en" + xid, path + ".fr" + xid
    # Evaluate the test files if they exist.
if path and tf.gfile.Exists(en_path) and tf.gfile.Exists(fr_path):
data.print_out("Translating test set %s" % en_path)
# Read lines.
en_lines, fr_lines = [], []
with tf.gfile.GFile(en_path, mode="r") as f:
for line in f:
en_lines.append(line.strip())
with tf.gfile.GFile(fr_path, mode="r") as f:
for line in f:
fr_lines.append(line.strip())
# Tokenize and convert to ids.
en_vocab, _ = wmt.initialize_vocabulary(en_vocab_path)
_, rev_fr_vocab = wmt.initialize_vocabulary(fr_vocab_path)
if FLAGS.simple_tokenizer:
en_ids = [wmt.sentence_to_token_ids(
l, en_vocab, tokenizer=wmt.space_tokenizer,
normalize_digits=FLAGS.normalize_digits)
for l in en_lines]
else:
en_ids = [wmt.sentence_to_token_ids(l, en_vocab) for l in en_lines]
# Translate.
results = []
for idx, token_ids in enumerate(en_ids):
if idx % 5 == 0:
data.print_out("Translating example %d of %d." % (idx, len(en_ids)))
# Which bucket does it belong to?
buckets = [b for b in xrange(len(data.bins))
if data.bins[b] >= len(token_ids)]
if buckets:
result, result_cost = [], 100000000.0
for bucket_id in buckets:
if data.bins[bucket_id] > MAXLEN_F * len(token_ids) + EVAL_LEN_INCR:
break
# Get a 1-element batch to feed the sentence to the model.
used_batch_size = 1 # batch_size
inp, target = data.get_batch(
bucket_id, used_batch_size, None, FLAGS.height,
preset=([token_ids], [[]]))
loss, output_logits, _, _ = model.step(
sess, inp, target, None, beam_size=FLAGS.beam_size)
outputs = [int(o[0]) for o in output_logits]
loss = loss[0] - (data.bins[bucket_id] * FLAGS.length_norm)
if FLAGS.simple_tokenizer:
cur_out = outputs
if wmt.EOS_ID in cur_out:
cur_out = cur_out[:cur_out.index(wmt.EOS_ID)]
res_tags = [rev_fr_vocab[o] for o in cur_out]
bad_words, bad_brack = wmt.parse_constraints(token_ids, res_tags)
loss += 1000.0 * bad_words + 100.0 * bad_brack
# print (bucket_id, loss)
if loss < result_cost:
result = outputs
result_cost = loss
final = linearize(result, rev_fr_vocab)
results.append("%s\t%s\n" % (final, fr_lines[idx]))
# print result_cost
sys.stderr.write(results[-1])
sys.stderr.flush()
else:
sys.stderr.write("TOOO_LONG\t%s\n" % fr_lines[idx])
sys.stderr.flush()
if xid:
decode_suffix = "beam%dln%dn" % (FLAGS.beam_size,
int(100 * FLAGS.length_norm))
with tf.gfile.GFile(path + ".res" + decode_suffix + xid, mode="w") as f:
for line in results:
f.write(line)
def mul(l):
res = 1.0
for s in l:
res *= s
return res
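# mul above is a plain product helper, e.g. mul([64, 3, 3]) == 576.0; it is
# used in interactive() below to count the parameters in each variable shape.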
def interactive():
"""Interactively probe an existing model."""
with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
# Initialize model.
(model, _, _, _, _, (_, _, en_path, fr_path), _, _) = initialize(sess)
# Load vocabularies.
en_vocab, rev_en_vocab = wmt.initialize_vocabulary(en_path)
_, rev_fr_vocab = wmt.initialize_vocabulary(fr_path)
# Print out vectors and variables.
if FLAGS.nprint > 0 and FLAGS.word_vector_file_en:
print_vectors("embedding:0", en_path, FLAGS.word_vector_file_en)
if FLAGS.nprint > 0 and FLAGS.word_vector_file_fr:
print_vectors("target_embedding:0", fr_path, FLAGS.word_vector_file_fr)
total = 0
for v in tf.trainable_variables():
shape = v.get_shape().as_list()
total += mul(shape)
print (v.name, shape, mul(shape))
print total
# Start interactive loop.
sys.stdout.write("Input to Neural GPU Translation Model.\n")
sys.stdout.write("> ")
sys.stdout.flush()
    inpt = sys.stdin.readline()
while inpt:
cures = []
# Get token-ids for the input sentence.
if FLAGS.simple_tokenizer:
token_ids = wmt.sentence_to_token_ids(
inpt, en_vocab, tokenizer=wmt.space_tokenizer,
normalize_digits=FLAGS.normalize_digits)
else:
token_ids = wmt.sentence_to_token_ids(inpt, en_vocab)
print [rev_en_vocab[t] for t in token_ids]
# Which bucket does it belong to?
buckets = [b for b in xrange(len(data.bins))
if data.bins[b] >= max(len(token_ids), len(cures))]
if cures:
buckets = [buckets[0]]
if buckets:
result, result_cost = [], 10000000.0
for bucket_id in buckets:
if data.bins[bucket_id] > MAXLEN_F * len(token_ids) + EVAL_LEN_INCR:
break
glen = 1
for gen_idx in xrange(glen):
# Get a 1-element batch to feed the sentence to the model.
inp, target = data.get_batch(
bucket_id, 1, None, FLAGS.height, preset=([token_ids], [cures]))
loss, output_logits, _, _ = model.step(
sess, inp, target, None, beam_size=FLAGS.beam_size,
update_mem=False)
# If it is a greedy decoder, outputs are argmaxes of output_logits.
if FLAGS.beam_size > 1:
outputs = [int(o) for o in output_logits]
else:
loss = loss[0] - (data.bins[bucket_id] * FLAGS.length_norm)
outputs = [int(np.argmax(logit, axis=1))
for logit in output_logits]
print [rev_fr_vocab[t] for t in outputs]
print loss, data.bins[bucket_id]
print linearize(outputs, rev_fr_vocab)
cures.append(outputs[gen_idx])
print cures
print linearize(cures, rev_fr_vocab)
if FLAGS.simple_tokenizer:
cur_out = outputs
if wmt.EOS_ID in cur_out:
cur_out = cur_out[:cur_out.index(wmt.EOS_ID)]
res_tags = [rev_fr_vocab[o] for o in cur_out]
bad_words, bad_brack = wmt.parse_constraints(token_ids, res_tags)
loss += 1000.0 * bad_words + 100.0 * bad_brack
if loss < result_cost:
result = outputs
result_cost = loss
print ("FINAL", result_cost)
print [rev_fr_vocab[t] for t in result]
print linearize(result, rev_fr_vocab)
else:
print "TOOO_LONG"
sys.stdout.write("> ")
sys.stdout.flush()
      inpt = sys.stdin.readline()
def main(_):
if FLAGS.mode == 0:
train()
elif FLAGS.mode == 1:
evaluate()
else:
interactive()
if __name__ == "__main__":
tf.app.run()
| deepakgupta1313/models | neural_gpu/neural_gpu_trainer.py | Python | apache-2.0 | 45,093 |
import os
import socket
import botocore.session
import pytest
import mock
from botocore.stub import Stubber
from botocore.vendored.requests import ConnectionError as \
RequestsConnectionError
from pytest import fixture
from chalice.app import Chalice
from chalice.awsclient import LambdaClientError, AWSClientError
from chalice.awsclient import DeploymentPackageTooLargeError
from chalice.awsclient import LambdaErrorContext
from chalice.config import Config
from chalice.policy import AppPolicyGenerator
from chalice.deploy.deployer import ChaliceDeploymentError
from chalice.utils import UI
import unittest
from attr import attrs, attrib
from chalice.awsclient import TypedAWSClient
from chalice.utils import OSUtils, serialize_to_json
from chalice.deploy import models
from chalice.deploy import packager
from chalice.deploy.deployer import create_default_deployer, \
create_deletion_deployer, Deployer, BaseDeployStep, \
InjectDefaults, DeploymentPackager, SwaggerBuilder, \
PolicyGenerator, BuildStage, ResultsRecorder, DeploymentReporter
from chalice.deploy.appgraph import ApplicationGraphBuilder, \
DependencyBuilder
from chalice.deploy.executor import Executor
from chalice.deploy.swagger import SwaggerGenerator, TemplatedSwaggerGenerator
from chalice.deploy.planner import PlanStage
from chalice.deploy.planner import StringFormat
from chalice.deploy.sweeper import ResourceSweeper
from chalice.deploy.models import APICall
from chalice.constants import VPC_ATTACH_POLICY
from chalice.constants import SQS_EVENT_SOURCE_POLICY
from chalice.constants import POST_TO_WEBSOCKET_CONNECTION_POLICY
from chalice.deploy.deployer import LambdaEventSourcePolicyInjector
from chalice.deploy.deployer import WebsocketPolicyInjector
_SESSION = None
class InMemoryOSUtils(object):
def __init__(self, filemap=None):
if filemap is None:
filemap = {}
self.filemap = filemap
def file_exists(self, filename):
return filename in self.filemap
def get_file_contents(self, filename, binary=True):
return self.filemap[filename]
def set_file_contents(self, filename, contents, binary=True):
self.filemap[filename] = contents
@fixture
def in_memory_osutils():
return InMemoryOSUtils()
def stubbed_client(service_name):
global _SESSION
if _SESSION is None:
_SESSION = botocore.session.get_session()
client = _SESSION.create_client(service_name,
region_name='us-west-2')
stubber = Stubber(client)
return client, stubber
@fixture
def config_obj(sample_app):
config = Config.create(
chalice_app=sample_app,
stage='dev',
api_gateway_stage='api',
)
return config
@fixture
def ui():
return mock.Mock(spec=UI)
class TestChaliceDeploymentError(object):
def test_general_exception(self):
general_exception = Exception('My Exception')
deploy_error = ChaliceDeploymentError(general_exception)
deploy_error_msg = str(deploy_error)
assert (
'ERROR - While deploying your chalice application'
in deploy_error_msg
)
assert 'My Exception' in deploy_error_msg
def test_lambda_client_error(self):
lambda_error = LambdaClientError(
Exception('My Exception'),
context=LambdaErrorContext(
function_name='foo',
client_method_name='create_function',
deployment_size=1024 ** 2
)
)
deploy_error = ChaliceDeploymentError(lambda_error)
deploy_error_msg = str(deploy_error)
assert (
'ERROR - While sending your chalice handler code to '
'Lambda to create function \n"foo"' in deploy_error_msg
)
assert 'My Exception' in deploy_error_msg
def test_lambda_client_error_wording_for_update(self):
lambda_error = LambdaClientError(
Exception('My Exception'),
context=LambdaErrorContext(
function_name='foo',
client_method_name='update_function_code',
deployment_size=1024 ** 2
)
)
deploy_error = ChaliceDeploymentError(lambda_error)
deploy_error_msg = str(deploy_error)
assert (
'sending your chalice handler code to '
'Lambda to update function' in deploy_error_msg
)
def test_gives_where_and_suggestion_for_too_large_deployment_error(self):
too_large_error = DeploymentPackageTooLargeError(
            Exception('Too large of deployment package'),
context=LambdaErrorContext(
function_name='foo',
client_method_name='create_function',
deployment_size=1024 ** 2,
)
)
deploy_error = ChaliceDeploymentError(too_large_error)
deploy_error_msg = str(deploy_error)
assert (
'ERROR - While sending your chalice handler code to '
'Lambda to create function \n"foo"' in deploy_error_msg
)
        assert 'Too large of deployment package' in deploy_error_msg
assert (
'To avoid this error, decrease the size of your chalice '
'application ' in deploy_error_msg
)
def test_include_size_context_for_too_large_deployment_error(self):
too_large_error = DeploymentPackageTooLargeError(
            Exception('Too large of deployment package'),
context=LambdaErrorContext(
function_name='foo',
client_method_name='create_function',
deployment_size=58 * (1024 ** 2),
)
)
deploy_error = ChaliceDeploymentError(
too_large_error)
deploy_error_msg = str(deploy_error)
print(repr(deploy_error_msg))
assert 'deployment package is 58.0 MB' in deploy_error_msg
assert '50.0 MB or less' in deploy_error_msg
assert 'To avoid this error' in deploy_error_msg
def test_error_msg_for_general_connection(self):
lambda_error = DeploymentPackageTooLargeError(
RequestsConnectionError(
Exception(
'Connection aborted.',
socket.error('Some vague reason')
)
),
context=LambdaErrorContext(
function_name='foo',
client_method_name='create_function',
deployment_size=1024 ** 2
)
)
deploy_error = ChaliceDeploymentError(lambda_error)
deploy_error_msg = str(deploy_error)
assert 'Connection aborted.' in deploy_error_msg
assert 'Some vague reason' not in deploy_error_msg
def test_simplifies_error_msg_for_broken_pipe(self):
lambda_error = DeploymentPackageTooLargeError(
RequestsConnectionError(
Exception(
'Connection aborted.',
socket.error(32, 'Broken pipe')
)
),
context=LambdaErrorContext(
function_name='foo',
client_method_name='create_function',
deployment_size=1024 ** 2
)
)
deploy_error = ChaliceDeploymentError(lambda_error)
deploy_error_msg = str(deploy_error)
assert (
'Connection aborted. Lambda closed the connection' in
deploy_error_msg
)
def test_simplifies_error_msg_for_timeout(self):
lambda_error = DeploymentPackageTooLargeError(
RequestsConnectionError(
Exception(
'Connection aborted.',
socket.timeout('The write operation timed out')
)
),
context=LambdaErrorContext(
function_name='foo',
client_method_name='create_function',
deployment_size=1024 ** 2
)
)
deploy_error = ChaliceDeploymentError(lambda_error)
deploy_error_msg = str(deploy_error)
assert (
'Connection aborted. Timed out sending your app to Lambda.' in
deploy_error_msg
)
@attrs
class FooResource(models.Model):
name = attrib()
leaf = attrib()
def dependencies(self):
if not isinstance(self.leaf, list):
return [self.leaf]
return self.leaf
@attrs
class LeafResource(models.Model):
name = attrib()
@fixture
def mock_client():
return mock.Mock(spec=TypedAWSClient)
@fixture
def mock_osutils():
return mock.Mock(spec=OSUtils)
def create_function_resource(name):
return models.LambdaFunction(
resource_name=name,
function_name='appname-dev-%s' % name,
environment_variables={},
runtime='python2.7',
handler='app.app',
tags={},
timeout=60,
memory_size=128,
deployment_package=models.DeploymentPackage(filename='foo'),
role=models.PreCreatedIAMRole(role_arn='role:arn'),
security_group_ids=[],
subnet_ids=[],
layers=[]
)
class TestDependencyBuilder(object):
def test_can_build_resource_with_single_dep(self):
role = models.PreCreatedIAMRole(role_arn='foo')
app = models.Application(stage='dev', resources=[role])
dep_builder = DependencyBuilder()
deps = dep_builder.build_dependencies(app)
assert deps == [role]
def test_can_build_resource_with_dag_deps(self):
shared_leaf = LeafResource(name='leaf-resource')
first_parent = FooResource(name='first', leaf=shared_leaf)
second_parent = FooResource(name='second', leaf=shared_leaf)
app = models.Application(
stage='dev', resources=[first_parent, second_parent])
dep_builder = DependencyBuilder()
deps = dep_builder.build_dependencies(app)
assert deps == [shared_leaf, first_parent, second_parent]
def test_is_first_element_in_list(self):
shared_leaf = LeafResource(name='leaf-resource')
first_parent = FooResource(name='first', leaf=shared_leaf)
app = models.Application(
stage='dev', resources=[first_parent, shared_leaf],
)
dep_builder = DependencyBuilder()
deps = dep_builder.build_dependencies(app)
assert deps == [shared_leaf, first_parent]
    def test_can_compare_with_identity_not_equality(self):
first_leaf = LeafResource(name='same-name')
second_leaf = LeafResource(name='same-name')
first_parent = FooResource(name='first', leaf=first_leaf)
second_parent = FooResource(name='second', leaf=second_leaf)
app = models.Application(
stage='dev', resources=[first_parent, second_parent])
dep_builder = DependencyBuilder()
deps = dep_builder.build_dependencies(app)
assert deps == [first_leaf, first_parent, second_leaf, second_parent]
    def test_no_duplicate_dependencies(self):
leaf = LeafResource(name='leaf')
second_parent = FooResource(name='second', leaf=leaf)
first_parent = FooResource(name='first', leaf=[leaf, second_parent])
app = models.Application(
stage='dev', resources=[first_parent])
dep_builder = DependencyBuilder()
deps = dep_builder.build_dependencies(app)
assert deps == [leaf, second_parent, first_parent]
class RoleTestCase(object):
def __init__(self, given, roles, app_name='appname'):
self.given = given
self.roles = roles
self.app_name = app_name
def build(self):
app = Chalice(self.app_name)
for name in self.given:
def foo(event, context):
return {}
foo.__name__ = name
app.lambda_function(name)(foo)
user_provided_params = {
'chalice_app': app,
'app_name': self.app_name,
'project_dir': '.',
}
        lambda_functions = dict(self.given)
config_from_disk = {
'stages': {
'dev': {
'lambda_functions': lambda_functions,
}
}
}
config = Config(chalice_stage='dev',
user_provided_params=user_provided_params,
config_from_disk=config_from_disk)
return app, config
def assert_required_roles_created(self, application):
resources = application.resources
assert len(resources) == len(self.given)
functions_by_name = {f.function_name: f for f in resources}
# Roles that have the same name/arn should be the same
# object. If we encounter a role that's already in
# roles_by_identifier, we'll verify that it's the exact same object.
roles_by_identifier = {}
        for function_name, expectations in self.roles.items():
            full_name = 'appname-dev-%s' % function_name
            assert full_name in functions_by_name
            actual_role = functions_by_name[full_name].role
if not expectations.get('managed_role', True):
actual_role_arn = actual_role.role_arn
assert isinstance(actual_role, models.PreCreatedIAMRole)
assert expectations['iam_role_arn'] == actual_role_arn
if actual_role_arn in roles_by_identifier:
assert roles_by_identifier[actual_role_arn] is actual_role
roles_by_identifier[actual_role_arn] = actual_role
continue
actual_name = actual_role.role_name
assert expectations['name'] == actual_name
if actual_name in roles_by_identifier:
assert roles_by_identifier[actual_name] is actual_role
roles_by_identifier[actual_name] = actual_role
is_autogenerated = expectations.get('autogenerated', False)
policy_file = expectations.get('policy_file')
if is_autogenerated:
assert isinstance(actual_role, models.ManagedIAMRole)
assert isinstance(actual_role.policy, models.AutoGenIAMPolicy)
if policy_file is not None and not is_autogenerated:
assert isinstance(actual_role, models.ManagedIAMRole)
assert isinstance(actual_role.policy,
models.FileBasedIAMPolicy)
assert actual_role.policy.filename == os.path.join(
'.', '.chalice', expectations['policy_file'])
# How to read these tests:
# 'given' is a mapping of lambda function name to config values.
# 'roles' is a mapping of lambda function to expected attributes
# of the role associated with the given function.
# The first test case is explained in more detail as an example.
ROLE_TEST_CASES = [
# Default case, we use the shared 'appname-dev' role.
RoleTestCase(
# Given we have a lambda function in our app.py named 'a',
# and we have our config file state that the 'a' function
# should have an autogen'd policy,
given={'a': {'autogen_policy': True}},
# then we expect the IAM role associated with the lambda
# function 'a' should be named 'appname-dev', and it should
# be an autogenerated role/policy.
roles={'a': {'name': 'appname-dev', 'autogenerated': True}}),
# If you specify an explicit policy, we generate a function
# specific role.
RoleTestCase(
given={'a': {'autogen_policy': False,
'iam_policy_file': 'mypolicy.json'}},
roles={'a': {'name': 'appname-dev-a',
'autogenerated': False,
'policy_file': 'mypolicy.json'}}),
# Multiple lambda functions that use autogen policies share
# the same 'appname-dev' role.
RoleTestCase(
given={'a': {'autogen_policy': True},
'b': {'autogen_policy': True}},
roles={'a': {'name': 'appname-dev'},
'b': {'name': 'appname-dev'}}),
# Multiple lambda functions with separate policies result
# in separate roles.
RoleTestCase(
given={'a': {'autogen_policy': False,
'iam_policy_file': 'a.json'},
'b': {'autogen_policy': False,
'iam_policy_file': 'b.json'}},
roles={'a': {'name': 'appname-dev-a',
'autogenerated': False,
'policy_file': 'a.json'},
'b': {'name': 'appname-dev-b',
'autogenerated': False,
'policy_file': 'b.json'}}),
# You can mix autogen and explicit policy files. Autogen will
# always use the '{app}-{stage}' role.
RoleTestCase(
given={'a': {'autogen_policy': True},
'b': {'autogen_policy': False,
'iam_policy_file': 'b.json'}},
roles={'a': {'name': 'appname-dev',
'autogenerated': True},
'b': {'name': 'appname-dev-b',
'autogenerated': False,
'policy_file': 'b.json'}}),
# Default location if no policy file is given is
# policy-dev.json
RoleTestCase(
given={'a': {'autogen_policy': False}},
roles={'a': {'name': 'appname-dev-a',
'autogenerated': False,
'policy_file': 'policy-dev.json'}}),
# As soon as autogen_policy is false, we will *always*
# create a function specific role.
RoleTestCase(
given={'a': {'autogen_policy': False},
'b': {'autogen_policy': True}},
roles={'a': {'name': 'appname-dev-a',
'autogenerated': False,
'policy_file': 'policy-dev.json'},
'b': {'name': 'appname-dev'}}),
RoleTestCase(
given={'a': {'manage_iam_role': False, 'iam_role_arn': 'role:arn'}},
# 'managed_role' will verify the associated role is a
        # models.PreCreatedIAMRole with the provided iam_role_arn.
roles={'a': {'managed_role': False, 'iam_role_arn': 'role:arn'}}),
# Verify that we can use the same non-managed role for multiple
# lambda functions.
RoleTestCase(
given={'a': {'manage_iam_role': False, 'iam_role_arn': 'role:arn'},
'b': {'manage_iam_role': False, 'iam_role_arn': 'role:arn'}},
roles={'a': {'managed_role': False, 'iam_role_arn': 'role:arn'},
'b': {'managed_role': False, 'iam_role_arn': 'role:arn'}}),
RoleTestCase(
given={'a': {'manage_iam_role': False, 'iam_role_arn': 'role:arn'},
'b': {'autogen_policy': True}},
roles={'a': {'managed_role': False, 'iam_role_arn': 'role:arn'},
'b': {'name': 'appname-dev', 'autogenerated': True}}),
# Functions that mix all four options:
RoleTestCase(
# 2 functions with autogen'd policies.
given={
'a': {'autogen_policy': True},
'b': {'autogen_policy': True},
# 2 functions with various iam role arns.
'c': {'manage_iam_role': False, 'iam_role_arn': 'role:arn'},
'd': {'manage_iam_role': False, 'iam_role_arn': 'role:arn2'},
# A function with a default filename for a policy.
'e': {'autogen_policy': False},
# Even though this uses the same policy as 'e', we will
# still create a new role. This could be optimized in the
# future.
'f': {'autogen_policy': False},
# And finally 2 functions that have their own policy files.
'g': {'autogen_policy': False, 'iam_policy_file': 'g.json'},
'h': {'autogen_policy': False, 'iam_policy_file': 'h.json'}
},
roles={
'a': {'name': 'appname-dev', 'autogenerated': True},
'b': {'name': 'appname-dev', 'autogenerated': True},
'c': {'managed_role': False, 'iam_role_arn': 'role:arn'},
'd': {'managed_role': False, 'iam_role_arn': 'role:arn2'},
'e': {'name': 'appname-dev-e',
'autogenerated': False,
'policy_file': 'policy-dev.json'},
'f': {'name': 'appname-dev-f',
'autogenerated': False,
'policy_file': 'policy-dev.json'},
'g': {'name': 'appname-dev-g',
'autogenerated': False,
'policy_file': 'g.json'},
'h': {'name': 'appname-dev-h',
'autogenerated': False,
'policy_file': 'h.json'},
}),
]
@pytest.mark.parametrize('case', ROLE_TEST_CASES)
def test_role_creation(case):
_, config = case.build()
builder = ApplicationGraphBuilder()
application = builder.build(config, stage_name='dev')
case.assert_required_roles_created(application)
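# Hedged, illustrative example (not part of the original test suite): a single
# RoleTestCase can also be driven directly, which is convenient when debugging
# one configuration in isolation. It reuses only names defined above; the test
# name itself is hypothetical.
def test_role_creation_single_case_example():
    case = RoleTestCase(
        given={'a': {'autogen_policy': True}},
        roles={'a': {'name': 'appname-dev', 'autogenerated': True}})
    _, config = case.build()
    application = ApplicationGraphBuilder().build(config, stage_name='dev')
    case.assert_required_roles_created(application)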
class TestDefaultsInjector(object):
def test_inject_when_values_are_none(self):
injector = InjectDefaults(
lambda_timeout=100,
lambda_memory_size=512,
)
function = models.LambdaFunction(
            # The timeout/memory_size are set to None,
            # so the injector should fill them in with
            # the default values above.
timeout=None,
memory_size=None,
resource_name='foo',
function_name='app-dev-foo',
environment_variables={},
runtime='python2.7',
handler='app.app',
tags={},
deployment_package=None,
role=None,
security_group_ids=[],
subnet_ids=[],
layers=[],
reserved_concurrency=None,
)
config = Config.create()
injector.handle(config, function)
assert function.timeout == 100
assert function.memory_size == 512
def test_no_injection_when_values_are_set(self):
injector = InjectDefaults(
lambda_timeout=100,
lambda_memory_size=512,
)
function = models.LambdaFunction(
            # The timeout/memory_size are already set,
            # so the injector should leave them alone
            # rather than overwrite them with the defaults above.
timeout=1,
memory_size=1,
resource_name='foo',
function_name='app-stage-foo',
environment_variables={},
runtime='python2.7',
handler='app.app',
tags={},
deployment_package=None,
role=None,
security_group_ids=[],
subnet_ids=[],
layers=[],
reserved_concurrency=None,
)
config = Config.create()
injector.handle(config, function)
assert function.timeout == 1
assert function.memory_size == 1
class TestPolicyGeneratorStage(object):
def setup_method(self):
self.osutils = mock.Mock(spec=OSUtils)
def create_policy_generator(self, generator=None):
if generator is None:
generator = mock.Mock(spec=AppPolicyGenerator)
p = PolicyGenerator(generator, self.osutils)
return p
def test_invokes_policy_generator(self):
generator = mock.Mock(spec=AppPolicyGenerator)
generator.generate_policy.return_value = {'policy': 'doc'}
policy = models.AutoGenIAMPolicy(models.Placeholder.BUILD_STAGE)
config = Config.create()
p = self.create_policy_generator(generator)
p.handle(config, policy)
assert policy.document == {'policy': 'doc'}
def test_no_policy_generated_if_exists(self):
generator = mock.Mock(spec=AppPolicyGenerator)
generator.generate_policy.return_value = {'policy': 'new'}
policy = models.AutoGenIAMPolicy(document={'policy': 'original'})
config = Config.create()
p = self.create_policy_generator(generator)
p.handle(config, policy)
assert policy.document == {'policy': 'original'}
assert not generator.generate_policy.called
def test_policy_loaded_from_file_if_needed(self):
p = self.create_policy_generator()
policy = models.FileBasedIAMPolicy(
filename='foo.json', document=models.Placeholder.BUILD_STAGE)
self.osutils.get_file_contents.return_value = '{"iam": "policy"}'
p.handle(Config.create(), policy)
assert policy.document == {'iam': 'policy'}
self.osutils.get_file_contents.assert_called_with('foo.json')
def test_error_raised_if_file_policy_not_exists(self):
p = self.create_policy_generator()
policy = models.FileBasedIAMPolicy(
filename='foo.json', document=models.Placeholder.BUILD_STAGE)
self.osutils.get_file_contents.side_effect = IOError()
with pytest.raises(RuntimeError):
p.handle(Config.create(), policy)
def test_vpc_policy_inject_if_needed(self):
generator = mock.Mock(spec=AppPolicyGenerator)
generator.generate_policy.return_value = {'Statement': []}
policy = models.AutoGenIAMPolicy(
document=models.Placeholder.BUILD_STAGE,
traits=set([models.RoleTraits.VPC_NEEDED]),
)
config = Config.create()
p = self.create_policy_generator(generator)
p.handle(config, policy)
assert policy.document['Statement'][0] == VPC_ATTACH_POLICY
class TestSwaggerBuilder(object):
def test_can_generate_swagger_builder(self):
generator = mock.Mock(spec=SwaggerGenerator)
generator.generate_swagger.return_value = {'swagger': '2.0'}
rest_api = models.RestAPI(
resource_name='foo',
swagger_doc=models.Placeholder.BUILD_STAGE,
minimum_compression='',
endpoint_type='EDGE',
api_gateway_stage='api',
lambda_function=None,
)
app = Chalice(app_name='foo')
config = Config.create(chalice_app=app)
p = SwaggerBuilder(generator)
p.handle(config, rest_api)
assert rest_api.swagger_doc == {'swagger': '2.0'}
generator.generate_swagger.assert_called_with(app, rest_api)
class TestDeploymentPackager(object):
def test_can_generate_package(self):
generator = mock.Mock(spec=packager.LambdaDeploymentPackager)
generator.create_deployment_package.return_value = 'package.zip'
package = models.DeploymentPackage(models.Placeholder.BUILD_STAGE)
config = Config.create()
p = DeploymentPackager(generator)
p.handle(config, package)
assert package.filename == 'package.zip'
def test_package_not_generated_if_filename_populated(self):
generator = mock.Mock(spec=packager.LambdaDeploymentPackager)
generator.create_deployment_package.return_value = 'NEWPACKAGE.zip'
package = models.DeploymentPackage(filename='original-name.zip')
config = Config.create()
p = DeploymentPackager(generator)
p.handle(config, package)
assert package.filename == 'original-name.zip'
assert not generator.create_deployment_package.called
def test_build_stage():
first = mock.Mock(spec=BaseDeployStep)
second = mock.Mock(spec=BaseDeployStep)
build = BuildStage([first, second])
foo_resource = mock.sentinel.foo_resource
bar_resource = mock.sentinel.bar_resource
config = Config.create()
build.execute(config, [foo_resource, bar_resource])
assert first.handle.call_args_list == [
mock.call(config, foo_resource),
mock.call(config, bar_resource),
]
assert second.handle.call_args_list == [
mock.call(config, foo_resource),
mock.call(config, bar_resource),
]
class TestDeployer(unittest.TestCase):
def setUp(self):
self.resource_builder = mock.Mock(spec=ApplicationGraphBuilder)
self.deps_builder = mock.Mock(spec=DependencyBuilder)
self.build_stage = mock.Mock(spec=BuildStage)
self.plan_stage = mock.Mock(spec=PlanStage)
self.sweeper = mock.Mock(spec=ResourceSweeper)
self.executor = mock.Mock(spec=Executor)
self.recorder = mock.Mock(spec=ResultsRecorder)
self.chalice_app = Chalice(app_name='foo')
def create_deployer(self):
return Deployer(
self.resource_builder,
self.deps_builder,
self.build_stage,
self.plan_stage,
self.sweeper,
self.executor,
self.recorder,
)
def test_deploy_delegates_properly(self):
app = mock.Mock(spec=models.Application)
resources = [mock.Mock(spec=models.Model)]
api_calls = [mock.Mock(spec=APICall)]
self.resource_builder.build.return_value = app
self.deps_builder.build_dependencies.return_value = resources
self.plan_stage.execute.return_value = api_calls
self.executor.resource_values = {'foo': {'name': 'bar'}}
deployer = self.create_deployer()
config = Config.create(project_dir='.', chalice_app=self.chalice_app)
result = deployer.deploy(config, 'dev')
self.resource_builder.build.assert_called_with(config, 'dev')
self.deps_builder.build_dependencies.assert_called_with(app)
self.build_stage.execute.assert_called_with(config, resources)
self.plan_stage.execute.assert_called_with(resources)
self.sweeper.execute.assert_called_with(api_calls, config)
self.executor.execute.assert_called_with(api_calls)
expected_result = {
'resources': {'foo': {'name': 'bar'}},
'schema_version': '2.0',
'backend': 'api',
}
self.recorder.record_results.assert_called_with(
expected_result, 'dev', '.')
assert result == expected_result
def test_deploy_errors_raises_chalice_error(self):
self.resource_builder.build.side_effect = AWSClientError()
deployer = self.create_deployer()
config = Config.create(project_dir='.', chalice_app=self.chalice_app)
with pytest.raises(ChaliceDeploymentError):
deployer.deploy(config, 'dev')
def test_validation_errors_raise_failure(self):
@self.chalice_app.route('')
def bad_route_empty_string():
return {}
deployer = self.create_deployer()
config = Config.create(project_dir='.', chalice_app=self.chalice_app)
with pytest.raises(ChaliceDeploymentError):
deployer.deploy(config, 'dev')
def test_can_create_default_deployer():
session = botocore.session.get_session()
deployer = create_default_deployer(session, Config.create(
project_dir='.',
chalice_stage='dev',
), UI())
assert isinstance(deployer, Deployer)
def test_can_create_deletion_deployer():
session = botocore.session.get_session()
deployer = create_deletion_deployer(TypedAWSClient(session), UI())
assert isinstance(deployer, Deployer)
def test_templated_swagger_generator(sample_app):
doc = TemplatedSwaggerGenerator().generate_swagger(sample_app)
uri = doc['paths']['/']['get']['x-amazon-apigateway-integration']['uri']
assert isinstance(uri, StringFormat)
assert uri.template == (
'arn:aws:apigateway:{region_name}:lambda:path'
'/2015-03-31/functions/{api_handler_lambda_arn}/invocations'
)
assert uri.variables == ['region_name', 'api_handler_lambda_arn']
def test_templated_swagger_with_auth_uri(sample_app_with_auth):
doc = TemplatedSwaggerGenerator().generate_swagger(sample_app_with_auth)
uri = doc['securityDefinitions']['myauth'][
'x-amazon-apigateway-authorizer']['authorizerUri']
assert isinstance(uri, StringFormat)
assert uri.template == (
'arn:aws:apigateway:{region_name}:lambda:path'
'/2015-03-31/functions/{myauth_lambda_arn}/invocations'
)
assert uri.variables == ['region_name', 'myauth_lambda_arn']
class TestRecordResults(object):
def setup_method(self):
self.osutils = mock.Mock(spec=OSUtils)
self.recorder = ResultsRecorder(self.osutils)
self.deployed_values = {
'stages': {
'dev': {'resources': []},
},
'schema_version': '2.0',
}
self.osutils.joinpath = os.path.join
self.deployed_dir = os.path.join('.', '.chalice', 'deployed')
def test_can_record_results_initial_deploy(self):
expected_filename = os.path.join(self.deployed_dir, 'dev.json')
self.osutils.file_exists.return_value = False
self.osutils.directory_exists.return_value = False
self.recorder.record_results(
self.deployed_values, 'dev', '.',
)
expected_contents = serialize_to_json(self.deployed_values)
# Verify we created the deployed dir on an initial deploy.
self.osutils.makedirs.assert_called_with(self.deployed_dir)
self.osutils.set_file_contents.assert_called_with(
filename=expected_filename,
contents=expected_contents,
binary=False
)
class TestDeploymentReporter(object):
def setup_method(self):
self.ui = mock.Mock(spec=UI)
self.reporter = DeploymentReporter(ui=self.ui)
def test_can_generate_report(self):
deployed_values = {
"resources": [
{"role_name": "james2-dev",
"role_arn": "my-role-arn",
"name": "default-role",
"resource_type": "iam_role"},
{"lambda_arn": "lambda-arn-foo",
"name": "foo",
"resource_type": "lambda_function"},
{"lambda_arn": "lambda-arn-dev",
"name": "api_handler",
"resource_type": "lambda_function"},
{"name": "rest_api",
"rest_api_id": "rest_api_id",
"rest_api_url": "https://host/api",
"resource_type": "rest_api"},
{"name": "websocket_api",
"websocket_api_id": "websocket_api_id",
"websocket_api_url": "wss://host/api",
"resource_type": "websocket_api"},
],
}
report = self.reporter.generate_report(deployed_values)
assert report == (
"Resources deployed:\n"
" - Lambda ARN: lambda-arn-foo\n"
" - Lambda ARN: lambda-arn-dev\n"
" - Rest API URL: https://host/api\n"
" - Websocket API URL: wss://host/api\n"
)
def test_can_display_report(self):
deployed_values = {
'resources': []
}
self.reporter.display_report(deployed_values)
self.ui.write.assert_called_with('Resources deployed:\n')
class TestLambdaEventSourcePolicyInjector(object):
def create_model_from_app(self, app, config):
builder = ApplicationGraphBuilder()
application = builder.build(config, stage_name='dev')
return application.resources[0]
def test_can_inject_policy(self, sample_sqs_event_app):
config = Config.create(chalice_app=sample_sqs_event_app,
autogen_policy=True,
project_dir='.')
event_source = self.create_model_from_app(sample_sqs_event_app, config)
role = event_source.lambda_function.role
role.policy.document = {'Statement': []}
injector = LambdaEventSourcePolicyInjector()
injector.handle(config, event_source)
assert role.policy.document == {
'Statement': [SQS_EVENT_SOURCE_POLICY.copy()],
}
def test_no_inject_if_not_autogen_policy(self, sample_sqs_event_app):
config = Config.create(chalice_app=sample_sqs_event_app,
autogen_policy=False,
project_dir='.')
event_source = self.create_model_from_app(sample_sqs_event_app, config)
role = event_source.lambda_function.role
role.policy.document = {'Statement': []}
injector = LambdaEventSourcePolicyInjector()
injector.handle(config, event_source)
assert role.policy.document == {'Statement': []}
    def test_no_inject_if_already_injected(self, sample_sqs_event_app):
@sample_sqs_event_app.on_sqs_message(queue='second-queue')
def second_handler(event):
pass
config = Config.create(chalice_app=sample_sqs_event_app,
autogen_policy=True,
project_dir='.')
builder = ApplicationGraphBuilder()
application = builder.build(config, stage_name='dev')
event_sources = application.resources
role = event_sources[0].lambda_function.role
role.policy.document = {'Statement': []}
injector = LambdaEventSourcePolicyInjector()
injector.handle(config, event_sources[0])
injector.handle(config, event_sources[1])
# Even though we have two queue handlers, we only need to
# inject the policy once.
assert role.policy.document == {
'Statement': [SQS_EVENT_SOURCE_POLICY.copy()],
}
class TestWebsocketPolicyInjector(object):
def create_model_from_app(self, app, config):
builder = ApplicationGraphBuilder()
application = builder.build(config, stage_name='dev')
return application.resources[0]
def test_can_inject_policy(self, sample_websocket_app):
config = Config.create(chalice_app=sample_websocket_app,
autogen_policy=True,
project_dir='.')
event_source = self.create_model_from_app(
sample_websocket_app, config)
role = event_source.connect_function.role
role.policy.document = {'Statement': []}
injector = WebsocketPolicyInjector()
injector.handle(config, event_source)
assert role.policy.document == {
'Statement': [POST_TO_WEBSOCKET_CONNECTION_POLICY.copy()],
}
def test_no_inject_if_not_autogen_policy(self, sample_websocket_app):
config = Config.create(chalice_app=sample_websocket_app,
autogen_policy=False,
project_dir='.')
event_source = self.create_model_from_app(sample_websocket_app, config)
role = event_source.connect_function.role
role.policy.document = {'Statement': []}
        injector = WebsocketPolicyInjector()
injector.handle(config, event_source)
assert role.policy.document == {'Statement': []}
|
awslabs/chalice
|
tests/unit/deploy/test_deployer.py
|
Python
|
apache-2.0
| 38,828
|
import logging
class NLog(object):
"""
Main Log Handling Class
"""
def __init__(self):
self.log = logging
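# Hedged usage sketch (not part of the original module): NLog simply exposes
# the stdlib logging module via `self.log`, so callers configure and emit
# records through that attribute.
if __name__ == '__main__':
    nlog = NLog()
    nlog.log.basicConfig(level=logging.INFO)
    nlog.log.info('neptune logger initialised')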
|
NeptuneFramework/neptune
|
neptune/logger.py
|
Python
|
apache-2.0
| 132
|
# Copyright 2019 The TensorNetwork Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper methods for `path_contractors`."""
# pylint: disable=line-too-long
from tensornetwork.network_operations import get_all_edges, get_subgraph_dangling
from tensornetwork.network_components import AbstractNode, Edge
from typing import (Any, Callable, Dict, List, Set, Tuple, Iterable, Text)
# `opt_einsum` algorithm method typing
Algorithm = Callable[[List[Set[Edge]], Set[Edge], Dict[Edge, Any]],
List[Tuple[int, int]]]
def multi_remove(elems: List[Any], indices: List[int]) -> List[Any]:
"""Remove multiple indicies in a list at once."""
return [i for j, i in enumerate(elems) if j not in indices]
def get_path(
nodes: Iterable[AbstractNode],
algorithm: Algorithm) -> Tuple[List[Tuple[int, int]], List[AbstractNode]]:
"""Calculates the contraction paths using `opt_einsum` methods.
Args:
nodes: An iterable of nodes.
algorithm: `opt_einsum` method to use for calculating the contraction path.
Returns:
The optimal contraction path as returned by `opt_einsum`.
"""
nodes = list(nodes)
input_sets = [set(node.edges) for node in nodes]
output_set = get_subgraph_dangling(nodes)
size_dict = {edge: edge.dimension for edge in get_all_edges(nodes)}
return algorithm(input_sets, output_set, size_dict), nodes
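# Hedged usage sketch (not part of the original module). It assumes `numpy`
# and `opt_einsum` are installed; `opt_einsum.paths.greedy` is one valid
# `Algorithm` to pass to `get_path`.
if __name__ == '__main__':
  import functools
  import numpy as np
  import opt_einsum
  import tensornetwork as tn

  a = tn.Node(np.ones((2, 3)))
  b = tn.Node(np.ones((3, 4)))
  a[1] ^ b[0]  # connect the shared edge of dimension 3
  greedy = functools.partial(opt_einsum.paths.greedy, memory_limit=None)
  path, ordered_nodes = get_path([a, b], greedy)
  # `path` is a list of index pairs into `ordered_nodes`, e.g. [(0, 1)].
  print(path)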
|
google/TensorNetwork
|
tensornetwork/contractors/opt_einsum_paths/utils.py
|
Python
|
apache-2.0
| 1,871
|
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2020 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
from __future__ import absolute_import, unicode_literals
import multiprocessing
import os
import android.adb.commands
from swift_build_support.swift_build_support import targets
from swift_build_support.swift_build_support.targets import \
StdlibDeploymentTarget
from . import argparse
from . import defaults
__all__ = [
'create_argument_parser',
]
class _ApplyDefaultsArgumentParser(argparse.ArgumentParser):
"""Wrapper class around the default ArgumentParser that allows for
post-processing the parsed argument namespace to apply default argument
transformations.
"""
def __init__(self, apply_defaults=None, *args, **kwargs):
self._apply_defaults = apply_defaults
super(_ApplyDefaultsArgumentParser, self).__init__(*args, **kwargs)
def parse_known_args(self, args=None, namespace=None):
args, argv = super(_ApplyDefaultsArgumentParser, self)\
.parse_known_args(args, namespace)
self._apply_defaults(args)
return args, argv
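# Hedged illustration (not in the original file): argparse's parse_args()
# funnels through parse_known_args(), so the `apply_defaults` hook runs on
# every parse and derived options are filled in automatically. For example:
#
#   parser = create_argument_parser()
#   args = parser.parse_args(['--debug-lldb'])
#   # _apply_default_arguments(args) has already run at this point, so
#   # args.lldb_build_variant == 'Debug' and args.build_lldb is True.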
def _apply_default_arguments(args):
"""Preprocess argument namespace to apply default behaviors.
"""
# Build cmark if any cmark-related options were specified.
    if args.cmark_build_variant is not None:
args.build_cmark = True
# Build LLDB if any LLDB-related options were specified.
if args.lldb_build_variant is not None or \
args.lldb_assertions is not None or \
args.lldb_build_with_xcode is not None:
args.build_lldb = True
# Set the default build variant.
if args.build_variant is None:
args.build_variant = 'Debug'
if args.llvm_build_variant is None:
args.llvm_build_variant = args.build_variant
if args.swift_build_variant is None:
args.swift_build_variant = args.build_variant
if args.swift_stdlib_build_variant is None:
args.swift_stdlib_build_variant = args.build_variant
if args.cmark_build_variant is None:
args.cmark_build_variant = args.swift_build_variant
if args.lldb_build_variant is None:
args.lldb_build_variant = args.build_variant
if args.lldb_build_with_xcode is None:
args.lldb_build_with_xcode = '0'
if args.foundation_build_variant is None:
args.foundation_build_variant = args.build_variant
if args.libdispatch_build_variant is None:
args.libdispatch_build_variant = args.build_variant
if args.libicu_build_variant is None:
args.libicu_build_variant = args.build_variant
# Assertions are enabled by default.
if args.assertions is None:
args.assertions = True
# Propagate the default assertions setting.
if args.cmark_assertions is None:
args.cmark_assertions = args.assertions
if args.llvm_assertions is None:
args.llvm_assertions = args.assertions
if args.swift_assertions is None:
args.swift_assertions = args.assertions
if args.swift_stdlib_assertions is None:
args.swift_stdlib_assertions = args.assertions
if args.llbuild_assertions is None:
args.llbuild_assertions = args.assertions
if args.lldb_assertions is None:
args.lldb_assertions = args.assertions
# Set the default CMake generator.
if args.cmake_generator is None:
args.cmake_generator = 'Ninja'
# --ios-all etc are not supported by open-source Swift.
if args.ios_all:
raise ValueError('error: --ios-all is unavailable in open-source '
'Swift.\nUse --ios to skip iOS device tests.')
if args.tvos_all:
raise ValueError('error: --tvos-all is unavailable in open-source '
'Swift.\nUse --tvos to skip tvOS device tests.')
if args.watchos_all:
raise ValueError('error: --watchos-all is unavailable in open-source '
'Swift.\nUse --watchos to skip watchOS device tests.')
# --skip-{ios,tvos,watchos} or --skip-build-{ios,tvos,watchos} are
# merely shorthands for --skip-build-{**os}-{device,simulator}
if not args.ios or not args.build_ios:
args.build_ios_device = False
args.build_ios_simulator = False
if not args.tvos or not args.build_tvos:
args.build_tvos_device = False
args.build_tvos_simulator = False
if not args.watchos or not args.build_watchos:
args.build_watchos_device = False
args.build_watchos_simulator = False
if not args.android or not args.build_android:
args.build_android = False
# --test-paths implies --test and/or --validation-test
# depending on what directories/files have been specified.
if args.test_paths:
for path in args.test_paths:
if path.startswith('test'):
args.test = True
elif path.startswith('validation-test'):
args.test = True
args.validation_test = True
# --validation-test implies --test.
if args.validation_test:
args.test = True
# --test-optimized implies --test.
if args.test_optimized:
args.test = True
# --test-optimize-size implies --test.
if args.test_optimize_for_size:
args.test = True
# --test-optimize-none-with-implicit-dynamic implies --test.
if args.test_optimize_none_with_implicit_dynamic:
args.test = True
    # If no tests are specified, skip the Swift stdlib tests on all platforms.
if not args.test and not args.validation_test and not args.long_test:
args.test_linux = False
args.test_freebsd = False
args.test_cygwin = False
args.test_osx = False
args.test_ios = False
args.test_tvos = False
args.test_watchos = False
args.test_android = False
args.test_swiftpm = False
args.test_swift_driver = False
args.test_swiftsyntax = False
args.test_indexstoredb = False
args.test_sourcekitlsp = False
args.test_skstresstester = False
args.test_swiftformat = False
args.test_swiftevolve = False
args.test_toolchainbenchmarks = False
# --skip-test-ios is merely a shorthand for host and simulator tests.
if not args.test_ios:
args.test_ios_host = False
args.test_ios_simulator = False
# --skip-test-tvos is merely a shorthand for host and simulator tests.
if not args.test_tvos:
args.test_tvos_host = False
args.test_tvos_simulator = False
# --skip-test-watchos is merely a shorthand for host and simulator
    # tests.
if not args.test_watchos:
args.test_watchos_host = False
args.test_watchos_simulator = False
# --skip-build-{ios,tvos,watchos}-{device,simulator} implies
# --skip-test-{ios,tvos,watchos}-{host,simulator}
if not args.build_ios_device:
args.test_ios_host = False
if not args.build_ios_simulator:
args.test_ios_simulator = False
if not args.build_tvos_device:
args.test_tvos_host = False
if not args.build_tvos_simulator:
args.test_tvos_simulator = False
if not args.build_watchos_device:
args.test_watchos_host = False
if not args.build_watchos_simulator:
args.test_watchos_simulator = False
if not args.build_android:
# If building natively on an Android host, allow running the test suite
# without the NDK config.
if not StdlibDeploymentTarget.Android.contains(StdlibDeploymentTarget
.host_target().name):
args.test_android = False
args.test_android_host = False
if not args.test_android:
args.test_android_host = False
if not args.host_test:
args.test_ios_host = False
args.test_tvos_host = False
args.test_watchos_host = False
args.test_android_host = False
def create_argument_parser():
"""Return a configured argument parser."""
# NOTE: USAGE, DESCRIPTION and EPILOG are defined at the bottom of the file
parser = _ApplyDefaultsArgumentParser(
apply_defaults=_apply_default_arguments,
formatter_class=argparse.RawDescriptionHelpFormatter,
usage=USAGE,
description=DESCRIPTION,
epilog=EPILOG)
builder = parser.to_builder()
# Prepare DSL functions
option = builder.add_option
set_defaults = builder.set_defaults
in_group = builder.in_group
mutually_exclusive_group = builder.mutually_exclusive_group
# Prepare DSL actions
append = builder.actions.append
store = builder.actions.store
store_true = builder.actions.store_true
store_false = builder.actions.store_false
store_int = builder.actions.store_int
store_path = builder.actions.store_path
toggle_true = builder.actions.toggle_true
toggle_false = builder.actions.toggle_false
unsupported = builder.actions.unsupported
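    # Hedged note (not in the original file): each `option(...)` call below is
    # a thin DSL wrapper over argparse. Roughly,
    #
    #   option('--foo', toggle_true('build_foo'), help='build foo')
    #
    # registers a '--foo' flag whose parsed result is stored on
    # `args.build_foo`; '--foo' and 'build_foo' are hypothetical names used
    # only for illustration.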
# -------------------------------------------------------------------------
# Top-level options
option(['-n', '--dry-run'], store_true,
help='print the commands that would be executed, but do not '
'execute them')
option('--dump-config', toggle_true,
help='instead of building, write JSON to stdout containing '
'various values used to build in this configuration')
option('--legacy-impl', store_true('legacy_impl'),
help='use legacy implementation')
option('--build-runtime-with-host-compiler', toggle_true,
help='Use the host compiler, not the self-built one to compile the '
'Swift runtime')
option(['-i', '--ios'], store_true,
help='also build for iOS, but disallow tests that require an iOS '
'device')
option(['-I', '--ios-all'], store_true('ios_all'),
help='also build for iOS, and allow all iOS tests')
option(['--skip-local-build'], toggle_true('skip_local_build'),
help='set to skip building for the local platform')
option('--skip-ios', store_false('ios'),
help='set to skip everything iOS-related')
option('--tvos', toggle_true,
           help='also build for tvOS, but disallow tests that require a tvOS '
'device')
option('--tvos-all', toggle_true('tvos_all'),
help='also build for tvOS, and allow all tvOS tests')
option('--skip-tvos', store_false('tvos'),
help='set to skip everything tvOS-related')
option('--watchos', toggle_true,
           help='also build for watchOS, but disallow tests that require a '
'watchOS device')
option('--watchos-all', toggle_true('watchos_all'),
help='also build for Apple watchOS, and allow all Apple watchOS '
'tests')
option('--skip-watchos', store_false('watchos'),
help='set to skip everything watchOS-related')
option('--maccatalyst', toggle_true,
help='Enable building Swift with macCatalyst support')
option('--maccatalyst-ios-tests', toggle_true,
help='When building for macCatalyst run tests with iOS-like '
'target triple')
option('--android', toggle_true,
help='also build for Android')
option('--swift-analyze-code-coverage', store,
choices=['false', 'not-merged', 'merged'],
# so CMake can see the inert mode as a false value
default=defaults.SWIFT_ANALYZE_CODE_COVERAGE,
help='enable code coverage analysis in Swift (false, not-merged, '
'merged).')
option('--build-subdir', store,
metavar='PATH',
help='name of the directory under $SWIFT_BUILD_ROOT where the '
'build products will be placed')
option('--install-prefix', store_path,
default=targets.install_prefix(),
help='The installation prefix. This is where built Swift products '
'(like bin, lib, and include) will be installed.')
option('--install-symroot', store_path,
help='the path to install debug symbols into')
option('--install-destdir', store_path,
help='the path to use as the filesystem root for the installation')
option('--install-all', toggle_true,
help='Assume all built products should be installed')
option(['-j', '--jobs'], store_int('build_jobs'),
default=multiprocessing.cpu_count(),
help='the number of parallel build jobs to use')
option('--darwin-xcrun-toolchain', store,
help='the name of the toolchain to use on Darwin')
option('--cmake', store_path(executable=True),
help='the path to a CMake executable that will be used to build '
'Swift')
option('--show-sdks', toggle_true,
help='print installed Xcode and SDK versions')
option('--extra-swift-args', append,
help='Pass through extra flags to swift in the form of a CMake '
'list "module_regexp;flag". Can be called multiple times to '
'add multiple such module_regexp flag pairs. All semicolons '
'in flags must be escaped with a "\\"')
option('--host-cc', store_path(executable=True),
help='the absolute path to CC, the "clang" compiler for the host '
'platform. Default is auto detected.')
option('--host-cxx', store_path(executable=True),
help='the absolute path to CXX, the "clang++" compiler for the '
'host platform. Default is auto detected.')
option('--cmake-c-launcher', store_path(executable=True),
default=os.environ.get('C_COMPILER_LAUNCHER', None),
help='the absolute path to set CMAKE_C_COMPILER_LAUNCHER')
option('--cmake-cxx-launcher', store_path(executable=True),
default=os.environ.get('CXX_COMPILER_LAUNCHER', None),
help='the absolute path to set CMAKE_CXX_COMPILER_LAUNCHER')
option('--host-lipo', store_path(executable=True),
help='the absolute path to lipo. Default is auto detected.')
option('--host-libtool', store_path(executable=True),
help='the absolute path to libtool. Default is auto detected.')
option('--distcc', toggle_true,
default=os.environ.get('USE_DISTCC') == '1',
help='use distcc in pump mode')
option('--sccache', toggle_true,
default=os.environ.get('SWIFT_USE_SCCACHE') == '1',
help='use sccache')
option('--enable-asan', toggle_true,
help='enable Address Sanitizer')
option('--enable-ubsan', toggle_true,
help='enable Undefined Behavior Sanitizer')
option('--enable-tsan', toggle_true,
help='enable Thread Sanitizer for swift tools')
option('--enable-tsan-runtime', toggle_true,
help='enable Thread Sanitizer on the swift runtime')
option('--enable-lsan', toggle_true,
help='enable Leak Sanitizer for swift tools')
option('--enable-sanitize-coverage', toggle_true,
help='enable sanitizer coverage for swift tools. Necessary for '
'fuzzing swiftc')
option('--compiler-vendor', store,
choices=['none', 'apple'],
default=defaults.COMPILER_VENDOR,
help='Compiler vendor name')
option('--clang-compiler-version', store,
type=argparse.ClangVersionType(),
metavar='MAJOR.MINOR.PATCH',
help='string that indicates a compiler version for Clang')
option('--clang-user-visible-version', store,
type=argparse.ClangVersionType(),
default=defaults.CLANG_USER_VISIBLE_VERSION,
metavar='MAJOR.MINOR.PATCH',
help='User-visible version of the embedded Clang and LLVM '
'compilers')
option('--swift-compiler-version', store,
type=argparse.SwiftVersionType(),
metavar='MAJOR.MINOR',
help='string that indicates a compiler version for Swift')
option('--swift-user-visible-version', store,
type=argparse.SwiftVersionType(),
default=defaults.SWIFT_USER_VISIBLE_VERSION,
metavar='MAJOR.MINOR',
help='User-visible version of the embedded Swift compiler')
option('--darwin-deployment-version-osx', store,
default=defaults.DARWIN_DEPLOYMENT_VERSION_OSX,
metavar='MAJOR.MINOR',
help='minimum deployment target version for OS X')
option('--darwin-deployment-version-ios', store,
default=defaults.DARWIN_DEPLOYMENT_VERSION_IOS,
metavar='MAJOR.MINOR',
help='minimum deployment target version for iOS')
option('--darwin-deployment-version-tvos', store,
default=defaults.DARWIN_DEPLOYMENT_VERSION_TVOS,
metavar='MAJOR.MINOR',
help='minimum deployment target version for tvOS')
option('--darwin-deployment-version-watchos', store,
default=defaults.DARWIN_DEPLOYMENT_VERSION_WATCHOS,
metavar='MAJOR.MINOR',
help='minimum deployment target version for watchOS')
option('--extra-cmake-options', append,
type=argparse.ShellSplitType(),
help='Pass through extra options to CMake in the form of comma '
'separated options "-DCMAKE_VAR1=YES,-DCMAKE_VAR2=/tmp". Can '
'be called multiple times to add multiple such options.')
option('--build-args', store,
type=argparse.ShellSplitType(),
default=[],
help='arguments to the build tool. This would be prepended to the '
'default argument that is "-j8" when CMake generator is '
'"Ninja".')
option('--verbose-build', toggle_true,
help='print the commands executed during the build')
option('--lto', store('lto_type'),
choices=['thin', 'full'],
const='full',
default=None,
metavar='LTO_TYPE',
help='use lto optimization on llvm/swift tools. This does not '
'imply using lto on the swift standard library or runtime. '
'Options: thin, full. If no optional arg is provided, full is '
'chosen by default')
option('--clang-profile-instr-use', store_path,
help='profile file to use for clang PGO')
option('--llvm-max-parallel-lto-link-jobs', store_int,
default=defaults.LLVM_MAX_PARALLEL_LTO_LINK_JOBS,
metavar='COUNT',
help='the maximum number of parallel link jobs to use when '
'compiling llvm')
option('--swift-tools-max-parallel-lto-link-jobs', store_int,
default=defaults.SWIFT_MAX_PARALLEL_LTO_LINK_JOBS,
metavar='COUNT',
help='the maximum number of parallel link jobs to use when '
'compiling swift tools.')
option('--disable-guaranteed-normal-arguments', store_true,
help='Disable guaranteed normal arguments')
option('--enable-stdlibcore-exclusivity-checking', store_true,
help='Enable exclusivity checking in stdlibCore')
option('--force-optimized-typechecker', store_true,
help='Force the type checker to be built with '
'optimization')
option('--lit-args', store,
default='-sv',
metavar='LITARGS',
help='lit args to use when testing')
option('--coverage-db', store_path,
help='coverage database to use when prioritizing testing')
option('--llvm-install-components', store,
default=defaults.llvm_install_components(),
           help='A semicolon-separated list of LLVM components to install')
# -------------------------------------------------------------------------
in_group('Host and cross-compilation targets')
option('--host-target', store,
default=StdlibDeploymentTarget.host_target().name,
help='The host target. LLVM, Clang, and Swift will be built for '
'this target. The built LLVM and Clang will be used to '
'compile Swift for the cross-compilation targets.')
option('--cross-compile-hosts', append,
type=argparse.ShellSplitType(),
default=[],
help='A space separated list of targets to cross-compile host '
'Swift tools for. Can be used multiple times.')
option('--stdlib-deployment-targets', store,
type=argparse.ShellSplitType(),
default=None,
help='The targets to compile or cross-compile the Swift standard '
'library for. %(default)s by default.'
' Comma separated list: {}'.format(
' '.join(StdlibDeploymentTarget.get_target_names())))
option('--build-stdlib-deployment-targets', store,
type=argparse.ShellSplitType(),
default=['all'],
help='A space-separated list that filters which of the configured '
'targets to build the Swift standard library for, or "all".')
option('--swift-darwin-supported-archs', store,
metavar='ARCHS',
help='Semicolon-separated list of architectures to configure on '
'Darwin platforms. If left empty all default architectures '
'are configured.')
option('--swift-darwin-module-archs', store,
metavar='ARCHS',
help='Semicolon-separated list of architectures to configure Swift '
'module-only targets on Darwin platforms. These targets are '
'in addition to the full library targets.')
# -------------------------------------------------------------------------
in_group('Options to select projects')
option('--infer', toggle_true('infer_dependencies'),
help='Infer any downstream dependencies from enabled projects')
option(['-l', '--lldb'], toggle_true('build_lldb'),
help='build LLDB')
option(['-b', '--llbuild'], toggle_true('build_llbuild'),
help='build llbuild')
option(['--libcxx'], toggle_true('build_libcxx'),
help='build libcxx')
option(['-p', '--swiftpm'], toggle_true('build_swiftpm'),
help='build swiftpm')
option(['--install-swiftpm'], toggle_true('install_swiftpm'),
help='install swiftpm')
option(['--swiftsyntax'], toggle_true('build_swiftsyntax'),
help='build swiftSyntax')
option(['--skstresstester'], toggle_true('build_skstresstester'),
help='build the SourceKit stress tester')
option(['--swiftformat'], toggle_true('build_swiftformat'),
help='build swift-format')
option(['--swiftevolve'], toggle_true('build_swiftevolve'),
help='build the swift-evolve tool')
option(['--swift-driver'], toggle_true('build_swift_driver'),
help='build swift-driver')
option(['--indexstore-db'], toggle_true('build_indexstoredb'),
help='build IndexStoreDB')
option('--test-indexstore-db-sanitize-all',
toggle_true('test_indexstoredb_sanitize_all'),
help='run indexstore-db tests under all sanitizers')
option(['--sourcekit-lsp'], toggle_true('build_sourcekitlsp'),
help='build SourceKitLSP')
option('--test-sourcekit-lsp-sanitize-all',
toggle_true('test_sourcekitlsp_sanitize_all'),
help='run sourcekit-lsp tests under all sanitizers')
option('--install-swiftsyntax', toggle_true('install_swiftsyntax'),
help='install SwiftSyntax')
option('--swiftsyntax-verify-generated-files',
toggle_true('swiftsyntax_verify_generated_files'),
help='set to verify that the generated files in the source tree '
'match the ones that would be generated from current main')
option(['--install-sourcekit-lsp'], toggle_true('install_sourcekitlsp'),
help='install SourceKitLSP')
option(['--install-skstresstester'], toggle_true('install_skstresstester'),
help='install the SourceKit stress tester')
option(['--install-swift-driver'], toggle_true('install_swift_driver'),
help='install new Swift driver')
option(['--install-swiftevolve'], toggle_true('install_swiftevolve'),
help='install SwiftEvolve')
option(['--toolchain-benchmarks'],
toggle_true('build_toolchainbenchmarks'),
help='build Swift Benchmarks using swiftpm against the just built '
'toolchain')
option(['--swift-inspect'],
toggle_true('build_swift_inspect'),
help='build SwiftInspect using swiftpm against the just built '
'toolchain')
option('--xctest', toggle_true('build_xctest'),
help='build xctest')
option('--foundation', toggle_true('build_foundation'),
help='build foundation')
option('--libdispatch', toggle_true('build_libdispatch'),
help='build libdispatch')
option('--libicu', toggle_true('build_libicu'),
help='build libicu')
option('--playgroundsupport', toggle_true('build_playgroundsupport'),
help='build PlaygroundSupport')
option('--install-playgroundsupport',
toggle_true('install_playgroundsupport'),
help='install playground support')
option('--build-ninja', toggle_true,
help='build the Ninja tool')
option(['--build-libparser-only'], toggle_true('build_libparser_only'),
help='build only libParser for SwiftSyntax')
option('--skip-build-clang-tools-extra',
toggle_false('build_clang_tools_extra'),
default=True,
help='skip building clang-tools-extra as part of llvm')
# -------------------------------------------------------------------------
in_group('Extra actions to perform before or in addition to building')
option(['-c', '--clean'], store_true,
help='do a clean build')
option('--export-compile-commands', toggle_true,
help='generate compilation databases in addition to building')
option('--symbols-package', store_path,
help='if provided, an archive of the symbols directory will be '
'generated at this path')
# -------------------------------------------------------------------------
in_group('Build variant')
with mutually_exclusive_group():
set_defaults(build_variant='Debug')
option(['-d', '--debug'], store('build_variant'),
const='Debug',
help='build the Debug variant of everything (LLVM, Clang, '
'Swift host tools, target Swift standard libraries, LLDB) '
'(default is %(default)s)')
option(['-r', '--release-debuginfo'], store('build_variant'),
const='RelWithDebInfo',
help='build the RelWithDebInfo variant of everything (default '
'is %(default)s)')
option(['-R', '--release'], store('build_variant'),
const='Release',
help='build the Release variant of everything (default is '
'%(default)s)')
option(['--min-size-release'], store('build_variant'),
const='MinSizeRel',
help='build the MinSizeRel variant of everything (default is '
'%(default)s)')
# -------------------------------------------------------------------------
in_group('Override build variant for a specific project')
option('--debug-llvm', store('llvm_build_variant'),
const='Debug',
help='build the Debug variant of LLVM')
option('--debug-swift', store('swift_build_variant'),
const='Debug',
help='build the Debug variant of Swift host tools')
option('--debug-swift-stdlib', store('swift_stdlib_build_variant'),
const='Debug',
help='build the Debug variant of the Swift standard library and '
                'SDK overlay')
option('--debug-lldb', store('lldb_build_variant'),
const='Debug',
help='build the Debug variant of LLDB')
option('--lldb-build-with-xcode', store('lldb_build_with_xcode'),
const='1',
help='build LLDB using xcodebuild, if possible')
option('--lldb-build-with-cmake', store('lldb_build_with_xcode'),
const='0',
help='build LLDB using CMake')
option('--debug-cmark', store('cmark_build_variant'),
const='Debug',
help='build the Debug variant of CommonMark')
option('--debug-foundation', store('foundation_build_variant'),
const='Debug',
help='build the Debug variant of Foundation')
option('--debug-libdispatch', store('libdispatch_build_variant'),
const='Debug',
help='build the Debug variant of libdispatch')
option('--debug-libicu', store('libicu_build_variant'),
const='Debug',
help='build the Debug variant of libicu')
# -------------------------------------------------------------------------
# Assertions group
with mutually_exclusive_group():
set_defaults(assertions=True)
# TODO: Convert to store_true
option(['-a', '--assertions'], store,
const=True,
help='enable assertions in all projects')
# TODO: Convert to store_false
option(['-A', '--no-assertions'], store('assertions'),
const=False,
help='disable assertions in all projects')
# -------------------------------------------------------------------------
in_group('Control assertions in a specific project')
option('--cmark-assertions', store,
const=True,
help='enable assertions in CommonMark')
option('--llvm-assertions', store,
const=True,
help='enable assertions in LLVM')
option('--no-llvm-assertions', store('llvm_assertions'),
const=False,
help='disable assertions in LLVM')
option('--swift-assertions', store,
const=True,
help='enable assertions in Swift')
option('--no-swift-assertions', store('swift_assertions'),
const=False,
help='disable assertions in Swift')
option('--swift-stdlib-assertions', store,
const=True,
help='enable assertions in the Swift standard library')
option('--no-swift-stdlib-assertions', store('swift_stdlib_assertions'),
const=False,
help='disable assertions in the Swift standard library')
option('--lldb-assertions', store,
const=True,
help='enable assertions in LLDB')
option('--no-lldb-assertions', store('lldb_assertions'),
const=False,
help='disable assertions in LLDB')
option('--llbuild-assertions', store,
const=True,
help='enable assertions in llbuild')
option('--no-llbuild-assertions', store('llbuild_assertions'),
const=False,
help='disable assertions in llbuild')
# -------------------------------------------------------------------------
in_group('Select the CMake generator')
set_defaults(cmake_generator=defaults.CMAKE_GENERATOR)
option(['-e', '--eclipse'], store('cmake_generator'),
const='Eclipse CDT4 - Ninja',
help="use CMake's Eclipse generator (%(default)s by default)")
option(['-m', '--make'], store('cmake_generator'),
const='Unix Makefiles',
help="use CMake's Makefile generator (%(default)s by default)")
option(['-x', '--xcode'], store('cmake_generator'),
const='Xcode',
help="use CMake's Xcode generator (%(default)s by default)")
# -------------------------------------------------------------------------
in_group('Run tests')
# NOTE: We can't merge -t and --test, because nargs='?' makes
# `-ti` to be treated as `-t=i`.
# FIXME: Convert to store_true action
option('-t', store('test', const=True),
help='test Swift after building')
option('--test', toggle_true,
help='test Swift after building')
option('-T', store('validation_test', const=True),
help='run the validation test suite (implies --test)')
option('--validation-test', toggle_true,
help='run the validation test suite (implies --test)')
# FIXME: Convert to store_true action
option('-o', store('test_optimized', const=True),
help='run the test suite in optimized mode too (implies --test)')
option('--test-optimized', toggle_true,
help='run the test suite in optimized mode too (implies --test)')
# FIXME: Convert to store_true action
option('-s', store('test_optimize_for_size', const=True),
help='run the test suite in optimize for size mode too '
'(implies --test)')
option('--test-optimize-for-size', toggle_true,
help='run the test suite in optimize for size mode too '
'(implies --test)')
# FIXME: Convert to store_true action
option('-y', store('test_optimize_none_with_implicit_dynamic', const=True),
help='run the test suite in optimize none with implicit dynamic'
' mode too (implies --test)')
option('--test-optimize-none-with-implicit-dynamic', toggle_true,
help='run the test suite in optimize none with implicit dynamic'
                ' mode too (implies --test)')
option('--long-test', toggle_true,
help='run the long test suite')
option('--stress-test', toggle_true,
help='run the stress test suite')
option('--host-test', toggle_true,
help='run executable tests on host devices (such as iOS or tvOS)')
option('--only-executable-test', toggle_true,
help='Only run executable tests. Does nothing if host-test is not '
'allowed')
option('--only-non-executable-test', toggle_true,
help='Only run non-executable tests.')
option('--test-paths', append,
type=argparse.ShellSplitType(),
help='run tests located in specific directories and/or files '
'(implies --test and/or --validation-test)')
option(['-B', '--benchmark'], store_true,
help='run the Swift Benchmark Suite after building')
option('--benchmark-num-o-iterations', store_int,
default=3,
help='if the Swift Benchmark Suite is run after building, run N '
'iterations with -O')
option('--benchmark-num-onone-iterations', store_int,
default=3,
help='if the Swift Benchmark Suite is run after building, run N '
'iterations with -Onone')
# We want to run the TSan (compiler-rt) libdispatch tests on Linux, where
# libdispatch is just another library and not available by default. To do
# so we build Clang/LLVM/libdispatch and use it to compile/run the TSan
# libdispatch tests.
option('--tsan-libdispatch-test', toggle_true,
help='Builds a new toolchain including the libdispatch C library. '
'Then re-builds the TSan runtime (compiler-rt) using this '
'freshly-built Clang and runs the TSan libdispatch tests.')
option('--skip-test-osx', toggle_false('test_osx'),
help='skip testing Swift stdlibs for Mac OS X')
option('--skip-test-linux', toggle_false('test_linux'),
help='skip testing Swift stdlibs for Linux')
option('--skip-test-freebsd', toggle_false('test_freebsd'),
help='skip testing Swift stdlibs for FreeBSD')
option('--skip-test-cygwin', toggle_false('test_cygwin'),
help='skip testing Swift stdlibs for Cygwin')
# -------------------------------------------------------------------------
in_group('Run build')
option('--build-swift-dynamic-stdlib', toggle_true,
default=True,
help='build dynamic variants of the Swift standard library')
option('--build-swift-static-stdlib', toggle_true,
help='build static variants of the Swift standard library')
option('--build-swift-dynamic-sdk-overlay', toggle_true,
default=True,
help='build dynamic variants of the Swift SDK overlay')
option('--build-swift-static-sdk-overlay', toggle_true,
help='build static variants of the Swift SDK overlay')
option('--build-swift-stdlib-unittest-extra', toggle_true,
help='Build optional StdlibUnittest components')
option(['-S', '--skip-build'], store_true,
help='generate build directory only without building')
option('--skip-build-linux', toggle_false('build_linux'),
help='skip building Swift stdlibs for Linux')
option('--skip-build-freebsd', toggle_false('build_freebsd'),
help='skip building Swift stdlibs for FreeBSD')
option('--skip-build-cygwin', toggle_false('build_cygwin'),
help='skip building Swift stdlibs for Cygwin')
option('--skip-build-osx', toggle_false('build_osx'),
help='skip building Swift stdlibs for MacOSX')
option('--skip-build-ios', toggle_false('build_ios'),
help='skip building Swift stdlibs for iOS')
option('--skip-build-ios-device', toggle_false('build_ios_device'),
help='skip building Swift stdlibs for iOS devices '
'(i.e. build simulators only)')
option('--skip-build-ios-simulator', toggle_false('build_ios_simulator'),
help='skip building Swift stdlibs for iOS simulator '
'(i.e. build devices only)')
option('--skip-build-tvos', toggle_false('build_tvos'),
help='skip building Swift stdlibs for tvOS')
option('--skip-build-tvos-device', toggle_false('build_tvos_device'),
help='skip building Swift stdlibs for tvOS devices '
'(i.e. build simulators only)')
option('--skip-build-tvos-simulator', toggle_false('build_tvos_simulator'),
help='skip building Swift stdlibs for tvOS simulator '
'(i.e. build devices only)')
option('--skip-build-watchos', toggle_false('build_watchos'),
help='skip building Swift stdlibs for watchOS')
option('--skip-build-watchos-device', toggle_false('build_watchos_device'),
help='skip building Swift stdlibs for watchOS devices '
'(i.e. build simulators only)')
option('--skip-build-watchos-simulator',
toggle_false('build_watchos_simulator'),
help='skip building Swift stdlibs for watchOS simulator '
'(i.e. build devices only)')
option('--skip-build-android', toggle_false('build_android'),
help='skip building Swift stdlibs for Android')
option('--skip-build-benchmarks', toggle_false('build_benchmarks'),
help='skip building Swift Benchmark Suite')
    option('--build-external-benchmarks', toggle_true,
           help='build the external Swift benchmarks in addition to the '
                'Swift Benchmark Suite')
# -------------------------------------------------------------------------
in_group('Skip testing specified targets')
option('--skip-test-ios',
toggle_false('test_ios'),
help='skip testing all iOS targets. Equivalent to specifying both '
'--skip-test-ios-simulator and --skip-test-ios-host')
option('--skip-test-ios-simulator',
toggle_false('test_ios_simulator'),
help='skip testing iOS simulator targets')
option('--skip-test-ios-32bit-simulator',
toggle_false('test_ios_32bit_simulator'),
help='skip testing iOS 32 bit simulator targets')
option('--skip-test-ios-host',
toggle_false('test_ios_host'),
help='skip testing iOS device targets on the host machine (the '
'phone itself)')
option('--skip-test-tvos',
toggle_false('test_tvos'),
help='skip testing all tvOS targets. Equivalent to specifying both '
'--skip-test-tvos-simulator and --skip-test-tvos-host')
option('--skip-test-tvos-simulator',
toggle_false('test_tvos_simulator'),
help='skip testing tvOS simulator targets')
option('--skip-test-tvos-host',
toggle_false('test_tvos_host'),
help='skip testing tvOS device targets on the host machine (the '
'TV itself)')
option('--skip-test-watchos',
toggle_false('test_watchos'),
           help='skip testing all watchOS targets. Equivalent to specifying both '
'--skip-test-watchos-simulator and --skip-test-watchos-host')
option('--skip-test-watchos-simulator',
toggle_false('test_watchos_simulator'),
help='skip testing watchOS simulator targets')
option('--skip-test-watchos-host',
toggle_false('test_watchos_host'),
help='skip testing watchOS device targets on the host machine (the '
'watch itself)')
option('--skip-test-android',
toggle_false('test_android'),
help='skip testing all Android targets.')
option('--skip-test-android-host',
toggle_false('test_android_host'),
help='skip testing Android device targets on the host machine (the '
'phone itself)')
option('--skip-clean-swiftpm', toggle_false('clean_swiftpm'),
help='skip cleaning up swiftpm')
option('--skip-clean-swift-driver', toggle_false('clean_swift_driver'),
help='skip cleaning up Swift driver')
option('--skip-test-swiftpm', toggle_false('test_swiftpm'),
help='skip testing swiftpm')
option('--skip-test-swift-driver', toggle_false('test_swift_driver'),
help='skip testing Swift driver')
option('--skip-test-swiftsyntax', toggle_false('test_swiftsyntax'),
help='skip testing SwiftSyntax')
option('--skip-test-indexstore-db', toggle_false('test_indexstoredb'),
help='skip testing indexstore-db')
option('--skip-test-sourcekit-lsp', toggle_false('test_sourcekitlsp'),
help='skip testing sourcekit-lsp')
option('--skip-test-playgroundsupport',
toggle_false('test_playgroundsupport'),
help='skip testing PlaygroundSupport')
option('--skip-test-skstresstester', toggle_false('test_skstresstester'),
help='skip testing the SourceKit Stress tester')
option('--skip-test-swiftformat', toggle_false('test_swiftformat'),
help='skip testing swift-format')
option('--skip-test-swiftevolve', toggle_false('test_swiftevolve'),
help='skip testing SwiftEvolve')
option('--skip-test-toolchain-benchmarks',
toggle_false('test_toolchainbenchmarks'),
help='skip testing toolchain benchmarks')
option('--skip-test-swift-inspect',
toggle_false('test_swift_inspect'),
help='skip testing swift_inspect')
# -------------------------------------------------------------------------
in_group('Build settings specific for LLVM')
option('--llvm-targets-to-build', store,
default='X86;ARM;AArch64;PowerPC;SystemZ;Mips',
help='LLVM target generators to build')
# -------------------------------------------------------------------------
in_group('Build settings for Android')
option('--android-ndk', store_path,
help='An absolute path to the NDK that will be used as a libc '
'implementation for Android builds')
option('--android-api-level', store,
default='21',
help='The Android API level to target when building for Android. '
'Currently only 21 or above is supported')
option('--android-ndk-gcc-version', store,
choices=['4.8', '4.9'],
default='4.9',
help='The GCC version to use when building for Android. Currently '
'only 4.9 is supported. %(default)s is also the default '
'value. This option may be used when experimenting with '
'versions of the Android NDK not officially supported by '
'Swift')
option('--android-icu-uc', store_path,
help='Path to libicuuc.so')
option('--android-icu-uc-include', store_path,
help='Path to a directory containing headers for libicuuc')
option('--android-icu-i18n', store_path,
help='Path to libicui18n.so')
option('--android-icu-i18n-include', store_path,
           help='Path to a directory containing headers for libicui18n')
option('--android-icu-data', store_path,
help='Path to libicudata.so')
option('--android-deploy-device-path', store_path,
default=android.adb.commands.DEVICE_TEMP_DIR,
help='Path on an Android device to which built Swift stdlib '
'products will be deployed. If running host tests, specify '
'the "{}" directory.'.format(
android.adb.commands.DEVICE_TEMP_DIR))
option('--android-arch', store,
choices=['armv7', 'aarch64'],
default='armv7',
help='The Android target architecture when building for Android. '
'Currently only armv7 and aarch64 are supported. '
'%(default)s is the default.')
# -------------------------------------------------------------------------
in_group('Experimental language features')
option('--enable-experimental-differentiable-programming', toggle_true,
default=True,
help='Enable experimental Swift differentiable programming language'
' features.')
option('--enable-experimental-concurrency', toggle_true,
default=True,
help='Enable experimental Swift concurrency model.')
# -------------------------------------------------------------------------
in_group('Unsupported options')
option('--build-jobs', unsupported)
option('--common-cmake-options', unsupported)
option('--only-execute', unsupported)
option('--skip-test-optimize-for-size', unsupported)
option('--skip-test-optimize-none-with-implicit-dynamic', unsupported)
option('--skip-test-optimized', unsupported)
# -------------------------------------------------------------------------
in_group('Build-script-impl arguments (for disambiguation)')
# We need to represent these options so that we can skip installing them if
# the user is running in install-all mode.
option('--skip-build-cmark', toggle_false('build_cmark'),
help='skip building cmark')
option('--skip-build-llvm', toggle_false('build_llvm'),
help='skip building llvm')
option('--skip-build-swift', toggle_false('build_swift'),
help='skip building swift')
# We need to list --skip-test-swift explicitly because otherwise argparse
# will auto-expand arguments like --skip-test-swift to the only known
# argument --skip-test-swiftevolve.
# These arguments are forwarded to impl_args in migration.py
option('--install-swift', toggle_true('impl_install_swift'))
option('--skip-test-swift', toggle_true('impl_skip_test_swift'))
# -------------------------------------------------------------------------
return builder.build()
# ----------------------------------------------------------------------------
USAGE = """
%(prog)s [-h | --help] [OPTION ...]
%(prog)s --preset=NAME [SUBSTITUTION ...]
"""
DESCRIPTION = """
Use this tool to build, test, and prepare binary distribution archives of Swift
and related tools.
Builds Swift (and, optionally, LLDB), incrementally, optionally
testing it thereafter. Different build configurations are maintained in
parallel.
"""
EPILOG = """
Using option presets:
--preset-file=PATH load presets from the specified file
--preset=NAME use the specified option preset
The preset mode is mutually exclusive with other options. It is not
possible to add ad-hoc customizations to a preset. This is a deliberate
design decision. (Rationale: a preset is a certain important set of
options that we want to keep in a centralized location. If you need to
customize it, you should create another preset in a centralized location,
rather than scattering the knowledge about the build across the system.)
Presets support substitutions for controlled customizations. Substitutions
are defined in the preset file. Values for substitutions are supplied
using the name=value syntax on the command line.
Any arguments not listed are forwarded directly to Swift's
'build-script-impl'. See that script's help for details. The listed
build-script-impl arguments are only for disambiguation in the argument parser.
Environment variables
---------------------
This script respects a few environment variables, should you
choose to set them:
SWIFT_SOURCE_ROOT: a directory containing the source for LLVM, Clang, Swift.
If this script is located in a Swift
source directory, the location of SWIFT_SOURCE_ROOT will be
inferred if the variable is not set.
'build-script' expects the sources to be laid out in the following way:
$SWIFT_SOURCE_ROOT/llvm-project
/swift
/llbuild (optional)
/swiftpm (optional, requires llbuild)
/swift-syntax (optional, requires swiftpm)
/swift-stress-tester (optional,
requires swift-syntax)
/swift-corelibs-xctest (optional)
/swift-corelibs-foundation (optional)
/swift-corelibs-libdispatch (optional)
/icu (optional)
SWIFT_BUILD_ROOT: a directory in which to create out-of-tree builds.
Defaults to "$SWIFT_SOURCE_ROOT/build/".
Preparing to run this script
----------------------------
See README.md for instructions on cloning Swift subprojects, including the
additional repositories needed if you intend to use the -l, -L, --lldb, or
--debug-lldb options.
That's it; you're ready to go!
Examples
--------
Given the above layout of sources, the simplest invocation of 'build-script' is
just:
[~/src/s]$ ./swift/utils/build-script
This builds LLVM, Clang, Swift and Swift standard library in debug mode.
All builds are incremental. To incrementally build changed files, repeat the
same 'build-script' command.
Typical uses of 'build-script'
------------------------------
To build everything with optimization without debug information:
[~/src/s]$ ./swift/utils/build-script -R
To run tests, add '-t':
[~/src/s]$ ./swift/utils/build-script -R -t
To run normal tests and validation tests, add '-T':
[~/src/s]$ ./swift/utils/build-script -R -T
To build LLVM+Clang with optimization without debug information, and a
debuggable Swift compiler:
[~/src/s]$ ./swift/utils/build-script -R --debug-swift
To build a debuggable Swift standard library:
[~/src/s]$ ./swift/utils/build-script -R --debug-swift-stdlib
iOS build targets are always configured and present, but are not built by
default. To build the standard library for OS X, iOS simulator and iOS device:
[~/src/s]$ ./swift/utils/build-script -R -i
To run OS X and iOS tests that don't require a device:
[~/src/s]$ ./swift/utils/build-script -R -i -t
To use 'make' instead of 'ninja', use '-m':
[~/src/s]$ ./swift/utils/build-script -m -R
To create Xcode projects that can build Swift, use '-x':
[~/src/s]$ ./swift/utils/build-script -x -R
Preset mode in build-script
---------------------------
All buildbots and automated environments use 'build-script' in *preset mode*.
In preset mode, the command line only specifies the preset name and allows
limited customization (extra output paths). The actual options come from
the selected preset in 'utils/build-presets.ini'. For example, to build like
the incremental buildbot, run:
[~/src/s]$ ./swift/utils/build-script --preset=buildbot_incremental
To build with AddressSanitizer:
[~/src/s]$ ./swift/utils/build-script --preset=asan
To build a root for Xcode XYZ, '/tmp/xcode-xyz-root.tar.gz':
[~/src/s]$ ./swift/utils/build-script --preset=buildbot_BNI_internal_XYZ \\
    install_destdir="/tmp/install" \\
    install_symroot="/tmp/symroot" \\
installable_package="/tmp/xcode-xyz-root.tar.gz"
If you have your own favorite set of options, you can create your own, local,
preset. For example, let's create a preset called 'ds' (which stands for
Debug Swift):
$ cat > ~/.swift-build-presets
[preset: ds]
release
debug-swift
debug-swift-stdlib
test
build-subdir=ds
To use it, specify the '--preset=' argument:
[~/src/s]$ ./swift/utils/build-script --preset=ds
./swift/utils/build-script: using preset 'ds', which expands to
./swift/utils/build-script --release --debug-swift --debug-swift-stdlib \
--test
--build-subdir=ds --
...
Existing presets can be found in `utils/build-presets.ini`
Philosophy
----------
While you can invoke CMake directly to build Swift, this tool will save you
time by taking away the mechanical parts of the process, providing you controls
for the important options.
For all automated build environments, this tool is regarded as *the* *only* way
to build Swift. This is not a technical limitation of the Swift build system.
It is a policy decision aimed at making the builds uniform across all
environments and easily reproducible by engineers who are not familiar with the
details of the setups of other systems or automated environments.
"""
|
atrick/swift
|
utils/build_swift/build_swift/driver_arguments.py
|
Python
|
apache-2.0
| 54,095
|
# coding=utf-8
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Train a population of CNN using threads."""
from concurrent import futures
import os
from absl import app
from absl import flags
from flax.metrics import tensorboard
import haiku as hk
import jax
from learned_optimization import filesystem
from learned_optimization.population import population as population_mod
from learned_optimization.population.examples.simple_cnn import common
from learned_optimization.population.mutators import winner_take_all_genetic
import numpy as onp
import optax
flags.DEFINE_string("train_log_dir", None, "Path to save data to.")
FLAGS = flags.FLAGS
def train_one(worker_id):
"""Train a single worker of the population."""
train_log_dir = os.path.join(FLAGS.train_log_dir, str(worker_id))
filesystem.make_dirs(train_log_dir)
summary_writer = tensorboard.SummaryWriter(train_log_dir)
tr_iterator, te_iterator = common.get_data_iterators()
population = population_mod.get_courier_client("population")
meta_params = None
gen_id = None
step = 0
state_path = None
key = jax.random.PRNGKey(0)
net = hk.transform(common.hk_forward_fn)
opt = optax.adam(1e-3)
model_state = None
for _ in range(10000):
batch = next(tr_iterator)
new_data = population.maybe_get_worker_data(worker_id, gen_id, step,
state_path, meta_params)
if new_data:
state_path = new_data.params
meta_params = new_data.meta_params
gen_id = new_data.generation_id
step = new_data.step
if state_path is None:
params = net.init(key, next(tr_iterator))
opt_state = opt.init(params)
model_state = (params, opt_state)
else:
params, opt_state = common.load_state(state_path, model_state)
if step % 10 == 0:
print(f"{worker_id}]] Using meta params: {meta_params}")
ls = []
for _ in range(5):
batch = next(te_iterator)
key, key1 = jax.random.split(key)
l = common.loss(params, key1, batch)
ls.append(l)
mean_l = onp.mean(ls)
# save to disk
model_state = (params, opt_state)
state_path = os.path.join(train_log_dir, f"{step}__{gen_id}.model")
common.save_state(state_path, model_state)
population.set_eval(worker_id, gen_id, step, state_path, mean_l)
print(f"{worker_id} ]] step={step}, loss={l} path={state_path}")
summary_writer.scalar("loss", l, step=step)
summary_writer.scalar(
"learning_rate", meta_params["learning_rate"], step=step)
summary_writer.scalar(
"log_learning_rate", onp.log(meta_params["learning_rate"]), step=step)
summary_writer.flush()
params, opt_state, l = common.update(params, key, opt_state, batch,
meta_params)
step += 1
def main(_):
def mutate_fn(meta_params):
offset = onp.random.normal() * 0.5
loglr = onp.log(meta_params["learning_rate"])
return {"learning_rate": onp.exp(loglr + offset)}
num_workers = 5
mutator = winner_take_all_genetic.WinnerTakeAllGenetic(mutate_fn, 300)
initial_population = [{"learning_rate": 1e-3} for _ in range(num_workers)]
initial_population = [mutate_fn(m) for m in initial_population]
population = population_mod.PopulationController(initial_population, mutator)
server = population_mod.start_courier_server("population", population) # pylint: disable=unused-variable
with futures.ThreadPoolExecutor(num_workers) as executor:
futures_list = []
for i in range(num_workers):
futures_list.append(executor.submit(train_one, worker_id=i))
for f in futures.as_completed(futures_list):
print("Done", i)
f.result()
if __name__ == "__main__":
flags.mark_flag_as_required("train_log_dir")
app.run(main)
|
google/learned_optimization
|
learned_optimization/population/examples/simple_cnn/train_threads.py
|
Python
|
apache-2.0
| 4,362
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import contextlib
import datetime
import errno
import functools
import hashlib
import inspect
import os
import pyclbr
import random
import re
import shlex
import shutil
import socket
import struct
import sys
import tempfile
import time
import uuid
import weakref
from xml.sax import saxutils
from eventlet import event
from eventlet.green import subprocess
from eventlet import greenthread
from eventlet import semaphore
import netaddr
from nova.common import deprecated
from nova import exception
from nova import flags
from nova.openstack.common import cfg
from nova.openstack.common import excutils
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
FLAGS.register_opt(
cfg.BoolOpt('disable_process_locking', default=False,
help='Whether to disable inter-process locks'))
def vpn_ping(address, port, timeout=0.05, session_id=None):
"""Sends a vpn negotiation packet and returns the server session.
Returns False on a failure. Basic packet structure is below.
Client packet (14 bytes)::
0 1 8 9 13
+-+--------+-----+
|x| cli_id |?????|
+-+--------+-----+
x = packet identifier 0x38
cli_id = 64 bit identifier
? = unknown, probably flags/padding
Server packet (26 bytes)::
0 1 8 9 13 14 21 2225
+-+--------+-----+--------+----+
|x| srv_id |?????| cli_id |????|
+-+--------+-----+--------+----+
x = packet identifier 0x40
cli_id = 64 bit identifier
? = unknown, probably flags/padding
bit 9 was 1 and the rest were 0 in testing
"""
if session_id is None:
session_id = random.randint(0, 0xffffffffffffffff)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
data = struct.pack('!BQxxxxx', 0x38, session_id)
sock.sendto(data, (address, port))
sock.settimeout(timeout)
try:
received = sock.recv(2048)
except socket.timeout:
return False
finally:
sock.close()
fmt = '!BQxxxxxQxxxx'
if len(received) != struct.calcsize(fmt):
print struct.calcsize(fmt)
return False
(identifier, server_sess, client_sess) = struct.unpack(fmt, received)
if identifier == 0x40 and client_sess == session_id:
return server_sess
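# Illustrative usage of vpn_ping (address and port are hypothetical):
#     session = vpn_ping('192.0.2.10', 1194)
#     if session:
#         LOG.debug(_('VPN server replied with session %s'), session)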
def execute(*cmd, **kwargs):
"""Helper method to execute command with optional retry.
If you add a run_as_root=True command, don't forget to add the
corresponding filter to etc/nova/rootwrap.d !
:param cmd: Passed to subprocess.Popen.
:param process_input: Send to opened process.
:param check_exit_code: Single bool, int, or list of allowed exit
codes. Defaults to [0]. Raise
exception.ProcessExecutionError unless
program exits with one of these code.
:param delay_on_retry: True | False. Defaults to True. If set to
True, wait a short amount of time
before retrying.
:param attempts: How many times to retry cmd.
:param run_as_root: True | False. Defaults to False. If set to True,
the command is prefixed by the command specified
in the root_helper FLAG.
:raises exception.NovaException: on receiving unknown arguments
:raises exception.ProcessExecutionError:
:returns: a tuple, (stdout, stderr) from the spawned process, or None if
the command fails.
"""
process_input = kwargs.pop('process_input', None)
check_exit_code = kwargs.pop('check_exit_code', [0])
ignore_exit_code = False
if isinstance(check_exit_code, bool):
ignore_exit_code = not check_exit_code
check_exit_code = [0]
elif isinstance(check_exit_code, int):
check_exit_code = [check_exit_code]
delay_on_retry = kwargs.pop('delay_on_retry', True)
attempts = kwargs.pop('attempts', 1)
run_as_root = kwargs.pop('run_as_root', False)
shell = kwargs.pop('shell', False)
if len(kwargs):
raise exception.NovaException(_('Got unknown keyword args '
'to utils.execute: %r') % kwargs)
if run_as_root:
if FLAGS.rootwrap_config is None or FLAGS.root_helper != 'sudo':
deprecated.warn(_('The root_helper option (which lets you specify '
'a root wrapper different from nova-rootwrap, '
'and defaults to using sudo) is now deprecated. '
'You should use the rootwrap_config option '
'instead.'))
if (FLAGS.rootwrap_config is not None):
cmd = ['sudo', 'nova-rootwrap', FLAGS.rootwrap_config] + list(cmd)
else:
cmd = shlex.split(FLAGS.root_helper) + list(cmd)
cmd = map(str, cmd)
while attempts > 0:
attempts -= 1
try:
LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
_PIPE = subprocess.PIPE # pylint: disable=E1101
obj = subprocess.Popen(cmd,
stdin=_PIPE,
stdout=_PIPE,
stderr=_PIPE,
close_fds=True,
shell=shell)
result = None
if process_input is not None:
result = obj.communicate(process_input)
else:
result = obj.communicate()
obj.stdin.close() # pylint: disable=E1101
_returncode = obj.returncode # pylint: disable=E1101
LOG.debug(_('Result was %s') % _returncode)
if not ignore_exit_code and _returncode not in check_exit_code:
(stdout, stderr) = result
raise exception.ProcessExecutionError(
exit_code=_returncode,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
return result
except exception.ProcessExecutionError:
if not attempts:
raise
else:
LOG.debug(_('%r failed. Retrying.'), cmd)
if delay_on_retry:
greenthread.sleep(random.randint(20, 200) / 100.0)
finally:
# NOTE(termie): this appears to be necessary to let the subprocess
# call clean something up in between calls, without
# it two execute calls in a row hangs the second one
greenthread.sleep(0)
def trycmd(*args, **kwargs):
"""
A wrapper around execute() to more easily handle warnings and errors.
Returns an (out, err) tuple of strings containing the output of
the command's stdout and stderr. If 'err' is not empty then the
command can be considered to have failed.
:discard_warnings True | False. Defaults to False. If set to True,
then for succeeding commands, stderr is cleared
"""
discard_warnings = kwargs.pop('discard_warnings', False)
try:
out, err = execute(*args, **kwargs)
failed = False
except exception.ProcessExecutionError, exn:
out, err = '', str(exn)
failed = True
if not failed and discard_warnings and err:
# Handle commands that output to stderr but otherwise succeed
err = ''
return out, err
def ssh_execute(ssh, cmd, process_input=None,
addl_env=None, check_exit_code=True):
LOG.debug(_('Running cmd (SSH): %s'), ' '.join(cmd))
if addl_env:
raise exception.NovaException(_('Environment not supported over SSH'))
if process_input:
# This is (probably) fixable if we need it...
msg = _('process_input not supported over SSH')
raise exception.NovaException(msg)
stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd)
channel = stdout_stream.channel
#stdin.write('process_input would go here')
#stdin.flush()
# NOTE(justinsb): This seems suspicious...
# ...other SSH clients have buffering issues with this approach
stdout = stdout_stream.read()
stderr = stderr_stream.read()
stdin_stream.close()
exit_status = channel.recv_exit_status()
# exit_status == -1 if no exit code was returned
if exit_status != -1:
LOG.debug(_('Result was %s') % exit_status)
if check_exit_code and exit_status != 0:
raise exception.ProcessExecutionError(exit_code=exit_status,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
return (stdout, stderr)
def novadir():
import nova
return os.path.abspath(nova.__file__).split('nova/__init__.py')[0]
def debug(arg):
LOG.debug(_('debug in callback: %s'), arg)
return arg
def generate_uid(topic, size=8):
characters = '01234567890abcdefghijklmnopqrstuvwxyz'
choices = [random.choice(characters) for _x in xrange(size)]
return '%s-%s' % (topic, ''.join(choices))
# Default symbols to use for passwords. Avoids visually confusing characters.
# ~6 bits per symbol
DEFAULT_PASSWORD_SYMBOLS = ('23456789', # Removed: 0,1
'ABCDEFGHJKLMNPQRSTUVWXYZ', # Removed: I, O
'abcdefghijkmnopqrstuvwxyz') # Removed: l
# ~5 bits per symbol
EASIER_PASSWORD_SYMBOLS = ('23456789', # Removed: 0, 1
'ABCDEFGHJKLMNPQRSTUVWXYZ') # Removed: I, O
def last_completed_audit_period(unit=None, before=None):
"""This method gives you the most recently *completed* audit period.
arguments:
            unit: string, one of 'hour', 'day', 'month', 'year'
Periods normally begin at the beginning (UTC) of the
period unit (So a 'day' period begins at midnight UTC,
a 'month' unit on the 1st, a 'year' on Jan, 1)
unit string may be appended with an optional offset
like so: 'day@18' This will begin the period at 18:00
UTC. 'month@15' starts a monthly period on the 15th,
and year@3 begins a yearly one on March 1st.
before: Give the audit period most recently completed before
<timestamp>. Defaults to now.
returns: 2 tuple of datetimes (begin, end)
The begin timestamp of this audit period is the same as the
end of the previous."""
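    # Illustrative results, assuming "now" is 2012-07-10 14:00 UTC:
    #     last_completed_audit_period('day')      -> (Jul  9 00:00, Jul 10 00:00)
    #     last_completed_audit_period('month@15') -> (May 15 00:00, Jun 15 00:00)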
if not unit:
unit = FLAGS.instance_usage_audit_period
offset = 0
if '@' in unit:
unit, offset = unit.split("@", 1)
offset = int(offset)
if before is not None:
rightnow = before
else:
rightnow = timeutils.utcnow()
if unit not in ('month', 'day', 'year', 'hour'):
raise ValueError('Time period must be hour, day, month or year')
if unit == 'month':
if offset == 0:
offset = 1
end = datetime.datetime(day=offset,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
year = rightnow.year
if 1 >= rightnow.month:
year -= 1
month = 12 + (rightnow.month - 1)
else:
month = rightnow.month - 1
end = datetime.datetime(day=offset,
month=month,
year=year)
year = end.year
if 1 >= end.month:
year -= 1
month = 12 + (end.month - 1)
else:
month = end.month - 1
begin = datetime.datetime(day=offset, month=month, year=year)
elif unit == 'year':
if offset == 0:
offset = 1
end = datetime.datetime(day=1, month=offset, year=rightnow.year)
if end >= rightnow:
end = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 2)
else:
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
elif unit == 'day':
end = datetime.datetime(hour=offset,
day=rightnow.day,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
end = end - datetime.timedelta(days=1)
begin = end - datetime.timedelta(days=1)
elif unit == 'hour':
end = rightnow.replace(minute=offset, second=0, microsecond=0)
if end >= rightnow:
end = end - datetime.timedelta(hours=1)
begin = end - datetime.timedelta(hours=1)
return (begin, end)
def generate_password(length=20, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
"""Generate a random password from the supplied symbol groups.
At least one symbol from each group will be included. Unpredictable
results if length is less than the number of symbol groups.
Believed to be reasonably secure (with a reasonable password length!)
"""
r = random.SystemRandom()
# NOTE(jerdfelt): Some password policies require at least one character
# from each group of symbols, so start off with one random character
# from each symbol group
password = [r.choice(s) for s in symbolgroups]
# If length < len(symbolgroups), the leading characters will only
# be from the first length groups. Try our best to not be predictable
# by shuffling and then truncating.
r.shuffle(password)
password = password[:length]
length -= len(password)
# then fill with random characters from all symbol groups
symbols = ''.join(symbolgroups)
password.extend([r.choice(symbols) for _i in xrange(length)])
# finally shuffle to ensure first x characters aren't from a
# predictable group
r.shuffle(password)
return ''.join(password)
def last_octet(address):
return int(address.split('.')[-1])
def get_my_linklocal(interface):
try:
if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface)
condition = '\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
links = [re.search(condition, x) for x in if_str[0].split('\n')]
address = [w.group(1) for w in links if w is not None]
if address[0] is not None:
return address[0]
else:
msg = _('Link Local address is not found.:%s') % if_str
raise exception.NovaException(msg)
except Exception as ex:
msg = _("Couldn't get Link Local IP of %(interface)s"
" :%(ex)s") % locals()
raise exception.NovaException(msg)
def parse_mailmap(mailmap='.mailmap'):
mapping = {}
if os.path.exists(mailmap):
fp = open(mailmap, 'r')
for l in fp:
l = l.strip()
if not l.startswith('#') and ' ' in l:
canonical_email, alias = l.split(' ')
mapping[alias.lower()] = canonical_email.lower()
return mapping
def str_dict_replace(s, mapping):
for s1, s2 in mapping.iteritems():
s = s.replace(s1, s2)
return s
class LazyPluggable(object):
"""A pluggable backend loaded lazily based on some value."""
def __init__(self, pivot, **backends):
self.__backends = backends
self.__pivot = pivot
self.__backend = None
def __get_backend(self):
if not self.__backend:
backend_name = FLAGS[self.__pivot]
if backend_name not in self.__backends:
msg = _('Invalid backend: %s') % backend_name
raise exception.NovaException(msg)
backend = self.__backends[backend_name]
if isinstance(backend, tuple):
name = backend[0]
fromlist = backend[1]
else:
name = backend
fromlist = backend
self.__backend = __import__(name, None, None, fromlist)
LOG.debug(_('backend %s'), self.__backend)
return self.__backend
def __getattr__(self, key):
backend = self.__get_backend()
return getattr(backend, key)
class LoopingCallDone(Exception):
"""Exception to break out and stop a LoopingCall.
The poll-function passed to LoopingCall can raise this exception to
break out of the loop normally. This is somewhat analogous to
StopIteration.
An optional return-value can be included as the argument to the exception;
this return-value will be returned by LoopingCall.wait()
"""
def __init__(self, retvalue=True):
""":param retvalue: Value that LoopingCall.wait() should return."""
self.retvalue = retvalue
class LoopingCall(object):
def __init__(self, f=None, *args, **kw):
self.args = args
self.kw = kw
self.f = f
self._running = False
def start(self, interval, initial_delay=None):
self._running = True
done = event.Event()
def _inner():
if initial_delay:
greenthread.sleep(initial_delay)
try:
while self._running:
self.f(*self.args, **self.kw)
if not self._running:
break
greenthread.sleep(interval)
except LoopingCallDone, e:
self.stop()
done.send(e.retvalue)
except Exception:
LOG.exception(_('in looping call'))
done.send_exception(*sys.exc_info())
return
else:
done.send(True)
self.done = done
greenthread.spawn(_inner)
return self.done
def stop(self):
self._running = False
def wait(self):
return self.done.wait()
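# Illustrative usage of LoopingCall (poll_status is a hypothetical callable):
#     timer = LoopingCall(poll_status, volume_id)
#     timer.start(interval=1.0).wait()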
def xhtml_escape(value):
"""Escapes a string so it is valid within XML or XHTML.
"""
    return saxutils.escape(value, {'"': '&quot;', "'": '&apos;'})
def utf8(value):
"""Try to turn a string into utf-8 if possible.
Code is directly from the utf8 function in
http://github.com/facebook/tornado/blob/master/tornado/escape.py
"""
if isinstance(value, unicode):
return value.encode('utf-8')
assert isinstance(value, str)
return value
class _InterProcessLock(object):
"""Lock implementation which allows multiple locks, working around
issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
not require any cleanup. Since the lock is always held on a file
descriptor rather than outside of the process, the lock gets dropped
automatically if the process crashes, even if __exit__ is not executed.
There are no guarantees regarding usage by multiple green threads in a
single process here. This lock works only between processes. Exclusive
access between local threads should be achieved using the semaphores
in the @synchronized decorator.
Note these locks are released when the descriptor is closed, so it's not
safe to close the file descriptor while another green thread holds the
lock. Just opening and closing the lock file can break synchronisation,
so lock files must be accessed only using this abstraction.
"""
def __init__(self, name):
self.lockfile = None
self.fname = name
def __enter__(self):
self.lockfile = open(self.fname, 'w')
while True:
try:
# Using non-blocking locks since green threads are not
# patched to deal with blocking locking calls.
# Also upon reading the MSDN docs for locking(), it seems
# to have a laughable 10 attempts "blocking" mechanism.
self.trylock()
return self
except IOError, e:
if e.errno in (errno.EACCES, errno.EAGAIN):
# external locks synchronise things like iptables
# updates - give it some time to prevent busy spinning
time.sleep(0.01)
else:
raise
def __exit__(self, exc_type, exc_val, exc_tb):
try:
self.unlock()
self.lockfile.close()
except IOError:
LOG.exception(_("Could not release the acquired lock `%s`")
% self.fname)
def trylock(self):
raise NotImplementedError()
def unlock(self):
raise NotImplementedError()
class _WindowsLock(_InterProcessLock):
def trylock(self):
msvcrt.locking(self.lockfile, msvcrt.LK_NBLCK, 1)
def unlock(self):
msvcrt.locking(self.lockfile, msvcrt.LK_UNLCK, 1)
class _PosixLock(_InterProcessLock):
def trylock(self):
fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
def unlock(self):
fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
if os.name == 'nt':
import msvcrt
InterProcessLock = _WindowsLock
else:
import fcntl
InterProcessLock = _PosixLock
_semaphores = weakref.WeakValueDictionary()
def synchronized(name, external=False, lock_path=None):
"""Synchronization decorator.
Decorating a method like so::
@synchronized('mylock')
def foo(self, *args):
...
    ensures that only one thread will execute the foo method at a time.
Different methods can share the same lock::
@synchronized('mylock')
def foo(self, *args):
...
@synchronized('mylock')
def bar(self, *args):
...
This way only one of either foo or bar can be executing at a time.
The external keyword argument denotes whether this lock should work across
    multiple processes. This means that if two different workers both run a
    method decorated with @synchronized('mylock', external=True), only one
of them will execute at a time.
The lock_path keyword argument is used to specify a special location for
external lock files to live. If nothing is set, then FLAGS.lock_path is
used as a default.
"""
def wrap(f):
@functools.wraps(f)
def inner(*args, **kwargs):
# NOTE(soren): If we ever go natively threaded, this will be racy.
# See http://stackoverflow.com/questions/5390569/dyn
# amically-allocating-and-destroying-mutexes
sem = _semaphores.get(name, semaphore.Semaphore())
if name not in _semaphores:
# this check is not racy - we're already holding ref locally
# so GC won't remove the item and there was no IO switch
# (only valid in greenthreads)
_semaphores[name] = sem
with sem:
LOG.debug(_('Got semaphore "%(lock)s" for method '
'"%(method)s"...'), {'lock': name,
'method': f.__name__})
if external and not FLAGS.disable_process_locking:
LOG.debug(_('Attempting to grab file lock "%(lock)s" for '
'method "%(method)s"...'),
{'lock': name, 'method': f.__name__})
cleanup_dir = False
# We need a copy of lock_path because it is non-local
local_lock_path = lock_path
if not local_lock_path:
local_lock_path = FLAGS.lock_path
if not local_lock_path:
cleanup_dir = True
local_lock_path = tempfile.mkdtemp()
if not os.path.exists(local_lock_path):
cleanup_dir = True
ensure_tree(local_lock_path)
# NOTE(mikal): the lock name cannot contain directory
# separators
safe_name = name.replace(os.sep, '_')
lock_file_path = os.path.join(local_lock_path,
'nova-%s' % safe_name)
try:
lock = InterProcessLock(lock_file_path)
with lock:
LOG.debug(_('Got file lock "%(lock)s" for '
'method "%(method)s"...'),
{'lock': name, 'method': f.__name__})
retval = f(*args, **kwargs)
finally:
# NOTE(vish): This removes the tempdir if we needed
# to create one. This is used to cleanup
# the locks left behind by unit tests.
if cleanup_dir:
shutil.rmtree(local_lock_path)
else:
retval = f(*args, **kwargs)
return retval
return inner
return wrap
def delete_if_exists(pathname):
"""delete a file, but ignore file not found error"""
try:
os.unlink(pathname)
except OSError as e:
if e.errno == errno.ENOENT:
return
else:
raise
def get_from_path(items, path):
"""Returns a list of items matching the specified path.
Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each item
in items, looks up items[prop1][prop2][prop3]. Like XPath, if any of the
intermediate results are lists it will treat each list item individually.
    A 'None' in items or any child expressions will be ignored; this function
    will not throw because of a None (anywhere) in items. The returned list
    will contain no None values.
"""
if path is None:
raise exception.NovaException('Invalid mini_xpath')
(first_token, sep, remainder) = path.partition('/')
if first_token == '':
raise exception.NovaException('Invalid mini_xpath')
results = []
if items is None:
return results
if not isinstance(items, list):
# Wrap single objects in a list
items = [items]
for item in items:
if item is None:
continue
get_method = getattr(item, 'get', None)
if get_method is None:
continue
child = get_method(first_token)
if child is None:
continue
if isinstance(child, list):
# Flatten intermediate lists
for x in child:
results.append(x)
else:
results.append(child)
if not sep:
# No more tokens
return results
else:
return get_from_path(results, remainder)
def flatten_dict(dict_, flattened=None):
"""Recursively flatten a nested dictionary."""
flattened = flattened or {}
for key, value in dict_.iteritems():
if hasattr(value, 'iteritems'):
flatten_dict(value, flattened)
else:
flattened[key] = value
return flattened
def partition_dict(dict_, keys):
"""Return two dicts, one with `keys` the other with everything else."""
intersection = {}
difference = {}
for key, value in dict_.iteritems():
if key in keys:
intersection[key] = value
else:
difference[key] = value
return intersection, difference
def map_dict_keys(dict_, key_map):
"""Return a dict in which the dictionaries keys are mapped to new keys."""
mapped = {}
for key, value in dict_.iteritems():
mapped_key = key_map[key] if key in key_map else key
mapped[mapped_key] = value
return mapped
def subset_dict(dict_, keys):
"""Return a dict that only contains a subset of keys."""
subset = partition_dict(dict_, keys)[0]
return subset
def diff_dict(orig, new):
"""
Return a dict describing how to change orig to new. The keys
correspond to values that have changed; the value will be a list
of one or two elements. The first element of the list will be
either '+' or '-', indicating whether the key was updated or
deleted; if the key was updated, the list will contain a second
element, giving the updated value.
"""
# Figure out what keys went away
result = dict((k, ['-']) for k in set(orig.keys()) - set(new.keys()))
# Compute the updates
for key, value in new.items():
if key not in orig or value != orig[key]:
result[key] = ['+', value]
return result
def check_isinstance(obj, cls):
"""Checks that obj is of type cls, and lets PyLint infer types."""
if isinstance(obj, cls):
return obj
raise Exception(_('Expected object of type: %s') % (str(cls)))
def parse_server_string(server_str):
"""
    Parses the given server_string and returns a (host, port) tuple.
    If it's not a combination of host part and port, the port element
    is an empty string. If the input is an invalid expression, a tuple of
    empty strings is returned.
"""
try:
# First of all, exclude pure IPv6 address (w/o port).
if netaddr.valid_ipv6(server_str):
return (server_str, '')
# Next, check if this is IPv6 address with a port number combination.
if server_str.find("]:") != -1:
(address, port) = server_str.replace('[', '', 1).split(']:')
return (address, port)
# Third, check if this is a combination of an address and a port
if server_str.find(':') == -1:
return (server_str, '')
# This must be a combination of an address and a port
(address, port) = server_str.split(':')
return (address, port)
except Exception:
LOG.error(_('Invalid server_string: %s'), server_str)
return ('', '')
def gen_uuid():
return uuid.uuid4()
def is_uuid_like(val):
"""For our purposes, a UUID is a string in canonical form:
aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
"""
try:
uuid.UUID(val)
return True
except (TypeError, ValueError, AttributeError):
return False
def bool_from_str(val):
"""Convert a string representation of a bool into a bool value"""
if not val:
return False
try:
return True if int(val) else False
except ValueError:
return val.lower() == 'true' or \
val.lower() == 'yes' or \
val.lower() == 'y'
def is_valid_boolstr(val):
"""Check if the provided string is a valid bool string or not. """
val = str(val).lower()
return val == 'true' or val == 'false' or \
val == 'yes' or val == 'no' or \
val == 'y' or val == 'n' or \
val == '1' or val == '0'
def is_valid_ipv4(address):
"""valid the address strictly as per format xxx.xxx.xxx.xxx.
where xxx is a value between 0 and 255.
"""
parts = address.split(".")
if len(parts) != 4:
return False
for item in parts:
try:
if not 0 <= int(item) <= 255:
return False
except ValueError:
return False
return True
def is_valid_cidr(address):
"""Check if the provided ipv4 or ipv6 address is a valid
CIDR address or not"""
try:
# Validate the correct CIDR Address
netaddr.IPNetwork(address)
except netaddr.core.AddrFormatError:
return False
except UnboundLocalError:
# NOTE(MotoKen): work around bug in netaddr 0.7.5 (see detail in
# https://github.com/drkjam/netaddr/issues/2)
return False
    # The validation above only partially verifies the /xx part,
    # so verify it explicitly here.
ip_segment = address.split('/')
if (len(ip_segment) <= 1 or
ip_segment[1] == ''):
return False
return True
def monkey_patch():
""" If the Flags.monkey_patch set as True,
this function patches a decorator
for all functions in specified modules.
You can set decorators for each modules
using FLAGS.monkey_patch_modules.
The format is "Module path:Decorator function".
Example: 'nova.api.ec2.cloud:nova.notifier.api.notify_decorator'
Parameters of the decorator is as follows.
(See nova.notifier.api.notify_decorator)
name - name of the function
function - object of the function
"""
    # If FLAGS.monkey_patch is not True, this function does nothing.
if not FLAGS.monkey_patch:
return
# Get list of modules and decorators
for module_and_decorator in FLAGS.monkey_patch_modules:
module, decorator_name = module_and_decorator.split(':')
# import decorator function
decorator = importutils.import_class(decorator_name)
__import__(module)
# Retrieve module information using pyclbr
module_data = pyclbr.readmodule_ex(module)
for key in module_data.keys():
# set the decorator for the class methods
if isinstance(module_data[key], pyclbr.Class):
clz = importutils.import_class("%s.%s" % (module, key))
for method, func in inspect.getmembers(clz, inspect.ismethod):
setattr(clz, method,
decorator("%s.%s.%s" % (module, key, method), func))
# set the decorator for the function
if isinstance(module_data[key], pyclbr.Function):
func = importutils.import_class("%s.%s" % (module, key))
setattr(sys.modules[module], key,
decorator("%s.%s" % (module, key), func))
def convert_to_list_dict(lst, label):
"""Convert a value or list into a list of dicts"""
if not lst:
return None
if not isinstance(lst, list):
lst = [lst]
return [{label: x} for x in lst]
def timefunc(func):
"""Decorator that logs how long a particular function took to execute"""
@functools.wraps(func)
def inner(*args, **kwargs):
start_time = time.time()
try:
return func(*args, **kwargs)
finally:
total_time = time.time() - start_time
LOG.debug(_("timefunc: '%(name)s' took %(total_time).2f secs") %
dict(name=func.__name__, total_time=total_time))
return inner
def generate_glance_url():
"""Generate the URL to glance."""
# TODO(jk0): This will eventually need to take SSL into consideration
# when supported in glance.
return "http://%s:%d" % (FLAGS.glance_host, FLAGS.glance_port)
def generate_image_url(image_ref):
"""Generate an image URL from an image_ref."""
return "%s/images/%s" % (generate_glance_url(), image_ref)
@contextlib.contextmanager
def remove_path_on_error(path):
"""Protect code that wants to operate on PATH atomically.
Any exception will cause PATH to be removed.
"""
try:
yield
except Exception:
with excutils.save_and_reraise_exception():
delete_if_exists(path)
def make_dev_path(dev, partition=None, base='/dev'):
"""Return a path to a particular device.
>>> make_dev_path('xvdc')
/dev/xvdc
>>> make_dev_path('xvdc', 1)
/dev/xvdc1
"""
path = os.path.join(base, dev)
if partition:
path += str(partition)
return path
def total_seconds(td):
"""Local total_seconds implementation for compatibility with python 2.6"""
if hasattr(td, 'total_seconds'):
return td.total_seconds()
else:
return ((td.days * 86400 + td.seconds) * 10 ** 6 +
td.microseconds) / 10.0 ** 6
def sanitize_hostname(hostname):
"""Return a hostname which conforms to RFC-952 and RFC-1123 specs."""
if isinstance(hostname, unicode):
hostname = hostname.encode('latin-1', 'ignore')
hostname = re.sub('[ _]', '-', hostname)
hostname = re.sub('[^\w.-]+', '', hostname)
hostname = hostname.lower()
hostname = hostname.strip('.-')
return hostname
def read_cached_file(filename, cache_info, reload_func=None):
"""Read from a file if it has been modified.
:param cache_info: dictionary to hold opaque cache.
:param reload_func: optional function to be called with data when
file is reloaded due to a modification.
:returns: data from file
"""
mtime = os.path.getmtime(filename)
if not cache_info or mtime != cache_info.get('mtime'):
LOG.debug(_("Reloading cached file %s") % filename)
with open(filename) as fap:
cache_info['data'] = fap.read()
cache_info['mtime'] = mtime
if reload_func:
reload_func(cache_info['data'])
return cache_info['data']
def file_open(*args, **kwargs):
"""Open file
see built-in file() documentation for more details
Note: The reason this is kept in a separate module is to easily
be able to provide a stub module that doesn't alter system
state at all (for unit tests)
"""
return file(*args, **kwargs)
def hash_file(file_like_object):
"""Generate a hash for the contents of a file."""
checksum = hashlib.sha1()
for chunk in iter(lambda: file_like_object.read(32768), b''):
checksum.update(chunk)
return checksum.hexdigest()
@contextlib.contextmanager
def temporary_mutation(obj, **kwargs):
"""Temporarily set the attr on a particular object to a given value then
revert when finished.
One use of this is to temporarily set the read_deleted flag on a context
object:
with temporary_mutation(context, read_deleted="yes"):
do_something_that_needed_deleted_objects()
"""
NOT_PRESENT = object()
old_values = {}
for attr, new_value in kwargs.items():
old_values[attr] = getattr(obj, attr, NOT_PRESENT)
setattr(obj, attr, new_value)
try:
yield
finally:
for attr, old_value in old_values.items():
if old_value is NOT_PRESENT:
del obj[attr]
else:
setattr(obj, attr, old_value)
def service_is_up(service):
"""Check whether a service is up based on last heartbeat."""
last_heartbeat = service['updated_at'] or service['created_at']
# Timestamps in DB are UTC.
elapsed = total_seconds(timeutils.utcnow() - last_heartbeat)
return abs(elapsed) <= FLAGS.service_down_time
def generate_mac_address():
"""Generate an Ethernet MAC address."""
# NOTE(vish): We would prefer to use 0xfe here to ensure that linux
# bridge mac addresses don't change, but it appears to
# conflict with libvirt, so we use the next highest octet
# that has the unicast and locally administered bits set
# properly: 0xfa.
# Discussion: https://bugs.launchpad.net/nova/+bug/921838
mac = [0xfa, 0x16, 0x3e,
random.randint(0x00, 0x7f),
random.randint(0x00, 0xff),
random.randint(0x00, 0xff)]
return ':'.join(map(lambda x: "%02x" % x, mac))
def read_file_as_root(file_path):
"""Secure helper to read file as root."""
try:
out, _err = execute('cat', file_path, run_as_root=True)
return out
except exception.ProcessExecutionError:
raise exception.FileNotFound(file_path=file_path)
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
"""Temporarily chown a path.
:params owner_uid: UID of temporary owner (defaults to current user)
"""
if owner_uid is None:
owner_uid = os.getuid()
orig_uid = os.stat(path).st_uid
if orig_uid != owner_uid:
execute('chown', owner_uid, path, run_as_root=True)
try:
yield
finally:
if orig_uid != owner_uid:
execute('chown', orig_uid, path, run_as_root=True)
@contextlib.contextmanager
def tempdir(**kwargs):
tmpdir = tempfile.mkdtemp(**kwargs)
try:
yield tmpdir
finally:
try:
shutil.rmtree(tmpdir)
except OSError, e:
LOG.error(_('Could not remove tmpdir: %s'), str(e))
def strcmp_const_time(s1, s2):
"""Constant-time string comparison.
:params s1: the first string
:params s2: the second string
:return: True if the strings are equal.
This function takes two strings and compares them. It is intended to be
used when doing a comparison for authentication purposes to help guard
against timing attacks.
"""
if len(s1) != len(s2):
return False
result = 0
for (a, b) in zip(s1, s2):
result |= ord(a) ^ ord(b)
return result == 0
def walk_class_hierarchy(clazz, encountered=None):
"""Walk class hierarchy, yielding most derived classes first"""
if not encountered:
encountered = []
for subclass in clazz.__subclasses__():
if subclass not in encountered:
encountered.append(subclass)
# drill down to leaves first
for subsubclass in walk_class_hierarchy(subclass, encountered):
yield subsubclass
yield subclass
class UndoManager(object):
"""Provides a mechanism to facilitate rolling back a series of actions
when an exception is raised.
"""
def __init__(self):
self.undo_stack = []
def undo_with(self, undo_func):
self.undo_stack.append(undo_func)
def _rollback(self):
for undo_func in reversed(self.undo_stack):
undo_func()
def rollback_and_reraise(self, msg=None, **kwargs):
"""Rollback a series of actions then re-raise the exception.
.. note:: (sirp) This should only be called within an
exception handler.
"""
with excutils.save_and_reraise_exception():
if msg:
LOG.exception(msg, **kwargs)
self._rollback()
def ensure_tree(path):
"""Create a directory (and any ancestor directories required)
:param path: Directory to create
"""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST:
if not os.path.isdir(path):
raise
else:
raise
|
tylertian/Openstack
|
openstack F/nova/nova/utils.py
|
Python
|
apache-2.0
| 43,686
|
class Ticket:
def __init__(self, id, description, classes, system, vector=None, kw=None):
self._id = id
self._description = description
self._classes = classes
self._system = system
self._vector = vector
self._keywords = kw
if vector is not None:
self._nonzero_vector = [x for x in vector if x > 0]
else:
self._nonzero_vector = None
@property
def description(self):
return self._description
@property
def classes(self):
return self._classes
@property
def system(self):
return self._system
@property
def id(self):
return self._id
@property
def vector(self):
return self._vector
@property
def nonzero_vector(self):
return self._nonzero_vector
@property
def keywords(self):
return self._keywords
|
w-garcia/BugClustering
|
Ticket.py
|
Python
|
apache-2.0
| 904
|
from django.shortcuts import get_object_or_404, redirect
from django.db import transaction
from rest_framework import viewsets
from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin
from .serializers import GameSerializer, PlayerSerializer, TargetSerializer
from .models import Game, Player, Target
from django.db.models.signals import post_delete
from django.dispatch import receiver
import requests
from rest_framework.response import Response
# from rest_framework.decorators import api_view
# Create your views here.
firebase_url = "https://tanks-for-waiting.firebaseio.com"
get, put, delete = requests.get, requests.put, requests.delete
def redirect_to_game(request):
return redirect("https://tanks-for-waiting.firebaseapp.com/")
class GameViewSet(viewsets.GenericViewSet,
CreateModelMixin,
ListModelMixin,
RetrieveModelMixin):
queryset = Game.objects.all()
serializer_class = GameSerializer
def get_serializer(self, *args, **kwargs):
"""
Return the serializer instance that should be used for validating and
deserializing input, and for serializing output. In this case we are
        getting the player_id out of the included payload so we can put that
        player into a game.
"""
try:
player_id = kwargs['data']['player_id']
serializer_class = self.get_serializer_class()
kwargs['context'] = {'player':get_object_or_404(Player, player_id=player_id)}
return serializer_class(*args, **kwargs)
except KeyError:
serializer_class = self.get_serializer_class()
kwargs['context'] = self.get_serializer_context()
return serializer_class(*args, **kwargs)
class PlayerViewSet(viewsets.ModelViewSet):
queryset = Player.objects.all()
serializer_class = PlayerSerializer
# def destroy(self, request, *args, **kwargs):
# instance = self.get_object()
# RetiredPlayer.objects.create(playtime=instance.start_time - datetime.now())
# self.perform_destroy(instance)
# return Response(status=status.HTTP_204_NO_CONTENT)
class TargetViewSet(viewsets.ModelViewSet):
queryset = Target.objects.all()
serializer_class = TargetSerializer
def get_queryset(self):
        '''When you GET targets, only return targets for the game in the URL'''
return self.queryset.filter(game_id=self.kwargs['games_pk'])
def get_serializer_context(self):
'''Gets the game_id out of the url'''
context = super().get_serializer_context().copy()
context['game'] = get_object_or_404(Game, game_id=self.kwargs['games_pk'])
return context
@transaction.atomic
def destroy(self, request, *args, **kwargs):
        '''Destroy the target both locally and in firebase.
        Tries to find a player_id in the payload; if it can't, it returns a 403 error.
        If it finds a player and that player's location in firebase is near enough to
        the target in the local database, that player gets a point. If the player is
        not close enough, the target is still destroyed but the player doesn't get a
        point.'''
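        # Illustrative request (ids are hypothetical):
        #     DELETE /games/<game_pk>/targets/<target_pk>/ with the raw body set
        #     to the shooter's player_id.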
try:
body = str(request.body.decode('utf-8'))
player = get_object_or_404(Player, player_id=body)
except:
return Response(status=403)
target = self.get_object()
game = target.game
self.perform_destroy(target)
current_location = get(firebase_url + "/games/{}/tanks/{}.json".format(game.game_id, player.player_id)).json()
if abs(current_location['x'] - target.x) < 100 and abs(current_location['y'] - target.y) < 100:
player.add_point()
return Response("Player")
@receiver(post_delete, sender=Target)
def put_targets(sender, **kwargs):
    '''Whenever a target is deleted locally, remove it from firebase and
    create a replacement target for the same game'''
target = kwargs['instance']
delete(firebase_url + "/games/{}/targets/{}.json".format(target.game.game_id, target.target_id))
new_target = Target.objects.create(game=target.game)
new_target.put()
|
TanksForWaiting/tanks-for-waiting
|
tanks_api/api/views.py
|
Python
|
apache-2.0
| 4,229
|
# coding: utf-8
from cases.osmtest import OsmTestCase
from chainsrc.ubike import UbikeSource
# Do not show stack traces in test failure output
__unittest = True
## U-bike map data tests
class UbikeTestCase(OsmTestCase):
  ## Fetch U-bike station data (done once for the test class)
@classmethod
def setUpClass(cls):
    super(UbikeTestCase, cls).setUpClass()
UbikeTestCase.src = UbikeSource()
  ## Test for new stations
def test01_new(self):
points = UbikeTestCase.src.getNewPoints()
if len(points)>0:
      msg = u'U-bike stations need to be added (%d)' % len(points)
for p in points:
msg = msg + '\n%s %s' % (p['ref'], p['name'])
self.fail(msg)
  ## Test for changed stations
def test02_changed(self):
points = UbikeTestCase.src.getChangedPoints()
if len(points)>0:
            msg = u'U-bike stations need to be updated (%d)' % len(points)
for p in points:
msg = msg + '\n[%s] %s %s' % (p['osm_id'], p['ref'], p['name'])
self.fail(msg)
    ## Test for disappeared stations
def test03_disappeared(self):
points = UbikeTestCase.src.getDisappearedPoints()
if len(points)>0:
            msg = u'U-bike stations need to be removed (%d)' % len(points)
for p in points:
msg = msg + '\n[%s] %s %s' % (p['osm_id'],p['ref'], p['name'])
self.fail(msg)
|
OsmHackTW/integrality_check
|
cases/test_ubike.py
|
Python
|
apache-2.0
| 1,161
|
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/tools/distrib/python/grpcio_tools/grpc_version.py.template`!!!
VERSION = '1.41.0.dev0'
PROTOBUF_VERSION = '3.15.8'
|
nicolasnoble/grpc
|
tools/distrib/python/grpc_version.py
|
Python
|
apache-2.0
| 738
|
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from Queue import Empty as QueueEmpty
from uuid import uuid4
import structlog
from google.protobuf.empty_pb2 import Empty
from grpc import StatusCode
from grpc._channel import _Rendezvous
from common.utils.grpc_utils import twisted_async
from twisted.internet import task
from common.utils.id_generation import create_cluster_device_id
from voltha.core.config.config_root import ConfigRoot
from voltha.protos.openflow_13_pb2 import PacketIn, Flows, FlowGroups, \
Meters, ofp_port_status, ofp_flow_removed
from voltha.protos.voltha_pb2_grpc import \
add_VolthaLocalServiceServicer_to_server, VolthaLocalServiceServicer
from voltha.protos.voltha_pb2 import \
VolthaInstance, Adapters, LogicalDevices, LogicalDevice, Ports, \
LogicalPort, LogicalPorts, Devices, Device, DeviceType, \
DeviceTypes, DeviceGroups, DeviceGroup, AdminState, OperStatus, ChangeEvent, \
AlarmFilter, AlarmFilters, SelfTestResponse, OfAgentSubscriber
from voltha.protos.device_pb2 import PmConfigs, Images, ImageDownload, ImageDownloads
from voltha.protos.common_pb2 import OperationResp, ConnectStatus
from voltha.protos.bbf_fiber_base_pb2 import AllMulticastDistributionSetData, AllMulticastGemportsConfigData
from voltha.registry import registry
from voltha.protos.omci_mib_db_pb2 import MibDeviceData
from voltha.protos.omci_alarm_db_pb2 import AlarmDeviceData
from requests.api import request
from common.utils.asleep import asleep
log = structlog.get_logger()
class LocalHandler(VolthaLocalServiceServicer):
def __init__(self, core, instance_id, core_store_id, **init_kw):
self.core = core
self.instance_id = instance_id
self.core_store_id = core_store_id
self.init_kw = init_kw
self.root = None
self.started_with_existing_data = False
self.stopped = False
self.restart_delay = 2
self.subscriber = None
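        # Heartbeat bookkeeping for the ofagent session: a looping call fires every
        # ofagent_heartbeat_delay seconds, and the session is terminated once more
        # than ofagent_heartbeat_max_count intervals pass without the subscriber
        # re-registering (re-subscribing resets the counter).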
self.ofagent_heartbeat_count = 0
self.ofagent_heartbeat_max_count = 3
self.ofagent_heartbeat_delay = 5
self.ofagent_heartbeat_lc = None
self.ofagent_is_alive = True
def start(self, config_backend=None):
log.debug('starting')
if config_backend:
if 'root' in config_backend:
# This is going to block the entire reactor until loading is
# completed
log.info('loading-config-from-persisted-backend')
try:
self.root = ConfigRoot.load(VolthaInstance,
kv_store=config_backend)
self.started_with_existing_data = True
except Exception, e:
log.exception('Failure-loading-from-backend', e=e)
else:
log.info('initializing-a-new-config')
self.root = ConfigRoot(VolthaInstance(**self.init_kw),
kv_store=config_backend)
else:
self.root = ConfigRoot(VolthaInstance(**self.init_kw))
self.core.xpon_handler.start(self.root)
log.info('started')
return self
def register_grpc_service(self):
log.debug('registering')
registry('grpc_server').register(
add_VolthaLocalServiceServicer_to_server, self)
log.info('registered')
def stop(self):
log.debug('stopping')
self.stopped = True
if self.ofagent_heartbeat_lc is not None:
self.ofagent_heartbeat_lc.stop()
self._ofagent_session_termination()
log.info('stopped')
def get_proxy(self, path, exclusive=False):
return self.root.get_proxy(path, exclusive)
def has_started_with_existing_data(self):
return self.started_with_existing_data
# gRPC service method implementations. BE CAREFUL; THESE ARE CALLED ON
# the gRPC threadpool threads.
@twisted_async
def GetVolthaInstance(self, request, context):
log.debug('grpc-request', request=request)
depth = int(dict(context.invocation_metadata()).get('get-depth', 0))
res = self.root.get('/', depth=depth)
return res
@twisted_async
def GetHealth(self, request, context):
log.debug('grpc-request', request=request)
return self.root.get('/health')
@twisted_async
def ListAdapters(self, request, context):
log.debug('grpc-request', request=request)
items = self.root.get('/adapters')
sorted_items = sorted(items, key=lambda i: i.id)
return Adapters(items=sorted_items)
@twisted_async
def ListLogicalDevices(self, request, context):
log.debug('grpc-request', request=request)
items = self.root.get('/logical_devices')
return LogicalDevices(items=items)
@twisted_async
def ListReachableLogicalDevices(self, request, context):
log.debug('grpc-request', request=request)
logical_devices = self.root.get('/logical_devices')
reachable_logical_devices = []
for logical_device in logical_devices:
device = self.root.get('/devices/{}'.format(
logical_device.root_device_id))
if device is not None and device.connect_status == \
ConnectStatus.REACHABLE:
reachable_logical_devices.append(logical_device)
return LogicalDevices(items=reachable_logical_devices)
@twisted_async
def GetLogicalDevice(self, request, context):
log.debug('grpc-request', request=request)
depth = int(dict(context.invocation_metadata()).get('get-depth', 0))
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return LogicalDevice()
try:
return self.root.get('/logical_devices/' + request.id, depth=depth)
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return LogicalDevice()
@twisted_async
def ListLogicalDevicePorts(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return LogicalPorts()
try:
items = self.root.get(
'/logical_devices/{}/ports'.format(request.id))
return LogicalPorts(items=items)
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return LogicalPorts()
@twisted_async
def GetLogicalDevicePort(self, request, context):
        log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return LogicalPort()
try:
return self.root.get(
'/logical_devices/{}/ports/{}'.format(request.id, request.port_id))
except KeyError:
context.set_details(
'Logical port \'{}\' not found on device \'{}\''.format(request.port_id, request.id))
context.set_code(StatusCode.NOT_FOUND)
return LogicalPort()
@twisted_async
def ListLogicalDeviceFlows(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Flows()
try:
flows = self.root.get(
'/logical_devices/{}/flows'.format(request.id))
return flows
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Flows()
@twisted_async
def EnableLogicalDevicePort(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
agent = self.core.get_logical_device_agent(request.id)
agent.port_enable(request.port_id)
return Empty()
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def DisableLogicalDevicePort(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
agent = self.core.get_logical_device_agent(request.id)
agent.port_disable(request.port_id)
return Empty()
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def UpdateLogicalDeviceFlowTable(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
agent = self.core.get_logical_device_agent(request.id)
agent.update_flow_table(request.flow_mod)
return Empty()
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def ListLogicalDeviceFlowGroups(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return FlowGroups()
try:
groups = self.root.get(
'/logical_devices/{}/flow_groups'.format(request.id))
return groups
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return FlowGroups()
@twisted_async
def UpdateLogicalDeviceFlowGroupTable(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
agent = self.core.get_logical_device_agent(request.id)
agent.update_group_table(request.group_mod)
return Empty()
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def ListDevices(self, request, context):
log.debug('grpc-request', request=request)
items = self.root.get('/devices')
return Devices(items=items)
@twisted_async
def GetDevice(self, request, context):
log.debug('grpc-request', request=request)
depth = int(dict(context.invocation_metadata()).get('get-depth', 0))
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Device()
try:
return self.root.get('/devices/' + request.id, depth=depth)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Device()
@twisted_async
def CreateDevice(self, request, context):
log.debug('grpc-request', request=request)
known_device_types = dict(
(dt.id, dt) for dt in self.root.get('/device_types'))
known_devices = self.root.get('/devices')
try:
assert isinstance(request, Device)
device = request
assert device.id == '', 'Device to be created cannot have id yet'
assert device.type in known_device_types, \
'Unknown device type \'{}\''.format(device.type)
assert device.admin_state in (AdminState.UNKNOWN,
AdminState.PREPROVISIONED), \
'Newly created device cannot be ' \
'in admin state \'{}\''.format(device.admin_state)
assert device.WhichOneof("address") is not None, \
'Device must have one contact address e.g. MAC, IPv4, IPv6, H&P'
error_message = 'Device with {} address \'{}\' already exists'
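            # Reject creation if any known device already uses the same contact
            # address (MAC, IPv4, IPv6, or host-and-port).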
for _device in known_devices:
if _device.HasField(device.WhichOneof("address")):
if device.HasField("mac_address"):
assert device.mac_address != _device.mac_address, \
error_message.format('MAC', device.mac_address)
elif device.HasField("ipv4_address"):
assert device.ipv4_address != _device.ipv4_address, \
error_message.format('IPv4', device.ipv4_address)
elif device.HasField("ipv6_address"):
assert device.ipv6_address != _device.ipv6_address, \
error_message.format('IPv6', device.ipv6_address)
elif device.HasField("host_and_port"):
assert device.host_and_port != _device.host_and_port, \
error_message.format('Host and Port',
device.host_and_port)
except AssertionError, e:
context.set_details(e.message)
context.set_code(StatusCode.INVALID_ARGUMENT)
return Device()
# fill additional data
device.id = create_cluster_device_id(self.core_store_id)
log.debug('device-id-created', device_id=device.id)
device_type = known_device_types[device.type]
device.adapter = device_type.adapter
if device.admin_state != AdminState.PREPROVISIONED:
device.admin_state = AdminState.PREPROVISIONED
device.oper_status = OperStatus.UNKNOWN
device.vendor_id = device_type.vendor_id
# add device to tree
self.root.add('/devices', device)
return request
@twisted_async
def EnableDevice(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
assert device.admin_state in (AdminState.PREPROVISIONED,
AdminState.DISABLED), \
'Device to enable cannot be ' \
'in admin state \'{}\''.format(device.admin_state)
device.admin_state = AdminState.ENABLED
self.root.update(path, device, strict=True)
except AssertionError, e:
context.set_details(e.message)
context.set_code(StatusCode.INVALID_ARGUMENT)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def DisableDevice(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
assert device.admin_state == AdminState.ENABLED, \
'Device to disable cannot be ' \
'in admin state \'{}\''.format(device.admin_state)
device.admin_state = AdminState.DISABLED
self.root.update(path, device, strict=True)
except AssertionError, e:
context.set_details(e.message)
context.set_code(StatusCode.INVALID_ARGUMENT)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
except Exception, e:
log.exception('disable-exception', e=e)
return Empty()
@twisted_async
def RebootDevice(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
assert device.admin_state != AdminState.DOWNLOADING_IMAGE, \
'Device to reboot cannot be ' \
'in admin state \'{}\''.format(device.admin_state)
agent = self.core.get_device_agent(device.id)
agent.reboot_device(device)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def DownloadImage(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
try:
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
assert isinstance(request, ImageDownload)
self.root.add('/devices/{}/image_downloads'.\
format(request.id), request)
# assert device.admin_state == AdminState.ENABLED, \
# 'Device to DOWNLOADING_IMAGE cannot be ' \
# 'in admin state \'{}\''.format(device.admin_state)
device.admin_state = AdminState.DOWNLOADING_IMAGE
self.root.update(path, device, strict=True)
agent = self.core.get_device_agent(device.id)
agent.register_image_download(request)
return OperationResp(code=OperationResp.OPERATION_SUCCESS)
except AssertionError as e:
context.set_details(e.message)
context.set_code(StatusCode.INVALID_ARGUMENT)
return OperationResp(code=OperationResp.OPERATION_UNSUPPORTED)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
except Exception as e:
log.exception(e.message)
context.set_code(StatusCode.NOT_FOUND)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
@twisted_async
def GetImageDownloadStatus(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
response = ImageDownload(state=ImageDownload.DOWNLOAD_UNKNOWN)
return response
try:
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
agent = self.core.get_device_agent(device.id)
img_dnld = self.root.get('/devices/{}/image_downloads/{}'.\
format(request.id, request.name))
response = agent.get_image_download_status(img_dnld)
#try:
# response = self.root.get('/devices/{}/image_downloads/{}'.\
# format(request.id, request.name))
#except Exception as e:
# log.exception(e.message)
return response
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
response = ImageDownload(state=ImageDownload.DOWNLOAD_UNKNOWN)
return response
except Exception as e:
log.exception(e.message)
response = ImageDownload(state=ImageDownload.DOWNLOAD_FAILED)
return response
@twisted_async
def GetImageDownload(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
response = ImageDownload(state=ImageDownload.DOWNLOAD_UNKNOWN)
return response
try:
response = self.root.get('/devices/{}/image_downloads/{}'.\
format(request.id, request.name))
return response
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
response = ImageDownload(state=ImageDownload.DOWNLOAD_UNKNOWN)
return response
@twisted_async
def ListImageDownloads(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
response = ImageDownload(state=ImageDownload.DOWNLOAD_UNKNOWN)
return response
try:
response = self.root.get('/devices/{}/image_downloads'.\
format(request.id))
return ImageDownloads(items=response)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
response = ImageDownload(state=ImageDownload.DOWNLOAD_UNKNOWN)
return response
@twisted_async
def CancelImageDownload(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
try:
assert isinstance(request, ImageDownload)
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
# assert device.admin_state == AdminState.DOWNLOADING_IMAGE, \
# 'Device to cancel DOWNLOADING_IMAGE cannot be ' \
# 'in admin state \'{}\''.format(device.admin_state)
agent = self.core.get_device_agent(device.id)
agent.cancel_image_download(request)
self.root.remove('/devices/{}/image_downloads/{}'.format(request.id, request.name))
return OperationResp(code=OperationResp.OPERATION_SUCCESS)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
@twisted_async
def ActivateImageUpdate(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
try:
assert isinstance(request, ImageDownload)
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
assert device.admin_state == AdminState.ENABLED, \
'Device to activate image cannot be ' \
'in admin state \'{}\''.format(device.admin_state)
agent = self.core.get_device_agent(device.id)
agent.activate_image_update(request)
return OperationResp(code=OperationResp.OPERATION_SUCCESS)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
@twisted_async
def RevertImageUpdate(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
try:
assert isinstance(request, ImageDownload)
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
assert device.admin_state == AdminState.ENABLED, \
'Device to revert image cannot be ' \
'in admin state \'{}\''.format(device.admin_state)
agent = self.core.get_device_agent(device.id)
agent.revert_image_update(request)
return OperationResp(code=OperationResp.OPERATION_SUCCESS)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return OperationResp(code=OperationResp.OPERATION_FAILURE)
@twisted_async
def DeleteDevice(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
assert device.admin_state == AdminState.DISABLED or device.admin_state == AdminState.PREPROVISIONED, \
'Device to delete cannot be ' \
'in admin state \'{}\''.format(device.admin_state)
self.root.remove(path)
except AssertionError, e:
context.set_details(e.message)
context.set_code(StatusCode.INVALID_ARGUMENT)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def ListDevicePorts(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Ports()
try:
items = self.root.get('/devices/{}/ports'.format(request.id))
return Ports(items=items)
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Ports()
@twisted_async
def ListDevicePmConfigs(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return PmConfigs()
try:
pm_configs = self.root.get(
'/devices/{}/pm_configs'.format(request.id))
pm_configs.id = request.id
log.debug('device-for-pms', pm_configs=pm_configs)
return pm_configs
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return PmConfigs()
@twisted_async
def UpdateDevicePmConfigs(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
device = self.root.get('/devices/{}'.format(request.id))
agent = self.core.get_device_agent(request.id)
agent.update_device_pm_config(request)
return Empty()
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def ListDeviceFlows(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Flows()
try:
flows = self.root.get('/devices/{}/flows'.format(request.id))
return flows
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Flows()
@twisted_async
def ListDeviceFlowGroups(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return FlowGroups()
try:
groups = self.root.get(
'/devices/{}/flow_groups'.format(request.id))
return groups
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return FlowGroups()
@twisted_async
def ListDeviceTypes(self, request, context):
log.debug('grpc-request', request=request)
items = self.root.get('/device_types')
sorted_items = sorted(items, key=lambda i: i.id)
return DeviceTypes(items=sorted_items)
@twisted_async
def GetDeviceType(self, request, context):
log.debug('grpc-request', request=request)
depth = int(dict(context.invocation_metadata()).get('get-depth', 0))
if '/' in request.id:
context.set_details(
'Malformed device type id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return DeviceType()
try:
return self.root.get('/device_types/' + request.id, depth=depth)
except KeyError:
context.set_details(
'Device type \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return DeviceType()
@twisted_async
def ListDeviceGroups(self, request, context):
log.debug('grpc-request', request=request)
# TODO is this mapped to tree or taken from coordinator?
items = self.root.get('/device_groups')
return DeviceGroups(items=items)
@twisted_async
def GetDeviceGroup(self, request, context):
log.debug('grpc-request', request=request)
depth = int(dict(context.invocation_metadata()).get('get-depth', 0))
if '/' in request.id:
context.set_details(
'Malformed device group id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return DeviceGroup()
# TODO is this mapped to tree or taken from coordinator?
try:
return self.root.get('/device_groups/' + request.id, depth=depth)
except KeyError:
context.set_details(
'Device group \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return DeviceGroup()
# bbf_fiber rpcs start
@twisted_async
def GetAllChannelgroupConfig(self, request, context):
return self.core.xpon_handler.get_all_channel_group_config(
request, context)
@twisted_async
def CreateChannelgroup(self, request, context):
return self.core.xpon_handler.create_channel_group(request, context)
@twisted_async
def UpdateChannelgroup(self, request, context):
return self.core.xpon_handler.update_channel_group(request, context)
@twisted_async
def DeleteChannelgroup(self, request, context):
return self.core.xpon_handler.delete_channel_group(request, context)
@twisted_async
def GetAllChannelpartitionConfig(self, request, context):
return self.core.xpon_handler.get_all_channel_partition_config(
request, context)
@twisted_async
def CreateChannelpartition(self, request, context):
return self.core.xpon_handler.create_channel_partition(
request, context)
@twisted_async
def UpdateChannelpartition(self, request, context):
return self.core.xpon_handler.update_channel_partition(
request, context)
@twisted_async
def DeleteChannelpartition(self, request, context):
return self.core.xpon_handler.delete_channel_partition(
request, context)
@twisted_async
def GetAllChannelpairConfig(self, request, context):
return self.core.xpon_handler.get_all_channel_pair_config(
request, context)
@twisted_async
def CreateChannelpair(self, request, context):
return self.core.xpon_handler.create_channel_pair(request, context)
@twisted_async
def UpdateChannelpair(self, request, context):
return self.core.xpon_handler.update_channel_pair(request, context)
@twisted_async
def DeleteChannelpair(self, request, context):
return self.core.xpon_handler.delete_channel_pair(request, context)
@twisted_async
def GetAllChannelterminationConfig(self, request, context):
return self.core.xpon_handler.get_all_channel_termination_config(
request, context)
@twisted_async
def CreateChanneltermination(self, request, context):
return self.core.xpon_handler.create_channel_termination(
request, context)
@twisted_async
def UpdateChanneltermination(self, request, context):
return self.core.xpon_handler.update_channel_termination(
request, context)
@twisted_async
def DeleteChanneltermination(self, request, context):
return self.core.xpon_handler.delete_channel_termination(
request, context)
@twisted_async
def GetAllOntaniConfig(self, request, context):
return self.core.xpon_handler.get_all_ont_ani_config(request, context)
@twisted_async
def CreateOntani(self, request, context):
return self.core.xpon_handler.create_ont_ani(request, context)
@twisted_async
def UpdateOntani(self, request, context):
return self.core.xpon_handler.update_ont_ani(request, context)
@twisted_async
def DeleteOntani(self, request, context):
return self.core.xpon_handler.delete_ont_ani(request, context)
@twisted_async
def GetAllVOntaniConfig(self, request, context):
return self.core.xpon_handler.get_all_v_ont_ani_config(
request, context)
@twisted_async
def CreateVOntani(self, request, context):
return self.core.xpon_handler.create_v_ont_ani(request, context)
@twisted_async
def UpdateVOntani(self, request, context):
return self.core.xpon_handler.update_v_ont_ani(request, context)
@twisted_async
def DeleteVOntani(self, request, context):
return self.core.xpon_handler.delete_v_ont_ani(request, context)
@twisted_async
def GetAllVEnetConfig(self, request, context):
return self.core.xpon_handler.get_all_v_enet_config(request, context)
@twisted_async
def CreateVEnet(self, request, context):
return self.core.xpon_handler.create_v_enet(request, context)
@twisted_async
def UpdateVEnet(self, request, context):
return self.core.xpon_handler.update_v_enet(request, context)
@twisted_async
def DeleteVEnet(self, request, context):
return self.core.xpon_handler.delete_v_enet(request, context)
@twisted_async
def GetAllTrafficDescriptorProfileData(self, request, context):
return self.core.xpon_handler.get_all_traffic_descriptor_profile_data(
request, context)
@twisted_async
def CreateTrafficDescriptorProfileData(self, request, context):
return self.core.xpon_handler.create_traffic_descriptor_profile(
request, context)
@twisted_async
def UpdateTrafficDescriptorProfileData(self, request, context):
return self.core.xpon_handler.update_traffic_descriptor_profile(
request, context)
@twisted_async
def DeleteTrafficDescriptorProfileData(self, request, context):
return self.core.xpon_handler.delete_traffic_descriptor_profile(
request, context)
@twisted_async
def GetAllTcontsConfigData(self, request, context):
return self.core.xpon_handler.get_all_tconts_config_data(
request, context)
@twisted_async
def CreateTcontsConfigData(self, request, context):
return self.core.xpon_handler.create_tcont(request, context)
@twisted_async
def UpdateTcontsConfigData(self, request, context):
return self.core.xpon_handler.update_tcont(request, context)
@twisted_async
def DeleteTcontsConfigData(self, request, context):
return self.core.xpon_handler.delete_tcont(request, context)
@twisted_async
def GetAllGemportsConfigData(self, request, context):
return self.core.xpon_handler.get_all_gemports_config_data(
request, context)
@twisted_async
def CreateGemportsConfigData(self, request, context):
return self.core.xpon_handler.create_gem_port(request, context)
@twisted_async
def UpdateGemportsConfigData(self, request, context):
return self.core.xpon_handler.update_gem_port(request, context)
@twisted_async
def DeleteGemportsConfigData(self, request, context):
return self.core.xpon_handler.delete_gem_port(request, context)
@twisted_async
def GetAllMulticastGemportsConfigData(self, request, context):
return AllMulticastGemportsConfigData()
@twisted_async
def CreateMulticastGemportsConfigData(self, request, context):
return Empty()
@twisted_async
def UpdateMulticastGemportsConfigData(self, request, context):
return Empty()
@twisted_async
def DeleteMulticastGemportsConfigData(self, request, context):
return Empty()
@twisted_async
def GetAllMulticastDistributionSetData(self, request, context):
return AllMulticastDistributionSetData()
@twisted_async
def CreateMulticastDistributionSetData(self, request, context):
return Empty()
@twisted_async
def UpdateMulticastDistributionSetData(self, request, context):
return Empty()
@twisted_async
def DeleteMulticastDistributionSetData(self, request, context):
return Empty()
# bbf_fiber rpcs end
def StreamPacketsOut(self, request_iterator, context):
log.debug('start-stream-packets-out')
@twisted_async
def forward_packet_out(packet_out):
agent = self.core.get_logical_device_agent(packet_out.id)
agent.packet_out(packet_out.packet_out)
for request in request_iterator:
forward_packet_out(packet_out=request)
log.debug('stop-stream-packets-out')
return Empty()
def ReceivePacketsIn(self, request, context):
log.debug('start-receive-packets-in')
while self.ofagent_is_alive:
try:
packet_in = self.core.packet_in_queue.get(timeout=1)
yield packet_in
except QueueEmpty:
if self.stopped:
break
log.debug('stop-receive-packets-in')
def send_packet_in(self, device_id, ofp_packet_in):
"""Must be called on the twisted thread"""
packet_in = PacketIn(id=device_id, packet_in=ofp_packet_in)
self.core.packet_in_queue.put(packet_in)
def ReceiveChangeEvents(self, request, context):
log.debug('start-receive-change-events')
while self.ofagent_is_alive:
try:
event = self.core.change_event_queue.get(timeout=1)
yield event
except QueueEmpty:
if self.stopped:
break
log.debug('stop-receive-change-events')
def send_port_change_event(self, device_id, port_status):
"""Must be called on the twisted thread"""
assert isinstance(port_status, ofp_port_status)
event = ChangeEvent(id=device_id, port_status=port_status)
self.core.change_event_queue.put(event)
def send_flow_removed_event(self, device_id, flow_removed):
assert isinstance(flow_removed, ofp_flow_removed)
event = ChangeEvent(id=device_id, flow_removed=flow_removed)
self.core.change_event_queue.put(event)
@twisted_async
def ListAlarmFilters(self, request, context):
try:
filters = self.root.get('/alarm_filters')
return AlarmFilters(filters=filters)
except KeyError:
context.set_code(StatusCode.NOT_FOUND)
return AlarmFilters()
@twisted_async
def GetAlarmFilter(self, request, context):
if '/' in request.id:
context.set_details(
'Malformed alarm filter id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return AlarmFilter()
try:
alarm_filter = self.root.get('/alarm_filters/{}'.format(request.id))
return alarm_filter
except KeyError:
context.set_details(
'Alarm filter \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return AlarmFilter()
@twisted_async
def DeleteAlarmFilter(self, request, context):
if '/' in request.id:
context.set_details(
'Malformed alarm filter id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
self.root.remove('/alarm_filters/{}'.format(request.id))
except KeyError:
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def CreateAlarmFilter(self, request, context):
log.debug('grpc-request', request=request)
try:
assert isinstance(request, AlarmFilter)
alarm_filter = request
assert alarm_filter.id is not None, 'Local Alarm filter to be ' \
'created must have id'
except AssertionError, e:
context.set_details(e.message)
context.set_code(StatusCode.INVALID_ARGUMENT)
return AlarmFilter()
# add device to tree
self.root.add('/alarm_filters', alarm_filter)
return request
@twisted_async
def UpdateAlarmFilter(self, request, context):
if '/' in request.id:
context.set_details(
'Malformed alarm filter id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return AlarmFilter()
try:
assert isinstance(request, AlarmFilter)
alarm_filter = self.root.get('/alarm_filters/{}'.format(request.id))
self.root.update('/alarm_filters/{}'.format(request.id), request)
return request
except KeyError:
context.set_details(
'Alarm filter \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return AlarmFilter()
@twisted_async
def GetImages(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Images()
try:
device = self.root.get('/devices/' + request.id)
return device.images
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Images()
@twisted_async
def SelfTest(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return SelfTestResponse()
try:
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
agent = self.core.get_device_agent(device.id)
resp = agent.self_test(device)
return resp.result
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return SelfTestResponse()
def _ofagent_session_termination(self):
log.debug('start-ofagent-session-termination')
# Stop ofagent heartbeat
if self.ofagent_heartbeat_lc is not None:
self.ofagent_heartbeat_lc.stop()
# Reset flags and assignments
self.ofagent_is_alive = False
self.subscriber = None
self.ofagent_heartbeat_count = 0
# Some local services will stop (packet-in/change-events)
# need to re-register them
registry('grpc_server').register(
add_VolthaLocalServiceServicer_to_server, self)
log.debug('stop-ofagent-session-termination')
def _ofagent_session_heartbeat(self):
log.debug('start-ofagent-heartbeat')
if self.ofagent_heartbeat_count > self.ofagent_heartbeat_max_count:
self._ofagent_session_termination()
else:
self.ofagent_heartbeat_count += 1
log.debug('stop-ofagent-heartbeat')
@twisted_async
def Subscribe(self, request, context):
log.debug('grpc-request', request=request)
# Check if an ofagent subscriber is assigned
if self.subscriber is None:
log.debug('ofagent-subscriber-request')
try:
# Assign the request as the active subscriber
self.subscriber = OfAgentSubscriber(
ofagent_id=request.ofagent_id,
voltha_id=self.instance_id
)
                # Start the heartbeat
self.ofagent_heartbeat_count = 0
self.ofagent_heartbeat_lc = task.LoopingCall(self._ofagent_session_heartbeat)
self.ofagent_heartbeat_lc.start(self.ofagent_heartbeat_delay)
log.debug('ofagent-subscriber-connected', subscriber=self.subscriber)
except _Rendezvous, e:
log.error('ofagent-subscriber-failure', exception=repr(e), status=e.code())
except Exception as e:
log.exception('ofagent-subscriber-unexpected-failure', exception=repr(e))
elif self.subscriber.ofagent_id == request.ofagent_id:
log.debug('ofagent-subscriber-matches-assigned',
current=self.subscriber)
# reset counter
self.ofagent_heartbeat_count = 0
else:
log.debug('ofagent-subscriber-not-matching-assigned',
current=self.subscriber)
return self.subscriber
@twisted_async
def GetMibDeviceData(self, request, context):
log.debug('grpc-request', request=request)
depth = int(dict(context.invocation_metadata()).get('get-depth', -1))
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return MibDeviceData()
try:
return self.root.get('/omci_mibs/' + request.id, depth=depth)
except KeyError:
context.set_details(
'OMCI MIB for Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return MibDeviceData()
@twisted_async
def GetAlarmDeviceData(self, request, context):
log.info('grpc-request', request=request)
depth = int(dict(context.invocation_metadata()).get('get-depth', -1))
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return AlarmDeviceData()
try:
return self.root.get('/omci_alarms/' + request.id, depth=depth)
except KeyError:
context.set_details(
'OMCI ALARM for Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return AlarmDeviceData()
@twisted_async
def UpdateLogicalDeviceMeterTable(self, request, context):
log.info('meter-table-update-grpc-request', request=request)
if '/' in request.id:
context.set_details('Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Empty()
try:
agent = self.core.get_logical_device_agent(request.id)
agent.update_meter_table(request.meter_mod)
return Empty()
except KeyError:
context.set_details('Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Empty()
@twisted_async
def ListLogicalDeviceMeters(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed logical device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
return Meters()
try:
meters = self.root.get(
'/logical_devices/{}/meters'.format(request.id))
log.debug("Found meters", meters=meters)
return meters
except KeyError:
context.set_details(
'Logical device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
return Meters()
@twisted_async
def SimulateAlarm(self, request, context):
log.debug('grpc-request', request=request)
if '/' in request.id:
context.set_details(
'Malformed device id \'{}\''.format(request.id))
context.set_code(StatusCode.INVALID_ARGUMENT)
response = OperationResp(code=OperationResp.OPERATION_FAILURE)
return response
try:
path = '/devices/{}'.format(request.id)
device = self.root.get(path)
agent = self.core.get_device_agent(device.id)
response = agent.simulate_alarm(device, request)
return response
except KeyError:
context.set_details(
'Device \'{}\' not found'.format(request.id))
context.set_code(StatusCode.NOT_FOUND)
response = OperationResp(code=OperationResp.OPERATION_FAILURE)
return response
except Exception as e:
log.exception(e.message)
response = OperationResp(code=OperationResp.OPERATION_FAILURE)
return response
|
opencord/voltha
|
voltha/core/local_handler.py
|
Python
|
apache-2.0
| 54,573
|
"""Support for Soma Smartshades."""
from api.soma_api import SomaApi
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import API, DOMAIN, HOST, PORT
DEVICES = "devices"
CONFIG_SCHEMA = vol.Schema(
vol.All(
cv.deprecated(DOMAIN),
{
DOMAIN: vol.Schema(
{vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.string}
)
},
),
extra=vol.ALLOW_EXTRA,
)
PLATFORMS = ["cover", "sensor"]
async def async_setup(hass, config):
"""Set up the Soma component."""
if DOMAIN not in config:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
data=config[DOMAIN],
context={"source": config_entries.SOURCE_IMPORT},
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Soma from a config entry."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][API] = SomaApi(entry.data[HOST], entry.data[PORT])
devices = await hass.async_add_executor_job(hass.data[DOMAIN][API].list_devices)
hass.data[DOMAIN][DEVICES] = devices["shades"]
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
class SomaEntity(Entity):
"""Representation of a generic Soma device."""
def __init__(self, device, api):
"""Initialize the Soma device."""
self.device = device
self.api = api
self.current_position = 50
self.battery_state = 0
self.is_available = True
@property
def available(self):
"""Return true if the last API commands returned successfully."""
return self.is_available
@property
def unique_id(self):
"""Return the unique id base on the id returned by pysoma API."""
return self.device["mac"]
@property
def name(self):
"""Return the name of the device."""
return self.device["name"]
@property
def device_info(self) -> DeviceInfo:
"""Return device specific attributes.
Implemented by platform classes.
"""
return DeviceInfo(
identifiers={(DOMAIN, self.unique_id)},
manufacturer="Wazombi Labs",
name=self.name,
)
|
jawilson/home-assistant
|
homeassistant/components/soma/__init__.py
|
Python
|
apache-2.0
| 2,778
|
"""
Given a binary tree, flatten it to a linked list in-place.
For example,
Given
1
/ \
2 5
/ \ \
3 4 6
The flattened tree should look like:
1
\
2
\
3
\
4
\
5
\
6
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def flatten(self, root):
"""
:type root: TreeNode
:rtype: void Do not return anything, modify root in-place instead.
"""
self.flattenHelper(root)
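    # flattenHelper returns (head, tail) of the flattened subtree so the caller
    # can splice the left chain in front of the right chain in constant time.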
def flattenHelper(self, root):
if not root:
return None, None
leftHead, leftTail = self.flattenHelper(root.left)
rightHead, rightTail = self.flattenHelper(root.right)
if leftHead:
root.left = None
root.right = leftHead
leftTail.right = rightHead
else:
# root.right is already set to rightHead
pass
tail = rightTail or leftTail or root
return root, tail
from utils import *
t = maketree([1, 2, 5, 3, 4, None, 6])
printtree(t)
Solution().flatten(t)
printlist(t, nextKey='right')
|
xiaonanln/myleetcode-python
|
src/114. Flatten Binary Tree to Linked List.py
|
Python
|
apache-2.0
| 1,072
|
#udp1.py
import socket
s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
s.bind(('127.0.0.1',9999))
print 'Bind UDP on 9999...'
while True:
data,addr=s.recvfrom(1024)
print 'Received from %s:%s' %addr
s.sendto('Hello,%s!'%data,addr)
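# An illustrative client sketch (hypothetical, shown only as a comment):
#   c = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   c.sendto('Alice', ('127.0.0.1', 9999))
#   print c.recvfrom(1024)[0]   # -> 'Hello,Alice!'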
|
zengboming/python
|
udp1.py
|
Python
|
apache-2.0
| 240
|
#!/usr/bin/env python
#
# Software License Agreement (Apache License)
# Copyright (c) 2017, <Advanced Remanufacturing Technology Centre/Mingli Han>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import rospy
import smach
import smach_ros
import config
# define state Execute
class Execute(smach.State):
def __init__(self):
smach.State.__init__(self,
outcomes=['outcome1','outcome2'],
input_keys=['execute_in'],
output_keys=['execute_out'])
def execute(self, userdata):
rospy.loginfo('Executing state Execute')
# Receive Stop Tag
var = config.client.get_node("ns=3;s=\"PackML_Status\".\"UN\".\"Cmd_Stop\"")
var.set_value(True)
config.stopTag = var.get_value()
print("Received stop command: ", config.stopTag)
if userdata.execute_in == True and config.stopTag == True:
userdata.execute_out = True
return 'outcome1'
else:
return 'outcome2'
|
ros-industrial-consortium/bohr_devel
|
packml_smach/src/executeState.py
|
Python
|
apache-2.0
| 1,575
|
import os
import sys
import re
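# Merges Spark-style "part-*.json" output files in the given directory into valid
# JSON array files: for each input file, the first number found in its first line
# (zero-padded when it is a single digit) names the output "<suffix>.json", and the
# lines are joined with commas and wrapped in [ ... ].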
dir = sys.argv[1]
files = [string for string in os.listdir(dir) if string.endswith(".json") and string.startswith("part-")]
pattern = re.compile("\d+")
for fin in files:
suffix = ""
with open(dir + "/" + fin) as data_in:
for line in data_in:
if suffix == "" :
suffix = pattern.search(line).group()
if len(suffix) == 1:
suffix = "0" + suffix
fout = open(dir + "/" + suffix + ".json", "w+")
fout.write("[")
fout.write(line)
else:
fout.write(",")
fout.write(line)
if suffix != "":
fout.write("]")
fout.close()
suffix = ""
|
cleosson/AnalyticsWithApacheSpark
|
tools/toJson.py
|
Python
|
apache-2.0
| 773
|
class Map(dict):
"""
Example:
m = Map({'first_name': 'Eduardo'}, last_name='Pool', age=24, sports=['Soccer'])
"""
def __init__(self, *args, **kwargs):
super(Map, self).__init__(*args, **kwargs)
for arg in args:
if isinstance(arg, dict):
for k, v in arg.items():
self[k] = v
if kwargs:
for k, v in kwargs.items():
self[k] = v
def __getattr__(self, attr):
return self.get(attr)
def __setattr__(self, key, value):
self.__setitem__(key, value)
def __setitem__(self, key, value):
super(Map, self).__setitem__(key, value)
self.__dict__.update({key: value})
def __delattr__(self, item):
self.__delitem__(item)
def __delitem__(self, key):
super(Map, self).__delitem__(key)
del self.__dict__[key]
def update(self, other=None, **kwargs):
if other is not None:
for k, v in other.items():
self[k] = v
for k, v in kwargs.items():
self[k] = v
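# Minimal usage sketch (assumed example): attribute access and item access stay in
# sync because __setattr__/__setitem__ update both the dict and __dict__.
if __name__ == "__main__":
    m = Map({'first_name': 'Eduardo'}, last_name='Pool', age=24)
    m.nickname = 'Lalo'            # equivalent to m['nickname'] = 'Lalo'
    assert m['nickname'] == m.nickname
    del m.age                      # equivalent to del m['age']
    assert 'age' not in m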
|
rwecho/fflask_blog
|
fflask_blog/Repositories/map_dict.py
|
Python
|
apache-2.0
| 1,096
|
from turbo_palm_tree.utility.download_subreddit_submissions \
import DownloadSubredditSubmissions
from turbo_palm_tree.utility.exception import TurboPalmTreeException
|
jtara1/turbo_palm_tree
|
turbo_palm_tree/__init__.py
|
Python
|
apache-2.0
| 171
|
#!/usr/bin/python
'''
Validates command to gather logs from a cluster
Developer usage:
ssh -i log_gathering_key use-tower2.ops.rhcloud.com -- -c <clusterName> -u <kerberosID>
clusterName is checked against a list of known/valid cluster names.
kerberosID is the Kerberos ID of the developer requesting logs, for tracking purposes
'''
# pylint: disable=invalid-name
# pylint: disable=logging-not-lazy
# pylint: disable=broad-except
# The log_gathering_key is kept in the shared-secrets repo. It is generated and
# rotated weekly by the Jenkins job cluster/rotate-log-access-key. The public
# key in .ssh/authorized_keys looks like this:
#
# command="verify-gather-logs-operations.py" ssh-rsa ThePublicKey logs_access_key_week_22-YMD-H:M:S
import argparse
import getpass
import os
import re
import sys
import socket
import logging
import logging.handlers
# The list of valid cluster names to request logs from
VALID_CLUSTER_NAMES = [
'free-int',
'free-stg',
'starter-ca-central-1',
'starter-us-east-1',
'starter-us-east-2',
'starter-us-west-1',
'starter-us-west-2'
]
# This regex is used to check that the provided node names look like a valid
# node name (i.e. from "oc get node") or an inventory host name.
#
# In order to keep the check simple this doesn't aim at RFC 1123 compliance,
# only that the provided node name is "similar enough" to a valid hostname
# and that it matches what our cluster's node names/inventory names look like
#
# Here labels must start with a letter. This prevents using IPs to identify
# nodes (the log gathering script should be modified to allow IPs).
#
# NOTE: this is only an input sanity check, not a full verification that the
# provided node names actually exist. The full/final hostname validation
# happens in the log gathering script itself by accessing the cluster's
# ansible inventory
HOSTNAME_RE = re.compile(
r'([a-z][a-z\d-]{0,62}\.)*'
r'([a-z][a-z\d-]{0,62})$',
re.IGNORECASE
)
# The command that is invoked to perform the actual log collection. This
# command should expect one argument, the cluster name on which to operate, and
# produce the logs as a tarball in stdout, which gets passed directly as the
# output of this script.
LOG_GATHER_CMD = '/home/opsmedic/aos-cd/git/aos-cd-jobs/tower-scripts/bin/gather-logs.sh'
HOSTNAME = socket.gethostname()
USERNAME = getpass.getuser()
INVOCATION = "%s@%s" % (USERNAME, HOSTNAME)
logger = logging.getLogger('verify_command_logger')
logger.setLevel(logging.INFO)
logger.addHandler(logging.handlers.SysLogHandler('/dev/log'))
def valid_krbid(username):
'''Sanity check that the username looks like valid according to the description
of valid usernames from useradd(8)
'''
if re.match(r'^[a-z_][a-z0-9_-]*[$]?$', username) and len(username) <= 32:
return username
else:
raise argparse.ArgumentTypeError("Kerberos ID was not provided in acceptable format")
def hostname(arg):
'''Check that an argument looks like a valid hostname'''
if HOSTNAME_RE.match(arg) and len(arg) < 256:
return arg
else:
raise argparse.ArgumentTypeError("Invalid node name format")
def gather_logs(command):
'''Main function that parses arguments and execs the cluster log
gathering command.
This function never returns (it can raise exceptions though)
'''
usage = "ssh -i gather_logs_key %(prog)s -- -u USER -c CLUSTER [-n node1 node2...] > logs.tar.gz"
parser = argparse.ArgumentParser(prog=INVOCATION, usage=usage)
parser.add_argument('-u', dest='user', help="Your kerberos ID",
required=True, type=valid_krbid)
parser.add_argument('-c', dest='cluster', help="Cluster name",
required=True, choices=VALID_CLUSTER_NAMES)
parser.add_argument('-n', dest='nodes', help="Nodes to gather logs from",
required=False, metavar='node',
type=hostname, nargs='+')
args = parser.parse_args(command.split())
if args.nodes:
os.execlp(LOG_GATHER_CMD, LOG_GATHER_CMD, args.cluster, *args.nodes)
else:
os.execlp(LOG_GATHER_CMD, LOG_GATHER_CMD, args.cluster)
if __name__ == '__main__':
cmd = os.environ.get("SSH_ORIGINAL_COMMAND", "")
logger.info("%s invoked with arguments: %s" %
((os.path.basename(__file__)), cmd))
try:
gather_logs(cmd)
except Exception as e:
logger.info("%s Cluster log gathering failed command '%s': %s" %
((os.path.basename(__file__)), cmd, e))
# The gather_logs() function should never return, as it exec's the program
# to produce the logs. If we're here, something went wrong:
sys.exit(10)
|
rhdedgar/openshift-tools
|
scripts/cicd/verify-gather-logs-operations.py
|
Python
|
apache-2.0
| 4,734
|
#################################################
####### Author: Hugo Pibernat #######
####### Contact: hugopibernat@gmail.com #######
####### Date: April 2014 #######
#################################################
from bayesianABTest import sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB, probabilityOfABetterThanBAndC, upliftOfAOverBWithProbability
from numpy import mean, concatenate, loadtxt
# Number of samples to generate when performing numeric integration
accuracy = 10000
# Reading data
print "-- reading data"
dataset_dtype = [('casenum',int),('spend_usd',float)]
dataset = loadtxt('input/bayesian-test-input-case3-onlypositive.csv', dtype = dataset_dtype)
A_actuals_spend = [y for (x,y) in dataset if x==1]
B_actuals_spend = [y for (x,y) in dataset if x==2]
C_actuals_spend = [y for (x,y) in dataset if x==3]
numzeros = loadtxt('input/bayesian-test-input-case3-onlyzeros.csv', dtype=[('casenum',int),('zeros',int)])
A_not_spend = [y for (x,y) in numzeros if x==1][0]
B_not_spend = [y for (x,y) in numzeros if x==2][0]
C_not_spend = [y for (x,y) in numzeros if x==3][0]
# Modeling conversions with a binomial variable
print "-- modeling conversion with a binomial variable"
A_k = len(A_actuals_spend)
B_k = len(B_actuals_spend)
C_k = len(C_actuals_spend)
A_n = A_k + A_not_spend
B_n = B_k + B_not_spend
C_n = C_k + C_not_spend
A_conversions = sampleSuccessRateForBinomial(A_n,A_k,samples=accuracy)
B_conversions = sampleSuccessRateForBinomial(B_n,B_k,samples=accuracy)
C_conversions = sampleSuccessRateForBinomial(C_n,C_k,samples=accuracy)
# Modeling the spend with a log-normal
print "-- modeling spend with a log-normal variable"
A_spend = sampleMeanForLogNormal(A_actuals_spend,samples=accuracy)
B_spend = sampleMeanForLogNormal(B_actuals_spend,samples=accuracy)
C_spend = sampleMeanForLogNormal(C_actuals_spend,samples=accuracy)
# Combining samples
print "-- combining samples from both distributions"
A_rps = A_conversions*A_spend
B_rps = B_conversions*B_spend
C_rps = C_conversions*C_spend
# Result
print "-- result"
print "P(A>B and A>C) = {}".format(probabilityOfABetterThanBAndC(A_rps,B_rps,C_rps))
print "P(B>A and B>C) = {}".format(probabilityOfABetterThanBAndC(B_rps,A_rps,C_rps))
print "P(C>A and C>B) = {}".format(probabilityOfABetterThanBAndC(C_rps,A_rps,B_rps))
print "Summary:"
print "mean_A: {} -- mean_B: {} -- mean_C: {}".format(mean(A_rps),mean(B_rps),mean(C_rps))
print "A_k: {} -- A_n: {}".format(A_k,A_n)
print "B_k: {} -- B_n: {}".format(B_k,B_n)
print "C_k: {} -- C_n: {}".format(C_k,C_n)
print ""
print "Find uplift with probability 0.95:"
print "upliftOfAOverBWithProbability(B_rps,A_rps,0.95) = {}".format(upliftOfAOverBWithProbability(B_rps,A_rps,0.95))
print "-- ready for more commands (if executed in the interpreter)"
|
hugopibernat/BayesianABTestAnalysis
|
code/examples/xpromo_test.py
|
Python
|
apache-2.0
| 2,854
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from io import open
from kappa import __version__
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname), encoding='utf-8')
def run_setup():
setup(
name='kappa',
version=__version__,
description='A CLI tool for AWS Lambda developers',
long_description=open_file('README.rst').read(),
url='https://github.com/garnaat/kappa',
author='Mitch Garnaat',
author_email='mitch@garnaat.com',
license='Apache License 2.0',
packages=['kappa', 'kappa.scripts', 'kappa.event_source'],
package_data={'kappa': ['_version']},
package_dir={'kappa': 'kappa'},
entry_points="""
[console_scripts]
kappa=kappa.scripts.cli:cli
""",
install_requires=open_file('requirements.txt').readlines(),
test_suite='tests',
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
)
if __name__ == '__main__':
run_setup()
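# --- Illustrative usage note (not part of the original file) ---
# A typical developer workflow with a setup.py like this one (standard pip /
# setuptools commands; nothing kappa-specific is assumed beyond the
# entry_points declared above):
#
#   pip install -e .          # editable install; exposes the "kappa" console script
#   python setup.py sdist     # build a source distribution under dist/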
|
garnaat/kappa
|
setup.py
|
Python
|
apache-2.0
| 1,734
|
import os
import sys
import codecs
# Must be run with python 3 in order to work
ENCODINGS = [
u"ascii",
u"big5",
u"big5hkscs",
u"cp037",
u"cp424",
u"cp437",
u"cp500",
u"cp720",
u"cp737",
u"cp775",
u"cp850",
u"cp852",
u"cp855",
u"cp856",
u"cp857",
u"cp858",
u"cp860",
u"cp861",
u"cp862",
u"cp863",
u"cp864",
u"cp865",
u"cp866",
u"cp869",
u"cp874",
u"cp875",
u"cp932",
u"cp949",
u"cp950",
u"cp1006",
u"cp1026",
u"cp1140",
u"cp1250",
u"cp1251",
u"cp1252",
u"cp1253",
u"cp1254",
u"cp1255",
u"cp1256",
u"cp1257",
u"cp1258",
u"euc_jp",
u"euc_jis_2004",
u"euc_jisx0213",
u"euc_kr",
u"gb2312",
u"gbk",
u"gb18030",
u"hz",
u"iso2022_jp",
u"iso2022_jp_1",
u"iso2022_jp_2",
u"iso2022_jp_2004",
u"iso2022_jp_3",
u"iso2022_jp_ext",
u"iso2022_kr",
u"latin_1",
u"iso8859_2",
u"iso8859_3",
u"iso8859_4",
u"iso8859_5",
u"iso8859_6",
u"iso8859_7",
u"iso8859_8",
u"iso8859_9",
u"iso8859_10",
u"iso8859_13",
u"iso8859_14",
u"iso8859_15",
u"iso8859_16",
u"johab",
u"koi8_r",
u"koi8_u",
u"mac_cyrillic",
u"mac_greek",
u"mac_iceland",
u"mac_latin2",
u"mac_roman",
u"mac_turkish",
u"ptcp154",
u"shift_jis",
u"shift_jis_2004",
u"shift_jisx0213",
u"utf_32",
u"utf_32_be",
u"utf_32_le",
u"utf_16",
u"utf_16_be",
u"utf_16_le",
u"utf_7",
u"utf_8",
u"utf_8_sig"
]
def get_code_points(e):
rtn = u""
    for i in range(0, 255):
try:
c = bytes([i]).decode(e)
rtn += c
except:
pass # Error, ignore this character
return rtn
for e in ENCODINGS:
msg = u"This file is encoded in " + e + "\r\nHere are there first 255 code points in this encoding:\r\n"
print(msg)
d = e
if not os.path.exists(e):
os.makedirs(e)
filename = d + "/" + e
f = codecs.open(filename, "w", encoding=e)
points = get_code_points(e)
try:
f.write(msg + points)
except:
print("Encoding error writing to " + filename)
f.close()
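# --- Illustrative sketch (not part of the original script) ---
# A tiny self-contained check of the per-byte decode used in get_code_points():
# a byte value that is valid in the target encoding decodes to a character, an
# invalid one raises UnicodeDecodeError and is skipped, just as the try/except
# in get_code_points() above does.
assert bytes([0x41]).decode("ascii") == "A"
try:
    bytes([0x80]).decode("ascii")
except UnicodeDecodeError:
    pass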
|
RobertElderSoftware/roberteldersoftwarediff
|
tests/make_encoded_files.py
|
Python
|
apache-2.0
| 2,258
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Ying Liu, Cisco Systems, Inc.
import logging
from webob import exc
from quantum.api import api_common as common
from quantum.api import extensions
from quantum.api.views import ports as port_view
from quantum.manager import QuantumManager
from quantum.plugins.cisco.common import cisco_faults as faults
from quantum import wsgi
LOG = logging.getLogger('quantum.api.multiports')
class Multiport(object):
"""extension class multiport"""
def __init__(self):
pass
@classmethod
def get_name(cls):
""" Returns Ext Resource Name """
return "Cisco Multiport"
@classmethod
def get_alias(cls):
""" Returns Ext Resource Alias """
return "Cisco Multiport"
@classmethod
def get_description(cls):
""" Returns Ext Resource Description """
return "handle multiple ports in one call"
@classmethod
def get_namespace(cls):
""" Returns Ext Resource Namespace """
return "http://docs.ciscocloud.com/api/ext/multiport/v1.0"
@classmethod
def get_updated(cls):
""" Returns Ext Resource Update Time """
return "2011-08-25T13:25:27-06:00"
@classmethod
def get_resources(cls):
""" Returns Ext Resources """
parent_resource = dict(member_name="tenant",
collection_name="extensions/csco/tenants")
controller = MultiportController(QuantumManager.get_plugin())
return [extensions.ResourceExtension('multiport', controller,
parent=parent_resource)]
class MultiportController(common.QuantumController, wsgi.Controller):
""" multiport API controller
based on QuantumController """
_multiport_ops_param_list = [
{'param-name': 'net_id_list', 'required': True},
{'param-name': 'status', 'required': True},
{'param-name': 'ports_desc', 'required': True},
]
_serialization_metadata = {
"application/xml": {
"attributes": {
"multiport": ["id", "name"],
},
},
}
def __init__(self, plugin):
self._resource_name = 'multiport'
self._plugin = plugin
self.version = "1.0"
# pylint: disable-msg=E1101,W0613
def create(self, request, tenant_id):
""" Creates a new multiport for a given tenant """
try:
body = self._deserialize(request.body, request.get_content_type())
req_body = self._prepare_request_body(
body, self._multiport_ops_param_list)
req_params = req_body[self._resource_name]
except exc.HTTPError as exp:
return faults.Fault(exp)
multiports = self._plugin.create_multiport(tenant_id,
req_params['net_id_list'],
req_params['status'],
req_params['ports_desc'])
builder = port_view.get_view_builder(request, self.version)
result = [builder.build(port)['port'] for port in multiports]
return dict(ports=result)
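# --- Illustrative note (not from the repository) ---
# create() above deserializes the request body and requires the parameters
# listed in _multiport_ops_param_list. A hypothetical JSON payload (field
# values are made up for illustration) could look like:
#
#     {"multiport": {"net_id_list": ["net-1", "net-2"],
#                    "status": "ACTIVE",
#                    "ports_desc": {"description": "example"}}}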
|
aristanetworks/arista-ovs-quantum
|
quantum/extensions/multiport.py
|
Python
|
apache-2.0
| 3,854
|
route = list()
route.append(('GET', '/', 'demo.Demo.welcome'))
|
ShareAny/LuckyPython
|
application/config/route.py
|
Python
|
apache-2.0
| 63
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.experimental.scan()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
from tensorflow.python.data.experimental.ops import scan_ops
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class ScanTest(test_base.DatasetTestBase):
def _counting_dataset(self, start, scan_fn):
return dataset_ops.Dataset.from_tensors(0).repeat().apply(
scan_ops.scan(start, scan_fn))
def testCount(self):
def make_scan_fn(step):
return lambda state, _: (state + step, state)
def dataset_fn(start, step, take):
return self._counting_dataset(start, make_scan_fn(step)).take(take)
for start_val, step_val, take_val in [(0, 1, 10), (0, 1, 0), (10, 1, 10),
(10, 2, 10), (10, -1, 10), (10, -2,
10)]:
next_element = self.getNext(dataset_fn(start_val, step_val, take_val))
for expected, _ in zip(
itertools.count(start_val, step_val), range(take_val)):
self.assertEqual(expected, self.evaluate(next_element()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(next_element())
def testFibonacci(self):
data = dataset_ops.Dataset.from_tensors(1).repeat(None).apply(
scan_ops.scan([0, 1], lambda a, _: ([a[1], a[0] + a[1]], a[1])))
next_element = self.getNext(data)
self.assertEqual(1, self.evaluate(next_element()))
self.assertEqual(1, self.evaluate(next_element()))
self.assertEqual(2, self.evaluate(next_element()))
self.assertEqual(3, self.evaluate(next_element()))
self.assertEqual(5, self.evaluate(next_element()))
self.assertEqual(8, self.evaluate(next_element()))
# TODO(b/119837791): Add coverage for eager.
@test_util.run_deprecated_v1
def testSkipEagerSparseCount(self):
def _sparse(i):
return sparse_tensor.SparseTensorValue(
indices=np.array([[0, 0]]),
values=(i * np.array([1])),
dense_shape=np.array([1, 1]))
def make_scan_fn(step):
return lambda state, _: (_sparse(state.values[0] + step), state)
def dataset_fn(start, step, take):
return self._counting_dataset(_sparse(start),
make_scan_fn(step)).take(take)
for start_val, step_val, take_val in [(0, 1, 10), (0, 1, 0), (10, 1, 10),
(10, 2, 10), (10, -1, 10), (10, -2,
10)]:
next_element = self.getNext(dataset_fn(start_val, step_val, take_val))
for expected, _ in zip(
itertools.count(start_val, step_val), range(take_val)):
self.assertEqual(expected, self.evaluate(next_element()).values[0])
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(next_element())
def testChangingStateShape(self):
# Test the fixed-point shape invariant calculations: start with
# initial values with known shapes, and use a scan function that
# changes the size of the state on each element.
def _scan_fn(state, input_value):
# Statically known rank, but dynamic length.
ret_longer_vector = array_ops.concat([state[0], state[0]], 0)
# Statically unknown rank.
ret_larger_rank = array_ops.expand_dims(state[1], 0)
return (ret_longer_vector, ret_larger_rank), (state, input_value)
dataset = dataset_ops.Dataset.from_tensors(0).repeat(5).apply(
scan_ops.scan(([0], 1), _scan_fn))
self.assertEqual([None], dataset.output_shapes[0][0].as_list())
self.assertIs(None, dataset.output_shapes[0][1].ndims)
self.assertEqual([], dataset.output_shapes[1].as_list())
next_element = self.getNext(dataset)
for i in range(5):
(longer_vector_val, larger_rank_val), _ = self.evaluate(next_element())
self.assertAllEqual([0] * (2**i), longer_vector_val)
self.assertAllEqual(np.array(1, ndmin=i), larger_rank_val)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(next_element())
def testIncorrectStateType(self):
def _scan_fn(state, _):
return constant_op.constant(1, dtype=dtypes.int64), state
dataset = dataset_ops.Dataset.range(10)
with self.assertRaisesRegexp(
TypeError,
"The element types for the new state must match the initial state."):
dataset.apply(
scan_ops.scan(constant_op.constant(1, dtype=dtypes.int32), _scan_fn))
def testIncorrectReturnType(self):
def _scan_fn(unused_state, unused_input_value):
return constant_op.constant(1, dtype=dtypes.int64)
dataset = dataset_ops.Dataset.range(10)
with self.assertRaisesRegexp(
TypeError,
"The scan function must return a pair comprising the new state and the "
"output value."):
dataset.apply(
scan_ops.scan(constant_op.constant(1, dtype=dtypes.int32), _scan_fn))
def testPreserveCardinality(self):
def scan_fn(state, val):
def py_fn(_):
raise StopIteration()
return state, script_ops.py_func(py_fn, [val], dtypes.int64)
dataset = dataset_ops.Dataset.from_tensors(0).apply(
scan_ops.scan(constant_op.constant(1), scan_fn))
get_next = self.getNext(dataset)
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(get_next())
if __name__ == "__main__":
test.main()
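# --- Illustrative sketch (not part of the original test) ---
# Plain-Python analogue of the state threading that scan_ops.scan() performs in
# testFibonacci above: scan_fn maps (state, input) -> (new_state, output).
#
#   def plain_scan(initial_state, scan_fn, inputs):
#       state = initial_state
#       for x in inputs:
#           state, output = scan_fn(state, x)
#           yield output
#
#   list(plain_scan([0, 1], lambda a, _: ([a[1], a[0] + a[1]], a[1]), range(6)))
#   # -> [1, 1, 2, 3, 5, 8]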
|
asimshankar/tensorflow
|
tensorflow/python/data/experimental/kernel_tests/scan_test.py
|
Python
|
apache-2.0
| 6,663
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 DNAnexus, Inc.
#
# This file is part of dx-toolkit (DNAnexus platform client libraries).
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function, unicode_literals, division, absolute_import
import os, sys, unittest, json, tempfile, subprocess, csv, shutil, re, base64, random, time
import pipes
import hashlib
import collections
from contextlib import contextmanager
import pexpect
import requests
import dxpy
from dxpy.scripts import dx_build_app
from dxpy_testutil import (DXTestCase, check_output, temporary_project, select_project, cd, override_environment,
generate_unique_username_email, without_project_context, without_auth, as_second_user)
import dxpy_testutil as testutil
from dxpy.exceptions import DXAPIError, DXSearchError, EXPECTED_ERR_EXIT_STATUS, HTTPError
from dxpy.compat import str, sys_encoding, open
from dxpy.utils.resolver import ResolutionError, _check_resolution_needed as check_resolution
@contextmanager
def chdir(dirname=None):
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
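# --- Illustrative usage note (not part of the original file) ---
# chdir() above is used further down in this file roughly like:
#     with chdir(tempfile.mkdtemp()):
#         ...  # cwd is restored on exit, even if the body raises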
def run(command, **kwargs):
print("$ %s" % (command,))
output = check_output(command, shell=True, **kwargs)
print(output)
return output
def create_file_in_project(fname, trg_proj_id, folder=None):
data = "foo"
if folder is None:
dxfile = dxpy.upload_string(data, name=fname, project=trg_proj_id, wait_on_close=True)
else:
dxfile = dxpy.upload_string(data, name=fname, project=trg_proj_id, folder=folder, wait_on_close=True)
return dxfile.get_id()
def create_project():
project_name = "test_dx_cp_" + str(random.randint(0, 1000000)) + "_" + str(int(time.time() * 1000))
return dxpy.api.project_new({'name': project_name})['id']
def rm_project(proj_id):
dxpy.api.project_destroy(proj_id, {"terminateJobs": True})
def create_folder_in_project(proj_id, path):
dxpy.api.project_new_folder(proj_id, {"folder": path})
def list_folder(proj_id, path):
output = dxpy.api.project_list_folder(proj_id, {"folder": path})
# Canonicalize to account for possibly different ordering
output['folders'] = set(output['folders'])
# (objects is a list of dicts-- which are not hashable-- so just
# sort them to canonicalize instead of putting them in a set)
output['objects'] = sorted(output['objects'])
return output
def makeGenomeObject():
# NOTE: for these tests we don't upload a full sequence file (which
# would be huge, for hg19). Importers and exporters that need to
# look at the full sequence file can't be run on this test
# contigset.
sequence_file = dxpy.upload_string("", hidden=True)
genome_record = dxpy.new_dxrecord()
genome_record.set_details({
"flat_sequence_file": {"$dnanexus_link": sequence_file.get_id()},
"contigs": {
"offsets": [0],
"names": ["chr1"],
"sizes": [249250621]
}
})
genome_record.add_types(["ContigSet"])
genome_record.close()
sequence_file.wait_on_close()
return genome_record.get_id()
class TestDXTestUtils(DXTestCase):
def test_temporary_project(self):
test_dirname = '/test_folder'
with temporary_project('test_temporary_project', select=True) as temp_project:
self.assertEquals('test_temporary_project:/', run('dx pwd').strip())
def test_select_project(self):
test_dirname = '/test_folder'
with temporary_project('test_select_project') as temp_project:
test_projectid = temp_project.get_id()
run('dx mkdir -p {project}:{dirname}'.format(project=test_projectid, dirname=test_dirname))
with select_project(test_projectid):
# This would fail if the project context hadn't been
# successfully changed by select_project
run('dx cd {dirname}'.format(dirname=test_dirname))
@unittest.skipUnless(testutil.TEST_ENV, 'skipping test that would clobber your local environment')
def test_without_project_context(self):
self.assertIn('DX_PROJECT_CONTEXT_ID', run('dx env --bash'))
with without_project_context():
self.assertNotIn('DX_PROJECT_CONTEXT_ID', run('dx env --bash'))
self.assertIn('DX_PROJECT_CONTEXT_ID', run('dx env --bash'))
@unittest.skipUnless(testutil.TEST_ENV, 'skipping test that would clobber your local environment')
def test_without_auth(self):
self.assertIn('DX_SECURITY_CONTEXT', run('dx env --bash'))
with without_auth():
self.assertNotIn('DX_SECURITY_CONTEXT', run('dx env --bash'))
self.assertIn('DX_SECURITY_CONTEXT', run('dx env --bash'))
@unittest.skipUnless(testutil.TEST_MULTIPLE_USERS, 'skipping test that would require multiple users')
def test_as_second_user(self):
default_user = run('dx whoami').strip()
second_user = run('dx whoami', env=as_second_user()).strip()
expected_user = json.loads(os.environ['DXTEST_SECOND_USER'])['user'].split('-')[1]
self.assertEqual(expected_user, second_user)
self.assertNotEqual(default_user, second_user)
# TODO: these 'dx rm' and related commands should really exit with code 3 to distinguish user and internal errors
class TestDXRemove(DXTestCase):
def test_remove_folders(self):
folder_name = "/test_folder"
record_name = "test_folder"
record_name2 = "test_folder2"
# Throw error on non-existent folder
with self.assertSubprocessFailure(exit_code=1):
run("dx rm -r {f}".format(f=folder_name))
        # make a folder and a record of the same name, confirm that the record is deleted with a regular rm call
create_folder_in_project(self.project, folder_name)
self.assertIn(folder_name, list_folder(self.project, "/")['folders'])
run("dx new record {f}".format(f=record_name))
self.assertEquals(record_name,
dxpy.find_one_data_object(classname="record",
describe=True,
project=self.project)['describe']['name'])
# -r flag shouldn't matter, object will take precedence over folder
run("dx rm -r {f}".format(f=record_name))
with self.assertRaises(DXSearchError):
dxpy.find_one_data_object(classname="record", describe=True, project=self.project)
# if no -r flag provided, should throw error since it's a folder
with self.assertSubprocessFailure(exit_code=1):
run("dx rm {f}".format(f=record_name))
# finally remove the folder
run("dx rm -r {f}".format(f=record_name))
self.assertNotIn(folder_name, list_folder(self.project, "/")['folders'])
# make a record and then try to delete that record along with a non-existent record
run("dx new record {f}".format(f=record_name))
self.assertEquals(record_name,
dxpy.find_one_data_object(classname="record",
describe=True,
project=self.project)['describe']['name'])
with self.assertSubprocessFailure(exit_code=1):
run("dx rm {f} {f2}".format(f=record_name, f2=record_name2))
class TestDXClient(DXTestCase):
def test_dx_version(self):
version = run("dx --version")
self.assertIn("dx", version)
def test_dx_debug_request_id(self):
(stdout, stderr) = run("_DX_DEBUG=1 dx ls", also_return_stderr=True)
self.assertRegexpMatches(stderr, "POST \d{13}-\d{1,6} http", msg="stderr does not appear to contain request ID")
def test_dx_actions(self):
with self.assertRaises(subprocess.CalledProcessError):
run("dx")
run("dx help")
folder_name = "эксперимент 1"
cd("/")
run("dx ls")
run("dx mkdir '{f}'".format(f=folder_name))
cd(folder_name)
with tempfile.NamedTemporaryFile() as f:
local_filename = f.name
filename = folder_name
run("echo xyzzt > {tf}".format(tf=local_filename))
fileid = run("dx upload --wait {tf} -o '../{f}/{f}' --brief".format(tf=local_filename,
f=filename))
self.assertEqual(fileid, run("dx ls '../{f}/{f}' --brief".format(f=filename)))
self.assertEqual("xyzzt\n", run("dx head '../{f}/{f}'".format(f=filename)))
run("dx pwd")
cd("..")
run("dx pwd")
run("dx ls")
with self.assertRaises(subprocess.CalledProcessError):
run("dx rm '{f}'".format(f=filename))
cd(folder_name)
run("dx mv '{f}' '{f}2'".format(f=filename))
run("dx mv '{f}2' '{f}'".format(f=filename))
run("dx rm '{f}'".format(f=filename))
table_name = folder_name
with tempfile.NamedTemporaryFile(suffix='.csv') as f:
writer = csv.writer(f)
writer.writerows([['a:uint8', 'b:string', 'c:float'], [1, "x", 1.0], [2, "y", 4.0]])
f.flush()
run("dx import csv -o '../{n}' '{f}' --wait".format(n=table_name, f=f.name))
run("dx export csv '../{n}' --output {o} -f".format(n=table_name, o=f.name))
run("dx get_details '../{n}'".format(n=table_name))
cd("..")
run("dx rmdir '{f}'".format(f=folder_name))
run("dx tree")
run("dx find data --name '{n}'".format(n=table_name))
run("dx find data --name '{n} --property foo=bar'".format(n=table_name))
run("dx rename '{n}' '{n}'2".format(n=table_name))
run("dx rename '{n}'2 '{n}'".format(n=table_name))
run("dx set_properties '{n}' '{n}={n}' '{n}2={n}3'".format(n=table_name))
run("dx unset_properties '{n}' '{n}' '{n}2'".format(n=table_name))
run("dx tag '{n}' '{n}'2".format(n=table_name))
run("dx describe '{n}'".format(n=table_name))
# Path resolution is used
run("dx find jobs --project :")
run("dx find executions --project :")
run("dx find analyses --project :")
run("dx find data --project :")
def test_get_unicode_url(self):
with self.assertSubprocessFailure(stderr_regexp="ResourceNotFound", exit_code=3):
run("dx api project-эксперимент describe")
def test_dx_env(self):
run("dx env")
run("dx env --bash")
run("dx env --dx-flags")
def test_dx_api(self):
with tempfile.NamedTemporaryFile() as fd:
fd.write("{}")
fd.flush()
run("dx api {p} describe --input {fn}".format(p=self.project, fn=fd.name))
@unittest.skipUnless(testutil.TEST_NO_RATE_LIMITS,
'skipping tests that need rate limits to be disabled')
def test_dx_invite(self):
for query in ("Ψ", "alice.nonexistent", "alice.nonexistent {p}", "user-alice.nonexistent {p}",
"alice.nonexistent@example.com {p}", "alice.nonexistent : VIEW"):
with self.assertSubprocessFailure(stderr_regexp="ResourceNotFound", exit_code=3):
run(("dx invite "+query).format(p=self.project))
with self.assertSubprocessFailure(stderr_regexp="invalid choice", exit_code=2):
run(("dx invite alice.nonexistent : ПРОСМОТР").format(p=self.project))
@unittest.skipUnless(testutil.TEST_NO_RATE_LIMITS,
'skipping tests that need rate limits to be disabled')
def test_dx_uninvite(self):
for query in ("Ψ", "alice.nonexistent", "alice.nonexistent {p}", "user-alice.nonexistent {p}",
"alice.nonexistent@example.com {p}"):
with self.assertSubprocessFailure(stderr_regexp="ResourceNotFound", exit_code=3):
run(("dx uninvite "+query).format(p=self.project))
def test_dx_add_rm_types(self):
run("dx new record Ψ")
run("dx add_types Ψ abc xyz")
with self.assertSubprocessFailure(stderr_text="be an array of valid strings for a type name",
exit_code=1):
run("dx add_types Ψ ΨΨ")
run("dx remove_types Ψ abc xyz")
run("dx remove_types Ψ abc xyz")
with self.assertSubprocessFailure(stderr_regexp="Unable to resolve", exit_code=3):
run("dx remove_types ΨΨ Ψ")
def test_dx_set_details(self):
record_id = run("dx new record Ψ1 --brief").strip()
run("dx set_details Ψ1 '{\"foo\": \"bar\"}'")
dxrecord = dxpy.DXRecord(record_id)
details = dxrecord.get_details()
self.assertEqual({"foo": "bar"}, details, msg="dx set_details with valid JSON string input failed.")
def test_dx_set_details_with_file(self):
# Create temporary JSON file with valid JSON.
with tempfile.NamedTemporaryFile() as tmp_file, tempfile.NamedTemporaryFile() as tmp_invalid_file:
tmp_file.write('{\"foo\": \"bar\"}')
tmp_file.flush()
# Test -f with valid JSON file.
record_id = run("dx new record Ψ2 --brief").strip()
run("dx set_details Ψ2 -f " + pipes.quote(tmp_file.name))
dxrecord = dxpy.DXRecord(record_id)
details = dxrecord.get_details()
self.assertEqual({"foo": "bar"}, details, msg="dx set_details -f with valid JSON input file failed.")
# Test --details-file with valid JSON file.
record_id = run("dx new record Ψ3 --brief").strip()
run("dx set_details Ψ3 --details-file " + pipes.quote(tmp_file.name))
dxrecord = dxpy.DXRecord(record_id)
details = dxrecord.get_details()
self.assertEqual({"foo": "bar"}, details,
msg="dx set_details --details-file with valid JSON input file failed.")
# Create temporary JSON file with invalid JSON.
tmp_invalid_file.write('{\"foo\": \"bar\"')
tmp_invalid_file.flush()
# Test above with invalid JSON file.
record_id = run("dx new record Ψ4 --brief").strip()
with self.assertSubprocessFailure(stderr_regexp="JSON", exit_code=3):
run("dx set_details Ψ4 -f " + pipes.quote(tmp_invalid_file.name))
# Test command with (-f or --details-file) and CL JSON.
with self.assertSubprocessFailure(stderr_regexp="Error: Cannot provide both -f/--details-file and details",
exit_code=3):
run("dx set_details Ψ4 '{ \"foo\":\"bar\" }' -f " + pipes.quote(tmp_file.name))
# Test piping JSON from STDIN.
record_id = run("dx new record Ψ5 --brief").strip()
run("cat " + pipes.quote(tmp_file.name) + " | dx set_details Ψ5 -f -")
dxrecord = dxpy.DXRecord(record_id)
details = dxrecord.get_details()
self.assertEqual({"foo": "bar"}, details, msg="dx set_details -f - with valid JSON input failed.")
def test_dx_shell(self):
shell = pexpect.spawn("bash")
shell.logfile = sys.stdout
shell.sendline("dx sh")
shell.expect(">")
shell.sendline("Ψ 'Ψ Ψ'")
shell.expect("invalid choice: Ψ".encode(sys_encoding))
shell.expect(">")
shell.sendline("env")
shell.expect("Current user")
shell.sendline("help all")
shell.expect("Commands:")
shell.sendline("exit")
shell.sendline("echo find projects | dx sh")
shell.expect("project-")
def test_dx_get_record(self):
with chdir(tempfile.mkdtemp()):
run("dx new record -o :foo --verbose")
run("dx get :foo")
self.assertTrue(os.path.exists('foo.json'))
run("dx get --no-ext :foo")
self.assertTrue(os.path.exists('foo'))
run("diff -q foo foo.json")
def test_dx_object_tagging(self):
the_tags = ["Σ1=n", "helloo0", "ωω"]
# tag
record_id = run("dx new record Ψ --brief").strip()
run("dx tag Ψ " + " ".join(the_tags))
mytags = dxpy.describe(record_id)['tags']
for tag in the_tags:
self.assertIn(tag, mytags)
# untag
run("dx untag Ψ " + " ".join(the_tags[:2]))
mytags = dxpy.describe(record_id)['tags']
for tag in the_tags[:2]:
self.assertNotIn(tag, mytags)
self.assertIn(the_tags[2], mytags)
# -a flag
second_record_id = run("dx new record Ψ --brief").strip()
self.assertNotEqual(record_id, second_record_id)
run("dx tag -a Ψ " + " ".join(the_tags))
mytags = dxpy.describe(record_id)['tags']
for tag in the_tags:
self.assertIn(tag, mytags)
second_tags = dxpy.describe(second_record_id)['tags']
for tag in the_tags:
self.assertIn(tag, second_tags)
run("dx untag -a Ψ " + " ".join(the_tags))
mytags = dxpy.describe(record_id)['tags']
self.assertEqual(len(mytags), 0)
second_tags = dxpy.describe(second_record_id)['tags']
self.assertEqual(len(second_tags), 0)
# nonexistent name
with self.assertSubprocessFailure(stderr_regexp='Unable to resolve', exit_code=3):
run("dx tag nonexistent atag")
with self.assertSubprocessFailure(stderr_regexp='Unable to resolve', exit_code=3):
run("dx untag nonexistent atag")
def test_dx_project_tagging(self):
the_tags = ["$my.tag", "secoиdtag", "тhird тagggg"]
# tag
run("dx tag : \\" + the_tags[0] + " " + the_tags[1] + " '" + the_tags[2] + "'")
mytags = dxpy.describe(self.project)['tags']
for tag in the_tags:
self.assertIn(tag, mytags)
# untag
run("dx untag : \\" + the_tags[0] + " '" + the_tags[2] + "'")
mytags = dxpy.describe(self.project)['tags']
self.assertIn(the_tags[1], mytags)
for tag in [the_tags[0], the_tags[2]]:
self.assertNotIn(tag, mytags)
# nonexistent name
with self.assertSubprocessFailure(stderr_regexp='Could not find a project named', exit_code=3):
run("dx tag nonexistent: atag")
with self.assertSubprocessFailure(stderr_regexp='Could not find a project named', exit_code=3):
run("dx untag nonexistent: atag")
def test_dx_object_properties(self):
property_names = ["Σ_1^n", "helloo0", "ωω"]
property_values = ["n", "world z", "ω()"]
# set_properties
record_id = run("dx new record Ψ --brief").strip()
run("dx set_properties Ψ " +
" ".join(["'" + prop[0] + "'='" + prop[1] + "'" for prop in zip(property_names,
property_values)]))
my_properties = dxpy.api.record_describe(record_id, {"properties": True})['properties']
for (name, value) in zip(property_names, property_values):
self.assertIn(name, my_properties)
self.assertEqual(value, my_properties[name])
# unset_properties
run("dx unset_properties Ψ '" + "' '".join(property_names[:2]) + "'")
my_properties = dxpy.api.record_describe(record_id, {"properties": True})['properties']
for name in property_names[:2]:
self.assertNotIn(name, my_properties)
self.assertIn(property_names[2], my_properties)
self.assertEqual(property_values[2], my_properties[property_names[2]])
# -a flag
second_record_id = run("dx new record Ψ --brief").strip()
self.assertNotEqual(record_id, second_record_id)
run("dx set_properties -a Ψ " +
" ".join(["'" + prop[0] + "'='" + prop[1] + "'" for prop in zip(property_names,
property_values)]))
my_properties = dxpy.api.record_describe(record_id, {"properties": True})['properties']
for (name, value) in zip(property_names, property_values):
self.assertIn(name, my_properties)
self.assertEqual(value, my_properties[name])
second_properties = dxpy.api.record_describe(second_record_id,
{"properties": True})['properties']
for (name, value) in zip(property_names, property_values):
self.assertIn(name, my_properties)
self.assertEqual(value, my_properties[name])
run("dx unset_properties -a Ψ '" + "' '".join(property_names) + "'")
my_properties = dxpy.api.record_describe(record_id, {"properties": True})['properties']
self.assertEqual(len(my_properties), 0)
second_properties = dxpy.api.record_describe(second_record_id,
{"properties": True})['properties']
self.assertEqual(len(second_properties), 0)
# nonexistent name
with self.assertSubprocessFailure(stderr_regexp='Unable to resolve', exit_code=3):
run("dx set_properties nonexistent key=value")
with self.assertSubprocessFailure(stderr_regexp='Unable to resolve', exit_code=3):
run("dx unset_properties nonexistent key")
# Errors parsing --property value
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx set_properties -a Ψ ''")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx set_properties -a Ψ foo=bar=baz")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx set_properties -a Ψ =foo=bar=")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx set_properties -a Ψ foo")
# Property keys must be nonempty
with self.assertSubprocessFailure(stderr_regexp='nonempty strings', exit_code=3):
run("dx set_properties -a Ψ =bar")
# Empty string values should be okay
run("dx set_properties -a Ψ bar=")
my_properties = dxpy.api.record_describe(record_id, {"properties": True})['properties']
self.assertEqual(my_properties["bar"], "")
def test_dx_project_properties(self):
property_names = ["$my.prop", "secoиdprop", "тhird prop"]
property_values = ["$hello.world", "Σ2,n", "stuff"]
# set_properties
run("dx set_properties : " +
" ".join(["'" + prop[0] + "'='" + prop[1] + "'" for prop in zip(property_names,
property_values)]))
my_properties = dxpy.api.project_describe(self.project, {"properties": True})['properties']
for (name, value) in zip(property_names, property_values):
self.assertIn(name, my_properties)
self.assertEqual(value, my_properties[name])
# unset_properties
run("dx unset_properties : '" + property_names[0] + "' '" + property_names[2] + "'")
my_properties = dxpy.api.project_describe(self.project, {"properties": True})['properties']
self.assertIn(property_names[1], my_properties)
self.assertEqual(property_values[1], my_properties[property_names[1]])
for name in [property_names[0], property_names[2]]:
self.assertNotIn(name, my_properties)
# nonexistent name
with self.assertSubprocessFailure(stderr_regexp='Could not find a project named', exit_code=3):
run("dx set_properties nonexistent: key=value")
with self.assertSubprocessFailure(stderr_regexp='Could not find a project named', exit_code=3):
run("dx unset_properties nonexistent: key")
# Errors parsing --property value
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx set_properties : ''")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx set_properties : foo=bar=baz")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx set_properties : =foo=bar=")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx set_properties : foo")
# Property keys must be nonempty
with self.assertSubprocessFailure(stderr_regexp='nonempty strings', exit_code=3):
run("dx set_properties : =bar")
# Empty string values should be okay
run("dx set_properties : bar=")
my_properties = dxpy.api.project_describe(self.project, {"properties": True})['properties']
self.assertEqual(my_properties["bar"], "")
@unittest.skipUnless(testutil.TEST_ONLY_MASTER, 'skipping test that requires latest server version')
def test_dx_describe_project(self):
# Look for field name, some number of spaces, and then the value
field_regexp = lambda fieldname, value: \
"(^|\n)" + re.escape(fieldname) + " +" + re.escape(value) + "(\n|$)"
desc_output = run("dx describe :").strip()
self.assertRegex(desc_output, field_regexp("ID", self.project))
self.assertRegex(desc_output, field_regexp("Name", "dxclient_test_pröject"))
self.assertRegex(desc_output, field_regexp("Region", "aws:us-east-1"))
self.assertRegex(desc_output, field_regexp("Contains PHI", "false"))
self.assertNotRegex(desc_output, field_regexp("Archival state", "null"))
self.assertNotRegex(desc_output, field_regexp("Archival progress", "null"))
self.assertRegex(desc_output, field_regexp("Data usage", "0.00 GB"))
self.assertRegex(desc_output, field_regexp("Storage cost", "$0.000/month"))
self.assertRegex(desc_output, field_regexp("Sponsored egress", "0.00 GB used of 0.00 GB total"))
self.assertRegex(desc_output, field_regexp("At spending limit?", "false"))
self.assertRegex(desc_output, field_regexp("Properties", "-"))
desc_output = run("dx describe --verbose :").strip()
self.assertRegex(desc_output, field_regexp("Archival state", "live"))
self.assertRegex(desc_output, field_regexp("Archival progress", "null"))
def test_dx_remove_project_by_name(self):
# TODO: this test makes no use of the DXTestCase-provided
# project.
project_name = ("test_dx_remove_project_by_name_" + str(random.randint(0, 1000000)) + "_" +
str(int(time.time() * 1000)))
project_id = run("dx new project {name} --brief".format(name=project_name)).strip()
self.assertEqual(run("dx find projects --brief --name {name}".format(name=project_name)).strip(),
project_id)
run("dx rmproject -y {name}".format(name=project_name))
self.assertEqual(run("dx find projects --brief --name {name}".format(name=project_name)), "")
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that requires presence of test user')
def test_dx_project_invite_without_email(self):
user_id = 'user-000000000000000000000001'
with temporary_project() as unique_project:
project_id = unique_project.get_id()
# Check that user is not already invited to project
project_members = dxpy.api.project_describe(project_id, {'fields': {'permissions': True}})['permissions']
self.assertNotIn(user_id, project_members.keys())
# Test --no-email flag
res = run("dx invite {user} {project} VIEW --no-email".format(user=user_id, project=project_id)).strip()
exp = "Invited {user} to {project} (accepted)".format(user=user_id, project=project_id)
self.assertEqual(res, exp)
# Confirm user in project
conf = dxpy.api.project_describe(project_id, {'fields': {'permissions': True}})['permissions']
self.assertEqual(conf[user_id], 'VIEW')
def test_dx_cp(self):
project_name = "test_dx_cp_" + str(random.randint(0, 1000000)) + "_" + str(int(time.time() * 1000))
dest_project_id = run("dx new project {name} --brief".format(name=project_name)).strip()
try:
record_id = run("dx new record --brief --details '{\"hello\": 1}'").strip()
run("dx close --wait {r}".format(r=record_id))
self.assertEqual(run("dx ls --brief {p}".format(p=dest_project_id)), "")
run("dx cp {r} {p}".format(r=record_id, p=dest_project_id))
self.assertEqual(run("dx ls --brief {p}".format(p=dest_project_id)).strip(), record_id)
finally:
run("dx rmproject -y {p}".format(p=dest_project_id))
def test_dx_mkdir(self):
with self.assertRaises(subprocess.CalledProcessError):
run("dx mkdir mkdirtest/b/c")
run("dx mkdir -p mkdirtest/b/c")
run("dx mkdir -p mkdirtest/b/c")
run("dx rm -r mkdirtest")
@unittest.skip('PTFM-16383 Disable flaky test')
def test_dxpy_session_isolation(self):
for var in 'DX_PROJECT_CONTEXT_ID', 'DX_PROJECT_CONTEXT_NAME', 'DX_CLI_WD':
if var in os.environ:
del os.environ[var]
shell1 = pexpect.spawn("bash")
shell2 = pexpect.spawn("bash")
shell1.logfile = shell2.logfile = sys.stdout
shell1.setwinsize(20, 90)
shell2.setwinsize(20, 90)
def expect_dx_env_cwd(shell, wd):
shell.expect(self.project)
shell.expect(wd)
shell.expect([">", "#", "$"]) # prompt
shell1.sendline("dx select "+self.project)
shell1.sendline("dx mkdir /sessiontest1")
shell1.sendline("dx cd /sessiontest1")
shell1.sendline("dx env")
expect_dx_env_cwd(shell1, "sessiontest1")
shell2.sendline("dx select "+self.project)
shell2.sendline("dx mkdir /sessiontest2")
shell2.sendline("dx cd /sessiontest2")
shell2.sendline("dx env")
expect_dx_env_cwd(shell2, "sessiontest2")
shell2.sendline("bash -c 'dx env'")
expect_dx_env_cwd(shell2, "sessiontest2")
shell1.sendline("dx env")
expect_dx_env_cwd(shell1, "sessiontest1")
# Grandchild subprocess inherits session
try:
shell1.sendline("bash -c 'dx env'")
expect_dx_env_cwd(shell1, "sessiontest1")
except:
print("*** TODO: FIXME: Unable to verify that grandchild subprocess inherited session")
def test_dx_ssh_config_revoke(self):
original_ssh_public_key = None
user_id = dxpy.whoami()
original_ssh_public_key = dxpy.api.user_describe(user_id).get("sshPublicKey")
wd = tempfile.mkdtemp()
os.mkdir(os.path.join(wd, ".dnanexus_config"))
def revoke_ssh_public_key(args=["ssh_config", "--revoke"]):
dx_ssh_config_revoke = pexpect.spawn("dx", args=args)
dx_ssh_config_revoke.expect("revoked")
def set_ssh_public_key():
dx_ssh_config = pexpect.spawn("dx ssh_config", env=override_environment(HOME=wd))
dx_ssh_config.logfile = sys.stdout
dx_ssh_config.expect("Select an SSH key pair")
dx_ssh_config.sendline("0")
dx_ssh_config.expect("Enter passphrase")
dx_ssh_config.sendline()
dx_ssh_config.expect("again")
dx_ssh_config.sendline()
dx_ssh_config.expect("Your account has been configured for use with SSH")
def assert_same_ssh_pub_key():
self.assertTrue(os.path.exists(os.path.join(wd, ".dnanexus_config/ssh_id")))
with open(os.path.join(wd, ".dnanexus_config/ssh_id.pub")) as fh:
self.assertEquals(fh.read(), dxpy.api.user_describe(user_id).get('sshPublicKey'))
try:
# public key exists
set_ssh_public_key()
assert_same_ssh_pub_key()
revoke_ssh_public_key()
self.assertNotIn("sshPublicKey", dxpy.api.user_describe(user_id))
# public key does not exist
revoke_ssh_public_key()
self.assertNotIn("sshPublicKey", dxpy.api.user_describe(user_id))
# random input after '--revoke'
revoke_ssh_public_key(args=["ssh_config", '--revoke', 'asdf'])
self.assertNotIn("sshPublicKey", dxpy.api.user_describe(user_id))
finally:
if original_ssh_public_key:
dxpy.api.user_update(user_id, {"sshPublicKey": original_ssh_public_key})
def test_dx_ssh_config(self):
original_ssh_public_key = None
try:
user_id = dxpy.whoami()
original_ssh_public_key = dxpy.api.user_describe(user_id).get('sshPublicKey')
wd = tempfile.mkdtemp()
def get_dx_ssh_config():
dx_ssh_config = pexpect.spawn("dx ssh_config", env=override_environment(HOME=wd))
dx_ssh_config.logfile = sys.stdout
dx_ssh_config.setwinsize(20, 90)
return dx_ssh_config
def read_back_pub_key():
self.assertTrue(os.path.exists(os.path.join(wd, ".dnanexus_config/ssh_id")))
with open(os.path.join(wd, ".dnanexus_config/ssh_id.pub")) as fh:
self.assertEqual(fh.read(), dxpy.api.user_describe(user_id).get('sshPublicKey'))
dx_ssh_config = get_dx_ssh_config()
dx_ssh_config.expect("The DNAnexus configuration directory")
dx_ssh_config.expect("does not exist")
os.mkdir(os.path.join(wd, ".dnanexus_config"))
dx_ssh_config = get_dx_ssh_config()
dx_ssh_config.expect("Select an SSH key pair")
dx_ssh_config.sendline("1")
dx_ssh_config.expect("Enter the location of your SSH key")
dx_ssh_config.sendline("нет ключа")
dx_ssh_config.expect("Unable to find")
dx_ssh_config = get_dx_ssh_config()
dx_ssh_config.expect("Select an SSH key pair")
dx_ssh_config.sendline("0")
dx_ssh_config.expect("Enter passphrase")
dx_ssh_config.sendline()
dx_ssh_config.expect("again")
dx_ssh_config.sendline()
dx_ssh_config.expect("Your account has been configured for use with SSH")
read_back_pub_key()
dx_ssh_config = get_dx_ssh_config()
dx_ssh_config.expect("Select an SSH key pair")
dx_ssh_config.expect("already configured")
dx_ssh_config.sendline("0")
dx_ssh_config.expect("Your account has been configured for use with SSH")
read_back_pub_key()
dx_ssh_config = get_dx_ssh_config()
dx_ssh_config.expect("Select an SSH key pair")
dx_ssh_config.expect("already configured")
dx_ssh_config.sendline("1")
dx_ssh_config.expect("Generate a new SSH key pair")
dx_ssh_config.sendline("0")
dx_ssh_config.expect("Enter passphrase")
dx_ssh_config.sendline()
dx_ssh_config.expect("again")
dx_ssh_config.sendline()
dx_ssh_config.expect("Your account has been configured for use with SSH")
read_back_pub_key()
# Ensure that private key upload is rejected
with open(os.path.join(wd, ".dnanexus_config", "ssh_id")) as private_key:
with self.assertRaisesRegexp(DXAPIError,
'Tried to put a private key in the sshPublicKey field'):
dxpy.api.user_update(user_id, {"sshPublicKey": private_key.read()})
finally:
if original_ssh_public_key:
dxpy.api.user_update(user_id, {"sshPublicKey": original_ssh_public_key})
@contextmanager
def configure_ssh(self):
original_ssh_public_key = None
try:
user_id = dxpy.whoami()
original_ssh_public_key = dxpy.api.user_describe(user_id).get('sshPublicKey')
wd = tempfile.mkdtemp()
os.mkdir(os.path.join(wd, ".dnanexus_config"))
dx_ssh_config = pexpect.spawn("dx ssh_config", env=override_environment(HOME=wd))
dx_ssh_config.logfile = sys.stdout
dx_ssh_config.setwinsize(20, 90)
dx_ssh_config.expect("Select an SSH key pair")
dx_ssh_config.sendline("0")
dx_ssh_config.expect("Enter passphrase")
dx_ssh_config.sendline()
dx_ssh_config.expect("again")
dx_ssh_config.sendline()
dx_ssh_config.expect("Your account has been configured for use with SSH")
yield wd
finally:
if original_ssh_public_key:
dxpy.api.user_update(user_id, {"sshPublicKey": original_ssh_public_key})
@unittest.skipUnless(testutil.TEST_RUN_JOBS, "Skipping test that would run jobs")
def test_dx_ssh(self):
with self.configure_ssh() as wd:
sleep_applet = dxpy.api.applet_new(dict(name="sleep",
runSpec={"code": "sleep 1200",
"interpreter": "bash",
"execDepends": [{"name": "dx-toolkit"}]},
inputSpec=[], outputSpec=[],
dxapi="1.0.0", version="1.0.0",
project=self.project))["id"]
dx = pexpect.spawn("dx run {} --yes --ssh".format(sleep_applet),
env=override_environment(HOME=wd))
dx.logfile = sys.stdout
dx.setwinsize(20, 90)
dx.expect("Waiting for job")
dx.expect("Resolving job hostname and SSH host key", timeout=1200)
# Wait for the line displayed between the first and second MOTDs,
# since we only care about checking the second set of MOTD lines.
# Example of the dividing line:
# dnanexus@job-BP90K3Q0X2v81PXXPZj005Zj.dnanex.us (10.0.0.200) - byobu
dx.expect(["dnanexus.io \(10.0.0.200\) - byobu",
"dnanex.us \(10.0.0.200\) - byobu"], timeout=120)
dx.expect("This is the DNAnexus Execution Environment", timeout=600)
# Check for job name (e.g. "Job: sleep")
dx.expect("Job: \x1b\[1msleep", timeout=5)
# \xf6 is ö
dx.expect("Project: dxclient_test_pr\xf6ject".encode(sys_encoding))
dx.expect("The job is running in terminal 1.", timeout=5)
# Check for terminal prompt and verify we're in the container
job_id = dxpy.find_jobs(name="sleep", project=self.project).next()['id']
dx.expect(("dnanexus@%s" % job_id), timeout=10)
# Make sure the job can be connected to using 'dx ssh <job id>'
dx2 = pexpect.spawn("dx ssh " + job_id, env=override_environment(HOME=wd))
dx2.logfile = sys.stdout
dx2.setwinsize(20, 90)
dx2.expect("Waiting for job")
dx2.expect("Resolving job hostname and SSH host key", timeout=1200)
dx2.expect(("dnanexus@%s" % job_id), timeout=10)
dx2.sendline("whoami")
dx2.expect("dnanexus", timeout=10)
# Exit SSH session and terminate job
dx2.sendline("exit")
dx2.expect("bash running")
dx2.sendcontrol("c") # CTRL-c
dx2.expect("[exited]")
dx2.expect("dnanexus@job", timeout=10)
dx2.sendline("exit")
dx2.expect("still running. Terminate now?")
dx2.sendline("y")
dx2.expect("Terminated job", timeout=60)
@unittest.skipUnless(testutil.TEST_RUN_JOBS, "Skipping test that would run jobs")
def test_dx_run_debug_on(self):
with self.configure_ssh() as wd:
crash_applet = dxpy.api.applet_new(dict(name="crash",
runSpec={"code": "exit 5", "interpreter": "bash",
"execDepends": [{"name": "dx-toolkit"}]},
inputSpec=[], outputSpec=[],
dxapi="1.0.0", version="1.0.0",
project=self.project))["id"]
job_id = run("dx run {} --yes --brief --debug-on AppInternalError".format(crash_applet),
env=override_environment(HOME=wd)).strip()
elapsed = 0
while True:
job_desc = dxpy.describe(job_id)
if job_desc["state"] == "debug_hold":
break
time.sleep(1)
elapsed += 1
if elapsed > 1200:
raise Exception("Timeout while waiting for job to enter debug hold")
dx = pexpect.spawn("dx ssh " + job_id, env=override_environment(HOME=wd))
dx.logfile = sys.stdout
dx.setwinsize(20, 90)
dx.expect("dnanexus@", timeout=1200)
@unittest.skipUnless(testutil.TEST_DX_LOGIN,
'This test requires authserver to run, requires dx login to select the right authserver, ' +
'and may result in temporary account lockout. TODO: update test instrumentation to allow ' +
'it to run')
def test_dx_login(self):
wd = tempfile.mkdtemp()
username = dxpy.user_info()['username']
def get_dx_login(opts=""):
dx_login = pexpect.spawn("dx login" + opts, env=override_environment(HOME=wd))
dx_login.logfile = sys.stdout
dx_login.setwinsize(20, 90)
return dx_login
dx_login = get_dx_login(" --token BAD_TOKEN")
dx_login.expect("The token could not be found")
dx_login.close()
self.assertEqual(dx_login.exitstatus, 1)
dx_login = get_dx_login(" --auth-token BAD_TOKEN")
dx_login.expect("The token could not be found")
dx_login.close()
self.assertEqual(dx_login.exitstatus, 1)
dx_login = get_dx_login()
dx_login.expect("Acquiring credentials")
dx_login.expect("Username")
dx_login.sendline(username)
dx_login.expect("Password: ")
dx_login.sendline("wrong passwörd")
dx_login.expect("Incorrect username and/or password")
dx_login.expect("Username")
dx_login.sendline()
dx_login.expect("Password: ")
dx_login.sendline("wrong passwörd")
dx_login.expect("Incorrect username and/or password")
dx_login.expect("Username")
dx_login.sendline()
dx_login.expect("Password: ")
dx_login.sendline("wrong passwörd")
dx_login.expect("dx: Incorrect username and/or password")
dx_login.close()
self.assertEqual(dx_login.exitstatus, EXPECTED_ERR_EXIT_STATUS)
def test_dx_with_bad_job_id_env(self):
env = override_environment(DX_JOB_ID="foobar")
run("dx env", env=env)
@unittest.skipUnless(testutil.TEST_WITH_AUTHSERVER,
'skipping tests that require a running authserver')
def test_dx_http_request_handles_auth_errors(self):
# The JSON content cannot be processed.
with self.assertRaises(HTTPError):
dxpy.DXHTTPRequest(dxpy.get_auth_server_name() + "/oauth2/token",
{"grant_type": "authorization_code",
"redirect_uri": "/",
"client_id": "apiserver"},
prepend_srv=False,
max_retries=0)
class TestDXNewRecord(DXTestCase):
def test_new_record_basic(self):
run("dx new record -o :foo --verbose")
record_id = run("dx new record -o :foo2 --brief --visibility hidden --property foo=bar " +
"--property baz=quux --tag onetag --tag twotag --type foo --type bar " +
"--details '{\"hello\": \"world\"}'").strip()
self.assertEqual(record_id, run("dx ls :foo2 --brief").strip())
self.assertEqual({"hello": "world"}, json.loads(run("dx get -o - :foo2")))
second_record_id = run("dx new record :somenewfolder/foo --parents --brief").strip()
self.assertEqual(second_record_id, run("dx ls :somenewfolder/foo --brief").strip())
# describe
run("dx describe {record}".format(record=record_id))
desc = json.loads(run("dx describe {record} --details --json".format(record=record_id)))
self.assertEqual(desc['tags'], ['onetag', 'twotag'])
self.assertEqual(desc['types'], ['foo', 'bar'])
self.assertEqual(desc['properties'], {"foo": "bar", "baz": "quux"})
self.assertEqual(desc['details'], {"hello": "world"})
self.assertEqual(desc['hidden'], True)
desc = json.loads(run("dx describe {record} --json".format(record=second_record_id)))
self.assertEqual(desc['folder'], '/somenewfolder')
run("dx rm :foo")
run("dx rm :foo2")
run("dx rm -r :somenewfolder")
def test_dx_new_record_with_close(self):
record_id = run("dx new record --close --brief").strip()
self.assertEqual("closed", dxpy.describe(record_id)['state'])
second_record_id = run("dx new record --brief").strip()
self.assertEqual("open", dxpy.describe(second_record_id)['state'])
@unittest.skipUnless(testutil.TEST_ENV, 'skipping test that would clobber your local environment')
def test_new_record_without_context(self):
# Without project context, cannot create new object without
# project qualified path
with without_project_context():
with self.assertSubprocessFailure(stderr_regexp='expected the path to be qualified with a project',
exit_code=3):
run("dx new record foo")
# Can create object with explicit project qualifier
record_id = run("dx new record --brief " + self.project + ":foo").strip()
self.assertEqual(dxpy.DXRecord(record_id).name, "foo")
class TestGTables(DXTestCase):
def test_dx_gtables(self):
# new gtable
gri_gtable_id = run("dx new gtable --gri mychr mylo myhi " +
"--columns mychr,mylo:int32,myhi:int32 --brief --property hello=world " +
"--details '{\"hello\":\"world\"}' --visibility visible").strip()
# Add rows to it (?)
# TODO: make this better.
add_rows_input = {"data": [["chr", 1, 10], ["chr2", 3, 13], ["chr1", 3, 10], ["chr1", 11, 13],
["chr1", 5, 12]]}
run("dx api {gt} addRows '{rows}'".format(gt=gri_gtable_id, rows=json.dumps(add_rows_input)))
# close
run("dx close {gt} --wait".format(gt=gri_gtable_id))
# describe
desc = json.loads(run("dx describe {gt} --details --json".format(gt=gri_gtable_id)))
self.assertEqual(desc['types'], ['gri'])
self.assertEqual(desc['indices'],
[{"type": "genomic", "name": "gri", "chr": "mychr", "lo": "mylo", "hi": "myhi"}])
self.assertEqual(desc['properties'], {"hello": "world"})
self.assertEqual(desc['details'], {"hello": "world"})
self.assertEqual(desc['hidden'], False)
# gri query
self.assertEqual(run("dx export tsv {gt} --gri chr1 1 10 -o -".format(gt=gri_gtable_id)),
'\r\n'.join(['mychr:string\tmylo:int32\tmyhi:int32', 'chr1\t3\t10',
'chr1\t5\t12', '']))
# "get" is not supported on gtables
with self.assertSubprocessFailure(stderr_regexp='given object is of class gtable', exit_code=3):
run("dx get {gt}".format(gt=gri_gtable_id))
# Download and re-import with gri
with tempfile.NamedTemporaryFile(suffix='.csv') as fd:
run("dx export tsv {gt} -o {fd} -f".format(gt=gri_gtable_id, fd=fd.name))
fd.flush()
run("dx import tsv {fd} -o gritableimport --gri mychr mylo myhi --wait".format(fd=fd.name))
# Also, upload and download the file just to test out upload/download
run("dx upload {fd} -o uploadedfile --wait".format(fd=fd.name))
run("dx download uploadedfile -f")
run("dx download uploadedfile -o -")
try:
os.remove("uploadedfile")
except IOError:
pass
second_desc = json.loads(run("dx describe gritableimport --json"))
self.assertEqual(second_desc['types'], ['gri'])
self.assertEqual(second_desc['indices'],
[{"type": "genomic", "name": "gri", "chr": "mychr", "lo": "mylo", "hi": "myhi"}])
self.assertEqual(desc['size'], second_desc['size'])
self.assertEqual(desc['length'], second_desc['length'])
@unittest.skipUnless(testutil.TEST_ENV, 'skipping test that would clobber your local environment')
def test_dx_new_gtable_without_context(self):
# Without project context, cannot create new object without
# project qualified path
with without_project_context():
with self.assertSubprocessFailure(stderr_regexp='expected the path to be qualified with a project',
exit_code=3):
run("dx new gtable --columns mychr,mylo:int32,myhi:int32 foo")
# Can create object with explicit project qualifier
gtable_id = run(
"dx new gtable --brief --columns mychr,mylo:int32,myhi:int32 " + self.project + ":foo").strip()
self.assertEqual(dxpy.DXGTable(gtable_id).name, "foo")
class TestDXWhoami(DXTestCase):
def test_dx_whoami_name(self):
whoami_output = run("dx whoami").strip()
self.assertEqual(whoami_output, dxpy.api.user_describe(dxpy.whoami())['handle'])
def test_dx_whoami_id(self):
whoami_output = run("dx whoami --id").strip()
self.assertEqual(whoami_output, dxpy.whoami())
class TestDXClientUploadDownload(DXTestCase):
def test_dx_upload_download(self):
with self.assertSubprocessFailure(stderr_regexp='expected the path to be a non-empty string',
exit_code=3):
run('dx download ""')
wd = tempfile.mkdtemp()
os.mkdir(os.path.join(wd, "a"))
os.mkdir(os.path.join(wd, "a", "б"))
os.mkdir(os.path.join(wd, "a", "б", "c"))
with tempfile.NamedTemporaryFile(dir=os.path.join(wd, "a", "б")) as fd:
fd.write("0123456789ABCDEF"*64)
fd.flush()
with self.assertSubprocessFailure(stderr_regexp='is a directory but the -r/--recursive option was not given', exit_code=1):
run("dx upload "+wd)
run("dx upload -r "+wd)
run('dx wait "{f}"'.format(f=os.path.join(os.path.basename(wd), "a", "б",
os.path.basename(fd.name))))
with self.assertSubprocessFailure(stderr_regexp='is a folder but the -r/--recursive option was not given', exit_code=1):
run("dx download "+os.path.basename(wd))
old_dir = os.getcwd()
with chdir(tempfile.mkdtemp()):
run("dx download -r "+os.path.basename(wd))
tree1 = check_output("cd {wd}; find .".format(wd=wd), shell=True)
tree2 = check_output("cd {wd}; find .".format(wd=os.path.basename(wd)), shell=True)
self.assertEqual(tree1, tree2)
with chdir(tempfile.mkdtemp()):
os.mkdir('t')
run("dx download -r -o t "+os.path.basename(wd))
tree1 = check_output("cd {wd}; find .".format(wd=wd), shell=True)
tree2 = check_output("cd {wd}; find .".format(wd=os.path.join("t",
os.path.basename(wd))),
shell=True)
self.assertEqual(tree1, tree2)
os.mkdir('t2')
run("dx download -o t2 "+os.path.join(os.path.basename(wd), "a", "б",
os.path.basename(fd.name)))
self.assertEqual(os.stat(os.path.join("t2", os.path.basename(fd.name))).st_size,
len("0123456789ABCDEF"*64))
with chdir(tempfile.mkdtemp()), temporary_project('dx download test proj') as other_project:
run("dx mkdir /super/")
run("dx mv '{}' /super/".format(os.path.basename(wd)))
# Specify an absolute path in another project
with select_project(other_project):
run("dx download -r '{proj}:/super/{path}'".format(proj=self.project, path=os.path.basename(wd)))
tree1 = check_output("cd {wd} && find .".format(wd=wd), shell=True)
tree2 = check_output("cd {wd} && find .".format(wd=os.path.basename(wd)), shell=True)
self.assertEqual(tree1, tree2)
# Now specify a relative path in the same project
with chdir(tempfile.mkdtemp()), select_project(self.project):
run("dx download -r super/{path}/".format(path=os.path.basename(wd)))
tree3 = check_output("cd {wd} && find .".format(wd=os.path.basename(wd)), shell=True)
self.assertEqual(tree1, tree3)
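            # Create a folder with the same name as the file; downloading a
            # glob that matches both should be rejected as ambiguous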
with self.assertSubprocessFailure(stderr_regexp="paths are both file and folder names", exit_code=1):
cmd = "dx cd {d}; dx mkdir {f}; dx download -r {f}*"
run(cmd.format(d=os.path.join("/super", os.path.basename(wd), "a", "б"),
f=os.path.basename(fd.name)))
@unittest.skipUnless(testutil.TEST_WITH_AUTHSERVER,
'skipping tests that require a running authserver')
def test_dx_upload_with_upload_perm(self):
with temporary_project('test proj with UPLOAD perms', reclaim_permissions=True) as temp_project:
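            # Request an auth token restricted to UPLOAD access and use it
            # explicitly for the uploads below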
data = {"scope": {"projects": {"*": "UPLOAD"}}}
upload_only_auth_token = dxpy.DXHTTPRequest(dxpy.get_auth_server_name() + '/system/newAuthToken', data,
prepend_srv=False, always_retry=True)
token_callable = dxpy.DXHTTPOAuth2({"auth_token": upload_only_auth_token["access_token"],
"auth_token_type": upload_only_auth_token["token_type"],
"auth_token_signature": upload_only_auth_token["token_signature"]})
testdir = tempfile.mkdtemp()
try:
# Filename provided with path
with open(os.path.join(testdir, 'myfilename'), 'w') as f:
f.write('foo')
remote_file = dxpy.upload_local_file(filename=os.path.join(testdir, 'myfilename'),
project=temp_project.get_id(), folder='/', auth=token_callable)
self.assertEqual(remote_file.name, 'myfilename')
# Filename provided with file handle
remote_file2 = dxpy.upload_local_file(file=open(os.path.join(testdir, 'myfilename')),
project=temp_project.get_id(), folder='/', auth=token_callable)
self.assertEqual(remote_file2.name, 'myfilename')
finally:
shutil.rmtree(testdir)
@unittest.skipUnless(testutil.TEST_ENV,
'skipping test that would clobber your local environment')
def test_dx_download_no_env(self):
testdir = tempfile.mkdtemp()
with tempfile.NamedTemporaryFile(dir=testdir) as fd:
fd.write("foo")
fd.flush()
file_id = run("dx upload " + fd.name + " --brief --wait").strip()
self.assertTrue(file_id.startswith('file-'))
            # download the file by ID (without a project context) and verify its contents
output_path = os.path.join(testdir, 'output')
with without_project_context():
run('dx download ' + file_id + ' -o ' + output_path)
run('cmp ' + output_path + ' ' + fd.name)
@unittest.skipUnless(testutil.TEST_ENV, 'skipping test that would clobber your local environment')
def test_dx_upload_no_env(self):
# Without project context, cannot upload to a
# non-project-qualified destination
with without_project_context():
with self.assertSubprocessFailure(stderr_regexp='expected the path to be qualified with a project',
exit_code=3):
run("dx upload --path foo /dev/null")
# Can upload to a path specified with explicit project qualifier
file_id = run("dx upload --brief --path " + self.project + ":foo /dev/null").strip()
self.assertEqual(dxpy.DXFile(file_id).name, "foo")
def test_dx_make_download_url(self):
testdir = tempfile.mkdtemp()
output_testdir = tempfile.mkdtemp()
with tempfile.NamedTemporaryFile(dir=testdir) as fd:
fd.write("foo")
fd.flush()
file_id = run("dx upload " + fd.name + " --brief --wait").strip()
self.assertTrue(file_id.startswith('file-'))
            # create a download URL for the file, fetch it with wget, and verify the contents
download_url = run("dx make_download_url " + file_id).strip()
run("wget -P " + output_testdir + " " + download_url)
run('cmp ' + os.path.join(output_testdir, os.path.basename(fd.name)) + ' ' + fd.name)
# download file with a different name
download_url = run("dx make_download_url " + file_id + " --filename foo")
run("wget -P " + output_testdir + " " + download_url)
run('cmp ' + os.path.join(output_testdir, "foo") + ' ' + fd.name)
def test_dx_make_download_url_project_affinity(self):
# Ensure that URLs created with make_download_url never have project
# affinity. In particular, ensures that download URLs created in a job
# (when the workspace is set to a container) continue to work after the
# job has terminated
with temporary_project("make_download_url test 2") as temp_project_2:
with temporary_project("make_download_url test 1", select=True) as temp_project_1:
fh = dxpy.upload_string("foo", project=temp_project_1.get_id(), wait_on_close=True)
                # clone the file into the second project so that it exists in both
temp_project_1.clone(temp_project_2.get_id(), objects=[fh.get_id()])
download_url = run("dx make_download_url " + fh.get_id()).strip()
run("wget -O /dev/null " + download_url)
            # Even after project 1 is destroyed, the download URL should still
            # work, since the file was cloned into project 2
            run("wget -O /dev/null " + download_url)
        # Once both temporary projects are gone and no copy of the file
        # remains, the URL should no longer resolve
        with self.assertSubprocessFailure(stderr_regexp="Not Found", exit_code=8):
            run("wget -O /dev/null " + download_url)
def test_dx_upload_mult_paths(self):
testdir = tempfile.mkdtemp()
os.mkdir(os.path.join(testdir, 'a'))
with tempfile.NamedTemporaryFile(dir=testdir) as fd:
fd.write("root-file")
fd.flush()
with tempfile.NamedTemporaryFile(dir=os.path.join(testdir, "a")) as fd2:
fd2.write("a-file")
fd2.flush()
run(("dx upload -r {testdir}/{rootfile} {testdir}/a " +
"--wait").format(testdir=testdir, rootfile=os.path.basename(fd.name)))
listing = run("dx ls").split("\n")
self.assertIn("a/", listing)
self.assertIn(os.path.basename(fd.name), listing)
listing = run("dx ls a").split("\n")
self.assertIn(os.path.basename(fd2.name), listing)
def test_dx_upload_mult_paths_with_dest(self):
testdir = tempfile.mkdtemp()
os.mkdir(os.path.join(testdir, 'a'))
with tempfile.NamedTemporaryFile(dir=testdir) as fd:
fd.write("root-file")
fd.flush()
with tempfile.NamedTemporaryFile(dir=os.path.join(testdir, "a")) as fd2:
fd2.write("a-file")
fd2.flush()
run("dx mkdir /destdir")
run(("dx upload -r {testdir}/{rootfile} {testdir}/a --destination /destdir " +
"--wait").format(testdir=testdir, rootfile=os.path.basename(fd.name)))
listing = run("dx ls /destdir/").split("\n")
self.assertIn("a/", listing)
self.assertIn(os.path.basename(fd.name), listing)
listing = run("dx ls /destdir/a").split("\n")
self.assertIn(os.path.basename(fd2.name), listing)
@unittest.skipUnless(testutil.TEST_RUN_JOBS, "Skipping test that would run jobs")
def test_dx_download_by_job_id_and_output_field(self):
test_project_name = 'PTFM-13437'
test_file_name = 'test_file_01'
expected_result = 'asdf1234...'
with temporary_project(test_project_name, select=True) as temp_project:
temp_project_id = temp_project.get_id()
# Create and run minimal applet to generate output file.
code_str = """import dxpy
@dxpy.entry_point('main')
def main():
test_file_01 = dxpy.upload_string('{exp_res}', name='{filename}')
output = {{}}
output['{filename}'] = dxpy.dxlink(test_file_01)
return output
dxpy.run()
"""
code_str = code_str.format(exp_res=expected_result, filename=test_file_name)
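            # The applet uploads a file containing expected_result and returns
            # it as its sole output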
app_spec = {"name": "test_applet_dx_download_by_jbor",
"project": temp_project_id,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [{"name": test_file_name, "class": "file"}],
"runSpec": {"code": code_str, "interpreter": "python2.7"},
"version": "1.0.0"}
applet_id = dxpy.api.applet_new(app_spec)['id']
applet = dxpy.DXApplet(applet_id)
job = applet.run({}, project=temp_project_id)
job.wait_on_done()
job_id = job.get_id()
# Case: Correctly specify "<job_id>:<output_field>"; save to file.
with chdir(tempfile.mkdtemp()):
run("dx download " + job_id + ":" + test_file_name)
with open(test_file_name) as fh:
result = fh.read()
self.assertEqual(expected_result, result)
# Case: Correctly specify file id; print to stdout.
test_file_id = dxpy.DXFile(job.describe()['output'][test_file_name]).get_id()
result = run("dx download " + test_file_id + " -o -").strip()
self.assertEqual(expected_result, result)
# Case: Correctly specify file name; print to stdout.
result = run("dx download " + test_file_name + " -o -").strip()
self.assertEqual(expected_result, result)
# Case: Correctly specify "<job_id>:<output_field>"; print to stdout.
result = run("dx download " + job_id + ":" + test_file_name + " -o -").strip()
self.assertEqual(expected_result, result)
# Case: File does not exist.
with self.assertSubprocessFailure(stderr_regexp="Unable to resolve", exit_code=3):
run("dx download foo -o -")
# Case: Invalid output field name when specifying <job_id>:<output_field>.
with self.assertSubprocessFailure(stderr_regexp="Could not find", exit_code=3):
run("dx download " + job_id + ":foo -o -")
# In a directory structure like:
# ROOT/
# X.txt
# A/
# B/
# Make sure that files/subdirs are not downloaded twice. This checks that we fixed
# PTFM-14106.
def test_dx_download_root_recursive(self):
data = "ABCD"
def gen_file(fname, proj_id):
dxfile = dxpy.upload_string(data, name=fname, project=proj_id, wait_on_close=True)
return dxfile
# Download the project recursively, with command [cmd_string].
# Compare the downloaded directory against the first download
# structure.
def test_download_cmd(org_dir, cmd_string):
testdir = tempfile.mkdtemp()
with chdir(testdir):
run(cmd_string)
run("diff -Naur {} {}".format(org_dir, testdir))
shutil.rmtree(testdir)
with temporary_project('test_proj', select=True) as temp_project:
proj_id = temp_project.get_id()
gen_file("X.txt", proj_id)
dxpy.api.project_new_folder(proj_id, {"folder": "/A"})
dxpy.api.project_new_folder(proj_id, {"folder": "/B"})
# Create an entire copy of the project directory structure,
# which will be compared to all other downloads.
orig_dir = tempfile.mkdtemp()
with chdir(orig_dir):
run("dx download -r {}:/".format(proj_id))
test_download_cmd(orig_dir, "dx download -r /")
test_download_cmd(orig_dir, "dx download -r {}:/*".format(proj_id))
test_download_cmd(orig_dir, "dx download -r *")
shutil.rmtree(orig_dir)
# Test download to stdout
def test_download_to_stdout(self):
data = "ABCD"
def gen_file(fname, proj_id):
dxfile = dxpy.upload_string(data, name=fname, project=proj_id, wait_on_close=True)
return dxfile
with temporary_project('test_proj', select=True) as temp_project:
proj_id = temp_project.get_id()
gen_file("X.txt", proj_id)
buf = run("dx download -o - X.txt")
self.assertEqual(buf, data)
def test_dx_download_resume_and_checksum(self):
def assert_md5_checksum(filename, hasher):
with open(filename, "rb") as fh:
self.assertEqual(hashlib.md5(fh.read()).hexdigest(), hasher.hexdigest())
def truncate(filename, size):
with open(filename, "rb+") as fh:
fh.seek(size)
fh.truncate()
# Manually upload 2 parts
part1, part2 = b"0123456789ABCDEF"*1024*64*5, b"0"
dxfile = dxpy.new_dxfile(name="test")
dxfile.upload_part(part1, index=1)
dxfile.upload_part(part2, index=2)
dxfile.close(block=True)
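        # Download the file, then repeatedly truncate the local copy to various
        # sizes and re-download with -f; each time the result must match the
        # md5 of the full contents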
wd = tempfile.mkdtemp()
run("cd {wd}; dx download test; ls -la".format(wd=wd))
assert_md5_checksum(os.path.join(wd, "test"), hashlib.md5(part1 + part2))
truncate(os.path.join(wd, "test"), 1024*1024*5)
run("cd {wd}; dx download -f test".format(wd=wd))
assert_md5_checksum(os.path.join(wd, "test"), hashlib.md5(part1 + part2))
truncate(os.path.join(wd, "test"), 1024*1024*5 - 1)
run("cd {wd}; dx download -f test".format(wd=wd))
assert_md5_checksum(os.path.join(wd, "test"), hashlib.md5(part1 + part2))
truncate(os.path.join(wd, "test"), 1)
run("cd {wd}; dx download -f test".format(wd=wd))
assert_md5_checksum(os.path.join(wd, "test"), hashlib.md5(part1 + part2))
run("cd {wd}; rm test; touch test".format(wd=wd))
run("cd {wd}; dx download -f test".format(wd=wd))
assert_md5_checksum(os.path.join(wd, "test"), hashlib.md5(part1 + part2))
def test_upload_binary_data_with_debugging_info(self):
# Really a test that the _DX_DEBUG output doesn't barf on binary data
with chdir(tempfile.mkdtemp()):
with open('binary', 'wb') as f:
f.write(b'\xee\xee\xee\xef')
run('_DX_DEBUG=1 dx upload binary')
run('_DX_DEBUG=2 dx upload binary')
run('_DX_DEBUG=3 dx upload binary')
class TestDXClientDownloadDataEgressBilling(DXTestCase):
def gen_file(self, fname, data, proj_id):
return dxpy.upload_string(data, name=fname, project=proj_id, wait_on_close=True)
def get_billed_project(self):
with open(self.temp_file_fd.name, "r") as fd:
return fd.read()
# Clean testing state prior to running a download test.
#
    # We need to remove the local file before downloading again: if a copy
    # from a previous case is still present, 'dx download' will skip
    # re-downloading it, which would cause the test to fail.
def prologue(self, file1, file2):
with open(self.temp_file_fd.name, "w") as fd:
fd.truncate()
for filename in [file1, file2]:
if os.path.exists(filename):
os.remove(filename)
def setUp(self):
self.temp_file_fd = tempfile.NamedTemporaryFile()
        # point _DX_DUMP_BILLED_PROJECT at a temp file so tests can check which
        # project each download was billed to
os.environ['_DX_DUMP_BILLED_PROJECT'] = self.temp_file_fd.name
def tearDown(self):
del os.environ['_DX_DUMP_BILLED_PROJECT']
self.temp_file_fd.close()
@unittest.skipUnless(testutil.TEST_ENV,
'skipping test that would clobber your local environment')
def test_dx_cat_project_context(self):
proj1_name = 'test_proj1'
proj2_name = 'test_proj2'
with temporary_project(proj1_name, select=True) as proj, \
temporary_project(proj2_name) as proj2, \
chdir(tempfile.mkdtemp()):
data1 = 'ABCD'
file1_name = "file1"
file1_id = self.gen_file(file1_name, data1, proj.get_id()).get_id()
data2 = '1234'
file2_name = "file2"
file2_id = self.gen_file(file2_name, data2, proj2.get_id()).get_id()
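            # file1 lives in the selected project (proj); file2 lives only in proj2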
# Success: project from context contains file specified by ID
buf = run("dx download -o - {f}".format(f=file1_id))
self.assertEqual(buf, data1)
# Project context alone, when combined with file by ID, is
# not sufficient to indicate user's intent to use that
# project
self.assertEqual(self.get_billed_project(), "")
# Success: project from context contains file specified by dxlink
buf = run("dx download -o - '{{\"$dnanexus_link\": \"{f}\"}}'".format(f=file1_id))
self.assertEqual(buf, data1)
self.assertEqual(self.get_billed_project(), "")
# Success: project from context contains file specified by name
buf = run("dx download -o - {f}".format(f=file1_name))
self.assertEqual(buf, data1)
self.assertEqual(self.get_billed_project(), proj.get_id())
            # Success: project specified by context does not contain file specified by ID
buf = run("dx download -o - {f}".format(f=file2_id))
self.assertEqual(buf, data2)
self.assertEqual(self.get_billed_project(), "")
            # Success: project specified by context does not contain file specified by dxlink
buf = run("dx download -o - '{{\"$dnanexus_link\": \"{f}\"}}'".format(f=file2_id))
self.assertEqual(buf, data2)
self.assertEqual(self.get_billed_project(), "")
            # Failure: project specified by context does not contain file specified by name
with self.assertSubprocessFailure(stderr_regexp="Unable to resolve", exit_code=3):
run("dx download -o - {f}".format(f=file2_name))
@unittest.skipUnless(testutil.TEST_ENV,
'skipping test that would clobber your local environment')
def test_dx_download_project_context(self):
proj1_name = 'test_proj1'
proj2_name = 'test_proj2'
with temporary_project(proj1_name, select=True) as proj, \
temporary_project(proj2_name) as proj2, \
chdir(tempfile.mkdtemp()):
data1 = 'ABCD'
file1_name = "file1"
file1_id = self.gen_file(file1_name, data1, proj.get_id()).get_id()
data2 = '1234'
file2_name = "file2"
file2_id = self.gen_file(file2_name, data2, proj2.get_id()).get_id()
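            # file1 is in the selected project; file2 is only in the other project (proj2)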
# Success: project from context contains file specified by ID
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress {f}".format(f=file1_id))
# Project context alone, when combined with file by ID, is
# not sufficient to indicate user's intent to use that
# project
self.assertEqual(self.get_billed_project(), "")
# Success: project from context contains file specified by dxlink
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress '{{\"$dnanexus_link\": \"{f}\"}}'".format(f=file1_id))
self.assertEqual(self.get_billed_project(), "")
# Success: project from context contains file specified by name
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress {f}".format(f=file1_name))
self.assertEqual(self.get_billed_project(), proj.get_id())
            # Success: project specified by context does not contain file specified by ID
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress {f}".format(f=file2_id))
self.assertEqual(self.get_billed_project(), "")
            # Success: project specified by context does not contain file specified by dxlink
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress '{{\"$dnanexus_link\": \"{f}\"}}'".format(f=file2_id))
self.assertEqual(self.get_billed_project(), "")
            # Failure: project specified by context does not contain file specified by name
self.prologue(file1_name, file2_name)
with self.assertSubprocessFailure(stderr_regexp="Unable to resolve", exit_code=3):
run("dx download -f --no-progress {f}".format(f=file2_name))
def test_dx_download_project_explicit(self):
proj1_name = 'test_proj1_' + str(time.time())
proj2_name = 'test_proj2_' + str(time.time())
with temporary_project(proj1_name, select=True) as proj, \
temporary_project(proj2_name) as proj2, \
chdir(tempfile.mkdtemp()):
data1 = 'ABCD'
file1_name = "file1"
file1_id = self.gen_file(file1_name, data1, proj.get_id()).get_id()
data2 = '1234'
file2_name = "file2"
file2_id = self.gen_file(file2_name, data2, proj2.get_id()).get_id()
# Explicit project provided
# Success: project specified by ID contains file specified by ID
buf = run("dx download -o - {p}:{f}".format(p=proj2.get_id(), f=file2_id))
self.assertEqual(buf, data2)
self.assertEqual(self.get_billed_project(), proj2.get_id())
# Success: project specified by ID contains file specified by name
buf = run("dx download -o - {p}:{f}".format(p=proj.get_id(), f=file1_name))
self.assertEqual(buf, data1)
self.assertEqual(self.get_billed_project(), proj.get_id())
# Success: project specified by name contains file specified by ID
buf = run("dx download -o - {p}:{f}".format(p=proj2_name, f=file2_id))
self.assertEqual(buf, data2)
self.assertEqual(self.get_billed_project(), proj2.get_id())
# Success: project specified by name contains file specified by name
buf = run("dx download -o - {p}:{f}".format(p=proj1_name, f=file1_name))
self.assertEqual(buf, data1)
self.assertEqual(self.get_billed_project(), proj.get_id())
# Failure: project specified by ID does not contain file specified by ID
with self.assertSubprocessFailure(stderr_regexp="Error: project does not", exit_code=1):
run("dx download -o - {p}:{f}".format(p=proj2.get_id(), f=file1_id))
# Failure: project specified by ID does not contain file specified by name
with self.assertSubprocessFailure(stderr_regexp="Unable to resolve", exit_code=3):
run("dx download -o - {p}:{f}".format(p=proj.get_id(), f=file2_name))
# Failure: project specified by name does not contain file specified by ID
with self.assertSubprocessFailure(stderr_regexp="Error: project does not", exit_code=1):
run("dx download -o - {p}:{f}".format(p=proj2_name, f=file1_id))
# Failure: project specified by name does not contain file specified by name
with self.assertSubprocessFailure(stderr_regexp="Unable to resolve", exit_code=3):
run("dx download -o - {p}:{f}".format(p=proj1_name, f=file2_name))
            # Test API call parameters when downloading to a local file instead of streaming to stdout
# Success: project specified by ID contains file specified by ID
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress {p}:{f}".format(p=proj2.get_id(), f=file2_id))
self.assertEqual(self.get_billed_project(), proj2.get_id())
# Success: project specified by ID contains file specified by name
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress {p}:{f}".format(p=proj.get_id(), f=file1_name))
self.assertEqual(self.get_billed_project(), proj.get_id())
# Success: project specified by name contains file specified by ID
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress {p}:{f}".format(p=proj2_name, f=file2_id))
self.assertEqual(self.get_billed_project(), proj2.get_id())
# Success: project specified by name contains file specified by name
self.prologue(file1_name, file2_name)
run("dx download -f --no-progress {p}:{f}".format(p=proj1_name, f=file1_name))
self.assertEqual(self.get_billed_project(), proj.get_id())
# Failure: project specified by ID does not contain file specified by ID
with self.assertSubprocessFailure(stderr_regexp="Error: specified project does not", exit_code=1):
run("dx download -f --no-progress {p}:{f}".format(p=proj2.get_id(), f=file1_id))
# Failure: project specified by ID does not contain file specified by name
with self.assertSubprocessFailure(stderr_regexp="Unable to resolve", exit_code=3):
run("dx download -f --no-progress {p}:{f}".format(p=proj.get_id(), f=file2_name))
# Failure: project specified by name does not contain file specified by ID
with self.assertSubprocessFailure(stderr_regexp="Error: specified project does not", exit_code=1):
run("dx download -f --no-progress {p}:{f}".format(p=proj2_name, f=file1_id))
# Failure: project specified by name does not contain file specified by name
with self.assertSubprocessFailure(stderr_regexp="Unable to resolve", exit_code=3):
run("dx download -f --no-progress {p}:{f}".format(p=proj1_name, f=file2_name))
def test_dx_download_multiple_projects_with_same_name(self):
proj_name = 'test_proj1'
with temporary_project(proj_name, select=True) as proj, \
temporary_project(proj_name) as proj2, \
chdir(tempfile.mkdtemp()):
data1 = 'ABCD'
file1_name = "file1"
file1_id = self.gen_file(file1_name, data1, proj.get_id()).get_id()
data2 = '1234'
file2_name = "file1"
file2_id = self.gen_file(file2_name, data2, proj2.get_id()).get_id()
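            # The two projects share a name, so resolving the project by name should be ambiguous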
# Success: project specified by ID contains file specified by ID
buf = run("dx download -o - {pid}:{f}".format(pid=proj2.get_id(), f=file2_id))
self.assertEqual(buf, data2)
self.assertEqual(self.get_billed_project(), proj2.get_id())
            # Failure: project specified by name is ambiguous (both projects share it),
            # even though it contains the file specified by ID
with self.assertSubprocessFailure(stderr_regexp="ResolutionError: Found multiple projects", exit_code=3):
run("dx download -o - {pname}:{f}".format(pname=proj_name, f=file2_id))
# Replicate same tests for non-cat (download to file) route
# Success: project specified by ID contains file specified by ID
run("dx download -f --no-progress {pid}:{f}".format(pid=proj.get_id(), f=file1_id))
self.assertEqual(self.get_billed_project(), proj.get_id())
            # Failure: the ambiguous project name causes resolution to fail again
            # for the file specified by ID
with self.assertSubprocessFailure(stderr_regexp="ResolutionError: Found multiple projects", exit_code=3):
run("dx download -f --no-progress {pname}:{f}".format(pname=proj_name, f=file2_id))
@unittest.skipUnless(testutil.TEST_ENV and testutil.TEST_RUN_JOBS,
'skipping test that would clobber your local environment and run jobs')
def test_dx_download_jbors(self):
proj1_name = 'test_proj1'
proj2_name = 'test_proj2'
with temporary_project(proj1_name, select=True) as proj1, \
temporary_project(proj2_name) as proj2, \
chdir(tempfile.mkdtemp()):
dxfile = dxpy.upload_string("foo", project=proj1.get_id(), wait_on_close=True)
applet_id = dxpy.api.applet_new({
"project": proj1.get_id(),
"dxapi": "0.0.1",
"inputSpec": [{"name": "infile", "class": "file"}],
"outputSpec": [{"name": "outfile", "class": "file"}],
"runSpec": {"interpreter": "bash",
"code": """
dx-jobutil-add-output outfile `dx-jobutil-parse-link "$infile"`
"""
}})["id"]
applet = dxpy.DXApplet(applet_id)
dxjob1 = applet.run({"infile": {"$dnanexus_link": dxfile.get_id()}}, project=proj1.get_id())
dxjob2 = applet.run({"infile": {"$dnanexus_link": dxfile.get_id()}}, project=proj2.get_id())
dxjob1.wait_on_done()
dxjob2.wait_on_done()
# Test downloading from jobs running in the current project
# context, and outside of the current project context
run("dx download -f --no-progress {job1}:outfile".format(job1=dxjob1.get_id()))
self.assertEqual(self.get_billed_project(), proj1.get_id())
run("dx download --no-progress -o - {job1}:outfile".format(job1=dxjob1.get_id()))
self.assertEqual(self.get_billed_project(), proj1.get_id())
run("dx download -f --no-progress {job2}:outfile".format(job2=dxjob2.get_id()))
self.assertEqual(self.get_billed_project(), proj2.get_id())
run("dx download --no-progress -o - {job2}:outfile".format(job2=dxjob2.get_id()))
self.assertEqual(self.get_billed_project(), proj2.get_id())
# Test downloading without a project context set
with without_project_context():
run("dx download -f --no-progress {job1}:outfile".format(job1=dxjob1.get_id()))
self.assertEqual(self.get_billed_project(), proj1.get_id())
class TestDXClientDescribe(DXTestCase):
def test_projects(self):
run("dx describe :")
run("dx describe " + self.project)
run("dx describe " + self.project + ":")
# need colon to recognize as project name
with self.assertSubprocessFailure(exit_code=3):
run("dx describe dxclient_test_pröject")
# bad project name
with self.assertSubprocessFailure(exit_code=3):
run("dx describe dne:")
# nonexistent project ID
with self.assertSubprocessFailure(exit_code=3):
run("dx describe project-123456789012345678901234")
def test_bad_current_project(self):
with self.assertSubprocessFailure(stderr_regexp='No matches found', exit_code=3):
run("dx describe nonexistent --project-context-id foo")
run("dx describe " + self.project + " --project-context-id foo")
def test_user_describe_self_shows_bill_to(self):
## Verify `billTo` from /user-xxxx/describe.
user_id = dxpy.whoami()
user_desc = dxpy.api.user_describe(user_id)
self.assertTrue("billTo" in user_desc)
self.assertEqual(user_desc.get("billTo"), user_id)
## Verify `billTo` from "dx describe user-xxxx".
bill_to_label = "Default bill to"
cli_user_desc = run("dx describe " + user_id).strip().split("\n")
parsed_desc = filter(lambda line: line.startswith(bill_to_label),
cli_user_desc)
self.assertEqual(len(parsed_desc), 1)
key_and_value = parsed_desc[0].split(bill_to_label)
self.assertEqual(key_and_value[0], "")
self.assertEqual(key_and_value[1].strip(), user_id)
## Verify `billTo` from "dx describe user-xxxx --json".
cli_user_desc_json = json.loads(
run("dx describe " + user_id + " --json")
)
self.assertTrue("billTo" in cli_user_desc_json)
self.assertEqual(cli_user_desc_json.get("billTo"), user_id)
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_describe_deleted_app(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": ""},
"name": "applet_to_delete"})['id']
app_new_output = dxpy.api.app_new({"name": "app_to_delete",
"applet": applet_id,
"version": "1.0.0"})
# make second app with no default tag
app_new_output2 = dxpy.api.app_new({"name": "app_to_delete",
"applet": applet_id,
"version": "1.0.1"})
dxpy.api.app_delete(app_new_output2["id"])
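        # Describing the deleted app by ID should still succeed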
run("dx describe " + app_new_output2["id"])
class TestDXClientRun(DXTestCase):
def setUp(self):
self.other_proj_id = run("dx new project other --brief").strip()
super(TestDXClientRun, self).setUp()
def tearDown(self):
dxpy.api.project_destroy(self.other_proj_id, {'terminateJobs': True})
super(TestDXClientRun, self).tearDown()
def test_dx_run_applet_with_input_spec(self):
record = dxpy.new_dxrecord(name="my_record")
applet_id = dxpy.api.applet_new({
"project": self.project,
"dxapi": "0.0.1",
"inputSpec": [
{"name": "int0", "class": "int"},
{"name": "int1", "class": "int", "optional": True},
{"name": "string0", "class": "string"},
{"name": "string1", "class": "string", "optional": True},
{"name": "record0", "class": "record"},
{"name": "record1", "class": "record", "optional": True},
],
"outputSpec": [
{"name": "outint", "class": "int"},
{"name": "outstring", "class": "string"},
{"name": "outrecord", "class": "record"},
],
"runSpec": {"interpreter": "bash",
"code": """
dx-jobutil-add-output outint $int0
dx-jobutil-add-output outstring $string0
dx-jobutil-add-output outrecord $record0
"""
}})["id"]
applet = dxpy.DXApplet(applet_id)
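        # The applet echoes its required inputs back as outputs, which the JBOR
        # cases below reference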
#############################
# With only required inputs #
#############################
# Run with applet handler.
job = applet.run({"int0": 16, "string0": "input_string",
"record0": {"$dnanexus_link": record.get_id()}})
job_desc = job.describe()
exp = {"int0": 16, "string0": "input_string",
"record0": {"$dnanexus_link": record.get_id()}}
self.assertEquals(job_desc["input"], exp)
# Run with "dx run".
job_id = run("dx run {applet_id} -iint0=16 -istring0=input_string -irecord0={record_id} --brief".format(
applet_id=applet_id, record_id=record.get_id())).strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc["input"], exp)
job_id = run("dx run {applet_id} -iint0:int=16 -istring0:string=input_string -irecord0:record={record_id} --brief".format(
applet_id=applet_id, record_id=record.get_id())).strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc["input"], exp)
# Run with "dx run" with JBORs.
other_job_id = run("dx run {applet_id} -iint0={job_id}:outint -istring0={job_id}:outstring -irecord0={job_id}:outrecord --brief".format(
applet_id=applet_id, job_id=job_id)).strip()
job_desc = dxpy.describe(other_job_id)
exp = {"int0": {"$dnanexus_link": {"field": "outint",
"job": job_id}},
"string0": {"$dnanexus_link": {"field": "outstring",
"job": job_id}},
"record0": {"$dnanexus_link": {"field": "outrecord",
"job": job_id}}}
self.assertEquals(job_desc["input"], exp)
# Run with "dx run" with input name mapped to data object name.
job_id = run("dx run {applet_id} -iint0=16 -istring0=input_string -irecord0=my_record --brief".format(
applet_id=applet_id)).strip()
job_desc = dxpy.describe(job_id)
exp = {"int0": 16, "string0": "input_string",
"record0": {"$dnanexus_link": {"project": self.project,
"id": record.get_id()}}}
self.assertEquals(job_desc["input"], exp)
#####################################
# With required and optional inputs #
#####################################
second_record = dxpy.new_dxrecord()
# Run with applet handler.
job = applet.run({"int0": 16, "string0": "input_string",
"record0": {"$dnanexus_link": record.get_id()},
"int1": 32, "string1": "second_input_string",
"record1": {"$dnanexus_link": second_record.get_id()}})
job_desc = job.describe()
exp = {"int0": 16, "int1": 32, "string0": "input_string",
"string1": "second_input_string",
"record0": {"$dnanexus_link": record.get_id()},
"record1": {"$dnanexus_link": second_record.get_id()}}
self.assertEquals(job_desc["input"], exp)
# Run with "dx run".
job_id = run("dx run {applet_id} -iint0=16 -istring0=input_string -irecord0={record_id} -iint1=32 -istring1=second_input_string -irecord1={second_record_id} --brief".format(
applet_id=applet_id, record_id=record.get_id(),
second_record_id=second_record.get_id())).strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc["input"], exp)
# Run with "dx run" with JBORs.
other_job_id = run("dx run {applet_id} -iint0=32 -iint1={job_id}:outint -istring0=second_input_string -istring1={job_id}:outstring -irecord0={second_record_id} -irecord1={job_id}:outrecord --brief".format(
applet_id=applet_id, job_id=job_id,
second_record_id=second_record.get_id())).strip()
job_desc = dxpy.describe(other_job_id)
exp = {"int0": 32,
"int1": {"$dnanexus_link": {"field": "outint",
"job": job_id}},
"string0": "second_input_string",
"string1": {"$dnanexus_link": {"field": "outstring",
"job": job_id}},
"record0": {"$dnanexus_link": second_record.get_id()},
"record1": {"$dnanexus_link": {"field": "outrecord",
"job": job_id}}}
self.assertEquals(job_desc["input"], exp)
def test_dx_run_applet_without_input_spec(self):
record = dxpy.new_dxrecord(name="my_record")
applet_id = dxpy.api.applet_new({
"project": self.project,
"dxapi": "0.0.1",
"outputSpec": [
{"name": "outint", "class": "int"},
{"name": "outstring", "class": "string"},
{"name": "outrecord", "class": "record"},
],
"runSpec": {"interpreter": "bash",
"code": """
record_id=`dx new record --close --brief`
dx-jobutil-add-output outint 32
dx-jobutil-add-output outstring output_string
dx-jobutil-add-output outrecord $record_id
"""
}})["id"]
applet = dxpy.DXApplet(applet_id)
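        # With no input spec, arbitrary -i inputs are passed through without validation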
# Run with applet handler.
job = applet.run({"int0": 16, "string0": "input_string",
"record0": {"$dnanexus_link": record.get_id()}})
job_desc = job.describe()
exp = {"int0": 16, "string0": "input_string",
"record0": {"$dnanexus_link": record.get_id()}}
self.assertEquals(job_desc["input"], exp)
# Run with "dx run".
job_id = run("dx run {applet_id} -iint0=16 -istring0=input_string -irecord0={record_id} --brief".format(applet_id=applet_id, record_id=record.get_id())).strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc["input"], exp)
job_id = run("dx run {applet_id} -iint0:int=16 -istring0:string=input_string -irecord0:record={record_id} --brief".format(applet_id=applet_id, record_id=record.get_id())).strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc["input"], exp)
# Run with "dx run" with JBORs.
other_job_id = run("dx run {applet_id} -iint0={job_id}:outint -istring0={job_id}:outstring -irecord0={job_id}:outrecord --brief".format(applet_id=applet_id, job_id=job_id)).strip()
job_desc = dxpy.describe(other_job_id)
exp = {"int0": {"$dnanexus_link": {"field": "outint",
"job": job_id}},
"string0": {"$dnanexus_link": {"field": "outstring",
"job": job_id}},
"record0": {"$dnanexus_link": {"field": "outrecord",
"job": job_id}}}
self.assertEquals(job_desc["input"], exp)
other_job_id = run("dx run {applet_id} -irecord0={record_id} -irecord1={job_id}:outrecord --brief".format(
applet_id=applet_id, job_id=job_id, record_id=record.get_id()
)).strip()
job_desc = dxpy.describe(other_job_id)
exp = {"record0": {"$dnanexus_link": record.get_id()},
"record1": {"$dnanexus_link": {"field": "outrecord",
"job": job_id}}}
self.assertEquals(job_desc["input"], exp)
# Run with "dx run" with repeated input names: order of input values
# preserved.
other_job_id = run("dx run {applet_id} -irecord0={record_id} -irecord0={job_id}:outrecord --brief".format(
applet_id=applet_id, job_id=job_id, record_id=record.get_id()
)).strip()
job_desc = dxpy.describe(other_job_id)
exp = {"record0": [{"$dnanexus_link": record.get_id()},
{"$dnanexus_link": {"field": "outrecord",
"job": job_id}}]}
self.assertEquals(job_desc["input"], exp)
other_job_id = run("dx run {applet_id} -irecord0={job_id}:outrecord -irecord0={record_id} --brief".format(
applet_id=applet_id, job_id=job_id, record_id=record.get_id()
)).strip()
job_desc = dxpy.describe(other_job_id)
exp = {"record0": [{"$dnanexus_link": {"field": "outrecord",
"job": job_id}},
{"$dnanexus_link": record.get_id()}]}
self.assertEquals(job_desc["input"], exp)
# Run with "dx run" with input name mapped to data object name.
job_id = run("dx run {applet_id} -irecord0=my_record --brief".format(applet_id=applet_id)).strip()
job_desc = dxpy.describe(job_id)
exp = {"record0": {"$dnanexus_link": {"project": self.project,
"id": record.get_id()}}}
self.assertEquals(job_desc["input"], exp)
def test_dx_resolve(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
record_id0 = dxpy.api.record_new({"project": self.project,
"dxapi": "1.0.0",
"name": "resolve_record0"})['id']
record_id1 = dxpy.api.record_new({"project": self.project,
"dxapi": "1.0.0",
"name": "resolve_record1"})['id']
record_id2 = dxpy.api.record_new({"project": self.project,
"dxapi": "1.0.0",
"name": "resolve_record2"})['id']
glob_id = dxpy.api.record_new({"project": self.project,
"dxapi": "1.0.0",
"name": "glob_resolve_record"})['id']
job_id = run("dx run " + applet_id + " -iinput0=resolve_record0 -iinput1=resolve_record1 " +
"-iinput2=glob_resolve* -iint0=5 -iint1=15 --brief -y").strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc['input']['input0']['$dnanexus_link']['id'], record_id0)
self.assertEquals(job_desc['input']['input1']['$dnanexus_link']['id'], record_id1)
self.assertEquals(job_desc['input']['input2']['$dnanexus_link']['id'], glob_id)
self.assertEquals(job_desc['input']['int0'], 5)
self.assertEquals(job_desc['input']['int1'], 15)
# If multiple entities are provided with the same input name, then their resolved result should
# appear in a list, in the order in which they were provided, no matter the method of resolution
job_id = run("dx run " + applet_id + " -iinput0=resolve_record0 -iinput0=25 -iinput0=glob_resolve* " +
"-iinput0=resolve_record1 -iinput1=" + record_id0 + " -iinput1=50 -iinput1=resolve_record1 " +
"--brief -y").strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(len(job_desc['input']['input0']), 4)
self.assertEquals(job_desc['input']['input0'][0]['$dnanexus_link']['id'], record_id0)
self.assertEquals(job_desc['input']['input0'][1], 25)
self.assertEquals(job_desc['input']['input0'][2]['$dnanexus_link']['id'], glob_id)
self.assertEquals(job_desc['input']['input0'][3]['$dnanexus_link']['id'], record_id1)
self.assertEquals(len(job_desc['input']['input1']), 3)
self.assertEquals(job_desc['input']['input1'][0]['$dnanexus_link'], record_id0)
self.assertEquals(job_desc['input']['input1'][1], 50)
self.assertEquals(job_desc['input']['input1'][2]['$dnanexus_link']['id'], record_id1)
# If a record cannot be resolved, then the return value should just be the record name passed in
job_id = run("dx run " + applet_id + " --brief -y -iinput0=cannot_resolve " +
"-iinput1=resolve_record0 -iint0=10").strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc['input']['input0'], "cannot_resolve")
self.assertEquals(job_desc['input']['input1']['$dnanexus_link']['id'], record_id0)
self.assertEquals(job_desc['input']['int0'], 10)
job_id = run("dx run " + applet_id + " --brief -y -iinput0=glob_cannot_resolve*").strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc['input']['input0'], "glob_cannot_resolve*")
# Should simply use given name if it corresponds to multiple records (glob or not);
# length validation errors out, but exec_io catches it
dup_record_id = dxpy.api.record_new({"project": self.project,
"dxapi": "1.0.0",
"name": "resolve_record0"})['id']
job_id = run("dx run " + applet_id + " --brief -y -iinput0=resolve_record0").strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc['input']['input0'], "resolve_record0")
job_id = run("dx run " + applet_id + " --brief -y -iinput0=resolve_record*").strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc['input']['input0'], "resolve_record*")
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [
{"name": "input0", "class": "record"},
{"name": "input1", "class": "array:record", "optional": True},
{"name": "int0", "class": "int"},
{"name": "int1", "class": "array:int", "optional": True},
{"name": "bool0", "class": "array:boolean", "optional": True}
],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
# Try with applet that has an input spec
job_id = run("dx run " + applet_id + " --brief -y -iinput0=resolve_record1 -iint0=10 " +
"-iinput1=resolve_record2 -iinput1=resolve_record1 -iint1=0 -iint1=1 -iint1=2 " +
"-ibool0=true -ibool0=0").strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc['input']['input0']['$dnanexus_link']['id'], record_id1)
self.assertEquals(job_desc['input']['input1'][0]['$dnanexus_link']['id'], record_id2)
self.assertEquals(job_desc['input']['input1'][1]['$dnanexus_link']['id'], record_id1)
self.assertEquals(job_desc['input']['int0'], 10)
self.assertEquals(job_desc['input']['int1'], [0, 1, 2])
self.assertEquals(job_desc['input']['bool0'], [True, False])
# Workflows should show same behavior as applets
workflow_id = run("dx new workflow myworkflow --output-folder /foo --brief").strip()
stage_id = dxpy.api.workflow_add_stage(workflow_id,
{"editVersion": 0, "executable": applet_id})['stage']
record_id = dxpy.api.record_new({"project": self.project,
"dxapi": "1.0.0",
"name": "myrecord"})['id']
analysis_id = run("dx run " + workflow_id + " -i" + stage_id + ".input0=myrecord -i" +
stage_id + ".int0=77 -y --brief").strip()
analysis_desc = dxpy.describe(analysis_id)
self.assertEquals(analysis_desc['input'][stage_id + '.input0']['$dnanexus_link']['id'], record_id)
self.assertEquals(analysis_desc['input'][stage_id + '.int0'], 77)
def test_dx_resolve_check_resolution_needed(self):
# If no entity_name is given, no entity_name should be returned
self.assertEquals(check_resolution("some_path", "project_id", "/", None), (False, "project_id", "/", None))
self.assertEquals(check_resolution("some_path", self.project, "/", None), (False, self.project, "/", None))
record_id = dxpy.api.record_new({"project": self.project,
"dxapi": "1.0.0",
"name": "myrecord"})['id']
self.assertEquals(check_resolution("some_path", self.project, "/", "myrecord"),
(True, self.project, "/", "myrecord"))
self.assertEquals(check_resolution("some_path", "not_a_real_project_id", "/", "notarealrecord"),
(True, "not_a_real_project_id", "/", "notarealrecord"))
# If the entity is a DX ID, but not an expected class, the result should be False, None, None, None
result = check_resolution("some_path", self.project, "/", record_id, expected_classes=["file"])
self.assertEquals(result, (False, None, None, None))
# If entity_id is a hash, there is no need to resolve, and the describe
# output is returned (should work no matter what project is given)
result = check_resolution("some_path", self.project, "/", record_id, expected_classes=["record"])
self.assertEquals(result[:3], (False, self.project, "/"))
desc_output = result[3]
self.assertEquals(desc_output["describe"]["project"], self.project)
self.assertEquals(desc_output["describe"]["name"], "myrecord")
self.assertEquals(desc_output["id"], record_id)
desc_output = check_resolution("some_path", None, "/", record_id, enclose_in_list=True)[3][0]
self.assertEquals(desc_output["describe"]["project"], self.project)
self.assertEquals(desc_output["describe"]["name"], "myrecord")
self.assertEquals(desc_output["id"], record_id)
# Describing entity_id should work even if the project hint is wrong
result = check_resolution("some_path", self.project, "/", record_id, describe={"project": self.other_proj_id,
"fields": {"sponsored": True}})
self.assertEquals(result[:3], (False, self.project, "/"))
desc_output = result[3]
self.assertEquals(desc_output["describe"]["sponsored"], False)
self.assertEquals(desc_output["id"], record_id)
# Even if the given project is not a real project ID, the correct project ID
# should be in the describe output
desc_output = check_resolution("some_path", "not_a_real_project_id", "/", record_id)[3]
self.assertEquals(desc_output["describe"]["project"], self.project)
self.assertEquals(desc_output["describe"]["name"], "myrecord")
self.assertEquals(desc_output["id"], record_id)
# If describing an entity ID fails, then a ResolutionError should be
# raised
with self.assertRaisesRegexp(ResolutionError, "The entity record-\d+ could not be found"):
check_resolution("some_path", self.project, "/", "record-123456789012345678901234")
def test_dx_run_depends_on_success(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
job_dep_id = run("dx run " + applet_id + " --brief -y").strip()
record_dep_id = dxpy.api.record_new({"project": self.project})['id']
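        # With no -d/--depends-on flags, the job's dependsOn list should be empty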
job_id = run("dx run " + applet_id + " --brief -y").strip()
job_desc = dxpy.describe(job_id)
self.assertEquals(job_desc['dependsOn'], [])
job_id = run("dx run " + applet_id + " --brief -y -d " + job_dep_id).strip()
job_desc = dxpy.describe(job_id)
self.assertEqual(job_desc['dependsOn'], [job_dep_id])
job_id = run("dx run " + applet_id + " -d " + job_dep_id + " --depends-on " +
record_dep_id + " --brief -y").strip()
job_desc = dxpy.describe(job_id)
self.assertEqual(sorted(job_desc['dependsOn']), sorted([job_dep_id, record_dep_id]))
def test_dx_run_depends_on_failure(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
job1_dep_id = run("dx run " + applet_id + " --brief -y").strip()
job2_dep_id = run("dx run " + applet_id + " --brief -y").strip()
# Testing for missing arguments:
with self.assertSubprocessFailure(stderr_regexp='-d/--depends-on.*expected one argument', exit_code=2):
run("dx run " + applet_id + " --brief -y --depends-on " + job2_dep_id + " --depends-on")
with self.assertSubprocessFailure(stderr_regexp='-d/--depends-on.*expected one argument', exit_code=2):
run("dx run " + applet_id + " -d --depends-on " + job1_dep_id + " --brief -y")
with self.assertSubprocessFailure(stderr_regexp='unrecognized arguments', exit_code=2):
run("dx run " + applet_id + " --brief -y -d " + job2_dep_id + " " + job1_dep_id)
with self.assertSubprocessFailure(stderr_regexp='could not be found', exit_code=3):
run("dx run " + applet_id + " --brief -y -d not_a_valid_id")
# Testing for use of --depends-on with running workflows
workflow_id = run("dx new workflow myworkflow --output-folder /foo --brief").strip()
stage_id = dxpy.api.workflow_add_stage(workflow_id,
{"editVersion": 0, "executable": applet_id})['stage']
with self.assertSubprocessFailure(stderr_regexp='--depends-on.*workflows', exit_code=3):
run("dx run " + workflow_id + " -d " + job1_dep_id + " -y --brief")
with self.assertSubprocessFailure(stderr_regexp='--depends-on.*workflows', exit_code=3):
run("dx run myworkflow -d " + job1_dep_id + " -y --brief")
def test_dx_run_no_hidden_executables(self):
# hidden applet
applet_name = "hidden_applet"
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"},
"hidden": True,
"name": applet_name})['id']
run("dx describe hidden_applet")
with self.assertSubprocessFailure(stderr_regexp='ResolutionError: Unable to resolve "{f}"'
.format(f=applet_name), exit_code=3):
run("dx run hidden_applet")
# by ID will still work
run("dx run " + applet_id + " -y")
# hidden workflow
workflow_name = "hidden_workflow"
dxworkflow = dxpy.new_dxworkflow(name=workflow_name, hidden=True)
dxworkflow.add_stage(applet_id)
with self.assertSubprocessFailure(stderr_regexp='ResolutionError: Unable to resolve "{f}"'
.format(f=workflow_name), exit_code=3):
run("dx run hidden_workflow")
# by ID will still work
run("dx run " + dxworkflow.get_id() + " -y")
@unittest.skipUnless(testutil.TEST_RUN_JOBS,
'skipping tests that would run jobs')
def test_dx_run_jbor_array_ref(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"name": "myapplet",
"dxapi": "1.0.0",
"inputSpec": [{"name": "record",
"class": "record",
"optional": True}],
"outputSpec": [{"name": "record",
"class": "record"},
{"name": "record_array",
"class": "array:record"}],
"runSpec": {"interpreter": "bash",
"bundledDepends": [],
"execDepends": [],
"code": '''
first_record=$(dx new record firstrecord --brief)
dx close $first_record
second_record=$(dx new record secondrecord --brief)
dx close $second_record
dx-jobutil-add-output record $first_record
dx-jobutil-add-output record_array $first_record --array
dx-jobutil-add-output record_array $second_record --array
'''}})["id"]
remote_job = dxpy.DXApplet(applet_id).run({})
remote_job.wait_on_done()
remote_job_output = remote_job.describe()["output"]["record_array"]
# check other dx functionality here for convenience
# dx describe/path resolution
jbor_array_ref = '{job_id}:record_array.'.format(job_id=remote_job.get_id())
desc_output = run('dx describe ' + jbor_array_ref + '0')
self.assertIn("firstrecord", desc_output)
self.assertNotIn("secondrecord", desc_output)
with self.assertSubprocessFailure(exit_code=3):
run("dx get " + remote_job.get_id() + ":record.foo")
with self.assertSubprocessFailure(stderr_regexp='not an array', exit_code=3):
run("dx get " + remote_job.get_id() + ":record.0")
with self.assertSubprocessFailure(stderr_regexp='out of range', exit_code=3):
run("dx get " + jbor_array_ref + '2')
# dx run
second_remote_job = run('dx run myapplet -y --brief -irecord=' + jbor_array_ref + '1').strip()
second_remote_job_desc = run('dx describe ' + second_remote_job)
self.assertIn(jbor_array_ref + '1', second_remote_job_desc)
self.assertIn(remote_job_output[1]["$dnanexus_link"], second_remote_job_desc)
self.assertNotIn(remote_job_output[0]["$dnanexus_link"], second_remote_job_desc)
# use dx get to hydrate a directory and test dx-run-app-locally
def create_app_dir_from_applet(applet_id):
tempdir = tempfile.mkdtemp()
with chdir(tempdir):
run('dx get ' + applet_id)
return os.path.join(tempdir, dxpy.describe(applet_id)['name'])
appdir = create_app_dir_from_applet(applet_id)
local_output = check_output(['dx-run-app-locally', appdir, '-irecord=' + jbor_array_ref + '1'])
self.assertIn(remote_job_output[1]["$dnanexus_link"], local_output)
self.assertNotIn(remote_job_output[0]["$dnanexus_link"], local_output)
@unittest.skipUnless(testutil.TEST_RUN_JOBS,
'skipping tests that would run jobs')
def test_dx_run_priority(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"name": "myapplet",
"dxapi": "1.0.0",
"runSpec": {"interpreter": "bash",
"code": ""},
"access": {"project": "VIEW",
"allProjects": "VIEW",
"network": []}})["id"]
normal_job_id = run("dx run myapplet --priority normal --brief -y").strip()
normal_job_desc = dxpy.describe(normal_job_id)
self.assertEqual(normal_job_desc["priority"], "normal")
high_priority_job_id = run("dx run myapplet --priority high --brief -y").strip()
high_priority_job_desc = dxpy.describe(high_priority_job_id)
self.assertEqual(high_priority_job_desc["priority"], "high")
# don't actually need these to run
run("dx terminate " + normal_job_id)
run("dx terminate " + high_priority_job_id)
# --watch implies --priority high
try:
run("dx run myapplet -y --watch")
except subprocess.CalledProcessError:
# ignore any watching errors; just want to test requested
# priority
pass
watched_job_id = run("dx find jobs -n 1 --brief").strip()
self.assertNotIn(watched_job_id, [normal_job_id, high_priority_job_id])
watched_job_desc = dxpy.describe(watched_job_id)
self.assertEqual(watched_job_desc['applet'], applet_id)
self.assertEqual(watched_job_desc['priority'], 'high')
# errors
with self.assertSubprocessFailure(exit_code=2):
# expect argparse error code 2 for bad choice
run("dx run myapplet --priority standard")
# no warning when no special access requested
dx_run_output = run("dx run myapplet --priority normal -y")
for string in ["WARNING", "developer", "Internet", "write access"]:
self.assertNotIn(string, dx_run_output)
# test for printing a warning when extra permissions are
# requested and run as normal priority
extra_perms_applet = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"runSpec": {"interpreter": "bash",
"code": ""},
"access": {"developer": True,
"project": "UPLOAD",
"network": ["github.com"]}})["id"]
# no warning when running at high priority
dx_run_output = run("dx run " + extra_perms_applet + " --priority high -y")
for string in ["WARNING", "developer", "Internet", "write access"]:
self.assertNotIn(string, dx_run_output)
# warning when running at normal priority; mention special
# permissions present
dx_run_output = run("dx run " + extra_perms_applet + " --priority normal -y")
for string in ["WARNING", "developer", "Internet", "write access"]:
self.assertIn(string, dx_run_output)
# no warning with --brief
dx_run_output = run("dx run " + extra_perms_applet + " --priority normal --brief -y")
self.assertRegex(dx_run_output.strip(), '^job-[0-9a-zA-Z]{24}$')
# test with allProjects set but no explicit permissions to the
# project context
extra_perms_applet = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": ""},
"access": {"allProjects": "CONTRIBUTE"}})["id"]
# no warning when running at high priority
dx_run_output = run("dx run " + extra_perms_applet + " --priority high -y")
for string in ["WARNING", "developer", "Internet", "write access"]:
self.assertNotIn(string, dx_run_output)
# warning when running at normal priority; mention special
# permissions present
dx_run_output = run("dx run " + extra_perms_applet + " --priority normal -y")
for string in ["WARNING", "write access"]:
self.assertIn(string, dx_run_output)
for string in ["developer", "Internet"]:
self.assertNotIn(string, dx_run_output)
# workflow tests
workflow_id = run("dx new workflow myworkflow --brief").strip()
run("dx add stage {workflow} {applet}".format(workflow=workflow_id,
applet=extra_perms_applet))
# no warning when run at high priority
dx_run_output = run("dx run myworkflow --priority high -y")
for string in ["WARNING", "developer", "Internet", "write access"]:
self.assertNotIn(string, dx_run_output)
# and check that priority was set properly
analysis_id = next(dxpy.find_executions(classname='analysis', limit=1))['id']
self.assertEqual(dxpy.describe(analysis_id)["priority"], "high")
# get warnings when run at normal priority
dx_run_output = run("dx run myworkflow --priority normal -y")
for string in ["WARNING", "write access"]:
self.assertIn(string, dx_run_output)
for string in ["developer", "Internet"]:
self.assertNotIn(string, dx_run_output)
# and check that priority was set properly
analysis_id = next(dxpy.find_executions(classname='analysis', limit=1))['id']
self.assertEqual(dxpy.describe(analysis_id)["priority"], "normal")
def test_dx_run_tags_and_properties(self):
        # success case: set tags and properties (including non-ASCII ones) at job creation time
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
property_names = ["$my.prop", "secoиdprop", "тhird prop"]
property_values = ["$hello.world", "Σ2,n", "stuff"]
the_tags = ["Σ1=n", "helloo0", "ωω"]
job_id = run("dx run " + applet_id + ' -inumber=32 --brief -y ' +
" ".join(["--property '" + prop[0] + "'='" + prop[1] + "'" for
prop in zip(property_names, property_values)]) +
"".join([" --tag " + tag for tag in the_tags])).strip()
job_desc = dxpy.api.job_describe(job_id)
        self.assertEqual(sorted(job_desc['tags']), sorted(the_tags))
self.assertEqual(len(job_desc['properties']), 3)
for name, value in zip(property_names, property_values):
self.assertEqual(job_desc['properties'][name], value)
# Test setting tags and properties afterwards
run("dx tag " + job_id + " foo bar foo")
run("dx set_properties " + job_id + " foo=bar Σ_1^n=n")
job_desc_lines = run("dx describe " + job_id + " --delim ' '").splitlines()
found_tags = False
found_properties = False
for line in job_desc_lines:
if line.startswith('Tags'):
self.assertIn("foo", line)
self.assertIn("bar", line)
found_tags = True
if line.startswith('Properties'):
self.assertIn("foo=bar", line)
self.assertIn("Σ_1^n=n", line)
found_properties = True
self.assertTrue(found_tags)
self.assertTrue(found_properties)
run("dx untag " + job_id + " foo")
run("dx unset_properties " + job_id + " Σ_1^n")
job_desc = json.loads(run("dx describe " + job_id + " --json"))
self.assertIn("bar", job_desc['tags'])
self.assertNotIn("foo", job_desc['tags'])
self.assertEqual(job_desc["properties"]["foo"], "bar")
self.assertNotIn("Σ_1^n", job_desc["properties"])
def test_dx_run_extra_args(self):
        # success case: --extra-args JSON is merged into the job's input and metadata
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
job_id = run("dx run " + applet_id + " -inumber=32 --name overwritten_name " +
'--delay-workspace-destruction --brief -y ' +
'--extra-args \'{"input": {"second": true}, "name": "new_name"}\'').strip()
job_desc = dxpy.api.job_describe(job_id)
self.assertTrue(job_desc['delayWorkspaceDestruction'])
self.assertEqual(job_desc['name'], 'new_name')
self.assertIn('number', job_desc['input'])
self.assertEqual(job_desc['input']['number'], 32)
self.assertIn('second', job_desc['input'])
self.assertEqual(job_desc['input']['second'], True)
# parsing error
with self.assertSubprocessFailure(stderr_regexp='JSON', exit_code=3):
run("dx run " + applet_id + " --extra-args not-a-JSON-string")
def test_dx_run_clone(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
other_applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
def check_new_job_metadata(new_job_desc, cloned_job_desc, overridden_fields=[]):
'''
:param new_job_desc: the describe hash in the new job
:param cloned_job_desc: the description of the job that was cloned
:param overridden_fields: the metadata fields in describe that were overridden (and should not be checked)
'''
# check clonedFrom hash in new job's details
self.assertIn('clonedFrom', new_job_desc['details'])
self.assertEqual(new_job_desc['details']['clonedFrom']['id'], cloned_job_desc['id'])
self.assertEqual(new_job_desc['details']['clonedFrom']['executable'],
cloned_job_desc.get('applet') or cloned_job_desc.get('app'))
for metadata in ['project', 'folder', 'name', 'runInput', 'systemRequirements']:
self.assertEqual(new_job_desc['details']['clonedFrom'][metadata],
cloned_job_desc[metadata])
            # check that the fields which were not overridden match, or have the expected transformation
all_fields = set(['name', 'project', 'folder', 'input', 'systemRequirements',
'applet', 'tags', 'properties', 'priority'])
fields_to_check = all_fields.difference(overridden_fields)
for metadata in fields_to_check:
if metadata == 'name':
self.assertEqual(new_job_desc[metadata], cloned_job_desc[metadata] + ' (re-run)')
else:
self.assertEqual(new_job_desc[metadata], cloned_job_desc[metadata])
# originally, set everything and have an instance type for all
# entry points
orig_job_id = run("dx run " + applet_id +
' -inumber=32 --name jobname --folder /output ' +
'--instance-type mem2_hdd2_x2 ' +
'--tag Ψ --tag $hello.world ' +
'--property Σ_1^n=n --property $hello.=world ' +
'--priority normal ' +
'--brief -y').strip()
orig_job_desc = dxpy.api.job_describe(orig_job_id)
# control
self.assertEqual(orig_job_desc['name'], 'jobname')
self.assertEqual(orig_job_desc['project'], self.project)
self.assertEqual(orig_job_desc['folder'], '/output')
self.assertEqual(orig_job_desc['input'], {'number': 32})
self.assertEqual(orig_job_desc['systemRequirements'], {'*': {'instanceType': 'mem2_hdd2_x2'}})
# clone the job
# nothing different
new_job_desc = dxpy.api.job_describe(run("dx run --clone " + orig_job_id +
" --brief -y").strip())
check_new_job_metadata(new_job_desc, orig_job_desc)
def get_new_job_desc(cmd_suffix):
new_job_id = run("dx run --clone " + orig_job_id + " --brief -y " + cmd_suffix).strip()
return dxpy.api.job_describe(new_job_id)
# override applet
new_job_desc = get_new_job_desc(other_applet_id)
self.assertEqual(new_job_desc['applet'], other_applet_id)
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['applet'])
# override name
new_job_desc = get_new_job_desc("--name newname")
self.assertEqual(new_job_desc['name'], 'newname')
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['name'])
# override tags
new_job_desc = get_new_job_desc("--tag new_tag --tag second_new_tag")
self.assertEqual(new_job_desc['tags'], ['new_tag', 'second_new_tag'])
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['tags'])
# override properties
new_job_desc = get_new_job_desc("--property foo=bar --property baz=quux")
self.assertEqual(new_job_desc['properties'], {"foo": "bar", "baz": "quux"})
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['properties'])
# override priority
new_job_desc = get_new_job_desc("--priority high")
self.assertEqual(new_job_desc['priority'], "high")
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['priority'])
# override folder
new_job_desc = get_new_job_desc("--folder /otherfolder")
self.assertEqual(new_job_desc['folder'], '/otherfolder')
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['folder'])
# override project
new_job_desc = get_new_job_desc("--project " + self.other_proj_id)
self.assertEqual(new_job_desc['project'], self.other_proj_id)
self.assertEqual(new_job_desc['folder'], '/output')
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['project', 'folder'])
# override project and folder
new_job_desc = get_new_job_desc("--folder " + self.other_proj_id + ":")
self.assertEqual(new_job_desc['project'], self.other_proj_id)
self.assertEqual(new_job_desc['folder'], '/')
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['project', 'folder'])
# override input with -i
new_job_desc = get_new_job_desc("-inumber=42")
self.assertEqual(new_job_desc['input'], {"number": 42})
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['input'])
# add other input fields with -i
new_job_desc = get_new_job_desc("-inumber2=42")
self.assertEqual(new_job_desc['input'], {"number": 32, "number2": 42})
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['input'])
# override input with --input-json (original input discarded)
new_job_desc = get_new_job_desc("--input-json '{\"number2\": 42}'")
self.assertEqual(new_job_desc['input'], {"number2": 42})
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['input'])
# override the blanket instance type
new_job_desc = get_new_job_desc("--instance-type mem2_hdd2_x1")
self.assertEqual(new_job_desc['systemRequirements'],
{'*': {'instanceType': 'mem2_hdd2_x1'}})
check_new_job_metadata(new_job_desc, orig_job_desc,
overridden_fields=['systemRequirements'])
# override instance type for specific entry point(s)
new_job_desc = get_new_job_desc("--instance-type '" +
json.dumps({"some_ep": "mem2_hdd2_x1",
"some_other_ep": "mem2_hdd2_x4"}) + "'")
self.assertEqual(new_job_desc['systemRequirements'],
{'*': {'instanceType': 'mem2_hdd2_x2'},
'some_ep': {'instanceType': 'mem2_hdd2_x1'},
'some_other_ep': {'instanceType': 'mem2_hdd2_x4'}})
check_new_job_metadata(new_job_desc, orig_job_desc,
overridden_fields=['systemRequirements'])
# new original job with entry point-specific systemRequirements
orig_job_id = run("dx run " + applet_id +
" --instance-type '{\"some_ep\": \"mem2_hdd2_x1\"}' --brief -y").strip()
orig_job_desc = dxpy.api.job_describe(orig_job_id)
self.assertEqual(orig_job_desc['systemRequirements'],
{'some_ep': {'instanceType': 'mem2_hdd2_x1'}})
# override all entry points
new_job_desc = get_new_job_desc("--instance-type mem2_hdd2_x2")
self.assertEqual(new_job_desc['systemRequirements'], {'*': {'instanceType': 'mem2_hdd2_x2'}})
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['systemRequirements'])
# override a different entry point; original untouched
new_job_desc = get_new_job_desc("--instance-type '{\"some_other_ep\": \"mem2_hdd2_x2\"}'")
self.assertEqual(new_job_desc['systemRequirements'],
{'some_ep': {'instanceType': 'mem2_hdd2_x1'},
'some_other_ep': {'instanceType': 'mem2_hdd2_x2'}})
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['systemRequirements'])
# override the same entry point
new_job_desc = get_new_job_desc("--instance-type '{\"some_ep\": \"mem2_hdd2_x2\"}'")
self.assertEqual(new_job_desc['systemRequirements'],
{'some_ep': {'instanceType': 'mem2_hdd2_x2'}})
check_new_job_metadata(new_job_desc, orig_job_desc, overridden_fields=['systemRequirements'])
@unittest.skipUnless(testutil.TEST_RUN_JOBS,
'skipping tests that would run jobs')
def test_dx_describe_job_with_resolved_jbors(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [{"name": "array", "class": "array:int"}],
"outputSpec": [{"name": "array", "class": "array:int"}],
"runSpec": {"interpreter": "python2.7",
"code": '''#!/usr/bin/env python
@dxpy.entry_point('main')
def main(array):
output = {"array": array}
return output
'''}})['id']
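        # Launch the first job directly via the API; its "array" output will be
        # referenced by later jobs through job-based object references (JBORs).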
first_job_handler = dxpy.DXJob(dxpy.api.applet_run(applet_id,
{"project": self.project,
"input": {"array": [0, 1, 5]}})['id'])
# Launch a second job which depends on the first, using two
# arrays in an array (to be flattened) as input
second_job_run_input = {"project": self.project,
"input": {"array": [first_job_handler.get_output_ref("array"),
first_job_handler.get_output_ref("array")]}}
second_job_handler = dxpy.DXJob(dxpy.api.applet_run(applet_id, second_job_run_input)['id'])
first_job_handler.wait_on_done()
# Need to wait for second job to become runnable (idle and
# waiting_on_input are the only states before it becomes
# runnable)
while second_job_handler.describe()['state'] in ['idle', 'waiting_on_input']:
time.sleep(0.1)
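        # Once the first job is done, its JBOR should appear resolved to the
        # actual value in the human-readable describe output of the second job.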
second_job_desc = run("dx describe " + second_job_handler.get_id())
first_job_res = first_job_handler.get_id() + ":array => [ 0, 1, 5 ]"
self.assertIn(first_job_res, second_job_desc)
# Launch another job which depends on the first done job and
# the second (not-done) job; the first job can and should be
# mentioned in the resolved JBORs list, but the second
# shouldn't.
third_job_run_input = {"project": self.project,
"input": {"array": [first_job_handler.get_output_ref("array"),
first_job_handler.get_output_ref("array", index=2),
second_job_handler.get_output_ref("array")]}}
third_job = dxpy.api.applet_run(applet_id, third_job_run_input)['id']
third_job_desc = run("dx describe " + third_job)
self.assertIn(first_job_res, third_job_desc)
self.assertIn(first_job_handler.get_id() + ":array.2 => 5", third_job_desc)
self.assertNotIn(second_job_handler.get_id() + ":array =>", third_job_desc)
def test_dx_run_ssh_no_config(self):
# Create minimal applet.
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "python2.7",
"code": '''#!/usr/bin/env python
@dxpy.entry_point('main')
def main():
return
'''}})['id']
# Case: Execute "dx run --ssh" before configuring SSH.
path = tempfile.mkdtemp()
shell = pexpect.spawn("dx run --ssh " + applet_id, env=dict(os.environ, DX_USER_CONF_DIR=path))
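        # Decline the SSH setup prompt; without a configured key, "dx run --ssh"
        # is expected to fail with an IOError and exit code 3.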
shell.expect("Warning:")
shell.sendline("N")
shell.expect("IOError")
shell.close()
self.assertEqual(3, shell.exitstatus)


class TestDXClientWorkflow(DXTestCase):
default_inst_type = "mem2_hdd2_x2"
@unittest.skipUnless(testutil.TEST_RUN_JOBS, 'skipping test that would run jobs')
def test_dx_run_workflow(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [{"name": "number", "class": "int"}],
"outputSpec": [{"name": "number", "class": "int"}],
"runSpec": {"interpreter": "bash",
"code": "exit 1"}
})['id']
workflow_id = run("dx new workflow myworkflow --output-folder /foo --brief").strip()
stage_id = dxpy.api.workflow_add_stage(workflow_id,
{"editVersion": 0, "executable": applet_id})['stage']
analysis_id = run("dx run " + workflow_id + " -i0.number=32 -y --brief").strip()
self.assertTrue(analysis_id.startswith('analysis-'))
analysis_desc = run("dx describe " + analysis_id)
self.assertIn(stage_id + '.number = 32', analysis_desc)
self.assertIn('foo', analysis_desc)
analysis_desc = json.loads(run("dx describe " + analysis_id + " --json"))
time.sleep(2) # May need to wait for job to be created in the system
job_desc = run("dx describe " + analysis_desc["stages"][0]["execution"]["id"])
self.assertIn(' number = 32', job_desc)
# Test setting tags and properties on analysis
run("dx tag " + analysis_id + " foo bar foo")
run("dx set_properties " + analysis_id + " foo=bar Σ_1^n=n")
analysis_desc_lines = run("dx describe " + analysis_id).splitlines()
found_tags = False
found_properties = False
for line in analysis_desc_lines:
if line.startswith('Tags'):
self.assertIn("foo", line)
self.assertIn("bar", line)
found_tags = True
if line.startswith('Properties'):
self.assertIn("foo=bar", line)
self.assertIn("Σ_1^n=n", line)
found_properties = True
self.assertTrue(found_tags)
self.assertTrue(found_properties)
run("dx untag " + analysis_id + " foo")
run("dx unset_properties " + analysis_id + " Σ_1^n")
analysis_desc = run("dx describe " + analysis_id + " --delim ' '")
self.assertIn("Tags bar\n", analysis_desc)
self.assertIn("Properties foo=bar\n", analysis_desc)
# Missing input throws appropriate error
with self.assertSubprocessFailure(stderr_regexp='Some inputs.+are missing', exit_code=3):
run("dx run " + workflow_id + " -y")
# Setting the input in the workflow allows it to be run
run("dx update stage " + workflow_id + " 0 -inumber=42")
run("dx run " + workflow_id + " -y")
# initialize a new workflow from an analysis
new_workflow_desc = run("dx new workflow --init " + analysis_id)
self.assertNotIn(workflow_id, new_workflow_desc)
self.assertIn(analysis_id, new_workflow_desc)
self.assertIn(stage_id, new_workflow_desc)
@unittest.skipUnless(testutil.TEST_RUN_JOBS, 'skipping test that runs jobs')
def test_dx_run_clone_analysis(self):
dxpy.api.applet_new({
"project": self.project,
"name": "myapplet",
"dxapi": "1.0.0",
"inputSpec": [{"name": "number", "class": "int"}],
"outputSpec": [{"name": "number", "class": "int"}],
"runSpec": {"interpreter": "bash",
"code": "dx-jobutil-add-output number 32"}
})
# make a workflow with the stage twice
run("dx new workflow myworkflow")
run("dx add stage myworkflow myapplet -inumber=32 --instance-type mem2_hdd2_x2")
run("dx add stage myworkflow myapplet -inumber=52 --instance-type mem2_hdd2_x1")
# run it
analysis_id = run("dx run myworkflow -y --brief").strip()
# test cases
no_change_analysis_id = run("dx run --clone " + analysis_id + " --brief -y").strip()
change_an_input_analysis_id = run("dx run --clone " + analysis_id +
" -i0.number=52 --brief -y").strip()
change_inst_type_analysis_id = run("dx run --clone " + analysis_id +
" --instance-type mem2_hdd2_x2 --brief -y").strip()
time.sleep(2) # May need to wait for any new jobs to be created in the system
# make assertions for test cases
orig_analysis_desc = dxpy.describe(analysis_id)
# no change: expect both stages to have reused jobs
no_change_analysis_desc = dxpy.describe(no_change_analysis_id)
self.assertEqual(no_change_analysis_desc['stages'][0]['execution']['id'],
orig_analysis_desc['stages'][0]['execution']['id'])
self.assertEqual(no_change_analysis_desc['stages'][1]['execution']['id'],
orig_analysis_desc['stages'][1]['execution']['id'])
# change an input: new job for that stage
change_an_input_analysis_desc = dxpy.describe(change_an_input_analysis_id)
self.assertEqual(change_an_input_analysis_desc['stages'][0]['execution']['input'],
{"number": 52})
# second stage still the same
self.assertEqual(change_an_input_analysis_desc['stages'][1]['execution']['id'],
orig_analysis_desc['stages'][1]['execution']['id'])
# change inst type: only affects stage with different inst type
change_inst_type_analysis_desc = dxpy.describe(change_inst_type_analysis_id)
# first stage still the same
self.assertEqual(change_inst_type_analysis_desc['stages'][0]['execution']['id'],
orig_analysis_desc['stages'][0]['execution']['id'])
# second stage different
self.assertNotEqual(change_inst_type_analysis_desc['stages'][1]['execution']['id'],
orig_analysis_desc['stages'][1]['execution']['id'])
self.assertEqual(change_inst_type_analysis_desc['stages'][1]['execution']['instanceType'],
'mem2_hdd2_x2')
# Run in a different project and add some metadata
        other_proj_id = run("dx new project 'cloned analysis project' --brief").strip()
        try:
new_analysis_id = run("dx run --clone " + analysis_id + " --destination " + other_proj_id +
":foo --tag sometag --property propkey=propval " +
"--brief -y").strip()
new_analysis_desc = dxpy.describe(new_analysis_id)
self.assertEqual(new_analysis_desc['project'], other_proj_id)
self.assertEqual(new_analysis_desc['folder'], '/foo')
self.assertEqual(new_analysis_desc['tags'], ['sometag'])
self.assertEqual(new_analysis_desc['properties'], {'propkey': 'propval'})
time.sleep(2)
new_job_desc = dxpy.describe(new_analysis_desc['stages'][0]['execution']['id'])
self.assertEqual(new_job_desc['project'], other_proj_id)
self.assertEqual(new_job_desc['input']['number'], 32)
finally:
run("dx rmproject -y " + other_proj_id)
@unittest.skipUnless(testutil.TEST_RUN_JOBS, 'skipping test that runs jobs')
def test_dx_run_workflow_prints_cached_executions(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"name": "myapplet",
"dxapi": "1.0.0",
"inputSpec": [{"name": "number", "class": "int"}],
"outputSpec": [{"name": "number", "class": "int"}],
"runSpec": {"interpreter": "bash",
"code": "dx-jobutil-add-output number 32"}
})['id']
workflow_id = run("dx new workflow myworkflow --brief").strip()
stage_id = run("dx add stage myworkflow myapplet --brief").strip()
run_resp = dxpy.api.workflow_run(workflow_id,
{"project": self.project,
"input": {(stage_id + ".number"): 32}})
first_analysis_id = run_resp['id']
self.assertTrue(first_analysis_id.startswith('analysis-'))
job_id = run_resp['stages'][0]
self.assertTrue(job_id.startswith('job-'))
# wait for events to propagate and for the job to be created
time.sleep(2)
# Running the workflow again with no changes should result in
# the job getting reused
run_output = run("dx run " + workflow_id + " -i0.number=32 -y").strip()
self.assertIn('will reuse results from a previous analysis', run_output)
self.assertIn(job_id, run_output)
second_analysis_id = run_output[run_output.rfind('analysis-'):]
self.assertNotEqual(first_analysis_id, second_analysis_id)
# Running the workflow again with changes to the input should
# NOT result in the job getting reused
run_output = run("dx run " + workflow_id + " -i0.number=52 -y").strip()
self.assertNotIn('will reuse results from a previous analysis', run_output)
self.assertNotIn(job_id, run_output)
@unittest.skipUnless(testutil.TEST_RUN_JOBS, 'skipping test that runs jobs')
def test_dx_run_workflow_with_inst_type_requests(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"name": "myapplet",
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": ""}
})['id']
workflow_id = run("dx new workflow myworkflow --brief").strip()
stage_ids = [run("dx add stage myworkflow myapplet --name 'an=awful=name' --brief").strip(),
run("dx add stage myworkflow myapplet --name 'second' --brief").strip()]
# control (no request)
no_req_id = run('dx run myworkflow -y --brief').strip()
# request for all stages
all_stg_req_id = run('dx run myworkflow --instance-type mem2_hdd2_x1 -y --brief').strip()
# request for a stage specifically (by name)
stg_req_id = run('dx run myworkflow --instance-type an=awful=name=mem2_hdd2_x2 ' +
'--instance-type second=mem2_hdd2_x1 -y --brief').strip()
time.sleep(2) # give time for all jobs to be populated
no_req_desc = dxpy.describe(no_req_id)
self.assertEqual(no_req_desc['stages'][0]['execution']['instanceType'],
self.default_inst_type)
self.assertEqual(no_req_desc['stages'][1]['execution']['instanceType'],
self.default_inst_type)
all_stg_req_desc = dxpy.describe(all_stg_req_id)
self.assertEqual(all_stg_req_desc['stages'][0]['execution']['instanceType'],
'mem2_hdd2_x1')
self.assertEqual(all_stg_req_desc['stages'][1]['execution']['instanceType'],
'mem2_hdd2_x1')
stg_req_desc = dxpy.describe(stg_req_id)
self.assertEqual(stg_req_desc['stages'][0]['execution']['instanceType'],
'mem2_hdd2_x2')
self.assertEqual(stg_req_desc['stages'][1]['execution']['instanceType'],
'mem2_hdd2_x1')
# request for a stage specifically (by index); if same inst
# type as before, should reuse results
self.assertIn(stg_req_desc['stages'][0]['execution']['id'],
run('dx run myworkflow --instance-type 0=mem2_hdd2_x2 -y'))
# and by stage ID
self.assertIn(stg_req_desc['stages'][0]['execution']['id'],
run('dx run myworkflow --instance-type ' + stage_ids[0] + '=mem2_hdd2_x2 -y'))
@unittest.skipUnless(testutil.TEST_RUN_JOBS, 'skipping test that would attempt to run a job')
def test_dx_run_workflow_with_stage_folders(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"name": "myapplet",
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": ""}
})['id']
workflow_id = run("dx new workflow myworkflow --brief").strip()
stage_ids = [run("dx add stage myworkflow myapplet --name 'a_simple_name' " +
"--output-folder /foo --brief").strip(),
run("dx add stage myworkflow myapplet --name 'second' " +
"--relative-output-folder foo --brief").strip()]
cmd = 'dx run myworkflow --folder /output -y --brief --rerun-stage "*" '
# control (no runtime request for stage folders)
no_req_id = run(cmd).strip()
# request for all stages
all_stg_folder_id = run(cmd + '--stage-output-folder "*" bar').strip()
all_stg_rel_folder_id = run(cmd + '--stage-relative-output-folder "*" /bar').strip()
# request for stage specifically (by name)
        per_stg_folders_id = run(cmd + '--stage-relative-output-folder a_simple_name /baz ' +  # relative to /output => /output/baz
                                 '--stage-output-folder second baz').strip()  # resolved against the current folder => /baz
# request for stage specifically (by index)
per_stg_folders_id_2 = run(cmd + '--stage-output-folder 1 quux ' +
'--stage-relative-output-folder 0 /quux').strip()
# only modify one
per_stg_folders_id_3 = run(cmd + '--stage-output-folder ' + stage_ids[0] + ' /hello').strip()
time.sleep(2) # give time for all jobs to be generated
def expect_stage_folders(analysis_id, first_stage_folder, second_stage_folder):
analysis_desc = dxpy.describe(analysis_id)
self.assertEqual(analysis_desc['stages'][0]['execution']['folder'],
first_stage_folder)
self.assertEqual(analysis_desc['stages'][1]['execution']['folder'],
second_stage_folder)
expect_stage_folders(no_req_id, '/foo', '/output/foo')
expect_stage_folders(all_stg_folder_id, '/bar', '/bar')
expect_stage_folders(all_stg_rel_folder_id, '/output/bar', '/output/bar')
expect_stage_folders(per_stg_folders_id, '/output/baz', '/baz')
expect_stage_folders(per_stg_folders_id_2, '/output/quux', '/quux')
expect_stage_folders(per_stg_folders_id_3, '/hello', '/output/foo')
@unittest.skipUnless(testutil.TEST_RUN_JOBS, 'skipping test that would attempt to run a job')
def test_inaccessible_stage(self):
applet_id = dxpy.api.applet_new({"name": "myapplet",
"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [{"name": "number", "class": "int"}],
"outputSpec": [{"name": "number", "class": "int"}],
"runSpec": {"interpreter": "bash",
"code": "exit 1"}
})['id']
workflow_id = run("dx new workflow myworkflow --brief").strip()
run("dx add stage myworkflow myapplet")
run("dx rm myapplet")
# describe shows it
desc = run("dx describe myworkflow")
self.assertIn("inaccessible", desc)
# list stages shows it
list_output = run("dx list stages myworkflow")
self.assertIn("inaccessible", list_output)
# run refuses to run it
with self.assertSubprocessFailure(stderr_regexp='following inaccessible stage\(s\)',
exit_code=3):
run("dx run myworkflow")
@unittest.skipUnless(testutil.TEST_ENV, 'skipping test that would clobber your local environment')
def test_dx_new_workflow_without_context(self):
# Without project context, cannot create new object without
# project qualified path
with without_project_context():
with self.assertSubprocessFailure(stderr_regexp='expected the path to be qualified with a project',
exit_code=3):
run("dx new workflow foo")
# Can create object with explicit project qualifier
workflow_id = run("dx new workflow --brief " + self.project + ":foo").strip()
self.assertEqual(dxpy.DXWorkflow(workflow_id).name, "foo")
def test_dx_new_workflow(self):
workflow_id = run("dx new workflow --title=тitle --summary=SΨmmary --brief " +
"--description=DΣsc wØrkflØwname --output-folder /wØrkflØwØutput").strip()
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(desc["id"], workflow_id)
self.assertEqual(desc["editVersion"], 0)
self.assertEqual(desc["name"], "wØrkflØwname")
self.assertEqual(desc["title"], "тitle")
self.assertEqual(desc["summary"], "SΨmmary")
self.assertEqual(desc["description"], "DΣsc")
self.assertEqual(desc["outputFolder"], "/wØrkflØwØutput")
self.assertEqual(desc["project"], self.project)
# add some stages and then create a new one initializing from
# the first
applet_id = dxpy.api.applet_new({"name": "myapplet",
"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash", "code": ""}
})['id']
run("dx add stage wØrkflØwname " + applet_id)
new_workflow_id = run("dx new workflow --init wØrkflØwname --title newtitle " +
"--summary newsummary --output-folder /output --brief").strip()
desc = dxpy.describe(new_workflow_id)
self.assertNotEqual(new_workflow_id, workflow_id)
self.assertEqual(desc["id"], new_workflow_id)
self.assertEqual(desc["editVersion"], 0)
self.assertEqual(desc["name"], "wØrkflØwname")
self.assertEqual(desc["title"], "newtitle")
self.assertEqual(desc["summary"], "newsummary")
self.assertEqual(desc["description"], "DΣsc")
self.assertEqual(desc["outputFolder"], "/output")
self.assertEqual(desc["project"], self.project)
self.assertEqual(len(desc["stages"]), 1)
self.assertEqual(desc["stages"][0]["executable"], applet_id)
# run without --brief; should see initializedFrom information
new_workflow_desc = run("dx new workflow --init " + workflow_id)
self.assertIn(workflow_id, new_workflow_desc)
# error when initializing from a nonexistent workflow
run("dx rm " + workflow_id)
with self.assertSubprocessFailure(stderr_regexp='could not be found', exit_code=3):
run("dx new workflow --init " + workflow_id)
def test_dx_workflow_resolution(self):
with self.assertSubprocessFailure(stderr_regexp='Unable to resolve', exit_code=3):
run("dx update workflow foo")
record_id = run("dx new record --type pipeline --brief").strip()
run("dx describe " + record_id)
with self.assertSubprocessFailure(stderr_regexp='Could not resolve', exit_code=3):
run("dx update workflow " + record_id)
def test_dx_describe_workflow(self):
workflow_id = run("dx new workflow myworkflow --title title --brief").strip()
desc = run("dx describe " + workflow_id)
self.assertIn("Input Spec", desc)
self.assertIn("Output Spec", desc)
applet_id = dxpy.api.applet_new({"name": "myapplet",
"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [{"name": "number", "class": "int"}],
"outputSpec": [{"name": "number", "class": "int"}],
"runSpec": {"interpreter": "bash",
"code": "exit 0"}
})['id']
first_stage = run("dx add stage " + workflow_id + " -inumber=10 " + applet_id +
" --brief").strip()
desc = run("dx describe myworkflow")
self.assertIn("Input Spec", desc)
self.assertIn("default=10", desc)
def test_dx_add_remove_list_stages(self):
workflow_id = run("dx new workflow myworkflow --title title --brief").strip()
run("dx describe " + workflow_id)
applet_id = dxpy.api.applet_new({"name": "myapplet",
"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [{"name": "number", "class": "int"}],
"outputSpec": [{"name": "number", "class": "int"}],
"runSpec": {"interpreter": "bash",
"code": "exit 0"}
})['id']
stage_ids = []
# list stages when there are no stages yet
list_output = run("dx list stages myworkflow")
self.assertIn("No stages", list_output)
stage_ids.append(run("dx add stage " + workflow_id + " --name first " + applet_id +
" --brief").strip())
# not-yet-existing folder path should work
# also, set input and instance type
stage_ids.append(run("dx add stage myworkflow --relative-output-folder output myapplet " +
"--brief -inumber=32 --instance-type mem2_hdd2_x2").strip())
# test relative folder path
run("dx mkdir -p a/b/c")
cd("a/b/c")
stage_ids.append(run("dx add stage " + workflow_id + " --name second --output-folder . " +
applet_id +
" --brief --instance-type '{\"main\": \"mem2_hdd2_x2\"}'").strip())
with self.assertSubprocessFailure(stderr_regexp='not found in the input spec', exit_code=3):
# input spec should be checked
run("dx add stage " + workflow_id + " " + applet_id + " -inonexistent=42")
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(len(desc['stages']), len(stage_ids))
for i, stage_id in enumerate(stage_ids):
self.assertEqual(desc['stages'][i]['id'], stage_id)
self.assertEqual(desc['stages'][0]['folder'], None)
self.assertEqual(desc['stages'][1]['folder'], 'output')
self.assertEqual(desc['stages'][1]['input']['number'], 32)
self.assertEqual(desc['stages'][1]['systemRequirements'],
{"*": {"instanceType": "mem2_hdd2_x2"}})
self.assertEqual(desc['stages'][2]['folder'], '/a/b/c')
self.assertEqual(desc['stages'][2]['systemRequirements'],
{"main": {"instanceType": "mem2_hdd2_x2"}})
# errors
# when adding a stage with both absolute and relative output folders
with self.assertSubprocessFailure(stderr_regexp="output-folder", exit_code=2):
run("dx add stage " + workflow_id + " " + applet_id +
" --output-folder /foo --relative-output-folder foo")
# bad executable that can't be found
with self.assertSubprocessFailure(stderr_regexp="ResolutionError", exit_code=3):
run("dx add stage " + workflow_id + " foo")
# bad input
with self.assertSubprocessFailure(stderr_regexp="parsed", exit_code=3):
run("dx add stage " + workflow_id + " -inumber=foo " + applet_id)
# bad instance type arg
with self.assertSubprocessFailure(stderr_regexp="instance-type", exit_code=3):
run("dx add stage " + workflow_id + " " + applet_id + " --instance-type {]")
        # unrecognized instance type
with self.assertSubprocessFailure(stderr_regexp="InvalidInput", exit_code=3):
run("dx add stage " + workflow_id + " " + applet_id + " --instance-type foo")
# list stages
list_output = run("dx list stages " + workflow_id)
self.assertIn("myworkflow (" + workflow_id + ")", list_output)
self.assertIn("Title: title", list_output)
self.assertIn("Output Folder: -", list_output)
        for i in range(len(stage_ids)):
self.assertIn("Stage " + str(i), list_output)
self.assertIn("<workflow output folder>/output", list_output)
self.assertIn("number=32", list_output)
self.assertIn("/a/b/c", list_output)
run("dx describe " + workflow_id)
# remove a stage by index
remove_output = run("dx remove stage /myworkflow 1")
self.assertIn(stage_ids[1], remove_output)
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(len(desc['stages']), 2)
self.assertEqual(desc['stages'][0]['id'], stage_ids[0])
self.assertEqual(desc['stages'][0]['folder'], None)
self.assertEqual(desc['stages'][1]['id'], stage_ids[2])
self.assertEqual(desc['stages'][1]['folder'], '/a/b/c')
# remove a stage by ID
remove_output = run("dx remove stage " + workflow_id + " " + stage_ids[0] + ' --brief').strip()
self.assertEqual(remove_output, stage_ids[0])
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(len(desc['stages']), 1)
self.assertEqual(desc['stages'][0]['id'], stage_ids[2])
self.assertEqual(desc['stages'][0]['name'], 'second')
self.assertEqual(desc['stages'][0]['folder'], '/a/b/c')
# remove a stage by name
run("dx remove stage " + workflow_id + " second")
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(len(desc['stages']), 0)
# remove something out of range
with self.assertSubprocessFailure(stderr_regexp="out of range", exit_code=3):
run("dx remove stage /myworkflow 5")
# remove some bad stage ID
with self.assertSubprocessFailure(stderr_regexp="nor found as a stage name", exit_code=3):
run("dx remove stage /myworkflow badstageID")
# remove nonexistent stage
with self.assertSubprocessFailure(stderr_regexp="ResourceNotFound", exit_code=3):
run("dx remove stage /myworkflow stage-123456789012345678901234")
def test_dx_update_workflow(self):
workflow_id = run("dx new workflow myworkflow --brief").strip()
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(desc['editVersion'], 0)
self.assertEqual(desc['title'], "myworkflow")
self.assertIsNone(desc["outputFolder"])
# set title, summary, description, outputFolder
run("dx update workflow myworkflow --title тitle --summary SΨmmary --description=DΣsc " +
"--output-folder .")
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(desc['editVersion'], 1)
self.assertEqual(desc['title'], "тitle")
self.assertEqual(desc['summary'], "SΨmmary")
self.assertEqual(desc['description'], "DΣsc")
self.assertEqual(desc['outputFolder'], "/")
# describe
describe_output = run("dx describe myworkflow --delim ' '")
self.assertIn("Output Folder /", describe_output)
# unset title, outputFolder
run("dx update workflow myworkflow --no-title --no-output-folder")
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(desc['editVersion'], 2)
self.assertEqual(desc['title'], "myworkflow")
self.assertIsNone(desc['outputFolder'])
# describe
describe_output = run("dx describe myworkflow --delim ' '")
self.assertNotIn("Title тitle", describe_output)
self.assertIn("Summary SΨmmary", describe_output)
self.assertNotIn("Description", describe_output)
self.assertNotIn("DΣsc", describe_output)
self.assertIn("Output Folder -", describe_output)
describe_output = run("dx describe myworkflow --verbose --delim ' '")
self.assertIn("Description DΣsc", describe_output)
# no-op
output = run("dx update workflow myworkflow")
self.assertIn("No updates requested", output)
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(desc['editVersion'], 2)
self.assertEqual(desc['title'], "myworkflow")
with self.assertSubprocessFailure(stderr_regexp="no-title", exit_code=2):
run("dx update workflow myworkflow --title foo --no-title")
with self.assertSubprocessFailure(stderr_regexp="no-title", exit_code=2):
run("dx update workflow myworkflow --output-folder /foo --no-output-folder")
def test_dx_update_stage(self):
workflow_id = run("dx new workflow myworkflow --brief").strip()
run("dx describe " + workflow_id)
applet_id = dxpy.api.applet_new({"name": "myapplet",
"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [{"name": "number", "class": "int"}],
"outputSpec": [{"name": "number", "class": "int"}],
"runSpec": {"interpreter": "bash",
"code": "exit 0"}
})['id']
stage_id = run("dx add stage " + workflow_id + " " + applet_id + " --brief").strip()
empty_applet_id = dxpy.api.applet_new({"name": "emptyapplet",
"project": self.project,
"dxapi": "1.0.0",
"inputSpec": [],
"outputSpec": [],
"runSpec": {"interpreter": "bash",
"code": "exit 0"}
})['id']
desc = dxpy.api.workflow_describe(workflow_id)
self.assertIsNone(desc["stages"][0]["name"])
self.assertEqual(desc["stages"][0]["folder"], None)
self.assertEqual(desc["stages"][0]["input"], {})
self.assertEqual(desc["stages"][0]["systemRequirements"], {})
# set the name, folder, some input, and the instance type
run("dx update stage myworkflow 0 --name тitle -inumber=32 --relative-output-folder=foo " +
"--instance-type mem2_hdd2_x2")
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(desc["editVersion"], 2)
self.assertEqual(desc["stages"][0]["name"], "тitle")
self.assertEqual(desc["stages"][0]["folder"], "foo")
self.assertEqual(desc["stages"][0]["input"]["number"], 32)
self.assertEqual(desc["stages"][0]["systemRequirements"],
{"*": {"instanceType": "mem2_hdd2_x2"}})
# use a relative folder path and also set instance type using JSON
run("dx update stage myworkflow 0 --name тitle -inumber=32 --output-folder=. " +
"--instance-type '{\"main\": \"mem2_hdd2_x2\"}'")
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(desc["editVersion"], 3)
self.assertEqual(desc["stages"][0]["folder"], "/")
self.assertEqual(desc["stages"][0]["systemRequirements"],
{"main": {"instanceType": "mem2_hdd2_x2"}})
# unset name
run("dx update stage myworkflow " + stage_id + " --no-name")
desc = dxpy.api.workflow_describe(workflow_id)
self.assertEqual(desc["editVersion"], 4)
self.assertIsNone(desc["stages"][0]["name"])
# set incompatible executable; expect a helpful error msg
# telling us to use --force; then use it
with self.assertSubprocessFailure(stderr_regexp="--force", exit_code=3):
run("dx update stage myworkflow 0 --executable " + empty_applet_id)
run("dx update stage myworkflow 0 --force --executable " + empty_applet_id)
run("dx rm " + empty_applet_id)
desc_string = run("dx describe myworkflow")
run("dx update stage myworkflow 0 --force --executable " + applet_id)
# some errors
with self.assertSubprocessFailure(stderr_regexp="no-name", exit_code=2):
run("dx update stage myworkflow 0 --name foo --no-name")
with self.assertSubprocessFailure(stderr_regexp="output-folder", exit_code=2):
run("dx update stage myworkflow 0 --output-folder /foo --relative-output-folder foo")
with self.assertSubprocessFailure(stderr_regexp="parsed", exit_code=3):
run("dx update stage myworkflow 0 -inumber=foo")
with self.assertSubprocessFailure(stderr_regexp="ResolutionError", exit_code=3):
run("dx update stage myworkflow 0 --executable foo")
with self.assertSubprocessFailure(stderr_regexp="instance-type", exit_code=3):
run("dx update stage myworkflow 0 --instance-type {]")
# no-op
output = run("dx update stage myworkflow 0 --alias default --force")
self.assertIn("No updates requested", output)
# update something out of range
with self.assertSubprocessFailure(stderr_regexp="out of range", exit_code=3):
run("dx update stage /myworkflow 5 --name foo")
# remove some bad stage ID
with self.assertSubprocessFailure(stderr_regexp="nor found as a stage name", exit_code=3):
run("dx update stage /myworkflow badstageID --name foo")
# remove nonexistent stage
with self.assertSubprocessFailure(stderr_regexp="ResourceNotFound", exit_code=3):
run("dx update stage /myworkflow stage-123456789012345678901234 --name foo")


class TestDXClientFind(DXTestCase):
def test_dx_find_apps(self):
# simple test here does not assume anything about apps that do
# or do not exist
from dxpy.app_categories import APP_CATEGORIES
category_help = run("dx find apps --category-help")
for category in APP_CATEGORIES:
self.assertIn(category, category_help)
run("dx find apps --category foo") # any category can be searched
def test_dx_find_data_by_class(self):
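        # Create one data object of each class; each --class filter should
        # return exactly that object.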
ids = {"record": run("dx new record --brief").strip(),
"workflow": run("dx new workflow --brief").strip(),
"file": run("echo foo | dx upload - --brief").strip(),
"gtable": run("dx new gtable --columns col1:int --brief").strip()}
for classname in ids:
self.assertEqual(run("dx find data --brief --class " + classname).strip(),
self.project + ':' + ids[classname])
def test_dx_find_data_by_tag(self):
record_ids = [run("dx new record --brief --tag Ψ --tag foo --tag baz").strip(),
run("dx new record --brief --tag Ψ --tag foo --tag bar").strip()]
found_records = run("dx find data --tag baz --brief").strip()
self.assertEqual(found_records, dxpy.WORKSPACE_ID + ':' + record_ids[0])
found_records = run("dx find data --tag Ψ --tag foo --tag foobar --brief").strip()
self.assertEqual(found_records, '')
found_records = run("dx find data --tag foo --tag Ψ --brief").strip().split("\n")
self.assertIn(dxpy.WORKSPACE_ID + ':' + record_ids[0], found_records)
self.assertIn(dxpy.WORKSPACE_ID + ':' + record_ids[1], found_records)
def test_dx_find_data_by_property(self):
record_ids = [run("dx new record --brief " +
"--property Ψ=world --property foo=bar --property bar=").strip(),
run("dx new record --brief --property Ψ=notworld --property foo=bar").strip()]
found_records = run("dx find data --property Ψ=world --property foo=bar --brief").strip()
self.assertEqual(found_records, dxpy.WORKSPACE_ID + ':' + record_ids[0])
# presence
found_records = run("dx find data --property Ψ --brief").strip().split("\n")
self.assertIn(dxpy.WORKSPACE_ID + ':' + record_ids[0], found_records)
self.assertIn(dxpy.WORKSPACE_ID + ':' + record_ids[1], found_records)
found_records = run("dx find data --property Ψ --property foo=baz --brief").strip()
self.assertEqual(found_records, '')
found_records = run("dx find data --property Ψ --property foo=bar --brief").strip().split("\n")
self.assertIn(dxpy.WORKSPACE_ID + ':' + record_ids[0], found_records)
self.assertIn(dxpy.WORKSPACE_ID + ':' + record_ids[1], found_records)
# Empty string values should be okay
found_records = run("dx find data --property bar= --brief").strip()
self.assertEqual(found_records, dxpy.WORKSPACE_ID + ':' + record_ids[0])
# Errors parsing --property value
with self.assertSubprocessFailure(stderr_regexp='nonempty strings', exit_code=3):
run("dx find data --property ''")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx find data --property foo=bar=baz")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx find data --property =foo=bar=")
# Property keys must be nonempty
with self.assertSubprocessFailure(stderr_regexp='nonempty strings', exit_code=3):
run("dx find data --property =bar")
def test_dx_find_data_by_scope(self):
# Name of temporary project to use in test cases.
test_projectname = 'Test-Project-PTFM-7023'
# Tests for deprecated --project flag.
# Case: --project specified.
test_dirname = '/test-folder-PTFM-7023-01'
test_recordname = '/test-record-01'
with temporary_project(test_projectname) as temp_project:
test_projectid = temp_project.get_id()
record_id = run('dx new record -p --brief ' + test_projectid + ':' + test_dirname +
test_recordname).strip()
found_record_id = run('dx find data --brief --project ' + test_projectid).strip()
self.assertEqual(found_record_id, test_projectid + ':' + record_id)
# Tests for deprecated --folder flag.
# Case: --folder specified, WORKSPACE_ID set.
test_dirname = '/test-folder-PTFM-7023-02'
test_subdirname = '/test-subfolder'
test_recordname = '/test-record-02'
record_ids = [run('dx new record -p --brief ' + test_dirname + test_recordname).strip(),
run('dx new record -p --brief ' + test_dirname + test_subdirname + test_recordname).strip()]
found_record_ids = run('dx find data --brief --folder ' + test_dirname).strip().split('\n')
self.assertEqual(set(dxpy.WORKSPACE_ID + ':' + record_id for record_id in record_ids), set(found_record_ids))
# Case: --folder and --project specified.
test_dirname = '/test-folder-PTFM-7023-03'
test_recordname = '/test-record-03'
with temporary_project(test_projectname) as temp_project:
test_projectid = temp_project.get_id()
record_id = run('dx new record -p --brief ' + test_projectid + ':' + test_dirname +
test_recordname).strip()
found_record_id = run('dx find data --brief --project ' + test_projectid + ' --folder ' +
test_dirname).strip()
self.assertEqual(found_record_id, test_projectid + ':' + record_id)
# Case: --folder and --norecurse specified, WORKSPACE_ID set.
test_dirname = '/test-folder-PTFM-7023-04'
test_subdirname = '/test-subfolder'
test_recordname = '/test-record-04'
record_id = run('dx new record -p --brief ' + test_dirname + test_recordname).strip()
run('dx new record -p --brief ' + test_dirname + test_subdirname + test_recordname)
found_record_id = run('dx find data --brief --folder ' + test_dirname + ' --norecurse').strip()
self.assertEqual(found_record_id, dxpy.WORKSPACE_ID + ':' + record_id)
# Case: --folder, --project, and --norecurse specified.
test_dirname = '/test-folder-PTFM-7023-05'
test_subdirname = '/test-subfolder'
test_recordname = '/test-record-05'
with temporary_project(test_projectname) as temp_project:
test_projectid = temp_project.get_id()
record_id = run('dx new record -p --brief ' + test_projectid + ':' + test_dirname +
test_recordname).strip()
run('dx new record -p --brief ' + test_projectid + ':' + test_dirname + test_subdirname + test_recordname)
found_record_id = run('dx find data --brief --project ' + test_projectid + ' --folder ' +
test_dirname + ' --norecurse').strip()
self.assertEqual(found_record_id, test_projectid + ':' + record_id)
# Tests for --path flag.
# Case: --path specified, WORKSPACE_ID set.
test_dirname = '/test-folder-PTFM-7023-06'
test_subdirname = '/test-subfolder'
test_recordname = '/test-record-06'
run('dx new record -p --brief ' + test_recordname)
record_ids = [run('dx new record -p --brief ' + test_dirname + test_recordname).strip(),
run('dx new record -p --brief ' + test_dirname + test_subdirname + test_recordname).strip()]
found_record_ids = run('dx find data --brief --path ' + test_dirname).strip().split('\n')
self.assertEqual(set(dxpy.WORKSPACE_ID + ':' + record_id for record_id in record_ids), set(found_record_ids))
# Case: --path and --project specified.
test_dirname = '/test-folder-PTFM-7023-07'
test_recordname = '/test-record-07'
with temporary_project(test_projectname) as temp_project:
test_projectid = temp_project.get_id()
run('dx new record -p --brief ' + test_recordname)
record_id = run('dx new record -p --brief ' + test_projectid + ':' + test_dirname +
test_recordname).strip()
found_record_id = run('dx find data --brief --project ' + test_projectid + ' --path ' +
test_dirname).strip()
self.assertEqual(found_record_id, test_projectid + ':' + record_id)
# Case: --path and --norecurse specified, WORKSPACE_ID set.
test_dirname = '/test-folder-PTFM-7023-08'
test_subdirname = '/test-subfolder'
test_recordname = '/test-record-08'
record_id = run('dx new record -p --brief ' + test_dirname + test_recordname).strip()
run('dx new record -p --brief ' + test_dirname + test_subdirname + test_recordname)
found_record_id = run('dx find data --brief --path ' + test_dirname + ' --norecurse').strip()
self.assertEqual(found_record_id, dxpy.WORKSPACE_ID + ':' + record_id)
# Case: --path, --project, and --norecurse specified.
test_dirname = '/test-folder-PTFM-7023-09'
test_subdirname = '/test-subfolder'
test_recordname = '/test-record-09'
with temporary_project(test_projectname) as temp_project:
test_projectid = temp_project.get_id()
record_id = run('dx new record -p --brief ' + test_projectid + ':' + test_dirname +
test_recordname).strip()
run('dx new record -p --brief ' + test_projectid + ':' + test_dirname + test_subdirname + test_recordname)
found_record_id = run('dx find data --brief --project ' + test_projectid + ' --path ' +
test_dirname + ' --norecurse').strip()
self.assertEqual(found_record_id, test_projectid + ':' + record_id)
# Case: --path specified as PROJECTID:FOLDERPATH.
test_dirname = '/test-folder-PTFM-7023-10'
test_recordname = '/test-record-10'
with temporary_project(test_projectname) as temp_project:
test_projectid = temp_project.get_id()
record_ids = [run('dx new record -p --brief ' + test_projectid + ':' + test_dirname +
test_recordname).strip(),
run('dx new record -p --brief ' + test_projectid + ':' + test_dirname +
test_subdirname + test_recordname).strip()]
# Case: --norecurse not specified.
found_record_id = run('dx find data --brief --path ' + test_projectid + ':' +
test_dirname).strip().split('\n')
self.assertEqual(set(found_record_id), set(test_projectid + ':' + record_id for record_id in record_ids))
# Case: --norecurse specified.
found_record_id = run('dx find data --brief --path ' + test_projectid + ':' + test_dirname +
' --norecurse').strip()
self.assertEqual(found_record_id, test_projectid + ':' + record_ids[0])
# Case: --path specified as relative path, WORKSPACE_ID set.
test_dirname = '/test-folder-PTFM-7023-12'
test_subdirname = '/test-subfolder'
test_recordname = '/test-record-12'
run('dx new record -p --brief ' + test_recordname)
record_id = run('dx new record -p --brief ' + test_dirname + test_subdirname + test_recordname).strip()
cd(test_dirname)
found_record_id = run('dx find data --brief --path ' + test_subdirname[1:]).strip()
self.assertEqual(found_record_id, dxpy.WORKSPACE_ID + ':' + record_id)
run('dx clearenv')
test_dirname = '/test-folder-PTFM-7023-14'
test_recordname = '/test-record-14'
with temporary_project(test_projectname) as temp_project, select_project(None):
test_projectid = temp_project.get_id()
run('dx new record -p --brief ' + test_projectid + ':' + test_dirname + test_recordname)
# FIXME: the following test is flaky because we're not able
# to effectively unset the project using
# select_project(None). This merely unsets the environment
# variable, which doesn't work because it just allows the
# previous value of the project context (e.g. obtained from
# the user-global config) to bleed through. Therefore,
# although we run 'clearenv' above, another process can
# swoop in and set a project which is then seen in the
# subprocess call below-- contrary to our intentions. (Given
# this, the current implementation of select_project(None)
# may be completely faulty to begin with.)
#
# In order to really make this test work, we need to be able
# to encode (in the environment variable or in the config
# file) an empty project in such a way that it sticks.
#
# # Case: --path specified, WORKSPACE_ID not set (fail).
# with self.assertSubprocessFailure(stderr_regexp="if a project is not specified", exit_code=1):
# run('dx find data --brief --path ' + test_dirname)
# Case: --project and --path PROJECTID:FOLDERPATH specified (fail).
with self.assertSubprocessFailure(stderr_regexp="Cannot supply both --project and --path " +
"PROJECTID:FOLDERPATH", exit_code=3):
run('dx find data --brief --project ' + test_projectid + ' --path ' + test_projectid + ':' +
test_dirname)
# Case: --folder and --path specified (fail).
with self.assertSubprocessFailure(stderr_regexp="Cannot supply both --folder and --path", exit_code=3):
run('dx find data --brief --folder ' + test_projectid + ':' + test_dirname + ' --path ' +
test_projectid + ':' + test_dirname)
def test_dx_find_projects(self):
unique_project_name = 'dx find projects test ' + str(time.time())
with temporary_project(unique_project_name) as unique_project:
self.assertEqual(run("dx find projects --name " + pipes.quote(unique_project_name)),
unique_project.get_id() + ' : ' + unique_project_name + ' (ADMINISTER)\n')
self.assertEqual(run("dx find projects --brief --name " + pipes.quote(unique_project_name)),
unique_project.get_id() + '\n')
json_output = json.loads(run("dx find projects --json --name " + pipes.quote(unique_project_name)))
self.assertEqual(len(json_output), 1)
self.assertEqual(json_output[0]['id'], unique_project.get_id())
def test_dx_find_projects_by_created(self):
created_project_name = 'dx find projects test ' + str(time.time())
with temporary_project(created_project_name) as unique_project:
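            # --created-after accepts relative offsets such as -1d; the
            # --created-before value below is a millisecond timestamp safely in
            # the future.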
self.assertEqual(run("dx find projects --created-after=-1d --brief --name " +
pipes.quote(created_project_name)), unique_project.get_id() + '\n')
self.assertEqual(run("dx find projects --created-before=" + str(int(time.time() + 1000) * 1000) +
" --brief --name " + pipes.quote(created_project_name)),
unique_project.get_id() + '\n')
self.assertEqual(run("dx find projects --created-after=-1d --created-before=" +
str(int(time.time() + 1000) * 1000) + " --brief --name " +
pipes.quote(created_project_name)), unique_project.get_id() + '\n')
self.assertEqual(run("dx find projects --created-after=" + str(int(time.time() + 1000) * 1000) + " --name "
+ pipes.quote(created_project_name)), "")
def test_dx_find_projects_by_tag(self):
other_project_id = run("dx new project other --brief").strip()
try:
run("dx tag : Ψ world")
proj_desc = dxpy.describe(dxpy.WORKSPACE_ID)
self.assertEqual(len(proj_desc["tags"]), 2)
self.assertIn("Ψ", proj_desc["tags"])
self.assertIn("world", proj_desc["tags"])
found_projects = run("dx find projects --tag Ψ --tag world --brief").strip().split('\n')
self.assertIn(dxpy.WORKSPACE_ID, found_projects)
self.assertNotIn(other_project_id, found_projects)
found_projects = run("dx find projects --tag Ψ --tag world --tag foobar --brief").strip().split('\n')
self.assertNotIn(dxpy.WORKSPACE_ID, found_projects)
self.assertNotIn(other_project_id, found_projects)
run("dx tag " + other_project_id + " Ψ world foobar")
found_projects = run("dx find projects --tag world --tag Ψ --brief").strip().split("\n")
self.assertIn(dxpy.WORKSPACE_ID, found_projects)
self.assertIn(other_project_id, found_projects)
except:
raise
finally:
run("dx rmproject -y " + other_project_id)
def test_dx_find_projects_by_property(self):
other_project_id = run("dx new project other --brief").strip()
try:
run("dx set_properties : Ψ=world foo=bar bar=")
proj_desc = dxpy.api.project_describe(dxpy.WORKSPACE_ID, {"properties": True})
self.assertEqual(len(proj_desc["properties"]), 3)
self.assertEqual(proj_desc["properties"]["Ψ"], "world")
self.assertEqual(proj_desc["properties"]["foo"], "bar")
self.assertEqual(proj_desc["properties"]["bar"], "")
run("dx set_properties " + other_project_id + " Ψ=notworld foo=bar")
found_projects = run("dx find projects --property Ψ=world --property foo=bar --brief").strip().split("\n")
self.assertIn(dxpy.WORKSPACE_ID, found_projects)
self.assertNotIn(other_project_id, found_projects)
found_projects = run("dx find projects --property bar= --brief").strip().split('\n')
self.assertIn(dxpy.WORKSPACE_ID, found_projects)
self.assertNotIn(other_project_id, found_projects)
# presence
found_projects = run("dx find projects --property Ψ --brief").strip().split("\n")
self.assertIn(dxpy.WORKSPACE_ID, found_projects)
self.assertIn(other_project_id, found_projects)
found_projects = run("dx find projects --property Ψ --property foo=baz --brief").strip().split("\n")
self.assertNotIn(dxpy.WORKSPACE_ID, found_projects)
self.assertNotIn(other_project_id, found_projects)
found_projects = run("dx find projects --property Ψ --property foo=bar --brief").strip().split("\n")
self.assertIn(dxpy.WORKSPACE_ID, found_projects)
self.assertIn(other_project_id, found_projects)
except:
raise
finally:
run("dx rmproject -y " + other_project_id)
# Errors parsing --property value
with self.assertSubprocessFailure(stderr_regexp='nonempty strings', exit_code=3):
run("dx find projects --property ''")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx find projects --property foo=bar=baz")
with self.assertSubprocessFailure(stderr_regexp='property_key', exit_code=3):
run("dx find projects --property =foo=bar=")
# Property keys must be nonempty
with self.assertSubprocessFailure(stderr_regexp='nonempty strings', exit_code=3):
run("dx find projects --property =bar")
# Empty string values should be okay
run("dx find projects --property bar=")
def test_dx_find_projects_phi(self):
projectName = "tempProject+{t}".format(t=time.time())
with temporary_project(name=projectName) as project_1:
res = run('dx find projects --phi true --brief --name ' + pipes.quote(projectName))
            self.assertEqual(len(res), 0, "Expected no PHI projects to be found")
res = run('dx find projects --phi false --brief --name ' + pipes.quote(projectName)).strip().split('\n')
            self.assertEqual(len(res), 1, "Expected to find one project")
            self.assertEqual(res[0], project_1.get_id())
# --phi must contain one argument.
with self.assertSubprocessFailure(stderr_regexp='expected one argument', exit_code=2):
run('dx find projects --phi')
@unittest.skipUnless(testutil.TEST_RUN_JOBS,
'skipping tests that would run jobs')
def test_dx_find_jobs_by_tags_and_properties(self):
applet_id = dxpy.api.applet_new({"project": self.project,
"dxapi": "1.0.0",
"runSpec": {"interpreter": "bash",
"code": "echo 'hello'"}
})['id']
property_names = ["$my.prop", "secoиdprop", "тhird prop"]
property_values = ["$hello.world", "Σ2,n", "stuff"]
the_tags = ["Σ1=n", "helloo0", "ωω"]
job_id = run("dx run " + applet_id + ' -inumber=32 --brief -y ' +
" ".join(["--property '" + prop[0] + "'='" + prop[1] + "'" for prop in zip(property_names, property_values)]) +
"".join([" --tag " + tag for tag in the_tags])).strip()
# matches
self.assertEqual(run("dx find jobs --brief --tag " + the_tags[0]).strip(), job_id)
self.assertEqual(run("dx find jobs --brief" + "".join([" --tag " + tag for tag in the_tags])).strip(),
job_id)
self.assertEqual(run("dx find jobs --brief --property " + property_names[1]).strip(), job_id)
self.assertEqual(run("dx find jobs --brief --property '" +
property_names[1] + "'='" + property_values[1] + "'").strip(),
job_id)
self.assertEqual(run("dx find jobs --brief" +
"".join([" --property '" + key + "'='" + value + "'" for
key, value in zip(property_names, property_values)])).strip(),
job_id)
# no matches
self.assertEqual(run("dx find jobs --brief --tag foo").strip(), "")
self.assertEqual(run("dx find jobs --brief --property foo").strip(), "")
self.assertEqual(run("dx find jobs --brief --property '" +
property_names[1] + "'=badvalue").strip(), "")
@unittest.skipUnless(testutil.TEST_RUN_JOBS,
'skipping test that would run a job')
def test_find_executions(self):
dxapplet = dxpy.DXApplet()
dxapplet.new(name="test_applet",
dxapi="1.0.0",
inputSpec=[{"name": "chromosomes", "class": "record"},
{"name": "rowFetchChunk", "class": "int"}
],
outputSpec=[{"name": "mappings", "class": "record"}],
runSpec={"code": "def main(): pass",
"interpreter": "python2.7",
"execDepends": [{"name": "python-numpy"}]})
dxrecord = dxpy.new_dxrecord()
dxrecord.close()
prog_input = {"chromosomes": {"$dnanexus_link": dxrecord.get_id()},
"rowFetchChunk": 100}
dxworkflow = dxpy.new_dxworkflow(name='find_executions test workflow')
stage = dxworkflow.add_stage(dxapplet, stage_input=prog_input)
dxanalysis = dxworkflow.run({stage+".rowFetchChunk": 200},
tags=["foo"],
properties={"foo": "bar"})
dxapplet.run(applet_input=prog_input)
dxjob = dxapplet.run(applet_input=prog_input,
tags=["foo", "bar"],
properties={"foo": "baz"})
cd("{project_id}:/".format(project_id=dxapplet.get_proj_id()))
# Wait for job to be created
executions = [stage['execution']['id'] for stage in dxanalysis.describe()['stages']]
t = 0
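        # job_describe raises DXAPIError until the platform has created the job
        # backing the analysis stage; poll each execution, retrying for up to
        # ~20 seconds in total.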
while len(executions) > 0:
try:
                dxpy.api.job_describe(executions[-1], {})
executions.pop()
except DXAPIError:
t += 1
if t > 20:
raise Exception("Timeout while waiting for job to be created for an analysis stage")
time.sleep(1)
options = "--user=self"
self.assertEqual(len(run("dx find executions "+options).splitlines()), 8)
self.assertEqual(len(run("dx find jobs "+options).splitlines()), 6)
self.assertEqual(len(run("dx find analyses "+options).splitlines()), 2)
options += " --project="+dxapplet.get_proj_id()
self.assertEqual(len(run("dx find executions "+options).splitlines()), 8)
self.assertEqual(len(run("dx find jobs "+options).splitlines()), 6)
self.assertEqual(len(run("dx find analyses "+options).splitlines()), 2)
options += " --created-after=-150s --no-subjobs --applet="+dxapplet.get_id()
self.assertEqual(len(run("dx find executions "+options).splitlines()), 8)
self.assertEqual(len(run("dx find jobs "+options).splitlines()), 6)
self.assertEqual(len(run("dx find analyses "+options).splitlines()), 2)
options2 = options + " --brief -n 9000"
self.assertEqual(len(run("dx find executions "+options2).splitlines()), 4)
self.assertEqual(len(run("dx find jobs "+options2).splitlines()), 3)
self.assertEqual(len(run("dx find analyses "+options2).splitlines()), 1)
options3 = options2 + " --origin="+dxjob.get_id()
self.assertEqual(len(run("dx find executions "+options3).splitlines()), 1)
self.assertEqual(len(run("dx find jobs "+options3).splitlines()), 1)
self.assertEqual(len(run("dx find analyses "+options3).splitlines()), 0)
options3 = options2 + " --root="+dxanalysis.get_id()
self.assertEqual(len(run("dx find executions "+options3).splitlines()), 2)
self.assertEqual(len(run("dx find jobs "+options3).splitlines()), 1)
self.assertEqual(len(run("dx find analyses "+options3).splitlines()), 1)
options2 = options + " --origin-jobs"
self.assertEqual(len(run("dx find executions "+options2).splitlines()), 8)
self.assertEqual(len(run("dx find jobs "+options2).splitlines()), 6)
self.assertEqual(len(run("dx find analyses "+options2).splitlines()), 2)
options2 = options + " --origin-jobs -n 9000"
self.assertEqual(len(run("dx find executions "+options2).splitlines()), 8)
self.assertEqual(len(run("dx find jobs "+options2).splitlines()), 6)
self.assertEqual(len(run("dx find analyses "+options2).splitlines()), 2)
options2 = options + " --all-jobs"
self.assertEqual(len(run("dx find executions "+options2).splitlines()), 8)
self.assertEqual(len(run("dx find jobs "+options2).splitlines()), 6)
self.assertEqual(len(run("dx find analyses "+options2).splitlines()), 2)
options2 = options + " --state=done"
self.assertEqual(len(run("dx find executions "+options2).splitlines()), 0)
self.assertEqual(len(run("dx find jobs "+options2).splitlines()), 0)
self.assertEqual(len(run("dx find analyses "+options2).splitlines()), 0)
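        # Helper: run a `dx find ... --brief` command and compare the returned IDs
        # as a set, since output ordering is not significant here.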
def assert_cmd_gives_ids(cmd, ids):
self.assertEqual(set(execid.strip() for execid in run(cmd).splitlines()),
set(ids))
# Search by tag
options2 = options + " --all-jobs --brief"
options3 = options2 + " --tag foo"
analysis_id = dxanalysis.get_id()
job_id = dxjob.get_id()
assert_cmd_gives_ids("dx find executions "+options3, [analysis_id, job_id])
assert_cmd_gives_ids("dx find jobs "+options3, [job_id])
assert_cmd_gives_ids("dx find analyses "+options3, [analysis_id])
options3 = options2 + " --tag foo --tag bar"
assert_cmd_gives_ids("dx find executions "+options3, [job_id])
assert_cmd_gives_ids("dx find jobs "+options3, [job_id])
assert_cmd_gives_ids("dx find analyses "+options3, [])
# Search by property (presence and by value)
options3 = options2 + " --property foo"
assert_cmd_gives_ids("dx find executions "+options3, [analysis_id, job_id])
assert_cmd_gives_ids("dx find jobs "+options3, [job_id])
assert_cmd_gives_ids("dx find analyses "+options3, [analysis_id])
options3 = options2 + " --property foo=baz"
assert_cmd_gives_ids("dx find executions "+options3, [job_id])
assert_cmd_gives_ids("dx find jobs "+options3, [job_id])
assert_cmd_gives_ids("dx find analyses "+options3, [])
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that requires presence of test org')
def test_find_orgs(self):
org_with_billable_activities = "org-members_with_billing_rights"
self.assertTrue(dxpy.api.org_describe(org_with_billable_activities)["allowBillableActivities"])
org_without_billable_activities = "org-members_without_billing_rights"
self.assertFalse(dxpy.api.org_describe(org_without_billable_activities)["allowBillableActivities"])
org_with_admin = "org-piratelabs"
        self.assertEqual(dxpy.api.org_describe(org_with_admin)["level"], "ADMIN")
cmd = "dx find orgs --level {l} {o} --json"
results = json.loads(run(cmd.format(l="MEMBER", o="")).strip())
self.assertItemsEqual([org_with_billable_activities,
org_without_billable_activities,
org_with_admin],
[result["id"] for result in results])
results = json.loads(run(cmd.format(
l="MEMBER", o="--with-billable-activities")).strip())
self.assertItemsEqual([org_with_billable_activities,
org_with_admin],
[result["id"] for result in results])
results = json.loads(run(cmd.format(
l="MEMBER", o="--without-billable-activities")).strip())
self.assertItemsEqual([org_without_billable_activities],
[result["id"] for result in results])
results = json.loads(run(cmd.format(l="ADMIN", o="")).strip())
self.assertItemsEqual([org_with_admin],
[result["id"] for result in results])
results = json.loads(run(cmd.format(
l="ADMIN", o="--with-billable-activities")).strip())
self.assertItemsEqual([org_with_admin],
[result["id"] for result in results])
results = json.loads(run(cmd.format(
l="ADMIN", o="--without-billable-activities")).strip())
self.assertItemsEqual([], [result["id"] for result in results])
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that requires presence of test org')
def test_find_orgs_format(self):
cmd = "dx find orgs --level MEMBER {o}"
# Assert that only org ids are returned, line-separated.
results = run(cmd.format(o="--brief")).strip().split("\n")
pattern = re.compile("^org-[a-zA-Z0-9_]*$")
for result in results:
self.assertTrue(pattern.match(result))
# Assert that the return format is like: "<org_id><delim><org_name>"
results = run(cmd.format(o="")).strip().split("\n")
pattern = re.compile("^org-[a-zA-Z0-9_]* : .*$")
for result in results:
self.assertTrue(pattern.match(result))
results = run(cmd.format(o="--delim ' @ '")).strip().split("\n")
pattern = re.compile("^org-[a-zA-Z0-9_]* @ .*$")
for result in results:
self.assertTrue(pattern.match(result))
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that requires presence of test org, project, and user')
class TestDXClientFindInOrg(DXTestCase):
@classmethod
def setUpClass(cls):
cls.org_id = "org-piratelabs"
cls.user_alice = "user-000000000000000000000000" # ADMIN
cls.user_bob = "user-000000000000000000000001"
        dxpy.api.org_invite(cls.org_id, {"invitee": cls.user_bob})  # Invite user_bob as a MEMBER of org-piratelabs
cls.project_ppb = "project-0000000000000000000000pb" # public project in "org-piratelabs"
@classmethod
def tearDownClass(cls):
dxpy.api.org_remove_member(cls.org_id, {"user": cls.user_bob})
def test_dx_find_org_members_negative(self):
# No org id
with self.assertSubprocessFailure(stderr_regexp='dx find org members: error: too few arguments', exit_code=2):
run("dx find org members")
# No input to --level
with self.assertSubprocessFailure(stderr_regexp='error: argument --level: expected one argument', exit_code=2):
run("dx find org members org-piratelabs --level")
def test_dx_find_org_members(self):
org_members = [self.user_alice, self.user_bob] # sorted ascending by user ID
org_members.sort()
        # Basic test: check that the client output is consistent with a direct API call
output = run("dx find org members org-piratelabs --brief").strip().split("\n")
dx_api_output = dxpy.api.org_find_members(self.org_id)
self.assertEqual(output, [member['id'] for member in dx_api_output['results']])
self.assertEqual(output, org_members)
# With --level flag
output = run("dx find org members org-piratelabs --level {l} --brief".format(l="ADMIN")).strip().split("\n")
self.assertItemsEqual(output, [self.user_alice])
output = run("dx find org members org-piratelabs --level {l} --brief".format(l="MEMBER")).strip().split("\n")
self.assertItemsEqual(output, [self.user_bob])
def test_dx_find_org_members_format(self):
cmd = "dx find org members org-piratelabs {opts}"
# Assert that only member ids are returned, line-separated
output = run(cmd.format(opts="--brief")).strip().split("\n")
pattern = "^user-[a-zA-Z0-9]*$"
for result in output:
self.assertRegex(result, pattern)
# Assert that return format is like: "<user_id> : <user_name> (<level>)"
levels = "(?:ADMIN|MEMBER)"
output = run(cmd.format(opts="")).strip().split("\n")
pattern = "^user-[a-zA-Z0-9]* : .* \(" + levels + "\)$"
for result in output:
self.assertRegex(result, pattern)
# Test --json output
# TODO: Deprecate 'createProjectsAndApps'
output = json.loads(run(cmd.format(opts='--json')))
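        # Build the expected "describe" values with a restricted field set
        # (assumed to match the fields that `dx find org members --json` returns).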
query_user_describe = {"fields": {"class": True, "first": True, "last": True, "middle": True, "handle": True}}
expected = [{"appAccess": True,
"projectAccess": "ADMINISTER",
"level": "ADMIN",
"createProjectsAndApps": True,
"allowBillableActivities": True,
"id": self.user_alice,
"describe": dxpy.api.user_describe(self.user_alice, query_user_describe)},
{"appAccess": True,
"projectAccess": "CONTRIBUTE",
"createProjectsAndApps": False,
"allowBillableActivities": False,
"level": "MEMBER",
"id": self.user_bob,
"describe": dxpy.api.user_describe(self.user_bob, query_user_describe)}]
self.assertEqual(output, expected)
def test_dx_find_org_projects_invalid(self):
cmd = "dx find org projects org-irrelevant {opts}"
# --ids must contain at least one id.
with self.assertSubprocessFailure(stderr_regexp='expected at least one argument', exit_code=2):
run(cmd.format(opts="--ids"))
        # --tag requires an argument.
with self.assertSubprocessFailure(stderr_regexp='expected one argument', exit_code=2):
run(cmd.format(opts="--tag"))
        # --property requires an argument.
with self.assertSubprocessFailure(stderr_regexp='expected one argument', exit_code=2):
run(cmd.format(opts="--property"))
# Only one of --public-only and --private-only may be specified.
with self.assertSubprocessFailure(stderr_regexp='not allowed with argument', exit_code=2):
run(cmd.format(opts="--public-only --private-only"))
# --phi must contain one argument.
with self.assertSubprocessFailure(stderr_regexp='expected one argument', exit_code=2):
run(cmd.format(opts="--phi"))
def test_dx_find_org_projects(self):
with temporary_project() as project_1, temporary_project() as project_2:
project1_id = project_1.get_id()
project2_id = project_2.get_id() # project not billed to org
org_projects = [self.project_ppb, project1_id]
dxpy.api.project_update(project1_id, {"billTo": self.org_id})
self.assertEqual(dxpy.api.project_describe(project1_id)['billTo'], self.org_id)
            # Basic test: check that the client output is consistent with a direct API call
output = run("dx find org projects org-piratelabs --brief").strip().split("\n")
dx_api_output = dxpy.api.org_find_projects(self.org_id)
self.assertEqual(output, [result['id'] for result in dx_api_output['results']])
self.assertItemsEqual(output, org_projects)
# With --ids flag
output = run("dx find org projects org-piratelabs --ids {p}".format(p=project2_id)).strip().split("\n")
self.assertItemsEqual(output, [''])
output = run("dx find org projects org-piratelabs --ids {p} --brief".format(
p=project1_id)).strip().split("\n")
self.assertItemsEqual(output, [project1_id])
output = run("dx find org projects org-piratelabs --ids {p1} {p2} --brief".format(p1=project1_id,
p2=project2_id)).strip().split("\n")
self.assertItemsEqual(output, [project1_id])
# With --tag
dxpy.api.project_add_tags(project1_id, {'tags': ['tag-1', 'tag-2']})
dxpy.api.project_add_tags(project2_id, {'tags': ['tag-1', 'tag-2']})
output = run("dx find org projects org-piratelabs --tag {t1} --brief".format(
t1='tag-1')).strip().split("\n")
self.assertEqual(output, [project1_id])
# With multiple --tag
output = run("dx find org projects org-piratelabs --tag {t1} --tag {t2} --brief".format(t1='tag-1',
t2='tag-2')).strip().split("\n")
self.assertEqual(output, [project1_id])
output = run("dx find org projects org-piratelabs --tag {t1} --tag {t2} --brief".format(t1='tag-1',
t2='tag-3')).strip().split("\n")
self.assertEqual(output, [""])
# With --property
dxpy.api.project_set_properties(project1_id, {'properties': {'property-1': 'value1', 'property-2':
'value2'}})
dxpy.api.project_set_properties(project2_id, {'properties': {'property-1': 'value1', 'property-2':
'value2'}})
output = run("dx find org projects org-piratelabs --property {p1} --brief".format(
p1='property-1')).strip().split("\n")
self.assertItemsEqual(output, [project1_id])
# With multiple --property
output = run("dx find org projects org-piratelabs --property {p1} --property {p2} --brief".format(
p1='property-1', p2='property-2')).strip().split("\n")
self.assertItemsEqual(output, [project1_id])
output = run("dx find org projects org-piratelabs --property {p1} --property {p2} --brief".format(
p1='property-1', p2='property-3')).strip().split("\n")
self.assertItemsEqual(output, [""])
def test_dx_find_org_projects_public(self):
with temporary_project() as p1, temporary_project() as p2:
# Private project in `org_id`.
private_project_id = p1.get_id()
dxpy.api.project_update(private_project_id, {"billTo": self.org_id})
            # Sanity check that `p2` exists; it is not billed to the org, so it should not appear in the results below.
self.assertEqual(dxpy.api.project_describe(p2.get_id(), {})["level"], "ADMINISTER")
cmd = "dx find org projects org-piratelabs {opts} --brief"
output = run(cmd.format(opts="")).strip().split("\n")
self.assertItemsEqual(output, [private_project_id, self.project_ppb])
output = run(cmd.format(opts="--public-only")).strip().split("\n")
self.assertItemsEqual(output, [self.project_ppb])
output = run(cmd.format(opts="--private-only")).strip().split("\n")
self.assertItemsEqual(output, [private_project_id])
def test_dx_find_org_projects_created(self):
with temporary_project() as unique_project:
project_id = unique_project.get_id()
org_projects = [self.project_ppb, project_id]
dxpy.api.project_update(project_id, {"billTo": self.org_id})
created = dxpy.api.project_describe(project_id)['created']
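            # `created` is a millisecond timestamp, so the +/-1000 offsets below
            # bracket the new project's creation time by one second on either side.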
# Test integer time stamp
self.assertItemsEqual(run("dx find org projects org-piratelabs --created-before={cb} --brief".format(
cb=str(created + 1000))).strip().split("\n"), org_projects)
self.assertItemsEqual(run("dx find org projects org-piratelabs --created-after={ca} --brief".format(
ca=str(created - 1000))).strip().split("\n"), [project_id])
self.assertItemsEqual(run("dx find org projects org-piratelabs --created-after={ca} --created-before={cb} --brief".format(
ca=str(created - 1000), cb=str(created + 1000))).strip().split("\n"), [project_id])
self.assertItemsEqual(run("dx find org projects org-piratelabs --created-before={cb} --brief".format(
cb=str(created - 1000))).strip().split("\n"), [self.project_ppb])
# Test integer with suffix
self.assertItemsEqual(run("dx find org projects org-piratelabs --created-before={cb} --brief".format(
cb="-1d")).strip().split("\n"), [self.project_ppb])
self.assertItemsEqual(run("dx find org projects org-piratelabs --created-after={ca} --brief".format(
ca="-1d")).strip().split("\n"), [project_id])
# Test date
self.assertItemsEqual(run("dx find org projects org-piratelabs --created-before={cb} --brief".format(
cb="2015-10-28")).strip().split("\n"), [self.project_ppb])
self.assertItemsEqual(run("dx find org projects org-piratelabs --created-after={ca} --brief".format(
ca="2015-10-28")).strip().split("\n"), [project_id])
def test_dx_find_org_projects_format(self):
cmd = "dx find org projects org-piratelabs {opts}"
# Assert that only project ids are returned, line-separated
output = run(cmd.format(opts="--brief")).strip().split("\n")
pattern = "^project-[a-zA-Z0-9]{24}$"
for result in output:
self.assertRegex(result, pattern)
        # Assert that return format is like: "<project_id> : <project_name> (<level>)"
levels = "(?:ADMINISTER|CONTRIBUTE|UPLOAD|VIEW|NONE)"
output = run(cmd.format(opts="")).strip().split("\n")
pattern = "^project-[a-zA-Z0-9]{24} : .* \(" + levels + "\)$"
for result in output:
self.assertRegex(result, pattern)
# Test --json output
output = json.loads(run(cmd.format(opts="--json")))
expected = [{"id": self.project_ppb,
"level": "ADMINISTER",
"public": True,
"describe": dxpy.api.project_describe(self.project_ppb)}]
self.assertEqual(output, expected)
def test_dx_find_org_projects_phi(self):
projectName = "tempProject+{t}".format(t=time.time())
with temporary_project(name=projectName) as project_1:
project1_id = project_1.get_id()
dxpy.api.project_update(project1_id, {"billTo": self.org_id})
res = run('dx find org projects org-piratelabs --phi true --brief --name ' + pipes.quote(projectName))
self.assertTrue(len(res) == 0, "Expected no PHI projects to be found")
res = run('dx find org projects org-piratelabs --phi false --brief --name ' + pipes.quote(projectName)).strip().split("\n")
self.assertTrue(len(res) == 1, "Expected to find one project")
self.assertEqual(res[0], project1_id)
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping tests that require org creation')
class TestDXClientOrg(DXTestCase):
@classmethod
def get_unique_org_handle(cls):
return "dx_test_new_org_{t}_{r}".format(t=int(time.time()), r=random.randint(0, 32768))
def setUp(self):
org_handle = TestDXClientOrg.get_unique_org_handle()
org_new_input = {"name": org_handle, "handle": org_handle}
self.org_id = dxpy.api.org_new(org_new_input)["id"]
super(TestDXClientOrg, self).setUp()
def test_create_new_org_negative(self):
# No handle supplied
with self.assertRaisesRegexp(subprocess.CalledProcessError, "error: argument --handle is required"):
run('dx new org')
with self.assertRaisesRegexp(subprocess.CalledProcessError, "error: argument --handle is required"):
run('dx new org "Test Org"')
with self.assertRaisesRegexp(subprocess.CalledProcessError, "error: argument --handle is required"):
run('dx new org --member-list-visibility MEMBER')
with self.assertRaisesRegexp(subprocess.CalledProcessError, "error: argument --handle is required"):
run('dx new org --project-transfer-ability MEMBER')
with self.assertRaisesRegexp(subprocess.CalledProcessError, "error: argument --handle is required"):
run('dx new org --member-list-visibility ADMIN --project-transfer-ability MEMBER')
with self.assertRaisesRegexp(subprocess.CalledProcessError,
"error: argument --member-list-visibility: invalid choice"):
run('dx new org --member-list-visibility NONE')
def test_create_new_org(self):
# Basic test with only required input args; optional input arg defaults propagated properly.
org_handle = TestDXClientOrg.get_unique_org_handle()
org_id = run('dx new org "Test New Org" --handle {h} --brief'.format(h=org_handle)).strip()
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org")
self.assertEqual(res['policies']['memberListVisibility'], "ADMIN")
self.assertEqual(res['policies']['restrictProjectTransfer'], "ADMIN")
# Test --member-list-visibility flag
org_handle = TestDXClientOrg.get_unique_org_handle()
policy_mlv = "MEMBER"
org_id = run('dx new org "Test New Org" --handle {h} --member-list-visibility {mlv} --brief'
.format(h=org_handle, mlv=policy_mlv)).strip()
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org")
self.assertEqual(res['policies']['memberListVisibility'], policy_mlv)
self.assertEqual(res['policies']['restrictProjectTransfer'], "ADMIN")
org_handle = TestDXClientOrg.get_unique_org_handle()
policy_mlv = "PUBLIC"
org_id = run('dx new org "Test New Org" --handle {h} --member-list-visibility {mlv} --brief'
.format(h=org_handle, mlv=policy_mlv)).strip()
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org")
self.assertEqual(res['policies']['memberListVisibility'], policy_mlv)
self.assertEqual(res['policies']['restrictProjectTransfer'], "ADMIN")
# Test --project-transfer-ability flag
org_handle = TestDXClientOrg.get_unique_org_handle()
policy_pta = "MEMBER"
org_id = run('dx new org "Test New Org" --handle {h} --project-transfer-ability {pta} --brief'
.format(h=org_handle, pta=policy_pta)).strip()
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org")
self.assertEqual(res['policies']['memberListVisibility'], "ADMIN")
self.assertEqual(res['policies']['restrictProjectTransfer'], policy_pta)
# Assert non-brief output format
org_handle = TestDXClientOrg.get_unique_org_handle()
output = run('dx new org "Test New Org" --handle {h}'.format(h=org_handle)).strip()
        self.assertEqual(output, 'Created new org called "Test New Org" (org-' + org_handle + ')')
def test_create_new_org_prompt(self):
# Prompt with only handle
org_handle = TestDXClientOrg.get_unique_org_handle()
dx_new_org = pexpect.spawn('dx new org --handle {h}'.format(h=org_handle), logfile=sys.stderr)
dx_new_org.expect('Enter descriptive name')
dx_new_org.sendline("Test New Org Prompt")
dx_new_org.expect('Created new org')
org_id = "org-" + org_handle
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org Prompt")
self.assertEqual(res['policies']["memberListVisibility"], "ADMIN")
self.assertEqual(res['policies']["restrictProjectTransfer"], "ADMIN")
# Prompt with "--member-list-visibility" & "--handle"
org_handle = TestDXClientOrg.get_unique_org_handle()
dx_new_org = pexpect.spawn('dx new org --handle {h} --member-list-visibility {mlv}'.format(h=org_handle,
mlv="PUBLIC"), logfile=sys.stderr)
dx_new_org.expect('Enter descriptive name')
dx_new_org.sendline("Test New Org Prompt")
dx_new_org.expect('Created new org')
org_id = "org-" + org_handle
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org Prompt")
self.assertEqual(res['policies']["memberListVisibility"], "PUBLIC")
self.assertEqual(res['policies']["restrictProjectTransfer"], "ADMIN")
org_handle = TestDXClientOrg.get_unique_org_handle()
dx_new_org = pexpect.spawn('dx new org --handle {h} --member-list-visibility {mlv}'.format(h=org_handle,
mlv="MEMBER"), logfile=sys.stderr)
dx_new_org.expect('Enter descriptive name')
dx_new_org.sendline("Test New Org Prompt")
dx_new_org.expect('Created new org')
org_id = "org-" + org_handle
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org Prompt")
self.assertEqual(res['policies']["memberListVisibility"], "MEMBER")
self.assertEqual(res['policies']["restrictProjectTransfer"], "ADMIN")
org_handle = TestDXClientOrg.get_unique_org_handle()
dx_new_org = pexpect.spawn('dx new org --handle {h} --member-list-visibility {mlv}'.format(h=org_handle,
mlv="ADMIN"), logfile=sys.stderr)
dx_new_org.expect('Enter descriptive name')
dx_new_org.sendline("Test New Org Prompt")
dx_new_org.expect('Created new org')
org_id = "org-" + org_handle
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org Prompt")
self.assertEqual(res['policies']["memberListVisibility"], "ADMIN")
self.assertEqual(res['policies']["restrictProjectTransfer"], "ADMIN")
# Prompt with "--project-transfer-ability" & "handle"
org_handle = TestDXClientOrg.get_unique_org_handle()
dx_new_org = pexpect.spawn('dx new org --handle {h} --project-transfer-ability {pta}'.format(h=org_handle,
pta="MEMBER"), logfile=sys.stderr)
dx_new_org.expect('Enter descriptive name')
dx_new_org.sendline("Test New Org Prompt")
dx_new_org.expect('Created new org')
org_id = "org-" + org_handle
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org Prompt")
self.assertEqual(res['policies']["memberListVisibility"], "ADMIN")
self.assertEqual(res['policies']["restrictProjectTransfer"], "MEMBER")
org_handle = TestDXClientOrg.get_unique_org_handle()
dx_new_org = pexpect.spawn('dx new org --handle {h} --project-transfer-ability {pta}'.format(h=org_handle,
pta="ADMIN"), logfile=sys.stderr)
dx_new_org.expect('Enter descriptive name')
dx_new_org.sendline("Test New Org Prompt")
dx_new_org.expect('Created new org')
org_id = "org-" + org_handle
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org Prompt")
self.assertEqual(res['policies']["memberListVisibility"], "ADMIN")
self.assertEqual(res['policies']["restrictProjectTransfer"], "ADMIN")
# Prompt with "--member-list-visibility", "--project-transfer-ability", & "--handle"
org_handle = TestDXClientOrg.get_unique_org_handle()
dx_new_org = pexpect.spawn('dx new org --handle {h} --member-list-visibility {p} --project-transfer-ability {p}'.format(
h=org_handle, p="MEMBER"), logfile=sys.stderr)
dx_new_org.expect('Enter descriptive name')
dx_new_org.sendline("Test New Org Prompt")
dx_new_org.expect('Created new org')
org_id = "org-" + org_handle
res = dxpy.api.org_describe(org_id)
self.assertEqual(res['handle'], org_handle)
self.assertEqual(res['name'], "Test New Org Prompt")
self.assertEqual(res['policies']["memberListVisibility"], "MEMBER")
self.assertEqual(res['policies']["restrictProjectTransfer"], "MEMBER")
def test_org_update_negative(self):
# Org id is required.
invalid_cmds = ["dx update org",
"dx update org --name foo --member-list-visibility ADMIN --project-transfer-ability ADMIN"]
for invalid_cmd in invalid_cmds:
with self.assertSubprocessFailure(stderr_regexp="too few arguments", exit_code=2):
run(invalid_cmd)
# --project-transfer-ability may not be PUBLIC.
with self.assertSubprocessFailure(stderr_regexp="--project-transfer-ability.*invalid", exit_code=2):
run("dx update org {o} --project-transfer-ability PUBLIC".format(o=self.org_id))
def test_org_update(self):
def get_name_and_policies(org_id=None):
if org_id is None:
org_id = self.org_id
org_desc = dxpy.api.org_describe(org_id)
return (org_desc["name"], org_desc["policies"])
# ---Regression tests---
# Do not need to actually update the org at all.
cur_org_name, cur_org_policies = get_name_and_policies()
res = run('dx update org {o} --brief'.format(o=self.org_id)).strip()
self.assertEqual(res, self.org_id)
new_org_name, new_org_policies = get_name_and_policies(res)
self.assertEqual(new_org_name, cur_org_name)
self.assertEqual(new_org_policies, cur_org_policies)
# --name.
cur_org_name, cur_org_policies = new_org_name, new_org_policies
proposed_org_name = "foo"
self.assertNotEqual(proposed_org_name, cur_org_name)
res = run('dx update org {o} --name "{n}" --brief'.format(o=self.org_id, n=proposed_org_name)).strip()
self.assertEqual(res, self.org_id)
new_org_name, new_org_policies = get_name_and_policies(res)
self.assertEqual(new_org_name, proposed_org_name)
self.assertEqual(new_org_policies, cur_org_policies)
# --member-list-visibility.
cur_org_name, cur_org_policies = new_org_name, new_org_policies
proposed_mlv = "MEMBER"
self.assertNotEqual(proposed_mlv, cur_org_policies["memberListVisibility"])
exp_org_policies = dict(cur_org_policies, memberListVisibility=proposed_mlv)
res = run('dx update org {o} --member-list-visibility {p} --brief'.format(o=self.org_id,
p=proposed_mlv)).strip()
self.assertEqual(res, self.org_id)
new_org_name, new_org_policies = get_name_and_policies(res)
self.assertEqual(new_org_name, cur_org_name)
self.assertEqual(new_org_policies, exp_org_policies)
cur_org_name, cur_org_policies = new_org_name, new_org_policies
proposed_mlv = "PUBLIC"
self.assertNotEqual(proposed_mlv, cur_org_policies["memberListVisibility"])
exp_org_policies = dict(cur_org_policies, memberListVisibility=proposed_mlv)
res = run('dx update org {o} --member-list-visibility {p} --brief'.format(o=self.org_id,
p=proposed_mlv)).strip()
self.assertEqual(res, self.org_id)
new_org_name, new_org_policies = get_name_and_policies(res)
self.assertEqual(new_org_name, cur_org_name)
self.assertEqual(new_org_policies, exp_org_policies)
# --project-transfer-ability.
cur_org_name, cur_org_policies = new_org_name, new_org_policies
proposed_pta = "ADMIN"
self.assertNotEqual(proposed_pta, cur_org_policies["restrictProjectTransfer"])
exp_org_policies = dict(cur_org_policies, restrictProjectTransfer=proposed_pta)
res = run('dx update org {o} --project-transfer-ability {p} --brief'.format(o=self.org_id,
p=proposed_pta)).strip()
self.assertEqual(res, self.org_id)
new_org_name, new_org_policies = get_name_and_policies(res)
self.assertEqual(new_org_name, cur_org_name)
self.assertEqual(new_org_policies, exp_org_policies)
# All args.
cur_org_name, cur_org_policies = new_org_name, new_org_policies
proposed_org_name = "bar"
proposed_mlv = "ADMIN"
proposed_pta = "MEMBER"
exp_org_policies = dict(cur_org_policies, memberListVisibility=proposed_mlv,
restrictProjectTransfer=proposed_pta)
res = run('dx update org {o} --name {n} --member-list-visibility {mlv} --project-transfer-ability {pta} --brief'.format(
o=self.org_id, n=proposed_org_name, mlv=proposed_mlv, pta=proposed_pta)).strip()
self.assertEqual(res, self.org_id)
new_org_name, new_org_policies = get_name_and_policies(res)
self.assertEqual(new_org_name, proposed_org_name)
self.assertEqual(new_org_policies, exp_org_policies)
def test_org_update_format(self):
res = run('dx update org {o}'.format(o=self.org_id)).strip()
self.assertRegex(res, "^Updated.*{o}$".format(o=self.org_id))
res = run('dx update org {o} --brief'.format(o=self.org_id)).strip()
self.assertEqual(res, self.org_id)
class TestDXClientNewProject(DXTestCase):
def test_dx_new_project_with_region(self):
project_id = run("dx new project --brief --region aws:us-east-1 ProjectInUSEast").strip()
        self.assertEqual(dxpy.api.project_describe(project_id, {})['region'], "aws:us-east-1")
dxpy.api.project_destroy(project_id, {})
with self.assertRaisesRegexp(subprocess.CalledProcessError, "InvalidInput"):
run("dx new project --brief --region aws:not-a-region InvalidRegionProject")
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that requires presence of test org')
def test_dx_create_new_project_with_bill_to(self):
curr_bill_to = dxpy.api.user_describe(dxpy.whoami())['billTo']
alice_id = "user-000000000000000000000000"
org_id = "org-piratelabs"
project_name = "test_dx_create_project"
# Check that requesting user has allowBillableActivities permission in org
member_access = dxpy.api.org_describe(org_id)
self.assertTrue(member_access['level'] == 'ADMIN' or member_access['allowBillableActivities'])
# Check that billTo of requesting user is the requesting user
dxpy.api.user_update(dxpy.whoami(), {'billTo': alice_id})
        self.assertEqual(dxpy.api.user_describe(dxpy.whoami())['billTo'], alice_id)
# Create project billTo org
project_id = run("dx new project {name} --bill-to {billTo} --brief".format(name=project_name,
billTo=org_id)).strip()
        self.assertEqual(dxpy.api.project_describe(project_id, {'fields': {'billTo': True}})['billTo'], org_id)
dxpy.api.project_destroy(project_id)
# Create project billTo requesting user
project_id = run("dx new project {name} --bill-to {billTo} --brief".format(name=project_name,
billTo=dxpy.whoami())).strip()
        self.assertEqual(dxpy.api.project_describe(project_id, {'fields': {'billTo': True}})['billTo'], dxpy.whoami())
dxpy.api.project_destroy(project_id)
# Create project billTo invalid org
with self.assertSubprocessFailure(stderr_regexp='ResourceNotFound', exit_code=3):
run("dx new project {name} --bill-to {billTo} --brief".format(name=project_name, billTo='org-invalid'))
# With user's billTo set to org
dxpy.api.user_update(dxpy.whoami(), {'billTo': org_id})
self.assertEqual(dxpy.api.user_describe(dxpy.whoami())['billTo'], org_id)
project_id = run("dx new project {name} --bill-to {billTo} --brief".format(name=project_name,
billTo=dxpy.whoami())).strip()
        self.assertEqual(dxpy.api.project_describe(project_id, {'fields': {'billTo': True}})['billTo'], dxpy.whoami())
dxpy.api.project_destroy(project_id)
project_id = run("dx new project {name} --bill-to {billTo} --brief".format(name=project_name,
billTo=org_id)).strip()
        self.assertEqual(dxpy.api.project_describe(project_id, {'fields': {'billTo': True}})['billTo'], org_id)
dxpy.api.project_destroy(project_id)
# reset original user settings
dxpy.api.user_update(dxpy.whoami(), {'billTo': curr_bill_to})
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV and testutil.TEST_WITH_AUTHSERVER,
'skipping tests that require presence of test org and running authserver')
class TestDXClientNewUser(DXTestCase):
def _now(self):
return str(int(time.time()))
def _assert_user_desc(self, user_id, exp_user_desc):
user_desc = dxpy.api.user_describe(user_id)
for field in exp_user_desc:
self.assertEqual(user_desc[field], exp_user_desc[field])
def setUp(self):
self.org_id = "org-piratelabs"
super(TestDXClientNewUser, self).setUp()
def tearDown(self):
super(TestDXClientNewUser, self).tearDown()
def test_create_user_account_and_set_bill_to_negative(self):
username, email = generate_unique_username_email()
first = "Asset"
cmd = "dx new user"
called_process_error_opts = [
"",
"--username {u}".format(u=username),
"--email {e}".format(e=email),
"--username {u} --email {e} --first {f} \
--token-duration {t}".format(u=username, e=email, f=first,
t="not_an_int"),
]
for invalid_opts in called_process_error_opts:
with self.assertRaises(subprocess.CalledProcessError):
run(" ".join([cmd, invalid_opts]))
dx_api_error_opts = [
"--username {u} --email {e}".format(u=username, e=email),
"--username {u} --email bad_email".format(u=username),
"--username bu --email {e}".format(e=email),
"--username {u} --email {e} --first {f} --org does_not_exist --set-bill-to".format(
u=username, e=email, f=first),
]
for invalid_opts in dx_api_error_opts:
with self.assertRaisesRegexp(subprocess.CalledProcessError,
"DXAPIError"):
run(" ".join([cmd, invalid_opts]))
resource_not_found_opts = [
"--username {u} --email {e} --first {f} --org does_not_exist".format(
u=username, e=email, f=first),
]
for invalid_opts in resource_not_found_opts:
with self.assertRaisesRegexp(subprocess.CalledProcessError,
"ResourceNotFound"):
run(" ".join([cmd, invalid_opts]))
dx_cli_error_opts = [
"--username {u} --email {e} --first {f} --level MEMBER".format(
u=username, e=email, f=first),
"--username {u} --email {e} --first {f} --set-bill-to".format(
u=username, e=email, f=first),
"--username {u} --email {e} --first {f} --allow-billable-activities".format(
u=username, e=email, f=first),
"--username {u} --email {e} --first {f} --no-app-access".format(
u=username, e=email, f=first),
"--username {u} --email {e} --first {f} --project-access VIEW".format(
u=username, e=email, f=first),
"--username {u} --email {e} --first {f} --no-email".format(
u=username, e=email, f=first),
]
for invalid_opts in dx_cli_error_opts:
with self.assertRaisesRegexp(subprocess.CalledProcessError,
"DXCLIError"):
run(" ".join([cmd, invalid_opts]))
def test_self_signup_negative(self):
# How to unset context?
pass
def test_create_user_account_only(self):
first = "Asset"
last = "The"
middle = "T."
cmd = "dx new user"
# Basic with first name only.
username, email = generate_unique_username_email()
user_id = run("{cmd} --username {u} --email {e} --first {f} --brief".format(
cmd=cmd, u=username, e=email, f=first)).strip()
self._assert_user_desc(user_id, {"first": first})
# Basic with last name only.
username, email = generate_unique_username_email()
user_id = run("{cmd} --username {u} --email {e} --last {l} --brief".format(
cmd=cmd, u=username, e=email, l=last)).strip()
self._assert_user_desc(user_id, {"last": last})
# Basic with all options we can verify.
# TODO: Test --occupation.
username, email = generate_unique_username_email()
user_id = run("{cmd} --username {u} --email {e} --first {f} --middle {m} --last {l} --brief".format(
cmd=cmd, u=username, e=email, f=first, m=middle,
l=last)).strip()
self._assert_user_desc(user_id, {"first": first,
"last": last,
"middle": middle})
def test_create_user_account_and_invite_to_org(self):
# TODO: Test --no-email flag.
first = "Asset"
cmd = "dx new user"
# Grant default org membership level and permission flags.
username, email = generate_unique_username_email()
user_id = run("{cmd} --username {u} --email {e} --first {f} --org {o} --brief".format(
cmd=cmd, u=username, e=email, f=first,
o=self.org_id)).strip()
self._assert_user_desc(user_id, {"first": first})
exp = {
"level": "MEMBER",
"allowBillableActivities": False,
"createProjectsAndApps": False,
"appAccess": True,
"projectAccess": "CONTRIBUTE",
"id": user_id
}
res = dxpy.api.org_find_members(self.org_id, {"id": [user_id]})["results"][0]
self.assertEqual(res, exp)
# Grant default org membership level and permission flags; `username`
# has uppercase chars.
username, email = generate_unique_username_email()
username = username.upper()
user_id = run("{cmd} --username {u} --email {e} --first {f} --org {o} --brief".format(
cmd=cmd, u=username, e=email, f=first,
o=self.org_id)).strip()
self._assert_user_desc(user_id, {"first": first})
exp = {
"level": "MEMBER",
"allowBillableActivities": False,
"createProjectsAndApps": False,
"appAccess": True,
"projectAccess": "CONTRIBUTE",
"id": user_id
}
res = dxpy.api.org_find_members(self.org_id, {"id": [user_id]})["results"][0]
self.assertEqual(res, exp)
# Grant custom org membership level and permission flags.
username, email = generate_unique_username_email()
user_id = run("{cmd} --username {u} --email {e} --first {f} --org {o} --level {l} --allow-billable-activities --no-app-access --project-access {pa} --brief".format(
cmd=cmd, u=username, e=email, f=first,
o=self.org_id, l="MEMBER", pa="VIEW")).strip()
self._assert_user_desc(user_id, {"first": first})
exp = {
"level": "MEMBER",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": False,
"projectAccess": "VIEW",
"id": user_id
}
res = dxpy.api.org_find_members(self.org_id, {"id": [user_id]})["results"][0]
self.assertEqual(res, exp)
# Grant ADMIN org membership level; ignore all other org permission
# options.
username, email = generate_unique_username_email()
user_id = run("{cmd} --username {u} --email {e} --first {f} --org {o} --level {l} --no-app-access --project-access {pa} --brief".format(
cmd=cmd, u=username, e=email, f=first,
o=self.org_id, l="ADMIN", pa="VIEW")).strip()
self._assert_user_desc(user_id, {"first": first})
exp = {
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER",
"id": user_id
}
res = dxpy.api.org_find_members(self.org_id, {"id": [user_id]})["results"][0]
self.assertEqual(res, exp)
def test_create_user_account_and_set_bill_to(self):
first = "Asset"
cmd = "dx new user --set-bill-to" # Set --set-bill-to option.
# --allow-billable-activities is implied; grant custom org membership
# level and other permission flags.
username, email = generate_unique_username_email()
user_id = run("{cmd} --username {u} --email {e} --first {f} --org {o} --level {l} --project-access {pa} --brief".format(
cmd=cmd, u=username, e=email, f=first,
o=self.org_id, l="MEMBER", pa="VIEW")).strip()
self._assert_user_desc(user_id, {"first": first})
exp = {
"level": "MEMBER",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "VIEW",
"id": user_id
}
res = dxpy.api.org_find_members(self.org_id, {"id": [user_id]})["results"][0]
self.assertEqual(res, exp)
# Grant ADMIN org membership level.
username, email = generate_unique_username_email()
user_id = run("{cmd} --username {u} --email {e} --first {f} --org {o} --level ADMIN --brief".format(
cmd=cmd, u=username, e=email, f=first,
o=self.org_id)).strip()
self._assert_user_desc(user_id, {"first": first})
exp = {
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER",
"id": user_id
}
res = dxpy.api.org_find_members(self.org_id, {"id": [user_id]})["results"][0]
self.assertEqual(res, exp)
def test_create_user_account_and_set_token_duration_negative(self):
first = "Asset"
username, email = "token_duration_neg", "token_duration_neg@example.com"
cmd = "dx new user --username {u} --email {e} --first {f} --token-duration {td}"
invalid_token_durations = [
"8md", # "md" is an invalid unit
"8.5", # float is an invalid input
"8.5d", # float with unit is an invalid input
"31d" # longer than 30 days
]
# test invalid inputs for token duration
for invalid_token_duration in invalid_token_durations:
with self.assertRaisesRegexp(subprocess.CalledProcessError, "ValueError"):
run(cmd.format(u=username.lower(), e=email, f=first, td=invalid_token_duration))
def test_create_user_account_and_set_token_duration(self):
first = "Asset"
cmd = "dx new user --username {u} --email {e} --first {f} --token-duration={td} --brief"
token_durations = ["10000", "10d", "-10000", "-10d"]
for token_duration in token_durations:
username, email = generate_unique_username_email()
user_id = run(cmd.format(u=username, e=email, f=first, td=token_duration)).strip()
self.assertEqual(user_id, "user-" + username.lower())
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping tests that require presence of test user and org')
class TestDXClientMembership(DXTestCase):
def _add_user(self, user_id, level="ADMIN"):
dxpy.api.org_invite(self.org_id,
{"invitee": user_id, "level": level})
def _remove_user(self, user_id):
dxpy.api.org_remove_member(self.org_id, {"user": user_id})
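        # Once removed, org_find_members returns no results for this user, so
        # indexing the first result raises IndexError.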
with self.assertRaises(IndexError):
self._org_find_members(user_id)
def _org_find_members(self, user_id):
return dxpy.api.org_find_members(self.org_id, {"id": [user_id]})["results"][0]
def setUp(self):
# Bob.
self.username = "000000000000000000000001"
self.user_id = "user-" + self.username
# ADMIN: Alice.
self.org_id = "org-piratelabs"
super(TestDXClientMembership, self).setUp()
def tearDown(self):
self._remove_user(self.user_id)
super(TestDXClientMembership, self).tearDown()
def test_add_membership_default(self):
cmd = "dx add member {o} {u} --level {l}"
run(cmd.format(o=self.org_id, u=self.username, l="ADMIN"))
exp_membership = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
self._remove_user(self.user_id)
run(cmd.format(o=self.org_id, u=self.username, l="MEMBER"))
exp_membership = {"id": self.user_id,
"level": "MEMBER",
"allowBillableActivities": False,
"createProjectsAndApps": False,
"appAccess": True,
"projectAccess": "CONTRIBUTE"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
def test_add_membership_with_options(self):
cmd = "dx add member {o} {u} --level {l}"
run("{cmd} --no-app-access --project-access NONE".format(
cmd=cmd.format(o=self.org_id, u=self.username, l="ADMIN")))
exp_membership = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
self._remove_user(self.user_id)
run("{cmd} --allow-billable-activities --no-app-access --project-access NONE".format(
cmd=cmd.format(o=self.org_id, u=self.username, l="MEMBER")))
exp_membership = {"id": self.user_id,
"level": "MEMBER",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": False,
"projectAccess": "NONE"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
def test_add_membership_negative(self):
cmd = "dx add member"
called_process_error_opts = [
"",
"some_username --level ADMIN",
"org-foo --level ADMIN",
"org-foo some_username",
]
for invalid_opts in called_process_error_opts:
with self.assertRaises(subprocess.CalledProcessError):
run(" ".join([cmd, invalid_opts]))
self._add_user(self.user_id)
# Cannot add a user who is already a member of the org.
with self.assertRaisesRegexp(subprocess.CalledProcessError, "DXCLIError"):
run(" ".join([cmd, self.org_id, self.username, "--level ADMIN"]))
def test_remove_membership_default(self):
self._add_user(self.user_id)
exp_membership = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
run("dx remove member {o} {u} -y".format(o=self.org_id, u=self.username))
with self.assertRaises(IndexError):
self._org_find_members(self.user_id)
def test_remove_membership_interactive_conf(self):
self._add_user(self.user_id)
exp_membership = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
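        # Exercise the interactive confirmation: an empty response re-prompts,
        # Ctrl-C and an explicit "n" both abort (membership is unchanged), and
        # only "y" actually removes the member.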
dx_rm_member_int = pexpect.spawn("dx remove member {o} {u}".format(
o=self.org_id, u=self.username), logfile=sys.stderr)
dx_rm_member_int.expect("Please confirm")
dx_rm_member_int.sendline("")
dx_rm_member_int.expect("Please confirm")
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
dx_rm_member_int = pexpect.spawn("dx remove member {o} {u}".format(
o=self.org_id, u=self.username), logfile=sys.stderr)
dx_rm_member_int.expect("Please confirm")
dx_rm_member_int.sendintr()
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
dx_rm_member_int = pexpect.spawn("dx remove member {o} {u}".format(
o=self.org_id, u=self.username), logfile=sys.stderr)
dx_rm_member_int.expect("Please confirm")
dx_rm_member_int.sendline("n")
dx_rm_member_int.expect("Aborting removal")
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
dx_rm_member_int = pexpect.spawn("dx remove member {o} {u}".format(
o=self.org_id, u=self.username))
dx_rm_member_int.logfile = sys.stdout
dx_rm_member_int.expect("Please confirm")
dx_rm_member_int.sendline("y")
dx_rm_member_int.expect("Removed user-{u}".format(u=self.username))
def test_remove_membership_interactive_conf_format(self):
self._add_user(self.user_id)
exp_membership = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
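        # Bill two pre-existing projects to the org: the member holds VIEW on the
        # first and explicit ADMINISTER on the second, so the removal summary below
        # should list both projects.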
project_id_1 = "project-000000000000000000000001"
prev_bill_to_1 = dxpy.api.project_describe(project_id_1, {"fields": {"billTo": True}})["billTo"]
dxpy.api.project_update(project_id_1, {"billTo": self.org_id})
project_permissions = dxpy.api.project_describe(project_id_1, {"fields": {"permissions": True}})["permissions"]
self.assertEqual(project_permissions[self.user_id], "VIEW")
project_id_2 = "project-000000000000000000000002"
prev_bill_to_2 = dxpy.api.project_describe(project_id_2, {"fields": {"billTo": True}})["billTo"]
dxpy.api.project_update(project_id_2, {"billTo": self.org_id})
dxpy.api.project_invite(project_id_2, {"invitee": self.user_id, "level": "ADMINISTER"})
project_permissions = dxpy.api.project_describe(project_id_2, {"fields": {"permissions": True}})["permissions"]
self.assertEqual(project_permissions[self.user_id], "ADMINISTER")
dx_rm_member_int = pexpect.spawn("dx remove member {o} {u}".format(
o=self.org_id, u=self.username))
dx_rm_member_int.logfile = sys.stdout
dx_rm_member_int.expect("Please confirm")
dx_rm_member_int.sendline("y")
dx_rm_member_int.expect("Removed user-{u}".format(u=self.username))
dx_rm_member_int.expect("Removed user-{u} from the following projects:".format(
u=self.username))
dx_rm_member_int.expect("\t" + project_id_1)
dx_rm_member_int.expect("\t" + project_id_2)
dx_rm_member_int.expect("Removed user-{u} from the following apps:".format(
u=self.username))
dx_rm_member_int.expect("None")
dxpy.api.project_update(project_id_1, {"billTo": prev_bill_to_1})
dxpy.api.project_update(project_id_2, {"billTo": prev_bill_to_2})
def test_remove_membership_negative(self):
cmd = "dx remove member"
# Cannot remove a user who is not currently a member of the org.
with self.assertRaisesRegexp(subprocess.CalledProcessError,
"DXCLIError"):
run(" ".join([cmd, self.org_id, self.username]))
called_process_error_opts = [
"",
"some_username",
"org-foo",
]
for invalid_opts in called_process_error_opts:
with self.assertRaises(subprocess.CalledProcessError):
run(" ".join([cmd, invalid_opts]))
def test_update_membership_positive(self):
# default test
self._add_user(self.user_id)
exp_membership = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
run("dx update member {o} {u} --level MEMBER --allow-billable-activities false --project-access VIEW --app-access true".format(
o=self.org_id, u=self.username))
exp_membership = {"id": self.user_id,
"level": "MEMBER",
"allowBillableActivities": False,
"createProjectsAndApps": False,
"projectAccess": "VIEW",
"appAccess": True}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
run("dx update member {o} {u} --allow-billable-activities true --app-access false".format(
o=self.org_id, u=self.username))
exp_membership = {"id": self.user_id,
"level": "MEMBER",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"projectAccess": "VIEW",
"appAccess": False}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
def test_update_membership_to_member_without_membership_flags(self):
cmd = "dx update member {o} {u} --level MEMBER".format(o=self.org_id, u=self.username)
# ADMIN to MEMBER.
self._add_user(self.user_id)
exp = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"projectAccess": "ADMINISTER",
"appAccess": True}
membership_response = self._org_find_members(self.user_id)
self.assertEqual(membership_response, exp)
run(cmd)
exp = {"id": self.user_id,
"level": "MEMBER",
"allowBillableActivities": False,
"createProjectsAndApps": False,
"projectAccess": "CONTRIBUTE",
"appAccess": True}
membership_response = self._org_find_members(self.user_id)
self.assertEqual(membership_response, exp)
# MEMBER to MEMBER.
run(cmd + " --allow-billable-activities true")
exp = {"id": self.user_id,
"level": "MEMBER",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"projectAccess": "CONTRIBUTE",
"appAccess": True}
membership_response = self._org_find_members(self.user_id)
self.assertEqual(membership_response, exp)
run(cmd)
membership_response = self._org_find_members(self.user_id)
self.assertEqual(membership_response, exp)
def test_update_membership_negative(self):
cmd = "dx update member"
# Cannot update the membership of a user who is not currently a member
# of the org.
with self.assertRaisesRegexp(subprocess.CalledProcessError,
"DXCLIError"):
run(" ".join([cmd, self.org_id, self.username, "--level ADMIN"]))
called_process_error_opts = [
"",
"some_username --level ADMIN",
"org-foo --level ADMIN",
"org-foo some_username --level NONE",
]
for invalid_opts in called_process_error_opts:
with self.assertRaises(subprocess.CalledProcessError):
run(" ".join([cmd, invalid_opts]))
# We expect the following to fail as an API call, as ADMIN doesn't
# take options
self._add_user(self.user_id)
api_error_opts = [
"{} {} --allow-billable-activities true".format(self.org_id,
self.username),
]
for invalid_opt in api_error_opts:
with self.assertRaisesRegexp(subprocess.CalledProcessError,
"InvalidInput"):
run(' '.join([cmd, invalid_opt]))
def test_add_update_remove_membership(self):
cmd = "dx add member {o} {u} --level {l} --project-access UPLOAD"
run(cmd.format(o=self.org_id, u=self.username, l="MEMBER"))
exp_membership = {"id": self.user_id,
"level": "MEMBER",
"allowBillableActivities": False,
"createProjectsAndApps": False,
"appAccess": True,
"projectAccess": "UPLOAD"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
cmd = "dx update member {o} {u} --level MEMBER --allow-billable-activities true"
run(cmd.format(o=self.org_id, u=self.username))
exp_membership.update(allowBillableActivities=True, createProjectsAndApps=True)
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
cmd = "dx update member {o} {u} --level ADMIN"
run(cmd.format(o=self.org_id, u=self.username))
exp_membership = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
cmd = "dx update member {o} {u} --level MEMBER --allow-billable-activities true --project-access CONTRIBUTE --app-access false"
run(cmd.format(o=self.org_id, u=self.username))
exp_membership.update(level="MEMBER", projectAccess="CONTRIBUTE", appAccess=False)
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
cmd = "dx remove member {o} {u} -y"
run(cmd.format(o=self.org_id, u=self.username))
with self.assertRaises(IndexError):
self._org_find_members(self.user_id)
def test_add_update_remove_membership_with_user_id(self):
        # This is similar to `test_add_update_remove_membership()` above, but it
        # passes the user ID instead of the username as the argument to the `dx`
        # commands.
cmd = "dx add member {o} {u} --level {l} --project-access UPLOAD"
run(cmd.format(o=self.org_id, u=self.user_id, l="MEMBER"))
exp_membership = {"id": self.user_id,
"level": "MEMBER",
"allowBillableActivities": False,
"createProjectsAndApps": False,
"appAccess": True,
"projectAccess": "UPLOAD"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
cmd = "dx update member {o} {u} --level MEMBER --allow-billable-activities true"
run(cmd.format(o=self.org_id, u=self.user_id))
exp_membership.update(allowBillableActivities=True, createProjectsAndApps=True)
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
cmd = "dx update member {o} {u} --level ADMIN"
run(cmd.format(o=self.org_id, u=self.user_id))
exp_membership = {"id": self.user_id,
"level": "ADMIN",
"allowBillableActivities": True,
"createProjectsAndApps": True,
"appAccess": True,
"projectAccess": "ADMINISTER"}
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
cmd = "dx update member {o} {u} --level MEMBER --allow-billable-activities true --project-access CONTRIBUTE --app-access false"
run(cmd.format(o=self.org_id, u=self.user_id))
exp_membership.update(level="MEMBER", projectAccess="CONTRIBUTE", appAccess=False)
membership = self._org_find_members(self.user_id)
self.assertEqual(membership, exp_membership)
cmd = "dx remove member {o} {u} -y"
run(cmd.format(o=self.org_id, u=self.user_id))
with self.assertRaises(IndexError):
self._org_find_members(self.user_id)
class TestDXClientUpdateProject(DXTestCase):
cmd = "dx update project {pid} --{item} {n}"
def setUp(self):
proj_name = u"Project_name"
self.project = dxpy.api.project_new({"name": proj_name})['id']
dxpy.config["DX_PROJECT_CONTEXT_ID"] = self.project
cd(self.project + ":/")
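        # Re-initialize the config so the new project context takes effect, and
        # drop any cached CLI working directory from a previous test.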
dxpy.config.__init__(suppress_warning=True)
if 'DX_CLI_WD' in dxpy.config:
del dxpy.config['DX_CLI_WD']
def project_describe(self, input_params):
return dxpy.api.project_describe(self.project, input_params)
def test_update_strings(self):
update_items = {'name': 'NewProjectName',
'summary': 'This is a summary',
'description': 'This is a description'}
        # Update items one by one.
for item in update_items:
run(self.cmd.format(pid=self.project, item=item, n=pipes.quote(update_items[item])))
describe_input = {}
describe_input[item] = 'true'
self.assertEqual(self.project_describe(describe_input)[item],
update_items[item])
def test_update_multiple_items(self):
        # Test updating multiple items in a single API call
update_items = {'name': 'NewProjectName',
                        'summary': 'This is a new summary',
                        'description': 'This is a new description',
'protected': 'false'}
cmd = "dx update project {pid} --name {name} --summary {summary} --description {desc} --protected {protect}"
run(cmd.format(pid=self.project, name=pipes.quote(update_items['name']),
summary=pipes.quote(update_items['summary']), desc=pipes.quote(update_items['description']),
protect=update_items['protected']))
describe_input = {}
for item in update_items:
describe_input[item] = 'true'
result = self.project_describe(describe_input)
for item in update_items:
if item == 'protected':
self.assertFalse(result[item])
else:
self.assertEqual(result[item], update_items[item])
def test_update_project_by_name(self):
describe_input = {}
describe_input['name'] = 'true'
project_name = self.project_describe(describe_input)['name']
new_name = 'Another Project Name'
run(self.cmd.format(pid=project_name, item='name', n=pipes.quote(new_name)))
result = self.project_describe(describe_input)
self.assertEqual(result['name'], new_name)
def test_update_booleans(self):
update_items = {'protected': 'true',
'restricted': 'true'}
for item in update_items:
run(self.cmd.format(pid=self.project, item=item, n=update_items[item]))
describe_input = {}
describe_input[item] = 'true'
self.assertTrue(self.project_describe(describe_input)[item])
def test_bill_non_existent_user(self):
        # Test that the API returns InvalidInput when given a nonexistent user.
cmd = "dx update project {pid} --bill-to user-wronguser"
with self.assertSubprocessFailure(stderr_text="InvalidInput"):
run(cmd.format(pid=self.project))
@unittest.skipUnless(testutil.TEST_HTTP_PROXY,
'skipping HTTP Proxy support test that needs squid3')
class TestHTTPProxySupport(DXTestCase):
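    """Run dx through a local squid3 proxy, with and without proxy credentials."""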
def setUp(self):
squid_wd = os.path.join(os.path.dirname(__file__), 'http_proxy')
self.proxy_process = subprocess.Popen(['squid3', '-N', '-f', 'squid.conf'], cwd=squid_wd)
time.sleep(1)
print("Waiting for squid to come up...")
t = 0
while True:
try:
if requests.get("http://localhost:3129").status_code == requests.codes.bad_request:
if self.proxy_process.poll() is not None:
# Got a response on port 3129, but our proxy
# quit with an error, so it must be another
# process.
raise Exception("Tried launching squid, but port 3129 is already bound")
print("squid is up")
break
except requests.exceptions.RequestException:
pass
time.sleep(0.5)
t += 1
if t > 16:
raise Exception("Failed to launch Squid")
self.proxy_env_no_auth = os.environ.copy()
self.proxy_env_no_auth["HTTP_PROXY"] = "http://localhost:3129"
self.proxy_env_no_auth["HTTPS_PROXY"] = "http://localhost:3129"
self.proxy_env = os.environ.copy()
self.proxy_env["HTTP_PROXY"] = "http://proxyuser:proxypassword@localhost:3129"
self.proxy_env["HTTPS_PROXY"] = "http://proxyuser:proxypassword@localhost:3129"
def test_proxy(self):
run("dx find projects", env=self.proxy_env)
with self.assertSubprocessFailure(stderr_regexp="407 Proxy Authentication Required"):
run("dx find projects", env=self.proxy_env_no_auth)
def tearDown(self):
self.proxy_process.terminate()
class TestDXBuildApp(DXTestCase):
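    """Tests for "dx build" of applets and apps."""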
def setUp(self):
super(TestDXBuildApp, self).setUp()
self.temp_file_path = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.temp_file_path)
super(TestDXBuildApp, self).tearDown()
def run_and_assert_stderr_matches(self, cmd, stderr_regexp):
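        # The command itself is expected to succeed; appending "&& exit 28"
        # forces a known non-zero exit code so that assertSubprocessFailure can
        # capture stderr and match it against stderr_regexp.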
with self.assertSubprocessFailure(stderr_regexp=stderr_regexp, exit_code=28):
run(cmd + ' && exit 28')
def write_app_directory(self, app_name, dxapp_str, code_filename=None, code_content="\n"):
# Note: if called twice with the same app_name, will overwrite
# the dxapp.json and code file (if specified) but will not
# remove any other files that happened to be present
try:
os.mkdir(os.path.join(self.temp_file_path, app_name))
except OSError as e:
if e.errno != 17: # directory already exists
raise e
if dxapp_str is not None:
with open(os.path.join(self.temp_file_path, app_name, 'dxapp.json'), 'wb') as manifest:
manifest.write(dxapp_str.encode())
if code_filename:
with open(os.path.join(self.temp_file_path, app_name, code_filename), 'w') as code_file:
code_file.write(code_content)
return os.path.join(self.temp_file_path, app_name)
def test_help_without_security_context(self):
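        # "dx build -h" should work even when no security context or API server
        # settings are configured.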
env = override_environment(DX_SECURITY_CONTEXT=None, DX_APISERVER_HOST=None,
DX_APISERVER_PORT=None, DX_APISERVER_PROTOCOL=None)
run("dx build -h", env=env)
def test_accepts_semver(self):
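        # APP_VERSION_RE should accept semver-style versions, including
        # pre-release and build-metadata suffixes, and reject malformed ones.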
self.assertTrue(dx_build_app.APP_VERSION_RE.match('3.1.41') is not None)
self.assertTrue(dx_build_app.APP_VERSION_RE.match('3.1.41-rc.1') is not None)
self.assertFalse(dx_build_app.APP_VERSION_RE.match('3.1.41-rc.1.') is not None)
self.assertFalse(dx_build_app.APP_VERSION_RE.match('3.1.41-rc..1') is not None)
self.assertTrue(dx_build_app.APP_VERSION_RE.match('22.0.999+git.abcdef') is not None)
self.assertFalse(dx_build_app.APP_VERSION_RE.match('22.0.999+git.abcdef$') is not None)
self.assertFalse(dx_build_app.APP_VERSION_RE.match('22.0.999+git.abcdef.') is not None)
self.assertTrue(dx_build_app.APP_VERSION_RE.match('22.0.999-rc.1+git.abcdef') is not None)
def test_version_suffixes(self):
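        # Per the assertions below, the generated suffix starts with "+build."
        # when the version has no build metadata yet, and with ".build." when
        # it already does.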
app_spec = {
"name": "test_versioning_åpp",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("test_versioning_app", json.dumps(app_spec), "code.py")
self.assertTrue(dx_build_app._get_version_suffix(app_dir, '1.0.0').startswith('+build.'))
self.assertTrue(dx_build_app._get_version_suffix(app_dir, '1.0.0+git.abcdef')
.startswith('.build.'))
def test_build_applet(self):
app_spec = {
"name": "minimal_applet",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("minimal_åpplet", json.dumps(app_spec), "code.py")
new_applet = json.loads(run("dx build --json " + app_dir))
applet_describe = json.loads(run("dx describe --json " + new_applet["id"]))
self.assertEqual(applet_describe["class"], "applet")
self.assertEqual(applet_describe["id"], applet_describe["id"])
self.assertEqual(applet_describe["name"], "minimal_applet")
def test_dx_build_applet_dxapp_json_created_with_makefile(self):
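        # dxapp.json is not written up front; the Makefile generates it from
        # temp_dxapp.json at build time, and "dx build" should pick it up.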
app_name = "nodxapp_applet"
app_dir = self.write_app_directory(app_name, None, "code.py")
app_spec = {
"name": app_name,
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
makefile_str = "dxapp.json:\n\tcp temp_dxapp.json dxapp.json\n"
with open(os.path.join(app_dir, 'temp_dxapp.json'), 'wb') as manifest:
manifest.write(json.dumps(app_spec).encode())
with open(os.path.join(app_dir, "Makefile"), 'w') as makefile:
makefile.write(makefile_str)
run("dx build " + app_dir)
def test_dx_build_applet_no_app_linting(self):
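        # Applet builds should not emit app-style lint warnings for any of the
        # cases below.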
run("dx clearenv")
# Case: Missing title, summary, description.
app_spec = {
"name": "dx_build_applet_missing_fields",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"categories": ["Annotation"]
}
app_dir = self.write_app_directory("dx_build_applet_missing_fields", json.dumps(app_spec), "code.py")
args = ['dx', 'build', app_dir]
p = subprocess.Popen(args, stderr=subprocess.PIPE)
out, err = p.communicate()
self.assertFalse(err.startswith("WARNING"))
        # Case: period usage at the end of the summary and description.
app_spec = {
"name": "dx_build_applet_summary_with_period",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"title": "Title",
"summary": "Summary without period",
"description": "Description with period.",
"categories": ["Annotation"]
}
app_dir = self.write_app_directory("dx_build_applet_summary_with_period", json.dumps(app_spec), "code.py")
args = ['dx', 'build', app_dir]
p = subprocess.Popen(args, stderr=subprocess.PIPE)
out, err = p.communicate()
self.assertFalse(err.startswith("WARNING"))
# Case: Usage of unknown categories.
unknown_category = "asdf1234"
app_spec = {
"name": "dx_build_applet_unknown_cat",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"title": "Title",
"summary": "Summary without period",
"description": "Description without period",
"categories": [unknown_category]
}
app_dir = self.write_app_directory("dx_build_applet_unknown_cat", json.dumps(app_spec), "code.py")
args = ['dx', 'build', app_dir]
p = subprocess.Popen(args, stderr=subprocess.PIPE)
out, err = p.communicate()
self.assertFalse(err.startswith("WARNING"))
def test_build_applet_dry_run(self):
app_spec = {
"name": "minimal_applet_dry_run",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("minimal_applet_dry_run", json.dumps(app_spec), "code.py")
with self.assertSubprocessFailure(stderr_regexp='cannot be specified together', exit_code=2):
run("dx build --dry-run " + app_dir + " --run -y --brief")
run("dx build --dry-run " + app_dir)
self.assertEqual(len(list(dxpy.find_data_objects(name="minimal_applet_dry_run"))), 0)
@unittest.skipUnless(testutil.TEST_RUN_JOBS, 'skipping test that would run jobs')
def test_build_applet_and_run_immediately(self):
app_spec = {
"name": "minimal_applet_to_run",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("minimal_åpplet_to_run", json.dumps(app_spec), "code.py")
job_id = run("dx build " + app_dir + ' --run -y --brief').strip()
job_desc = json.loads(run('dx describe --json ' + job_id))
# default priority should be high for running after building
# an applet
self.assertEqual(job_desc['name'], 'minimal_applet_to_run')
self.assertEqual(job_desc['priority'], 'high')
# if priority is explicitly requested as normal, it should be
# honored
job_id = run("dx build -f " + app_dir + ' --run --priority normal -y --brief').strip()
job_desc = json.loads(run('dx describe --json ' + job_id))
self.assertEqual(job_desc['name'], 'minimal_applet_to_run')
self.assertEqual(job_desc['priority'], 'normal')
@unittest.skipUnless(testutil.TEST_RUN_JOBS, 'skipping test that would run jobs')
def test_remote_build_applet_and_run_immediately(self):
app_spec = {
"name": "minimal_remote_build_applet_to_run",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("minimal_remote_build_åpplet_to_run", json.dumps(app_spec),
"code.py")
job_name = ("remote_build_test_run_" + str(int(time.time() * 1000)) + "_" +
str(random.randint(0, 1000)))
run("dx build --remote " + app_dir + " --run -y --name=" + job_name)
resulting_jobs = list(dxpy.find_executions(name=job_name, project=self.project, return_handler=True))
self.assertEqual(1, len(resulting_jobs))
self.assertEqual('minimal_remote_build_applet_to_run',
resulting_jobs[0].describe()['executableName'])
@unittest.skipUnless(testutil.TEST_RUN_JOBS and testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps and run jobs')
def test_remote_build_app(self):
app_spec = {
"name": "minimal_remote_build_app",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("minimal_remote_build_åpp", json.dumps(app_spec), "code.py")
run("dx build --remote --app " + app_dir)
def test_remote_build_app_and_run_immediately(self):
app_spec = {
"name": "minimal_remote_build_app_to_run",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("minimal_remote_build_åpp_to_run", json.dumps(app_spec),
"code.py")
# Not supported yet
with self.assertSubprocessFailure(stderr_regexp='cannot all be specified together', exit_code=2):
run("dx build --remote --app " + app_dir + " --run --yes")
def test_build_applet_warnings(self):
app_spec = {
"title": "title",
"summary": "a summary sentence.",
"description": "foo",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [{"name": "34", "class": "int"}],
"outputSpec": [{"name": "92", "class": "string"}],
"version": "1.0.0",
"categories": ["foo", "Import", "Export"]
}
app_dir = self.write_app_directory("test_build_åpplet_warnings", json.dumps(app_spec), "code.py")
with open(os.path.join(app_dir, 'Readme.md'), 'w') as readme:
readme.write('a readme file')
applet_expected_warnings = ["missing a name",
'input 0 has illegal name',
'output 0 has illegal name']
applet_unexpected_warnings = ["should be all lowercase",
"does not match containing directory",
"missing a title",
"missing a summary",
"should be a short phrase not ending in a period",
"missing a description",
'"description" field shadows file',
'"description" field should be written in complete sentences',
'unrecognized category',
'should end in "Importer"',
'should end in "Exporter"',
"should be semver compliant"]
try:
run("dx build " + app_dir)
self.fail("dx build invocation should have failed because of bad IO spec")
except subprocess.CalledProcessError as err:
for warning in applet_expected_warnings:
self.assertIn(warning, err.stderr)
for warning in applet_unexpected_warnings:
self.assertNotIn(warning, err.stderr)
        # Another failure case: runSpec without an interpreter field.
app_spec = {
"dxapi": "1.0.0",
"runSpec": {"file": "code.py"}
}
app_dir = self.write_app_directory("test_build_second_åpplet_warnings", json.dumps(app_spec), "code.py")
with self.assertSubprocessFailure(stderr_regexp='interpreter field was not present'):
run("dx build " + app_dir)
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_build_app_warnings(self):
app_spec = {
"name": "Foo",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "foo"
}
app_dir = self.write_app_directory("test_build_app_warnings", json.dumps(app_spec), "code.py")
app_unexpected_warnings = ["missing a name",
"should be a short phrase not ending in a period",
'"description" field shadows file',
'"description" field should be written in complete sentences',
'unrecognized category',
'should end in "Importer"',
'should end in "Exporter"',
'input 0 has illegal name',
'output 0 has illegal name']
app_expected_warnings = ["should be all lowercase",
"does not match containing directory",
"missing a title",
"missing a summary",
"missing a description",
"should be semver compliant"]
try:
# Expect "dx build" to succeed, exit with error code to
# grab stderr.
run("dx build --app " + app_dir + " && exit 28")
except subprocess.CalledProcessError as err:
self.assertEqual(err.returncode, 28)
for warning in app_unexpected_warnings:
self.assertNotIn(warning, err.stderr)
for warning in app_expected_warnings:
self.assertIn(warning, err.stderr)
def test_build_applet_with_no_dxapp_json(self):
app_dir = self.write_app_directory("åpplet_with_no_dxapp_json", None, "code.py")
with self.assertSubprocessFailure(stderr_regexp='does not contain dxapp\.json', exit_code=3):
run("dx build " + app_dir)
def test_build_applet_with_malformed_dxapp_json(self):
app_dir = self.write_app_directory("åpplet_with_malformed_dxapp_json", "{", "code.py")
with self.assertSubprocessFailure(stderr_regexp='Could not parse dxapp\.json file', exit_code=3):
run("dx build " + app_dir)
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_build_app(self):
app_spec = {
"name": "minimal_app",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("minimal_åpp", json.dumps(app_spec), "code.py")
new_app = json.loads(run("dx build --create-app --json " + app_dir))
app_describe = json.loads(run("dx describe --json " + new_app["id"]))
self.assertEqual(app_describe["class"], "app")
self.assertEqual(app_describe["id"], app_describe["id"])
self.assertEqual(app_describe["version"], "1.0.0")
self.assertEqual(app_describe["name"], "minimal_app")
self.assertFalse("published" in app_describe)
self.assertTrue(os.path.exists(os.path.join(app_dir, 'code.py')))
self.assertFalse(os.path.exists(os.path.join(app_dir, 'code.pyc')))
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that would create apps')
def test_build_app_and_pretend_to_update_devs(self):
app_spec = {
"name": "test_build_app_and_pretend_to_update_devs",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"developers": ['user-dnanexus']
}
app_dir = self.write_app_directory("test_build_app_and_pretend_to_update_devs",
json.dumps(app_spec), "code.py")
# Without --yes, the build will succeed except that it will skip
# the developer update
self.run_and_assert_stderr_matches('dx build --create-app --json ' + app_dir,
'skipping requested change to the developer list')
app_developers = dxpy.api.app_list_developers('app-test_build_app_and_pretend_to_update_devs')['developers']
        self.assertEqual(len(app_developers), 1)  # the only developer is the user we are calling as
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that would create apps')
def test_build_app_and_update_devs(self):
app_spec = {
"name": "test_build_app_and_update_devs",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("test_build_app_and_update_devs", json.dumps(app_spec),
"code.py")
my_userid = dxpy.whoami()
run('dx build --create-app --json ' + app_dir)
app_developers = dxpy.api.app_list_developers('app-test_build_app_and_update_devs')['developers']
self.assertEqual(app_developers, [my_userid])
# Add a developer
app_spec['developers'] = [my_userid, 'user-eve']
self.write_app_directory("test_build_app_and_update_devs", json.dumps(app_spec), "code.py")
self.run_and_assert_stderr_matches('dx build --create-app --yes --json ' + app_dir,
'the following developers will be added: user-eve')
app_developers = dxpy.api.app_list_developers('app-test_build_app_and_update_devs')['developers']
self.assertEqual(set(app_developers), set([my_userid, 'user-eve']))
# Add and remove a developer
app_spec['developers'] = [my_userid, 'user-000000000000000000000001']
self.write_app_directory("test_build_app_and_update_devs", json.dumps(app_spec), "code.py")
self.run_and_assert_stderr_matches(
'dx build --create-app --yes --json ' + app_dir,
'the following developers will be added: user-000000000000000000000001; and ' \
+ 'the following developers will be removed: user-eve'
)
app_developers = dxpy.api.app_list_developers('app-test_build_app_and_update_devs')['developers']
self.assertEqual(set(app_developers), set([my_userid, 'user-000000000000000000000001']))
# Remove a developer
app_spec['developers'] = [my_userid]
self.write_app_directory("test_build_app_and_update_devs", json.dumps(app_spec), "code.py")
self.run_and_assert_stderr_matches('dx build --create-app --yes --json ' + app_dir,
'the following developers will be removed: ' +
'user-000000000000000000000001')
app_developers = dxpy.api.app_list_developers('app-test_build_app_and_update_devs')['developers']
self.assertEqual(app_developers, [my_userid])
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_build_app_with_region(self):
app_spec = {
"name": "minimal_app_regions",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("minimal_app_regions", json.dumps(app_spec), "code.py")
new_app = json.loads(run("dx build --create-app --region aws:us-east-1 --json " + app_dir))
app_describe = json.loads(run("dx describe --json " + new_app["id"]))
self.assertEqual(app_describe["region"], "aws:us-east-1")
with self.assertRaisesRegexp(subprocess.CalledProcessError, "InvalidInput"):
run("dx build --create-app --region aws:not-a-region --json " + app_dir)
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_invalid_project_context(self):
app_spec = {
"name": "invalid_project_context",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7"
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("invalid_project_context", json.dumps(app_spec), "code.py")
# Set the project context to a nonexistent project. This
# shouldn't have any effect since building an app is supposed to
# be hygienic.
env = override_environment(DX_PROJECT_CONTEXT_ID='project-B00000000000000000000000')
run("dx build --create-app --json " + app_dir, env=env)
def test_invalid_execdepends(self):
app_spec = {
"name": "invalid_execdepends",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7",
"execDepends": {"name": "oops"}
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("invalid_execdepends", json.dumps(app_spec), "code.py")
with self.assertSubprocessFailure(stderr_regexp="Expected runSpec\.execDepends to"):
run("dx build --json " + app_dir)
def test_invalid_authorized_users(self):
app_spec = {
"name": "invalid_authorized_users",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"authorizedUsers": "PUBLIC"
}
app_dir = self.write_app_directory("invalid_authorized_users", json.dumps(app_spec), "code.py")
with self.assertSubprocessFailure(stderr_regexp='Expected authorizedUsers to be a list of strings'):
run("dx build --json " + app_dir)
app_spec["authorizedUsers"] = ["foo"]
app_dir = self.write_app_directory("invalid_authorized_users_2", json.dumps(app_spec),
"code.py")
with self.assertSubprocessFailure(stderr_regexp='contains an entry which is not'):
run("dx build --json " + app_dir)
def test_duplicate_keys_in_spec(self):
app_spec = {
"name": "test_duplicate_keys_in_spec",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7"
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
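        # Inject a duplicate "file" key into the serialized JSON; the build
        # should be rejected with a "duplicate key" error.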
spec = json.dumps(app_spec).replace('"file": "code.py"', '"file": "code.py", "file": "code.py"')
app_dir = self.write_app_directory("duplicate_keys_in_spec", spec, "code.py")
with self.assertSubprocessFailure(stderr_regexp="duplicate key: "):
run("dx build --json " + app_dir)
def test_deps_without_network_access(self):
app_spec = {
"name": "test_deps_without_network_access",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7",
"execDepends": [{"name": "ddd", "package_manager": "pip"}]
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("deps_without_network_access", json.dumps(app_spec),
"code.py")
with self.assertSubprocessFailure(stderr_regexp=("runSpec.execDepends specifies non-APT " +
"dependencies, but no network access spec " +
"is given")):
run("dx build --json " + app_dir)
def test_overwrite_applet(self):
app_spec = {
"name": "applet_overwriting",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("applet_overwriting", json.dumps(app_spec), "code.py")
applet_id = json.loads(run("dx build --json " + app_dir))["id"]
# Verify that we can succeed by writing to a different folder.
run("dx mkdir subfolder")
run("dx build --destination=subfolder/applet_overwriting " + app_dir)
with self.assertSubprocessFailure():
run("dx build " + app_dir)
run("dx build -f " + app_dir)
# Verify that the original app was deleted by the previous
# dx build -f
with self.assertSubprocessFailure(exit_code=3):
run("dx describe " + applet_id)
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_update_app_categories(self):
app1_spec = {
"name": "update_app_categories",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"categories": ["A"]
}
app2_spec = {
"name": "update_app_categories",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.1",
"categories": ["B"]
}
app_dir = self.write_app_directory("update_app_categories", json.dumps(app1_spec), "code.py")
app_id = json.loads(run("dx build --create-app --json " + app_dir))['id']
self.assertEquals(json.loads(run("dx api " + app_id + " listCategories"))["categories"], ['A'])
shutil.rmtree(app_dir)
self.write_app_directory("update_app_categories", json.dumps(app2_spec), "code.py")
run("dx build --create-app --json " + app_dir)
self.assertEquals(json.loads(run("dx api " + app_id + " listCategories"))["categories"], ['B'])
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that would create apps')
def test_update_app_authorized_users(self):
app0_spec = {
"name": "update_app_authorized_users",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "0.0.1"
}
app1_spec = {
"name": "update_app_authorized_users",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"authorizedUsers": []
}
app2_spec = {
"name": "update_app_authorized_users",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.1",
"authorizedUsers": ["user-eve"]
}
app_dir = self.write_app_directory("update_app_authorized_users", json.dumps(app0_spec),
"code.py")
app_id = json.loads(run("dx build --create-app --json " + app_dir))['id']
self.assertEquals(json.loads(run("dx api " + app_id +
" listAuthorizedUsers"))["authorizedUsers"], [])
shutil.rmtree(app_dir)
self.write_app_directory("update_app_authorized_users", json.dumps(app1_spec), "code.py")
run("dx build --create-app --json " + app_dir)
self.assertEquals(json.loads(run("dx api " + app_id +
" listAuthorizedUsers"))["authorizedUsers"], [])
shutil.rmtree(app_dir)
self.write_app_directory("update_app_authorized_users", json.dumps(app2_spec), "code.py")
run("dx build --create-app --yes --json " + app_dir)
self.assertEquals(json.loads(run("dx api " + app_id +
" listAuthorizedUsers"))["authorizedUsers"], ["user-eve"])
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_dx_add_list_remove_users(self):
'''
This test is for some other dx subcommands, but it's in this
test suite to take advantage of app-building methods.
'''
# Only create the app if it's not available already (makes
# local testing easier)
try:
app_desc = dxpy.api.app_describe("app-test_dx_users", {})
app_id = app_desc["id"]
# reset users to empty list
run("dx remove users app-test_dx_users " + " ".join(app_desc["authorizedUsers"]))
except:
app_id = None
if app_id is None:
app_spec = {
"name": "test_dx_users",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "0.0.1"
}
app_dir = self.write_app_directory("test_dx_users", json.dumps(app_spec), "code.py")
app_id = json.loads(run("dx build --create-app --json " + app_dir))['id']
# don't use "app-" prefix, duplicate and multiple members are fine
run("dx add users test_dx_users eve user-eve org-piratelabs")
users = run("dx list users app-test_dx_users").strip().split("\n")
self.assertEqual(len(users), 2)
self.assertIn("user-eve", users)
self.assertIn("org-piratelabs", users)
run("dx remove users test_dx_users eve org-piratelabs")
# use version string
users = run("dx list users app-test_dx_users/0.0.1").strip()
# bad paths and exit codes
with self.assertSubprocessFailure(stderr_regexp='could not be resolved', exit_code=3):
run('dx add users nonexistentapp user-eve')
with self.assertSubprocessFailure(stderr_regexp='could not be resolved', exit_code=3):
run('dx list users app-nonexistentapp')
with self.assertSubprocessFailure(stderr_regexp='could not be resolved', exit_code=3):
run('dx remove users app-nonexistentapp/1.0.0 user-eve')
with self.assertSubprocessFailure(stderr_regexp='ResourceNotFound', exit_code=3):
run('dx add users test_dx_users org-nonexistentorg')
with self.assertSubprocessFailure(stderr_regexp='ResourceNotFound', exit_code=3):
run('dx add users test_dx_users nonexistentuser')
with self.assertSubprocessFailure(stderr_regexp='ResourceNotFound', exit_code=3):
run('dx add users test_dx_users piratelabs')
# ResourceNotFound is not thrown when removing things
run('dx remove users test_dx_users org-nonexistentorg')
run('dx remove users test_dx_users nonexistentuser')
run('dx remove users test_dx_users piratelabs')
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_dx_add_list_remove_developers(self):
'''
This test is for some other dx subcommands, but it's in this
test suite to take advantage of app-building methods.
'''
# Only create the app if it's not available already (makes
# local testing easier)
try:
app_desc = dxpy.api.app_describe("app-test_dx_developers", {})
app_id = app_desc["id"]
my_userid = app_desc["createdBy"]
developers = dxpy.api.app_list_developers("app-test_dx_developers", {})["developers"]
# reset developers to default list
if len(developers) != 1:
run("dx remove developers app-test_dx_developers " +
" ".join([dev for dev in developers if dev != my_userid]))
except:
app_id = None
if app_id is None:
app_spec = {
"name": "test_dx_developers",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "0.0.1"
}
app_dir = self.write_app_directory("test_dx_developers", json.dumps(app_spec), "code.py")
app_desc = json.loads(run("dx build --create-app --json " + app_dir))
app_id = app_desc['id']
my_userid = app_desc["createdBy"]
developers = run("dx list developers app-test_dx_developers").strip()
self.assertEqual(developers, my_userid)
# use hash ID
run("dx add developers " + app_id + " eve")
developers = run("dx list developers app-test_dx_developers").strip().split("\n")
self.assertEqual(len(developers), 2)
self.assertIn(my_userid, developers)
# don't use "app-" prefix, duplicate, multiple, and non- members are fine
run("dx remove developers test_dx_developers PUBLIC eve user-eve org-piratelabs")
developers = run("dx list developers app-test_dx_developers").strip()
self.assertEqual(developers, my_userid)
# use version string
run("dx list developers app-test_dx_developers/0.0.1")
# bad paths and exit codes
with self.assertSubprocessFailure(stderr_regexp='could not be resolved', exit_code=3):
run('dx add developers nonexistentapp eve')
with self.assertSubprocessFailure(stderr_regexp='could not be resolved', exit_code=3):
run('dx list developers app-nonexistentapp')
with self.assertSubprocessFailure(stderr_regexp='could not be resolved', exit_code=3):
run('dx remove developers app-nonexistentapp/1.0.0 eve')
with self.assertSubprocessFailure(stderr_regexp='ResourceNotFound', exit_code=3):
run('dx add developers test_dx_developers nonexistentuser')
with self.assertSubprocessFailure(stderr_regexp='ResourceNotFound', exit_code=3):
run('dx add developers test_dx_developers piratelabs')
# ResourceNotFound is not thrown when removing things
run('dx remove developers test_dx_developers org-nonexistentorg')
run('dx remove developers test_dx_developers nonexistentuser')
run('dx remove developers test_dx_developers piratelabs')
# Raise an error if you try to add an org developer (currently unsupported by the API)
with self.assertSubprocessFailure(stderr_regexp='unsupported', exit_code=3):
run('dx add developers test_dx_developers org-piratelabs')
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
'skipping test that would create apps')
def test_build_app_autonumbering(self):
app_spec = {
"name": "build_app_autonumbering",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("build_app_autonumbering", json.dumps(app_spec), "code.py")
run("dx build --create-app --json --publish " + app_dir)
with self.assertSubprocessFailure(stderr_regexp="Could not create"):
print(run("dx build --create-app --json --no-version-autonumbering " + app_dir))
run("dx build --create-app --json " + app_dir) # Creates autonumbered version
def test_build_failure(self):
app_spec = {
"name": "build_failure",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("build_failure", json.dumps(app_spec), "code.py")
with open(os.path.join(app_dir, 'Makefile'), 'w') as makefile:
makefile.write("all:\n\texit 7")
with self.assertSubprocessFailure(stderr_regexp="make -j[0-9]+ in target directory failed with exit code"):
run("dx build " + app_dir)
# Somewhat indirect test of --no-parallel-build
with self.assertSubprocessFailure(stderr_regexp="make in target directory failed with exit code"):
run("dx build --no-parallel-build " + app_dir)
def test_syntax_checks(self):
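        # code.py below contains a deliberate syntax error (missing indentation
        # after the def), so the build should fail unless --no-check-syntax is
        # given.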
app_spec = {
"name": "syntax_checks",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("syntax_checks",
json.dumps(app_spec),
code_filename="code.py",
code_content="def improper():\nprint 'oops'")
with self.assertSubprocessFailure(stderr_regexp="Entry point file \\S+ has syntax errors"):
run("dx build " + app_dir)
run("dx build --no-check-syntax " + app_dir)
@unittest.skipUnless(testutil.TEST_RUN_JOBS,
'skipping test that would run jobs')
def test_build_and_run_applet_remote(self):
app_spec = {
"name": "build_applet_remote",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [
{"name": "in1", "class": "int"},
],
"outputSpec": [
{"name": "out1", "class": "int"}
],
"version": "1.0.0"
}
app_code = """import dxpy
@dxpy.entry_point("main")
def main(in1):
return {"out1": in1 + 1}
"""
app_dir = self.write_app_directory(
'build_applet_remote', json.dumps(app_spec), code_filename='code.py', code_content=app_code)
remote_build_output = run('dx build --remote ' + app_dir).strip().split('\n')[-1]
# TODO: it would be nice to have the output of dx build --remote
# more machine readable (perhaps when --json is specified)
build_job_id = re.search('job-[A-Za-z0-9]{24}', remote_build_output).group(0)
build_job_describe = json.loads(run('dx describe --json ' + build_job_id))
applet_id = build_job_describe['output']['output_applet']['$dnanexus_link']
invocation_job_id = run('dx run --brief --yes ' + applet_id + ' -iin1=8675309').strip()
run('dx wait ' + invocation_job_id)
invocation_job_describe = json.loads(run('dx describe --json ' + invocation_job_id))
self.assertEquals(invocation_job_describe['output']['out1'], 8675310)
def test_applet_help(self):
app_spec = {
"name": "applet_help",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [
{"name": "reads", "class": "array:gtable", "type": "LetterReads", "label": "Reads",
"help": "One or more Reads table objects."},
{"name": "required", "class": "file", "label": "Required", "help": "Another parameter"},
{"name": "optional", "class": "file", "label": "Optional",
"help": "Optional parameter", "optional": True}
],
"outputSpec": [
{"name": "mappings", "class": "gtable", "type": "LetterMappings", "label": "Mappings",
"help": "The mapped reads."}
],
"version": "1.0.0"
}
app_dir = self.write_app_directory("åpplet_help", json.dumps(app_spec),
code_filename="code.py", code_content="")
applet_id = json.loads(run("dx build --json " + app_dir))["id"]
applet_help = run("dx run " + applet_id + " -h")
self.assertTrue("Reads: -ireads=(gtable, type LetterReads) [-ireads=... [...]]" in applet_help)
self.assertTrue("Required: -irequired=(file)" in applet_help)
self.assertTrue("Optional: [-ioptional=(file)]" in applet_help)
self.assertTrue("Mappings: mappings (gtable, type LetterMappings)" in applet_help)
def test_upload_resources(self):
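        # Files under resources/ are bundled with the applet; the bundle should
        # be uploaded into the applet's destination folder.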
run("dx mkdir /subfolder")
cd("/subfolder")
app_spec = {
"name": "upload_resources",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("upload_åpp_resources", json.dumps(app_spec), "code.py")
os.mkdir(os.path.join(app_dir, 'resources'))
with open(os.path.join(app_dir, 'resources', 'test.txt'), 'w') as resources_file:
resources_file.write('test\n')
new_applet = json.loads(run("dx build --json " + app_dir))
applet_describe = json.loads(run("dx describe --json " + new_applet["id"]))
resources_file = applet_describe['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
resources_file_describe = json.loads(run("dx describe --json " + resources_file))
# Verify that the bundled depends appear in the same folder.
self.assertEqual(resources_file_describe['folder'], '/subfolder')
def test_upload_resources_advanced(self):
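        # Exercises checksum-based reuse of the resource bundle: an unchanged
        # tree is not re-uploaded, retargeting a symlink triggers a re-upload,
        # and --ensure-upload always re-uploads without recording a checksum
        # property.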
app_spec = {
"name": "upload_resources_advanced",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("upload_åpp_resources_advanced", json.dumps(app_spec), "code.py")
os.mkdir(os.path.join(app_dir, 'resources'))
with open(os.path.join(app_dir, 'test_file1.txt'), 'w') as file1:
file1.write('test_file1\n') # Not in resources folder, so will not affect checksum
with open(os.path.join(app_dir, 'resources', 'test_file2.txt'), 'w') as resources_file2:
resources_file2.write('test_file2\n')
# Create symbolic link to test_file1.txt
if 'symbolic_link' in os.listdir(os.path.join(app_dir, 'resources')):
os.remove(os.path.join(app_dir, 'resources', 'symbolic_link'))
os.symlink(os.path.join(app_dir, 'test_file1.txt'), os.path.join(app_dir, 'resources', 'symbolic_link'))
new_applet = json.loads(run("dx build --json " + app_dir))
applet_describe = dxpy.api.applet_describe(new_applet["id"])
resources_file = applet_describe['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
id1 = dxpy.api.file_describe(resources_file)['id']
# Remove test_file1.txt, even though symbolic_link points to it. Removal itself will not affect checksum
os.remove(os.path.join(app_dir, 'test_file1.txt'))
new_applet = json.loads(run("dx build -f --json " + app_dir))
applet_describe = dxpy.api.applet_describe(new_applet["id"])
resources_file = applet_describe['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
id2 = dxpy.api.file_describe(resources_file)['id']
self.assertEqual(id1, id2) # No upload happened
# Make symbolic_link point to test_file2.txt, giving it a different modification time
os.remove(os.path.join(app_dir, 'resources', 'symbolic_link'))
os.symlink(os.path.join(app_dir, 'resources', 'test_file2.txt'),
os.path.join(app_dir, 'resources', 'symbolic_link'))
new_applet = json.loads(run("dx build -f --json " + app_dir))
applet_describe = dxpy.api.applet_describe(new_applet["id"])
resources_file = applet_describe['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
id3 = dxpy.api.file_describe(resources_file)['id']
self.assertNotEqual(id2, id3) # Upload should have happened
new_applet = json.loads(run("dx build -f --ensure-upload --json " + app_dir))
applet_describe = dxpy.api.applet_describe(new_applet["id"])
resources_file = applet_describe['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
resources_file_describe = json.loads(run("dx describe --json " + resources_file))
id4 = resources_file_describe['id']
self.assertNotEqual(id3, id4) # Upload should have happened
self.assertNotIn('resource_bundle_checksum', resources_file_describe['properties'])
def test_archive_in_another_project(self):
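        # Building with --archive should archive the original applet and record
        # the replacement in its "replacedWith" property, even when a different
        # project is currently selected.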
app_spec = {
"name": "archive_in_another_project",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("archive_in_another_project", json.dumps(app_spec), "code.py")
with temporary_project("Temporary working project", select=True) as temp_project:
orig_applet = json.loads(run("dx build --json -d {p}: {app_dir}".format(
p=self.project, app_dir=app_dir)))["id"]
new_applet = json.loads(run("dx build --json --archive -d {p}: {app_dir}".format(
p=self.project, app_dir=app_dir)))["id"]
self.assertEqual(dxpy.DXApplet(orig_applet).describe(incl_properties=True)["properties"]["replacedWith"],
new_applet)
def test_categories_propagated_to_tags(self):
app_spec = {
"name": "categories_propagated_to_tags",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"tags": ["mytag"],
"categories": ["Import"]
}
app_dir = self.write_app_directory("categories_propagated_to_tags", json.dumps(app_spec), "code.py")
applet_id = json.loads(run("dx build --json -d categories1 " + app_dir))["id"]
self.assertEqual(set(dxpy.DXApplet(applet_id).describe()["tags"]),
set(["mytag", "Import"]))
app_spec2 = {
"name": "categories_propagated_to_tags",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0",
"categories": ["Import"]
}
app_dir2 = self.write_app_directory("categories_propagated_to_tags", json.dumps(app_spec2), "code.py")
applet_id2 = json.loads(run("dx build --json -d categories2 " + app_dir2))["id"]
self.assertEqual(set(dxpy.DXApplet(applet_id2).describe()["tags"]),
set(["Import"]))
def test_bundled_depends_reuse(self):
app_spec = {
"name": "bundled_depends_reuse",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("bundled_depends_reuse", json.dumps(app_spec), "code.py")
os.mkdir(os.path.join(app_dir, 'resources'))
with open(os.path.join(app_dir, 'resources', 'foo.txt'), 'w') as file_in_resources:
file_in_resources.write('foo\n')
first_applet = json.loads(run("dx build --json -d {p}:applet1 {app_dir}".format(
p=self.project, app_dir=app_dir)))["id"]
second_applet = json.loads(run("dx build --json -d {p}:applet2 {app_dir}".format(
p=self.project, app_dir=app_dir)))["id"]
# The second applet should reuse the bundle from the first.
# touch foo.txt
os.utime(os.path.join(app_dir, 'resources', 'foo.txt'), None)
# But the third applet should not share with the first two,
# because the resources have been touched in between.
third_applet = json.loads(run("dx build --json -d {p}:applet3 {app_dir}".format(
p=self.project, app_dir=app_dir)))["id"]
self.assertEquals(
dxpy.DXApplet(first_applet).describe()['runSpec']['bundledDepends'][0]['id']['$dnanexus_link'],
dxpy.DXApplet(second_applet).describe()['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
)
self.assertNotEqual(
dxpy.DXApplet(first_applet).describe()['runSpec']['bundledDepends'][0]['id']['$dnanexus_link'],
dxpy.DXApplet(third_applet).describe()['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
)
def test_bundled_depends_reuse_with_force(self):
app_spec = {
"name": "bundled_depends_reuse_with_force",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("bundled_depends_reuse_with_force", json.dumps(app_spec), "code.py")
os.mkdir(os.path.join(app_dir, 'resources'))
with open(os.path.join(app_dir, 'resources', 'foo.txt'), 'w') as file_in_resources:
file_in_resources.write('foo\n')
# For this to work, "dx build" must not remove the first applet
# until after the second applet has been built, since otherwise
# the first applet's bundled depends will be garbage collected
first_applet = json.loads(run("dx build --json -d {p}:applet1 {app_dir}".format(
p=self.project, app_dir=app_dir)))["id"]
first_bundled_resources = \
dxpy.DXApplet(first_applet).describe()['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
second_applet = json.loads(run("dx build --json -f -d {p}:applet1 {app_dir}".format(
p=self.project, app_dir=app_dir)))["id"]
second_bundled_resources = \
dxpy.DXApplet(second_applet).describe()['runSpec']['bundledDepends'][0]['id']['$dnanexus_link']
# Verify that the resources are shared...
self.assertEquals(first_bundled_resources, second_bundled_resources)
# ...and that the first applet has been removed
with self.assertSubprocessFailure(exit_code=3):
run("dx describe " + first_applet)
@unittest.skipUnless(testutil.TEST_ENV, 'skipping test that would clobber your local environment')
def test_build_without_context(self):
app_spec = {
"name": "applet_without_context",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("applet_without_context", json.dumps(app_spec), "code.py")
# Without project context, cannot create new object without
# project qualified path
with without_project_context():
with self.assertSubprocessFailure(stderr_regexp='expected the path to be qualified with a project',
exit_code=3):
run("dx build --json --destination foo " + app_dir)
# Can create object with explicit project qualifier
applet_describe = json.loads(run("dx build --json --destination " + self.project + ":foo " + app_dir))
self.assertEqual(applet_describe["name"], "foo")
def test_asset_depends_using_name(self):
# upload a tar.gz file and mark it hidden
asset_name = "test-asset.tar.gz"
asset_file = dxpy.upload_string("xxyyzz", project=self.project, hidden=True, wait_on_close=True,
name=asset_name)
# create a record with details to the hidden asset
record_name = "asset-record"
record_details = {"archiveFileId": {"$dnanexus_link": asset_file.get_id()}}
record_properties = {"version": "0.0.1"}
dxpy.new_dxrecord(project=self.project, types=["AssetBundle"], details=record_details, name=record_name,
properties=record_properties, close=True)
app_spec = {
"name": "asset_depends",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7",
"assetDepends": [{"name": record_name, "version": "0.0.1", "project": self.project}]
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("asset_depends", json.dumps(app_spec), "code.py")
asset_applet = json.loads(run("dx build --json {app_dir}".format(app_dir=app_dir)))["id"]
self.assertEquals(
dxpy.DXApplet(asset_applet).describe()['runSpec']['bundledDepends'][0],
{'id': {'$dnanexus_link': asset_file.get_id()}, 'name': asset_name}
)
def test_asset_depends_using_id(self):
# upload a tar.gz file and mark it hidden
asset_name = "test-asset.tar.gz"
asset_file = dxpy.upload_string("xxyyzz", project=self.project, hidden=True, wait_on_close=True,
name=asset_name)
# create a record with details to the hidden asset
record_name = "asset-record"
record_details = {"archiveFileId": {"$dnanexus_link": asset_file.get_id()}}
record_properties = {"version": "0.0.1"}
record = dxpy.new_dxrecord(project=self.project, types=["AssetBundle"], details=record_details,
name=record_name, properties=record_properties, close=True)
app_spec = {
"name": "asset_depends",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7",
"assetDepends": [{"id": record.get_id()}]
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("asset_depends", json.dumps(app_spec), "code.py")
asset_applet = json.loads(run("dx build --json {app_dir}".format(app_dir=app_dir)))["id"]
self.assertEquals(
dxpy.DXApplet(asset_applet).describe()['runSpec']['bundledDepends'][0],
{'id': {'$dnanexus_link': asset_file.get_id()}, 'name': asset_name}
)
def test_asset_depends_failure(self):
# upload a tar.gz file and mark it hidden
asset_name = "test-asset.tar.gz"
asset_file = dxpy.upload_string("xxyyzz", project=self.project, hidden=True, wait_on_close=True,
name=asset_name)
# create a record with details to the hidden asset
record_name = "asset-record"
record_details = {"archiveFileId": {"$dnanexus_link": asset_file.get_id()}}
record_properties = {"version": "0.0.1"}
dxpy.new_dxrecord(project=self.project, types=["AssetBundle"], details=record_details, name=record_name,
properties=record_properties, close=True)
app_spec = {
"name": "asset_depends",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7",
"assetDepends": [{"name": record_name, "version": "0.1.1", "project": self.project}]
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("asset_depends", json.dumps(app_spec), "code.py")
with self.assertSubprocessFailure(stderr_regexp="No asset bundle was found", exit_code=3):
run("dx build --json {app_dir}".format(app_dir=app_dir))
def test_asset_depends_malform_details(self):
# upload a tar.gz file and mark it hidden
asset_name = "test-asset.tar.gz"
asset_file = dxpy.upload_string("xxyyzz", project=self.project, hidden=True, wait_on_close=True,
name=asset_name)
# create a record with details to the hidden asset
record_name = "asset-record"
record_details = {"wrongField": {"$dnanexus_link": asset_file.get_id()}}
record_properties = {"version": "0.0.1"}
dxpy.new_dxrecord(project=self.project, types=["AssetBundle"], details=record_details, name=record_name,
properties=record_properties, close=True)
app_spec = {
"name": "asset_depends",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7",
"assetDepends": [{"name": record_name, "version": "0.0.1", "project": self.project}]
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("asset_depends", json.dumps(app_spec), "code.py")
with self.assertSubprocessFailure(stderr_regexp="The required field 'archiveFileId'", exit_code=3):
run("dx build --json {app_dir}".format(app_dir=app_dir))
def test_asset_depends_clone(self):
# create an asset in this project
asset_name = "test-asset.tar.gz"
asset_file = dxpy.upload_string("xxyyzz", project=self.project, hidden=True, wait_on_close=True,
name=asset_name)
# create a record with details to the hidden asset
record_name = "asset-record"
record_details = {"archiveFileId": {"$dnanexus_link": asset_file.get_id()}}
record_properties = {"version": "0.0.1"}
record = dxpy.new_dxrecord(project=self.project, types=["AssetBundle"], details=record_details,
name=record_name, properties=record_properties, close=True)
# create an applet with assetDepends in a different project
with temporary_project('test_select_project', select=True):
app_spec = {
"name": "asset_depends",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7",
"assetDepends": [{"id": record.get_id()}]
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("asset_depends", json.dumps(app_spec), "code.py")
run("dx build --json {app_dir}".format(app_dir=app_dir))
temp_record_id = run("dx ls {asset} --brief".format(asset=record_name)).strip()
self.assertEquals(temp_record_id, record.get_id())
def test_asset_depends_clone_app(self):
# upload a tar.gz file and mark it hidden
asset_name = "test-asset.tar.gz"
asset_file = dxpy.upload_string("xxyyzz", project=self.project, hidden=True, wait_on_close=True,
name=asset_name)
# create a record with details to the hidden asset
record_name = "asset-record"
record_details = {"archiveFileId": {"$dnanexus_link": asset_file.get_id()}}
record_properties = {"version": "0.0.1"}
dxpy.new_dxrecord(project=self.project, types=["AssetBundle"], details=record_details, name=record_name,
properties=record_properties, close=True)
app_spec = {
"name": "asset_depends",
"dxapi": "1.0.0",
"runSpec": {
"file": "code.py",
"interpreter": "python2.7",
"assetDepends": [{"name": record_name, "version": "0.0.1", "project": self.project}]
},
"inputSpec": [],
"outputSpec": [],
"version": "1.0.0"
}
app_dir = self.write_app_directory("asset_depends", json.dumps(app_spec), "code.py")
asset_applet = json.loads(run("dx build --json {app_dir}".format(app_dir=app_dir)))["id"]
# clone the applet to a different project and test that the hidden file is also cloned
with temporary_project('test_select_project', select=True) as temp_project:
dxpy.DXApplet(asset_applet, project=self.project).clone(temp_project.get_id())
# check that asset_file is also cloned to this project
temp_asset_fid = run("dx ls {asset} --brief".format(asset=asset_name)).strip()
self.assertEquals(temp_asset_fid, asset_file.get_id())
class TestDXGetExecutables(DXTestCase):
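    """Tests for "dx get" on applets."""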
def setUp(self):
super(TestDXGetExecutables, self).setUp()
self.temp_file_path = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.temp_file_path)
super(TestDXGetExecutables, self).tearDown()
def write_app_directory(self, app_name, dxapp_str, code_filename=None, code_content="\n"):
# Note: if called twice with the same app_name, will overwrite
# the dxapp.json and code file (if specified) but will not
# remove any other files that happened to be present
try:
os.mkdir(os.path.join(self.temp_file_path, app_name))
except OSError as e:
if e.errno != 17: # directory already exists
raise e
if dxapp_str is not None:
with open(os.path.join(self.temp_file_path, app_name, 'dxapp.json'), 'wb') as manifest:
manifest.write(dxapp_str.encode())
if code_filename:
with open(os.path.join(self.temp_file_path, app_name, code_filename), 'w') as code_file:
code_file.write(code_content)
return os.path.join(self.temp_file_path, app_name)
def test_get_applet(self):
        # TODO: not sure why self.assertEqual doesn't pass unless the strings
        # here are unicode strings.
app_spec = {
"name": "get_applet",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [{"name": "in1", "class": "file"}],
"outputSpec": [{"name": "out1", "class": "file"}],
"description": "Description\n",
"developerNotes": "Developer notes\n",
"types": ["Foo"],
"tags": ["bar"],
"properties": {"sample_id": "123456"},
"details": {"key1": "value1"},
}
# description and developerNotes should be un-inlined back to files
output_app_spec = dict((k, v) for (k, v) in app_spec.iteritems() if k not in ('description',
'developerNotes'))
output_app_spec["runSpec"] = {"file": "src/code.py", "interpreter": "python2.7"}
app_dir = self.write_app_directory("get_åpplet", json.dumps(app_spec), "code.py",
code_content="import os\n")
os.mkdir(os.path.join(app_dir, "resources"))
with open(os.path.join(app_dir, "resources", "resources_file"), 'w') as f:
f.write('content\n')
new_applet_id = json.loads(run("dx build --json " + app_dir))["id"]
with chdir(tempfile.mkdtemp()):
run("dx get " + new_applet_id)
self.assertTrue(os.path.exists("get_applet"))
self.assertTrue(os.path.exists(os.path.join("get_applet", "dxapp.json")))
output_json = json.load(open(os.path.join("get_applet", "dxapp.json")))
self.assertEqual(output_app_spec, output_json)
self.assertNotIn("bundledDepends", output_json["runSpec"])
self.assertEqual("Description\n", open(os.path.join("get_applet", "Readme.md")).read())
self.assertEqual("Developer notes\n",
open(os.path.join("get_applet", "Readme.developer.md")).read())
self.assertEqual("import os\n", open(os.path.join("get_applet", "src", "code.py")).read())
self.assertEqual("content\n",
open(os.path.join("get_applet", "resources", "resources_file")).read())
# Target applet does not exist
with self.assertSubprocessFailure(stderr_regexp='Unable to resolve', exit_code=3):
run("dx get path_does_not_exist")
# -o dest (dest does not exist yet)
run("dx get -o dest get_applet")
self.assertTrue(os.path.exists("dest"))
self.assertTrue(os.path.exists(os.path.join("dest", "dxapp.json")))
# -o -
with self.assertSubprocessFailure(stderr_regexp='cannot be dumped to stdout', exit_code=3):
run("dx get -o - " + new_applet_id)
# -o dir (such that dir/applet_name is empty)
os.mkdir('destdir')
os.mkdir(os.path.join('destdir', 'get_applet'))
run("dx get -o destdir get_applet") # Also tests getting by name
self.assertTrue(os.path.exists(os.path.join("destdir", "get_applet", "dxapp.json")))
# -o dir (such that dir/applet_name is not empty)
os.mkdir('destdir_nonempty')
os.mkdir(os.path.join('destdir_nonempty', 'get_applet'))
with open(os.path.join('destdir_nonempty', 'get_applet', 'myfile'), 'w') as f:
f.write('content')
get_applet_error = 'path "destdir_nonempty/get_applet" already exists'
with self.assertSubprocessFailure(stderr_regexp=get_applet_error, exit_code=3):
run("dx get -o destdir_nonempty get_applet")
# -o dir (such that dir/applet_name is a file)
os.mkdir('destdir_withfile')
with open(os.path.join('destdir_withfile', 'get_applet'), 'w') as f:
f.write('content')
with self.assertSubprocessFailure(stderr_regexp='already exists', exit_code=3):
run("dx get -o destdir_withfile get_applet")
# -o dir --overwrite (such that dir/applet_name is a file)
os.mkdir('destdir_withfile_force')
with open(os.path.join('destdir_withfile_force', 'get_applet'), 'w') as f:
f.write('content')
run("dx get --overwrite -o destdir_withfile_force get_applet")
self.assertTrue(os.path.exists(os.path.join("destdir_withfile_force", "get_applet",
"dxapp.json")))
# -o file
with open('destfile', 'w') as f:
f.write('content')
with self.assertSubprocessFailure(stderr_regexp='already exists', exit_code=3):
run("dx get -o destfile get_applet")
# -o file --overwrite
run("dx get --overwrite -o destfile get_applet")
self.assertTrue(os.path.exists("destfile"))
self.assertTrue(os.path.exists(os.path.join("destfile", "dxapp.json")))
def test_get_applet_omit_resources(self):
        # TODO: not sure why self.assertEqual doesn't pass unless the strings
        # here are unicode strings.
app_spec = {
"name": "get_applet",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [{"name": "in1", "class": "file"}],
"outputSpec": [{"name": "out1", "class": "file"}],
"description": "Description\n",
"developerNotes": "Developer notes\n",
"types": ["Foo"],
"tags": ["bar"],
"properties": {"sample_id": "123456"},
"details": {"key1": "value1"},
}
# description and developerNotes should be un-inlined back to files
output_app_spec = dict((k, v) for (k, v) in app_spec.iteritems() if k not in ('description',
'developerNotes'))
output_app_spec["runSpec"] = {"file": "src/code.py", "interpreter": "python2.7"}
app_dir = self.write_app_directory("get_åpplet", json.dumps(app_spec), "code.py",
code_content="import os\n")
os.mkdir(os.path.join(app_dir, "resources"))
with open(os.path.join(app_dir, "resources", "resources_file"), 'w') as f:
f.write('content\n')
new_applet_id = json.loads(run("dx build --json " + app_dir))["id"]
with chdir(tempfile.mkdtemp()):
run("dx get --omit-resources " + new_applet_id)
self.assertFalse(os.path.exists(os.path.join("get_applet", "resources")))
output_json = json.load(open(os.path.join("get_applet", "dxapp.json")))
self.assertIn("bundledDepends", output_json["runSpec"])
            seenResources = any(bd["name"] == "resources.tar.gz"
                                for bd in output_json["runSpec"]["bundledDepends"])
            self.assertTrue(seenResources)
def test_get_applet_field_cleanup(self):
        # TODO: not sure why self.assertEqual does not pass here unless the
        # strings are unicode strings
# When retrieving the applet, we'll get back an empty list for
# types, tags, etc. Those should not be written back to the
# dxapp.json so as not to pollute it.
app_spec = {
"name": "get_applet_field_cleanup",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [],
"outputSpec": []
}
output_app_spec = app_spec.copy()
output_app_spec["runSpec"] = {"file": "src/code.py", "interpreter": "python2.7"}
app_dir = self.write_app_directory("get_åpplet_field_cleanup", json.dumps(app_spec), "code.py",
code_content="import os\n")
os.mkdir(os.path.join(app_dir, "resources"))
with open(os.path.join(app_dir, "resources", "resources_file"), 'w') as f:
f.write('content\n')
new_applet_id = json.loads(run("dx build --json " + app_dir))["id"]
with chdir(tempfile.mkdtemp()):
run("dx get " + new_applet_id)
self.assertTrue(os.path.exists("get_applet_field_cleanup"))
self.assertTrue(os.path.exists(os.path.join("get_applet_field_cleanup", "dxapp.json")))
output_json = json.load(open(os.path.join("get_applet_field_cleanup", "dxapp.json")))
self.assertEqual(output_app_spec, output_json)
            self.assertFalse(os.path.exists(os.path.join("get_applet_field_cleanup", "Readme.md")))
            self.assertFalse(os.path.exists(os.path.join("get_applet_field_cleanup", "Readme.developer.md")))
def make_app(self, name, open_source=True, published=True, authorized_users=[]):
app_spec = {
"name": name,
"title": "Sir",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [{"name": "in1", "class": "file"}],
"outputSpec": [{"name": "out1", "class": "file"}],
"description": "Description\n",
"developerNotes": "Developer notes\n",
"authorizedUsers": authorized_users,
"openSource": open_source,
"version": "0.0.1"
}
# description and developerNotes should be un-inlined back to files
output_app_spec = dict((k, v)
for (k, v) in app_spec.iteritems()
if k not in ('description', 'developerNotes'))
output_app_spec["runSpec"] = {"file": "src/code.py", "interpreter": "python2.7"}
app_dir = self.write_app_directory(name,
json.dumps(app_spec),
"code.py",
code_content="import os\n")
os.mkdir(os.path.join(app_dir, "resources"))
with open(os.path.join(app_dir, "resources", "resources_file"), 'w') as f:
f.write('content\n')
if published:
build_cmd = "dx build --create-app --json --publish "
else:
build_cmd = "dx build --create-app --json "
app_json = json.loads(run(build_cmd + app_dir))
app_id = app_json["id"]
app_describe = dxpy.api.app_describe(app_id)
self.assertEqual(app_describe["class"], "app")
self.assertEqual(app_describe["version"], "0.0.1")
self.assertEqual(app_describe["name"], name)
if published:
self.assertTrue("published" in app_describe)
else:
self.assertFalse("published" in app_describe)
self.assertTrue(os.path.exists(os.path.join(app_dir, 'code.py')))
self.assertFalse(os.path.exists(os.path.join(app_dir, 'code.pyc')))
return [app_id, output_app_spec]
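    # For reference, `dx get` on an app or applet built by make_app() is expected
    # to recreate a source tree roughly like the following (inferred from the
    # assertions in assert_app_get_initialized below; illustrative only):
    #
    #     <name>/
    #         dxapp.json
    #         Readme.md              # un-inlined from "description"
    #         Readme.developer.md    # un-inlined from "developerNotes"
    #         src/code.py
    #         resources/resources_file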
def assert_app_get_initialized(self, name, app_spec):
self.assertTrue(os.path.exists(name))
self.assertTrue(os.path.exists(os.path.join(name,
"dxapp.json")))
output_json = json.load(open(os.path.join(name,
"dxapp.json")),
object_pairs_hook=collections.OrderedDict)
black_list = ['published']
if not app_spec['openSource']:
black_list.append('openSource')
if not app_spec['authorizedUsers']:
black_list.append('authorizedUsers')
filtered_app_spec = dict((k, v)
for (k, v) in app_spec.iteritems()
if k not in black_list)
self.assertDictSubsetOf(filtered_app_spec, output_json)
self.assertFileContentsEqualsString([name, "src",
"code.py"],
"import os\n")
self.assertFileContentsEqualsString([name,
"Readme.md"],
"Description\n")
self.assertFileContentsEqualsString([name, "Readme.developer.md"],
"Developer notes\n")
self.assertFileContentsEqualsString([name, "resources", "resources_file"],
"content\n")
def _test_cant_get_app(self, name, open_source, published, authorized_users):
[app_id, output_app_spec] = self.make_app(name,
open_source,
published,
authorized_users)
with chdir(tempfile.mkdtemp()):
# -o -
with self.assertSubprocessFailure(stderr_regexp='cannot be dumped to stdout', exit_code=3):
run("dx get -o - " + app_id)
# Target app does not exist
with self.assertSubprocessFailure(stderr_regexp='Unable to resolve', exit_code=3):
run("dx get path_does_not_exist")
def _test_get_app(self, name, open_source, published, authorized_users):
second = json.loads(os.environ['DXTEST_SECOND_USER'])
second_user_id = second['user']
[app_id, output_app_spec] = self.make_app(name,
open_source,
published,
authorized_users)
with chdir(tempfile.mkdtemp()):
run("dx get {}".format(app_id))
self.assert_app_get_initialized(name, output_app_spec)
        # If the app is openSource && published and the second user is an
        # authorized user, `dx get` as that user should succeed; otherwise it
        # should fail with a 401
with chdir(tempfile.mkdtemp()):
with without_project_context():
if second_user_id in authorized_users and open_source and published:
run('dx get {}'.format(app_id), env=as_second_user())
self.assert_app_get_initialized(name, output_app_spec)
else:
with self.assertSubprocessFailure(stderr_regexp='code 401', exit_code=3):
run('dx get {}'.format(app_id), env=as_second_user())
@unittest.skipUnless(testutil.TEST_ENV, 'skipping test that would clobber your local environment')
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that would create apps')
@unittest.skipUnless(testutil.TEST_MULTIPLE_USERS, 'skipping test that would require another user')
def test_get_app(self):
second = json.loads(os.environ['DXTEST_SECOND_USER'])
second_user_id = second['user']
authorized_users = [second_user_id]
self._test_cant_get_app("get_app_failure", True, True, authorized_users)
self._test_get_app("get_app_open_source_published", True, True, authorized_users)
self._test_get_app("get_app_open_source", True, False, authorized_users)
self._test_get_app("get_app_published", False, True, authorized_users)
self._test_get_app("get_app", False, False, authorized_users)
self._test_get_app("get_app_open_source_published_no_authusers", True, True, [])
self._test_get_app("get_app_published_no_authusers", False, True, [])
def test_get_app_by_name(self):
[app_id, output_app_spec] = self.make_app("cool_app_name", False, False, [])
with chdir(tempfile.mkdtemp()):
run("dx get app-cool_app_name")
self.assert_app_get_initialized("cool_app_name", output_app_spec)
with chdir(tempfile.mkdtemp()):
run("dx get app-cool_app_name/0.0.1")
self.assert_app_get_initialized("cool_app_name", output_app_spec)
with chdir(tempfile.mkdtemp()):
with self.assertSubprocessFailure(stderr_regexp="Could not find an app", exit_code=3):
run("dx get app-not_so_cool_app_name")
with self.assertSubprocessFailure(stderr_regexp="Could not find an app", exit_code=3):
run("dx get app-cool_app_name/1.0.0")
@unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that would create apps')
def test_get_app_omit_resources(self):
app_spec = {
"name": "get_app_open_source",
"title": "Sir",
"dxapi": "1.0.0",
"runSpec": {"file": "code.py", "interpreter": "python2.7"},
"inputSpec": [{"name": "in1", "class": "file"}],
"outputSpec": [{"name": "out1", "class": "file"}],
"description": "Description\n",
"developerNotes": "Developer notes\n",
"openSource": True,
"version": "0.0.1"
}
# description and developerNotes should be un-inlined back to files
output_app_spec = dict((k, v)
for (k, v) in app_spec.iteritems()
if k not in ('description', 'developerNotes'))
output_app_spec["runSpec"] = {"file": "src/code.py", "interpreter": "python2.7"}
app_dir = self.write_app_directory("get_app_open_source",
json.dumps(app_spec),
"code.py",
code_content="import os\n")
os.mkdir(os.path.join(app_dir, "resources"))
with open(os.path.join(app_dir, "resources", "resources_file"), 'w') as f:
f.write('content\n')
new_app_json = json.loads(run("dx build --create-app --json " + app_dir))
new_app_id = new_app_json["id"]
# app_describe = json.loads(run("dx describe --json " + new_app_json["id"]))
app_describe = dxpy.api.app_describe(new_app_json["id"])
self.assertEqual(app_describe["class"], "app")
self.assertEqual(app_describe["version"], "0.0.1")
self.assertEqual(app_describe["name"], "get_app_open_source")
self.assertFalse("published" in app_describe)
self.assertTrue(os.path.exists(os.path.join(app_dir, 'code.py')))
self.assertFalse(os.path.exists(os.path.join(app_dir, 'code.pyc')))
with chdir(tempfile.mkdtemp()):
run("dx get --omit-resources " + new_app_id)
self.assertFalse(os.path.exists(os.path.join("get_app_open_source", "resources")))
output_json = json.load(open(os.path.join("get_app_open_source", "dxapp.json")))
            self.assertIn("bundledDepends", output_json["runSpec"])
            seenResources = any(bd["name"] == "resources.tar.gz"
                                for bd in output_json["runSpec"]["bundledDepends"])
            self.assertTrue(seenResources)
class TestDXBuildReportHtml(unittest.TestCase):
js = "console.log('javascript');"
css = "body {background-color: green;}"
def setUp(self):
self.temp_file_path = tempfile.mkdtemp()
self.gif_base64 = "R0lGODdhAQABAIAAAAQCBAAAACwAAAAAAQABAAACAkQBADs="
gif_file = open("{}/img.gif".format(self.temp_file_path), "wb")
gif_file.write(base64.b64decode(self.gif_base64))
gif_file.close()
wiki_logo = "http://upload.wikimedia.org/wikipedia/en/thumb/8/80/Wikipedia-logo-v2.svg/200px-Wikipedia-logo-v2.svg.png"
script_file = open("{}/index.js".format(self.temp_file_path), "w")
script_file.write(self.js)
script_file.close()
css_file = open("{}/index.css".format(self.temp_file_path), "w")
css_file.write(self.css)
css_file.close()
html_file = open("{}/index.html".format(self.temp_file_path), "w")
html = "<html><head><link rel='stylesheet' href='index.css' type='text/css'/><script src='index.js'></script></head><body><a href='/'/><a href='/' target='_new'/><img src='img.gif'/><img src='{}'/></body></html>".format(wiki_logo)
html_file.write(html)
html_file.close()
self.proj_id = dxpy.api.project_new({'name': 'TestDXBuildReportHtml Project'})['id']
os.environ['DX_PROJECT_CONTEXT_ID'] = self.proj_id
def tearDown(self):
shutil.rmtree(self.temp_file_path)
dxpy.api.project_destroy(self.proj_id, {'terminateJobs': True})
def test_local_file(self):
run("dx-build-report-html {d}/index.html --local {d}/out.html".format(d=self.temp_file_path))
out_path = "{}/out.html".format(self.temp_file_path)
self.assertTrue(os.path.exists(out_path))
f = open(out_path, "r")
html = f.read()
f.close()
self.assertTrue(re.search(self.gif_base64, html))
self.assertEquals(len(re.split("src=\"data:image", html)), 3)
self.assertEquals(len(re.split("<img", html)), 3)
self.assertTrue(re.search("target=\"_top\"", html))
self.assertTrue(re.search("target=\"_new\"", html))
self.assertTrue(re.search("<style", html))
self.assertTrue(re.search(re.escape(self.css), html))
self.assertFalse(re.search("<link", html))
self.assertFalse(re.search("index.css", html))
self.assertTrue(re.search(re.escape(self.js), html))
self.assertFalse(re.search("index.js", html))
def test_image_only(self):
run("dx-build-report-html {d}/img.gif --local {d}/gif.html".format(d=self.temp_file_path))
out_path = "{}/gif.html".format(self.temp_file_path)
self.assertTrue(os.path.exists(out_path))
f = open(out_path, "r")
html = f.read()
f.close()
self.assertTrue(re.search("<img src=\"data:", html))
def test_remote_file(self):
report = json.loads(run("dx-build-report-html {d}/index.html --remote /html_report -w 47 -g 63".format(d=self.temp_file_path)))
fileId = report["fileIds"][0]
desc = json.loads(run("dx describe {record} --details --json".format(record=report["recordId"])))
self.assertEquals(desc["types"], ["Report", "HTMLReport"])
self.assertEquals(desc["name"], "html_report")
self.assertEquals(desc["details"]["files"][0]["$dnanexus_link"], fileId)
self.assertEquals(desc["details"]["width"], "47")
self.assertEquals(desc["details"]["height"], "63")
desc = json.loads(run("dx describe {file} --details --json".format(file=fileId)))
self.assertTrue(desc["hidden"])
self.assertEquals(desc["name"], "index.html")
run("dx rm {record} {file}".format(record=report["recordId"], file=fileId))
@unittest.skipUnless(testutil.TEST_GTABLE, 'skipping test that would create a GTable')
class TestDXBedToSpans(DXTestCase):
def setUp(self):
super(TestDXBedToSpans, self).setUp()
self.bed = """chr1\t127471196\t127472363\tPos1\t0\t+\t127471196\t127472363\t255,0,0
"""
self.expected_tsv = """chr:string\tlo:int32\thi:int32\tname:string\tscore:float\tstrand:string\tthick_start:int32\tthick_end:int32\titem_rgb:string\r
chr1\t127471196\t127472363\tPos1\t0\t+\t127471196\t127472363\t255,0,0\r
"""
self.tempdir = tempfile.mkdtemp()
self.genome_id = makeGenomeObject()
def tearDown(self):
shutil.rmtree(self.tempdir)
super(TestDXBedToSpans, self).tearDown()
def test_bed_to_spans_conversion(self):
tempfile1 = os.path.join(self.tempdir, 'test1.bed')
with open(tempfile1, 'w') as f:
f.write(self.bed)
output = json.loads(
run('dx-bed-to-spans {f} {g}'.format(f=tempfile1, g=self.genome_id)).strip().split('\n')[-1]
)
table_id = output[0]['$dnanexus_link']
gtable_describe = dxpy.api.gtable_describe(table_id, {})
self.assertEquals(gtable_describe['name'], 'test1.bed')
self.assertTrue('Spans' in gtable_describe['types'])
run('dx wait {g}'.format(g=table_id))
self.assertEquals(run('dx export tsv -o - {g}'.format(g=table_id)), self.expected_tsv)
def test_bed_spans_roundtrip(self):
round_tripped_bed = "chr1\t127471196\t127472363\tPos1\t0\t+\t127471196\t127472363\t255,0,0\n"
tempfile1 = os.path.join(self.tempdir, 'test1.bed')
with open(tempfile1, 'w') as f:
f.write(self.bed)
output = json.loads(
run('dx-bed-to-spans {f} {g}'.format(f=tempfile1, g=self.genome_id)).strip().split('\n')[-1]
)
table_id = output[0]['$dnanexus_link']
run('dx wait {g}'.format(g=table_id))
run('dx-spans-to-bed --output {o} {g}'.format(o=os.path.join(self.tempdir, 'roundtrip.bed'), g=table_id))
self.assertEquals(open(os.path.join(self.tempdir, 'roundtrip.bed')).read(), round_tripped_bed)
@unittest.skipUnless(testutil.TEST_GTABLE, 'skipping test that would create a GTable')
class TestDXBedToGenes(DXTestCase):
def setUp(self):
super(TestDXBedToGenes, self).setUp()
self.bed = """chr1\t66999824\t67210768\tNM_032291\t0\t+\t67000041\t67208778\t0\t3\t227,64,25,\t0,91705,98928,
"""
self.expected_tsv = """chr:string\tlo:int32\thi:int32\tname:string\tspan_id:int32\ttype:string\tstrand:string\tis_coding:boolean\tparent_id:int32\tframe:int16\tdescription:string\r
chr1\t66999824\t67000041\tNM_032291\t1\t5' UTR\t+\tFalse\t0\t-1\t\r
chr1\t66999824\t67210768\tNM_032291\t0\ttranscript\t+\tFalse\t-1\t-1\t\r
chr1\t67000041\t67000051\tNM_032291\t2\tCDS\t+\tTrue\t0\t-1\t\r
chr1\t67091529\t67091593\tNM_032291\t3\tCDS\t+\tTrue\t0\t-1\t\r
chr1\t67098752\t67098777\tNM_032291\t4\tCDS\t+\tTrue\t0\t-1\t\r
"""
self.tempdir = tempfile.mkdtemp()
self.genome_id = makeGenomeObject()
def tearDown(self):
shutil.rmtree(self.tempdir)
super(TestDXBedToGenes, self).tearDown()
def test_bed_to_genes_conversion(self):
tempfile1 = os.path.join(self.tempdir, 'test1.bed')
with open(tempfile1, 'w') as f:
f.write(self.bed)
output = json.loads(run('dx-bed-to-spans {f} {g}'.format(f=tempfile1, g=self.genome_id)).strip().split('\n')[-1])
table_id = output[0]['$dnanexus_link']
run('dx wait {g}'.format(g=table_id))
self.assertTrue('Genes' in dxpy.api.gtable_describe(table_id, {})['types'])
self.assertEquals(run('dx export tsv -o - {g}'.format(g=table_id)), self.expected_tsv)
@unittest.skipUnless(testutil.TEST_GTABLE, 'skipping test that would create a GTable')
class TestDXFastQToReads(DXTestCase):
def setUp(self):
super(TestDXFastQToReads, self).setUp()
self.fastq = """@HWI-ST689:7:1101:1246:1986#0/1
NGGGGCCTAATTAAACTAAAGAGCTTCTGCACAGCAAAAGAAACTATGAACAGAGCAAACAGACAGAACAGGAGAAGATATTTGCAAATTATGCATCCAAC
+HWI-ST689:7:1101:1246:1986#0/1
BP\ccccceegggh]ghhhhhhhhhhhhhhhhhhhghefgedfghhhhhhhhh`eghhehhhfgfhhfggegbcdaabbbdddcbcZ`bb_bbbdcbbbb]
@HWI-ST689:7:1101:1477:1962#0/1
NGTAACTCCTCTTTGCAACACCACAGCCATCGCCCCCTACCTCCTTGCCAATCCCAGGCTCCTCTCCTGATGGTAACATTACTTTTCTCCTACTCTAAGGT
+HWI-ST689:7:1101:1477:1962#0/1
BP\ccceegfgggiiiifihhiihhihidghihfhfiiiiiiiiiihaffdghhgcgdbggfeeeedddR]bZLTZZ]bc`bccdcccccb`b`Y_BBBBB
"""
self.expected_tsv = """name:string\tsequence:string\tquality:string\r
HWI-ST689:7:1101:1246:1986#0/1\tNGGGGCCTAATTAAACTAAAGAGCTTCTGCACAGCAAAAGAAACTATGAACAGAGCAAACAGACAGAACAGGAGAAGATATTTGCAAATTATGCATCCAAC\t#1=DDDDDFFHHHI>HIIIIIIIIIIIIIIIIIIIHIFGHFEGHIIIIIIIIIAFHIIFIIIGHGIIGHHFHCDEBBCCCEEEDCD;ACC@CCCEDCCCC>\r
HWI-ST689:7:1101:1477:1962#0/1\tNGTAACTCCTCTTTGCAACACCACAGCCATCGCCCCCTACCTCCTTGCCAATCCCAGGCTCCTCTCCTGATGGTAACATTACTTTTCTCCTACTCTAAGGT\t#1=DDDFFHGHHHJJJJGJIIJJIIJIJEHIJIGIGJJJJJJJJJJIBGGEHIIHDHECHHGFFFFEEE3>C;-5;;>CDACDDEDDDDDCACA:@#####\r
"""
self.tempdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tempdir)
super(TestDXFastQToReads, self).tearDown()
def test_fastq_to_reads_conversion(self):
tempfile1 = os.path.join(self.tempdir, 'test1.fq')
with open(tempfile1, 'w') as f:
f.write(self.fastq)
output = json.loads(run('dx-fastq-to-reads {f}'.format(f=tempfile1)).strip().split('\n')[-1])
table_id = output['table_id']
run('dx wait {g}'.format(g=table_id))
self.assertEquals(run('dx export tsv -o - {g}'.format(g=table_id)), self.expected_tsv)
def test_fastq_reads_roundtrip(self):
round_tripped_fastq = """@HWI-ST689:7:1101:1246:1986#0/1
NGGGGCCTAATTAAACTAAAGAGCTTCTGCACAGCAAAAGAAACTATGAACAGAGCAAACAGACAGAACAGGAGAAGATATTTGCAAATTATGCATCCAAC
+
#1=DDDDDFFHHHI>HIIIIIIIIIIIIIIIIIIIHIFGHFEGHIIIIIIIIIAFHIIFIIIGHGIIGHHFHCDEBBCCCEEEDCD;ACC@CCCEDCCCC>
@HWI-ST689:7:1101:1477:1962#0/1
NGTAACTCCTCTTTGCAACACCACAGCCATCGCCCCCTACCTCCTTGCCAATCCCAGGCTCCTCTCCTGATGGTAACATTACTTTTCTCCTACTCTAAGGT
+
#1=DDDFFHGHHHJJJJGJIIJJIIJIJEHIJIGIGJJJJJJJJJJIBGGEHIIHDHECHHGFFFFEEE3>C;-5;;>CDACDDEDDDDDCACA:@#####
"""
tempfile2 = os.path.join(self.tempdir, 'test2.fq')
with open(tempfile2, 'w') as f:
f.write(self.fastq)
output = json.loads(run('dx-fastq-to-reads {f}'.format(f=tempfile2)).strip().split('\n')[-1])
table_id = output['table_id']
run('dx wait {g}'.format(g=table_id))
run('dx-reads-to-fastq --output {o} {g}'.format(o=os.path.join(self.tempdir, 'roundtrip.fq'), g=table_id))
self.assertEquals(open(os.path.join(self.tempdir, 'roundtrip.fq')).read(), round_tripped_fastq)
@unittest.skipUnless(testutil.TEST_GTABLE, 'skipping test that would create a GTable')
class TestDXGtfToGenes(DXTestCase):
def setUp(self):
super(TestDXGtfToGenes, self).setUp()
self.expected_gtf = """chr1\t.\texon\t101\t200\t.\t+\t.\tgene_id ""; transcript_id "mytranscript-noncoding"
chr1\t.\tCDS\t151\t200\t.\t+\t0\tgene_id "mygene-coding"; transcript_id "mytranscript-coding"
"""
self.tempdir = tempfile.mkdtemp()
self.genome_id = makeGenomeObject()
def tearDown(self):
shutil.rmtree(self.tempdir)
super(TestDXGtfToGenes, self).tearDown()
def test_genes_to_gtf_conversion(self):
genes_table = dxpy.new_dxgtable([
dxpy.DXGTable.make_column_desc("type", "string"),
dxpy.DXGTable.make_column_desc("span_id", "int64"),
dxpy.DXGTable.make_column_desc("name", "string"),
dxpy.DXGTable.make_column_desc("strand", "string"),
dxpy.DXGTable.make_column_desc("is_coding", "boolean"),
dxpy.DXGTable.make_column_desc("parent_id", "int64"),
dxpy.DXGTable.make_column_desc("frame", "int64"),
dxpy.DXGTable.make_column_desc("description", "string"),
dxpy.DXGTable.make_column_desc("chr", "string"),
dxpy.DXGTable.make_column_desc("lo", "int64"),
dxpy.DXGTable.make_column_desc("hi", "int64")
])
genes_table.add_rows(data=[
["transcript", 5, "mytranscript-noncoding", "+", False, -1, -1, "my test transcript", "chr1", 100, 200],
["exon", 6, "", "+", False, 5, -1, "", "chr1", 100, 200],
["gene", 54, "mygene-coding", "+", True, -1, -1, "my test gene", "chr1", 150, 200],
["transcript", 55, "mytranscript-coding", "+", True, 54, -1, "my test transcript", "chr1", 150, 200],
["CDS", 75, "", "+", True, 55, 0, "", "chr1", 150, 200]
])
genes_table.set_details({
"original_contigset": {"$dnanexus_link": self.genome_id}
})
genes_table.close(block=True)
self.assertEquals(run('dx-genes-to-gtf {g}'.format(g=genes_table.get_id())),
self.expected_gtf)
@unittest.skipUnless(testutil.TEST_GTABLE, 'skipping test that would create a GTable')
class TestDXSamToMappings(DXTestCase):
def setUp(self):
super(TestDXSamToMappings, self).setUp()
self.tempdir = tempfile.mkdtemp()
self.expected_sam = """@SQ\tSN:chr1\tLN:249250621
@RG\tID:0\tSM:Sample_0
FOO.12345678\t0\t1\t54932369\t60\t7M1D93M\t*\t0\t0\tTAATAAGGTTGTTGTTGTTGTT\t1:1ADDDACFHA?HGFGIIE+<\tMD:Z:1A5^A93\tRG:Z:0
"""
self.genome_id = makeGenomeObject()
def tearDown(self):
shutil.rmtree(self.tempdir)
super(TestDXSamToMappings, self).tearDown()
def test_mappings_to_sam_conversion(self):
mappings_table = dxpy.new_dxgtable([
dxpy.DXGTable.make_column_desc("sequence", "string"),
dxpy.DXGTable.make_column_desc("quality", "string"),
dxpy.DXGTable.make_column_desc("name", "string"),
dxpy.DXGTable.make_column_desc("status", "string"),
dxpy.DXGTable.make_column_desc("chr", "string"),
dxpy.DXGTable.make_column_desc("lo", "int32"),
dxpy.DXGTable.make_column_desc("hi", "int32"),
dxpy.DXGTable.make_column_desc("negative_strand", "boolean"),
dxpy.DXGTable.make_column_desc("error_probability", "uint8"),
dxpy.DXGTable.make_column_desc("qc_fail", "boolean"),
dxpy.DXGTable.make_column_desc("duplicate", "boolean"),
dxpy.DXGTable.make_column_desc("cigar", "string"),
dxpy.DXGTable.make_column_desc("template_id", "int64"),
dxpy.DXGTable.make_column_desc("read_group", "uint16"),
dxpy.DXGTable.make_column_desc("sam_field_MD", "string"),
dxpy.DXGTable.make_column_desc("sam_field_XN", "int32")
])
mappings_table.add_rows(data=[[
"TAATAAGGTTGTTGTTGTTGTT",
"1:1ADDDACFHA?HGFGIIE+<",
"FOO.12345678",
"PRIMARY",
"1",
54932368,
54932390,
False,
60,
False,
False,
"7M1D93M",
289090731,
0,
"1A5^A93",
-2147483648
]], part=1)
mappings_table.set_details({
"read_groups": [
{"num_singles": 1, "num_pairs": 0}
],
"original_contigset": {"$dnanexus_link": self.genome_id}
})
mappings_table.close(block=True)
self.assertEquals(run('dx-mappings-to-sam {g}'.format(g=mappings_table.get_id())),
self.expected_sam)
@unittest.skipUnless(testutil.TEST_TCSH, 'skipping tests that require tcsh to be installed')
class TestTcshEnvironment(unittest.TestCase):
def test_tcsh_dash_c(self):
# tcsh -c doesn't set $_, or provide any other way for us to determine the source directory, so
# "source environment" only works from DNANEXUS_HOME
run('cd $DNANEXUS_HOME && env - HOME=$HOME PATH=/usr/local/bin:/usr/bin:/bin tcsh -c "source /etc/csh.cshrc && source /etc/csh.login && source $DNANEXUS_HOME/environment && dx --help"')
run('cd $DNANEXUS_HOME && env - HOME=$HOME PATH=/usr/local/bin:/usr/bin:/bin tcsh -c "source /etc/csh.cshrc && source /etc/csh.login && source $DNANEXUS_HOME/environment.csh && dx --help"')
def test_tcsh_source_environment(self):
tcsh = pexpect.spawn("env - HOME=$HOME PATH=/usr/local/bin:/usr/bin:/bin tcsh")
tcsh.logfile = sys.stdout
tcsh.setwinsize(20, 90)
tcsh.sendline("source /etc/csh.cshrc")
tcsh.sendline("source /etc/csh.login")
tcsh.sendline("dx")
tcsh.expect("Command not found")
tcsh.sendline("source ../../../environment")
tcsh.sendline("dx")
tcsh.expect("dx is a command-line client")
class TestDXScripts(DXTestCase):
def test_minimal_invocation(self):
# For dxpy scripts that have no other tests, these dummy calls
# ensure that the coverage report is aware of them (instead of
# excluding them altogether from the report, which artificially
# inflates our %covered).
#
# This is a hack and obviously it would be preferable to figure
# out why the coverage generator sometimes likes to include
# these files and sometimes likes to exclude them.
run('dx-gff-to-genes -h')
run('dx-gtf-to-genes -h')
run('dx-variants-to-vcf -h')
run('dx-genes-to-gff -h')
run('dx-genes-to-gtf -h')
run('dx-mappings-to-fastq -h')
run('dx-build-applet -h')
class TestDXCp(DXTestCase):
@classmethod
def setUpClass(cls):
# setup two projects
cls.proj_id1 = create_project()
cls.proj_id2 = create_project()
cls.counter = 1
@classmethod
def tearDownClass(cls):
rm_project(cls.proj_id1)
rm_project(cls.proj_id2)
@classmethod
def gen_uniq_fname(cls):
cls.counter += 1
return "file_{}".format(cls.counter)
# Make sure a folder (path) has the same contents in the two projects.
# Note: the contents of the folders are not listed recursively.
def verify_folders_are_equal(self, path):
listing_proj1 = list_folder(self.proj_id1, path)
listing_proj2 = list_folder(self.proj_id2, path)
self.assertEqual(listing_proj1, listing_proj2)
def verify_file_ids_are_equal(self, path1, path2=None):
if path2 is None:
path2 = path1
        listing_proj1 = run("dx ls {proj}:/{path} --brief".format(proj=self.proj_id1, path=path1)).strip()
        listing_proj2 = run("dx ls {proj}:/{path} --brief".format(proj=self.proj_id2, path=path2)).strip()
self.assertEqual(listing_proj1, listing_proj2)
# create new file with the same name in the target
# dx cp proj-1111:/file-1111 proj-2222:/
def test_file_with_same_name(self):
create_folder_in_project(self.proj_id1, "/earthsea")
create_folder_in_project(self.proj_id2, "/earthsea")
file_id = create_file_in_project(self.gen_uniq_fname(), self.proj_id1, folder="/earthsea")
run("dx cp {p1}:/earthsea/{f} {p2}:/earthsea/".format(f=file_id, p1=self.proj_id1, p2=self.proj_id2))
self.verify_folders_are_equal("/earthsea")
# copy and rename
# dx cp proj-1111:/file-1111 proj-2222:/file-2222
def test_cp_rename(self):
basename = self.gen_uniq_fname()
file_id = create_file_in_project(basename, self.proj_id1)
run("dx cp {p1}:/{f1} {p2}:/{f2}".format(f1=basename, f2="AAA.txt",
p1=self.proj_id1, p2=self.proj_id2))
self.verify_file_ids_are_equal(basename, path2="AAA.txt")
# multiple arguments
# dx cp proj-1111:/file-1111 proj-2222:/file-2222 proj-3333:/
def test_multiple_args(self):
fname1 = self.gen_uniq_fname()
fname2 = self.gen_uniq_fname()
fname3 = self.gen_uniq_fname()
create_file_in_project(fname1, self.proj_id1)
create_file_in_project(fname2, self.proj_id1)
create_file_in_project(fname3, self.proj_id1)
run("dx cp {p1}:/{f1} {p1}:/{f2} {p1}:/{f3} {p2}:/".
format(f1=fname1, f2=fname2, f3=fname3, p1=self.proj_id1, p2=self.proj_id2))
self.verify_file_ids_are_equal(fname1)
self.verify_file_ids_are_equal(fname2)
self.verify_file_ids_are_equal(fname3)
# copy an entire directory
def test_cp_dir(self):
create_folder_in_project(self.proj_id1, "/foo")
run("dx cp {p1}:/foo {p2}:/".format(p1=self.proj_id1, p2=self.proj_id2))
self.verify_folders_are_equal("/foo")
# Weird error code:
# This part makes sense:
# 'InvalidState: If cloned, a folder would conflict with the route of an existing folder.'
# This does not:
# 'Successfully cloned from project: None, code 422'
#
def test_copy_empty_folder_on_existing_folder(self):
create_folder_in_project(self.proj_id1, "/bar")
create_folder_in_project(self.proj_id2, "/bar")
with self.assertSubprocessFailure(stderr_regexp='If cloned, a folder would conflict', exit_code=3):
run("dx cp {p1}:/bar {p2}:/".format(p1=self.proj_id1, p2=self.proj_id2))
self.verify_folders_are_equal("/bar")
def test_copy_folder_on_existing_folder(self):
create_folder_in_project(self.proj_id1, "/baz")
create_file_in_project(self.gen_uniq_fname(), self.proj_id1, folder="/baz")
run("dx cp {p1}:/baz {p2}:/".format(p1=self.proj_id1, p2=self.proj_id2))
with self.assertSubprocessFailure(stderr_regexp='If cloned, a folder would conflict', exit_code=3):
run("dx cp {p1}:/baz {p2}:/".format(p1=self.proj_id1, p2=self.proj_id2))
self.verify_folders_are_equal("/baz")
# PTFM-13569: This used to give a weird error message, like so:
# dx cp project-BV80zyQ0Ffb7fj64v03fffqX:/foo/XX.txt project-BV80vzQ0P9vk785K1GgvfZKv:/foo/XX.txt
# The following objects already existed in the destination container and were not copied:
# [
# "
# f
# l
# ...
def test_copy_overwrite(self):
fname1 = self.gen_uniq_fname()
file_id1 = create_file_in_project(fname1, self.proj_id1)
run("dx cp {p1}:/{f} {p2}:/{f}".format(p1=self.proj_id1, f=fname1, p2=self.proj_id2))
output = run("dx cp {p1}:/{f} {p2}:/{f}".format(p1=self.proj_id1,
f=fname1, p2=self.proj_id2))
self.assertIn("destination", output)
self.assertIn("already existed", output)
self.assertIn(file_id1, output)
# 'dx cp' used to give a confusing error message when source file is not found.
# Check that this has been fixed
def test_error_msg_for_nonexistent_folder(self):
fname1 = self.gen_uniq_fname()
create_file_in_project(fname1, self.proj_id1)
# The file {proj_id1}:/{f} exists, however, {proj_id1}/{f} does not
expected_err_msg = "ResolutionError: The specified folder could not be found in {p}".format(p=self.project)
with self.assertSubprocessFailure(stderr_regexp=expected_err_msg, exit_code=3):
run("dx cp {p1}/{f} {p2}:/".format(p1=self.proj_id1, f=fname1, p2=self.proj_id2))
with self.assertSubprocessFailure(stderr_regexp="The destination folder does not exist",
exit_code=3):
run("dx cp {p1}:/{f} {p2}:/xxx/yyy/z.txt".format(p1=self.proj_id1, f=fname1, p2=self.proj_id2))
with self.assertSubprocessFailure(
stderr_regexp="source path and the destination path resolved to the same project",
exit_code=3):
run("dx cp {p1}:/{f} {p1}:/".format(p1=self.proj_id1, f=fname1))
@unittest.skip("PTFM-11906 This doesn't work yet.")
def test_file_in_other_project(self):
''' Copy a file-id, where the file is not located in the default project-id.
Main idea: create projects A and B. Create a file in A, and copy it to project B,
-without- specifying a source project.
This could work, with some enhancements to the 'dx cp' implementation.
'''
file_id = create_file_in_project(self.gen_uniq_fname(), self.proj_id1)
run('dx cp ' + file_id + ' ' + self.proj_id2)
@unittest.skipUnless(testutil.TEST_ENV,
'skipping test that would clobber your local environment')
# This will start working, once PTFM-11906 is addressed. The issue is
# that you must specify a project when copying a file. In theory this
# can be addressed, because the project can be found, given the file-id.
def test_no_env(self):
''' Try to copy a file when the context is empty.
'''
# create a file in the current project
# -- how do we get the current project id?
file_id = create_file_in_project(self.gen_uniq_fname(), self.project)
# Copy the file to a new project.
# This does not currently work, because the context is not set.
proj_id = create_project()
with self.assertSubprocessFailure(stderr_regexp='project must be specified or a current project set',
exit_code=3), without_project_context():
run('dx cp ' + file_id + ' ' + proj_id)
#cleanup
rm_project(proj_id)
if __name__ == '__main__':
if 'DXTEST_FULL' not in os.environ:
sys.stderr.write('WARNING: env var DXTEST_FULL is not set; tests that create apps or run jobs will not be run\n')
unittest.main()
|
johnwallace123/dx-toolkit
|
src/python/test/test_dxclient.py
|
Python
|
apache-2.0
| 383,723
|
import config_file_manager
# get all roles
# returns a dictionary
def get_roles():
# load roles from ./configuration/roles/main.yml
return config_file_manager.get_roles_config()
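# Illustrative only: the exact schema comes from ./configuration/roles/main.yml,
# but the commented-out parse_roles() below and get_role_playbooks() assume a
# mapping of role name -> list of playbook names, e.g.:
#
#     {
#         "webserver": ["install_nginx", "deploy_site"],
#         "database": ["install_postgres"],
#     }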
# def parse_roles(roles):
# for role, playbooks in roles.iteritems():
# print "Applying role" + role + " since only one found!"
# for playbook in playbooks:
# # apply playbook
# print playbook
# print config_file_manager.get_playbook(playbook)
def get_role_playbooks(role_name):
    # look up the playbooks configured for the given role in main.yml
    roles = get_roles()
    if role_name in roles:
        return [config_file_manager.get_playbook(playbook)
                for playbook in roles[role_name]]
    # provided role name not found -> nothing to return
    return []
if __name__ == '__main__':
get_roles()
|
dkoudlo/py-manage-server
|
py-manage-server/role_manager.py
|
Python
|
apache-2.0
| 769
|
"""Utility command to filter table rows based on the value of a column and a provided regex."""
import unsync
@unsync.command()
@unsync.option('--source', '-s', required=True, help='The source data table.')
@unsync.option('--filter', '-f', type=unsync.Tuple([str, str]), multiple=True, required=True, help='A tuple of values, first is the column to filter on and second is the regex to use.')
@unsync.option('--destination', '-d', help='The destination data table for matched rows. If blank will overwrite the source table.')
def search(data, source, filter, destination):
"""Include rows where the specified column matches the given regex."""
if not destination:
destination = source
s = data.get(source)
for column, pattern in filter:
s = s.search(column, pattern)
data.set(destination, s)
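# Illustrative invocation (table names, column name and pattern are hypothetical;
# the exact entry point depends on how the surrounding Unsync pipeline is run):
#
#     unsync ... search -s students -f email '@example\.org$' -d matched_students
#
# Because each --filter tuple is applied to the result of the previous one,
# multiple filters behave as a logical AND across columns.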
|
PGower/Unsync
|
unsync_petl/unsync_petl/search.py
|
Python
|
apache-2.0
| 830
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from http import HTTPStatus
from watcher.decision_engine import rpcapi as deapi
from watcher import objects
from watcher.tests.api import base as api_base
from watcher.tests.objects import utils as obj_utils
class TestPost(api_base.FunctionalTest):
def setUp(self):
super(TestPost, self).setUp()
obj_utils.create_test_goal(self.context)
obj_utils.create_test_strategy(self.context)
obj_utils.create_test_audit_template(self.context)
@mock.patch.object(deapi.DecisionEngineAPI, 'trigger_audit')
def test_trigger_audit(self, mock_trigger_audit):
audit = obj_utils.create_test_audit(
self.context,
audit_type=objects.audit.AuditType.EVENT.value)
response = self.post_json(
'/webhooks/%s' % audit['uuid'], {},
headers={'OpenStack-API-Version': 'infra-optim 1.4'})
self.assertEqual(HTTPStatus.ACCEPTED, response.status_int)
mock_trigger_audit.assert_called_once_with(
mock.ANY, audit['uuid'])
def test_trigger_audit_with_no_audit(self):
response = self.post_json(
'/webhooks/no-audit', {},
headers={'OpenStack-API-Version': 'infra-optim 1.4'},
expect_errors=True)
self.assertEqual(HTTPStatus.NOT_FOUND, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_trigger_audit_with_not_allowed_audittype(self):
audit = obj_utils.create_test_audit(self.context)
response = self.post_json(
'/webhooks/%s' % audit['uuid'], {},
headers={'OpenStack-API-Version': 'infra-optim 1.4'},
expect_errors=True)
self.assertEqual(HTTPStatus.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_trigger_audit_with_not_allowed_audit_state(self):
audit = obj_utils.create_test_audit(
self.context,
audit_type=objects.audit.AuditType.EVENT.value,
state=objects.audit.State.FAILED)
response = self.post_json(
'/webhooks/%s' % audit['uuid'], {},
headers={'OpenStack-API-Version': 'infra-optim 1.4'},
expect_errors=True)
self.assertEqual(HTTPStatus.BAD_REQUEST, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
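# For orientation, the cases above exercise the audit webhook trigger endpoint.
# The equivalent raw request would look roughly like (illustrative only):
#
#     POST /webhooks/<audit-uuid>
#     OpenStack-API-Version: infra-optim 1.4
#
# and is expected to return 202 ACCEPTED only for EVENT-type audits in a
# triggerable state; an unknown audit yields 404 and a wrong type or state 400.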
|
openstack/watcher
|
watcher/tests/api/v1/test_webhooks.py
|
Python
|
apache-2.0
| 3,148
|
#!/usr/bin/python
import sys
import libvhd
def vhd_create(filename, size, disk_type=None, create_flags=None):
"""Usage: <filename> <size> [<disk_type>] [<create_flags>]"""
return libvhd.vhd_create(filename, size, disk_type=disk_type,
create_flags=create_flags)
def vhd_convert_from_raw(src_filename, dest_filename, disk_type=None,
extra=None):
"""Usage: <src_filename> <dest_filename> [<"disk_type">] [<"sparse">]"""
if disk_type == "sparse" and extra is None:
extra = disk_type
disk_type = None
if extra == "sparse":
sparse = True
else:
sparse = False
return libvhd.vhd_convert_from_raw(src_filename, dest_filename,
disk_type=disk_type,
sparse=sparse)
def vhd_convert_to_raw(src_filename, dest_filename, extra=None):
"""Usage: <src_filename> <dest_filename> [<"sparse">]"""
if extra == 'sparse':
sparse = True
else:
sparse = False
return libvhd.vhd_convert_to_raw(src_filename, dest_filename,
sparse=sparse)
def vhd_print(filename):
"""Usage: <filename>"""
vhd = libvhd.VHD(filename, 'rdonly')
footer = vhd.get_footer()
for name, val in footer.iteritems():
print "%s: %s" % (name, val)
if __name__ == "__main__":
args = sys.argv[:]
prog_name = args.pop(0)
commands = {
'create': vhd_create,
'raw_to_vhd': vhd_convert_from_raw,
'vhd_to_raw': vhd_convert_to_raw,
'print': vhd_print}
def _get_usage(fn):
doc = fn.__doc__
for x in doc.split('\n'):
if x.startswith("Usage: "):
return x[7:]
return ""
def _usage(cmd=None):
if cmd is not None:
print "Usage:"
cmds = {cmd: commands[cmd]}
else:
print "Usage: %s <command> [<args>]" % prog_name
print "Valid commands:"
cmds = commands
for cmd, fn in cmds.iteritems():
print " %s [<options>] %s" % (cmd, _get_usage(fn))
if len(args) == 0:
_usage()
sys.exit(0)
cmd = args.pop(0)
try:
cmd_fn = commands[cmd]
except KeyError:
print "Error: Invalid command"
_usage()
sys.exit(1)
try:
cmd_fn(*args)
except TypeError, e:
error_str = str(e)
if (error_str.find(cmd_fn.__name__) > -1 and
error_str.find('takes') > -1):
print "Error: Invalid arguments"
_usage(cmd)
sys.exit(1)
raise
except libvhd.VHDException, e:
_usage(cmd)
print "Error: %s" % str(e)
sys.exit(1)
sys.exit(0)
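# Example invocations (illustrative; file names and sizes are made up, and the
# accepted disk_type/create_flags values depend on the libvhd bindings):
#
#     python vhdutil.py create disk.vhd 1073741824
#     python vhdutil.py raw_to_vhd disk.img disk.vhd sparse
#     python vhdutil.py vhd_to_raw disk.vhd disk.img sparse
#     python vhdutil.py print disk.vhd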
|
comstud/python-vhdutil
|
src/vhdutil.py
|
Python
|
apache-2.0
| 2,708
|
import abc
from probabilities.base import probability_distribution
from probabilities.base import probability
from tests import test_sub_class
class BaseProbabilityDistributionTest(test_sub_class.BaseSubClassTest):
def setUp(self):
super().setUp()
self.super_classes.add(probability_distribution.ProbabilityDistribution)
@abc.abstractproperty
def _probability_distribution_class(self):
raise NotImplementedError()
@abc.abstractproperty
def tested_object(self):
raise NotImplementedError()
@property
def _sub_class(self):
return self._probability_distribution_class
def test_property_class(self):
self.assertIsSubClass(self.tested_object._probability_class, probability.Probability)
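# A concrete test case is expected to fill in the two abstract properties, e.g.
# (class and module names here are hypothetical):
#
#     class UniformDistributionTest(BaseProbabilityDistributionTest):
#         @property
#         def _probability_distribution_class(self):
#             return uniform.UniformDistribution
#
#         @property
#         def tested_object(self):
#             return uniform.UniformDistribution([1, 2, 3])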
|
tomerghelber/probabilities
|
tests/base/test_probability_distribution/base_test_probability_distribution.py
|
Python
|
apache-2.0
| 770
|
import mock
from nose.tools import * # noqa
import httplib as http
from boto.exception import S3ResponseError
from framework.auth import Auth
from tests.base import OsfTestCase
from tests.factories import ProjectFactory, AuthUserFactory
from website.addons.s3.model import S3GuidFile
from website.addons.s3.utils import validate_bucket_name
from utils import create_mock_wrapper, create_mock_key
class TestS3ViewsConfig(OsfTestCase):
def setUp(self):
super(TestS3ViewsConfig, self).setUp()
self.user = AuthUserFactory()
self.consolidated_auth = Auth(user=self.user)
self.auth = ('test', self.user.api_keys[0]._primary_key)
self.project = ProjectFactory(creator=self.user)
self.project.add_addon('s3', auth=self.consolidated_auth)
self.project.creator.add_addon('s3')
self.user_settings = self.user.get_addon('s3')
self.user_settings.access_key = 'We-Will-Rock-You'
self.user_settings.secret_key = 'Idontknowanyqueensongs'
self.user_settings.save()
self.node_settings = self.project.get_addon('s3')
self.node_settings.bucket = 'Sheer-Heart-Attack'
self.node_settings.user_settings = self.project.creator.get_addon('s3')
self.node_settings.save()
self.node_url = '/api/v1/project/{0}/'.format(self.project._id)
@mock.patch('website.addons.s3.views.config.does_bucket_exist')
@mock.patch('website.addons.s3.views.config.adjust_cors')
def test_s3_settings_no_bucket(self, mock_cors, mock_does_bucket_exist):
mock_does_bucket_exist.return_value = False
mock_cors.return_value = True
url = self.project.api_url + 's3/settings/'
rv = self.app.post_json(url, {}, expect_errors=True, auth=self.user.auth)
assert_true('trouble' in rv.body)
@mock.patch('website.addons.s3.views.config.does_bucket_exist')
@mock.patch('website.addons.s3.views.config.adjust_cors')
def test_s3_set_bucket(self, mock_cors, mock_exist):
mock_cors.return_value = True
mock_exist.return_value = True
url = self.project.api_url + 's3/settings/'
self.app.post_json(
url, {'s3_bucket': 'hammertofall'}, auth=self.user.auth,
)
self.project.reload()
self.node_settings.reload()
assert_equal(self.node_settings.bucket, 'hammertofall')
assert_equal(self.project.logs[-1].action, 's3_bucket_linked')
def test_s3_set_bucket_no_settings(self):
user = AuthUserFactory()
self.project.add_contributor(user, save=True)
url = self.project.api_url + 's3/settings/'
res = self.app.post_json(
url, {'s3_bucket': 'hammertofall'}, auth=user.auth,
expect_errors=True
)
assert_equal(res.status_code, http.BAD_REQUEST)
def test_s3_set_bucket_no_auth(self):
user = AuthUserFactory()
user.add_addon('s3')
self.project.add_contributor(user, save=True)
url = self.project.api_url + 's3/settings/'
res = self.app.post_json(
url, {'s3_bucket': 'hammertofall'}, auth=user.auth,
expect_errors=True
)
assert_equal(res.status_code, http.BAD_REQUEST)
def test_s3_set_bucket_already_authed(self):
user = AuthUserFactory()
user.add_addon('s3')
user_settings = user.get_addon('s3')
user_settings.access_key = 'foo'
user_settings.secret_key = 'bar'
user_settings.save()
self.project.add_contributor(user, save=True)
url = self.project.api_url + 's3/settings/'
res = self.app.post_json(
url, {'s3_bucket': 'hammertofall'}, auth=user.auth,
expect_errors=True
)
assert_equal(res.status_code, http.BAD_REQUEST)
@mock.patch('website.addons.s3.api.S3Wrapper.get_wrapped_key')
@mock.patch('website.addons.s3.api.S3Wrapper.from_addon')
def test_s3_set_bucket_registered(self, mock_from_addon, mock_wrapped_key):
mock_from_addon.return_value = create_mock_wrapper()
mock_wrapped_key.return_value = create_mock_key()
registration = self.project.register_node(
None, self.consolidated_auth, '', ''
)
url = registration.api_url + 's3/settings/'
res = self.app.post_json(
url, {'s3_bucket': 'hammertofall'}, auth=self.user.auth,
expect_errors=True,
)
assert_equal(res.status_code, http.BAD_REQUEST)
@mock.patch('website.addons.s3.views.config.has_access')
@mock.patch('website.addons.s3.views.config.create_osf_user')
def test_user_settings(self, mock_user, mock_access):
mock_access.return_value = True
mock_user.return_value = (
'osf-user-12345',
{
'access_key_id': 'scout',
'secret_access_key': 'ssshhhhhhhhh'
}
)
url = '/api/v1/settings/s3/'
self.app.post_json(
url,
{
'access_key': 'scout',
'secret_key': 'Atticus'
},
auth=self.user.auth
)
self.user_settings.reload()
assert_equals(self.user_settings.access_key, 'scout')
@mock.patch('website.addons.s3.model.AddonS3UserSettings.remove_iam_user')
def test_s3_remove_user_settings(self, mock_access):
mock_access.return_value = True
self.user_settings.access_key = 'to-kill-a-mocking-bucket'
self.user_settings.secret_key = 'itsasecret'
self.user_settings.save()
url = '/api/v1/settings/s3/'
self.app.delete(url, auth=self.user.auth)
self.user_settings.reload()
assert_equals(self.user_settings.access_key, None)
assert_equals(self.user_settings.secret_key, None)
assert_equals(mock_access.call_count, 1)
@mock.patch('website.addons.s3.model.AddonS3UserSettings.remove_iam_user')
def test_s3_remove_user_settings_none(self, mock_access):
self.user_settings.access_key = None
self.user_settings.secret_key = None
self.user_settings.save()
url = '/api/v1/settings/s3/'
self.app.delete(url, auth=self.user.auth)
self.user_settings.reload()
assert_equals(mock_access.call_count, 0)
@mock.patch('website.addons.s3.views.config.has_access')
def test_user_settings_no_auth(self, mock_access):
mock_access.return_value = False
url = '/api/v1/settings/s3/'
rv = self.app.post_json(url, {}, auth=self.user.auth, expect_errors=True)
assert_equals(rv.status_int, http.BAD_REQUEST)
@mock.patch('website.addons.s3.api.S3Wrapper.get_wrapped_key')
@mock.patch('website.addons.s3.api.S3Wrapper.from_addon')
def test_view_creates_guid(self, mock_from_addon, mock_wrapped_key):
mock_from_addon.return_value = create_mock_wrapper()
mock_wrapped_key.return_value = create_mock_key()
guid_count = S3GuidFile.find().count()
# View file for the first time
url = self.project.url + 's3/test.py'
res = self.app.get(url, auth=self.user.auth).maybe_follow(auth=self.user.auth)
guids = S3GuidFile.find()
# GUID count has been incremented by one
assert_equal(
guids.count(),
guid_count + 1
)
# Client has been redirected to GUID
assert_equal(
res.request.path.strip('/'),
guids[guids.count() - 1]._id
)
# View file for the second time
self.app.get(url, auth=self.user.auth).follow(auth=self.user.auth)
# GUID count has not been incremented
assert_equal(
S3GuidFile.find().count(),
guid_count + 1
)
@mock.patch('website.addons.s3.views.config.has_access')
@mock.patch('website.addons.s3.views.config.create_osf_user')
def test_node_settings_no_user_settings(self, mock_user, mock_access):
self.node_settings.user_settings = None
self.node_settings.save()
url = self.node_url + 's3/authorize/'
mock_access.return_value = True
mock_user.return_value = (
'osf-user-12345',
{
'access_key_id': 'scout',
'secret_access_key': 'ssshhhhhhhhh'
}
)
self.app.post_json(url, {'access_key': 'scout', 'secret_key': 'ssshhhhhhhhh'}, auth=self.user.auth)
self.user_settings.reload()
assert_equals(self.user_settings.access_key, 'scout')
def test_node_settings_no_user_settings_ui(self):
self.node_settings.user_settings.access_key = None
self.node_settings.user_settings = None
self.node_settings.save()
url = self.project.url + 'settings/'
rv = self.app.get(url, auth=self.user.auth)
assert_true('<label for="s3Addon">Access Key</label>' in rv.body)
@mock.patch('website.addons.s3.model.get_bucket_drop_down')
def test_node_settings_user_settings_ui(self, mock_dropdown):
mock_dropdown.return_value = ['mybucket']
url = self.project.url + 'settings/'
rv = self.app.get(url, auth=self.user.auth)
assert_true('mybucket' in rv.body)
class TestS3ViewsCRUD(OsfTestCase):
def setUp(self):
super(TestS3ViewsCRUD, self).setUp()
self.user = AuthUserFactory()
self.consolidated_auth = Auth(user=self.user)
self.auth = ('test', self.user.api_keys[0]._primary_key)
self.project = ProjectFactory(creator=self.user)
self.project.add_addon('s3', auth=self.consolidated_auth)
self.project.creator.add_addon('s3')
self.user_settings = self.user.get_addon('s3')
self.user_settings.access_key = 'We-Will-Rock-You'
self.user_settings.secret_key = 'Idontknowanyqueensongs'
self.user_settings.save()
self.node_settings = self.project.get_addon('s3')
self.node_settings.bucket = 'Sheer-Heart-Attack'
self.node_settings.user_settings = self.project.creator.get_addon('s3')
self.node_settings.save()
self.node_url = '/api/v1/project/{0}/'.format(self.project._id)
@mock.patch('website.addons.s3.api.S3Wrapper.get_wrapped_key')
@mock.patch('website.addons.s3.api.S3Wrapper.from_addon')
def test_view_file(self, mock_from_addon, mock_wrapped_key):
mock_from_addon.return_value = create_mock_wrapper()
mock_wrapped_key.return_value = create_mock_key()
url = '/project/{0}/s3/view/pizza.png/'.format(self.project._id)
res = self.app.get(
url,
auth=self.user.auth,
).maybe_follow(
auth=self.user.auth,
)
assert_equal(res.status_code, 200)
assert_in('Delete <i class="icon-trash"></i>', res)
@mock.patch('website.addons.s3.api.S3Wrapper.get_wrapped_key')
@mock.patch('website.addons.s3.api.S3Wrapper.from_addon')
def test_view_file_non_contributor(self, mock_from_addon, mock_wrapped_key):
mock_from_addon.return_value = create_mock_wrapper()
mock_wrapped_key.return_value = create_mock_key()
self.project.is_public = True
self.project.save()
user2 = AuthUserFactory()
url = '/project/{0}/s3/view/pizza.png/'.format(self.project._id)
res = self.app.get(
url,
auth=user2.auth,
).maybe_follow(
auth=user2.auth,
)
assert_equal(res.status_code, 200)
assert_not_in('Delete <i class="icon-trash"></i>', res)
@mock.patch('website.addons.s3.views.crud.S3Wrapper.from_addon')
def test_view_faux_file(self, mock_from_addon):
mock_from_addon.return_value = mock.Mock()
mock_from_addon.return_value.get_wrapped_key.return_value = None
url = '/project/{0}/s3/view/faux.sho/'.format(self.project._id)
rv = self.app.get(url, auth=self.user.auth, expect_errors=True).maybe_follow()
assert_equals(rv.status_int, http.NOT_FOUND)
@mock.patch('website.addons.s3.views.crud.S3Wrapper.from_addon')
def test_view_upload_url(self, mock_from_addon):
mock_from_addon.return_value = mock.Mock()
mock_from_addon.return_value.does_key_exist.return_value = False
rv = self.app.post_json(self.node_url + 's3/', {'name': 'faux.sho'}, auth=self.user.auth)
assert_true('faux.sho' in rv.body and self.node_settings.bucket in rv.body and rv.status_int == http.OK)
@mock.patch('website.addons.s3.views.crud.S3Wrapper.from_addon')
def test_download_file_faux_file(self, mock_from_addon):
mock_from_addon.return_value = mock.Mock()
mock_from_addon.return_value.does_key_exist.return_value = False
rv = self.app.post_json(self.node_url + 's3/download/', {'path': 'faux.show'}, expect_errors=True)
assert_equals(rv.status_int, http.NOT_FOUND)
@mock.patch('website.addons.s3.views.crud.S3Wrapper.from_addon')
def test_get_info_for_deleting_file(self, mock_from_addon):
mock_from_addon.return_value = mock.Mock()
mock_from_addon.return_value.does_key_exist.return_value = False
res = self.app.get(
self.project.api_url_for(
'file_delete_info',
path='faux.sho',
),
auth=self.user.auth,
)
assert_equals(res.status_int, http.OK)
class TestS3ViewsHgrid(OsfTestCase):
def setUp(self):
super(TestS3ViewsHgrid, self).setUp()
self.user = AuthUserFactory()
self.consolidated_auth = Auth(user=self.user)
self.auth = ('test', self.user.api_keys[0]._primary_key)
self.project = ProjectFactory(creator=self.user)
self.project.add_addon('s3', auth=self.consolidated_auth)
self.project.creator.add_addon('s3')
self.user_settings = self.user.get_addon('s3')
self.user_settings.access_key = 'We-Will-Rock-You'
self.user_settings.secret_key = 'Idontknowanyqueensongs'
self.user_settings.save()
self.node_settings = self.project.get_addon('s3')
self.node_settings.bucket = 'Sheer-Heart-Attack'
self.node_settings.user_settings = self.project.creator.get_addon('s3')
self.node_settings.save()
def test_data_contents_no_user_settings(self):
self.node_settings.user_settings = None
self.node_settings.save()
url = "/api/v1/project/{0}/s3/hgrid/".format(self.project._id)
rv = self.app.get(url, expect_errors=True, auth=self.user.auth)
assert_equals(rv.status_int, http.BAD_REQUEST)
def test_dummy_folder(self):
url = "/api/v1/project/{0}/s3/hgrid/dummy/".format(self.project._id)
rv = self.app.get(url, auth=self.user.auth)
assert_true(self.node_settings.bucket in rv.body)
def test_dummy_folder_no_user_settings(self):
self.node_settings.user_settings = None
self.node_settings.save()
url = "/api/v1/project/{0}/s3/hgrid/dummy/".format(self.project._id)
rv = self.app.get(url, auth=self.user.auth)
assert_equals(rv.body, 'null')
def test_dummy_folder_no_bucket(self):
self.node_settings.bucket = None
self.node_settings.save()
url = "/api/v1/project/{0}/s3/hgrid/dummy/".format(self.project._id)
rv = self.app.get(url, auth=self.user.auth)
assert_equals(rv.body, 'null')
class TestCreateBucket(OsfTestCase):
def setUp(self):
super(TestCreateBucket, self).setUp()
self.user = AuthUserFactory()
self.consolidated_auth = Auth(user=self.user)
self.auth = ('test', self.user.api_keys[0]._primary_key)
self.project = ProjectFactory(creator=self.user)
self.project.add_addon('s3', auth=self.consolidated_auth)
self.project.creator.add_addon('s3')
self.user_settings = self.user.get_addon('s3')
self.user_settings.access_key = 'We-Will-Rock-You'
self.user_settings.secret_key = 'Idontknowanyqueensongs'
self.user_settings.save()
self.node_settings = self.project.get_addon('s3')
self.node_settings.bucket = 'Sheer-Heart-Attack'
self.node_settings.user_settings = self.project.creator.get_addon('s3')
self.node_settings.save()
def test_bad_names(self):
assert_false(validate_bucket_name('bogus naMe'))
assert_false(validate_bucket_name(''))
assert_false(validate_bucket_name('no'))
assert_false(validate_bucket_name('.cantstartwithp'))
assert_false(validate_bucket_name('or.endwith.'))
assert_false(validate_bucket_name('..nodoubles'))
assert_false(validate_bucket_name('no_unders_in'))
def test_names(self):
assert_true(validate_bucket_name('imagoodname'))
assert_true(validate_bucket_name('still.passing'))
assert_true(validate_bucket_name('can-have-dashes'))
assert_true(validate_bucket_name('kinda.name.spaced'))
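    # Taken together, test_bad_names and test_names above imply that
    # validate_bucket_name enforces at least: lowercase letters, digits, dots
    # and dashes only; a minimum length of three characters; no leading or
    # trailing dot; no consecutive dots; and no underscores or spaces.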
@mock.patch('website.addons.s3.views.crud.create_bucket')
def test_create_bucket_pass(self, mock_make):
mock_make.return_value = True
url = "/api/v1/project/{0}/s3/newbucket/".format(self.project._id)
rv = self.app.post_json(url, {'bucket_name': 'doesntevenmatter'}, auth=self.user.auth, expect_errors=True)
assert_equals(rv.status_int, http.OK)
@mock.patch('website.addons.s3.views.crud.create_bucket')
def test_create_bucket_fail(self, mock_make):
error = S3ResponseError(418, 'because Im a test')
error.message = 'This should work'
mock_make.side_effect = error
url = "/api/v1/project/{0}/s3/newbucket/".format(self.project._id)
rv = self.app.post_json(url, {'bucket_name': 'doesntevenmatter'}, auth=self.user.auth, expect_errors=True)
assert_equals(rv.body, '{"message": "This should work"}')
#TODO
#removed access key
#
|
AndrewSallans/osf.io
|
website/addons/s3/tests/test_view.py
|
Python
|
apache-2.0
| 17,933
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq del allowed personality --cluster`."""
from aquilon.exceptions_ import ArgumentError
from aquilon.aqdb.model import Personality, Cluster, MetaCluster
from aquilon.worker.broker import BrokerCommand
from aquilon.worker.dbwrappers.change_management import ChangeManagement
class CommandDelAllowedPersonalityCluster(BrokerCommand):
requires_plenaries = True
required_parameters = ["archetype", "personality", "cluster"]
def render(self, session, plenaries, archetype, personality, cluster,
metacluster, user, justification, reason, logger, **arguments):
dbpers = Personality.get_unique(session, name=personality,
archetype=archetype, compel=True)
if cluster:
dbclus = Cluster.get_unique(session, cluster, compel=True)
if isinstance(dbclus, MetaCluster):
raise ArgumentError("Please use --metacluster for metaclusters.")
else:
dbclus = MetaCluster.get_unique(session, metacluster, compel=True)
# Validate ChangeManagement
cm = ChangeManagement(session, user, justification, reason, logger, self.command, **arguments)
cm.consider(dbclus)
cm.validate()
plenaries.add(dbclus)
if len(dbclus.allowed_personalities) > 1:
members = dbclus.hosts[:]
if hasattr(dbclus, 'members'):
members.extend(dbclus.members)
for obj in members:
if obj.personality == dbpers:
raise ArgumentError("Member {0:l} has {1:l}, which is "
"incompatible with this constraint."
.format(obj, obj.personality))
if dbpers in dbclus.allowed_personalities:
dbclus.allowed_personalities.remove(dbpers)
session.flush()
plenaries.write()
return
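# Hypothetical invocation sketch (not part of this module); the option names
# mirror required_parameters above, but the exact aq command syntax may vary:
#
#   aq del allowed personality --archetype aquilon --personality generic \
#       --cluster grid1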
|
quattor/aquilon
|
lib/aquilon/worker/commands/del_allowed_personality_cluster.py
|
Python
|
apache-2.0
| 2,670
|
#!/usr/bin/env python
import sys, os, pwd, grp, signal, time
from resource_management import *
from subprocess import call
from common import *
def setup_hue():
import params
import status_params
Logger.info("Configure Hue Service")
# create the pid and log dir
Directory([params.hue_log_dir, params.hue_pid_dir],
mode=0755,
cd_access='a',
owner=params.hue_user,
group=params.hue_group,
create_parents=True
)
if not os.path.islink('/usr/lib/hue/logs/hue'):
Execute("ln -s /var/log/hue/ /usr/lib/hue/logs")
File([params.hue_log_file, params.hue_server_pid_file],
mode=0644,
owner=params.hue_user,
group=params.hue_group,
content=''
)
Logger.info("Creating symlinks .jar")
Link("{0}/desktop/libs/hadoop/java-lib/*".format(params.hue_dir),to = "/usr/lib")
Execute('find {0} -iname "*.sh" | xargs chmod +x'.format(params.service_packagedir))
# Create a home directory for the hue user on HDFS
params.HdfsResource(params.hue_hdfs_home_dir,
type="directory",
action="create_on_execute",
owner=params.hue_user,
mode=0755,
recursive_chmod=True
)
Logger.info(format("Creating {hue_conf_dir}/log.conf file"))
File(format("{hue_conf_dir}/log.conf"),
content = InlineTemplate(params.hue_log_content),
owner = params.hue_user
)
Logger.info(format("Creating {hue_conf_dir}/pseudo-distributed.ini config file"))
File(format("{hue_conf_dir}/pseudo-distributed.ini"),
content = InlineTemplate(params.hue_pseudodistributed_content),
owner = params.hue_user
)
Logger.info(format("Run the script file to add configurations"))
if params.hue_hdfs_module_enabled == 'Yes':
add_hdfs_configuration(params.has_ranger_admin, params.security_enabled)
if params.hue_hbase_module_enabled == 'Yes':
add_hbase_configuration(params.has_ranger_admin, params.security_enabled)
if params.hue_hive_module_enabled == 'Yes':
add_hive_configuration(params.has_ranger_admin, params.security_enabled)
if params.hue_oozie_module_enabled == 'Yes':
add_oozie_configuration(params.has_ranger_admin, params.security_enabled)
if params.hue_spark_module_enabled == 'Yes':
add_spark_configuration(params.has_ranger_admin, params.security_enabled)
|
alexryndin/ambari
|
ambari-server/src/main/resources/stacks/ADH/1.4/services/HUE/package/scripts/setup_hue.py
|
Python
|
apache-2.0
| 2,346
|
BASE_CONFIG = {
'version': 1,
'loggers': {
'pit': {
'handlers': ['console'],
'level': 'DEBUG',
}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'stream': 'ext://sys.stdout',
'formatter': 'verbose',
}
},
'formatters': {
'verbose': {
'format': '%(levelname)s [%(asctime)s] %(message)s in '
'%(filename)s:%(lineno)d'
}
}
}
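if __name__ == '__main__':
    # Minimal demonstration (not part of the original module): BASE_CONFIG
    # follows the stdlib logging.config.dictConfig schema, so it can be
    # applied directly and exercised through the 'pit' logger it defines.
    import logging
    import logging.config

    logging.config.dictConfig(BASE_CONFIG)
    logging.getLogger('pit').debug('logging configured')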
|
mitlib-tdm/pit
|
pit/logging.py
|
Python
|
apache-2.0
| 507
|
#!/usr/bin/env python
#
# Copyright 2013 Darragh Bailey
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from testscenarios.testcase import TestWithScenarios
from testtools import ExpectedException
from testtools import TestCase
from yaml.composer import ComposerError
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.parser import YamlParser
from tests.base import get_scenarios
from tests.base import JsonTestCase
from tests.base import LoggingFixture
from tests.base import YamlTestCase
def _exclude_scenarios(input_filename):
return os.path.basename(input_filename).startswith("custom_")
class TestCaseLocalYamlInclude(TestWithScenarios, JsonTestCase, TestCase):
"""
Verify application specific tags independently of any changes to
modules XML parsing behaviour
"""
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = get_scenarios(fixtures_path, 'yaml', 'json',
filter_func=_exclude_scenarios)
def test_yaml_snippet(self):
if os.path.basename(self.in_filename).startswith("exception_"):
with ExpectedException(ComposerError,
"^found duplicate anchor .*"):
super(TestCaseLocalYamlInclude, self).test_yaml_snippet()
else:
super(TestCaseLocalYamlInclude, self).test_yaml_snippet()
class TestCaseLocalYamlAnchorAlias(TestWithScenarios, YamlTestCase, TestCase):
"""
Verify yaml input is expanded to the expected yaml output when using yaml
anchors and aliases.
"""
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = get_scenarios(fixtures_path, 'iyaml', 'oyaml')
class TestCaseLocalYamlIncludeAnchors(LoggingFixture, TestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
def test_multiple_same_anchor_in_multiple_toplevel_yaml(self):
"""
Verify that anchors/aliases only span use of '!include' tag
To ensure that any yaml loaded by the include tag is in the same
space as the top level file, but individual top level yaml definitions
are treated by the yaml loader as independent.
"""
files = ["custom_same_anchor-001-part1.yaml",
"custom_same_anchor-001-part2.yaml"]
jjb_config = JJBConfig()
jjb_config.jenkins['url'] = 'http://example.com'
jjb_config.jenkins['user'] = 'jenkins'
jjb_config.jenkins['password'] = 'password'
jjb_config.builder['plugins_info'] = []
jjb_config.validate()
j = YamlParser(jjb_config)
j.load_files([os.path.join(self.fixtures_path, f) for f in files])
|
joostvdg/jenkins-job-builder
|
tests/localyaml/test_localyaml.py
|
Python
|
apache-2.0
| 3,217
|
##You need to edit the file named 'Formula.txt' so that it contains the desmos formula
##
##Created By Boston Abrams
##
##With Help from Martin ( :
##
##
def Maths (x):
reading_file=open('Formula.txt', 'r')
lines=reading_file.readlines()
Formula = lines[1]
m1 = 0
count = 0
m = ""
while True:
if Formula[count+2] != "x":
m1 = count+2
m = m + Formula[count+2]
else:
break
count = count + 1
m = float(m)
b = ""
print b
print count
while True:
if len(Formula)-1 >= count+3:
b = b + Formula[count+3]
else:
break
count=count+1
b = float(b)
print "m =", m
print "b =", b
print "x =", x
y = float(m)*int(x) + float(b)
#print " == ", y
# y = Angle
# x = Distance
return y
print Maths(10)
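## Expected 'Formula.txt' layout (inferred from the parsing above, so treat it
## as a best guess): the first line is ignored and the second line holds a
## formula of the form y=<m>x<+/-b>, for example:
##
## Formula:
## y=3x+5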
## ---------------------------------------------------------
## Extra Bonus Code -> Old Versions ( :
##def Mathing(x):
## m = 2
## b = 1
## return float(m)*int(x) + float(b)
##
##def ToString (List): # Converts List to String
## return ''.join(List)
##def Math (x):
## reading_file=open('Formula.txt', 'r')
## lines=reading_file.readlines()
## Formula = lines[1]
## m = float(Formula[2])
## m1 = 2 # The Last # used for this
## if Formula[3] != "x":
## m1 = 3
## m = float(Formula[2] + Formula[3])
## if Formula[4] != "x":
## m1 = 4
## m = float(Formula[2] + Formula[3] + Formula[4])
## print m
## b = int(Formula[3+m1])
## print b
## if len(Formula)-1 >= 5+m1:
## b = float(Formula[3+m1]+Formula[4+m1]+Formula[5+m1])
## if len(Formula)-1 >= 6+m1:
## b = float(Formula[3+m1] +Formula[4+m1]+Formula[5+m1]+ Formula[6+m1])
## print "m =", m
## print "b =", b
## print "x =", x
## y = float(m)*int(x) + float(b)
## #print " == ", y
## # y = Angle
## # x = Distance
##
## return y
|
BostonA/SpudnikPi
|
Math.py
|
Python
|
apache-2.0
| 2,014
|
#!/usr/bin/env python
# Copyright (C) 2015 Wayne Warren
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import sys
from jenkins_jobs import utils
from jenkins_jobs.builder import Builder
import jenkins_jobs.cli.subcommand.base as base
logger = logging.getLogger(__name__)
class DeleteAllSubCommand(base.BaseSubCommand):
def parse_args(self, subparser):
delete_all = subparser.add_parser(
'delete-all',
help='''delete *ALL* jobs from Jenkins server, including those not
managed by Jenkins Job Builder.''')
self.parse_option_recursive_exclude(delete_all)
def execute(self, options, jjb_config):
builder = Builder(jjb_config)
if not utils.confirm(
'Sure you want to delete *ALL* jobs from Jenkins '
'server?\n(including those not managed by Jenkins '
'Job Builder)'):
sys.exit('Aborted')
logger.info("Deleting all jobs")
builder.delete_all_jobs()
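# Hypothetical command-line sketch (not part of this module), assuming the
# standard jenkins-jobs entry point and configuration file location:
#
#   jenkins-jobs --conf /etc/jenkins_jobs/jenkins_jobs.ini delete-all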
|
joostvdg/jenkins-job-builder
|
jenkins_jobs/cli/subcommand/delete_all.py
|
Python
|
apache-2.0
| 1,517
|
#!/usr/bin/env python
"""
fastaCount.py <filename>
"""
import sys
from mungo.fasta import fastaCount
for filename in sys.argv[1:]:
print '%s: %i' % (filename, fastaCount(filename))
|
PapenfussLab/Mungo
|
bin/fastaCount.py
|
Python
|
artistic-2.0
| 187
|
import arrow
import CONFIG
START_TIME = CONFIG.START_TIME
END_TIME = CONFIG.END_TIME
def get_free_times(busy_times, begin_date, end_date):
"""
Gets a list of free times calculated from a list of busy times.
:param busy_times: is the list of busy times in ascending order.
:param begin_date: is the start of the selected time interval.
:param end_date: is the end of the selected time interval.
:return: a list of free times.
"""
free_times = []
busy_times_original = busy_times
# print('free times')
if len(busy_times) == 0:
free_times.append((begin_date.isoformat(), end_date.isoformat()))
else:
begin_date = arrow.get(begin_date).replace(hour=9)
begin_date_end = begin_date.replace(hour=17)
begin_day = begin_date.format('YYYYMMDD')
begin_time = '09:00'
end_time = '17:00'
end_date = arrow.get(end_date).replace(hour=17)
end_date_start = arrow.get(end_date).replace(hour=9)
end_day = end_date.format('YYYYMMDD')
stored_event = busy_times[0]
busy_times = busy_times[1:]
if len(busy_times) == 0:
stored_event_start = arrow.get(stored_event['start']['dateTime'])
stored_event_end = arrow.get(stored_event['end']['dateTime'])
if (stored_event_start == begin_date and
stored_event_end < begin_date_end):
free_times.append((stored_event_end.isoformat(),
end_date.isoformat()))
elif (stored_event_end == end_date and
stored_event_start > end_date_start):
free_times.append((begin_date.isoformat(),
stored_event_start.isoformat()))
elif (stored_event_start > begin_date and
stored_event_end < end_date):
free_times.append((begin_date.isoformat(),
stored_event_start.isoformat()))
free_times.append((stored_event_end.isoformat(),
end_date.isoformat()))
for event in busy_times:
event_start = arrow.get(event['start']['dateTime'])
event_end = arrow.get(event['end']['dateTime'])
event_start_time = event_start.format('HH:mm')
event_end_time = event_end.format('HH:mm')
event_end_day = event_end.format('YYYYMMDD')
stored_event_start = arrow.get(stored_event['start']['dateTime'])
stored_event_start_time = stored_event_start.format('HH:mm')
stored_event_start_day = arrow.get(
stored_event['start']['dateTime']).format('YYYYMMDD')
stored_event_end = stored_event['end']['dateTime']
stored_event_end_time = arrow.get(stored_event_end).format('HH:mm')
event_start = event_start.isoformat()
# starting free time on begin day after start of day
if (stored_event_start_day == begin_day and
stored_event_start_time > begin_time):
free_times.append((begin_date.isoformat(),
stored_event_start.isoformat()))
# print('0 {} - {}'.format(begin_date.isoformat(),
# stored_event_start.isoformat()))
# middle free times
if (stored_event_end < event_start and
(stored_event_end, event_start) not in free_times):
if event_start_time == '09:00':
event_start = arrow.get(
event['start']['dateTime']).replace(
days=-1, hour=17).isoformat()
if stored_event_end_time == '17:00':
stored_event_end = arrow.get(
stored_event_end).replace(days=+1,
hour=START_TIME).isoformat()
free_times.append((stored_event_end, event_start))
# print('1 {} - {}'.format(stored_event_end,
# event_start))
# ending free time
if (event_end_day == end_day and
event_end_time != end_time):
free_times.append((event_end.isoformat(), end_date.isoformat()))
# print('2 {} - {}'.format(event_end.isoformat(),
# end_date.isoformat()))
# ending free time for final events that end before end_date
if (busy_times.index(event) == len(busy_times) - 1 and
event_end < end_date):
if event_end_time == '17:00':
event_end = event_end.replace(days=+1, hour=START_TIME)
free_times.append((event_end.isoformat(), end_date.isoformat()))
# print('3 {} - {}'.format(event_end.isoformat(),
# end_date.isoformat()))
# starting free time not on begin day
if (arrow.get(free_times[0][0]) != begin_date and
stored_event_start != begin_date and
begin_date != arrow.get(
busy_times_original[0]['start']['dateTime'])):
free_times.insert(0, (begin_date.isoformat(),
stored_event_start.isoformat()))
# print('4 {} - {}'.format(begin_date.isoformat(),
# stored_event_start.isoformat()))
stored_event = event
# print()
return free_times
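if __name__ == '__main__':
    # Minimal demonstration sketch (not part of the original module). The
    # sample event below is hypothetical and mimics the Google-Calendar-style
    # dicts that get_free_times() expects ('start'/'end' holding a 'dateTime'
    # ISO string).
    sample_busy = [
        {'start': {'dateTime': '2015-11-10T10:00:00-08:00'},
         'end': {'dateTime': '2015-11-10T11:00:00-08:00'}},
    ]
    begin = arrow.get('2015-11-10T09:00:00-08:00')
    end = arrow.get('2015-11-12T17:00:00-08:00')
    for free_start, free_end in get_free_times(sample_busy, begin, end):
        print('free from {} to {}'.format(free_start, free_end))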
|
hkhamm/proj7-freetimes
|
free_times.py
|
Python
|
artistic-2.0
| 5,647
|
import os
import unittest
from msquaredc.persistence import BackedUpDict
from msquaredc.persistence import obtain
from msquaredc.persistence import persist
class TestPersistence(unittest.TestCase):
def test_all(self):
samples = [
{"x": 1, "y": 2, "z": 3},
{"x": 4, "y": 5, "z": ""},
]
try:
persist("test.txt", samples[0], "w+")
persist("test.txt", samples[1], "a+")
self.assertIn("test.txt", os.listdir(os.getcwd()))
res = obtain("test.txt")
# assert count("test.txt") == 3
finally:
if "test.txt" in os.listdir(os.getcwd()):
os.remove("test.txt")
self.assertEqual(len(res), len(samples))
for i in range(len(samples)):
for j in samples[i].keys():
self.assertIn(j, res[i].keys())
self.assertEqual(str(samples[i][j]), res[i][j], j + str(samples) + str(res))
class TestBackedUpDict(unittest.TestCase):
def test_add(self):
p = BackedUpDict(":memory:")
p["bla"] = "blupp"
self.assertEqual(p["bla"], "blupp")
p[1] = 2
self.assertEqual(p[1], 2)
p[1] = "mahalo"
self.assertEqual(p[1], "mahalo")
self.assertIn(1, p.keys())
self.assertIn("bla", p.keys())
self.assertEqual(len(p), 2)
p["1"] = "2"
self.assertEqual(p[1], "mahalo")
|
j340m3/python-msquaredc
|
tests/test_persistence.py
|
Python
|
bsd-2-clause
| 1,432
|
from os import environ
from pyramid.view import view_config
from pyramid.view import notfound_view_config
from pyramid.httpexceptions import HTTPFound
from pyramid.response import Response
from springboard.utils import ga_context, Paginator
from springboard.views.base import SpringboardViews
from springboard.tasks import pull
ONE_YEAR = 31536000
class CoreViews(SpringboardViews):
@ga_context(lambda context: {'dt': 'Home', })
@view_config(route_name='home',
renderer='springboard:templates/home.jinja2')
def index_view(self):
return self.context()
@view_config(route_name='health', renderer='json')
def health(self):
app_id = environ.get('MARATHON_APP_ID', None)
ver = environ.get('MARATHON_APP_VERSION', None)
return {'id': app_id, 'version': ver}
@ga_context(lambda context: {'dt': context['category'].title, })
@view_config(route_name='category',
renderer='springboard:templates/category.jinja2')
def category(self):
uuid = self.request.matchdict['uuid']
[category] = self.all_categories.filter(uuid=uuid)
return self.context(category=category)
@view_config(route_name='search',
renderer='springboard:templates/search_results.jinja2')
def search(self):
query = self.request.GET.get('q')
p = int(self.request.GET.get('p', 0))
empty_defaults = self.context(
paginator=[],
query=query,
p=p,
)
# no search query was supplied
if not query:
return empty_defaults
all_results = self.all_pages.query(
content__query_string=query).filter(language=self.language)
# no results found
if all_results.count() == 0:
return empty_defaults
paginator = Paginator(all_results, p)
# requested page number is out of range
total_pages = paginator.total_pages()
# sets the floor to 0
p = p if p >= 0 else 0
# sets the roof to `total_pages -1`
p = p if p < total_pages else total_pages - 1
paginator = Paginator(all_results, p)
relevant_categories = self.all_categories.query().filter(
language=self.language)
return self.context(
relevant_categories=relevant_categories,
paginator=paginator,
query=query,
p=p,
)
@ga_context(lambda context: {'dt': context['page'].title, })
@view_config(route_name='page',
renderer='springboard:templates/page.jinja2')
def page(self):
uuid = self.request.matchdict['uuid']
[page] = self.all_pages.filter(uuid=uuid)
category = None
if page.primary_category:
[category] = self.all_categories.filter(
uuid=page.primary_category)
return self.context(category=category,
page=page)
@view_config(route_name='flat_page',
renderer='springboard:templates/flat_page.jinja2')
def flat_page(self):
slug = self.request.matchdict['slug']
[page] = self.all_pages.filter(language=self.language, slug=slug)
return self.context(page=page)
@view_config(route_name='api_notify', renderer='json')
def api_notify(self):
for repo_url, index_prefix in zip(self.all_repo_urls,
self.all_index_prefixes):
pull.delay(repo_url=repo_url,
index_prefix=index_prefix,
es=self.es_settings)
return {}
@notfound_view_config(renderer='springboard:templates/404.jinja2')
def notfound(self):
self.request.response.status = 404
return self.context()
@ga_context(lambda context: {'dt': 'Choose Language', })
@view_config(
route_name='locale_change',
renderer='springboard:templates/locale_change.jinja2')
def locale_change(self):
return self.context()
@ga_context(lambda context: {'dt': 'Set Language', })
@view_config(route_name='locale')
@view_config(route_name='locale_matched')
def set_locale_cookie(self):
response = Response()
language = self.request.matchdict.get('language') or \
self.request.GET.get('language')
next = self.request.GET.get('next', '/')
if language:
response.set_cookie('_LOCALE_', value=language, max_age=ONE_YEAR)
return HTTPFound(location=next, headers=response.headers)
|
universalcore/springboard
|
springboard/views/core.py
|
Python
|
bsd-2-clause
| 4,570
|
# $Id: ImageButton.py,v 1.33.2.2 2006/11/14 12:46:15 marcusva Exp $
#
# Copyright (c) 2004-2006, Marcus von Appen
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""A button widget, which can display an image."""
from pygame import Surface
from ocempgui.draw import Image
from ButtonBase import ButtonBase
from Label import Label
from Constants import *
from StyleInformation import StyleInformation
import base
class ImageButton (ButtonBase):
"""ImageButton (image) -> ImageButton
A button widget class which can display an image.
The ImageButton widget is able to display nearly any kind of
image, while providing all the features of the Button widget.
The image to display can be set with the 'picture' attribute or
set_picture() method. The image can be either a file name from which
the image should be loaded or a pygame.Surface object to display.
button.picture = './image.png'
button.set_picture (image_surface)
If the displayed image is loaded from a file, its file path will be
saved in the 'path' attribute. This can also be used to determine
whether the image was loaded from a file ('path' contains a file
path) or not ('path' is None).
The ImageButton supports different border types by setting its
'border' attribute to a valid value of the BORDER_TYPES constants.
button.border = BORDER_SUNKEN
button.set_border (BORDER_SUNKEN)
Default action (invoked by activate()):
See the ButtonBase class.
Mnemonic action (invoked by activate_mnemonic()):
See the ButtonBase class.
Attributes:
text - The text to display on the ImageButton.
picture - A pygame.Surface of the set image.
path - The path of the set image (if it is loaded from a file).
border - The border style to set for the ImageButton.
"""
def __init__ (self, image=None):
ButtonBase.__init__ (self)
self._border = BORDER_RAISED
self._picture = None
self._path = None
self.set_picture (image)
def set_border (self, border):
"""I.set_border (...) -> None
Sets the border type to be used by the ImageButton.
Raises a ValueError, if the passed argument is not a value from
BORDER_TYPES
"""
if border not in BORDER_TYPES:
raise ValueError ("border must be a value from BORDER_TYPES")
self._border = border
self.dirty = True
def set_picture (self, image):
"""I.set_picture (...) -> None
Sets the image to be displayed on the ImageButton.
The image can be either a valid pygame.Surface object or the
path to an image file. If the argument is a file, the 'path'
attribute will be set to the file path, otherwise it will be
None.
Raises a TypeError, if the passed argument is not a string,
unicode or pygame.Surface.
"""
if image:
if type (image) in (str, unicode):
self._path = image
self._picture = Image.load_image (image)
elif isinstance (image, Surface):
self._path = None
self._picture = image
else:
raise TypeError ("image must be a string, unicode or a " \
"pygame.Surface")
else:
self._path = None
self._picture = None
self.dirty = True
def set_text (self, text=None):
"""I.set_text (...) -> None
Sets the text to display on the ImageButton by referring to the
'text' attribute of its child Label.
"""
if text is not None:
if self.child:
self.child.set_text (text)
else:
self.child = Label (text)
else:
self.child = None
def get_text (self):
"""I.get_text () -> string
Returns the set text of the ImageButton.
Returns the text set on the Label of the ImageButton.
"""
if self.child:
return self.child.text
return ""
def set_child (self, child=None):
"""I.set_child (...) -> None
Sets the Label to display on the ImageButton.
Creates a parent-child relationship from the ImageButton to a
Label and causes the Label to set its mnemonic widget to the
ImageButton.
Raises a TypeError, if the passed argument does not inherit
from the Label class.
"""
self.lock ()
if child and not isinstance (child, Label):
raise TypeError ("child must inherit from Label")
ButtonBase.set_child (self, child)
if child:
child.set_widget (self)
if not child.style:
child.style = self.style or \
base.GlobalStyle.get_style (self.__class__)
self.unlock ()
def set_state (self, state):
"""I.set_state (...) -> None
Sets the state of the ImageButton.
Sets the state of the ImageButton and causes its child to set
its state to the same value.
"""
if self.state == state:
return
self.lock ()
if self.child:
self.child.state = state
ButtonBase.set_state (self, state)
self.unlock ()
def draw_bg (self):
"""I.draw_bg () -> Surface
Draws the background surface of the ImageButton and returns it.
Creates the visible surface of the image button and returns it
to the caller.
"""
return base.GlobalStyle.engine.draw_imagebutton (self)
def draw (self):
"""I.draw () -> None
Draws the ImageButton surface and places its picture and Label on it.
"""
ButtonBase.draw (self)
spacing = StyleInformation.get ("IMAGEBUTTON_SPACING")
rect_img = None
rect_child = None
rect = self.image.get_rect ()
if self.picture:
rect_img = self.picture.get_rect ()
rect_img.center = rect.center
if self.child:
rect_img.right -= (self.child.width / 2 + spacing)
rect_img.centery = rect.centery
self.image.blit (self.picture, rect_img)
if self.child:
self.child.center = rect.center
if self.picture:
self.child.left = rect_img.right + spacing
rect_child = self.child.rect
self.image.blit (self.child.image, rect_child)
text = property (lambda self: self.get_text (),
lambda self, var: self.set_text (var),
doc = "The text of the ImageButton.")
path = property (lambda self: self._path,
doc = "The file path of the image.")
picture = property (lambda self: self._picture,
lambda self, var: self.set_picture (var),
doc = "The image to display on the ImageButton.")
border = property (lambda self: self._border,
lambda self, var: self.set_border (var),
doc = "The border style to set for the ImageButton.")
|
prim/ocempgui
|
ocempgui/widgets/ImageButton.py
|
Python
|
bsd-2-clause
| 8,485
|
# Copyright (c) 2011, Daniel Crosta
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import re
from datetime import datetime, timedelta
from pytz import utc
import time
from mongoengine import Document
from mongoengine import EmbeddedDocument
from mongoengine import fields
from mongoengine.queryset import queryset_manager
punctuation = re.compile(r'[^a-zA-Z0-9 ]')
class Play(EmbeddedDocument):
username = fields.StringField()
entry = fields.StringField()
upvotes = fields.ListField(fields.StringField())
class Chat(EmbeddedDocument):
datetime = fields.DateTimeField()
username = fields.StringField()
message = fields.StringField()
class Game(Document):
@queryset_manager
def active(doc_cls, queryset):
# works like Game.objects, but only
# shows still-active games
# filter() returns a new queryset, so return the filtered result
return queryset.filter(state__in=('playing', 'voting'))
# state is 'playing', 'voting', or 'finished', or 'invalid'
#
# a game is marked 'invalid' if the playing round ends with
# fewer than 3 players (and thus there is no point in voting),
# or if the voting round ends with 0 votes
state = fields.StringField(default='playing')
acronym = fields.StringField()
# when does the current state end?
next_ending = fields.DateTimeField()
minutes_per_round = fields.IntField(default=120)
# game chat
chat = fields.ListField(fields.EmbeddedDocumentField('Chat'))
# list of plays in this game; see class Play
plays = fields.ListField(fields.EmbeddedDocumentField('Play'))
# list of players' usernames
players = fields.ListField(fields.StringField())
num_players = fields.IntField(default=0)
max_players = fields.IntField(default=10)
meta = {
'indexes': [
{'fields': ['players', 'state']},
{'fields': ['state', 'next_ending']},
],
'allow_inheritance': False,
}
def __unicode__(self):
return unicode(self.pk)
def save(self):
if self.next_ending is None:
self.next_ending = datetime.now(utc)
self.next_ending += timedelta(minutes=self.minutes_per_round)
super(Game, self).save()
# biz logic methods. note that these DO NOT update
# the internal state of the instance on which they
# are called; use .reload() for that if necessary
def add_chat(self, player, message):
c = Chat()
c.username = player.username
c.datetime = datetime.now(utc)
c.message = message
self.update(push__chat=c)
def add_player(self, player):
# attempt to add the given player to the Game, and
# return True on success, or False on failure (a
# race condition exists where more players than
# max_players can be added; if so, this attempt
# to add the player fails)
username = player.username
if username in self.players:
return 'duplicate'
if self.num_players >= self.max_players:
return 'toomany'
self.update(push__players=username, inc__num_players=1)
newself = Game.objects(pk=self.pk).only('num_players').first()
if newself.num_players > self.max_players:
# race condition happened, roll back
self.update(pull__players=username, inc__num_players=-1)
return 'toomany'
return 'ok'
def entry_is_valid(self, entry):
entry = punctuation.sub('', entry)
words = [w.strip() for w in entry.split(' ') if w.strip() != '']
if len(words) != len(self.acronym):
return False
for letter, word in zip(self.acronym, words):
if letter != word[0].upper():
return False
return True
def record_play(self, by_player, entry):
# attempt to record a play by a player. the
# player must not have already played, and
# must be in the list of players, and the
# entry must match self.acronym. return
# True on success, otherwise False
if by_player.username not in self.players:
return False
if not self.entry_is_valid(entry):
return False
existing = self.your_play(by_player)
if existing:
# update existing play
key = 'set__plays__%d__entry' % existing.index
kwargs = {key: entry}
self.update(**kwargs)
else:
play = Play()
play.username = by_player.username
play.entry = entry
self.update(push__plays=play)
return True
def record_upvote(self, by_player, for_username):
# attempt to record an upvote by one player
# for another player. the two players must
# be different, and the voting player must
# not have already voted. return True if
# the upvote succeeded, or False otherwise
if by_player.username == for_username:
return False
# remove any other upvotes from this player
# (there are at most 1)
pull = None
push = None
for i, play in enumerate(self.plays):
if play.username == for_username:
push = i
if by_player.username in play.upvotes:
pull = i
if push == pull:
# either voting for the same entry, or
# both are None; return True or False
# accordingly
return push is not None
if pull is not None:
key = 'pull__plays__%d__upvotes' % pull
kwargs = {key: by_player.username}
self.update(**kwargs)
key = 'push__plays__%d__upvotes' % push
kwargs = {key: by_player.username}
self.update(**kwargs)
return True
def your_play(self, by_player):
for i, play in enumerate(self.plays):
if play.username == by_player.username:
play.index = i
return play
return None
def your_vote(self, by_player):
for i, play in enumerate(self.plays):
if by_player.username in play.upvotes:
play.index = i
return play
return None
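# Hypothetical usage sketch (not part of the original module); 'player' stands
# for any object with a 'username' attribute and a mongoengine connection is
# assumed. As noted above, the biz logic methods issue atomic updates without
# touching the local instance, so reload() is needed before relying on them:
#
#   game = Game(acronym='BRB', state='playing')
#   game.save()
#   game.add_player(player)
#   game.reload()
#   game.record_play(player, 'Be Right Back')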
class Leaderboard(Document):
# documents in this collection are created by the
# map-reduce job in management/commands/leaderboard.py
#
# they should always be queried by one of the indices.
# there will be many, of the form "yYYYY", "mYYYY-MM",
# or "wYYYY-WW" where YYYY is current year or earlier,
# MM is 00 - 12, and WW is 00-52 (or sometimes 53).
# furthermore, to make best use of the indices, always
# select only fields 'username', and the name of the
# indexed score field (this will ensure use of the
# covered index)
username = fields.StringField(db_field='_id')
scores = fields.DictField(db_field='value')
meta = {
'allow_inheritance': False,
'id_field': 'username',
}
@staticmethod
def weeknum(dt):
return (dt - datetime(1970, 1, 4, tzinfo=utc)).days / 7
__index_cache = set()
__index_cache_time = None
@classmethod
def __index_exists(cls, window, key):
if not cls.__index_cache_time or (time.time() - cls.__index_cache_time) > 60:
info = cls.objects._collection.index_information()
cls.__index_cache.clear()
cls.__index_cache.update(info.keys())
cls.__index_cache_time = time.time()
index_name = '%s.%s%s_-1__id_1' % (cls.scores.db_field, window, key)
return index_name in cls.__index_cache
@classmethod
def exists(cls, window, key):
return cls.__index_exists(window, key)
@classmethod
def leaders(cls, window, key, n=20):
# generate a list of tuples of (username, score),
# in rank order, of top-scoring users during the
# given time period
if not cls.__index_exists(window, key):
raise StopIteration()
indexbase = '%s%s' % (window, key)
index = 'scores.%s' % indexbase
leaders = cls.objects.order_by('-' + index).only(index, 'username')
for l in leaders[:n]:
yield (l.username, int(l.scores.get(indexbase, 0)))
@classmethod
def rank(cls, window, key, username):
# determine the rank, if any, of the user during
# the given time period, or None if the user did
# not play during that time
indexbase = '%s%s' % (window, key)
index = 'scores.%s' % indexbase
score = cls.objects(username=username)
score = score.only(index).first()
if score:
score = score.scores.get(indexbase)
if score:
query = {index.replace('.', '__') + '__gt': score}
return Leaderboard.objects(**query).order_by('-' + index).count() + 1
return None
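# Hypothetical query sketch (not part of the original module), assuming the
# map-reduce job in management/commands/leaderboard.py has already populated
# the collection and its yearly index:
#
#   if Leaderboard.exists('y', '2011'):
#       for username, score in Leaderboard.leaders('y', '2011', n=10):
#           print username, score
#       print Leaderboard.rank('y', '2011', 'some_username')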
|
dcrosta/nymwit
|
nymwit/game/models.py
|
Python
|
bsd-2-clause
| 10,157
|
"""
This acts very much like a renderer, except that instead of rendering to the screen,
it outputs bam files, one per tile, with all the LOD and such packed in
"""
import os
import json
import bakery.bakery
import bakery.gpuBakery
from renderer.renderTiler import RenderNodeTiler
from panda3d.core import NodePath
from direct.stdpy.file import join
def nameTile(x,y):
return str(x)+"_"+str(y)
dataFile="data.txt"
def exportTile(dstDir,name,tile):
tile.writeBamFile(join(dstDir,name+".bam"))
def makeImportWrapper(call):
def wrapper(srcDir,name,callback=None):
def process(model):
return call(RenderTile(model))
def done(model):
callback(process(model))
path=join(srcDir,name+".bam")
if callback:
# TODO: Update this when Panda3d bug is fixed: https://bugs.launchpad.net/panda3d/+bug/1186880
# To work around this bug, disable async model loading
#loader.loadModel(path,callback=done)
done(loader.loadModel(path))
else:
return process(loader.loadModel(path))
return wrapper
def importTile(srcDir,name,callback=None):
def done(model):
callback(RenderTile(model))
path=join(srcDir,name+".bam")
if callback:
loader.loadModel(path,callback=done)
else:
return RenderTile(loader.loadModel(path))
class RenderTile(NodePath):
def __init__(self,tile):
NodePath.__init__(self,tile.node())
def height(self,x,y):
return 0
def getRenderer(dir,focus,importer=importTile):
b=CachedNodeBakery(dir,importer)
scale=b.tileSize
return RenderNodeTiler(b,scale,focus)
def cache(dir,renderTileBakery,size,startX,startY,xCount,yCount,defaultX,defaultY,exporter=exportTile,originX=0,originY=0):
x=object()
class thingy:
def getTile(self,x, y):
tile=renderTileBakery.getTile(x,y)
#tile.meshes.flattenStrong()
return tile.meshes
fbak=thingy()
extraInfo={
'size':size,
'originX':originX,
'originY':originY,
}
b=Bammer(fbak,dir,exporter=exporter,extraInfo=extraInfo)
b.processGrid(startX,startY,xCount,yCount)
b.setDefaultTile(defaultX,defaultY)
b.finish()
class CachedNodeBakery:
def __init__(self,dir,importer):
path=join(dir,dataFile)
d=json.load(open(path,'r'))
self.default=d['default']
extraInfo=d['extraInfo']
self.tileSize=extraInfo['size']
self.originX=extraInfo['originX']
self.originY=extraInfo['originY']
self.tiles=set(tuple(t) for t in d['tiles'])
self.dir=dir
self.importer=importer
def _getName(self, x, y):
t=(x, y)
if t in self.tiles:
return nameTile(*t)
else:
return nameTile(*self.default)
def getTile(self, x, y):
return self.importer(self.dir,self._getName(x, y))
def asyncGetTile(self, x, y, callback, callbackParams=()):
def done(model):
callback(model,*callbackParams)
self.importer(self.dir,self._getName(x, y),callback=done)
class Bammer:
def __init__(self,nodeBakery,dstDir,exporter,extraInfo={}):
"""
nodeBakery is a bakery.FixedBakery that produces NodePaths
dstDir is where tiles will be saved, os specific style filepath
"""
self.nodeBakery=nodeBakery
self.processed=set()
self.default=None
self.dstDir=dstDir
self.extraInfo=extraInfo
self.exporter=exporter
def processGrid(self,startX,startY,xCount,yCount):
for x in xrange(xCount):
print "starting row {0} of {1}".format(x+1,xCount)
for y in xrange(yCount):
self.processTile(startX+x,startY+y)
def processTile(self,x,y):
t=(x,y)
if t not in self.processed:
node=self.nodeBakery.getTile(x,y)
self.exporter(self.dstDir,nameTile(*t),node)
self.processed.add(t)
else:
print "skipping redundant tile "+str(t)
def setDefaultTile(self,x,y):
self.processTile(x,y)
self.default=(x,y)
def finish(self):
f=open(join(self.dstDir,dataFile),"w")
d={"default":self.default,
"extraInfo":self.extraInfo,
"tiles":list(self.processed)
}
json.dump(d,f)
f.close()
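# Hypothetical usage sketch (not part of the original module); 'tileBakery'
# and 'focusNode' are placeholders for a bakery that produces render tiles
# and a NodePath to keep the tiler centered on:
#
#   cache("tileCache", tileBakery, size=128,
#         startX=0, startY=0, xCount=4, yCount=4,
#         defaultX=0, defaultY=0)
#   tiler = getRenderer("tileCache", focusNode)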
|
Craig-Macomber/Panda3D-Terrain-System
|
tileBamCacher.py
|
Python
|
bsd-2-clause
| 4,549
|
###############################################################################
#
# Copyright (C) 2016 Bitquant Research Laboratories (Asia) Limited
#
# Licensed under the GPLv3+ License
#
###############################################################################
from cffi import FFI
import atexit
import os
import struct
import cffi_to_py
import sys
from enum import Enum
if 8 * struct.calcsize("P") != 64:
print("sptrader only supported for 64 bit")
print("sptrader_api string needs to be checked for 32-bit")
sys.exit(1)
location = os.path.dirname(os.path.realpath(__file__))
dll_location = os.path.join(location, "..", "dll")
ffi = FFI()
spapi_cdef = """
typedef signed long int __int64_t;
typedef unsigned long int __uint64_t;
typedef char tinyint;
typedef unsigned char u_tinyint;
typedef unsigned char u_char;
typedef unsigned short u_short;
typedef unsigned int u_int;
typedef unsigned long u_long;
typedef long long bigint;
typedef unsigned long long u_bigint;
typedef char STR4[4];
typedef char STR16[16];
typedef char STR40[40];
typedef struct
{
int32_t Qty;
int32_t DepQty;
int32_t LongQty;
int32_t ShortQty;
double TotalAmt;
double DepTotalAmt;
double LongTotalAmt;
double ShortTotalAmt;
double PLBaseCcy;
double PL;
double ExchangeRate;
STR16 AccNo;
STR16 ProdCode;
char LongShort;
tinyint DecInPrice;
} SPApiPos;
typedef struct
{
double Price;
double StopLevel;
double UpLevel;
double UpPrice;
double DownLevel;
double DownPrice;
bigint ExtOrderNo;
int32_t IntOrderNo;
int32_t Qty;
int32_t TradedQty;
int32_t TotalQty;
int32_t ValidTime;
int32_t SchedTime;
int32_t TimeStamp;
uint32_t OrderOptions;
STR16 AccNo;
STR16 ProdCode;
STR16 Initiator;
STR16 Ref;
STR16 Ref2;
STR16 GatewayCode;
STR40 ClOrderId;
char BuySell;
char StopType;
char OpenClose;
tinyint CondType;
tinyint OrderType;
tinyint ValidType;
tinyint Status;
tinyint DecInPrice;
tinyint OrderAction;
int32_t UpdateTime;
int32_t UpdateSeqNo;
} SPApiOrder;
typedef struct
{
bigint BidExtOrderNo;
bigint AskExtOrderNo;
long BidAccOrderNo;
long AskAccOrderNo;
double BidPrice;
double AskPrice;
long BidQty;
long AskQty;
long SpecTime;
u_long OrderOptions;
STR16 ProdCode;
STR16 AccNo;
STR40 ClOrderId;
STR40 OrigClOrdId;
tinyint OrderType;
tinyint ValidType;
tinyint DecInPrice;
} SPApiMMOrder;
typedef struct
{
int32_t RecNo;
double Price;
bigint TradeNo;
bigint ExtOrderNo;
int32_t IntOrderNo;
int32_t Qty;
int32_t TradeDate;
int32_t TradeTime;
STR16 AccNo;
STR16 ProdCode;
STR16 Initiator;
STR16 Ref;
STR16 Ref2;
STR16 GatewayCode;
STR40 ClOrderId;
char BuySell;
char OpenClose;
tinyint Status;
tinyint DecInPrice;
double OrderPrice;
STR40 TradeRef;
int32_t TotalQty;
int32_t RemainingQty;
int32_t TradedQty;
double AvgTradedPrice;
} SPApiTrade;
typedef struct
{
double Margin;
double ContractSize;
STR16 MarketCode;
STR16 InstCode;
STR40 InstName;
STR40 InstName1;
STR40 InstName2;
STR4 Ccy;
char DecInPrice;
char InstType;
} SPApiInstrument;
typedef struct
{
STR16 ProdCode;
char ProdType;
STR40 ProdName;
STR16 Underlying;
STR16 InstCode;
int32_t ExpiryDate;
char CallPut;
int32_t Strike;
int32_t LotSize;
STR40 ProdName1;
STR40 ProdName2;
char OptStyle;
int32_t TickSize;
}SPApiProduct;
typedef struct
{
double Bid[20];
int32_t BidQty[20];
int32_t BidTicket[20];
double Ask[20];
int32_t AskQty[20];
int32_t AskTicket[20];
double Last[20];
int32_t LastQty[20];
int32_t LastTime[20];
double Equil;
double Open;
double High;
double Low;
double Close;
int32_t CloseDate;
double TurnoverVol;
double TurnoverAmt;
int32_t OpenInt;
STR16 ProdCode;
STR40 ProdName;
char DecInPrice;
int32_t ExStateNo;
int32_t TradeStateNo;
bool Suspend;
int32_t ExpiryYMD;
int32_t ContractYMD;
int32_t Timestamp;
} SPApiPrice;
typedef struct
{
double Price;
int32_t Qty;
int32_t TickerTime;
int32_t DealSrc;
STR16 ProdCode;
char DecInPrice;
} SPApiTicker;
typedef struct
{
double NAV;
double BuyingPower;
double CashBal;
double MarginCall;
double CommodityPL;
double LockupAmt;
double CreditLimit;
double MaxMargin;
double MaxLoanLimit;
double TradingLimit;
double RawMargin;
double IMargin;
double MMargin;
double TodayTrans;
double LoanLimit;
double TotalFee;
double LoanToMR;
double LoanToMV;
STR16 AccName;
STR4 BaseCcy;
STR16 MarginClass;
STR16 TradeClass;
STR16 ClientId;
STR16 AEId;
char AccType;
char CtrlLevel;
char Active;
char MarginPeriod;
} SPApiAccInfo;
typedef struct
{
double CashBf;
double TodayCash;
double NotYetValue;
double Unpresented;
double TodayOut;
STR4 Ccy;
} SPApiAccBal;
typedef struct
{
STR4 Ccy;
double Rate;
} SPApiCcyRate;
typedef void (SPDLLCALL *LoginReplyAddr)(long ret_code, char *ret_msg);
typedef void (SPDLLCALL *ConnectedReplyAddr)(long host_type, long con_status);
typedef void (SPDLLCALL *ApiOrderRequestFailedAddr)(tinyint action,
SPApiOrder *order, long err_code, char *err_msg);
typedef void (SPDLLCALL *ApiOrderReportAddr)(long rec_no, SPApiOrder *order);
typedef void (SPDLLCALL *ApiOrderBeforeSendReportAddr)(SPApiOrder *order);
typedef void (SPDLLCALL *AccountLoginReplyAddr)(char *accNo,
long ret_code, char* ret_msg);
typedef void (SPDLLCALL *AccountLogoutReplyAddr)(long ret_code, char* ret_msg);
typedef void (SPDLLCALL *AccountInfoPushAddr)(SPApiAccInfo *acc_info);
typedef void (SPDLLCALL *AccountPositionPushAddr)(SPApiPos *pos);
typedef void (SPDLLCALL *UpdatedAccountPositionPushAddr)(SPApiPos *pos);
typedef void (SPDLLCALL *UpdatedAccountBalancePushAddr)(SPApiAccBal *acc_bal);
typedef void (SPDLLCALL *ApiTradeReportAddr)(long rec_no, SPApiTrade *trade);
typedef void (SPDLLCALL *ApiPriceUpdateAddr)(SPApiPrice *price);
typedef void (SPDLLCALL *ApiTickerUpdateAddr)(SPApiTicker *ticker);
typedef void (SPDLLCALL *PswChangeReplyAddr)(long ret_code, char *ret_msg);
typedef void (SPDLLCALL *ProductListByCodeReplyAddr)(char *inst_code,
bool is_ready, char *ret_msg);
typedef void (SPDLLCALL *InstrumentListReplyAddr)(bool is_ready,
char *ret_msg);
typedef void (SPDLLCALL *BusinessDateReplyAddr)(long business_date);
typedef void (SPDLLCALL *ApiMMOrderBeforeSendReportAddr)
(SPApiMMOrder *mm_order);
typedef void (SPDLLCALL *ApiMMOrderRequestFailedAddr)(SPApiMMOrder *mm_order,
long err_code, char *err_msg);
typedef void (SPDLLCALL *ApiQuoteRequestReceivedAddr)(char *product_code,
char buy_sell, long qty);
void SPAPI_RegisterLoginReply(LoginReplyAddr addr);
void SPAPI_RegisterConnectingReply(ConnectedReplyAddr addr);
void SPAPI_RegisterOrderReport(ApiOrderReportAddr addr);
void SPAPI_RegisterOrderRequestFailed(ApiOrderRequestFailedAddr addr);
void SPAPI_RegisterOrderBeforeSendReport(ApiOrderBeforeSendReportAddr addr);
void SPAPI_RegisterAccountLoginReply(AccountLoginReplyAddr addr);
void SPAPI_RegisterAccountLogoutReply(AccountLogoutReplyAddr addr);
void SPAPI_RegisterAccountInfoPush(AccountInfoPushAddr addr);
void SPAPI_RegisterAccountPositionPush(AccountPositionPushAddr addr);
void
SPAPI_RegisterUpdatedAccountPositionPush(UpdatedAccountPositionPushAddr addr);
void
SPAPI_RegisterUpdatedAccountBalancePush(UpdatedAccountBalancePushAddr addr);
void SPAPI_RegisterTradeReport(ApiTradeReportAddr addr);
void SPAPI_RegisterApiPriceUpdate(ApiPriceUpdateAddr addr);
void SPAPI_RegisterTickerUpdate(ApiTickerUpdateAddr addr);
void SPAPI_RegisterPswChangeReply(PswChangeReplyAddr addr);
void SPAPI_RegisterProductListByCodeReply(ProductListByCodeReplyAddr addr);
void SPAPI_RegisterInstrumentListReply(InstrumentListReplyAddr addr);
void SPAPI_RegisterBusinessDateReply(BusinessDateReplyAddr addr);
void SPAPI_RegisterMMOrderRequestFailed(ApiMMOrderRequestFailedAddr addr);
void SPAPI_RegisterMMOrderBeforeSendReport(
ApiMMOrderBeforeSendReportAddr addr);
void SPAPI_RegisterQuoteRequestReceivedReport(
ApiQuoteRequestReceivedAddr addr);
int SPAPI_Initialize();
void SPAPI_SetLoginInfo(char *host,
int port, char *license, char *app_id, char *user_id, char *password);
int SPAPI_Login();
int SPAPI_GetLoginStatus(char *user_id, short host_id);
int SPAPI_AddOrder(SPApiOrder *order);
int SPAPI_AddInactiveOrder(SPApiOrder* order);
int SPAPI_ChangeOrder(char *user_id,
SPApiOrder* order, double org_price, long org_qty);
int SPAPI_ChangeOrderBy(char *user_id,
char *acc_no, long accOrderNo, double org_price,
long org_qty, double newPrice, long newQty);
int SPAPI_DeleteOrderBy(char *user_id,
char *acc_no, long accOrderNo, char* productCode, char* clOrderId);
int SPAPI_DeleteAllOrders(char *user_id, char *acc_no);
int SPAPI_ActivateAllOrders(char *user_id, char *acc_no);
int SPAPI_InactivateAllOrders(char *user_id, char *acc_no);
int SPAPI_ActivateOrderBy(char *user_id, char *acc_no, long accOrderNo);
int SPAPI_InactivateOrderBy(char *user_id, char *acc_no, long accOrderNo);
int SPAPI_GetOrderCount(char *user_id, char* acc_no);
int SPAPI_GetOrderByOrderNo(char *user_id, char *acc_no,
long int_order_no, SPApiOrder *order);
int SPAPI_GetPosCount(char *user_id);
int SPAPI_GetPosByProduct(char *user_id, char *prod_code, SPApiPos *pos);
void SPAPI_Uninitialize();
int SPAPI_Logout(char *user_id);
int SPAPI_AccountLogin(char *user_id, char *acc_no);
int SPAPI_AccountLogout(char *user_id, char *acc_no);
int SPAPI_GetTradeCount(char *user_id, char *acc_no);
int SPAPI_SubscribePrice(char *user_id, char *prod_code, int mode);
int SPAPI_SubscribeTicker(char *user_id, char *prod_code, int mode);
int SPAPI_ChangePassword(char *user_id, char *old_password,
char *new_password);
int SPAPI_GetDllVersion(char *dll_ver_no, char *dll_rel_no, char *dll_suffix);
int SPAPI_GetAccBalCount(char* user_id);
int SPAPI_GetAccBalByCurrency(char *user_id, char *ccy, SPApiAccBal *acc_bal);
int SPAPI_GetCcyRateByCcy(char *user_id, char *ccy, double *rate);
int SPAPI_GetAccInfo(char *user_id, SPApiAccInfo *acc_info);
int SPAPI_GetPriceByCode(char *user_id, char *prod_code, SPApiPrice *price);
int SPAPI_SetApiLogPath(char *path);
int SPAPI_LoadProductInfoListByCode(char *inst_code);
int SPAPI_GetProductCount();
int SPAPI_GetProductByCode(char *prod_code, SPApiProduct *prod);
int SPAPI_LoadInstrumentList();
int SPAPI_GetInstrumentCount();
int SPAPI_GetInstrumentByCode(char *inst_code, SPApiInstrument *inst);
int SPAPI_SetLanguageId(int langid);
int SPAPI_SendMarketMakingOrder(char *user_id, SPApiMMOrder *mm_order);
int SPAPI_SubscribeQuoteRequest(char *user_id, char *prod_code, int mode);
int SPAPI_SubscribeAllQuoteRequest(char *user_id, int mode);
int SPAPI_GetAllTradesByArray(char *user_id, char *acc_no,
SPApiTrade* apiTradeList);
int SPAPI_GetOrdersByArray(char *user_id, char *acc_no,
SPApiOrder* apiOrderList);
int SPAPI_GetAllAccBalByArray(char *user_id, SPApiAccBal* apiAccBalList);
int SPAPI_GetInstrumentByArray(SPApiInstrument* apiInstList);
int SPAPI_GetProductByArray(SPApiProduct* apiProdList);
"""
spapi = None
if os.name == "nt":
ffi.cdef(spapi_cdef.replace("SPDLLCALL", "__stdcall"))
ffi.dlopen(os.path.join(dll_location, "libeay32.dll"))
ffi.dlopen(os.path.join(dll_location, "ssleay32.dll"))
spapi = ffi.dlopen(os.path.join(dll_location, "spapidllm64.dll"))
else:
ffi.cdef(spapi_cdef.replace("SPDLLCALL", ""))
ffi.dlopen(os.path.join(dll_location, "libapiwrapper.so"),
ffi.RTLD_GLOBAL | ffi.RTLD_NOW)
spapi = ffi.dlopen(os.path.join(dll_location, "linux-shim.so"))
# Remember to convert unicode strings to byte strings otherwise
# cffi will assume that the characters are wchars and not
# ordinary characters
class SPTrader(object):
ffi = ffi
api = spapi
ffi_conv = cffi_to_py.FfiConverter(ffi)
def __init__(self):
self.api.SPAPI_SetLanguageId(0)
self.api.SPAPI_Initialize()
self.user = None
self.acc_no = None
def ready(self):
if self.user is None:
return -1
else:
return 0
def register_login_reply(self, login_reply_func):
self.api.SPAPI_RegisterLoginReply(login_reply_func)
def register_connecting_reply(self, connected_reply_func):
self.api.SPAPI_RegisterConnectingReply(connected_reply_func)
def register_order_report(self, func):
self.api.SPAPI_RegisterOrderReport(func)
def register_order_request_failed(self, func):
self.api.SPAPI_RegisterOrderRequestFailed(func)
def register_order_before_send_report(self, func):
self.api.SPAPI_RegisterOrderBeforeSendReport(func)
def register_account_login_reply(self, func):
self.api.SPAPI_RegisterAccountLoginReply(func)
def register_account_logout_reply(self, func):
self.api.SPAPI_RegisterAccountLogoutReply(func)
def register_account_info_push(self, account_info_func):
self.api.SPAPI_RegisterAccountInfoPush(account_info_func)
def register_account_position_push(self, func):
self.api.SPAPI_RegisterAccountPositionPush(func)
def register_updated_account_position_push(self, func):
self.api.SPAPI_RegisterUpdatedAccountPositionPush(func)
def register_updated_account_balance_push(self, func):
self.api.SPAPI_RegisterUpdatedAccountBalancePush(func)
def register_trade_report(self, func):
self.api.SPAPI_RegisterTradeReport(func)
def register_price_update(self, func):
self.api.SPAPI_RegisterApiPriceUpdate(func)
def register_ticker_update(self, func):
self.api.SPAPI_RegisterTickerUpdate(func)
def register_psw_change_reply(self, func):
self.api.SPAPI_RegisterPswChangeReply(func)
def register_product_list_by_code_reply(self, func):
self.api.SPAPI_RegisterProductListByCodeReply(func)
def register_instrument_list_reply(self, func):
self.api.SPAPI_RegisterInstrumentListReply(func)
def register_business_date_reply(self, func):
self.api.SPAPI_RegisterBusinessDateReply(func)
def register_mm_order_request_failed(self, func):
self.api.SPAPI_RegisterMMOrderRequestFailed(func)
def register_mm_order_before_send_report(self, func):
self.api.SPAPI_RegisterMMOrderBeforeSendReport(func)
def register_quote_request_received_report(self, func):
self.api.SPAPI_RegisterQuoteRequestReceivedReport(func)
def load_instrument_list(self):
return self.api.SPAPI_LoadInstrumentList()
def set_login_info(self,
host,
port,
license,
app_id,
user_id,
password):
self.user = user_id.encode("utf-8")
self.acc_no = self.user
self.api.SPAPI_SetLoginInfo(host.encode("utf-8"),
port,
license.encode("utf-8"),
app_id.encode("utf-8"),
self.user,
password.encode("utf-8"))
def login(self):
return self.api.SPAPI_Login()
def get_login_status(self, status_id):
if self.user is None:
return -1
return self.api.SPAPI_GetLoginStatus(self.user, status_id)
def get_instrument_count(self):
return self.api.SPAPI_GetInstrumentCount()
def get_instrument(self):
count = self.get_instrument_count()
if count <= 0:
return []
buffer = self.ffi.new("SPApiInstrument[%d]" % (count))
if self.api.SPAPI_GetInstrumentByArray(buffer) == 0:
return self.cdata_to_py(buffer)
else:
return []
def get_product_count(self):
return self.api.SPAPI_GetProductCount()
def get_product(self):
count = self.get_product_count()
if count <= 0:
return []
buffer = self.ffi.new("SPApiProduct[%d]" % (count))
# a non-zero return code indicates failure, as in the other *ByArray wrappers
if self.api.SPAPI_GetProductByArray(buffer) != 0:
return []
return self.cdata_to_py(buffer)
def get_acc_info(self):
if self.user is None:
return None
buffer = self.ffi.new("SPApiAccInfo[1]")
self.api.SPAPI_GetAccInfo(self.user, buffer)
return self.cdata_to_py(buffer[0])
def get_acc_bal_count(self):
return self.api.SPAPI_GetAccBalCount(self.user)
def get_order_count(self):
return self.api.SPAPI_GetOrderCount(self.user, self.acc_no)
def get_all_orders(self):
if self.ready() != 0:
return []
count = self.get_order_count()
if count <= 0:
return []
buffer = self.ffi.new("SPApiOrder[%d]" % (count))
if self.api.SPAPI_GetOrdersByArray(self.user,
self.acc_no,
buffer) != 0:
return []
return self.cdata_to_py(buffer)
def get_trade_count(self):
return self.api.SPAPI_GetTradeCount(self.user, self.acc_no)
def get_all_trades(self):
if self.ready() != 0:
return []
count = self.get_trade_count()
if count <= 0:
return []
buffer = self.ffi.new("SPApiTrade[%d]" % (count))
if self.api.SPAPI_GetAllTradesByArray(self.user,
self.acc_no,
buffer) != 0:
return []
return self.cdata_to_py(buffer)
def get_position_count(self):
return self.api.SPAPI_GetPosCount(self.user)
def get_price_by_code(self, code):
price = self.ffi.new("SPApiPrice[1]")
self.api.SPAPI_GetPriceByCode(self.user, code.encode("utf-8"), price)
return self.cdata_to_py(price)
def subscribe_price(self, prod, value):
self.api.SPAPI_SubscribePrice(self.user,
prod.encode("utf-8"), value)
def subscribe_ticker(self, prod, value):
self.api.SPAPI_SubscribeTicker(self.user,
prod.encode("utf-8"), value)
def logout(self):
user = self.user
if user is not None:
self.user = None
self.acc_no = None
return self.api.SPAPI_Logout(user)
def cdata_to_py(self, s):
return self.ffi_conv.to_py(s)
def fields(self, s):
return self.ffi_conv.fields(s)
def order_add(self, data):
data['AccNo'] = self.acc_no
data['Initiator'] = self.user
buffer = self.ffi.new("SPApiOrder[1]")
self.ffi_conv.from_py(buffer, data)
if buffer is None:
return -2
return self.api.SPAPI_AddOrder(buffer)
def order_add_inactive(self, data):
data['AccNo'] = self.acc_no
data['Initiator'] = self.user
buffer = self.ffi.new("SPApiOrder[1]")
self.ffi_conv.from_py(buffer, data)
if buffer is None:
return -2
return self.api.SPAPI_AddInactiveOrder(buffer)
def order_delete(self, data):
accOrderNo = int(data['IntOrderNo'])
return self.api.SPAPI_DeleteOrderBy(self.user,
self.acc_no,
accOrderNo,
data['ProdCode'].encode("utf-8"),
data['ClOrderId'].encode("utf-8"))
def order_delete_all(self, data):
return self.api.SPAPI_DeleteAllOrders(self.user,
self.acc_no)
def order_activate(self, data):
accOrderNo = int(data['IntOrderNo'])
return self.api.SPAPI_ActivateOrderBy(self.user,
self.acc_no,
accOrderNo)
def order_inactivate(self, data):
accOrderNo = int(data['IntOrderNo'])
return self.api.SPAPI_InactivateOrderBy(self.user,
self.acc_no,
accOrderNo)
def __del__(self):
pass
# self.api.SPAPI_Uninitialize()
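# Hypothetical usage sketch (not part of this module); the host, port,
# licence, app id and credentials below are placeholders for real SP gateway
# values:
#
#   trader = SPTrader()
#   trader.set_login_info("demo.example.com", 8080,
#                         "LICENSE", "APPID", "demo_user", "password")
#   trader.login()
#   print(trader.get_acc_info())
#   trader.logout()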
|
joequant/sptrader
|
sptrader/sptrader.py
|
Python
|
bsd-2-clause
| 20,783
|
import os
from uuid import uuid4
basedir = os.path.abspath(os.path.dirname(__file__))
CSFR_ENABLED = True
SECRET_KEY = uuid4().hex
#SHORTENER SERVICE SETTINGS
TINY_API_URL = 'http://tiny-url.info/api/v1/create'
TINY_API_KEY = '8A6CIC8081B6BC5G5A07'
TINY_API_PROVIDER = 'go_ly'
TINY_API_FORMAT = 'json'
#LOGS SETTINGS
LOG_FOLDER = 'logs'
LOG_FILENAME = 'smoothyurl.log'
LOG_INFO = 'Smoothy url startup'
|
sosmooth/smoothyurl
|
server/config.py
|
Python
|
bsd-2-clause
| 405
|
"""
Feature Detector/Descriptor Visualization
Licence: BSD
Author : Hoang Anh Nguyen
"""
import cv2
import numpy as np
import argparse, sys
#--------------------------------------------------------
#--------------------------------------------------------
# Class that provides an interface to facilitate the use of OpenCV
# feature detectors/descriptors.
class Feature:
def __init__(self, image_name, detectorId, descriptorId):
# read image file and convert it to grayscale
self.origin_image = cv2.imread(image_name)
self.gray_image = cv2.cvtColor(self.origin_image,
cv2.COLOR_BGR2GRAY)
# setup combination detector - descriptor
print(Detector.getDetectorNameBasedOnId(detectorId))
print(Descriptor.getDescriptorNameBasedOnId(descriptorId))
self.detector = eval(Detector.getDetectorNameBasedOnId(detectorId))()
self.descriptor = eval(Descriptor.getDescriptorNameBasedOnId(descriptorId))()
self.flann = None
#--------------------------------------------------------
# Main methods
#--------------------------------------------------------
# Extract features from the image using the configured detector, then
# compute a descriptor for each keypoint.
# input:  showResult - when True, return the image with keypoints drawn on it
# return: the annotated image when showResult is True, otherwise None
def extract(self, showResult = False):
if self.detector:
# detect keypoints and extract descriptor of each keypoint
self.detector.detect(self.gray_image)
self.descriptor.describe(self.gray_image,
self.detector.keypoints)
print("keypoints: {}, descriptors: {}"\
.format(len(self.detector.keypoints),
self.descriptor.descriptions.size))
if showResult:
return self.showFeatures()
return None
# Match current feature with another
# input: target Feature
# return: matching mask between original vs. target features
def match(self, feature2, showResult = False):
if not self.flann:
self.initializeFlannMatcher()
# FLANN parameters
self.matches = self.flann.knnMatch(self.descriptor.descriptions,
feature2.descriptor.descriptions,
k=2)
# Need to draw only good matches, so create a mask
        self.matchesMask = [[0, 0] for i in range(len(self.matches))]
# ratio test as per Lowe's paper
for i, (m, n) in enumerate(self.matches):
if m.distance < 0.7 * n.distance:
self.matchesMask[i]=[1, 0]
if showResult:
return self.showMatches(feature2)
return None
#--------------------------------------------------------
# Inner methods
#--------------------------------------------------------
# Using Flann for descriptor matching
def initializeFlannMatcher(self):
FLANN_INDEX_LSH = 6
index_params = dict(algorithm = FLANN_INDEX_LSH,
table_number = 6,
key_size = 12,
multi_probe_level = 1)
# need to check type of key
search_params = dict(checks = 50)
self.flann = cv2.FlannBasedMatcher(index_params, search_params)
# draw keypoints & descriptors on origin image
def showFeatures(self):
if not self.detector.keypoints:
print('Keypoint not found!')
return None
cv2.drawKeypoints(self.origin_image,
self.detector.keypoints,
self.origin_image,
(0, 255, 0))
return self.origin_image
    # show keypoint matching between 2 images
def showMatches(self, feature2):
if not self.detector.keypoints or not feature2.detector.keypoints:
print('Keypoint not found!')
return
if not self.matchesMask or not self.matches:
print('Invalid matches!')
return
draw_params = dict(matchColor = (0, 255, 0),
singlePointColor = (255, 0, 0),
matchesMask = self.matchesMask,
flags = 0)
output = cv2.drawMatchesKnn(self.origin_image,
self.detector.keypoints,
feature2.origin_image,
feature2.detector.keypoints,
self.matches,
None,
**draw_params)
return output
#-------------------------------------------------------
#----------------- Keypoint Detector -------------------
class Detector:
def __init__(self):
self.detector = None
self.keypoints = None
def detect(self, gray_image):
if self.detector:
self.keypoints = self.detector.detect(gray_image, None)
# Static methods
@staticmethod
def getDetectorNameBasedOnId(index):
        if index < 1 or index > 8:
print('Invalid Detector!')
return None
return [
'AkazeDetector',
'KazeDetector',
'FASTDetector',
'BRISKDetector',
'ORBDetector',
'StarDetector',
'AGASTDetector',
'GFTTDetector'
][index - 1]
# Akaze (http://docs.opencv.org/trunk/d8/d30/classcv_1_1AKAZE.html)
class AkazeDetector(Detector):
def __init__(self):
Detector.__init__(self)
self.detector = cv2.AKAZE_create()
# Kaze (http://docs.opencv.org/trunk/d3/d61/classcv_1_1KAZE.html)
class KazeDetector(Detector):
def __init__(self):
Detector.__init__(self)
self.detector = cv2.KAZE_create()
# FAST (http://docs.opencv.org/trunk/df/d74/classcv_1_1FastFeatureDetector.html)
class FASTDetector(Detector):
def __init__(self):
Detector.__init__(self)
self.detector = cv2.FastFeatureDetector_create()
# BRISK (http://docs.opencv.org/trunk/de/dbf/classcv_1_1BRISK.html)
class BRISKDetector(Detector):
def __init__(self):
Detector.__init__(self)
self.detector = cv2.BRISK_create()
# ORB (http://docs.opencv.org/trunk/db/d95/classcv_1_1ORB.html)
class ORBDetector(Detector):
def __init__(self):
Detector.__init__(self)
self.detector = cv2.ORB_create()
# StarDetector
class StarDetector(Detector):
def __init__(self):
Detector.__init__(self)
self.detector = cv2.xfeatures2d.StarDetector_create()
# AGAST (http://docs.opencv.org/trunk/d7/d19/classcv_1_1AgastFeatureDetector.html)
class AGASTDetector(Detector):
def __init__(self):
Detector.__init__(self)
self.detector = cv2.AgastFeatureDetector_create()
# GFTT (http://docs.opencv.org/trunk/df/d21/classcv_1_1GFTTDetector.html)
class GFTTDetector(Detector):
def __init__(self):
Detector.__init__(self)
self.detector = cv2.GFTTDetector_create()
#--------------------------------------------------------
#------------------- Descriptor -------------------------
class Descriptor:
def __init__(self):
self.descriptor = None
self.descriptions = None
def describe(self, gray_image, keypoints):
if self.descriptor:
[__, self.descriptions] = self.descriptor.compute(gray_image, keypoints)
@staticmethod
def getDescriptorNameBasedOnId(index):
if index < 1 or index > 11:
print('Invalid Descriptor')
return None
return [
'AKAZEDescriptor',
'KAZEDescriptor',
'BRISKDescriptor',
'ORBDescriptor',
'BRIEFDescriptor',
'DAISYDescriptor',
'BoostDescriptor',
'FREAKDescriptor',
'LATCHDescriptor',
'LUCIDDescriptor',
'VGGDescriptor'
][index - 1]
# AKAZE (http://docs.opencv.org/trunk/d8/d30/classcv_1_1AKAZE.html)
class AKAZEDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.AKAZE_create()
# KAZE (http://docs.opencv.org/trunk/d3/d61/classcv_1_1KAZE.html)
class KAZEDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.KAZE_create()
# BRISK (http://docs.opencv.org/trunk/de/dbf/classcv_1_1BRISK.html)
class BRISKDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.BRISK_create()
# ORB (http://docs.opencv.org/trunk/db/d95/classcv_1_1ORB.html)
class ORBDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.ORB_create()
# BRIEF (http://docs.opencv.org/trunk/d1/d93/classcv_1_1xfeatures2d_1_1BriefDescriptorExtractor.html)
class BRIEFDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.xfeatures2d.BriefDescriptorExtractor_create()
# DAISY (http://docs.opencv.org/trunk/d9/d37/classcv_1_1xfeatures2d_1_1DAISY.html)
class DAISYDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.xfeatures2d.DAISY_create()
#BoostDesc (http://docs.opencv.org/trunk/d1/dfd/classcv_1_1xfeatures2d_1_1BoostDesc.html)
class BoostDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.xfeatures2d.BoostDesc_create()
# FREAK (http://docs.opencv.org/trunk/df/db4/classcv_1_1xfeatures2d_1_1FREAK.html)
class FREAKDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.xfeatures2d.FREAK_create()
# LATCH (http://docs.opencv.org/trunk/d6/d36/classcv_1_1xfeatures2d_1_1LATCH.html)
class LATCHDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.xfeatures2d.LATCH_create()
# LUCID (http://docs.opencv.org/trunk/d4/d86/classcv_1_1xfeatures2d_1_1LUCID.html)
class LUCIDDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
# 1=3x3, 2=5x5, 3=7x7 and so forth
self.descriptor = cv2.xfeatures2d.LUCID_create(lucid_kernel = 2,
blur_kernel = 2)
# VGG (http://docs.opencv.org/trunk/d6/d00/classcv_1_1xfeatures2d_1_1VGG.html)
class VGGDescriptor(Descriptor):
def __init__(self):
Descriptor.__init__(self)
self.descriptor = cv2.xfeatures2d.VGG_create()
#--------------------------------------------------------
#------------------- Blob Detector-----------------------
# SimpleBlobDetector
# MSER: Region detector, not key-points
class MSERFeature(Feature):
    def __init__(self, image_name):
        # Feature.__init__ expects detector/descriptor ids, so load the image directly.
        self.origin_image = cv2.imread(image_name)
        self.gray_image = cv2.cvtColor(self.origin_image, cv2.COLOR_BGR2GRAY)
        self.flann = None
def detect(self, showResult = False):
self.detector = cv2.MSER_create()
self.blobs = self.detector.detectRegions(
self.gray_image,
None)
if showResult:
return self.showFeatures()
def showFeatures(self):
img = self.origin_image.copy()
hulls = [cv2.convexHull(p.reshape(-1, 1, 2)) for p in self.blobs]
cv2.polylines(img, hulls, 1, (0, 255, 0))
return img
def match(self, feature2, showResult = False):
print("MSER can't do keypoints matching")
return None
#--------------------------------------------------------
#--------------------------------------------------------
def main(argv):
# Define argument list. Example:
# python feature.py -t 1
# -k 1
# -d 1
# -i1 resources/template1.jpg
# -i2 resources/test_template1.jpg
# -o .
parser = argparse.ArgumentParser(description='Feature 2D')
parser.add_argument('-t','--task',
help="""Specify task
1: Detecting
2: Matching
""",
required=True)
parser.add_argument('-k', '--keypoint',
help="""Specify keypoint detectors
1: AKAZE,
2: KAZE,
3: FAST,
4: BRISK,
5: ORB,
6: Star,
7: AGAST,
8: GFTT
""",
required=True)
parser.add_argument('-d', '--descriptor',
help="""Specify keypoint detectors
1: AKAZE,
2: KAZE,
3: BRISK,
4: ORB,
5: BRIEF,
6: DAISY,
7: Boost,
8: FREAK,
9: LATCH,
10: LUCID,
11: VGG
""",
required=True)
parser.add_argument('-i1','--input1',
help='Input image 1',
required=True)
parser.add_argument('-i2','--input2',
help='Input image 2 (for matching)')
parser.add_argument('-o','--output',
                        help='Output location',
required=True)
args = vars(parser.parse_args())
# extract arguments
task = int(args['task'])
detectorId = int(args['keypoint'])
descriptorId = int(args['descriptor'])
print('Argument parsed: ', task, detectorId, descriptorId)
if task != 1 and task != 2:
print("Invalid task: " + args['task'])
return
# find keypoints & build descriptor on input1 image
feature1 = Feature(args['input1'],
detectorId,
descriptorId)
output = feature1.extract(task == 1)
if task == 2:
if not args['input2']:
print("Missing second input image for matching!")
return
# find keypoints & build descriptor on input2 image
feature2 = Feature(args['input2'],
detectorId,
descriptorId)
feature2.extract()
        # match features between the 2 images, and save the result
output = feature1.match(feature2, True)
# save output
cv2.imwrite(args['output'] + '/output.jpg', output)
print("Output saved!")
if __name__ == '__main__':
main(sys.argv)
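# Usage sketch for the classes above (image paths are placeholders; detector
# id 5 and descriptor id 4 both map to ORB in the id tables defined earlier,
# which keeps the descriptors binary and compatible with the LSH matcher).
def _example_match(img1='img1.jpg', img2='img2.jpg'):
    f1 = Feature(img1, 5, 4)
    f2 = Feature(img2, 5, 4)
    f1.extract()
    f2.extract()
    matched = f1.match(f2, showResult=True)  # image with the good matches drawn
    cv2.imwrite('matches.jpg', matched)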
| hanguyen86/feature | src/feature.py | Python | bsd-2-clause | 15,196 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2016 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
#    the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import argparse
import json
import os
import platform
import subprocess
import sys
if sys.version_info.major < 3:
import codecs
def open(filename, mode='rb', encoding=None):
return codecs.open(filename, mode, encoding)
def get_conf():
try:
with open('conf.json', 'r', encoding='utf-8') as fp:
conf = json.load(fp)
except Exception:
if platform.system() == 'Windows':
conf_file = 'template.win.conf.json'
else:
conf_file = 'template.posix.conf.json'
with open(conf_file, 'r', encoding='utf-8') as fp:
conf = json.load(fp)
return conf
conf = get_conf()
proj_path = './'
def set_proj_path(path='./'):
global proj_path
proj_path = path
def get_proj_path():
global proj_path
return proj_path
def get_sources_list(root='./', fullpath=True):
cur_sources = os.listdir(root)
all_sources = []
for source in cur_sources:
source_fname = os.path.join(root, source).replace('\\', '/')
if os.path.isdir(source_fname):
next_sources = get_sources_list(source_fname, fullpath)
all_sources.extend(next_sources)
else:
if os.path.splitext(source)[1][1:] in conf['extensions']:
if fullpath:
all_sources.append(source_fname)
else:
all_sources.append(source)
return all_sources
def gen_options_string(key, options, functor=None, shave_last=False, posix=False):
OPTS_MAP = {
'cflags': ' -%s',
'preprocessor': ' -D%s',
'inc_dir': ' -I"%s"' if not posix else ' -I%s',
'ldflags': ' -%s',
'lib_dir': ' -LIBPATH:"%s"' if not posix else ' -L%s',
'dep_libs': ' %s',
'srcs': '%s ',
'objs': '%s.obj ' if not posix else '%s.o ',
}
if functor:
options = ''.join((OPTS_MAP[key] % functor(opt) for opt in options))
else:
options = ''.join((OPTS_MAP[key] % opt for opt in options))
if shave_last:
options = options[:-1]
return options
def gen_makefile_windows(pf, target, is_static=False, is_shared=False):
def get_obj(x):
x = x.split('/')[-1]
return os.path.splitext(x)[0]
all_sources = get_sources_list(root=get_proj_path(), fullpath=True)
mk_dict=dict(
out=target,
cflags=gen_options_string('cflags', conf.get('compile_options', [])),
preprocessor=gen_options_string('preprocessor', conf.get('precompile_options', [])),
inc_dir=gen_options_string('inc_dir', conf.get('inc_dir', [])),
srcs=gen_options_string('srcs', all_sources, shave_last=True),
objs=gen_options_string('objs', all_sources, functor=get_obj, shave_last=True)
)
if not is_static:
if is_shared:
mk_dict['proj_path'] = get_proj_path()
mk_dict['ldflags'] = gen_options_string('ldflags', conf.get('ldflags', []))
mk_dict['lib_dir'] = gen_options_string('lib_dir', conf.get('lib_dir', []))
mk_dict['dep_libs'] = gen_options_string('dep_libs', conf.get('dep_libraries', []))
return mk_dict
def gen_makefile_posix(pf, target, is_static=False, is_shared=False):
def get_posix_lib(lib):
if lib.endswith('.a') or lib.endswith('.so'):
return lib
else:
return lib.replace('lib', '-l')
all_sources = get_sources_list(root=get_proj_path(), fullpath=True)
mk_dict = dict(
out=target,
cflags=gen_options_string('cflags', conf.get('compile_options', []), posix=True),
inc_dir=gen_options_string('inc_dir', conf.get('inc_dir', []), posix=True),
srcs=gen_options_string('srcs', all_sources, shave_last=True, posix=True),
objs=gen_options_string('objs', all_sources, functor=lambda x: os.path.splitext(x)[0], shave_last=True, posix=True)
)
if not is_static:
ldflags = gen_options_string('ldflags', conf.get('ldflags', []), posix=True)
if not is_shared and pf == 'Linux' and ldflags.startswith(' '):
ldflags = ldflags[1:]
mk_dict['ldflags'] = ldflags
mk_dict['lib_dir'] = gen_options_string('lib_dir', conf.get('lib_dir', []), posix=True)
mk_dict['dep_libs'] = gen_options_string('dep_libs', conf.get('dep_libraries', []), functor=get_posix_lib, posix=True)
return mk_dict
def get_template_makefile(pf, is_static=False, is_shared=False):
fname_format = './templates/{pf}/{mk}.mk'
if is_static:
return fname_format.format(pf=pf, mk='static_lib')
elif is_shared:
return fname_format.format(pf=pf, mk='shared_lib')
else:
return fname_format.format(pf=pf, mk='bin')
def gen_makefile(pf):
GEN_FUNCTOR = {
'Windows': gen_makefile_windows,
'Linux': gen_makefile_posix,
'Darwin': gen_makefile_posix
}
fun = GEN_FUNCTOR.get(pf)
if not fun:
return
is_static = conf.get('static_lib', False)
is_shared = conf.get('shared_lib', False)
if is_static and is_shared:
        raise Exception('Cannot build static library and shared library at the same time')
mk_dict = fun(pf, conf['out'], is_static, is_shared)
mk = None
with open(get_template_makefile(pf, is_static, is_shared), 'r', encoding='utf-8') as rfp:
mk = rfp.read().format(**mk_dict)
with open('Makefile', 'w', encoding='utf-8') as wfp:
mk and wfp.write(mk)
def clean_windows():
if os.path.exists('Makefile'):
subprocess.check_call('nmake clean', shell=True)
subprocess.check_call('del Makefile', shell=True)
def clean_posix():
if os.path.exists('Makefile'):
subprocess.check_call('make clean', shell=True)
subprocess.check_call('rm Makefile', shell=True)
def build():
pf = platform.system()
if not os.path.exists('Makefile'):
gen_makefile(pf)
mk_cmd = 'nmake' if pf == 'Windows' else 'make'
subprocess.check_call(mk_cmd, shell=True)
def rebuild():
clean()
build()
def clean():
GEN_FUNCTOR = {
'Windows': clean_windows,
'Linux': clean_posix,
'Darwin': clean_posix
}
fun = GEN_FUNCTOR.get(platform.system())
fun and fun()
def get_build_arguments():
parser = argparse.ArgumentParser(description='C/C++ building tool')
parser.add_argument('option', help='[build|rebuild|clean] the project')
parser.add_argument('root', help='root path of the project', nargs='?', default='./')
args = parser.parse_args()
return args.option, args.root
def main():
option, path = get_build_arguments()
set_proj_path(path)
fun = getattr(sys.modules['__main__'], option, None)
fun and fun()
if __name__ == '__main__':
main()
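# Sketch of a conf.json matching the keys read above (all values are
# illustrative, not shipped templates); writing it out with json.dump keeps
# get_conf() from falling back to the platform template files.
_EXAMPLE_CONF = {
    'out': 'demo',
    'extensions': ['c', 'cc', 'cpp'],
    'compile_options': ['O2', 'Wall'],
    'precompile_options': ['NDEBUG'],
    'inc_dir': ['include'],
    'ldflags': [],
    'lib_dir': [],
    'dep_libraries': [],
    'static_lib': False,
    'shared_lib': False,
}
def _write_example_conf(path='conf.json'):
    with open(path, 'w', encoding='utf-8') as fp:
        json.dump(_EXAMPLE_CONF, fp, indent=2)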
| ASMlover/study | python/btool/build.py | Python | bsd-2-clause | 8,175 |
# -*- coding: utf-8 -*-
import logging
import functools
from django.contrib.gis.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.geos import fromstr, LineString
from mapentity.models import MapEntityMixin
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import (TimeStampedModelMixin, NoDeleteMixin,
AddPropertyMixin)
from geotrek.common.utils import classproperty
from geotrek.common.utils.postgresql import debug_pg_notices
from geotrek.altimetry.models import AltimetryMixin
from .helpers import PathHelper, TopologyHelper
from django.db import connections, DEFAULT_DB_ALIAS
logger = logging.getLogger(__name__)
class PathManager(models.GeoManager):
# Use this manager when walking through FK/M2M relationships
use_for_related_fields = True
def get_queryset(self):
"""Hide all ``Path`` records that are not marked as visible.
"""
return super(PathManager, self).get_queryset().filter(visible=True)
# GeoDjango note:
# Django automatically creates indexes on geometry fields but it uses a
# syntax which is not compatible with PostGIS 2.0. That's why index creation
# is explicitly disabled here (see manual index creation in custom SQL files).
class Path(AddPropertyMixin, MapEntityMixin, AltimetryMixin,
TimeStampedModelMixin, StructureRelated):
geom = models.LineStringField(srid=settings.SRID, spatial_index=False)
geom_cadastre = models.LineStringField(null=True, srid=settings.SRID, spatial_index=False,
editable=False)
valid = models.BooleanField(db_column='valide', default=True, verbose_name=_(u"Validity"),
help_text=_(u"Approved by manager"))
visible = models.BooleanField(db_column='visible', default=True, verbose_name=_(u"Visible"),
help_text=_(u"Shown in lists and maps"))
name = models.CharField(null=True, blank=True, max_length=20, db_column='nom', verbose_name=_(u"Name"),
help_text=_(u"Official name"))
comments = models.TextField(null=True, blank=True, db_column='remarques', verbose_name=_(u"Comments"),
help_text=_(u"Remarks"))
departure = models.CharField(null=True, blank=True, default="", max_length=250, db_column='depart', verbose_name=_(u"Departure"),
help_text=_(u"Departure place"))
arrival = models.CharField(null=True, blank=True, default="", max_length=250, db_column='arrivee', verbose_name=_(u"Arrival"),
help_text=_(u"Arrival place"))
comfort = models.ForeignKey('Comfort',
null=True, blank=True, related_name='paths',
verbose_name=_("Comfort"), db_column='confort')
source = models.ForeignKey('PathSource',
null=True, blank=True, related_name='paths',
verbose_name=_("Source"), db_column='source')
stake = models.ForeignKey('Stake',
null=True, blank=True, related_name='paths',
verbose_name=_("Maintenance stake"), db_column='enjeu')
usages = models.ManyToManyField('Usage',
blank=True, null=True, related_name="paths",
verbose_name=_(u"Usages"), db_table="l_r_troncon_usage")
networks = models.ManyToManyField('Network',
blank=True, null=True, related_name="paths",
verbose_name=_(u"Networks"), db_table="l_r_troncon_reseau")
eid = models.CharField(verbose_name=_(u"External id"), max_length=128, blank=True, db_column='id_externe')
objects = PathManager()
is_reversed = False
@property
def length_2d(self):
if self.geom:
return round(self.geom.length, 1)
else:
return None
@classproperty
def length_2d_verbose_name(cls):
return _(u"2D Length")
@property
def length_2d_display(self):
return self.length_2d
def __unicode__(self):
return self.name or _('path %d') % self.pk
class Meta:
db_table = 'l_t_troncon'
verbose_name = _(u"Path")
verbose_name_plural = _(u"Paths")
@classmethod
def closest(cls, point):
"""
        Returns the closest path to the point.
Will fail if no path in database.
"""
# TODO: move to custom manager
if point.srid != settings.SRID:
point = point.transform(settings.SRID, clone=True)
return cls.objects.all().distance(point).order_by('distance')[0]
def is_overlap(self):
return not PathHelper.disjoint(self.geom, self.pk)
def reverse(self):
"""
Reverse the geometry.
We keep track of this, since we will have to work on topologies at save()
"""
reversed_coord = self.geom.coords[-1::-1]
self.geom = LineString(reversed_coord)
self.is_reversed = True
return self
def interpolate(self, point):
"""
Returns position ([0.0-1.0]) and offset (distance) of the point
along this path.
"""
return PathHelper.interpolate(self, point)
def snap(self, point):
"""
        Returns the point snapped (i.e. closest) to the path line geometry.
"""
return PathHelper.snap(self, point)
def reload(self, fromdb=None):
# Update object's computed values (reload from database)
if self.pk and self.visible:
fromdb = self.__class__.objects.get(pk=self.pk)
self.geom = fromdb.geom
AltimetryMixin.reload(self, fromdb)
TimeStampedModelMixin.reload(self, fromdb)
return self
@debug_pg_notices
def save(self, *args, **kwargs):
# If the path was reversed, we have to invert related topologies
if self.is_reversed:
for aggr in self.aggregations.all():
aggr.start_position = 1 - aggr.start_position
aggr.end_position = 1 - aggr.end_position
aggr.save()
            self.is_reversed = False
super(Path, self).save(*args, **kwargs)
self.reload()
@property
def name_display(self):
return u'<a data-pk="%s" href="%s" title="%s" >%s</a>' % (self.pk,
self.get_detail_url(),
self,
self)
@property
def name_csv_display(self):
return unicode(self)
@classproperty
def trails_verbose_name(cls):
return _("Trails")
@property
def trails_display(self):
trails = getattr(self, '_trails', self.trails)
if trails:
return ", ".join([t.name_display for t in trails])
return _("None")
@property
def trails_csv_display(self):
trails = getattr(self, '_trails', self.trails)
if trails:
return ", ".join([unicode(t) for t in trails])
return _("None")
@classmethod
def get_create_label(cls):
return _(u"Add a new path")
@property
def checkbox(self):
return u'<input type="checkbox" name="{}[]" value="{}" />'.format('path',
self.pk)
@classproperty
def checkbox_verbose_name(cls):
return _("Action")
@property
def checkbox_display(self):
return self.checkbox
def merge_path(self, path_to_merge):
"""
Path unification
        :param path_to_merge: Path instance to merge
:return: Boolean
"""
if (self.pk and path_to_merge) and (self.pk != path_to_merge.pk):
conn = connections[DEFAULT_DB_ALIAS]
cursor = conn.cursor()
sql = "SELECT ft_merge_path({}, {});".format(self.pk, path_to_merge.pk)
cursor.execute(sql)
result = cursor.fetchall()[0][0]
if result:
# reload object after unification
self.reload()
return result
class Topology(AddPropertyMixin, AltimetryMixin, TimeStampedModelMixin, NoDeleteMixin):
paths = models.ManyToManyField(Path, db_column='troncons', through='PathAggregation', verbose_name=_(u"Path"))
offset = models.FloatField(default=0.0, db_column='decallage', verbose_name=_(u"Offset")) # in SRID units
kind = models.CharField(editable=False, verbose_name=_(u"Kind"), max_length=32)
# Override default manager
objects = NoDeleteMixin.get_manager_cls(models.GeoManager)()
geom = models.GeometryField(editable=(not settings.TREKKING_TOPOLOGY_ENABLED),
srid=settings.SRID, null=True,
default=None, spatial_index=False)
""" Fake srid attribute, that prevents transform() calls when using Django map widgets. """
srid = settings.API_SRID
class Meta:
db_table = 'e_t_evenement'
verbose_name = _(u"Topology")
verbose_name_plural = _(u"Topologies")
def __init__(self, *args, **kwargs):
super(Topology, self).__init__(*args, **kwargs)
if not self.pk:
self.kind = self.__class__.KIND
@property
def length_2d(self):
if self.geom and not self.ispoint():
return round(self.geom.length, 1)
else:
return None
@classproperty
def length_2d_verbose_name(cls):
return _(u"2D Length")
@property
def length_2d_display(self):
return self.length_2d
@classproperty
def KIND(cls):
return cls._meta.object_name.upper()
def __unicode__(self):
return u"%s (%s)" % (_(u"Topology"), self.pk)
def ispoint(self):
if not settings.TREKKING_TOPOLOGY_ENABLED or not self.pk:
return self.geom and self.geom.geom_type == 'Point'
return all([a.start_position == a.end_position for a in self.aggregations.all()])
def add_path(self, path, start=0.0, end=1.0, order=0, reload=True):
"""
Shortcut function to add paths into this topology.
"""
from .factories import PathAggregationFactory
aggr = PathAggregationFactory.create(topo_object=self,
path=path,
start_position=start,
end_position=end,
order=order)
if self.deleted:
self.deleted = False
self.save(update_fields=['deleted'])
# Since a trigger modifies geom, we reload the object
if reload:
self.reload()
return aggr
@classmethod
def overlapping(cls, topologies):
""" Return a Topology queryset overlapping specified topologies.
"""
return TopologyHelper.overlapping(cls, topologies)
def mutate(self, other, delete=True):
"""
        Take all attributes of the other topology specified and
        save them into this one. Optionally deletes the other.
"""
self.offset = other.offset
self.save(update_fields=['offset'])
PathAggregation.objects.filter(topo_object=self).delete()
# The previous operation has put deleted = True (in triggers)
# and NULL in geom (see update_geometry_of_evenement:: IF t_count = 0)
self.deleted = False
self.geom = other.geom
self.save(update_fields=['deleted', 'geom'])
        # Now copy all aggregations from other to self
aggrs = other.aggregations.all()
# A point has only one aggregation, except if it is on an intersection.
# In this case, the trigger will create them, so ignore them here.
if other.ispoint():
aggrs = aggrs[:1]
for aggr in aggrs:
self.add_path(aggr.path, aggr.start_position, aggr.end_position, aggr.order, reload=False)
self.reload()
if delete:
other.delete(force=True) # Really delete it from database
return self
def reload(self, fromdb=None):
"""
Reload into instance all computed attributes in triggers.
"""
if self.pk:
# Update computed values
fromdb = self.__class__.objects.get(pk=self.pk)
self.geom = fromdb.geom
# /!\ offset may be set by a trigger OR in
# the django code, reload() will override
# any unsaved value
self.offset = fromdb.offset
AltimetryMixin.reload(self, fromdb)
TimeStampedModelMixin.reload(self, fromdb)
NoDeleteMixin.reload(self, fromdb)
return self
@debug_pg_notices
def save(self, *args, **kwargs):
        # HACK: these fields are readonly from the Django point of view
        # but they can be changed at DB level. Since Django writes all fields
        # to DB anyway, it is important to update them before writing
if self.pk and settings.TREKKING_TOPOLOGY_ENABLED:
existing = self.__class__.objects.get(pk=self.pk)
self.length = existing.length
# In the case of points, the geom can be set by Django. Don't override.
point_geom_not_set = self.ispoint() and self.geom is None
geom_already_in_db = not self.ispoint() and existing.geom is not None
if (point_geom_not_set or geom_already_in_db):
self.geom = existing.geom
else:
if not self.deleted and self.geom is None:
# We cannot have NULL geometry. So we use an empty one,
# it will be computed or overwritten by triggers.
self.geom = fromstr('POINT (0 0)')
if not self.kind:
if self.KIND == "TOPOLOGYMIXIN":
raise Exception("Cannot save abstract topologies")
self.kind = self.__class__.KIND
# Static value for Topology offset, if any
shortmodelname = self._meta.object_name.lower().replace('edge', '')
self.offset = settings.TOPOLOGY_STATIC_OFFSETS.get(shortmodelname, self.offset)
# Save into db
super(Topology, self).save(*args, **kwargs)
self.reload()
def serialize(self, **kwargs):
return TopologyHelper.serialize(self, **kwargs)
@classmethod
def deserialize(cls, serialized):
return TopologyHelper.deserialize(serialized)
def distance(self, to_cls):
"""Distance to associate this topology to another topology class"""
return None
class PathAggregationManager(models.GeoManager):
def get_queryset(self):
return super(PathAggregationManager, self).get_queryset().order_by('order')
class PathAggregation(models.Model):
path = models.ForeignKey(Path, null=False, db_column='troncon',
verbose_name=_(u"Path"),
related_name="aggregations",
on_delete=models.DO_NOTHING) # The CASCADE behavior is enforced at DB-level (see file ../sql/20_evenements_troncons.sql)
topo_object = models.ForeignKey(Topology, null=False, related_name="aggregations",
db_column='evenement', verbose_name=_(u"Topology"))
start_position = models.FloatField(db_column='pk_debut', verbose_name=_(u"Start position"), db_index=True)
end_position = models.FloatField(db_column='pk_fin', verbose_name=_(u"End position"), db_index=True)
order = models.IntegerField(db_column='ordre', default=0, blank=True, null=True, verbose_name=_(u"Order"))
# Override default manager
objects = PathAggregationManager()
def __unicode__(self):
return u"%s (%s-%s: %s - %s)" % (_("Path aggregation"), self.path.pk, self.path.name, self.start_position, self.end_position)
@property
def start_meter(self):
try:
return 0 if self.start_position == 0.0 else int(self.start_position * self.path.length)
except ValueError:
return -1
@property
def end_meter(self):
try:
return 0 if self.end_position == 0.0 else int(self.end_position * self.path.length)
except ValueError:
return -1
@property
def is_full(self):
return (self.start_position == 0.0 and self.end_position == 1.0 or
self.start_position == 1.0 and self.end_position == 0.0)
@debug_pg_notices
def save(self, *args, **kwargs):
return super(PathAggregation, self).save(*args, **kwargs)
class Meta:
db_table = 'e_r_evenement_troncon'
verbose_name = _(u"Path aggregation")
verbose_name_plural = _(u"Path aggregations")
        # Important - represents the order of the path in the Topology path list
ordering = ['order', ]
class PathSource(StructureRelated):
source = models.CharField(verbose_name=_(u"Source"), max_length=50)
class Meta:
db_table = 'l_b_source_troncon'
verbose_name = _(u"Path source")
verbose_name_plural = _(u"Path sources")
ordering = ['source']
def __unicode__(self):
return self.source
@functools.total_ordering
class Stake(StructureRelated):
stake = models.CharField(verbose_name=_(u"Stake"), max_length=50, db_column='enjeu')
class Meta:
db_table = 'l_b_enjeu'
verbose_name = _(u"Maintenance stake")
verbose_name_plural = _(u"Maintenance stakes")
ordering = ['id']
def __lt__(self, other):
if other is None:
return False
return self.pk < other.pk
def __eq__(self, other):
return isinstance(other, Stake) \
and self.pk == other.pk
def __unicode__(self):
return self.stake
class Comfort(StructureRelated):
comfort = models.CharField(verbose_name=_(u"Comfort"), max_length=50, db_column='confort')
class Meta:
db_table = 'l_b_confort'
verbose_name = _(u"Comfort")
verbose_name_plural = _(u"Comforts")
ordering = ['comfort']
def __unicode__(self):
return self.comfort
class Usage(StructureRelated):
usage = models.CharField(verbose_name=_(u"Usage"), max_length=50, db_column='usage')
class Meta:
db_table = 'l_b_usage'
verbose_name = _(u"Usage")
verbose_name_plural = _(u"Usages")
ordering = ['usage']
def __unicode__(self):
return self.usage
class Network(StructureRelated):
network = models.CharField(verbose_name=_(u"Network"), max_length=50, db_column='reseau')
class Meta:
db_table = 'l_b_reseau'
verbose_name = _(u"Network")
verbose_name_plural = _(u"Networks")
ordering = ['network']
def __unicode__(self):
return self.network
class Trail(MapEntityMixin, Topology, StructureRelated):
topo_object = models.OneToOneField(Topology, parent_link=True,
db_column='evenement')
name = models.CharField(verbose_name=_(u"Name"), max_length=64, db_column='nom')
departure = models.CharField(verbose_name=_(u"Departure"), max_length=64, db_column='depart')
arrival = models.CharField(verbose_name=_(u"Arrival"), max_length=64, db_column='arrivee')
comments = models.TextField(default="", blank=True, verbose_name=_(u"Comments"), db_column='commentaire')
class Meta:
db_table = 'l_t_sentier'
verbose_name = _(u"Trail")
verbose_name_plural = _(u"Trails")
ordering = ['name']
objects = Topology.get_manager_cls(models.GeoManager)()
def __unicode__(self):
return self.name
@property
def name_display(self):
return u'<a data-pk="%s" href="%s" title="%s" >%s</a>' % (self.pk,
self.get_detail_url(),
self,
self)
@classmethod
def path_trails(cls, path):
return cls.objects.existing().filter(aggregations__path=path)
Path.add_property('trails', lambda self: Trail.path_trails(self), _(u"Trails"))
Topology.add_property('trails', lambda self: Trail.overlapping(self), _(u"Trails"))
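# Usage sketch for the models above (coordinates, SRID handling and field
# values are illustrative only): find the path closest to a point and hang
# a new Trail topology on its first half.
def _example_trail():
    point = fromstr('POINT (700000 6600000)', srid=settings.SRID)
    path = Path.closest(point)
    trail = Trail.objects.create(name=u"Demo trail", departure=u"A", arrival=u"B")
    trail.add_path(path, start=0.0, end=0.5)  # attach the first half of the path
    return trail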
| Anaethelion/Geotrek | geotrek/core/models.py | Python | bsd-2-clause | 20,663 |
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import os
import csv
import rdflib
import logging
import requests
try:
# Python 3
from functools import lru_cache
from urllib.error import HTTPError
except ImportError:
# Python 2
from functools32 import lru_cache
from urllib2 import HTTPError
from indra.util import read_unicode_csv
logger = logging.getLogger('uniprot')
uniprot_url = 'http://www.uniprot.org/uniprot/'
rdf_prefixes = """
PREFIX up: <http://purl.uniprot.org/core/>
PREFIX db: <http://purl.uniprot.org/database/>
PREFIX faldo: <http://biohackathon.org/resource/faldo#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> """
@lru_cache(maxsize=10000)
def query_protein(protein_id):
"""Return the UniProt entry as an RDF graph for the given UniProt ID.
Parameters
----------
protein_id : str
UniProt ID to be queried.
Returns
-------
g : rdflib.Graph
The RDF graph corresponding to the UniProt entry.
"""
# Try looking up a primary ID if the given one
# is a secondary ID
try:
prim_ids = uniprot_sec[protein_id]
protein_id = prim_ids[0]
except KeyError:
pass
url = uniprot_url + protein_id + '.rdf'
g = rdflib.Graph()
try:
g.parse(url)
except HTTPError:
logger.warning('Could not find protein with id %s' % protein_id)
return None
except rdflib.exceptions.ParserError as e:
logger.error('Could not parse RDF at %s' % url)
logger.error(e)
return None
# Check if the entry has been replaced by a new entry
query = rdf_prefixes + """
SELECT ?res2
WHERE {
?res1 up:replacedBy ?res2 .
}
"""
res = g.query(query)
if res:
term = [r for r in res][0][0]
replaced_by_id = term.split('/')[-1]
return query_protein(replaced_by_id)
return g
def is_secondary(protein_id):
"""Return True if the UniProt ID corresponds to a secondary accession.
Parameters
----------
protein_id : str
The UniProt ID to check.
Returns
-------
    True if it is a secondary accession entry, False otherwise.
"""
entry = uniprot_sec.get(protein_id)
if not entry:
return False
return True
def get_primary_id(protein_id):
"""Return a primary entry corresponding to the UniProt ID.
Parameters
----------
protein_id : str
The UniProt ID to map to primary.
Returns
-------
primary_id : str
        If the given ID is primary, it is returned as is. Otherwise the primary
IDs are looked up. If there are multiple primary IDs then the first
human one is returned. If there are no human primary IDs then the
first primary found is returned.
"""
primaries = uniprot_sec.get(protein_id)
if primaries:
if len(primaries) > 1:
logger.debug('More than 1 primary ID for %s.' % protein_id)
for primary in primaries:
# Often secondary IDs were broken into multiple primary IDs
# for different organisms. In this case we return the human
# one if it exists.
if is_human(primary):
return primary
# If we haven't returned anything then we just return the
# first primary id
return primaries[0]
    # If there is no secondary entry then we assume this is a primary entry
return protein_id
def get_family_members(family_name, human_only=True):
"""Return the HGNC gene symbols which are the members of a given family.
Parameters
----------
family_name : str
Family name to be queried.
human_only : bool
If True, only human proteins in the family will be returned.
Default: True
Returns
-------
gene_names : list
The HGNC gene symbols corresponding to the given family.
"""
data = {'query': 'family:%s' % family_name,
'format': 'list'}
if human_only:
data['fil'] = 'organism:human'
res = requests.get(uniprot_url, params=data)
if not res.status_code == 200 or not res.text:
return None
# res.text gets us the Unicode
html = res.text
protein_list = html.strip().split('\n')
gene_names = []
for p in protein_list:
gene_name = get_gene_name(p)
gene_names.append(gene_name)
return gene_names
def get_mnemonic(protein_id, web_fallback=True):
"""Return the UniProt mnemonic for the given UniProt ID.
Parameters
----------
protein_id : str
UniProt ID to be mapped.
web_fallback : Optional[bool]
If True and the offline lookup fails, the UniProt web service
is used to do the query.
Returns
-------
mnemonic : str
The UniProt mnemonic corresponding to the given Uniprot ID.
"""
try:
mnemonic = uniprot_mnemonic[protein_id]
return mnemonic
except KeyError:
pass
if not web_fallback:
return None
g = query_protein(protein_id)
if g is None:
return None
query = rdf_prefixes + """
SELECT ?mnemonic
WHERE {
?r up:mnemonic ?mnemonic .
}
"""
res = g.query(query)
if res:
mnemonic = [r for r in res][0][0].toPython()
return mnemonic
else:
return None
def get_id_from_mnemonic(uniprot_mnemonic):
"""Return the UniProt ID for the given UniProt mnemonic.
Parameters
----------
uniprot_mnemonic : str
UniProt mnemonic to be mapped.
Returns
-------
uniprot_id : str
The UniProt ID corresponding to the given Uniprot mnemonic.
"""
try:
uniprot_id = uniprot_mnemonic_reverse[uniprot_mnemonic]
return uniprot_id
except KeyError:
return None
def get_gene_name(protein_id, web_fallback=True):
"""Return the gene name for the given UniProt ID.
    This is an alternative to get_hgnc_name and is useful when the
    HGNC name is not available (for instance, when the organism
    is not Homo sapiens).
Parameters
----------
protein_id : str
UniProt ID to be mapped.
web_fallback : Optional[bool]
If True and the offline lookup fails, the UniProt web service
is used to do the query.
Returns
-------
gene_name : str
The gene name corresponding to the given Uniprot ID.
"""
try:
gene_name = uniprot_gene_name[protein_id]
# There are cases when the entry is in the resource
# table but the gene name is empty. Often this gene
# name is actually available in the web service RDF
# so here we return only if the gene name is not None
# and not empty string.
if gene_name:
return gene_name
except KeyError:
pass
if not web_fallback:
return None
g = query_protein(protein_id)
if g is None:
return None
query = rdf_prefixes + """
SELECT ?name
WHERE {
?gene a up:Gene .
?gene skos:prefLabel ?name .
}
"""
res = g.query(query)
if res:
gene_name = [r for r in res][0][0].toPython()
if not gene_name:
return None
return gene_name
return None
@lru_cache(maxsize=1000)
def get_sequence(protein_id):
try:
prim_ids = uniprot_sec[protein_id]
protein_id = prim_ids[0]
except KeyError:
pass
url = uniprot_url + '%s.fasta' % protein_id
res = requests.get(url)
if not res.status_code == 200:
logger.warning('Could not find sequence for protein %s' % protein_id)
return None
# res.text is Unicode
lines = res.text.splitlines()
seq = (''.join(lines[1:])).replace('\n','')
return seq
def get_modifications(protein_id):
g = query_protein(protein_id)
if g is None:
return None
query = rdf_prefixes + """
SELECT ?beg_pos ?comment
WHERE {
?mod_res a up:Modified_Residue_Annotation .
?mod_res rdfs:comment ?comment .
?mod_res up:range ?range .
?range faldo:begin ?beg .
?range faldo:end ?end .
?beg a faldo:ExactPosition .
?beg faldo:position ?beg_pos .
FILTER (?beg = ?end)
}
"""
res = g.query(query)
mods = []
for r in res:
mod_pos = r[0].value
# "Phosphothreonine; by autocatalysis"
# "Phosphothreonine; by MAP2K1 and MAP2K2"
# TODO: take into account the comment after the ;?
mod_res = r[1].value.split(';')[0]
mods.append((mod_res, mod_pos))
return mods
def verify_location(protein_id, residue, location):
"""Return True if the residue is at the given location in the UP sequence.
Parameters
----------
protein_id : str
UniProt ID of the protein whose sequence is used as reference.
residue : str
A single character amino acid symbol (Y, S, T, V, etc.)
location : str
The location on the protein sequence (starting at 1) at which the
residue should be checked against the reference sequence.
Returns
-------
True if the given residue is at the given position in the sequence
corresponding to the given UniProt ID, otherwise False.
"""
seq = get_sequence(protein_id)
# If we couldn't get the sequence (can happen due to web service hiccups)
# don't throw the statement away by default
if seq is None:
return True
try:
loc_int = int(location)
except ValueError:
logger.warning('Invalid location %s' % location)
loc_int = -1
if (loc_int < 1) or (loc_int > len(seq)):
return False
elif seq[loc_int - 1] == residue:
return True
return False
def verify_modification(protein_id, residue, location=None):
"""Return True if the residue at the given location has a known modifiation.
Parameters
----------
protein_id : str
UniProt ID of the protein whose sequence is used as reference.
residue : str
A single character amino acid symbol (Y, S, T, V, etc.)
location : Optional[str]
The location on the protein sequence (starting at 1) at which the
modification is checked.
Returns
-------
True if the given residue is reported to be modified at the given position
in the sequence corresponding to the given UniProt ID, otherwise False.
If location is not given, we only check if there is any residue of the
given type that is modified.
"""
mods = get_modifications(protein_id)
mod_locs = [m[1] for m in mods]
seq = get_sequence(protein_id)
if location:
if not verify_location(protein_id, residue, location):
return False
try:
mod_idx = mod_locs.index(location)
except ValueError:
return False
return True
else:
for ml in mod_locs:
if seq[ml - 1] == residue:
return True
return False
def is_human(protein_id):
"""Return True if the given protein id corresponds to a human protein.
Parameters
----------
protein_id : str
UniProt ID of the protein
Returns
-------
True if the protein_id corresponds to a human protein, otherwise False.
"""
mnemonic = get_mnemonic(protein_id)
if mnemonic is None:
return False
if mnemonic.endswith('HUMAN'):
return True
return False
def _build_uniprot_entries():
up_entries_file = os.path.dirname(os.path.abspath(__file__)) + \
'/../resources/uniprot_entries.tsv'
uniprot_gene_name = {}
uniprot_mnemonic = {}
uniprot_mnemonic_reverse = {}
try:
csv_rows = read_unicode_csv(up_entries_file, delimiter='\t')
# Skip the header row
next(csv_rows)
for row in csv_rows:
up_id = row[0]
gene_name = row[1]
up_mnemonic = row[3]
uniprot_gene_name[up_id] = gene_name
uniprot_mnemonic[up_id] = up_mnemonic
uniprot_mnemonic_reverse[up_mnemonic] = up_id
except IOError:
pass
return uniprot_gene_name, uniprot_mnemonic, uniprot_mnemonic_reverse
def _build_uniprot_hgnc():
hgnc_file = os.path.dirname(os.path.abspath(__file__)) +\
'/../resources/hgnc_entries.txt'
try:
csv_rows = read_unicode_csv(hgnc_file, delimiter='\t')
# Skip the header row
next(csv_rows)
uniprot_hgnc = {}
for row in csv_rows:
hgnc_name = row[1]
uniprot_id = row[6]
if uniprot_id:
uniprot_hgnc[uniprot_id] = hgnc_name
except IOError:
uniprot_hgnc = {}
return uniprot_hgnc
def _build_uniprot_sec():
# File containing secondary accession numbers mapped
# to primary accession numbers
sec_file = os.path.dirname(os.path.abspath(__file__)) +\
'/../resources/uniprot_sec_ac.txt'
try:
uniprot_sec = {}
lines = open(sec_file, 'rt').readlines()
for i, l in enumerate(lines):
if l.startswith('Secondary AC'):
entry_lines = lines[i+2:]
for l in entry_lines:
sec_id, prim_id = l.split()
try:
uniprot_sec[sec_id].append(prim_id)
except KeyError:
uniprot_sec[sec_id] = [prim_id]
except IOError:
uniprot_sec = {}
return uniprot_sec
def _build_uniprot_subcell_loc():
fname = os.path.dirname(os.path.abspath(__file__)) +\
'/../resources/uniprot_subcell_loc.tsv'
try:
csv_rows = read_unicode_csv(fname, delimiter='\t')
# Skip the header row
next(csv_rows)
subcell_loc = {}
for row in csv_rows:
loc_id = row[0]
loc_alias = row[3]
subcell_loc[loc_id] = loc_alias
except IOError:
subcell_loc = {}
return subcell_loc
uniprot_gene_name, uniprot_mnemonic, uniprot_mnemonic_reverse = \
_build_uniprot_entries()
uniprot_sec = _build_uniprot_sec()
uniprot_subcell_loc = _build_uniprot_subcell_loc()
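# Usage sketch for the client functions above ('P00533' is only an example
# accession; the residue/position pair is illustrative, not a curated fact).
def _example():
    up_id = get_primary_id('P00533')
    print(get_gene_name(up_id), get_mnemonic(up_id), is_human(up_id))
    print(verify_location(up_id, 'T', 185))      # residue check against the sequence
    print(verify_modification(up_id, 'T', 185))  # any annotated modification there?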
| jmuhlich/indra | indra/databases/uniprot_client.py | Python | bsd-2-clause | 14,426 |
#!/usr/bin/env python3
"""
Nisetango
Builds a string based on a pattern to insert the letter U in specific
places. The result emulates how the author thinks Japanese people
sound when they speak Western (Swedish) languages.
"""
import re
import sys
def nisetango(str_, to_upper=True):
"""
Nonsense-japanese from string.
"""
try:
str_.upper()
except (AttributeError, TypeError):
raise AssertionError('Input must be a string.')
CONSONANTS = 'bfjkmqvwxz'
VOWELS = 'aeiouyåäö'
append_u = r"""
(
[{cons}]+(?!\b) # any 'CONSONANTS'
|
c+(?![hk]|\b) # ch, ck
|
d+(?![j]) # dj
|
g+(?![j]) # gj
|
l+(?![j]|\b) # lj
|
n+(?![gd]|\b) # ng, nd
|
p+(?![h]) # ph, word boundary
|
r+ # word boundary
|
s+(?![chjk]|\b) # sc, sh, sj, sk
|
t+(?![ij]|\b) # ti, tj
)
(?! # not followed by
\1 # the same as captured above
|
[{vow}] # a VOWEL
)
""".format(cons = CONSONANTS, vow = VOWELS)
append_u = (append_u, r'\1u')
single_ell = (r'(?<!l)l(?!l|\b)', 'r')
e_end_of_word = (r'(?<!u)e\b', 'u')
ou_to_oo = (r'ou\b', 'oo')
output = str_
FLAGS = re.IGNORECASE|re.VERBOSE
replace = lambda s, r: re.sub(s, r, output, 0, FLAGS)
replacements = [append_u, single_ell, e_end_of_word, ou_to_oo]
for s, r in replacements:
output = replace(s, r)
if to_upper:
return output.upper()
else:
return output
def main():
if len(sys.argv) < 2:
usage = '{}\nusage: {} [input string]'
sys.exit(usage.format(nisetango.__doc__, sys.argv[0]))
else:
print(nisetango(' '.join(sys.argv[1:])))
if __name__ == '__main__':
main()
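# Usage sketch: the input string is arbitrary; to_upper=False keeps the
# original casing instead of upper-casing the result.
def _example():
    print(nisetango('en helt vanlig mening'))
    print(nisetango('en helt vanlig mening', to_upper=False))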
| antoneri/nisetango | nisetango.py | Python | bsd-2-clause | 2,199 |
from django.views.generic import TemplateView
class ScrapsIndexView(TemplateView):
template_name = '404.html'
index = ScrapsIndexView.as_view()
| hipikat/hipikat.org | src/hipikat/views/scraps.py | Python | bsd-2-clause | 151 |
###############################################################################
#
# Format - A class for writing the Excel XLSX Format file.
#
# Copyright 2013-2016, John McNamara, jmcnamara@cpan.org
#
# Package imports.
from . import xmlwriter
class Format(xmlwriter.XMLwriter):
"""
A class for writing the Excel XLSX Format file.
"""
###########################################################################
#
# Public API.
#
###########################################################################
def __init__(self, properties={}, xf_indices=None, dxf_indices=None):
"""
Constructor.
"""
super(Format, self).__init__()
self.xf_format_indices = xf_indices
self.dxf_format_indices = dxf_indices
self.xf_index = None
self.dxf_index = None
self.num_format = 0
self.num_format_index = 0
self.font_index = 0
self.has_font = 0
self.has_dxf_font = 0
self.bold = 0
self.underline = 0
self.italic = 0
self.font_name = 'Calibri'
self.font_size = 11
self.font_color = 0x0
self.font_strikeout = 0
self.font_outline = 0
self.font_shadow = 0
self.font_script = 0
self.font_family = 2
self.font_charset = 0
self.font_scheme = 'minor'
self.font_condense = 0
self.font_extend = 0
self.theme = 0
self.hyperlink = 0
self.hidden = 0
self.locked = 1
self.text_h_align = 0
self.text_wrap = 0
self.text_v_align = 0
self.text_justlast = 0
self.rotation = 0
self.fg_color = 0
self.bg_color = 0
self.pattern = 0
self.has_fill = 0
self.has_dxf_fill = 0
self.fill_index = 0
self.fill_count = 0
self.border_index = 0
self.has_border = 0
self.has_dxf_border = 0
self.border_count = 0
self.bottom = 0
self.bottom_color = 0
self.diag_border = 0
self.diag_color = 0
self.diag_type = 0
self.left = 0
self.left_color = 0
self.right = 0
self.right_color = 0
self.top = 0
self.top_color = 0
self.indent = 0
self.shrink = 0
self.merge_range = 0
self.reading_order = 0
self.just_distrib = 0
self.color_indexed = 0
self.font_only = 0
# Convert properties in the constructor to method calls.
for key, value in properties.items():
getattr(self, 'set_' + key)(value)
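    # A short sketch of the dispatch above (assuming formats are normally
    # obtained via Workbook.add_format rather than constructed directly):
    #     fmt = Format({'bold': 1, 'font_color': 'red', 'align': 'center'})
    # is equivalent to calling fmt.set_bold(1), fmt.set_font_color('red') and
    # fmt.set_align('center') on a plain Format().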
###########################################################################
#
# Format properties.
#
###########################################################################
def set_font_name(self, font_name):
"""
        Set the Format font_name property such as 'Times New Roman'. The
default Excel font is 'Calibri'.
Args:
font_name: String with the font name. No default.
Returns:
Nothing.
"""
self.font_name = font_name
def set_font_size(self, font_size=11):
"""
Set the Format font_size property. The default Excel font size is 11.
Args:
font_size: Int with font size. No default.
Returns:
Nothing.
"""
self.font_size = font_size
def set_font_color(self, font_color):
"""
Set the Format font_color property. The Excel default is black.
Args:
font_color: String with the font color. No default.
Returns:
Nothing.
"""
self.font_color = self._get_color(font_color)
def set_bold(self, bold=1):
"""
Set the Format bold property.
Args:
bold: Default is 1, turns property on.
Returns:
Nothing.
"""
self.bold = bold
def set_italic(self, italic=1):
"""
Set the Format italic property.
Args:
italic: Default is 1, turns property on.
Returns:
Nothing.
"""
self.italic = italic
def set_underline(self, underline=1):
"""
Set the Format underline property.
Args:
underline: Default is 1, single underline.
Returns:
Nothing.
"""
self.underline = underline
def set_font_strikeout(self, font_strikeout=1):
"""
Set the Format font_strikeout property.
Args:
font_strikeout: Default is 1, turns property on.
Returns:
Nothing.
"""
self.font_strikeout = font_strikeout
def set_font_script(self, font_script=1):
"""
Set the Format font_script property.
Args:
font_script: Default is 1, superscript.
Returns:
Nothing.
"""
self.font_script = font_script
def set_font_outline(self, font_outline=1):
"""
Set the Format font_outline property.
Args:
font_outline: Default is 1, turns property on.
Returns:
Nothing.
"""
self.font_outline = font_outline
def set_font_shadow(self, font_shadow=1):
"""
Set the Format font_shadow property.
Args:
font_shadow: Default is 1, turns property on.
Returns:
Nothing.
"""
self.font_shadow = font_shadow
def set_num_format(self, num_format):
"""
Set the Format num_format property such as '#,##0'.
Args:
num_format: String representing the number format. No default.
Returns:
Nothing.
"""
self.num_format = num_format
def set_locked(self, locked=1):
"""
Set the Format locked property.
Args:
locked: Default is 1, turns property on.
Returns:
Nothing.
"""
self.locked = locked
def set_hidden(self, hidden=1):
"""
Set the Format hidden property.
Args:
hidden: Default is 1, turns property on.
Returns:
Nothing.
"""
self.hidden = hidden
def set_align(self, alignment):
"""
Set the Format cell alignment.
Args:
alignment: String representing alignment. No default.
Returns:
Nothing.
"""
alignment = alignment.lower()
# Set horizontal alignment properties.
if alignment == 'left':
self.set_text_h_align(1)
if alignment == 'centre':
self.set_text_h_align(2)
if alignment == 'center':
self.set_text_h_align(2)
if alignment == 'right':
self.set_text_h_align(3)
if alignment == 'fill':
self.set_text_h_align(4)
if alignment == 'justify':
self.set_text_h_align(5)
if alignment == 'center_across':
self.set_text_h_align(6)
if alignment == 'centre_across':
self.set_text_h_align(6)
if alignment == 'distributed':
self.set_text_h_align(7)
if alignment == 'justify_distributed':
self.set_text_h_align(7)
if alignment == 'justify_distributed':
self.just_distrib = 1
# Set vertical alignment properties.
if alignment == 'top':
self.set_text_v_align(1)
if alignment == 'vcentre':
self.set_text_v_align(2)
if alignment == 'vcenter':
self.set_text_v_align(2)
if alignment == 'bottom':
self.set_text_v_align(3)
if alignment == 'vjustify':
self.set_text_v_align(4)
if alignment == 'vdistributed':
self.set_text_v_align(5)
def set_center_across(self):
"""
Set the Format center_across property.
Returns:
Nothing.
"""
self.set_text_h_align(6)
def set_text_wrap(self, text_wrap=1):
"""
Set the Format text_wrap property.
Args:
text_wrap: Default is 1, turns property on.
Returns:
Nothing.
"""
self.text_wrap = text_wrap
def set_rotation(self, rotation):
"""
Set the Format rotation property.
Args:
rotation: Rotation angle. No default.
Returns:
Nothing.
"""
rotation = int(rotation)
# Map user angle to Excel angle.
if rotation == 270:
rotation = 255
elif -90 <= rotation <= 90:
if rotation < 0:
rotation = -rotation + 90
else:
raise Exception(
"Rotation rotation outside range: -90 <= angle <= 90")
self.rotation = rotation
def set_indent(self, indent=1):
"""
Set the Format indent property.
Args:
indent: Default is 1, turns property on.
Returns:
Nothing.
"""
self.indent = indent
def set_shrink(self, shrink=1):
"""
Set the Format shrink property.
Args:
shrink: Default is 1, turns property on.
Returns:
Nothing.
"""
self.shrink = shrink
def set_text_justlast(self, text_justlast=1):
"""
Set the Format text_justlast property.
Args:
text_justlast: Default is 1, turns property on.
Returns:
Nothing.
"""
self.text_justlast = text_justlast
def set_pattern(self, pattern=1):
"""
Set the Format pattern property.
Args:
pattern: Default is 1, solid fill.
Returns:
Nothing.
"""
self.pattern = pattern
def set_bg_color(self, bg_color):
"""
Set the Format bg_color property.
Args:
bg_color: Background color. No default.
Returns:
Nothing.
"""
self.bg_color = self._get_color(bg_color)
def set_fg_color(self, fg_color):
"""
Set the Format fg_color property.
Args:
fg_color: Foreground color. No default.
Returns:
Nothing.
"""
self.fg_color = self._get_color(fg_color)
# set_border(style) Set cells borders to the same style
def set_border(self, style=1):
"""
        Set the top, bottom, left and right Format border properties.
        Args:
            style: Default is 1, border type 1.
Returns:
Nothing.
"""
self.set_bottom(style)
self.set_top(style)
self.set_left(style)
self.set_right(style)
# set_border_color(color) Set cells border to the same color
def set_border_color(self, color):
"""
        Set the top, bottom, left and right Format border color properties.
Args:
color: Color string. No default.
Returns:
Nothing.
"""
self.set_bottom_color(color)
self.set_top_color(color)
self.set_left_color(color)
self.set_right_color(color)
def set_bottom(self, bottom=1):
"""
Set the Format bottom property.
Args:
bottom: Default is 1, border type 1.
Returns:
Nothing.
"""
self.bottom = bottom
def set_bottom_color(self, bottom_color):
"""
Set the Format bottom_color property.
Args:
bottom_color: Color string. No default.
Returns:
Nothing.
"""
self.bottom_color = self._get_color(bottom_color)
def set_diag_type(self, diag_type=1):
"""
Set the Format diag_type property.
Args:
diag_type: Default is 1, border type 1.
Returns:
Nothing.
"""
self.diag_type = diag_type
def set_left(self, left=1):
"""
Set the Format left property.
Args:
left: Default is 1, border type 1.
Returns:
Nothing.
"""
self.left = left
def set_left_color(self, left_color):
"""
Set the Format left_color property.
Args:
left_color: Color string. No default.
Returns:
Nothing.
"""
self.left_color = self._get_color(left_color)
def set_right(self, right=1):
"""
Set the Format right property.
Args:
right: Default is 1, border type 1.
Returns:
Nothing.
"""
self.right = right
def set_right_color(self, right_color):
"""
Set the Format right_color property.
Args:
right_color: Color string. No default.
Returns:
Nothing.
"""
self.right_color = self._get_color(right_color)
def set_top(self, top=1):
"""
Set the Format top property.
Args:
top: Default is 1, border type 1.
Returns:
Nothing.
"""
self.top = top
def set_top_color(self, top_color):
"""
Set the Format top_color property.
Args:
top_color: Color string. No default.
Returns:
Nothing.
"""
self.top_color = self._get_color(top_color)
def set_diag_color(self, diag_color):
"""
Set the Format diag_color property.
Args:
diag_color: Color string. No default.
Returns:
Nothing.
"""
self.diag_color = self._get_color(diag_color)
def set_diag_border(self, diag_border=1):
"""
Set the Format diag_border property.
Args:
diag_border: Default is 1, border type 1.
Returns:
Nothing.
"""
self.diag_border = diag_border
###########################################################################
#
# Internal Format properties. These aren't documented since they are
# either only used internally or else are unlikely to be set by the user.
#
###########################################################################
def set_has_font(self, has_font=1):
# Set the has_font property.
self.has_font = has_font
def set_has_fill(self, has_fill=1):
# Set the has_fill property.
self.has_fill = has_fill
def set_font_index(self, font_index):
# Set the font_index property.
self.font_index = font_index
def set_xf_index(self, xf_index):
# Set the xf_index property.
self.xf_index = xf_index
def set_dxf_index(self, dxf_index):
        # Set the dxf_index property.
self.dxf_index = dxf_index
def set_num_format_index(self, num_format_index):
# Set the num_format_index property.
self.num_format_index = num_format_index
def set_text_h_align(self, text_h_align):
# Set the text_h_align property.
self.text_h_align = text_h_align
def set_text_v_align(self, text_v_align):
# Set the text_v_align property.
self.text_v_align = text_v_align
def set_reading_order(self, reading_order=1):
# Set the reading_order property.
self.reading_order = reading_order
def set_valign(self, align):
# Set vertical cell alignment. This is required by the constructor
# properties dict to differentiate between the vertical and horizontal
# properties.
self.set_align(align)
def set_font_family(self, font_family):
# Set the Format font_family property.
self.font_family = font_family
def set_font_charset(self, font_charset):
# Set the Format font_charset property.
self.font_charset = font_charset
def set_font_scheme(self, font_scheme):
# Set the Format font_scheme property.
self.font_scheme = font_scheme
def set_font_condense(self, font_condense):
# Set the Format font_condense property.
self.font_condense = font_condense
def set_font_extend(self, font_extend):
# Set the Format font_extend property.
self.font_extend = font_extend
def set_theme(self, theme):
# Set the Format theme property.
self.theme = theme
def set_hyperlink(self, hyperlink=1):
# Set the properties for the hyperlink style. This doesn't
# currently work. To be fixed when styles are supported.
self.set_underline(1)
self.set_theme(10)
self.set_align('top')
self.hyperlink = hyperlink
def set_color_indexed(self, color_index):
# Used in the cell comment format.
self.color_indexed = color_index
def set_font_only(self, font_only=True):
# Used in the cell comment format.
self.font_only = font_only
# Compatibility methods.
def set_font(self, font_name):
# For compatibility with Excel::Writer::XLSX.
self.font_name = font_name
def set_size(self, font_size):
# For compatibility with Excel::Writer::XLSX.
self.font_size = font_size
def set_color(self, font_color):
# For compatibility with Excel::Writer::XLSX.
self.font_color = self._get_color(font_color)
###########################################################################
#
# Private API.
#
###########################################################################
def _get_align_properties(self):
        # Return properties for a Style xf <alignment> sub-element.
changed = 0
align = []
# Check if any alignment options in the format have been changed.
if (self.text_h_align or self.text_v_align or self.indent
or self.rotation or self.text_wrap or self.shrink
or self.reading_order):
changed = 1
else:
return changed, align
# Indent is only allowed for horizontal left, right and distributed.
# If it is defined for any other alignment or no alignment has
# been set then default to left alignment.
if (self.indent
and self.text_h_align != 1
and self.text_h_align != 3
and self.text_h_align != 7):
self.text_h_align = 1
# Check for properties that are mutually exclusive.
if self.text_wrap:
self.shrink = 0
if self.text_h_align == 4:
self.shrink = 0
if self.text_h_align == 5:
self.shrink = 0
if self.text_h_align == 7:
self.shrink = 0
if self.text_h_align != 7:
self.just_distrib = 0
if self.indent:
self.just_distrib = 0
continuous = 'centerContinuous'
if self.text_h_align == 1:
align.append(('horizontal', 'left'))
if self.text_h_align == 2:
align.append(('horizontal', 'center'))
if self.text_h_align == 3:
align.append(('horizontal', 'right'))
if self.text_h_align == 4:
align.append(('horizontal', 'fill'))
if self.text_h_align == 5:
align.append(('horizontal', 'justify'))
if self.text_h_align == 6:
align.append(('horizontal', continuous))
if self.text_h_align == 7:
align.append(('horizontal', 'distributed'))
if self.just_distrib:
align.append(('justifyLastLine', 1))
# Property 'vertical' => 'bottom' is a default. It sets applyAlignment
# without an alignment sub-element.
if self.text_v_align == 1:
align.append(('vertical', 'top'))
if self.text_v_align == 2:
align.append(('vertical', 'center'))
if self.text_v_align == 4:
align.append(('vertical', 'justify'))
if self.text_v_align == 5:
align.append(('vertical', 'distributed'))
if self.indent:
align.append(('indent', self.indent))
if self.rotation:
align.append(('textRotation', self.rotation))
if self.text_wrap:
align.append(('wrapText', 1))
if self.shrink:
align.append(('shrinkToFit', 1))
if self.reading_order == 1:
align.append(('readingOrder', 1))
if self.reading_order == 2:
align.append(('readingOrder', 2))
return changed, align
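    # Hedged illustration (not part of the original source): for a format set
    # up with set_align('center'), set_align('vcenter') and set_text_wrap(),
    # the method above would report changed == 1 and align ==
    # [('horizontal', 'center'), ('vertical', 'center'), ('wrapText', 1)].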
def _get_protection_properties(self):
# Return properties for an Excel XML <Protection> element.
attribs = []
if not self.locked:
attribs.append(('locked', 0))
if self.hidden:
attribs.append(('hidden', 1))
return attribs
def _get_format_key(self):
        # Returns a unique hash key for a format. Used by Workbook.
key = ':'.join(self._to_string(x) for x in (
self._get_font_key(),
self._get_border_key(),
self._get_fill_key(),
self._get_alignment_key(),
self.num_format,
self.locked,
self.hidden))
return key
def _get_font_key(self):
# Returns a unique hash key for a font. Used by Workbook.
key = ':'.join(self._to_string(x) for x in (
self.bold,
self.font_color,
self.font_charset,
self.font_family,
self.font_outline,
self.font_script,
self.font_shadow,
self.font_strikeout,
self.font_name,
self.italic,
self.font_size,
self.underline))
return key
def _get_border_key(self):
# Returns a unique hash key for a border style. Used by Workbook.
key = ':'.join(self._to_string(x) for x in (
self.bottom,
self.bottom_color,
self.diag_border,
self.diag_color,
self.diag_type,
self.left,
self.left_color,
self.right,
self.right_color,
self.top,
self.top_color))
return key
def _get_fill_key(self):
# Returns a unique hash key for a fill style. Used by Workbook.
key = ':'.join(self._to_string(x) for x in (
self.pattern,
self.bg_color,
self.fg_color))
return key
def _get_alignment_key(self):
# Returns a unique hash key for alignment formats.
key = ':'.join(self._to_string(x) for x in (
self.text_h_align,
self.text_v_align,
self.indent,
self.rotation,
self.text_wrap,
self.shrink,
self.reading_order))
return key
def _get_xf_index(self):
# Returns the XF index number used by Excel to identify a format.
if self.xf_index is not None:
# Format already has an index number so return it.
return self.xf_index
else:
# Format doesn't have an index number so assign one.
key = self._get_format_key()
if key in self.xf_format_indices:
# Format matches existing format with an index.
return self.xf_format_indices[key]
else:
# New format requiring an index. Note. +1 since Excel
# has an implicit "General" format at index 0.
index = 1 + len(self.xf_format_indices)
self.xf_format_indices[key] = index
self.xf_index = index
return index
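    # Hedged sketch (not part of the original source) of the caching above:
    # the key from _get_format_key() de-duplicates formats, so two Format
    # objects with identical properties resolve to the same XF index, while
    # index 0 is left for Excel's implicit "General" format.
    #
    #     bold_a = workbook.add_format({'bold': True})   # hypothetical workbook
    #     bold_b = workbook.add_format({'bold': True})
    #     bold_a._get_xf_index() == bold_b._get_xf_index()   # -> True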
def _get_dxf_index(self):
# Returns the DXF index number used by Excel to identify a format.
if self.dxf_index is not None:
# Format already has an index number so return it.
return self.dxf_index
else:
# Format doesn't have an index number so assign one.
key = self._get_format_key()
if key in self.dxf_format_indices:
# Format matches existing format with an index.
return self.dxf_format_indices[key]
else:
# New format requiring an index.
index = len(self.dxf_format_indices)
self.dxf_format_indices[key] = index
self.dxf_index = index
return index
def _get_color(self, color):
# Used in conjunction with the set_xxx_color methods to convert a
# color name into an RGB formatted string. These colors are for
# backward compatibility with older versions of Excel.
named_colors = {
'black': '#000000',
'blue': '#0000FF',
'brown': '#800000',
'cyan': '#00FFFF',
'gray': '#808080',
'green': '#008000',
'lime': '#00FF00',
'magenta': '#FF00FF',
'navy': '#000080',
'orange': '#FF6600',
'pink': '#FF00FF',
'purple': '#800080',
'red': '#FF0000',
'silver': '#C0C0C0',
'white': '#FFFFFF',
'yellow': '#FFFF00',
}
if color in named_colors:
color = named_colors[color]
return color
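    # Hedged examples (not part of the original source) of the conversion
    # performed above:
    #
    #     self._get_color('red')      # -> '#FF0000'
    #     self._get_color('#00FF00')  # not a named color, returned unchanged
    #     self._get_color(4)          # non-string values also pass through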
def _to_string(self, value):
# Convert number to a string but allow for utf-8 strings in Python 2.
try:
return str(value)
except UnicodeEncodeError:
return value.encode('utf-8')
###########################################################################
#
# XML methods.
#
###########################################################################
|
jkyeung/XlsxWriter
|
xlsxwriter/format.py
|
Python
|
bsd-2-clause
| 25,888
|
import click
from iscc_bench.readers import ALL_READERS
from iscc_bench.elastic_search.fill_elasticsearch import populate_elastic
from iscc_bench.elastic_search.new_index import new_data_index, new_id_index
from iscc_bench.elastic_search.generate_meta_ids import generate_ids
from iscc_bench.elastic_search.evaluate import evaluate
@click.group()
def main():
"""ISCC Benchmarking."""
pass
@click.command()
@click.option(
"--reader",
"-r",
required=False,
help="Reader (If no reader is given, all reader are parsed)",
default=None,
)
@click.option(
"--kill", "-k", required=False, type=bool, help="Reset old index", default=False
)
def load(reader, kill):
"""Populate ElasticSearch with given reader."""
if kill:
new_data_index()
if not reader:
for reader in ALL_READERS:
populate_elastic(reader)
else:
reader_names = {r.__name__: r for r in ALL_READERS}
        if reader not in reader_names:
            # Unknown reader names are silently ignored.
            pass
else:
populate_elastic(reader_names[reader])
main.add_command(load)
@click.command()
@click.option("--id_bits", type=int, help="Length of generated Meta-IDs", default=64)
@click.option("--shingle_size", type=int, help="Shingle Size", default=4)
def build(id_bits, shingle_size):
"""Generate Meta-IDs for the Meta-Data."""
new_id_index()
generate_ids(id_bits, shinglesize=shingle_size)
main.add_command(build)
@click.command()
def run():
"""Run Evaluation"""
evaluate()
main.add_command(run)
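# Hedged usage sketch (not part of the original source): the group can be
# exercised programmatically with click's test runner, e.g.
#
#     from click.testing import CliRunner
#     CliRunner().invoke(main, ["load", "--reader", "<reader_name>"])
#     CliRunner().invoke(main, ["build", "--id_bits", "64"])
#     CliRunner().invoke(main, ["run"])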
|
coblo/isccbench
|
iscc_bench/cli.py
|
Python
|
bsd-2-clause
| 1,542
|
from django.core.urlresolvers import reverse
from django.db.models import F
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.utils.translation import get_language_from_request
from django.views import generic
from django.views.generic import TemplateView
from django.views.generic.edit import FormView
from molo.core.utils import get_locale_code
from molo.core.models import get_translation_for
from molo.polls.forms import TextVoteForm, VoteForm, NumericalTextVoteForm
from molo.polls.models import (
Choice, Question, FreeTextVote, ChoiceVote, FreeTextQuestion)
class IndexView(generic.ListView):
template_name = 'polls/index.html'
context_object_name = 'latest_question_list'
def get_queryset(self):
"""Return the last five published questions."""
return Question.objects.order_by('-pub_date')[:5]
class PollsDetailsView(TemplateView):
template_name = 'polls/polls_details.html'
def get_context_data(self, *args, **kwargs):
context = super(
PollsDetailsView, self).get_context_data(*args, **kwargs)
context.update({
'question': Question.objects.get(pk=kwargs.get('question_id'))
})
return context
class DetailView(generic.DetailView):
model = Question
template_name = 'polls/detail.html'
def poll_results(request, poll_id):
question = get_object_or_404(Question, pk=poll_id)
page = question.get_main_language_page()
qs = Choice.objects.live().child_of(page).filter(
language__is_main_language=True)
locale = get_locale_code(get_language_from_request(request))
choices = []
for c in qs:
translations = get_translation_for([c], locale, request.site)
if translations:
choices.append((translations[0].specific, c.specific))
total_votes = sum(c.votes for c in qs)
choice_color = ['orange', 'purple', 'turq']
index = 0
for choice, main_choice in choices:
vote_percentage = 0
if index >= len(choice_color):
index = 0
if main_choice.votes > 0:
vote_percentage = int(main_choice.votes * 100.0 / total_votes)
choice.percentage = vote_percentage
choice.color = choice_color[index]
index += 1
context = {
'question': question,
'total': total_votes,
'choices': sorted(
[c for c, m in choices], key=lambda x: x.percentage, reverse=True)
}
return render(request, 'polls/results.html', context,)
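# Hedged worked example (not part of the original source) for the loop above:
# with total_votes == 20 and per-choice votes of 10, 6 and 4, the percentages
# come out as 50, 30 and 20 and the colours cycle through 'orange', 'purple'
# and 'turq', wrapping back to 'orange' for a hypothetical fourth choice.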
class VoteView(FormView):
form_class = VoteForm
template_name = 'polls/detail.html'
def get_context_data(self, *args, **kwargs):
context = super(
VoteView, self).get_context_data(*args, **kwargs)
question_id = self.kwargs.get('question_id')
question = get_object_or_404(Question, pk=question_id)
context.update({'question': question})
return context
def form_valid(self, form, *args, **kwargs):
question_id = self.kwargs.get('question_id')
question = get_object_or_404(Question, pk=question_id)
question = question.get_main_language_page().specific
obj, created = ChoiceVote.objects.get_or_create(
user=self.request.user,
question=question,)
if created:
selected_choice = form.cleaned_data['choice']
for choice_pk in selected_choice:
Choice.objects.filter(
pk=choice_pk).update(votes=F('votes') + 1)
choice = Choice.objects.get(pk=choice_pk)
obj.choice.add(choice)
choice.choice_votes.add(obj)
choice.save()
return HttpResponseRedirect(
reverse('molo.polls:results', args=(question_id,)))
class FreeTextVoteView(FormView):
template_name = 'polls/free_text_detail.html'
def dispatch(self, *args, **kwargs):
question_id = kwargs.get('question_id')
question = get_object_or_404(FreeTextQuestion, pk=question_id)
if question.numerical:
self.form_class = NumericalTextVoteForm
else:
self.form_class = TextVoteForm
return super(FreeTextVoteView, self).dispatch(*args, **kwargs)
def get_context_data(self, *args, **kwargs):
context = super(
FreeTextVoteView, self).get_context_data(*args, **kwargs)
question_id = self.kwargs.get('question_id')
question = get_object_or_404(FreeTextQuestion, pk=question_id)
context.update({'question': question})
return context
def form_valid(self, form, *args, **kwargs):
question_id = self.kwargs.get('question_id')
question = get_object_or_404(FreeTextQuestion, pk=question_id)
question = question.get_main_language_page().specific
FreeTextVote.objects.get_or_create(
user=self.request.user,
question=question,
defaults={
'answer': form.cleaned_data['answer']
})
return HttpResponseRedirect(reverse('molo.polls:results',
args=(question.id,)))
|
praekelt/molo.polls
|
molo/polls/views.py
|
Python
|
bsd-2-clause
| 5,198
|
import os
import sys
class ConfigHelper():
"""
Manage all configuration information for the application
"""
def __init__(self):
TRUE = "True"
FALSE = "False"
ERROR = 1
self.halo_key = os.getenv("HALO_API_KEY")
self.halo_secret = os.getenv("HALO_API_SECRET_KEY")
# get the results directory and create it if it does not exist
scan_results_directory = os.environ["SCAN_RESULTS_DIRECTORY"] = \
"/tmp/scan_results/"
path_exists = os.path.exists(scan_results_directory)
if not path_exists:
try:
os.mkdir(scan_results_directory)
path_exists = os.path.exists(scan_results_directory)
except OSError:
pass
days_for_scan_age = os.environ["DAYS_FOR_SCAN_AGE"] = "0"
days_for_scan_age = int(days_for_scan_age)
days_string_is_int_value = isinstance(days_for_scan_age, int)
os.environ["HALO_SERVER_GROUP"] = "Git"
scan_examples = os.environ["SCAN_EXAMPLES"] = "False"
heartbeat_interval = os.environ["HEARTBEAT_INTERVAL"] = "60"
heartbeat_interval = int(heartbeat_interval)
hi_string_is_int_value = isinstance(heartbeat_interval, int)
# for unit tests Travis populates the IP
server_ip = "<server_ip>"
os.environ["SERVER_IP"] = server_ip
unit_tests = os.environ["UNIT_TESTS"] = "no_unit_tests" # NOQA
if self.halo_key is None or self.halo_secret is None \
or not os.path.exists(scan_results_directory) or not path_exists \
                or not days_string_is_int_value \
                or not hi_string_is_int_value \
or scan_examples != TRUE and scan_examples != FALSE:
print "Configuration validation failed... exiting...\n"
sys.exit(ERROR)
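# Hedged usage sketch (not part of the original source): the credentials are
# read from the environment, so callers are expected to set HALO_API_KEY and
# HALO_API_SECRET_KEY (e.g. os.environ["HALO_API_KEY"] = "<key>") before
# constructing ConfigHelper().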
|
jgibbons-cp/sva_scan_examples
|
app/sva_scan_examples/config_helper.py
|
Python
|
bsd-2-clause
| 1,913
|
from clusto.drivers.devices.servers.basicserver import BasicServer
from IPy import IP
import boto.ec2
import urllib2
import urllib
import socket
import json
import sys
from sgext.util import SGException
class Request(urllib2.Request):
def __init__(self, method, url, data=None):
if isinstance(data, dict):
data = urllib.urlencode(data)
urllib2.Request.__init__(self, url, data=data)
self.method = method
def get_method(self):
return self.method
class SGServer(BasicServer):
_driver_name = 'sgserver'
_portmeta = {
'nic-eth': {'numports': 1},
}
def get_boto_connection(self):
region = self.attr_value(key='ec2', subkey='region', merge_container_values=True)
return boto.ec2.connect_to_region(region)
def get_best_ip(self):
for dnsname in self.attr_values(key='ec2', subkey='public-dns'):
try:
ip = socket.gethostbyname(dnsname)
return ip
except Exception, e:
pass
ips = self.attr_values(key='ip', subkey='ipstring')
for ip in ips:
if IP(ip).iptype() != 'PRIVATE':
return ip
if not ips:
raise SGException('Unable to determine IP for %s' % self.name)
def reboot(self):
conn = self.get_boto_connection()
conn.reboot_instances([self.attr_value(key='ec2', subkey='instance-id')])
def opsd_request(self, method, endpoint, data={}):
url = 'http://%s:9666%s' % (self.get_best_ip(), endpoint)
resp = urllib2.urlopen(Request(method, url, data))
return json.loads(resp.read())
def start_service(self, name, provider='monit'):
return self.opsd_request('POST', '/v0/service/%s/%s.json' % (provider, name), {'action': 'start'})
def stop_service(self, name, provider='monit'):
return self.opsd_request('POST', '/v0/service/%s/%s.json' % (provider, name), {'action': 'stop'})
def restart_service(self, name, provider='monit'):
return self.opsd_request('POST', '/v0/service/%s/%s.json' % (provider, name), {'action': 'restart'})
def get_service(self, name=None, provider='monit'):
if name is None:
return self.opsd_request('GET', '/v0/service/%s/' % provider)
else:
return self.opsd_request('GET', '/v0/service/%s/%s.json' % (provider, name))
def install_package(self, name, provider='apt'):
result = self.opsd_request('POST', '/v0/package/%s/%s.json' % (provider, name), {'action': 'install'})
if result.get('status', None) != 'ok':
raise SGException('Error installing package: %s' % result)
def remove_package(self, name, provider='apt'):
result = self.opsd_request('POST', '/v0/package/%s/%s.json' % (provider, name), {'action': 'remove'})
if result.get('status', None) != 'ok':
raise SGException('Error removing package: %s' % result)
def apt_update(self):
result = self.opsd_request('POST', '/v0/package/apt/update.json')
if result.get('status', None) != 'ok':
raise SGException('Error performing apt update: %s' % result)
def get_package(self, name=None, provider='apt'):
if name is None:
return self.opsd_request('GET', '/v0/package/%s/' % provider)
else:
return self.opsd_request('GET', '/v0/package/%s/%s.json' % (provider, name))
def run_test(self, name, provider='consumption'):
return self.opsd_request('GET', '/v0/test/%s/%s.json' % (provider, name))
def get_tests(self, provider='consumption'):
return self.opsd_request('GET', '/v0/test/%s/' % provider)
def run_puppet(self):
result = self.opsd_request('POST', '/v0/config/puppet/run.json')
if result.get('status', None) != 'ok':
raise SGException('Error running puppet: %s' % result)
def enable_puppet(self):
result = self.opsd_request('POST', '/v0/config/puppet/state.json', {
'action': 'enable',
})
if result.get('status', None) != 'ok':
raise SGException('Error enabling puppet: %s' % result)
def disable_puppet(self):
result = self.opsd_request('POST', '/v0/config/puppet/state.json', {
'action': 'disable',
})
if result.get('status', None) != 'ok':
raise SGException('Error disabling puppet: %s' % result)
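# Hedged sketch (not part of the original source) of the request flow above:
# e.g. restart_service('nginx') POSTs {'action': 'restart'} to
# http://<best_ip>:9666/v0/service/monit/nginx.json via opsd_request().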
|
simplegeo/clusto-sgext
|
sgext/drivers/devices/servers/sgserver.py
|
Python
|
bsd-2-clause
| 4,449
|
#
#
# Copyright (C) 2006, 2007, 2010, 2011 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Ganeti utility module.
This module holds functions that can be used in both daemons (all) and
the command line scripts.
"""
# Allow wildcard import in pylint: disable=W0401
import os
import re
import errno
import pwd
import time
import itertools
import select
import logging
import signal
from ganeti import errors
from ganeti import constants
from ganeti import compat
from ganeti import pathutils
from ganeti.utils.algo import *
from ganeti.utils.filelock import *
from ganeti.utils.hash import *
from ganeti.utils.io import *
from ganeti.utils.livelock import *
from ganeti.utils.log import *
from ganeti.utils.lvm import *
from ganeti.utils.mlock import *
from ganeti.utils.nodesetup import *
from ganeti.utils.process import *
from ganeti.utils.retry import *
from ganeti.utils.security import *
from ganeti.utils.storage import *
from ganeti.utils.tags import *
from ganeti.utils.text import *
from ganeti.utils.wrapper import *
from ganeti.utils.version import *
from ganeti.utils.x509 import *
from ganeti.utils.bitarrays import *
_VALID_SERVICE_NAME_RE = re.compile("^[-_.a-zA-Z0-9]{1,128}$")
UUID_RE = re.compile(constants.UUID_REGEX)
def ForceDictType(target, key_types, allowed_values=None):
"""Force the values of a dict to have certain types.
@type target: dict
@param target: the dict to update
@type key_types: dict
@param key_types: dict mapping target dict keys to types
in constants.ENFORCEABLE_TYPES
@type allowed_values: list
@keyword allowed_values: list of specially allowed values
"""
if allowed_values is None:
allowed_values = []
if not isinstance(target, dict):
msg = "Expected dictionary, got '%s'" % target
raise errors.TypeEnforcementError(msg)
for key in target:
if key not in key_types:
msg = "Unknown parameter '%s'" % key
raise errors.TypeEnforcementError(msg)
if target[key] in allowed_values:
continue
ktype = key_types[key]
if ktype not in constants.ENFORCEABLE_TYPES:
msg = "'%s' has non-enforceable type %s" % (key, ktype)
raise errors.ProgrammerError(msg)
if ktype in (constants.VTYPE_STRING, constants.VTYPE_MAYBE_STRING):
if target[key] is None and ktype == constants.VTYPE_MAYBE_STRING:
pass
elif not isinstance(target[key], basestring):
if isinstance(target[key], bool) and not target[key]:
target[key] = ""
else:
msg = "'%s' (value %s) is not a valid string" % (key, target[key])
raise errors.TypeEnforcementError(msg)
elif ktype == constants.VTYPE_BOOL:
if isinstance(target[key], basestring) and target[key]:
if target[key].lower() == constants.VALUE_FALSE:
target[key] = False
elif target[key].lower() == constants.VALUE_TRUE:
target[key] = True
else:
msg = "'%s' (value %s) is not a valid boolean" % (key, target[key])
raise errors.TypeEnforcementError(msg)
elif target[key]:
target[key] = True
else:
target[key] = False
elif ktype == constants.VTYPE_SIZE:
try:
target[key] = ParseUnit(target[key])
except errors.UnitParseError, err:
msg = "'%s' (value %s) is not a valid size. error: %s" % \
(key, target[key], err)
raise errors.TypeEnforcementError(msg)
elif ktype == constants.VTYPE_INT:
try:
target[key] = int(target[key])
except (ValueError, TypeError):
msg = "'%s' (value %s) is not a valid integer" % (key, target[key])
raise errors.TypeEnforcementError(msg)
elif ktype == constants.VTYPE_FLOAT:
try:
target[key] = float(target[key])
except (ValueError, TypeError):
msg = "'%s' (value %s) is not a valid float" % (key, target[key])
raise errors.TypeEnforcementError(msg)
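# Hedged example (not part of the original source); exact unit handling is
# delegated to ParseUnit(), so the size is shown without a suffix:
#
#     opts = {"count": "3", "shared": "true", "size": "512"}
#     ForceDictType(opts, {"count": constants.VTYPE_INT,
#                          "shared": constants.VTYPE_BOOL,
#                          "size": constants.VTYPE_SIZE})
#     # opts is now {"count": 3, "shared": True, "size": 512}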
def ValidateServiceName(name):
"""Validate the given service name.
@type name: number or string
@param name: Service name or port specification
"""
try:
numport = int(name)
except (ValueError, TypeError):
# Non-numeric service name
valid = _VALID_SERVICE_NAME_RE.match(name)
else:
# Numeric port (protocols other than TCP or UDP might need adjustments
# here)
valid = (numport >= 0 and numport < (1 << 16))
if not valid:
raise errors.OpPrereqError("Invalid service name '%s'" % name,
errors.ECODE_INVAL)
return name
def _ComputeMissingKeys(key_path, options, defaults):
"""Helper functions to compute which keys a invalid.
@param key_path: The current key path (if any)
@param options: The user provided options
@param defaults: The default dictionary
@return: A list of invalid keys
"""
defaults_keys = frozenset(defaults.keys())
invalid = []
for key, value in options.items():
if key_path:
new_path = "%s/%s" % (key_path, key)
else:
new_path = key
if key not in defaults_keys:
invalid.append(new_path)
elif isinstance(value, dict):
invalid.extend(_ComputeMissingKeys(new_path, value, defaults[key]))
return invalid
def VerifyDictOptions(options, defaults):
"""Verify a dict has only keys set which also are in the defaults dict.
@param options: The user provided options
@param defaults: The default dictionary
@raise error.OpPrereqError: If one of the keys is not supported
"""
invalid = _ComputeMissingKeys("", options, defaults)
if invalid:
raise errors.OpPrereqError("Provided option keys not supported: %s" %
CommaJoin(invalid), errors.ECODE_INVAL)
def ListVolumeGroups():
"""List volume groups and their size
@rtype: dict
@return:
      Dictionary with volume group names as keys and
      their sizes as values
"""
command = "vgs --noheadings --units m --nosuffix -o name,size"
result = RunCmd(command)
retval = {}
if result.failed:
return retval
for line in result.stdout.splitlines():
try:
name, size = line.split()
size = int(float(size))
except (IndexError, ValueError), err:
logging.error("Invalid output from vgs (%s): %s", err, line)
continue
retval[name] = size
return retval
def BridgeExists(bridge):
"""Check whether the given bridge exists in the system
@type bridge: str
@param bridge: the bridge name to check
@rtype: boolean
@return: True if it does
"""
return os.path.isdir("/sys/class/net/%s/bridge" % bridge)
def TryConvert(fn, val):
"""Try to convert a value ignoring errors.
This function tries to apply function I{fn} to I{val}. If no
C{ValueError} or C{TypeError} exceptions are raised, it will return
the result, else it will return the original value. Any other
exceptions are propagated to the caller.
@type fn: callable
@param fn: function to apply to the value
@param val: the value to be converted
@return: The converted value if the conversion was successful,
otherwise the original value.
"""
try:
nv = fn(val)
except (ValueError, TypeError):
nv = val
return nv
def ParseCpuMask(cpu_mask):
"""Parse a CPU mask definition and return the list of CPU IDs.
CPU mask format: comma-separated list of CPU IDs
or dash-separated ID ranges
Example: "0-2,5" -> "0,1,2,5"
@type cpu_mask: str
@param cpu_mask: CPU mask definition
@rtype: list of int
@return: list of CPU IDs
"""
if not cpu_mask:
return []
cpu_list = []
for range_def in cpu_mask.split(","):
if range_def == constants.CPU_PINNING_ALL:
return [constants.CPU_PINNING_ALL_VAL]
boundaries = range_def.split("-")
n_elements = len(boundaries)
if n_elements > 2:
raise errors.ParseError("Invalid CPU ID range definition"
" (only one hyphen allowed): %s" % range_def)
try:
lower = int(boundaries[0])
except (ValueError, TypeError), err:
raise errors.ParseError("Invalid CPU ID value for lower boundary of"
" CPU ID range: %s" % str(err))
try:
higher = int(boundaries[-1])
except (ValueError, TypeError), err:
raise errors.ParseError("Invalid CPU ID value for higher boundary of"
" CPU ID range: %s" % str(err))
if lower > higher:
raise errors.ParseError("Invalid CPU ID range definition"
" (%d > %d): %s" % (lower, higher, range_def))
cpu_list.extend(range(lower, higher + 1))
return cpu_list
def ParseMultiCpuMask(cpu_mask):
"""Parse a multiple CPU mask definition and return the list of CPU IDs.
CPU mask format: colon-separated list of comma-separated list of CPU IDs
or dash-separated ID ranges, with optional "all" as CPU value
Example: "0-2,5:all:1,5,6:2" -> [ [ 0,1,2,5 ], [ -1 ], [ 1, 5, 6 ], [ 2 ] ]
@type cpu_mask: str
@param cpu_mask: multiple CPU mask definition
@rtype: list of lists of int
@return: list of lists of CPU IDs
"""
if not cpu_mask:
return []
cpu_list = []
for range_def in cpu_mask.split(constants.CPU_PINNING_SEP):
# Uniquify and sort the list before adding
cpu_list.append(sorted(set(ParseCpuMask(range_def))))
return cpu_list
def GetHomeDir(user, default=None):
"""Try to get the homedir of the given user.
The user can be passed either as a string (denoting the name) or as
an integer (denoting the user id). If the user is not found, the
C{default} argument is returned, which defaults to C{None}.
"""
try:
if isinstance(user, basestring):
result = pwd.getpwnam(user)
elif isinstance(user, (int, long)):
result = pwd.getpwuid(user)
else:
raise errors.ProgrammerError("Invalid type passed to GetHomeDir (%s)" %
type(user))
except KeyError:
return default
return result.pw_dir
def FirstFree(seq, base=0):
"""Returns the first non-existing integer from seq.
The seq argument should be a sorted list of positive integers. The
first time the index of an element is smaller than the element
value, the index will be returned.
The base argument is used to start at a different offset,
  i.e. C{[3, 4, 6]} with I{base=3} will return 5.
Example: C{[0, 1, 3]} will return I{2}.
@type seq: sequence
@param seq: the sequence to be analyzed.
@type base: int
@param base: use this value as the base index of the sequence
@rtype: int
@return: the first non-used index in the sequence
"""
for idx, elem in enumerate(seq):
assert elem >= base, "Passed element is higher than base offset"
if elem > idx + base:
# idx is not used
return idx + base
return None
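# Hedged examples (not part of the original source) of FirstFree:
#
#     FirstFree([0, 1, 3])          # -> 2
#     FirstFree([3, 4, 6], base=3)  # -> 5
#     FirstFree([0, 1, 2])          # -> None (sequence is dense)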
def SingleWaitForFdCondition(fdobj, event, timeout):
"""Waits for a condition to occur on the socket.
Immediately returns at the first interruption.
@type fdobj: integer or object supporting a fileno() method
@param fdobj: entity to wait for events on
@type event: integer
@param event: ORed condition (see select module)
@type timeout: float or None
@param timeout: Timeout in seconds
@rtype: int or None
  @return: None for timeout, otherwise the occurred conditions
"""
check = (event | select.POLLPRI |
select.POLLNVAL | select.POLLHUP | select.POLLERR)
if timeout is not None:
# Poller object expects milliseconds
timeout *= 1000
poller = select.poll()
poller.register(fdobj, event)
try:
# TODO: If the main thread receives a signal and we have no timeout, we
# could wait forever. This should check a global "quit" flag or something
# every so often.
io_events = poller.poll(timeout)
except select.error, err:
if err[0] != errno.EINTR:
raise
io_events = []
if io_events and io_events[0][1] & check:
return io_events[0][1]
else:
return None
class FdConditionWaiterHelper(object):
"""Retry helper for WaitForFdCondition.
  This class contains the retry and wait functions that make sure
WaitForFdCondition can continue waiting until the timeout is actually
expired.
"""
def __init__(self, timeout):
self.timeout = timeout
def Poll(self, fdobj, event):
result = SingleWaitForFdCondition(fdobj, event, self.timeout)
if result is None:
raise RetryAgain()
else:
return result
def UpdateTimeout(self, timeout):
self.timeout = timeout
def WaitForFdCondition(fdobj, event, timeout):
"""Waits for a condition to occur on the socket.
Retries until the timeout is expired, even if interrupted.
@type fdobj: integer or object supporting a fileno() method
@param fdobj: entity to wait for events on
@type event: integer
@param event: ORed condition (see select module)
@type timeout: float or None
@param timeout: Timeout in seconds
@rtype: int or None
  @return: None for timeout, otherwise the occurred conditions
"""
if timeout is not None:
retrywaiter = FdConditionWaiterHelper(timeout)
try:
result = Retry(retrywaiter.Poll, RETRY_REMAINING_TIME, timeout,
args=(fdobj, event), wait_fn=retrywaiter.UpdateTimeout)
except RetryTimeout:
result = None
else:
result = None
while result is None:
result = SingleWaitForFdCondition(fdobj, event, timeout)
return result
def EnsureDaemon(name):
"""Check for and start daemon if not alive.
@type name: string
@param name: daemon name
@rtype: bool
@return: 'True' if daemon successfully started,
'False' otherwise
"""
result = RunCmd([pathutils.DAEMON_UTIL, "check-and-start", name])
if result.failed:
logging.error("Can't start daemon '%s', failure %s, output: %s",
name, result.fail_reason, result.output)
return False
return True
def StopDaemon(name):
"""Stop daemon
@type name: string
@param name: daemon name
@rtype: bool
@return: 'True' if daemon successfully stopped,
'False' otherwise
"""
result = RunCmd([pathutils.DAEMON_UTIL, "stop", name])
if result.failed:
logging.error("Can't stop daemon '%s', failure %s, output: %s",
name, result.fail_reason, result.output)
return False
return True
def SplitTime(value):
"""Splits time as floating point number into a tuple.
@param value: Time in seconds
@type value: int or float
@return: Tuple containing (seconds, microseconds)
"""
(seconds, microseconds) = divmod(int(value * 1000000), 1000000)
assert 0 <= seconds, \
"Seconds must be larger than or equal to 0, but are %s" % seconds
assert 0 <= microseconds <= 999999, \
"Microseconds must be 0-999999, but are %s" % microseconds
return (int(seconds), int(microseconds))
def MergeTime(timetuple):
"""Merges a tuple into time as a floating point number.
@param timetuple: Time as tuple, (seconds, microseconds)
@type timetuple: tuple
@return: Time as a floating point number expressed in seconds
"""
(seconds, microseconds) = timetuple
assert 0 <= seconds, \
"Seconds must be larger than or equal to 0, but are %s" % seconds
assert 0 <= microseconds <= 999999, \
"Microseconds must be 0-999999, but are %s" % microseconds
return float(seconds) + (float(microseconds) * 0.000001)
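# Hedged round-trip example (not part of the original source):
#
#     SplitTime(1.5)            # -> (1, 500000)
#     MergeTime((1, 500000))    # -> 1.5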
def EpochNano():
"""Return the current timestamp expressed as number of nanoseconds since the
unix epoch
@return: nanoseconds since the Unix epoch
"""
return int(time.time() * 1000000000)
def FindMatch(data, name):
"""Tries to find an item in a dictionary matching a name.
Callers have to ensure the data names aren't contradictory (e.g. a regexp
that matches a string). If the name isn't a direct key, all regular
expression objects in the dictionary are matched against it.
@type data: dict
@param data: Dictionary containing data
@type name: string
@param name: Name to look for
@rtype: tuple; (value in dictionary, matched groups as list)
"""
if name in data:
return (data[name], [])
for key, value in data.items():
# Regex objects
if hasattr(key, "match"):
m = key.match(name)
if m:
return (value, list(m.groups()))
return None
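# Hedged example (not part of the original source) of FindMatch with a mix of
# plain and regex keys:
#
#     data = {"exact": 1, re.compile(r"^disk(\d+)$"): 2}
#     FindMatch(data, "exact")   # -> (1, [])
#     FindMatch(data, "disk3")   # -> (2, ['3'])
#     FindMatch(data, "other")   # -> None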
def GetMounts(filename=constants.PROC_MOUNTS):
"""Returns the list of mounted filesystems.
This function is Linux-specific.
@param filename: path of mounts file (/proc/mounts by default)
@rtype: list of tuples
@return: list of mount entries (device, mountpoint, fstype, options)
"""
# TODO(iustin): investigate non-Linux options (e.g. via mount output)
data = []
mountlines = ReadFile(filename).splitlines()
for line in mountlines:
device, mountpoint, fstype, options, _ = line.split(None, 4)
data.append((device, mountpoint, fstype, options))
return data
def SignalHandled(signums):
"""Signal Handled decoration.
This special decorator installs a signal handler and then calls the target
function. The function must accept a 'signal_handlers' keyword argument,
which will contain a dict indexed by signal number, with SignalHandler
objects as values.
  The decorator can be safely stacked with itself, to handle multiple signals
with different handlers.
@type signums: list
@param signums: signals to intercept
"""
def wrap(fn):
def sig_function(*args, **kwargs):
assert "signal_handlers" not in kwargs or \
kwargs["signal_handlers"] is None or \
isinstance(kwargs["signal_handlers"], dict), \
"Wrong signal_handlers parameter in original function call"
if "signal_handlers" in kwargs and kwargs["signal_handlers"] is not None:
signal_handlers = kwargs["signal_handlers"]
else:
signal_handlers = {}
kwargs["signal_handlers"] = signal_handlers
sighandler = SignalHandler(signums)
try:
for sig in signums:
signal_handlers[sig] = sighandler
return fn(*args, **kwargs)
finally:
sighandler.Reset()
return sig_function
return wrap
def TimeoutExpired(epoch, timeout, _time_fn=time.time):
"""Checks whether a timeout has expired.
"""
return _time_fn() > (epoch + timeout)
class SignalWakeupFd(object):
try:
# This is only supported in Python 2.5 and above (some distributions
# backported it to Python 2.4)
_set_wakeup_fd_fn = signal.set_wakeup_fd
except AttributeError:
# Not supported
def _SetWakeupFd(self, _): # pylint: disable=R0201
return -1
else:
def _SetWakeupFd(self, fd):
return self._set_wakeup_fd_fn(fd)
def __init__(self):
"""Initializes this class.
"""
(read_fd, write_fd) = os.pipe()
# Once these succeeded, the file descriptors will be closed automatically.
# Buffer size 0 is important, otherwise .read() with a specified length
# might buffer data and the file descriptors won't be marked readable.
self._read_fh = os.fdopen(read_fd, "r", 0)
self._write_fh = os.fdopen(write_fd, "w", 0)
self._previous = self._SetWakeupFd(self._write_fh.fileno())
# Utility functions
self.fileno = self._read_fh.fileno
self.read = self._read_fh.read
def Reset(self):
"""Restores the previous wakeup file descriptor.
"""
if hasattr(self, "_previous") and self._previous is not None:
self._SetWakeupFd(self._previous)
self._previous = None
def Notify(self):
"""Notifies the wakeup file descriptor.
"""
self._write_fh.write(chr(0))
def __del__(self):
"""Called before object deletion.
"""
self.Reset()
class SignalHandler(object):
"""Generic signal handler class.
It automatically restores the original handler when deconstructed or
when L{Reset} is called. You can either pass your own handler
function in or query the L{called} attribute to detect whether the
signal was sent.
@type signum: list
@ivar signum: the signals we handle
@type called: boolean
@ivar called: tracks whether any of the signals have been raised
"""
def __init__(self, signum, handler_fn=None, wakeup=None):
"""Constructs a new SignalHandler instance.
@type signum: int or list of ints
@param signum: Single signal number or set of signal numbers
@type handler_fn: callable
@param handler_fn: Signal handling function
"""
assert handler_fn is None or callable(handler_fn)
self.signum = set(signum)
self.called = False
self._handler_fn = handler_fn
self._wakeup = wakeup
self._previous = {}
try:
for signum in self.signum:
# Setup handler
prev_handler = signal.signal(signum, self._HandleSignal)
try:
self._previous[signum] = prev_handler
except:
# Restore previous handler
signal.signal(signum, prev_handler)
raise
except:
# Reset all handlers
self.Reset()
# Here we have a race condition: a handler may have already been called,
# but there's not much we can do about it at this point.
raise
def __del__(self):
self.Reset()
def Reset(self):
"""Restore previous handler.
This will reset all the signals to their previous handlers.
"""
for signum, prev_handler in self._previous.items():
signal.signal(signum, prev_handler)
# If successful, remove from dict
del self._previous[signum]
def Clear(self):
"""Unsets the L{called} flag.
This function can be used in case a signal may arrive several times.
"""
self.called = False
def _HandleSignal(self, signum, frame):
"""Actual signal handling function.
"""
# This is not nice and not absolutely atomic, but it appears to be the only
# solution in Python -- there are no atomic types.
self.called = True
if self._wakeup:
# Notify whoever is interested in signals
self._wakeup.Notify()
if self._handler_fn:
self._handler_fn(signum, frame)
class FieldSet(object):
"""A simple field set.
Among the features are:
- checking if a string is among a list of static string or regex objects
- checking if a whole list of string matches
- returning the matching groups from a regex match
Internally, all fields are held as regular expression objects.
"""
def __init__(self, *items):
self.items = [re.compile("^%s$" % value) for value in items]
def Extend(self, other_set):
"""Extend the field set with the items from another one"""
self.items.extend(other_set.items)
def Matches(self, field):
"""Checks if a field matches the current set
@type field: str
@param field: the string to match
@return: either None or a regular expression match object
"""
for m in itertools.ifilter(None, (val.match(field) for val in self.items)):
return m
return None
def NonMatching(self, items):
"""Returns the list of fields not matching the current set
@type items: list
@param items: the list of fields to check
@rtype: list
@return: list of non-matching fields
"""
return [val for val in items if not self.Matches(val)]
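# Hedged example (not part of the original source) of FieldSet matching:
#
#     fs = FieldSet("name", r"snode\.\d+")
#     fs.Matches("snode.0")            # -> a match object
#     fs.NonMatching(["name", "foo"])  # -> ["foo"]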
def ValidateDeviceNames(kind, container):
"""Validate instance device names.
Check that a device container contains only unique and valid names.
@type kind: string
@param kind: One-word item description
@type container: list
@param container: Container containing the devices
"""
valid = []
for device in container:
if isinstance(device, dict):
if kind == "NIC":
name = device.get(constants.INIC_NAME, None)
elif kind == "disk":
name = device.get(constants.IDISK_NAME, None)
else:
raise errors.OpPrereqError("Invalid container kind '%s'" % kind,
errors.ECODE_INVAL)
else:
name = device.name
# Check that a device name is not the UUID of another device
valid.append(device.uuid)
try:
int(name)
except (ValueError, TypeError):
pass
else:
raise errors.OpPrereqError("Invalid name '%s'. Purely numeric %s names"
" are not allowed" % (name, kind),
errors.ECODE_INVAL)
if name is not None and name.lower() != constants.VALUE_NONE:
if name in valid:
raise errors.OpPrereqError("%s name '%s' already used" % (kind, name),
errors.ECODE_NOTUNIQUE)
else:
valid.append(name)
def AllDiskOfType(disks_info, dev_types):
"""Checks if the instance has only disks of any of the dev_types.
@type disks_info: list of L{Disk}
@param disks_info: all the disks of the instance.
@type dev_types: list of disk templates
@param dev_types: the disk type required.
@rtype: bool
@return: True iff the instance only has disks of type dev_type.
"""
assert not isinstance(dev_types, str)
if not disks_info and constants.DT_DISKLESS not in dev_types:
return False
for disk in disks_info:
if disk.dev_type not in dev_types:
return False
return True
def AnyDiskOfType(disks_info, dev_types):
"""Checks if the instance has some disks of any types in dev_types.
@type disks_info: list of L{Disk}
@param disks_info: all the disks of the instance.
@type dev_types: list of disk template
@param dev_types: the disk type required.
@rtype: bool
@return: True if the instance has disks of type dev_types or the instance has
no disks and the dev_types allow DT_DISKLESS.
"""
assert not isinstance(dev_types, str)
if not disks_info and constants.DT_DISKLESS in dev_types:
return True
for disk in disks_info:
if disk.dev_type in dev_types:
return True
return False
def GetDiskTemplateString(disk_types):
"""Gives a summary disk template from disk devtypes.
@type disk_types: list of string
@param disk_types: all the dev_types of the instance.
  @rtype: disk template
  @return: the summarized disk template of the disk types.
"""
disk_types = set(dev_type for dev_type in disk_types)
if not disk_types:
return constants.DT_DISKLESS
elif len(disk_types) > 1:
return constants.DT_MIXED
else:
return disk_types.pop()
def GetDiskTemplate(disks_info):
"""Gives a summary disk template from disks.
@type disks_info: list of L{Disk}
@param disks_info: all the disks of the instance.
  @rtype: disk template
  @return: the summarized disk template of the disk types.
"""
return GetDiskTemplateString(d.dev_type for d in disks_info)
|
leshchevds/ganeti
|
lib/utils/__init__.py
|
Python
|
bsd-2-clause
| 27,804
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.verification
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexer for Intermediate Verification Languages (IVLs).
:copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, words
from pygments.token import Comment, Operator, Keyword, Name, Number, \
Punctuation, Whitespace
__all__ = ['BoogieLexer', 'SilverLexer']
class BoogieLexer(RegexLexer):
"""
For `Boogie <https://boogie.codeplex.com/>`_ source code.
.. versionadded:: 2.1
"""
name = 'Boogie'
aliases = ['boogie']
filenames = ['*.bpl']
tokens = {
'root': [
# Whitespace and Comments
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'//[/!](.*?)\n', Comment.Doc),
(r'//(.*?)\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
(words((
'axiom', 'break', 'call', 'ensures', 'else', 'exists', 'function',
'forall', 'if', 'invariant', 'modifies', 'procedure', 'requires',
'then', 'var', 'while'),
suffix=r'\b'), Keyword),
(words(('const',), suffix=r'\b'), Keyword.Reserved),
(words(('bool', 'int', 'ref'), suffix=r'\b'), Keyword.Type),
include('numbers'),
(r"(>=|<=|:=|!=|==>|&&|\|\||[+/\-=>*<\[\]])", Operator),
(r"([{}():;,.])", Punctuation),
# Identifier
(r'[a-zA-Z_]\w*', Name),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'numbers': [
(r'[0-9]+', Number.Integer),
],
}
class SilverLexer(RegexLexer):
"""
For `Silver <https://bitbucket.org/viperproject/silver>`_ source code.
.. versionadded:: 2.2
"""
name = 'Silver'
aliases = ['silver']
filenames = ['*.sil']
tokens = {
'root': [
# Whitespace and Comments
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'//[/!](.*?)\n', Comment.Doc),
(r'//(.*?)\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
(words((
'result', 'true', 'false', 'null', 'method', 'function',
'predicate', 'program', 'domain', 'axiom', 'var', 'returns',
'field', 'define', 'requires', 'ensures', 'invariant',
'fold', 'unfold', 'inhale', 'exhale', 'new', 'assert',
'assume', 'goto', 'while', 'if', 'elseif', 'else', 'fresh',
'constraining', 'Seq', 'Set', 'Multiset', 'union', 'intersection',
'setminus', 'subset', 'unfolding', 'in', 'old', 'forall', 'exists',
'acc', 'wildcard', 'write', 'none', 'epsilon', 'perm', 'unique'),
suffix=r'\b'), Keyword),
(words(('Int', 'Perm', 'Bool', 'Ref'), suffix=r'\b'), Keyword.Type),
include('numbers'),
(r'[!%&*+=|?:<>/-]', Operator),
(r"([{}():;,.])", Punctuation),
# Identifier
(r'[\w$]\w*', Name),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'numbers': [
(r'[0-9]+', Number.Integer),
],
}
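# Hedged usage sketch (not part of the original source): both lexers plug into
# the standard Pygments API, e.g.
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     highlight('procedure P() { }', BoogieLexer(), TerminalFormatter())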
|
aswinpj/Pygments
|
pygments/lexers/verification.py
|
Python
|
bsd-2-clause
| 3,624
|
# Copyright (c) 2011, Enthought, Ltd.
# Author: Pietro Berkes <pberkes@enthought.com>
# License: Modified BSD license (2-clause)
from traits.api import HasTraits, Property, Array
from traits.trait_types import List, Int, Bool, Str
from traitsui.api import View, Item, TabularEditor
from traitsui.group import VGroup
from traitsui.tabular_adapter import TabularAdapter
from traitsui.menu import NoButtons
class Array2DAdapter(TabularAdapter):
columns = List
show_index = Bool(True)
count_from_one = Bool(True)
ncolumns = Int
data_format = Str('%s')
font = 'Courier 10'
alignment = 'right'
format = data_format
index_text = Property
index_alignment = Property
width = 60
def _get_index_text(self):
return '- {} -'.format(self.row)
def _get_index_alignment(self):
return 'left'
def _columns_default(self):
if self.count_from_one:
columns = [('%d' % (i+1), i) for i in range(self.ncolumns)]
else:
columns = [('%d' % i, i) for i in range(self.ncolumns)]
if self.show_index:
columns.insert(0, ('items', 'index'))
return columns
#### Testing and debugging ####################################################
def main():
"""Entry point for standalone testing/debugging."""
class TestShowArray(HasTraits):
data = Array
view = View(
Item(
'data',
editor=TabularEditor
(
adapter=Array2DAdapter(ncolumns=2,
format='%s',
show_index=False)),
show_label=False
),
title = 'Array2D editor',
width = 0.3,
height = 0.8,
resizable = True,
buttons = NoButtons
)
VGroup(Item('data',
editor=TabularEditor
(
adapter=Array2DAdapter(ncolumns=2,
format='%d',
show_index=False)),
show_label=False)),
data = [['a', 'b'], [1, 2]]
blah = TestShowArray(data=data)
blah.data = data
print blah.data
blah.configure_traits()
if __name__ == '__main__':
main()
|
enthought/uchicago-pyanno
|
pyanno/ui/arrayview.py
|
Python
|
bsd-2-clause
| 2,429
|
"""
Boilerplate functions used in defining binary operations.
"""
from functools import wraps
from typing import Callable
from pandas._libs.lib import item_from_zerodim
from pandas._typing import F
from pandas.core.dtypes.generic import ABCDataFrame, ABCIndex, ABCSeries
def unpack_zerodim_and_defer(name: str) -> Callable[[F], F]:
"""
Boilerplate for pandas conventions in arithmetic and comparison methods.
Parameters
----------
name : str
Returns
-------
decorator
"""
def wrapper(method: F) -> F:
return _unpack_zerodim_and_defer(method, name)
return wrapper
def _unpack_zerodim_and_defer(method, name: str):
"""
Boilerplate for pandas conventions in arithmetic and comparison methods.
Ensure method returns NotImplemented when operating against "senior"
classes. Ensure zero-dimensional ndarrays are always unpacked.
Parameters
----------
method : binary method
name : str
Returns
-------
method
"""
is_cmp = name.strip("__") in {"eq", "ne", "lt", "le", "gt", "ge"}
@wraps(method)
def new_method(self, other):
if is_cmp and isinstance(self, ABCIndex) and isinstance(other, ABCSeries):
# For comparison ops, Index does *not* defer to Series
pass
else:
for cls in [ABCDataFrame, ABCSeries, ABCIndex]:
if isinstance(self, cls):
break
if isinstance(other, cls):
return NotImplemented
other = item_from_zerodim(other)
return method(self, other)
return new_method
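# Hedged usage sketch (not part of the original source) of how the decorator
# is typically applied inside an array-like class:
#
#     class MyArray:
#         @unpack_zerodim_and_defer("__add__")
#         def __add__(self, other):
#             ...  # 'other' is already unpacked; DataFrame/Series deferred to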
def get_op_result_name(left, right):
"""
Find the appropriate name to pin to an operation result. This result
should always be either an Index or a Series.
Parameters
----------
left : {Series, Index}
right : object
Returns
-------
name : object
Usually a string
"""
if isinstance(right, (ABCSeries, ABCIndex)):
name = _maybe_match_name(left, right)
else:
name = left.name
return name
def _maybe_match_name(a, b):
"""
Try to find a name to attach to the result of an operation between
a and b. If only one of these has a `name` attribute, return that
name. Otherwise return a consensus name if they match or None if
they have different names.
Parameters
----------
a : object
b : object
Returns
-------
name : str or None
See Also
--------
pandas.core.common.consensus_name_attr
"""
a_has = hasattr(a, "name")
b_has = hasattr(b, "name")
if a_has and b_has:
if a.name == b.name:
return a.name
else:
# TODO: what if they both have np.nan for their names?
return None
elif a_has:
return a.name
elif b_has:
return b.name
return None
|
jreback/pandas
|
pandas/core/ops/common.py
|
Python
|
bsd-3-clause
| 2,915
|
"""
kombu.transport.SQS
===================
Amazon SQS transport module for Kombu. This package implements an AMQP-like
interface on top of Amazons SQS service, with the goal of being optimized for
high performance and reliability.
The default settings for this module are focused now on high performance in
task queue situations where tasks are small, idempotent and run very fast.
SQS Features supported by this transport:
Long Polling:
http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/
sqs-long-polling.html
Long polling is enabled by setting the `wait_time_seconds` transport
option to a number > 1. Amazon supports up to 20 seconds. This is
disabled for now, but will be enabled by default in the near future.
Batch API Actions:
http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/
sqs-batch-api.html
The default behavior of the SQS Channel.drain_events() method is to
request up to the 'prefetch_count' messages on every request to SQS.
These messages are stored locally in a deque object and passed back
to the Transport until the deque is empty, before triggering a new
API call to Amazon.
This behavior dramatically speeds up the rate that you can pull tasks
from SQS when you have short-running tasks (or a large number of workers).
When a Celery worker has multiple queues to monitor, it will pull down
up to 'prefetch_count' messages from queueA and work on them all before
moving on to queueB. If queueB is empty, it will wait up until
'polling_interval' expires before moving back and checking on queueA.
"""
from __future__ import absolute_import
import collections
import socket
import string
from amqp.promise import transform, ensure_promise, promise
from kombu.async import get_event_loop
from kombu.async.aws import sqs as _asynsqs
from kombu.async.aws.ext import boto, exception
from kombu.async.aws.sqs.connection import AsyncSQSConnection, SQSConnection
from kombu.async.aws.sqs.ext import regions
from kombu.async.aws.sqs.message import Message
from kombu.five import Empty, range, string_t, text_t
from kombu.log import get_logger
from kombu.utils import cached_property
from kombu.utils.encoding import bytes_to_str, safe_str
from kombu.utils.json import loads, dumps
from kombu.utils import scheduling
from . import virtual
logger = get_logger(__name__)
# dots are replaced by dash, all other punctuation
# replaced by underscore.
CHARS_REPLACE_TABLE = {
ord(c): 0x5f for c in string.punctuation if c not in '-_.'
}
CHARS_REPLACE_TABLE[0x2e] = 0x2d # '.' -> '-'
#: SQS bulk get supports a maximum of 10 messages at a time.
SQS_MAX_MESSAGES = 10
def maybe_int(x):
try:
return int(x)
except ValueError:
return x
class Channel(virtual.Channel):
default_region = 'us-east-1'
default_visibility_timeout = 1800 # 30 minutes.
default_wait_time_seconds = 10 # disabled see #198
domain_format = 'kombu%(vhost)s'
_asynsqs = None
_sqs = None
_queue_cache = {}
_noack_queues = set()
def __init__(self, *args, **kwargs):
if boto is None:
raise ImportError('boto is not installed')
super(Channel, self).__init__(*args, **kwargs)
# SQS blows up if you try to create a new queue when one already
# exists but with a different visibility_timeout. This prepopulates
# the queue_cache to protect us from recreating
# queues that are known to already exist.
try:
queues = self.sqs.get_all_queues(prefix=self.queue_name_prefix)
except exception.SQSError as exc:
if exc.status == 403:
raise RuntimeError(
'SQS authorization error, access_key={0}'.format(
self.sqs.access_key))
raise
else:
self._queue_cache.update({
queue.name: queue for queue in queues
})
# The drain_events() method stores extra messages in a local
# Deque object. This allows multiple messages to be requested from
# SQS at once for performance, but maintains the same external API
# to the caller of the drain_events() method.
self._queue_message_cache = collections.deque()
self.hub = kwargs.get('hub') or get_event_loop()
def basic_consume(self, queue, no_ack, *args, **kwargs):
if no_ack:
self._noack_queues.add(queue)
if self.hub:
self._loop1(queue)
return super(Channel, self).basic_consume(
queue, no_ack, *args, **kwargs
)
def basic_cancel(self, consumer_tag):
if consumer_tag in self._consumers:
queue = self._tag_to_queue[consumer_tag]
self._noack_queues.discard(queue)
return super(Channel, self).basic_cancel(consumer_tag)
def drain_events(self, timeout=None):
"""Return a single payload message from one of our queues.
:raises Empty: if no messages available.
"""
# If we're not allowed to consume or have no consumers, raise Empty
if not self._consumers or not self.qos.can_consume():
raise Empty()
message_cache = self._queue_message_cache
# Check if there are any items in our buffer. If there are any, pop
# off that queue first.
try:
return message_cache.popleft()
except IndexError:
pass
# At this point, go and get more messages from SQS
res, queue = self._poll(self.cycle, timeout=timeout)
message_cache.extend((r, queue) for r in res)
# Now try to pop off the queue again.
try:
return message_cache.popleft()
except IndexError:
raise Empty()
def _reset_cycle(self):
"""Reset the consume cycle.
:returns: a FairCycle object that points to our _get_bulk() method
rather than the standard _get() method. This allows for multiple
messages to be returned at once from SQS (based on the prefetch
limit).
"""
self._cycle = scheduling.FairCycle(
self._get_bulk, self._active_queues, Empty,
)
def entity_name(self, name, table=CHARS_REPLACE_TABLE):
"""Format AMQP queue name into a legal SQS queue name."""
return text_t(safe_str(name)).translate(table)
def _new_queue(self, queue, **kwargs):
"""Ensure a queue with given name exists in SQS."""
if not isinstance(queue, string_t):
return queue
# Translate to SQS name for consistency with initial
# _queue_cache population.
queue = self.entity_name(self.queue_name_prefix + queue)
try:
return self._queue_cache[queue]
except KeyError:
q = self._queue_cache[queue] = self.sqs.create_queue(
queue, self.visibility_timeout,
)
return q
def _delete(self, queue, *args):
"""delete queue by name."""
super(Channel, self)._delete(queue)
self._queue_cache.pop(queue, None)
def _put(self, queue, message, **kwargs):
"""Put message onto queue."""
q = self._new_queue(queue)
m = Message()
m.set_body(dumps(message))
q.write(m)
def _message_to_python(self, message, queue_name, queue):
payload = loads(bytes_to_str(message.get_body()))
if queue_name in self._noack_queues:
queue.delete_message(message)
else:
payload['properties']['delivery_info'].update({
'sqs_message': message, 'sqs_queue': queue,
})
# set delivery tag to SQS receipt handle
payload['properties']['delivery_tag'] = message.receipt_handle
return payload
def _messages_to_python(self, messages, queue):
"""Convert a list of SQS Message objects into Payloads.
This method handles converting SQS Message objects into
Payloads, and appropriately updating the queue depending on
the 'ack' settings for that queue.
:param messages: A list of SQS Message objects.
:param queue: String name representing the queue they came from
:returns: A list of Payload objects
"""
q = self._new_queue(queue)
return [self._message_to_python(m, queue, q) for m in messages]
def _get_bulk(self, queue,
max_if_unlimited=SQS_MAX_MESSAGES, callback=None):
"""Try to retrieve multiple messages off ``queue``.
Where :meth:`_get` returns a single Payload object, this method
returns a list of Payload objects. The number of objects returned
is determined by the total number of messages available in the queue
and the number of messages the QoS object allows (based on the
prefetch_count).
.. note::
Ignores QoS limits so caller is responsible for checking
that we are allowed to consume at least one message from the
queue. get_bulk will then ask QoS for an estimate of
the number of extra messages that we can consume.
:param queue: The queue name to pull from.
        :returns: A list of message objects.
"""
# drain_events calls `can_consume` first, consuming
# a token, so we know that we are allowed to consume at least
# one message.
maxcount = self._get_message_estimate()
if maxcount:
q = self._new_queue(queue)
messages = q.get_messages(num_messages=maxcount)
if messages:
return self._messages_to_python(messages, queue)
raise Empty()
def _get(self, queue):
"""Try to retrieve a single message off ``queue``."""
q = self._new_queue(queue)
messages = q.get_messages(num_messages=1)
if messages:
return self._messages_to_python(messages, queue)[0]
raise Empty()
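    # Overview of the asynchronous consumption chain (used when an event
    # loop/hub is available): basic_consume() -> _loop1() ->
    # _schedule_queue() -> _get_bulk_async() -> _get_async() ->
    # _get_from_sqs(); when the SQS response arrives, _on_messages_ready()
    # delivers the messages and the queue is re-scheduled via _loop1().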
def _loop1(self, queue, _=None):
self.hub.call_soon(self._schedule_queue, queue)
def _schedule_queue(self, queue):
if queue in self._active_queues:
if self.qos.can_consume():
self._get_bulk_async(
queue, callback=promise(self._loop1, (queue,)),
)
else:
self._loop1(queue)
def _get_message_estimate(self, max_if_unlimited=SQS_MAX_MESSAGES):
maxcount = self.qos.can_consume_max_estimate()
return min(
max_if_unlimited if maxcount is None else max(maxcount, 1),
max_if_unlimited,
)
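    # Illustrative example: with prefetch_count=4 and three messages still
    # unacknowledged, can_consume_max_estimate() returns 1 and only one
    # message is requested; with no prefetch limit it returns None and the
    # SQS maximum of SQS_MAX_MESSAGES (10) is requested instead.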
def _get_bulk_async(self, queue,
max_if_unlimited=SQS_MAX_MESSAGES, callback=None):
maxcount = self._get_message_estimate()
if maxcount:
return self._get_async(queue, maxcount, callback=callback)
# Not allowed to consume, make sure to notify callback..
callback = ensure_promise(callback)
callback([])
return callback
def _get_async(self, queue, count=1, callback=None):
q = self._new_queue(queue)
return self._get_from_sqs(
q, count=count, connection=self.asynsqs,
callback=transform(self._on_messages_ready, callback, q, queue),
)
def _on_messages_ready(self, queue, qname, messages):
if messages:
callbacks = self.connection._callbacks
for raw_message in messages:
message = self._message_to_python(raw_message, qname, queue)
callbacks[qname](message)
def _get_from_sqs(self, queue,
count=1, connection=None, callback=None):
"""Retrieve and handle messages from SQS.
Uses long polling and returns :class:`~amqp.promise`.
"""
connection = connection if connection is not None else queue.connection
return connection.receive_message(
queue, number_messages=count,
wait_time_seconds=self.wait_time_seconds,
callback=callback,
)
def _restore(self, message,
unwanted_delivery_info=('sqs_message', 'sqs_queue')):
for unwanted_key in unwanted_delivery_info:
# Remove objects that aren't JSON serializable (Issue #1108).
message.delivery_info.pop(unwanted_key, None)
return super(Channel, self)._restore(message)
def basic_ack(self, delivery_tag):
delivery_info = self.qos.get(delivery_tag).delivery_info
try:
queue = delivery_info['sqs_queue']
except KeyError:
pass
else:
queue.delete_message(delivery_info['sqs_message'])
super(Channel, self).basic_ack(delivery_tag)
def _size(self, queue):
"""Return the number of messages in a queue."""
return self._new_queue(queue).count()
def _purge(self, queue):
"""Delete all current messages in a queue."""
q = self._new_queue(queue)
# SQS is slow at registering messages, so run for a few
# iterations to ensure messages are deleted.
size = 0
for i in range(10):
size += q.count()
if not size:
break
q.clear()
return size
def close(self):
super(Channel, self).close()
for conn in (self._sqs, self._asynsqs):
if conn:
try:
conn.close()
except AttributeError as exc: # FIXME ???
if "can't set attribute" not in str(exc):
raise
def _get_regioninfo(self, regions):
if self.regioninfo:
return self.regioninfo
if self.region:
for _r in regions:
if _r.name == self.region:
return _r
def _aws_connect_to(self, fun, regions):
conninfo = self.conninfo
region = self._get_regioninfo(regions)
is_secure = self.is_secure if self.is_secure is not None else True
port = self.port if self.port is not None else conninfo.port
return fun(region=region,
aws_access_key_id=conninfo.userid,
aws_secret_access_key=conninfo.password,
is_secure=is_secure,
port=port)
@property
def sqs(self):
if self._sqs is None:
self._sqs = self._aws_connect_to(SQSConnection, regions())
return self._sqs
@property
def asynsqs(self):
if self._asynsqs is None:
self._asynsqs = self._aws_connect_to(
AsyncSQSConnection, _asynsqs.regions(),
)
return self._asynsqs
@property
def conninfo(self):
return self.connection.client
@property
def transport_options(self):
return self.connection.client.transport_options
@cached_property
def visibility_timeout(self):
return (self.transport_options.get('visibility_timeout') or
self.default_visibility_timeout)
@cached_property
def queue_name_prefix(self):
return self.transport_options.get('queue_name_prefix', '')
@cached_property
def supports_fanout(self):
return False
@cached_property
def region(self):
return self.transport_options.get('region') or self.default_region
@cached_property
def regioninfo(self):
return self.transport_options.get('regioninfo')
@cached_property
def is_secure(self):
return self.transport_options.get('is_secure')
@cached_property
def port(self):
return self.transport_options.get('port')
@cached_property
def wait_time_seconds(self):
return self.transport_options.get('wait_time_seconds',
self.default_wait_time_seconds)
class Transport(virtual.Transport):
Channel = Channel
polling_interval = 1
wait_time_seconds = 0
default_port = None
connection_errors = (
virtual.Transport.connection_errors +
(exception.SQSError, socket.error)
)
channel_errors = (
virtual.Transport.channel_errors + (exception.SQSDecodeError,)
)
driver_type = 'sqs'
driver_name = 'sqs'
implements = virtual.Transport.implements.extend(
async=True,
exchange_type=frozenset(['direct']),
)
| daevaorn/kombu | kombu/transport/SQS.py | Python | bsd-3-clause | 16,498 |