code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def _format_resolution_output(path, project, folderpath, entity_name, result):
    """
    :param path: Path to the object that required resolution; propagated from command-line
    :type path: string
    :param project: The potential project the entity belongs to
    :type project: string
    :param folderpath: Path to the entity
    :type folderpath: string
    :param entity_name: The name of the entity
    :type entity_name: string
    :param result: The result of resolving entity_name
    :type result: list of dictionaries
    :returns: The validated resolution output
    :rtype: dictionary

    Formats the output from the resolution of entity_name based on the number
    of resolved entities.

    If no results are found and entity_name can be resolved to a folder:
        {"project": <project>, "folder": <folder>, "name": None}
    If exactly one result is found:
        {"project": <project> or None, "folder": None,
         "name": {"id": <id>, "project": <project>}}
    Else:
        {"project": None, "folder": None, "name": None}
    """
    try:
        if len(result) > 0:
            # At least one data object matched; validate the match count
            validated = _validate_resolution_output_length(path, entity_name, result)
            resolved_project = None if is_job_id(project) else project
            return {"project": resolved_project, "folder": None, "name": validated}
        # No data objects matched; see whether the name refers to a folder
        resolved_folder = _resolve_folder(project, folderpath, entity_name)
        return {"project": project, "folder": resolved_folder, "name": None}
    except ResolutionError:
        return {"project": None, "folder": None, "name": None}
3.563351
2.887188
1.234194
def resolve_multiple_existing_paths(paths):
    """
    :param paths: A list of paths to items that need to be resolved
    :type paths: list
    :returns: A dictionary mapping a specified path to either its resolved
              object or Nones, if the object could not be resolved
    :rtype: dict

    For each input given in paths, attempts to resolve the path, and returns
    the resolved object in a dictionary of the form:
        {<path1>: <resolved_object1>, <path2>: <resolved_object2>, ...}
    See the per-case shapes of <resolved_object*> in the module docs; on
    failure the value is {"project": None, "folder": None, "name": None}.
    """
    done_objects = {}                # Return value
    to_resolve_in_batch_paths = []   # Paths to resolve
    to_resolve_in_batch_inputs = []  # Project, folderpath, and entity name
    for path in paths:
        project, folderpath, entity_name = resolve_path(path, expected='entity')
        try:
            must_resolve, project, folderpath, entity_name = _check_resolution_needed(
                path, project, folderpath, entity_name)
        except Exception:
            # Fixed: was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt; any ordinary failure still means "don't resolve"
            must_resolve = False
        if must_resolve:
            if is_glob_pattern(entity_name):
                # TODO: Must call findDataObjects because resolveDataObjects
                # does not support glob patterns
                try:
                    find_results = _resolve_global_entity(project, folderpath, entity_name)
                    done_objects[path] = _format_resolution_output(path, project, folderpath,
                                                                   entity_name, find_results)
                except ResolutionError:
                    # Catches any ResolutionError thrown by _resolve_global_entity
                    done_objects[path] = {"project": None, "folder": None, "name": None}
            else:
                # Prepare batch call for resolveDataObjects
                to_resolve_in_batch_paths.append(path)
                to_resolve_in_batch_inputs.append({"project": project,
                                                   "folder": folderpath,
                                                   "name": entity_name})
        else:
            # No need to resolve
            done_objects[path] = {"project": project, "folder": folderpath, "name": entity_name}

    # Call resolveDataObjects (skip the API round-trip entirely when nothing
    # was queued for batch resolution)
    if to_resolve_in_batch_inputs:
        resolution_results = dxpy.resolve_data_objects(to_resolve_in_batch_inputs)
        for path, inputs, result in zip(to_resolve_in_batch_paths,
                                        to_resolve_in_batch_inputs,
                                        resolution_results):
            done_objects[path] = _format_resolution_output(path, inputs["project"],
                                                           inputs["folder"],
                                                           inputs["name"], result)
    return done_objects
3.170758
2.883559
1.099599
def check_folder_exists(project, path, folder_name):
    '''
    :param project: project id
    :type project: string
    :param path: path to where we should look for the folder in question
    :type path: string
    :param folder_name: name of the folder in question
    :type folder_name: string
    :returns: A boolean True or False whether the folder exists at the specified path
    :type: boolean
    :raises: :exc:'ResolutionError' if dxpy.api.container_list_folder raises an exception

    This function returns a boolean value that indicates whether a folder of the
    specified name exists at the specified path

    Note: this function will NOT work on the root folder case, i.e. '/'
    '''
    if folder_name is None or path is None:
        return False
    try:
        listing = dxpy.api.container_list_folder(project, {"folder": path, "only": "folders"})
    except dxpy.exceptions.DXAPIError as api_err:
        if api_err.name == 'ResourceNotFound':
            raise ResolutionError(str(api_err.msg))
        raise
    # Sanitize the candidate path before comparing it against the listing
    candidate = path + '/' + folder_name
    candidate, _skip = clean_folder_path(candidate, 'folder')
    # Check that folder name exists in return from list folder API call
    return candidate in listing['folders']
4.438644
2.202775
2.015024
def get_app_from_path(path):
    '''
    :param path: A string to attempt to resolve to an app object
    :type path: string
    :returns: The describe hash of the app object if found, or None otherwise
    :rtype: dict or None

    This method parses a string that is expected to perhaps refer to an app
    object.  If found, its describe hash will be returned.  For more info on
    the contents of this hash, see the API documentation. [TODO: external link
    here]
    '''
    # Normalize to the canonical "app-" prefix
    if not path.startswith('app-'):
        path = 'app-' + path
    # Anything after the first '/' is a version or tag alias
    name, separator, alias = path.partition('/')
    if not separator:
        alias = None
    try:
        return dxpy.api.app_describe(name, alias=alias)
    except dxpy.DXAPIError:
        return None
5.043443
1.757882
2.869045
def get_global_workflow_from_path(path):
    '''
    :param path: A string to attempt to resolve to a global workflow object
    :type path: string
    :returns: The describe hash of the global workflow object if found, or None otherwise
    :rtype: dict or None

    This method parses a string that is expected to perhaps refer to a global
    workflow object.  If found, its describe hash will be returned.  For more
    info on the contents of this hash, see the API documentation. [TODO:
    external link here]
    '''
    # Normalize to the canonical "globalworkflow-" prefix
    if not path.startswith('globalworkflow-'):
        path = 'globalworkflow-' + path
    # Anything after the first '/' is a version or tag alias
    name, separator, alias = path.partition('/')
    if not separator:
        alias = None
    try:
        return dxpy.api.global_workflow_describe(name, alias=alias)
    except dxpy.DXAPIError:
        return None
4.960872
1.776814
2.792005
def resolve_global_executable(path, is_version_required=False):
    """
    :param path: A string which is supposed to identify a global executable (app or workflow)
    :type path: string
    :param is_version_required: If set to True, the path has to specify a specific
        version/alias, e.g. "myapp/1.0.0"
    :type is_version_required: boolean
    :returns: The describe hash of the global executable object (app or workflow)
    :raises: :exc:`ResolutionError` if it cannot be found

    *path* is expected to have one of the following forms:

    - hash ID, e.g. "globalworkflow-F85Z6bQ0xku1PKY6FjGQ011J", "app-FBZ3f200yfzkKYyp9JkFVQ97"
    - named ID, e.g. "app-myapp", "globalworkflow-myworkflow"
    - named ID with alias (version or tag), e.g. "myapp/1.2.0", "myworkflow/1.2.0"
    - named ID with prefix and with alias (version or tag), e.g. "app-myapp/1.2.0",
      "globalworkflow-myworkflow/1.2.0"
    """
    # Require an explicit version/alias unless the path is already a hash ID
    if not is_hashid(path) and is_version_required and "/" not in path:
        # Fixed: dropped a no-op str.format() call on a literal with no
        # replacement fields
        raise ResolutionError('Version is required, e.g. "myexec/1.0.0"')

    # First, check if the prefix is provided, then we don't have to resolve the name
    if path.startswith('app-'):
        return resolve_app(path)
    if path.startswith('globalworkflow-'):
        return resolve_global_workflow(path)

    # If the path doesn't include a prefix, we must try describing
    # as an app and, if that fails, as a global workflow
    desc = get_app_from_path(path)
    if not desc:
        desc = get_global_workflow_from_path(path)
    if desc is None:
        raise ResolutionError(
            'The given path "' + path + '" could not be resolved to an accessible global executable (app or workflow)')
    return desc
5.589559
4.342333
1.287225
def resolve_to_objects_or_project(path, all_matching_results=False):
    '''
    :param path: Path to resolve
    :type path: string
    :param all_matching_results: Whether to return a list of all matching results
    :type all_matching_results: boolean

    A thin wrapper over :meth:`resolve_existing_path` which throws an error if
    the path does not look like a project and doesn't match a data object
    path.

    Returns either a list of results or a single result (depending on how many
    is expected; if only one, then an interactive picking of a choice will be
    initiated if input is a tty, or else throw an error).
    '''
    # Attempt to resolve name
    project, folderpath, entity_results = resolve_existing_path(path,
                                                                expected='entity',
                                                                allow_mult=True,
                                                                all_mult=all_matching_results)
    if entity_results is None and not is_container_id(path):
        # Fixed: compare against None with "is not" rather than "!=" (PEP 8)
        if folderpath is not None and folderpath != '/':
            raise ResolutionError('Could not resolve "' + path + \
'''" to an existing data object or to only a project; if you were attempting to refer to a project by name, please append a colon ":" to indicate that it is a project.''')
    return project, folderpath, entity_results
7.472747
3.057619
2.443976
def set_id(self, dxid):
    '''
    :param dxid: New job ID to be associated with the handler (localjob IDs also accepted for local runs)
    :type dxid: string

    Discards the currently stored ID and associates the handler with *dxid*
    '''
    if dxid is not None:
        # localjob IDs (which do not follow the usual ID syntax) are allowed
        # to bypass the usual syntax check
        looks_local = isinstance(dxid, basestring) and dxid.startswith('localjob-')
        if not looks_local:
            verify_string_dxid(dxid, self._class)
    self._dxid = dxid
9.253603
4.232109
2.186523
def describe(self, fields=None, io=None, **kwargs):
    """
    :param fields: dict where the keys are field names that should be returned,
        and values should be set to True (by default, all fields are returned)
    :type fields: dict
    :param io: Include input and output fields in description; cannot be
        provided with *fields*; default is True if *fields* is not provided
        (deprecated)
    :type io: bool
    :returns: Description of the job
    :rtype: dict

    Returns a hash with key-value pairs containing information about the job,
    including its state and (optionally) its inputs and outputs, as described
    in the API documentation for the /job-xxxx/describe method.
    """
    # The two selectors are mutually exclusive
    if fields is not None and io is not None:
        raise DXError('DXJob.describe: cannot provide non-None values for both fields and io')
    describe_input = {key: value
                      for key, value in (('fields', fields), ('io', io))
                      if value is not None}
    self._desc = dxpy.api.job_describe(self._dxid, describe_input, **kwargs)
    return self._desc
3.069823
3.357096
0.914428
def add_tags(self, tags, **kwargs):
    """
    :param tags: Tags to add to the job
    :type tags: list of strings

    Adds each of the specified tags to the job. Takes no action for tags
    that are already listed for the job.
    """
    api_input = {"tags": tags}
    dxpy.api.job_add_tags(self._dxid, api_input, **kwargs)
6.341293
6.008894
1.055318
def remove_tags(self, tags, **kwargs):
    """
    :param tags: Tags to remove from the job
    :type tags: list of strings

    Removes each of the specified tags from the job. Takes no action for
    tags that the job does not currently have.
    """
    api_input = {"tags": tags}
    dxpy.api.job_remove_tags(self._dxid, api_input, **kwargs)
6.92475
6.360837
1.088654
def set_properties(self, properties, **kwargs):
    """
    :param properties: Property names and values given as key-value pairs of strings
    :type properties: dict

    Given key-value pairs in *properties* for property names and values,
    the properties are set on the job for the given property names. Any
    property with a value of :const:`None` indicates the property will be
    deleted.

    .. note:: Any existing properties not mentioned in *properties* are
       not modified by this method.
    """
    api_input = {"properties": properties}
    dxpy.api.job_set_properties(self._dxid, api_input, **kwargs)
7.429538
9.030824
0.822687
def wait_on_done(self, interval=2, timeout=3600*24*7, **kwargs):
    '''
    :param interval: Number of seconds between queries to the job's state
    :type interval: integer
    :param timeout: Maximum amount of time to wait, in seconds, until the job is done running
    :type timeout: integer
    :raises: :exc:`~dxpy.exceptions.DXError` if the timeout is reached before
        the job has finished running, or :exc:`dxpy.exceptions.DXJobFailureError`
        if the job fails

    Waits until the job has finished running.
    '''
    elapsed = 0
    while True:
        state = self._get_state(**kwargs)
        if state == "done":
            break
        if state == "failed":
            desc = self.describe(**kwargs)
            err_msg = "Job has failed because of {failureReason}: {failureMessage}".format(**desc)
            # Fixed: compare against None with "is not" rather than "!=" (PEP 8)
            if desc.get("failureFrom") is not None and desc["failureFrom"]["id"] != desc["id"]:
                err_msg += " (failure from {id})".format(id=desc['failureFrom']['id'])
            raise DXJobFailureError(err_msg)
        if state == "terminated":
            raise DXJobFailureError("Job was terminated.")
        # Give up once the accumulated wait exceeds the timeout (or if the
        # counter somehow went negative)
        if elapsed >= timeout or elapsed < 0:
            raise DXJobFailureError("Reached timeout while waiting for the job to finish")
        time.sleep(interval)
        elapsed += interval
3.333269
2.225578
1.49771
''' :returns: State of the remote object :rtype: string Queries the API server for the job's state. Note that this function is shorthand for: dxjob.describe(io=False, **kwargs)["state"] ''' return self.describe(fields=dict(state=True), **kwargs)["state"]
def _get_state(self, **kwargs)
:returns: State of the remote object :rtype: string Queries the API server for the job's state. Note that this function is shorthand for: dxjob.describe(io=False, **kwargs)["state"]
11.679734
2.445012
4.776964
def get_user_id(user_id_or_username):
    """
    Gets the user ID based on the value `user_id_or_username` specified on the
    command-line, being extra lenient and lowercasing the value in all cases.
    """
    user_id = user_id_or_username.lower()
    if not user_id.startswith("user-"):
        # Fixed: dropped a redundant second .lower() call (the value was
        # already lowercased above)
        user_id = "user-" + user_id
    return user_id
1.979639
2.035876
0.972377
def get_org_invite_args(user_id, args):
    """
    Used by:
        dx new user
        dx add member

    PRECONDITION:
        - If /org-x/invite is being called in conjunction with /user/new, then
          `_validate_new_user_input()` has been called on `args`; otherwise,
          the parser must perform all the basic input validation.
    """
    invite_args = {"invitee": user_id, "level": args.level}
    if "set_bill_to" in args and args.set_bill_to is True:
        # /org-x/invite is called in conjunction with /user/new.
        invite_args["allowBillableActivities"] = True
    else:
        invite_args["allowBillableActivities"] = args.allow_billable_activities
    invite_args["appAccess"] = args.app_access
    invite_args["projectAccess"] = args.project_access
    invite_args["suppressEmailNotification"] = args.no_email
    return invite_args
3.856662
3.212523
1.200509
user_new_args = {"username": args.username, "email": args.email} if args.first is not None: user_new_args["first"] = args.first if args.last is not None: user_new_args["last"] = args.last if args.middle is not None: user_new_args["middle"] = args.middle if args.token_duration is not None: token_duration_ms = normalize_timedelta(args.token_duration) if token_duration_ms > 30 * 24 * 60 * 60 * 1000: raise ValueError("--token-duration must be 30 days or less") else: user_new_args["tokenDuration"] = token_duration_ms if args.occupation is not None: user_new_args["occupation"] = args.occupation if args.set_bill_to is True: user_new_args["billTo"] = args.org return user_new_args
def _get_user_new_args(args)
PRECONDITION: `_validate_new_user_input()` has been called on `args`.
2.06223
2.056258
1.002904
def _get_input_for_run(args, executable, preset_inputs=None, input_name_prefix=None):
    """
    Returns an input dictionary that can be passed to executable.run()
    """
    # The following may throw if the executable is a workflow with no input
    # spec available (because a stage is inaccessible)
    inputs_obj = try_call(ExecutableInputs,
                          executable,
                          input_name_prefix=input_name_prefix,
                          active_region=args.region)

    # Use input and system requirements from a cloned execution;
    # --input-json and --input-json-file completely override input from the
    # cloned job
    if args.input_json is None and args.filename is None:
        inputs_obj.update(args.input_from_clone, strip_prefix=False)

    # Fold in any inputs passed directly to this function
    if preset_inputs is not None:
        inputs_obj.update(preset_inputs, strip_prefix=False)

    # Update with inputs passed with -i, --input_json, --input_json_file, etc.
    # If batch_tsv is set, do not prompt for missing arguments
    require_all_inputs = args.batch_tsv is None
    try_call(inputs_obj.update_from_args, args, require_all_inputs)
    return inputs_obj.inputs
6.89207
6.836308
1.008157
def register_parser(parser, subparsers_action=None, categories=('other', ), add_help=True):
    """
    Attaches `parser` to the global ``parser_map``. If `add_help` is truthy,
    then adds the helpstring of `parser` into the output of ``dx help...``,
    for each category in `categories`.

    :param subparsers_action: A special action object that is returned by
        ``ArgumentParser.add_subparsers(...)``, or None.
    :type subparsers_action: argparse._SubParsersAction, or None.
    """
    name = re.sub('^dx ', '', parser.prog)
    if subparsers_action is None:
        subparsers_action = subparsers
    if isinstance(categories, basestring):
        categories = (categories, )
    parser_map[name] = parser
    if not add_help:
        return
    # The most recently added choice action carries this parser's help text
    _help = subparsers_action._choices_actions[-1].help
    parser_categories['all']['cmds'].append((name, _help))
    for category in categories:
        parser_categories[category]['cmds'].append((name, _help))
3.955855
3.825419
1.034097
def _is_retryable_exception(e):
    """
    Returns True if the exception is always safe to retry.

    This is True if the client was never able to establish a connection
    to the server (for example, name resolution failed or the connection
    could otherwise not be initialized).

    Conservatively, if we can't tell whether a network connection could
    have been established, we return False.
    """
    # A ProtocolError wraps the underlying socket error as its second arg
    if isinstance(e, urllib3.exceptions.ProtocolError):
        e = e.args[1]
    return (isinstance(e, (socket.gaierror, socket.herror))
            or (isinstance(e, socket.error) and e.errno in _RETRYABLE_SOCKET_ERRORS)
            or isinstance(e, urllib3.exceptions.NewConnectionError))
2.048567
2.221991
0.921951
def _extract_msg_from_last_exception():
    ''' Extract a useful error message from the last thrown exception '''
    _exc_type, last_error, _tb = sys.exc_info()
    if isinstance(last_error, exceptions.DXAPIError):
        # Using the generic path below would not produce a useful message
        # when the error contains a 'details' hash (which would have a last
        # line of '}')
        return last_error.error_message()
    return traceback.format_exc().splitlines()[-1].strip()
7.483786
6.92369
1.080896
''' Returns the time in seconds that we should wait. :param num_attempts: number of attempts that have been made to the resource, including the most recent failed one :type num_attempts: int ''' if response is not None and response.status == 503 and 'retry-after' in response.headers: try: return int(response.headers['retry-after']) except ValueError: # In RFC 2616, retry-after can be formatted as absolute time # instead of seconds to wait. We don't bother to parse that, # but the apiserver doesn't generate such responses anyway. pass if num_attempts <= 1: return 1 num_attempts = min(num_attempts, 7) return randint(2 ** (num_attempts - 2), 2 ** (num_attempts - 1))
def _calculate_retry_delay(response, num_attempts)
Returns the time in seconds that we should wait. :param num_attempts: number of attempts that have been made to the resource, including the most recent failed one :type num_attempts: int
4.275826
3.184502
1.342698
def set_api_server_info(host=None, port=None, protocol=None):
    '''
    :param host: API server hostname
    :type host: string
    :param port: API server port. If not specified, *port* is guessed based on *protocol*.
    :type port: string
    :param protocol: Either "http" or "https"
    :type protocol: string

    Overrides the current settings for which API server to communicate
    with. Any parameters that are not explicitly specified are not
    overridden.
    '''
    global APISERVER_PROTOCOL, APISERVER_HOST, APISERVER_PORT, APISERVER
    if host is not None:
        APISERVER_HOST = host
    if port is not None:
        APISERVER_PORT = port
    if protocol is not None:
        APISERVER_PROTOCOL = protocol
    # Rebuild the full server URL; omit the port suffix when none was given
    base_url = APISERVER_PROTOCOL + "://" + APISERVER_HOST
    if port is None or port == '':
        APISERVER = base_url
    else:
        APISERVER = base_url + ":" + str(APISERVER_PORT)
2.504537
1.496632
1.673449
def get_auth_server_name(host_override=None, port_override=None, protocol='https'):
    """
    Chooses the auth server name from the currently configured API server name.

    Raises DXError if the auth server name cannot be guessed and the overrides
    are not provided (or improperly provided).
    """
    if host_override is not None or port_override is not None:
        if host_override is None or port_override is None:
            raise exceptions.DXError("Both host and port must be specified if either is specified")
        return protocol + '://' + host_override + ':' + str(port_override)
    # Known production/staging API hosts map directly to their auth servers
    known_auth_servers = {
        'stagingapi.dnanexus.com': 'https://stagingauth.dnanexus.com',
        'api.dnanexus.com': 'https://auth.dnanexus.com',
        'stagingapi.cn.dnanexus.com': 'https://stagingauth.cn.dnanexus.com:7001',
        'api.cn.dnanexus.com': 'https://auth.cn.dnanexus.com:8001',
    }
    if APISERVER_HOST in known_auth_servers:
        return known_auth_servers[APISERVER_HOST]
    if APISERVER_HOST in ("localhost", "127.0.0.1"):
        # Local development requires explicit auth server configuration
        if "DX_AUTHSERVER_HOST" not in os.environ or "DX_AUTHSERVER_PORT" not in os.environ:
            err_msg = "Must set authserver env vars (DX_AUTHSERVER_HOST, DX_AUTHSERVER_PORT) if apiserver is {apiserver}."
            raise exceptions.DXError(err_msg.format(apiserver=APISERVER_HOST))
        return os.environ["DX_AUTHSERVER_HOST"] + ":" + os.environ["DX_AUTHSERVER_PORT"]
    err_msg = "Could not determine which auth server is associated with {apiserver}."
    raise exceptions.DXError(err_msg.format(apiserver=APISERVER_HOST))
2.097988
1.999477
1.049268
def append_underlying_workflow_describe(globalworkflow_desc):
    """
    Adds the "workflowDescribe" field to the config for each region of
    the global workflow. The value is the description of an underlying
    workflow in that region.
    """
    # Pass through anything that is not a global workflow with regions
    is_regional_gwf = (globalworkflow_desc
                       and globalworkflow_desc['class'] == 'globalworkflow'
                       and 'regionalOptions' in globalworkflow_desc)
    if not is_regional_gwf:
        return globalworkflow_desc
    for region, config in globalworkflow_desc['regionalOptions'].items():
        underlying_desc = dxpy.api.workflow_describe(config['workflow'])
        globalworkflow_desc['regionalOptions'][region]['workflowDescribe'] = underlying_desc
    return globalworkflow_desc
3.071335
2.719344
1.129439
''' :param job_id: Job ID :type job_id: string :param field_name_and_maybe_index: Field name, plus possibly ".N" where N is an array index :type field_name_and_maybe_index: string :returns: dict of JBOR ''' link = {"$dnanexus_link": {"job": job_id}} if '.' in field_name_and_maybe_index: split_by_dot = field_name_and_maybe_index.rsplit('.', 1) link["$dnanexus_link"]["field"] = split_by_dot[0] link["$dnanexus_link"]["index"] = int(split_by_dot[1]) else: link["$dnanexus_link"]["field"] = field_name_and_maybe_index return link
def _construct_jbor(job_id, field_name_and_maybe_index)
:param job_id: Job ID :type job_id: string :param field_name_and_maybe_index: Field name, plus possibly ".N" where N is an array index :type field_name_and_maybe_index: string :returns: dict of JBOR
2.320867
1.699526
1.365596
def update(self, new_inputs, strip_prefix=True):
    """
    Updates the inputs dictionary with the key/value pairs from new_inputs,
    overwriting existing keys.
    """
    prefix = self.input_name_prefix
    if strip_prefix and prefix is not None:
        # Keep only the inputs carrying our prefix, with the prefix removed;
        # non-matching keys are dropped
        for key, value in new_inputs.items():
            if key.startswith(prefix):
                self.inputs[key[len(prefix):]] = value
    else:
        self.inputs.update(new_inputs)
1.920299
1.773908
1.082524
def _update_requires_resolution_inputs(self):
    """
    Updates self.inputs with resolved input values (the input values that
    were provided as paths to items that require resolutions, eg. folder
    or job/analyses ids)
    """
    # Batch-resolve every queued path in one API round-trip; each queued
    # entry is a (input_name, input_value, input_class, input_index) quadruple
    input_paths = [quad[1] for quad in self.requires_resolution]
    results = resolve_multiple_existing_paths(input_paths)
    for input_name, input_value, input_class, input_index in self.requires_resolution:
        project = results[input_value]['project']
        folderpath = results[input_value]['folder']
        entity_result = results[input_value]['name']
        if input_class is None:
            # No declared class for this input: accept whatever the
            # resolution produced (job/analysis ID string or data object)
            if entity_result is not None:
                if isinstance(entity_result, basestring):
                    # Case: -ifoo=job-012301230123012301230123
                    # Case: -ifoo=analysis-012301230123012301230123
                    assert(is_job_id(entity_result) or (is_analysis_id(entity_result)))
                    input_value = entity_result
                elif is_hashid(input_value):
                    input_value = {'$dnanexus_link': entity_result['id']}
                elif 'describe' in entity_result:
                    # Then findDataObjects was called (returned describe hash)
                    input_value = {"$dnanexus_link": {"project": entity_result['describe']['project'],
                                                      "id": entity_result['id']}}
                else:
                    # Then resolveDataObjects was called in a batch (no describe hash)
                    input_value = {"$dnanexus_link": {"project": entity_result['project'],
                                                      "id": entity_result['id']}}
            # Fill the slot that was reserved for this value earlier;
            # input_index >= 0 means the input is an element of an array.
            # NOTE(review): if entity_result is None the original string
            # value appears to be stored as-is here — confirm against callers
            if input_index >= 0:
                if self.inputs[input_name][input_index] is not None:
                    raise AssertionError("Expected 'self.inputs' to have saved a spot for 'input_value'.")
                self.inputs[input_name][input_index] = input_value
            else:
                if self.inputs[input_name] is not None:
                    raise AssertionError("Expected 'self.inputs' to have saved a spot for 'input_value'.")
                self.inputs[input_name] = input_value
        else:
            # A specific class was declared: the value must resolve to a
            # data object whose ID matches that class
            msg = 'Value provided for input field "' + input_name + '" could not be parsed as ' + \
                  input_class + ': '
            if input_value == '':
                raise DXCLIError(msg + 'empty string cannot be resolved')
            if entity_result is None:
                raise DXCLIError(msg + 'could not resolve \"' + input_value + '\" to a name or ID')
            try:
                dxpy.bindings.verify_string_dxid(entity_result['id'], input_class)
            except DXError as details:
                raise DXCLIError(msg + str(details))
            if is_hashid(input_value):
                input_value = {'$dnanexus_link': entity_result['id']}
            elif 'describe' in entity_result:
                # Then findDataObjects was called (returned describe hash)
                input_value = {'$dnanexus_link': {"project": entity_result['describe']['project'],
                                                  "id": entity_result['id']}}
            else:
                # Then resolveDataObjects was called in a batch (no describe hash)
                input_value = {"$dnanexus_link": {"project": entity_result['project'],
                                                  "id": entity_result['id']}}
            if input_index != -1:
                # The class is an array, so append the resolved value
                self.inputs[input_name].append(input_value)
            else:
                self.inputs[input_name] = input_value
2.992466
2.858035
1.047036
def escape_unicode_string(u):
    """
    Escapes the nonprintable chars 0-31 and 127, and backslash; preferably
    with a friendly equivalent such as '\\n' if available, but otherwise with
    a Python-style backslashed hex escape.
    """
    def _substitute(match):
        code_point = ord(match.group(1))
        if code_point == 127:
            return "\\x7f"
        if code_point == 92:  # backslash
            return "\\\\"
        # All other control chars have a friendly escape in the table
        return REPLACEMENT_TABLE[code_point]
    return re.sub("([\\000-\\037\\134\\177])", _substitute, u)
3.25364
3.095028
1.051247
def format_tree(tree, root=None):
    '''
    Tree pretty printer.
    Expects trees to be given as mappings (dictionaries). Keys will be
    printed; values will be traversed if they are mappings. To preserve
    order, use collections.OrderedDict.

    Example:

        print format_tree(collections.OrderedDict({'foo': 0, 'bar': {'xyz': 0}}))
    '''
    # Fixed: collections.Mapping was removed in Python 3.10; use
    # collections.abc.Mapping with a fallback for Python 2
    try:
        _Mapping = collections.abc.Mapping
    except AttributeError:
        _Mapping = collections.Mapping

    formatted_tree = [root] if root is not None else []

    def _format(tree, prefix='    '):
        nodes = list(tree.keys())
        for i in range(len(nodes)):
            node = nodes[i]
            # Last sibling gets the corner connector; others get a tee
            if i == len(nodes)-1 and len(prefix) > 1:
                my_prefix = prefix[:-4] + '└── '
                my_multiline_prefix = prefix[:-4] + '    '
            else:
                my_prefix = prefix[:-4] + '├── '
                my_multiline_prefix = prefix[:-4] + '│   '
            # Multi-line node labels continue under the connector
            n = 0
            for line in node.splitlines():
                if n == 0:
                    formatted_tree.append(my_prefix + line)
                else:
                    formatted_tree.append(my_multiline_prefix + line)
                n += 1
            if isinstance(tree[node], _Mapping):
                subprefix = prefix
                # Keep drawing the vertical rule while siblings remain below
                if i < len(nodes)-1 and len(prefix) > 1 and prefix[-4:] == '    ':
                    subprefix = prefix[:-4] + '│   '
                _format(tree[node], subprefix + '    ')
    _format(tree)
    return '\n'.join(formatted_tree)
2.975997
1.978693
1.504021
def format_table(table, column_names=None, column_specs=None, max_col_width=32, report_dimensions=False):
    '''
    Table pretty printer. Expects tables to be given as arrays of arrays.

    Example:

        print format_table([[1, "2"], [3, "456"]], column_names=['A', 'B'])

    :param table: rows of cells; every row is expected to have the same length
    :param column_names: optional list of column headers
    :param column_specs: optional list of {"name": ..., "type": ...} dicts;
        when given, a synthetic leading "Row" column is added and headers are
        colorized by type
    :param max_col_width: cells and headers longer than this are truncated
        and suffixed with '…'
    :param report_dimensions: if True, also return the height and width of
        the rendered table
    :returns: the formatted table string, or a (string, height, width) tuple
        when *report_dimensions* is True
    '''
    # Seed the per-column width tracker from whichever input is available.
    # NOTE(review): if table is empty and neither column_specs nor
    # column_names is given, col_widths is never bound and the border
    # construction below would raise NameError — presumably callers always
    # supply at least one of the three; verify.
    if len(table) > 0:
        col_widths = [0] * len(list(table)[0])
    elif column_specs is not None:
        col_widths = [0] * (len(column_specs) + 1)
    elif column_names is not None:
        col_widths = [0] * len(column_names)
    my_column_names = []
    if column_specs is not None:
        # column_specs implies an extra leading "Row" column
        column_names = ['Row']
        column_names.extend([col['name'] for col in column_specs])
        column_specs = [{'name': 'Row', 'type': 'float'}] + column_specs
    if column_names is not None:
        # Truncate headers to max_col_width and record their widths
        for i in range(len(column_names)):
            my_col = str(column_names[i])
            if len(my_col) > max_col_width:
                my_col = my_col[:max_col_width-1] + '…'
            my_column_names.append(my_col)
            col_widths[i] = max(col_widths[i], len(my_col))
    my_table = []
    for row in table:
        # Stringify, escape nonprintable characters, and truncate each cell
        my_row = []
        for i in range(len(row)):
            my_item = escape_unicode_string(str(row[i]))
            if len(my_item) > max_col_width:
                my_item = my_item[:max_col_width-1] + '…'
            my_row.append(my_item)
            col_widths[i] = max(col_widths[i], len(my_item))
        my_table.append(my_row)

    def border(i):
        # Wrap a box-drawing character in the border color
        return WHITE() + i + ENDC()

    # Header color per declared column type (when column_specs is given)
    type_colormap = {'boolean': BLUE(),
                     'integer': YELLOW(),
                     'float': WHITE(),
                     'string': GREEN()}
    for i in 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64':
        type_colormap[i] = type_colormap['integer']
    type_colormap['double'] = type_colormap['float']

    def col_head(i):
        # Render the i-th header cell, colorized by type when specs are known
        if column_specs is not None:
            return BOLD() + type_colormap[column_specs[i]['type']] + column_names[i] + ENDC()
        else:
            return BOLD() + WHITE() + column_names[i] + ENDC()

    # Top border
    formatted_table = [border('┌') + border('┬').join(border('─')*i for i in col_widths) + border('┐')]
    if len(my_column_names) > 0:
        # Header row, then the separator under it
        padded_column_names = [col_head(i) + ' '*(col_widths[i]-len(my_column_names[i]))
                               for i in range(len(my_column_names))]
        formatted_table.append(border('│') + border('│').join(padded_column_names) + border('│'))
        formatted_table.append(border('├') + border('┼').join(border('─')*i for i in col_widths) + border('┤'))
    for row in my_table:
        # Data rows, right-padded to the column width
        padded_row = [row[i] + ' '*(col_widths[i]-len(row[i])) for i in range(len(row))]
        formatted_table.append(border('│') + border('│').join(padded_row) + border('│'))
    # Bottom border
    formatted_table.append(border('└') + border('┴').join(border('─')*i for i in col_widths) + border('┘'))

    if report_dimensions:
        # width = sum of content widths + one border column per separator/edge
        return '\n'.join(formatted_table), len(formatted_table), sum(col_widths) + len(col_widths) + 1
    else:
        return '\n'.join(formatted_table)
Table pretty printer. Expects tables to be given as arrays of arrays. Example: print format_table([[1, "2"], [3, "456"]], column_names=['A', 'B'])
1.932227
1.804284
1.070911
def flatten_json_array(json_string, array_name):
    """
    Flatten every array named *array_name* in a pretty-printed JSON string
    onto a single line, preserving single spaces between elements.

    :param json_string: JSON string
    :type json_string: str
    :param array_name: name of the array(s) to flatten
    :type array_name: str
    :returns: the JSON string with matching arrays collapsed to one line
    """
    # Precompile the three rewrite patterns for the named array.
    open_re = re.compile('"{}": \\[\r?\n\\s*'.format(array_name), flags=re.MULTILINE)
    join_re = re.compile('"{}": \\[(.*)(?<=,)\r?\n\\s*'.format(array_name), flags=re.MULTILINE)
    close_re = re.compile('"{}": \\[(.*)\r?\n\\s*\\]'.format(array_name), flags=re.MULTILINE)

    # Pull the first element up onto the '[' line.
    flattened = open_re.sub('"{}": ['.format(array_name), json_string)
    # Repeatedly merge the next line while the current line ends with a comma.
    while join_re.search(flattened):
        flattened = join_re.sub('"{}": [\\1 '.format(array_name), flattened)
    # Finally pull the closing ']' up as well.
    return close_re.sub('"{}": [\\1]'.format(array_name), flattened)
Flattens all arrays with the same name in the JSON string :param json_string: JSON string :type json_string: str :param array_name: Array name to flatten :type array_name: str
2.453167
2.437629
1.006374
def new_dxworkflow(title=None, summary=None, description=None, output_folder=None, init_from=None, **kwargs):
    '''
    :param title: Workflow title (optional)
    :type title: string
    :param summary: Workflow summary (optional)
    :type summary: string
    :param description: Workflow description (optional)
    :type description: string
    :param output_folder: Default output folder of the workflow (optional)
    :type output_folder: string
    :param init_from: Workflow handler or analysis (handler or ID string)
        from which to initialize the metadata (optional)
    :type init_from: :class:`~dxpy.bindings.dxworkflow.DXWorkflow`,
        :class:`~dxpy.bindings.dxanalysis.DXAnalysis`, or string (analysis IDs only)
    :rtype: :class:`DXWorkflow`

    Convenience wrapper that creates a new remote workflow object and
    returns a handler for it. Additional optional parameters: all those
    accepted by :func:`dxpy.bindings.DXDataObject.new`, except `details`.

    Equivalent to constructing a :class:`DXWorkflow` and calling its
    ``new`` method with the same arguments.
    '''
    handler = DXWorkflow()
    handler.new(title=title, summary=summary, description=description,
                output_folder=output_folder, init_from=init_from, **kwargs)
    return handler
:param title: Workflow title (optional) :type title: string :param summary: Workflow summary (optional) :type summary: string :param description: Workflow description (optional) :type description: string :param output_folder: Default output folder of the workflow (optional) :type output_folder: string :param init_from: Another analysis workflow object handler or and analysis (string or handler) from which to initialize the metadata (optional) :type init_from: :class:`~dxpy.bindings.dxworkflow.DXWorkflow`, :class:`~dxpy.bindings.dxanalysis.DXAnalysis`, or string (for analysis IDs only) :rtype: :class:`DXWorkflow` Additional optional parameters not listed: all those under :func:`dxpy.bindings.DXDataObject.new`, except `details`. Creates a new remote workflow object with project set to *project* and returns the appropriate handler. Example: r = dxpy.new_dxworkflow(title="My Workflow", description="This workflow contains...") Note that this function is shorthand for:: dxworkflow = DXWorkflow() dxworkflow.new(**kwargs)
3.820858
1.198084
3.18914
def _new(self, dx_hash, **kwargs):
    """
    :param dx_hash: Standard hash populated in
        :func:`dxpy.bindings.DXDataObject.new()` containing attributes
        common to all data object classes.
    :type dx_hash: dict
    :param title: Workflow title (optional)
    :param summary: Workflow summary (optional)
    :param description: Workflow description (optional)
    :param output_folder: Default output folder of the workflow (optional)
    :param stages: Stages of the workflow (optional)
    :param workflow_inputs: Workflow-level input specification (optional)
    :param workflow_outputs: Workflow-level output specification (optional)
    :param init_from: Workflow handler or analysis (handler or analysis-ID
        string) from which to initialize the metadata (optional)

    Create a new remote workflow object by assembling the
    ``/workflow/new`` API request hash and issuing the call.
    """
    # Move kwargs[key] into dxhash[new_key] (renaming if requested),
    # dropping None values, and consume the kwarg either way.
    def _set_dx_hash(kwargs, dxhash, key, new_key=None):
        new_key = key if new_key is None else new_key
        if key in kwargs:
            if kwargs[key] is not None:
                dxhash[new_key] = kwargs[key]
            del kwargs[key]

    # init_from is handled specially: it must be a DXWorkflow/DXAnalysis
    # handler or an analysis-ID string, and maps to "initializeFrom".
    if "init_from" in kwargs:
        if kwargs["init_from"] is not None:
            if not (isinstance(kwargs["init_from"], (DXWorkflow, DXAnalysis)) or \
                    (isinstance(kwargs["init_from"], basestring) and \
                     re.compile('^analysis-[0-9A-Za-z]{24}$').match(kwargs["init_from"]))):
                raise DXError("Expected init_from to be an instance of DXWorkflow or DXAnalysis, or to be a string analysis ID.")
            if isinstance(kwargs["init_from"], basestring):
                dx_hash["initializeFrom"] = {"id": kwargs["init_from"]}
            else:
                dx_hash["initializeFrom"] = {"id": kwargs["init_from"].get_id()}
                # Only workflows carry a project; analyses do not.
                if isinstance(kwargs["init_from"], DXWorkflow):
                    dx_hash["initializeFrom"]["project"] = kwargs["init_from"].get_proj_id()
        del kwargs["init_from"]

    # Copy the remaining optional fields into the request hash,
    # renaming to the API's camelCase field names where needed.
    _set_dx_hash(kwargs, dx_hash, "title")
    _set_dx_hash(kwargs, dx_hash, "summary")
    _set_dx_hash(kwargs, dx_hash, "description")
    _set_dx_hash(kwargs, dx_hash, "output_folder", "outputFolder")
    _set_dx_hash(kwargs, dx_hash, "stages")
    _set_dx_hash(kwargs, dx_hash, "workflow_inputs", "inputs")
    _set_dx_hash(kwargs, dx_hash, "workflow_outputs", "outputs")

    # Any kwargs left over are passed through to the API call itself.
    resp = dxpy.api.workflow_new(dx_hash, **kwargs)
    self.set_ids(resp["id"], dx_hash["project"])
:param dx_hash: Standard hash populated in :func:`dxpy.bindings.DXDataObject.new()` containing attributes common to all data object classes. :type dx_hash: dict :param title: Workflow title (optional) :type title: string :param summary: Workflow summary (optional) :type summary: string :param description: Workflow description (optional) :type description: string :param output_folder: Default output folder of the workflow (optional) :type output_folder: string :param stages: Stages of the workflow (optional) :type stages: array of dictionaries :param workflow_inputs: Workflow-level input specification (optional) :type workflow_inputs: array of dictionaries :param workflow_outputs: Workflow-level output specification (optional) :type workflow_outputs: array of dictionaries :param init_from: Another analysis workflow object handler or and analysis (string or handler) from which to initialize the metadata (optional) :type init_from: :class:`~dxpy.bindings.dxworkflow.DXWorkflow`, :class:`~dxpy.bindings.dxanalysis.DXAnalysis`, or string (for analysis IDs only) Create a new remote workflow object.
2.256489
1.871367
1.205797
def _get_stage_id(self, stage):
    '''
    :param stage: A stage ID, name, or index (stage index is the number n
                  for the nth stage, starting from 0; can be provided as
                  an int or a string)
    :type stage: int or string
    :returns: The stage ID (this is a no-op if it was already a stage ID)
    :raises: :class:`~dxpy.exceptions.DXError` if *stage* could not be
             parsed, resolved to a stage ID, or it could not be found in
             the workflow
    '''
    # first, if it is a string, see if it is an integer
    if isinstance(stage, basestring):
        try:
            stage = int(stage)
        except:
            # we'll try parsing it as a string later
            pass

    if not isinstance(stage, basestring):
        # Try to parse as stage index; ensure that if it's not a
        # string that it is an integer at this point.
        try:
            stage_index = int(stage)
        except:
            raise DXError('DXWorkflow: the given stage identifier was neither a string stage ID nor an integer index')
        if stage_index < 0 or stage_index >= len(self.stages):
            raise DXError('DXWorkflow: the workflow contains ' + str(len(self.stages)) + \
                          ' stage(s), and the numerical value of the given stage identifier is out of range')
        return self.stages[stage_index].get("id")

    # Only strings that look like identifiers (or stage-xxxx IDs) can be
    # stage IDs; anything else falls through to name matching.
    if re.compile('^([a-zA-Z_]|stage-)[0-9a-zA-Z_]*$').match(stage) is not None:
        # Check if there exists a stage with this stage id
        stage_id_exists = any([stg['id'] for stg in self.stages if stg.get('id') == stage])
        if stage_id_exists:
            return stage

    # A stage with the provided ID can't be found in the workflow, so look for it as a name
    stage_ids_matching_name = [stg['id'] for stg in self.stages if stg.get('name') == stage]
    if len(stage_ids_matching_name) == 0:
        raise DXError('DXWorkflow: the given stage identifier ' + stage + ' could not be found as a stage ID nor as a stage name')
    elif len(stage_ids_matching_name) > 1:
        # Names are not unique; refuse to guess between duplicates
        raise DXError('DXWorkflow: more than one workflow stage was found to have the name "' + stage + '"')
    else:
        return stage_ids_matching_name[0]
:param stage: A stage ID, name, or index (stage index is the number n for the nth stage, starting from 0; can be provided as an int or a string) :type stage: int or string :returns: The stage ID (this is a no-op if it was already a stage ID) :raises: :class:`~dxpy.exceptions.DXError` if *stage* could not be parsed, resolved to a stage ID, or it could not be found in the workflow
3.288272
2.406272
1.366542
def add_stage(self, executable, stage_id=None, name=None, folder=None, stage_input=None, instance_type=None, edit_version=None, **kwargs):
    '''
    :param executable: string or a handler for an app or applet
    :type executable: string, DXApplet, or DXApp
    :param stage_id: id for the stage (optional)
    :type stage_id: string
    :param name: name for the stage (optional)
    :type name: string
    :param folder: default output folder for the stage; either a relative
                   or absolute path (optional)
    :type folder: string
    :param stage_input: input fields to bind as default inputs for the
                        executable (optional)
    :type stage_input: dict
    :param instance_type: default instance type for all jobs of this
                          stage, or a dict mapping function names to
                          instance type requests
    :type instance_type: string or dict
    :param edit_version: edit version of the workflow to modify; defaults
                         to the current edit version (optional)
    :type edit_version: int
    :returns: ID of the added stage
    :rtype: string
    :raises: :class:`~dxpy.exceptions.DXError` if *executable* is not an
             expected type;
             :class:`~dxpy.exceptions.DXAPIError` for errors thrown from
             the API call

    Adds the specified executable as a new stage in the workflow.
    '''
    # Accept either an ID string or an executable handler.
    if isinstance(executable, basestring):
        exec_id = executable
    elif isinstance(executable, DXExecutable):
        exec_id = executable.get_id()
    else:
        raise DXError("dxpy.DXWorkflow.add_stage: executable must be a string or an instance of DXApplet or DXApp")

    request = {"executable": exec_id}
    # Optional scalar fields, copied only when provided.
    for field, value in (("id", stage_id),
                         ("name", name),
                         ("folder", folder),
                         ("input", stage_input)):
        if value is not None:
            request[field] = value
    if instance_type is not None:
        request["systemRequirements"] = SystemRequirementsDict.from_instance_type(instance_type).as_dict()
    self._add_edit_version_to_request(request, edit_version)

    try:
        api_response = dxpy.api.workflow_add_stage(self._dxid, request, **kwargs)
    finally:
        self.describe()  # update cached describe
    return api_response['stage']
:param executable: string or a handler for an app or applet :type executable: string, DXApplet, or DXApp :param stage_id: id for the stage (optional) :type stage_id: string :param name: name for the stage (optional) :type name: string :param folder: default output folder for the stage; either a relative or absolute path (optional) :type folder: string :param stage_input: input fields to bind as default inputs for the executable (optional) :type stage_input: dict :param instance_type: Default instance type on which all jobs will be run for this stage, or a dict mapping function names to instance type requests :type instance_type: string or dict :param edit_version: if provided, the edit version of the workflow that should be modified; if not provided, the current edit version will be used (optional) :type edit_version: int :returns: ID of the added stage :rtype: string :raises: :class:`~dxpy.exceptions.DXError` if *executable* is not an expected type :class:`~dxpy.exceptions.DXAPIError` for errors thrown from the API call Adds the specified executable as a new stage in the workflow.
3.008541
1.557761
1.931324
def get_stage(self, stage, **kwargs):
    '''
    :param stage: A number for the stage index (for the nth stage,
                  starting from 0), or a string of the stage index, name,
                  or ID
    :type stage: int or string
    :returns: Hash of stage descriptor in workflow
    '''
    stage_id = self._get_stage_id(stage)
    # Scan the cached stage list for the resolved ID.
    for descriptor in self.stages:
        if descriptor['id'] == stage_id:
            return descriptor
    raise DXError('The stage ID ' + stage_id + ' could not be found')
:param stage: A number for the stage index (for the nth stage, starting from 0), or a string of the stage index, name, or ID :type stage: int or string :returns: Hash of stage descriptor in workflow
5.324955
2.203561
2.416523
def remove_stage(self, stage, edit_version=None, **kwargs):
    '''
    :param stage: A number for the stage index (for the nth stage,
                  starting from 0), or a string of the stage index, name,
                  or ID
    :type stage: int or string
    :param edit_version: edit version of the workflow to modify; defaults
                         to the current edit version (optional)
    :type edit_version: int
    :returns: Stage ID that was removed
    :rtype: string

    Removes the specified stage from the workflow.
    '''
    stage_id = self._get_stage_id(stage)
    request = {"stage": stage_id}
    self._add_edit_version_to_request(request, edit_version)
    try:
        dxpy.api.workflow_remove_stage(self._dxid, request, **kwargs)
    finally:
        self.describe()  # update cached describe
    return stage_id
:param stage: A number for the stage index (for the nth stage, starting from 0), or a string of the stage index, name, or ID :type stage: int or string :param edit_version: if provided, the edit version of the workflow that should be modified; if not provided, the current edit version will be used (optional) :type edit_version: int :returns: Stage ID that was removed :rtype: string Removes the specified stage from the workflow
4.115467
2.078716
1.979812
def move_stage(self, stage, new_index, edit_version=None, **kwargs):
    '''
    :param stage: A number for the stage index (for the nth stage,
                  starting from 0), or a string of the stage index, name,
                  or ID
    :type stage: int or string
    :param new_index: The new position in the order of stages that the
                      specified stage should have (where 0 indicates the
                      first stage)
    :type new_index: int
    :param edit_version: edit version of the workflow to modify; defaults
                         to the current edit version (optional)
    :type edit_version: int

    Moves the specified stage to the given position in the workflow's
    stage ordering.
    '''
    stage_id = self._get_stage_id(stage)
    request = {"stage": stage_id, "newIndex": new_index}
    self._add_edit_version_to_request(request, edit_version)
    try:
        dxpy.api.workflow_move_stage(self._dxid, request, **kwargs)
    finally:
        self.describe()  # update cached describe
:param stage: A number for the stage index (for the nth stage, starting from 0), or a string of the stage index, name, or ID :type stage: int or string :param new_index: The new position in the order of stages that the specified stage should have (where 0 indicates the first stage) :type new_index: int :param edit_version: if provided, the edit version of the workflow that should be modified; if not provided, the current edit version will be used (optional) :type edit_version: int Moves the specified stage to the given position within the workflow
3.865064
1.831969
2.109787
''' :param input_str: A string of one of the forms: "<exported input field name>", "<explicit workflow input field name>", "<stage ID>.<input field name>", "<stage index>.<input field name>", "<stage name>.<input field name>" :type input_str: string :returns: If the given form was one of those which uses the stage index or stage name, it is translated to the stage ID for use in the API call (stage name takes precedence) ''' if '.' in input_str: stage_identifier, input_name = input_str.split('.', 1) # Try to parse as a stage ID or name return self._get_stage_id(stage_identifier) + '.' + input_name return input_str
def _get_input_name(self, input_str, region=None, describe_output=None)
:param input_str: A string of one of the forms: "<exported input field name>", "<explicit workflow input field name>", "<stage ID>.<input field name>", "<stage index>.<input field name>", "<stage name>.<input field name>" :type input_str: string :returns: If the given form was one of those which uses the stage index or stage name, it is translated to the stage ID for use in the API call (stage name takes precedence)
6.082314
1.842202
3.301654
def get_folder_matches(text, delim_pos, dxproj, folderpath):
    '''
    :param text: String to be tab-completed; still in escaped form
    :type text: string
    :param delim_pos: index of last unescaped "/" in text
    :type delim_pos: int
    :param dxproj: DXProject handler to use
    :type dxproj: DXProject
    :param folderpath: Unescaped path in which to search for folder matches
    :type folderpath: string
    :returns: List of matches
    :rtype: list of strings

    Members of the returned list are guaranteed to start with *text* and
    be in escaped form for consumption by the command-line.
    '''
    try:
        listing = dxproj.list_folder(folder=folderpath, only='folders')['folders']
        basenames = [path[path.rfind('/') + 1:] for path in listing]
        # Offer '.' and '..' once the user has typed past the delimiter.
        if text != '' and delim_pos != len(text) - 1:
            basenames += ['.', '..']
        prefix = text[:delim_pos + 1]
        matches = []
        for basename in basenames:
            candidate = prefix + basename + '/'
            if candidate.startswith(text):
                matches.append(candidate)
        return matches
    except:
        # Tab completion is best-effort: any failure yields no matches.
        return []
:param text: String to be tab-completed; still in escaped form :type text: string :param delim_pos: index of last unescaped "/" in text :type delim_pos: int :param dxproj: DXProject handler to use :type dxproj: DXProject :param folderpath: Unescaped path in which to search for folder matches :type folderpath: string :returns: List of matches :rtype: list of strings Members of the returned list are guaranteed to start with *text* and be in escaped form for consumption by the command-line.
4.071644
1.893395
2.150446
def get_data_matches(text, delim_pos, dxproj, folderpath, classname=None, typespec=None, visibility=None):
    '''
    :param text: String to be tab-completed; still in escaped form
    :type text: string
    :param delim_pos: index of last unescaped "/" or ":" in text
    :type delim_pos: int
    :param dxproj: DXProject handler to use
    :type dxproj: DXProject
    :param folderpath: Unescaped path in which to search for data object matches
    :type folderpath: string
    :param classname: Data object class by which to restrict the search
        (None for no restriction on class)
    :type classname: string
    :param visibility: Visibility to constrain the results to; default is
        "visible" for empty strings, "either" for nonempty
    :type visibility: string
    :returns: List of matches
    :rtype: list of strings

    Members of the returned list are guaranteed to start with *text* and
    be in escaped form for consumption by the command-line.
    '''
    unescaped_text = text[delim_pos + 1:]

    if visibility is None:
        # Mid-path completion searches both visible and hidden objects.
        mid_path = text != '' and delim_pos != len(text) - 1
        visibility = "either" if mid_path else "visible"

    try:
        found = dxpy.find_data_objects(project=dxproj.get_id(),
                                       folder=folderpath,
                                       name=unescaped_text + "*",
                                       name_mode="glob",
                                       recurse=False,
                                       visibility=visibility,
                                       classname=classname,
                                       limit=100,
                                       describe=dict(fields=dict(name=True)),
                                       typename=typespec)
        prefix = '' if text == '' else text[:delim_pos + 1]
        return [prefix + escape_name(item['describe']['name']) for item in found]
    except:
        # Tab completion is best-effort: any failure yields no matches.
        return []
:param text: String to be tab-completed; still in escaped form :type text: string :param delim_pos: index of last unescaped "/" or ":" in text :type delim_pos: int :param dxproj: DXProject handler to use :type dxproj: DXProject :param folderpath: Unescaped path in which to search for data object matches :type folderpath: string :param classname: Data object class by which to restrict the search (None for no restriction on class) :type classname: string :param visibility: Visibility to constrain the results to; default is "visible" for empty strings, "either" for nonempty :type visibility: string :returns: List of matches :rtype: list of strings Members of the returned list are guaranteed to start with *text* and be in escaped form for consumption by the command-line.
4.109764
1.9411
2.117235
def verify_string_dxid(dxid, expected_classes):
    '''
    :param dxid: Value to verify as a DNAnexus ID of class *expected_class*
    :param expected_classes: Single string or list of strings of allowed
        classes of the ID, e.g. "file" or ["project", "container"]
    :type expected_classes: string or list of strings
    :raises: :exc:`~dxpy.exceptions.DXError` if *dxid* is not a string or
        is not a valid DNAnexus ID of the expected class
    '''
    # Normalize a single class name into a one-element list.
    if isinstance(expected_classes, basestring):
        expected_classes = [expected_classes]
    if not isinstance(expected_classes, list) or len(expected_classes) == 0:
        raise DXError('verify_string_dxid: expected_classes should be a string or list of strings')

    # A valid ID is "<class>-" followed by exactly 24 alphanumerics.
    pattern = '^(' + '|'.join(expected_classes) + ')-[0-9a-zA-Z]{24}$'
    if not (isinstance(dxid, basestring) and re.match(pattern, dxid)):
        # Build an English enumeration of the allowed classes for the error.
        if len(expected_classes) == 1:
            class_desc = expected_classes[0]
        elif len(expected_classes) == 2:
            class_desc = ' or '.join(expected_classes)
        else:
            class_desc = ', '.join(expected_classes[:-1]) + ', or ' + expected_classes[-1]
        raise DXError('Invalid ID of class %s: %r' % (class_desc, dxid))
:param dxid: Value to verify as a DNAnexus ID of class *expected_class* :param expected_classes: Single string or list of strings of allowed classes of the ID, e.g. "file" or ["project", "container"] :type expected_classes: string or list of strings :raises: :exc:`~dxpy.exceptions.DXError` if *dxid* is not a string or is not a valid DNAnexus ID of the expected class
2.23632
1.574541
1.4203
def set_id(self, dxid):
    '''
    :param dxid: New ID to be associated with the handler
    :type dxid: string

    Discards the currently stored ID and associates the handler with
    *dxid*. A value of None simply clears the stored ID.
    '''
    if dxid is None:
        self._dxid = None
    else:
        # Reject anything that is not a well-formed ID of this class.
        verify_string_dxid(dxid, self._class)
        self._dxid = dxid
:param dxid: New ID to be associated with the handler :type dxid: string Discards the currently stored ID and associates the handler with *dxid*
6.197567
3.178561
1.949803
def new(self, **kwargs):
    '''
    :param project: Project ID in which to create the new remote object
    :type project: string
    :param name: Name for the object
    :type name: string
    :param tags: Tags to add for the object
    :type tags: list of strings
    :param types: Types to add to the object
    :type types: list of strings
    :param hidden: Whether the object is to be hidden
    :type hidden: boolean
    :param properties: Properties given as key-value pairs of strings
    :type properties: dict
    :param details: Details to set for the object
    :type details: dict or list
    :param folder: Full path to the destination folder
    :type folder: string
    :param parents: If True, recursively create all parent folders if
        they are missing
    :type parents: boolean
    :rtype: :class:`DXDataObject`

    Creates a data object with the given fields. Only *project* is
    required, and only if no default project or workspace is set; the
    remaining arguments are optional and have default behavior as
    specified in the API documentation for the ``/new`` method of each
    data object class.
    '''
    # Guard: only concrete subclasses define _class; instantiating the
    # abstract base directly is an error.
    if not hasattr(self, '_class'):
        # Fix: the original message concatenated "should" + "be" with no
        # separating space, producing "shouldbe initialized".
        raise NotImplementedError(
            "DXDataObject is an abstract class; a subclass should " +
            "be initialized instead.")

    # Split kwargs into the standard creation hash and class-specific args.
    dx_hash, remaining_kwargs = self._get_creation_params(kwargs)
    self._new(dx_hash, **remaining_kwargs)
:param project: Project ID in which to create the new remote object :type project: string :param name: Name for the object :type name: string :param tags: Tags to add for the object :type tags: list of strings :param types: Types to add to the object :type types: list of strings :param hidden: Whether the object is to be hidden :type hidden: boolean :param properties: Properties given as key-value pairs of strings :type properties: dict :param details: Details to set for the object :type details: dict or list :param folder: Full path to the destination folder :type folder: string :param parents: If True, recursively create all parent folders if they are missing :type parents: boolean :rtype: :class:`DXDataObject` Creates a data object with the given fields. Only *project* is required, and only if no default project or workspace is set; the remaining arguments are optional and have default behavior as specified in the API documentation for the ``/new`` method of each data object class.
5.22427
1.707572
3.059473
def set_ids(self, dxid, project=None):
    '''
    :param dxid: Object ID or a DNAnexus link (a dict with key
        "$dnanexus_link"); if a project ID is provided in the DNAnexus
        link, it will be used as *project* unless *project* has been
        explicitly provided
    :type dxid: string or dict
    :param project: Project ID
    :type project: string

    Discards the currently stored ID and associates the handler with
    *dxid*. Associates the handler with the copy of the object in
    *project* (if no project is explicitly specified, the default data
    container is used).
    '''
    # A DNAnexus link may carry both the object ID and its project.
    if is_dxlink(dxid):
        dxid, project_from_link = get_dxlink_ids(dxid)
        if project is None:
            project = project_from_link

    if dxid is not None:
        verify_string_dxid(dxid, self._class)
    self._dxid = dxid

    if project is None:
        # Fall back to the default workspace container.
        self._proj = dxpy.WORKSPACE_ID
    else:
        # Fix: was "elif project is not None", which is always true at
        # this point; a plain else expresses the same logic.
        verify_string_dxid(project, ['project', 'container'])
        self._proj = project
:param dxid: Object ID or a DNAnexus link (a dict with key "$dnanexus_link"); if a project ID is provided in the DNAnexus link, it will be used as *project* unless *project* has been explicitly provided :type dxid: string or dict :param project: Project ID :type project: string Discards the currently stored ID and associates the handler with *dxid*. Associates the handler with the copy of the object in *project* (if no project is explicitly specified, the default data container is used).
4.967628
1.940477
2.560003
def describe(self, incl_properties=False, incl_details=False, fields=None, default_fields=None, **kwargs):
    '''
    :param fields: set of fields to include in the output; ``id`` is
        always implicitly included. When given, the default fields are
        omitted unless *default_fields* is also True.
    :type fields: set or sequence of str
    :param default_fields: if True, include the default fields in
        addition to *fields*; if False, return only *fields* (defaults
        to False if *fields* is specified, True otherwise)
    :type default_fields: bool
    :param incl_properties: if true, include the object's properties
        (deprecated; use ``fields={'properties'}, default_fields=True``)
    :type incl_properties: bool
    :param incl_details: if true, include the object's details
        (deprecated; use ``fields={'details'}, default_fields=True``)
    :type incl_details: bool
    :returns: Description of the remote object
    :rtype: dict

    Return a dict describing the remote data object, per the API
    documentation for the ``/describe`` method of each data object
    class. Project-specific metadata (name, properties, tags) comes from
    the copy of the object in the project associated with the handler,
    if possible. The result is also cached on the handler.
    '''
    if self._dxid is None:
        raise DXError('This {handler} handler has not been initialized with a {_class} ID and cannot be described'.format(
            handler=self.__class__.__name__, _class=self._class))

    legacy_flags = incl_properties or incl_details
    # The deprecated flags cannot be combined with the field selectors.
    if legacy_flags and (fields is not None or default_fields is not None):
        raise ValueError('Cannot specify properties or details in conjunction with fields or default_fields')

    describe_input = dict(properties=incl_properties, details=incl_details) if legacy_flags else {}
    if default_fields is not None:
        describe_input['defaultFields'] = default_fields
    if fields is not None:
        describe_input['fields'] = {name: True for name in fields}
    if self._proj is not None:
        describe_input["project"] = self._proj

    self._desc = self._describe(self._dxid, describe_input, **kwargs)
    return self._desc
:param fields: set of fields to include in the output, for example ``{'name', 'modified'}``. The field ``id`` is always implicitly included. If ``fields`` is specified, the default fields are not included (that is, only the fields specified here, and ``id``, are included) unless ``default_fields`` is additionally set to True. :type fields: set or sequence of str :param default_fields: if True, include the default fields in addition to fields requested in ``fields``, if any; if False, only the fields specified in ``fields``, if any, are returned (defaults to False if ``fields`` is specified, True otherwise) :type default_fields: bool :param incl_properties: if true, includes the properties of the object in the output (deprecated; use ``fields={'properties'}, default_fields=True`` instead) :type incl_properties: bool :param incl_details: if true, includes the details of the object in the output (deprecated; use ``fields={'details'}, default_fields=True`` instead) :type incl_details: bool :returns: Description of the remote object :rtype: dict Return a dict with a description of the remote data object. The result includes the key-value pairs as specified in the API documentation for the ``/describe`` method of each data object class. The API defines some default set of fields that will be included (at a minimum, "id", "class", etc. should be available, and there may be additional fields that vary based on the class); the set of fields may be customized using ``fields`` and ``default_fields``. Any project-specific metadata fields (name, properties, and tags) are obtained from the copy of the object in the project associated with the handler, if possible.
2.843469
2.93943
0.967354
def add_types(self, types, **kwargs):
    '''
    :param types: Types to add to the object
    :type types: list of strings
    :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in
        the "open" state

    Adds each of the specified types to the remote object. Takes no
    action for types that are already listed for the object.
    '''
    payload = {"types": types}
    self._add_types(self._dxid, payload, **kwargs)
:param types: Types to add to the object :type types: list of strings :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in the "open" state Adds each of the specified types to the remote object. Takes no action for types that are already listed for the object.
18.489666
9.4013
1.966714
def remove_types(self, types, **kwargs):
    '''
    :param types: Types to remove from the object
    :type types: list of strings
    :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in
        the "open" state

    Removes each of the specified types from the remote object. Takes no
    action for types that the object does not currently have.
    '''
    payload = {"types": types}
    self._remove_types(self._dxid, payload, **kwargs)
:param types: Types to remove from the object :type types: list of strings :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in the "open" state Removes each the specified types from the remote object. Takes no action for types that the object does not currently have.
16.094786
10.466966
1.537674
def set_details(self, details, **kwargs):
    '''
    :param details: Details to set for the object
    :type details: dict or list
    :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in
        the "open" state

    Sets the details for the remote object to the given value. If the
    input contains the string ``"$dnanexus_link"`` as a key in a hash,
    it must be the only key in the hash, and its value must be a valid
    ID of an existing object.
    '''
    result = self._set_details(self._dxid, details, **kwargs)
    return result
:param details: Details to set for the object :type details: dict or list :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in the "open" state Sets the details for the remote object with the specified value. If the input contains the string ``"$dnanexus_link"`` as a key in a hash, it must be the only key in the hash, and its value must be a valid ID of an existing object.
11.69878
10.412347
1.123549
def rename(self, name, **kwargs):
    """Rename the remote object.

    :param name: New name for the object
    :type name: string

    The name is changed on the copy of the object in the project
    associated with this handler.
    """
    payload = {"project": self._proj, "name": name}
    return self._rename(self._dxid, payload, **kwargs)
:param name: New name for the object :type name: string Renames the remote object. The name is changed on the copy of the object in the project associated with the handler.
14.647871
13.684221
1.070421
def set_properties(self, properties, **kwargs):
    """Set (or delete) properties on the remote object.

    :param properties: Property names and values given as key-value pairs of strings
    :type properties: dict

    A value of :const:`None` deletes that property; properties not
    mentioned are left unchanged. The properties are written to the
    copy of the object in the project associated with this handler.
    """
    payload = {"project": self._proj, "properties": properties}
    self._set_properties(self._dxid, payload, **kwargs)
:param properties: Property names and values given as key-value pairs of strings :type properties: dict Given key-value pairs in *properties* for property names and values, the properties are set on the object for the given property names. Any property with a value of :const:`None` indicates the property will be deleted. .. note:: Any existing properties not mentioned in *properties* are not modified by this method. The properties are written to the copy of the object in the project associated with the handler. The following example sets the properties for "name" and "project" for a remote file:: dxfile.set_properties({"name": "George", "project": "cancer"}) Subsequently, the following would delete the property "project":: dxfile.set_properties({"project": None})
12.91025
10.635627
1.213868
def add_tags(self, tags, **kwargs):
    """Add each of the given tags to the remote object.

    :param tags: Tags to add to the object
    :type tags: list of strings

    Tags already present are ignored. The tags are added to the copy
    of the object in the project associated with this handler.
    """
    payload = {"project": self._proj, "tags": tags}
    self._add_tags(self._dxid, payload, **kwargs)
:param tags: Tags to add to the object :type tags: list of strings Adds each of the specified tags to the remote object. Takes no action for tags that are already listed for the object. The tags are added to the copy of the object in the project associated with the handler.
11.013156
10.940007
1.006686
def remove_tags(self, tags, **kwargs):
    """Remove each of the given tags from the remote object.

    :param tags: Tags to remove from the object
    :type tags: list of strings

    Tags the object does not currently have are ignored. The tags are
    removed from the copy of the object in the project associated with
    this handler.
    """
    payload = {"project": self._proj, "tags": tags}
    self._remove_tags(self._dxid, payload, **kwargs)
:param tags: Tags to remove from the object :type tags: list of strings Removes each of the specified tags from the remote object. Takes no action for tags that the object does not currently have. The tags are removed from the copy of the object in the project associated with the handler.
11.367187
10.744906
1.057914
def remove(self, **kwargs):
    """Permanently remove the remote object from its associated project.

    :raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object

    After a successful removal the handler's ID, project, and cached
    description are cleared, since the object no longer exists remotely.
    """
    project = self._proj
    if project is None:
        raise DXError("Remove called when a project ID was not associated with this object handler")
    dxpy.api.project_remove_objects(project, {"objects": [self._dxid]}, **kwargs)
    # Forget the remote identity now that the object is gone.
    self._dxid = None
    self._proj = None
    self._desc = {}
:raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object Permanently removes the associated remote object from the associated project.
5.70686
3.514241
1.623924
def move(self, folder, **kwargs):
    """Move the remote object to *folder* within its project.

    :param folder: Folder route to which to move the object
    :type folder: string
    :raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object
    """
    project = self._proj
    if project is None:
        raise DXError("Move called when a project ID was not associated with this object handler")
    payload = {"objects": [self._dxid], "destination": folder}
    dxpy.api.project_move(project, payload, **kwargs)
:param folder: Folder route to which to move the object :type folder: string :raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object Moves the associated remote object to *folder*.
6.494699
3.070376
2.115278
def clone(self, project, folder="/", **kwargs):
    """Clone the remote object into *folder* of *project*.

    :param project: Destination project ID
    :type project: string
    :param folder: Folder route in the destination project
    :type folder: string
    :raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object
    :returns: An object handler for the cloned object in the destination project
    :rtype: :class:`DXDataObject`
    """
    if self._proj is None:
        raise DXError("Clone called when a project ID was not associated with this object handler")
    payload = {"objects": [self._dxid], "project": project, "destination": folder}
    dxpy.api.project_clone(self._proj, payload, **kwargs)
    # Return a shallow copy of this handler rebound to the destination project.
    duplicate = copy.copy(self)
    duplicate.set_ids(duplicate.get_id(), project)
    return duplicate
:param project: Destination project ID :type project: string :param folder: Folder route to which to move the object :type folder: string :raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object :returns: An object handler for the new cloned object :rtype: :class:`DXDataObject` Clones the associated remote object to *folder* in *project* and returns an object handler for the new object in the destination project.
4.4773
2.207668
2.028068
def get_session_conf_dir(self, cleanup=False):
    """Find the session configuration directory for this process tree.

    Looks in ``<user_conf_dir>/sessions/<PID>`` where ``<PID>`` is the
    PID of this process's parent, then its parent, and so on. If none
    of those directories exist, the path for the immediate parent is
    returned even though it does not exist.

    :param cleanup: if True, delete session directories belonging to
        processes that no longer exist before resolving
    :type cleanup: bool
    :returns: path to the session configuration directory
    :rtype: string
    """
    sessions_dir = os.path.join(self._user_conf_dir, "sessions")
    try:
        # psutil may be absent (notably on Windows, where it is not bundled).
        from psutil import Process, pid_exists

        if cleanup:
            try:
                session_dirs = os.listdir(sessions_dir)
            except OSError as e:
                # Silently skip cleanup and continue if we are unable to
                # enumerate the session directories for any reason
                # (including, most commonly, because the sessions dir
                # doesn't exist)
                session_dirs = []
            for session_dir in session_dirs:
                try:
                    session_pid = int(session_dir)
                except ValueError:
                    # If dir name doesn't look like an int, leave it
                    # alone
                    continue
                if not pid_exists(session_pid):
                    rmtree(os.path.join(sessions_dir, session_dir), ignore_errors=True)

        # Walk up the process ancestry looking for an existing session dir;
        # fall back to the immediate parent's path (even if it doesn't exist).
        parent_process = Process(os.getpid()).parent()
        default_session_dir = os.path.join(sessions_dir, str(parent_process.pid))
        while parent_process is not None and parent_process.pid != 0:
            session_dir = os.path.join(sessions_dir, str(parent_process.pid))
            if os.path.exists(session_dir):
                return session_dir
            parent_process = parent_process.parent()
        return default_session_dir
    except (ImportError, IOError, AttributeError) as e:
        # We don't bundle psutil with Windows, so failure to import
        # psutil would be expected.
        if platform.system() != 'Windows':
            warn(fill("Error while retrieving session configuration: " + format_exception(e)))
    except Exception as e:
        warn(fill("Unexpected error while retrieving session configuration: " + format_exception(e)))
    # Fallback when psutil is unavailable or ancestry lookup failed.
    return self._get_ppid_session_conf_dir(sessions_dir)
Tries to find the session configuration directory by looking in ~/.dnanexus_config/sessions/<PID>, where <PID> is pid of the parent of this process, then its parent, and so on. If none of those exist, the path for the immediate parent is given, even if it doesn't exist. If *cleanup* is True, looks up and deletes all session configuration directories that belong to nonexistent processes.
3.025898
3.002061
1.00794
def dxlink(object_id, project_id=None, field=None):
    """Create a DXLink dict referencing the given object.

    :param object_id: Object ID or the object handler itself
    :type object_id: string or :class:`~dxpy.bindings.DXDataObject`
    :param project_id: A project ID, if creating a cross-project DXLink
    :type project_id: string
    :param field: A field name, if creating a job-based object reference
    :type field: string
    :returns: A dict formatted as a symbolic DNAnexus object reference
    :rtype: dict

    If *object_id* is already a link it is returned unchanged. If it is
    a :class:`~dxpy.bindings.DXDataObject`, its ID is obtained via
    ``get_id()``. A truthy *field* produces a job-based object reference
    ``{'$dnanexus_link': {'job': object_id, 'field': field}}``; otherwise
    a truthy *project_id* produces a project-qualified link
    ``{'$dnanexus_link': {'project': project_id, 'id': object_id}}``.
    """
    if is_dxlink(object_id):
        return object_id
    if isinstance(object_id, DXDataObject):
        object_id = object_id.get_id()
    if field:
        # Job-based object reference: object_id must be a job ID.
        dxpy.verify_string_dxid(object_id, "job")
        return {'$dnanexus_link': {'job': object_id, 'field': field}}
    if project_id:
        return {'$dnanexus_link': {'project': project_id, 'id': object_id}}
    return {'$dnanexus_link': object_id}
:param object_id: Object ID or the object handler itself :type object_id: string or :class:`~dxpy.bindings.DXDataObject` :param project_id: A project ID, if creating a cross-project DXLink :type project_id: string :param field: A field name, if creating a job-based object reference :type field: string :returns: A dict formatted as a symbolic DNAnexus object reference :rtype: dict Creates a DXLink to the specified object. If `object_id` is already a link, it is returned without modification. If `object_id is a `~dxpy.bindings.DXDataObject`, the object ID is retrieved via its `get_id()` method. If `field` is not `None`, `object_id` is expected to be of class 'job' and the link created is a Job Based Object Reference (JBOR), which is of the form:: {'$dnanexus_link': {'job': object_id, 'field': field}} If `field` is `None` and `project_id` is not `None`, the link created is a project-specific link of the form:: {'$dnanexus_link': {'project': project_id, 'id': object_id}}
3.04287
1.337682
2.274733
def is_dxlink(x):
    """Return whether *x* appears to be a DNAnexus link.

    :param x: A potential DNAnexus link

    A link is a dict with the key ``"$dnanexus_link"`` whose value is
    either a string ID or a dict referencing an object (``"id"``) or a
    job output (``"job"``).
    """
    if not isinstance(x, dict) or '$dnanexus_link' not in x:
        return False
    link = x['$dnanexus_link']
    if isinstance(link, basestring):
        return True
    if isinstance(link, dict):
        return 'id' in link or 'job' in link
    return False
:param x: A potential DNAnexus link Returns whether *x* appears to be a DNAnexus link (is a dict with key ``"$dnanexus_link"``) with a referenced data object.
3.63445
1.840859
1.974323
def get_dxlink_ids(link):
    """Extract the ID and detail from a DNAnexus link.

    :param link: A DNAnexus link
    :type link: dict
    :returns: (Object ID, Project ID or None) for data-object links, or
              (Job ID, Field) for job-based object references
    :rtype: tuple
    :raises DXError: if *link* is not a valid DNAnexus link

    Handles the three link forms: a bare string link, a
    project-qualified data-object link, and a job-based object
    reference (JBOR).
    """
    if not is_dxlink(link):
        raise DXError('Invalid link: %r' % link)
    target = link['$dnanexus_link']
    if isinstance(target, basestring):
        return target, None
    if 'id' in target:
        return target['id'], target.get('project')
    return target['job'], target['field']
:param link: A DNAnexus link :type link: dict :returns: (Object ID, Project ID) if the link is to a data object (or :const:`None` if no project specified in the link), or (Job ID, Field) if the link is a job-based object reference (JBOR). :rtype: tuple Get the object ID and detail from a link. There are three types of links: * Simple link of the form ``{"$dnanexus_link": "file-XXXX"}`` returns ``("file-XXXX", None)``. * Data object link of the form ``{"$dnanexus_link': {"id": "file-XXXX", "project": "project-XXXX"}}`` returns ``("file-XXXX", "project-XXXX")``. * Job-based object reference (JBOR) of the form ``{"$dnanexus_link": {"job": "job-XXXX", "field": "foo"}}`` returns ``("job-XXXX", "foo")``.
3.017797
1.296198
2.328193
def get_handler(id_or_link, project=None):
    """Create and return an object handler for an ID or DXLink.

    :param id_or_link: String containing an object ID or dict containing a DXLink
    :type id_or_link: string or dict
    :param project: String project ID to use as the context if the object is a data object
    :type project: string
    :rtype: :class:`~dxpy.bindings.DXObject`, :class:`~dxpy.bindings.DXApp`,
            or :class:`~dxpy.bindings.DXGlobalWorkflow`

    Example::

        get_handler("file-1234")
    """
    try:
        cls = _guess_link_target_type(id_or_link)
    except Exception as e:
        raise DXError("Could not parse link {}: {}".format(id_or_link, e))

    if cls in (dxpy.DXApp, dxpy.DXGlobalWorkflow):
        # Translate identifiers of the form "app-name" or
        # "app-name/version_or_tag" into name/alias constructor arguments.
        if dxpy.utils.resolver.is_hashid(id_or_link):
            return cls(id_or_link)
        sep = id_or_link.find('/')
        dash = id_or_link.find('-')
        if sep == -1:
            return cls(name=id_or_link[dash + 1:])
        return cls(name=id_or_link[dash + 1:sep], alias=id_or_link[sep + 1:])

    if project is None or cls in (dxpy.DXJob, dxpy.DXAnalysis, dxpy.DXProject, dxpy.DXContainer):
        # These handler classes do not take a project argument.
        return cls(id_or_link)
    return cls(id_or_link, project=project)
:param id_or_link: String containing an object ID or dict containing a DXLink :type id_or_link: string or dict :param project: String project ID to use as the context if the object is a data object :type project: string :rtype: :class:`~dxpy.bindings.DXObject`, :class:`~dxpy.bindings.DXApp`, or :class:`~dxpy.bindings.DXGlobalWorkflow` Parses a string or DXLink dict. Creates and returns an object handler for it. Example:: get_handler("file-1234")
3.640557
2.289949
1.589798
def describe(id_or_link, **kwargs):
    """Describe one object, or a list of objects in a single bulk call.

    :param id_or_link: String containing an object ID or dict containing
        a DXLink, or a list of object IDs or dicts containing a DXLink.

    Given a single object ID, calls
    :meth:`~dxpy.bindings.DXDataObject.describe` on the object::

        describe("file-1234")

    Given a list of object IDs, calls
    :meth:`~dxpy.api.system_describe_data_objects`::

        describe(["file-1234", "workflow-5678"])

    In the list case, a "fields" kwarg selects extra fields to return
    for every object, and "classDescribeOptions" passes per-class
    describe options through to the bulk API call.
    """
    # If this is a list, extract the ids.
    # TODO: modify the procedure to use project ID when possible
    if isinstance(id_or_link, basestring) or is_dxlink(id_or_link):
        handler = get_handler(id_or_link)
        return handler.describe(**kwargs)
    else:
        links = []
        for link in id_or_link:
            # If this entry is a dxlink, then get the id.
            if is_dxlink(link):
                # Guaranteed by is_dxlink that one of the following will work
                if isinstance(link['$dnanexus_link'], basestring):
                    link = link['$dnanexus_link']
                else:
                    link = link['$dnanexus_link']['id']
            links.append(link)

        # Prepare input to system_describe_data_objects, the same fields will be passed
        # for all data object classes; if a class doesn't include a field in its describe
        # output, it will be ignored
        describe_input = \
            dict([(field, True) for field in kwargs['fields']]) if kwargs.get('fields', []) else True

        describe_links_input = [{'id': link, 'describe': describe_input} for link in links]
        bulk_describe_input = {'objects': describe_links_input}
        if 'classDescribeOptions' in kwargs:
            bulk_describe_input['classDescribeOptions'] = kwargs['classDescribeOptions']
        data_object_descriptions = dxpy.api.system_describe_data_objects(bulk_describe_input)
        return [desc['describe'] for desc in data_object_descriptions['results']]
:param id_or_link: String containing an object ID or dict containing a DXLink, or a list of object IDs or dicts containing a DXLink. Given an object ID, calls :meth:`~dxpy.bindings.DXDataObject.describe` on the object. Example:: describe("file-1234") Given a list of object IDs, calls :meth:`~dxpy.api.system_describe_data_objects`. Example:: describe(["file-1234", "workflow-5678"]) Note: If id_or_link is a list and **kwargs contains a "fields" parameter, these fields will be returned in the response for each data object in addition to the fields included by default. Additionally, describe options can be provided for each data object class in the "classDescribeOptions" kwargs argument. See https://wiki.dnanexus.com/API-Specification-v1.0.0/System-Methods#API-method:-/system/describeDataObjects for input parameters used with the multiple object describe method.
4.253234
2.230789
1.906606
def describe(self, **kwargs):
    """Describe the project or container and cache the result.

    :returns: A hash containing attributes of the project or container.
    :rtype: dict

    Dispatches to the project describe route for :class:`DXProject`
    handlers and the container route otherwise; the response is cached
    on the handler.
    """
    # TODO: link to /container-xxxx/describe
    if isinstance(self, DXProject):
        describe_call = dxpy.api.project_describe
    else:
        describe_call = dxpy.api.container_describe
    self._desc = describe_call(self._dxid, **kwargs)
    return self._desc
:returns: A hash containing attributes of the project or container. :rtype: dict Returns a hash with key-value pairs as specified by the API specification for the `/project-xxxx/describe <https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fdescribe>`_ method. This will usually include keys such as "id", "name", "class", "billTo", "created", "modified", and "dataUsage".
5.938693
4.197165
1.41493
def new_folder(self, folder, parents=False, **kwargs):
    """Create a new folder in the project or container.

    :param folder: Full path to the new folder to create
    :type folder: string
    :param parents: If True, recursively create any missing parent folders
    :type parents: boolean
    """
    if isinstance(self, DXProject):
        create_call = dxpy.api.project_new_folder
    else:
        create_call = dxpy.api.container_new_folder
    create_call(self._dxid, {"folder": folder, "parents": parents}, **kwargs)
:param folder: Full path to the new folder to create :type folder: string :param parents: If True, recursively create any parent folders that are missing :type parents: boolean Creates a new folder in the project or container.
4.250572
4.491837
0.946288
def list_folder(self, folder="/", describe=False, only="all", includeHidden=False, **kwargs):
    """List the objects and subfolders directly inside *folder*.

    :param folder: Full path to the folder to list
    :type folder: string
    :param describe: If True (or a dict of describe input), include
        ``/describe`` output for each object
    :type describe: bool or dict
    :param only: "objects" for only objects, "folders" for only folders, or "all" for both
    :type only: string
    :param includeHidden: Whether hidden objects should be returned
    :type includeHidden: bool
    :returns: A hash with keys "objects" (list of object IDs) and "folders" (list of folder routes)
    :rtype: dict
    """
    # TODO: it would be nice if we could supply describe
    # fields/defaultFields in a similar way to what we pass to the
    # high-level describe method, rather than having to construct
    # the literal API input
    if isinstance(self, DXProject):
        list_call = dxpy.api.project_list_folder
    else:
        list_call = dxpy.api.container_list_folder
    request = {"folder": folder,
               "describe": describe,
               "only": only,
               "includeHidden": includeHidden}
    return list_call(self._dxid, request, **kwargs)
:param folder: Full path to the folder to list :type folder: string :param describe: If True, returns the output of ``/describe`` on each object (see below for notes) :type describe: bool or dict :param only: Indicate "objects" for only objects, "folders" for only folders, or "all" for both :type only: string :param includeHidden: Indicate whether hidden objects should be returned :type includeHidden: bool :returns: A hash with key "objects" for the list of object IDs and key "folders" for the list of folder routes :rtype: dict Returns a hash containing a list of objects that reside directly inside the specified folder, and a list of strings representing the full paths to folders that reside directly inside the specified folder. By default, the list of objects is provided as a list containing one hash ``{"id": "class-XXXX"}`` with the ID of each matching object. If *describe* is not False, the output of ``/describe`` is also included in an additional field "describe" for each object. If *describe* is True, ``/describe`` is called with the default arguments. *describe* may also be a hash, indicating the input hash to be supplied to each ``/describe`` call.
6.723723
7.118259
0.944574
def move(self, destination, objects=[], folders=[], **kwargs):
    """Move the given objects and folders into *destination*.

    :param destination: Path of destination folder
    :type destination: string
    :param objects: List of object IDs to move
    :type objects: list of strings
    :param folders: List of full paths to folders to move
    :type folders: list of strings

    Moving a folder also moves everything it contains. Items named
    both explicitly and via a moved parent folder end up directly in
    *destination*.
    """
    if isinstance(self, DXProject):
        move_call = dxpy.api.project_move
    else:
        move_call = dxpy.api.container_move
    request = {"objects": objects, "folders": folders, "destination": destination}
    move_call(self._dxid, request, **kwargs)
:param destination: Path of destination folder :type destination: string :param objects: List of object IDs to move :type objects: list of strings :param folders: List of full paths to folders to move :type folders: list of strings Moves the specified objects and folders into the folder represented by *destination*. Moving a folder also moves all contained folders and objects. If an object or folder is explicitly specified but also appears inside another specified folder, it will be removed from its parent folder and placed directly in *destination*.
3.839719
5.135964
0.747614
def move_folder(self, folder, destination, **kwargs):
    """Move *folder* (and its contents) into *destination*.

    :param folder: Full path to the folder to move
    :type folder: string
    :param destination: Full path to the destination folder that will contain *folder*
    :type destination: string
    """
    if isinstance(self, DXProject):
        move_call = dxpy.api.project_move
    else:
        move_call = dxpy.api.container_move
    move_call(self._dxid, {"folders": [folder], "destination": destination}, **kwargs)
:param folder: Full path to the folder to move :type folder: string :param destination: Full path to the destination folder that will contain *folder* :type destination: string Moves *folder* to reside in *destination* in the same project or container. All objects and subfolders inside *folder* are also moved.
4.666982
5.765654
0.809445
def remove_folder(self, folder, recurse=False, force=False, **kwargs):
    """Remove *folder* from the project or container.

    :param folder: Full path to the folder to remove
    :type folder: string
    :param recurse: If True, recursively remove all objects and subfolders in the folder
    :type recurse: bool
    :param force: If True, suppress errors for folders that do not exist
    :type force: bool
    :raises DXError: if the API response lacks completion status

    Uses partial removal, repeating the call until the server reports
    the removal complete.
    """
    if isinstance(self, DXProject):
        remove_call = dxpy.api.project_remove_folder
    else:
        remove_call = dxpy.api.container_remove_folder
    request = {"folder": folder, "recurse": recurse, "force": force, "partial": True}
    while True:
        # api call is idempotent under 'force' semantics
        resp = remove_call(self._dxid, request, always_retry=force, **kwargs)
        if 'completed' not in resp:
            raise DXError('Error removing folder')
        if resp['completed']:
            break
:param folder: Full path to the folder to remove :type folder: string :param recurse: If True, recursively remove all objects and subfolders in the folder :type recurse: bool :param force: If True, will suppress errors for folders that do not exist :type force: bool Removes the specified folder from the project or container. It must be empty to be removed, unless *recurse* is True. Removal propagates to any hidden objects that become unreachable from any visible object in the same project or container as a result of this operation. (This can only happen if *recurse* is True.)
4.468411
4.948023
0.90307
def remove_objects(self, objects, force=False, **kwargs):
    """Remove the given objects from the project or container.

    :param objects: List of object IDs to remove from the project or container
    :type objects: list of strings
    :param force: If True, suppress errors for objects that do not exist
    :type force: bool
    """
    if isinstance(self, DXProject):
        remove_call = dxpy.api.project_remove_objects
    else:
        remove_call = dxpy.api.container_remove_objects
    remove_call(self._dxid,
                {"objects": objects, "force": force},
                always_retry=force,  # api call is idempotent under 'force' semantics
                **kwargs)
:param objects: List of object IDs to remove from the project or container :type objects: list of strings :param force: If True, will suppress errors for objects that do not exist :type force: bool Removes the specified objects from the project or container. Removal propagates to any hidden objects that become unreachable from any visible object in the same project or container as a result of this operation.
5.958453
6.625721
0.899291
def clone(self, container, destination="/", objects=[], folders=[], parents=False, **kwargs):
    """Clone the given objects and folders into *destination* of *container*.

    :param container: Destination container ID
    :type container: string
    :param destination: Path of destination folder in the destination container
    :type destination: string
    :param objects: List of object IDs to clone
    :type objects: list of strings
    :param folders: List of full paths to folders to clone
    :type folders: list of strings
    :param parents: Whether missing destination/parent folders should be created
    :type parents: boolean

    Nothing is modified in the source container; objects must be
    "closed" to be cloned.
    """
    if isinstance(self, DXProject):
        clone_call = dxpy.api.project_clone
    else:
        clone_call = dxpy.api.container_clone
    request = {"objects": objects,
               "folders": folders,
               "project": container,
               "destination": destination,
               "parents": parents}
    return clone_call(self._dxid, request, **kwargs)
:param container: Destination container ID :type container: string :param destination: Path of destination folder in the destination container :type destination: string :param objects: List of object IDs to move :type objects: list of strings :param folders: List of full paths to folders to move :type folders: list of strings :param parents: Whether the destination folder and/or parent folders should be created if they do not exist :type parents: boolean Clones (copies) the specified objects and folders in the container into the folder *destination* in the container *container*. Cloning a folder also clones all all folders and objects it contains. If an object or folder is explicitly specified but also appears inside another specified folder, it will be removed from its parent folder and placed directly in *destination*. No objects or folders are modified in the source container. Objects must be in the "closed" state to be cloned.
3.343455
4.494014
0.74398
def new(self, name, summary=None, description=None, protected=None,
        restricted=None, download_restricted=None, contains_phi=None,
        tags=None, properties=None, bill_to=None, **kwargs):
    """Create a new project and bind this handler to it.

    :param name: The name of the project
    :type name: string
    :param summary: If provided, a short summary of what the project contains
    :type summary: string
    :param description: If provided, the new project description
    :type description: string
    :param protected: If provided, whether the project should be protected
    :type protected: boolean
    :param restricted: If provided, whether the project should be restricted
    :type restricted: boolean
    :param download_restricted: If provided, whether external downloads should be restricted
    :type download_restricted: boolean
    :param contains_phi: If provided, whether the project should be marked
        as containing protected health information (PHI)
    :type contains_phi: boolean
    :param tags: If provided, tags to associate with the project
    :type tags: list of strings
    :param properties: If provided, properties to associate with the project
    :type properties: dict
    :param bill_to: If provided, ID of the entity billed for this project
    :type bill_to: string
    :returns: ID of the newly created project
    :rtype: string

    Only the creating user is initially in the member list, with
    ADMINISTER access.
    """
    request = {"name": name}
    # Only include optional fields the caller actually supplied.
    optional_fields = (("summary", summary),
                       ("description", description),
                       ("protected", protected),
                       ("restricted", restricted),
                       ("downloadRestricted", download_restricted),
                       ("containsPHI", contains_phi),
                       ("billTo", bill_to),
                       ("tags", tags),
                       ("properties", properties))
    for key, value in optional_fields:
        if value is not None:
            request[key] = value
    self.set_id(dxpy.api.project_new(request, **kwargs)["id"])
    self._desc = {}
    return self._dxid
:param name: The name of the project :type name: string :param summary: If provided, a short summary of what the project contains :type summary: string :param description: If provided, the new project description :type name: string :param protected: If provided, whether the project should be protected :type protected: boolean :param restricted: If provided, whether the project should be restricted :type restricted: boolean :param download_restricted: If provided, whether external downloads should be restricted :type download_restricted: boolean :param contains_phi: If provided, whether the project should be marked as containing protected health information (PHI) :type contains_phi: boolean :param tags: If provided, tags to associate with the project :type tags: list of strings :param properties: If provided, properties to associate with the project :type properties: dict :param bill_to: If provided, ID of the entity to which any costs associated with this project will be billed; must be the ID of the requesting user or an org of which the requesting user is a member with allowBillableActivities permission :type bill_to: string Creates a new project. Initially only the user performing this action will be in the permissions/member list, with ADMINISTER access. See the API documentation for the `/project/new <https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject%2Fnew>`_ method for more info.
1.685832
1.741662
0.967945
def update(self, name=None, summary=None, description=None, protected=None,
           restricted=None, download_restricted=None, version=None, **kwargs):
    """Update the project's metadata fields.

    :param name: If provided, the new project name
    :type name: string
    :param summary: If provided, the new project summary
    :type summary: string
    :param description: If provided, the new project description
    :type description: string
    :param protected: If provided, whether the project should be protected
    :type protected: boolean
    :param restricted: If provided, whether the project should be restricted
    :type restricted: boolean
    :param download_restricted: If provided, whether external downloads should be restricted
    :type download_restricted: boolean
    :param version: If provided, the update only occurs if the value matches
        the current project's version number
    :type version: int

    Fields that are not provided are left unchanged.
    """
    changes = {}
    for key, value in (("name", name),
                       ("summary", summary),
                       ("description", description),
                       ("protected", protected),
                       ("restricted", restricted),
                       ("downloadRestricted", download_restricted),
                       ("version", version)):
        if value is not None:
            changes[key] = value
    dxpy.api.project_update(self._dxid, changes, **kwargs)
:param name: If provided, the new project name :type name: string :param summary: If provided, the new project summary :type summary: string :param description: If provided, the new project description :type description: string :param protected: If provided, whether the project should be protected :type protected: boolean :param restricted: If provided, whether the project should be restricted :type restricted: boolean :param download_restricted: If provided, whether external downloads should be restricted :type download_restricted: boolean :param version: If provided, the update will only occur if the value matches the current project's version number :type version: int Updates the project with the new fields. All fields are optional. Fields that are not provided are not changed. See the API documentation for the `/project-xxxx/update <https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fupdate>`_ method for more info.
1.533368
1.653329
0.927443
def invite(self, invitee, level, send_email=True, **kwargs):
    """Invite a user to the project at the given permission level.

    :param invitee: Username ("user-USERNAME") or email address; use
        "PUBLIC" (with level "VIEW") to make the project public
    :type invitee: string
    :param level: Permissions level ("VIEW", "UPLOAD", "CONTRIBUTE", or "ADMINISTER")
    :type level: string
    :param send_email: Whether the invitee receives an email notification
    :type send_email: boolean
    """
    request = {"invitee": invitee,
               "level": level,
               "suppressEmailNotification": not send_email}
    return dxpy.api.project_invite(self._dxid, request, **kwargs)
:param invitee: Username (of the form "user-USERNAME") or email address of person to be invited to the project; use "PUBLIC" to make the project publicly available (in which case level must be set to "VIEW"). :type invitee: string :param level: Permissions level that the invitee would get ("VIEW", "UPLOAD", "CONTRIBUTE", or "ADMINISTER") :type level: string :param send_email: Determines whether user receives email notifications regarding the project invitation :type send_email: boolean Invites the specified user to have access to the project.
5.531439
6.525678
0.847642
def decrease_perms(self, member, level, **kwargs):
    """Decrease a project member's permission level.

    :param member: Username ("user-USERNAME") of the member whose permissions will be decreased
    :type member: string
    :param level: Permission level after the operation (None, "VIEW", "UPLOAD", or "CONTRIBUTE")
    :type level: string or None
    """
    return dxpy.api.project_decrease_permissions(self._dxid, {member: level}, **kwargs)
:param member: Username (of the form "user-USERNAME") of the project member whose permissions will be decreased. :type member: string :param level: Permissions level that the member will have after this operation (None, "VIEW", "UPLOAD", or "CONTRIBUTE") :type level: string or None Decreases the permissions that the specified user has in the project.
8.311579
8.456163
0.982902
def set_properties(self, properties, **kwargs):
    """Set (or delete) properties on the project.

    :param properties: Property names and values given as key-value pairs of strings
    :type properties: dict

    A value of :const:`None` deletes that property; properties not
    mentioned are left unchanged.
    """
    payload = {"properties": properties}
    return dxpy.api.project_set_properties(self._dxid, payload, **kwargs)
:param properties: Property names and values given as key-value pairs of strings :type properties: dict Given key-value pairs in *properties* for property names and values, the properties are set on the project for the given property names. Any property with a value of :const:`None` indicates the property will be deleted. .. note:: Any existing properties not mentioned in *properties* are not modified by this method.
6.063358
7.087186
0.855538
def format_exception(e):
    """Return a string containing the type and text of the exception *e*."""
    from .utils.printing import fill
    only_lines = traceback.format_exception_only(type(e), e)
    return '\n'.join(fill(line) for line in only_lines)
Returns a string containing the type and text of the exception.
5.332405
5.022976
1.061603
def exit_with_exc_info(code=1, message='', print_tb=False, exception=None):
    '''Exit the program, reporting the last (or given) exception.

    :param code: Exit code.
    :type code: integer (valid exit code, 0-255)
    :param message: Message to be printed after the exception information.
    :type message: string
    :param print_tb: If True, print the exception traceback; otherwise suppress it.
    :type print_tb: boolean
    :type exception: an exception to use in place of the last exception raised
    '''
    if exception is not None:
        exc_type, exc_value = exception.__class__, exception
    else:
        exc_type, exc_value = sys.exc_info()[:2]
    if exc_type is not None:
        if print_tb:
            traceback.print_exc()
        elif isinstance(exc_value, KeyboardInterrupt):
            # Match the shell's conventional ^C display.
            sys.stderr.write('^C\n')
        else:
            sys.stderr.write(''.join(traceback.format_exception_only(exc_type, exc_value)))
    sys.stderr.write(message)
    if message != '' and not message.endswith('\n'):
        sys.stderr.write('\n')
    sys.exit(code)
Exits the program, printing information about the last exception (if any) and an optional error message. Uses *exception* instead if provided. :param code: Exit code. :type code: integer (valid exit code, 0-255) :param message: Message to be printed after the exception information. :type message: string :param print_tb: If set to True, prints the exception traceback; otherwise, suppresses it. :type print_tb: boolean :type exception: an exception to use in place of the last exception raised
3.120036
1.704486
1.830485
def err_exit(message='', code=None, expected_exceptions=default_expected_exceptions, arg_parser=None,
             ignore_sigpipe=True, exception=None):
    '''Exits the program, printing information about the last exception (if
    any) and an optional error message.  Uses *exception* instead if provided.

    Uses **expected_exceptions** to set the error code decide whether to
    suppress the error traceback.

    :param message: Message to be printed after the exception information.
    :type message: string
    :param code: Exit code.
    :type code: integer (valid exit code, 0-255)
    :param expected_exceptions: Exceptions for which to exit with error code 3
        (expected error condition) and suppress the stack trace (unless the
        _DX_DEBUG environment variable is set).
    :type expected_exceptions: iterable
    :param arg_parser: argparse.ArgumentParser object used in the program (optional)
    :param ignore_sigpipe: Whether to exit silently with code 3 when IOError
        with code EPIPE is raised. Default true.
    :type ignore_sigpipe: boolean
    :param exception: an exception to use in place of the last exception raised
    '''
    if arg_parser is not None:
        message = arg_parser.prog + ": " + message

    exc = exception if exception is not None else sys.exc_info()[1]
    if isinstance(exc, SystemExit):
        # Propagate an in-flight exit untouched.
        raise exc
    if isinstance(exc, expected_exceptions):
        # Expected error condition: suppress the traceback unless debugging.
        exit_with_exc_info(EXPECTED_ERR_EXIT_STATUS, message, print_tb=dxpy._DEBUG > 0,
                           exception=exception)
    elif ignore_sigpipe and isinstance(exc, IOError) and getattr(exc, 'errno', None) == errno.EPIPE:
        # Broken pipe (e.g. output piped to `head`): exit quietly.
        if dxpy._DEBUG > 0:
            print("Broken pipe", file=sys.stderr)
        sys.exit(3)
    else:
        exit_with_exc_info(1 if code is None else code, message, print_tb=True,
                           exception=exception)
Exits the program, printing information about the last exception (if any) and an optional error message. Uses *exception* instead if provided. Uses **expected_exceptions** to set the error code decide whether to suppress the error traceback. :param message: Message to be printed after the exception information. :type message: string :param code: Exit code. :type code: integer (valid exit code, 0-255) :param expected_exceptions: Exceptions for which to exit with error code 3 (expected error condition) and suppress the stack trace (unless the _DX_DEBUG environment variable is set). :type expected_exceptions: iterable :param arg_parser: argparse.ArgumentParser object used in the program (optional) :param ignore_sigpipe: Whether to exit silently with code 3 when IOError with code EPIPE is raised. Default true. :type ignore_sigpipe: boolean :param exception: an exception to use in place of the last exception raised
4.429681
1.714085
2.584283
def error_message(self):
    "Returns a one-line description of the error."
    summary = ''.join([self.msg, ", code ", str(self.code),
                       ". Request Time={}, Request ID={}".format(self.timestamp, self.req_id)])
    # Prefix the server-reported error name when it differs from the
    # Python class name.
    if self.name != self.__class__.__name__:
        summary = self.name + ": " + summary
    return summary
Returns a one-line description of the error.
5.584613
4.928307
1.133171
def publish(self, **kwargs):
    """
    Publishes the global workflow, so all users can find it and use it on
    the platform.  The current user must be a developer of the workflow.
    """
    if self._dxid is None:
        # Not resolved to an ID yet: address the workflow by name/alias.
        return dxpy.api.global_workflow_publish('globalworkflow-' + self._name,
                                                alias=self._alias, **kwargs)
    return dxpy.api.global_workflow_publish(self._dxid, **kwargs)
Publishes the global workflow, so all users can find it and use it on the platform. The current user must be a developer of the workflow.
4.759044
4.006808
1.187739
def describe_underlying_workflow(self, region, describe_output=None):
    '''
    :param region: region name
    :type region: string
    :param describe_output: description of a global workflow
    :type describe_output: dict
    :returns: object description of a workflow
    :rtype: dict

    Returns an object description of an underlying workflow from a given region.
    '''
    assert(describe_output is None or describe_output.get('class', '') == 'globalworkflow')

    if region is None:
        raise DXError(
            'DXGlobalWorkflow: region must be provided to get an underlying workflow')

    # Perhaps we have cached it already
    if region in self._workflow_desc_by_region:
        return self._workflow_desc_by_region[region]

    if not describe_output:
        describe_output = self.describe()

    if region not in describe_output['regionalOptions'].keys():
        raise DXError('DXGlobalWorkflow: the global workflow {} is not enabled in region {}'.format(
            self.get_id(), region))

    underlying_workflow_id = describe_output['regionalOptions'][region]['workflow']
    dxworkflow = dxpy.DXWorkflow(underlying_workflow_id)
    dxworkflow_desc = dxworkflow.describe()

    # FIX: cache per region. Previously the whole cache dict was replaced
    # (self._workflow_desc_by_region = dxworkflow_desc), which discarded
    # earlier entries and broke the "region in cache" lookup above.
    self._workflow_desc_by_region[region] = dxworkflow_desc
    return dxworkflow_desc
:param region: region name :type region: string :param describe_output: description of a global workflow :type describe_output: dict :returns: object description of a workflow :rtype: dict Returns an object description of an underlying workflow from a given region.
3.675917
3.617596
1.016122
def get_underlying_workflow(self, region, describe_output=None):
    '''
    :param region: region name
    :type region: string
    :param describe_output: description of a global workflow
    :type describe_output: dict
    :returns: object handler of a workflow
    :rtype: :class:`~dxpy.bindings.dxworkflow.DXWorkflow`

    Returns an object handler of an underlying workflow from a given region.
    '''
    assert(describe_output is None or describe_output.get('class') == 'globalworkflow')

    if region is None:
        raise DXError(
            'DXGlobalWorkflow: region must be provided to get an underlying workflow')

    # Perhaps we have cached it already
    if region in self._workflows_by_region:
        return self._workflows_by_region[region]

    if not describe_output:
        describe_output = self.describe()

    if region not in describe_output['regionalOptions'].keys():
        raise DXError('DXGlobalWorkflow: the global workflow {} is not enabled in region {}'.format(
            self.get_id(), region))

    underlying_workflow_id = describe_output['regionalOptions'][region]['workflow']
    dxworkflow = dxpy.DXWorkflow(underlying_workflow_id)

    # FIX: populate the handler cache that the lookup above consults.
    # Previously the code wrote the handler into the unrelated
    # _workflow_desc_by_region attribute (clobbering the description cache)
    # and constructed a second DXWorkflow for the return value.
    self._workflows_by_region[region] = dxworkflow
    return dxworkflow
:param region: region name :type region: string :param describe_output: description of a global workflow :type describe_output: dict :returns: object handler of a workflow :rtype: :class:`~dxpy.bindings.dxworkflow.DXWorkflow` Returns an object handler of an underlying workflow from a given region.
3.938778
3.544623
1.111198
def append_underlying_workflow_desc(self, describe_output, region):
    '''
    :param region: region name
    :type region: string
    :param describe_output: description of a global workflow
    :type describe_output: dict
    :returns: object description of the global workflow
    :rtype: dict

    Appends stages, inputs, outputs and other workflow-specific metadata to
    a global workflow describe output.

    Note: global workflow description does not contain functional metadata
    (stages, IO), since this data is region-specific (due to applets and
    bound inputs) and so reside only in region-specific underlying
    workflows. We add them to global_workflow_desc so that it can be used
    for a workflow or a global workflow.
    '''
    assert(describe_output is None or describe_output.get('class') == 'globalworkflow')

    underlying_desc = self.describe_underlying_workflow(region,
                                                        describe_output=describe_output)
    # Copy the region-specific functional metadata onto the global describe.
    for field_name in ('inputs', 'outputs', 'inputSpec', 'outputSpec', 'stages'):
        describe_output[field_name] = underlying_desc[field_name]
    return describe_output
:param region: region name :type region: string :param describe_output: description of a global workflow :type describe_output: dict :returns: object description of the global workflow :rtype: dict Appends stages, inputs, outputs and other workflow-specific metadata to a global workflow describe output. Note: global workflow description does not contain functional metadata (stages, IO), since this data is region-specific (due to applets and bound inputs) and so reside only in region-specific underlying workflows. We add them to global_workflow_desc so that it can be used for a workflow or a global workflow
4.274083
3.783637
1.129623
def _get_run_input(self, workflow_input, project=None, **kwargs):
    """
    Checks the region in which the global workflow is run and returns the
    input associated with the underlying workflow from that region.
    """
    project_region = dxpy.api.project_describe(
        project, input_params={"fields": {"region": True}})["region"]
    underlying_workflow = self.get_underlying_workflow(project_region)
    return underlying_workflow._get_run_input(workflow_input, **kwargs)
Checks the region in which the global workflow is run and returns the input associated with the underlying workflow from that region.
5.489191
4.592323
1.195297
def unwrap_stream(stream_name):
    """
    Temporarily unwraps a given stream (stdin, stdout, or stderr) to undo
    the effects of wrap_stdio_in_codecs().
    """
    saved_stream = None
    try:
        saved_stream = getattr(sys, stream_name)
        # Restore the pristine stream, if the current one is a wrapper.
        if hasattr(saved_stream, '_original_stream'):
            setattr(sys, stream_name, saved_stream._original_stream)
        yield
    finally:
        # Put the (possibly wrapped) stream back in place.
        if saved_stream:
            setattr(sys, stream_name, saved_stream)
Temporarily unwraps a given stream (stdin, stdout, or stderr) to undo the effects of wrap_stdio_in_codecs().
2.369576
2.108892
1.123611
def open_dxfile(dxid, project=None, mode=None, read_buffer_size=dxfile.DEFAULT_BUFFER_SIZE):
    '''
    :param dxid: file ID
    :type dxid: string
    :rtype: :class:`~dxpy.bindings.dxfile.DXFile`

    Given the object ID of an uploaded file, returns a remote file handler
    that is a Python file-like object.

    Example::

      with open_dxfile("file-xxxx") as fd:
          for line in fd:
              ...

    Note that this is shorthand for::

      DXFile(dxid)
    '''
    handler = DXFile(dxid, project=project, mode=mode, read_buffer_size=read_buffer_size)
    return handler
:param dxid: file ID :type dxid: string :rtype: :class:`~dxpy.bindings.dxfile.DXFile` Given the object ID of an uploaded file, returns a remote file handler that is a Python file-like object. Example:: with open_dxfile("file-xxxx") as fd: for line in fd: ... Note that this is shorthand for:: DXFile(dxid)
3.896573
1.47326
2.644865
def new_dxfile(mode=None, write_buffer_size=dxfile.DEFAULT_BUFFER_SIZE, expected_file_size=None,
               file_is_mmapd=False, **kwargs):
    '''
    :param mode: One of "w" or "a" for write and append modes, respectively
    :type mode: string
    :rtype: :class:`~dxpy.bindings.dxfile.DXFile`

    Additional optional parameters not listed: all those under
    :func:`dxpy.bindings.DXDataObject.new`.

    Creates a new remote file object that is ready to be written to;
    returns a :class:`~dxpy.bindings.dxfile.DXFile` object that is a
    writable file-like object.

    Example::

        with new_dxfile(media_type="application/json") as fd:
            fd.write("foo\\n")

    Note that this is shorthand for::

        dxFile = DXFile()
        dxFile.new(**kwargs)
    '''
    handler = DXFile(mode=mode,
                     write_buffer_size=write_buffer_size,
                     expected_file_size=expected_file_size,
                     file_is_mmapd=file_is_mmapd)
    handler.new(**kwargs)
    return handler
:param mode: One of "w" or "a" for write and append modes, respectively :type mode: string :rtype: :class:`~dxpy.bindings.dxfile.DXFile` Additional optional parameters not listed: all those under :func:`dxpy.bindings.DXDataObject.new`. Creates a new remote file object that is ready to be written to; returns a :class:`~dxpy.bindings.dxfile.DXFile` object that is a writable file-like object. Example:: with new_dxfile(media_type="application/json") as fd: fd.write("foo\\n") Note that this is shorthand for:: dxFile = DXFile() dxFile.new(**kwargs)
3.736938
1.354581
2.75874
def download_dxfile(dxid, filename, chunksize=dxfile.DEFAULT_BUFFER_SIZE, append=False,
                    show_progress=False, project=None, describe_output=None, **kwargs):
    '''
    :param dxid: DNAnexus file ID or DXFile (file handler) object
    :type dxid: string or DXFile
    :param filename: Local filename
    :type filename: string
    :param append: If True, appends to the local file (default is to
        truncate local file if it exists)
    :type append: boolean
    :param project: project to use as context for this download (may affect
        which billing account is billed for this download). If None or
        DXFile.NO_PROJECT_HINT, no project hint is supplied to the API server.
    :type project: str or None
    :param describe_output: (experimental) output of the file-xxxx/describe
        API call, if available. It will make it possible to skip another
        describe API call. It should contain the default fields of the
        describe API call output and the "parts" field, not included in the
        output by default.
    :type describe_output: dict or None

    Downloads the remote file referenced by *dxid* and saves it to *filename*.

    Example::

        download_dxfile("file-xxxx", "localfilename.fastq")
    '''
    # Retry the download until _download_dxfile reports success; transient
    # per-part failures are tracked (and bounded at 3 tries) in the counter.
    part_retry_counter = defaultdict(lambda: 3)
    while True:
        finished = _download_dxfile(dxid, filename, part_retry_counter,
                                    chunksize=chunksize, append=append,
                                    show_progress=show_progress, project=project,
                                    describe_output=describe_output, **kwargs)
        if finished:
            break
:param dxid: DNAnexus file ID or DXFile (file handler) object :type dxid: string or DXFile :param filename: Local filename :type filename: string :param append: If True, appends to the local file (default is to truncate local file if it exists) :type append: boolean :param project: project to use as context for this download (may affect which billing account is billed for this download). If None or DXFile.NO_PROJECT_HINT, no project hint is supplied to the API server. :type project: str or None :param describe_output: (experimental) output of the file-xxxx/describe API call, if available. It will make it possible to skip another describe API call. It should contain the default fields of the describe API call output and the "parts" field, not included in the output by default. :type describe_output: dict or None Downloads the remote file referenced by *dxid* and saves it to *filename*. Example:: download_dxfile("file-xxxx", "localfilename.fastq")
5.75758
1.52726
3.769874
def upload_string(to_upload, media_type=None, keep_open=False, wait_on_close=False, **kwargs):
    """
    :param to_upload: String to upload into a file
    :type to_upload: string
    :param media_type: Internet Media Type
    :type media_type: string
    :param keep_open: If False, closes the file after uploading
    :type keep_open: boolean
    :param wait_on_close: If True, waits for the file to close
    :type wait_on_close: boolean
    :returns: Remote file handler
    :rtype: :class:`~dxpy.bindings.dxfile.DXFile`

    Additional optional parameters not listed: all those under
    :func:`dxpy.bindings.DXDataObject.new`.

    Uploads the data in the string *to_upload* into a new file object (with
    media type *media_type* if given) and returns the associated remote
    file handler.
    """
    # Open in 'a' mode because we are responsible for closing the file
    # ourselves below (unless the caller asked to keep it open).
    handler = new_dxfile(media_type=media_type, mode='a', **kwargs)

    # Subsequent API calls must not receive the data-object creation
    # parameters, which are only valid at creation time.
    _, remaining_kwargs = dxpy.DXDataObject._get_creation_params(kwargs)

    handler.write(to_upload, **remaining_kwargs)
    if keep_open:
        return handler
    handler.close(block=wait_on_close, **remaining_kwargs)
    return handler
:param to_upload: String to upload into a file :type to_upload: string :param media_type: Internet Media Type :type media_type: string :param keep_open: If False, closes the file after uploading :type keep_open: boolean :param wait_on_close: If True, waits for the file to close :type wait_on_close: boolean :returns: Remote file handler :rtype: :class:`~dxpy.bindings.dxfile.DXFile` Additional optional parameters not listed: all those under :func:`dxpy.bindings.DXDataObject.new`. Uploads the data in the string *to_upload* into a new file object (with media type *media_type* if given) and returns the associated remote file handler.
7.82231
7.588645
1.030791
def list_subfolders(project, path, recurse=True):
    '''
    :param project: Project ID to use as context for the listing
    :type project: string
    :param path: Subtree root path
    :type path: string
    :param recurse: Return a complete subfolders tree
    :type recurse: boolean

    Returns a generator of subfolders for the remote *path* (included in
    the result) of the *project*.

    Example::

        list_subfolders("project-xxxx", folder="/input")
    '''
    all_folders = dxpy.get_handler(project).describe(input_params={'folders': True})['folders']
    # TODO: support shell-style path globbing (i.e. /a*/c matches /ab/c but not /a/b/c)
    # return pathmatch.filter(all_folders, os.path.join(path, '*'))
    if recurse:
        return (folder for folder in all_folders if folder.startswith(path))
    # Immediate children only: no '/' beyond the path prefix.
    return (folder for folder in all_folders
            if folder.startswith(path) and '/' not in folder[len(path) + 1:])
:param project: Project ID to use as context for the listing :type project: string :param path: Subtree root path :type path: string :param recurse: Return a complete subfolders tree :type recurse: boolean Returns a list of subfolders for the remote *path* (included to the result) of the *project*. Example:: list_subfolders("project-xxxx", folder="/input")
5.02452
2.441609
2.057873
def build(src_dir, parallel_build=True):
    """
    Runs any build scripts that are found in the specified directory.

    In particular, runs ``./configure`` if it exists, followed by ``make -jN``
    if it exists (building with as many parallel tasks as there are CPUs on
    the system).
    """
    # TODO: use Gentoo or deb buildsystem
    config_script = os.path.join(src_dir, "configure")
    if os.path.isfile(config_script) and os.access(config_script, os.X_OK):
        logger.debug("Running ./configure in {cwd}".format(cwd=os.path.abspath(src_dir)))
        try:
            subprocess.check_call([config_script])
        except subprocess.CalledProcessError as e:
            raise AppBuilderException("./configure in target directory failed with exit code %d" % (e.returncode,))

    has_makefile = any(os.path.isfile(os.path.join(src_dir, name))
                       for name in ("Makefile", "makefile", "GNUmakefile"))
    if has_makefile:
        make_shortcmd = "make -j%d" % (NUM_CORES,) if parallel_build else "make"
        logger.debug("Building with {make} in {cwd}".format(make=make_shortcmd,
                                                            cwd=os.path.abspath(src_dir)))
        make_cmd = ["make", "-C", src_dir]
        if parallel_build:
            make_cmd.append("-j" + str(NUM_CORES))
        try:
            subprocess.check_call(make_cmd)
        except subprocess.CalledProcessError as e:
            raise AppBuilderException("%s in target directory failed with exit code %d" % (make_shortcmd, e.returncode))
Runs any build scripts that are found in the specified directory. In particular, runs ``./configure`` if it exists, followed by ``make -jN`` if it exists (building with as many parallel tasks as there are CPUs on the system).
2.201389
2.126814
1.035064
def is_link_local(link_target):
    """
    :param link_target: The target of a symbolic link, as given by os.readlink()
    :type link_target: string
    :returns: A boolean indicating the link is local to the current
        directory.  This is defined to mean that
        os.path.isabs(link_target) == False and the link NEVER references
        the parent directory, so "./foo/../../curdir/foo" would return
        False.
    :rtype: boolean
    """
    if os.path.isabs(link_target):
        return False

    # Split the target into path components, root-most component first.
    components = []
    head, tail = os.path.split(link_target)
    while tail:
        components.append(tail)
        head, tail = os.path.split(head)
    components.reverse()

    # Walk the components from an imaginary root; encountering ".." while
    # positioned at the root means the link escapes its start directory.
    local = True
    position = os.sep
    for component in components:
        if position == os.sep and component == os.pardir:
            local = False
        position = os.path.abspath(os.path.join(position, component))
    return local
:param link_target: The target of a symbolic link, as given by os.readlink() :type link_target: string :returns: A boolean indicating the link is local to the current directory. This is defined to mean that os.path.isabs(link_target) == False and the link NEVER references the parent directory, so "./foo/../../curdir/foo" would return False. :rtype: boolean
3.641354
3.952891
0.921188
ret_perm = perm_obj | stat.S_IROTH | stat.S_IRGRP | stat.S_IRUSR if ret_perm & stat.S_IXUSR: ret_perm = ret_perm | stat.S_IXGRP | stat.S_IXOTH return ret_perm
def _fix_perms(perm_obj)
:param perm_obj: A permissions object, as given by os.stat() :type perm_obj: integer :returns: A permissions object that is the result of "chmod a+rX" on the given permission object. This is defined to be the permission object bitwise or-ed with all stat.S_IR*, and if the stat.S_IXUSR bit is set, then the permission object should also be returned bitwise or-ed with stat.S_IX* (stat.S_IXUSR not included because it would be redundant). :rtype: integer
2.521621
2.508925
1.00506
def _create_or_update_version(app_name, version, app_spec, try_update=True):
    """
    Creates a new version of the app. Returns an app_id, or None if the app
    has already been created and published.
    """
    # This has a race condition since the app could have been created or
    # published since we last looked.
    try:
        return dxpy.api.app_new(app_spec)["id"]
    except dxpy.exceptions.DXAPIError as e:
        # TODO: detect this error more reliably
        if e.name == 'InvalidInput' and e.msg == 'Specified name and version conflict with an existing alias':
            print('App %s/%s already exists' % (app_spec["name"], version), file=sys.stderr)
            # The version number was already taken, so app/new doesn't work.
            # However, maybe it hasn't been published yet, so we might be
            # able to app-xxxx/update it.
            app_describe = dxpy.api.app_describe("app-" + app_name, alias=version)
            if app_describe.get("published", 0) > 0:
                return None
            return _update_version(app_name, version, app_spec, try_update=try_update)
        raise e
Creates a new version of the app. Returns an app_id, or None if the app has already been created and published.
4.985049
4.728847
1.054179
def _update_version(app_name, version, app_spec, try_update=True):
    """
    Updates a version of the app in place. Returns an app_id, or None if
    the app has already been published.
    """
    if not try_update:
        return None
    try:
        return dxpy.api.app_update("app-" + app_name, version, app_spec)["id"]
    except dxpy.exceptions.DXAPIError as e:
        if e.name == 'InvalidState':
            print('App %s/%s has already been published' % (app_spec["name"], version), file=sys.stderr)
            return None
        raise e
Updates a version of the app in place. Returns an app_id, or None if the app has already been published.
3.234485
2.81448
1.14923
def create_app_multi_region(regional_options, app_name, src_dir, publish=False, set_default=False,
                            billTo=None, try_versions=None, try_update=True, confirm=True):
    """
    Creates a new app object from the specified applet(s).

    :param regional_options: Region-specific options for the app. See
        https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app/new
        for details; this should contain keys for each region the app is to
        be enabled in, and for the values, a dict containing (at minimum) a
        key "applet" whose value is an applet ID for that region.
    :type regional_options: dict
    """
    app_options = dict(regionalOptions=regional_options)
    return _create_app(app_options, app_name, src_dir,
                       publish=publish, set_default=set_default, billTo=billTo,
                       try_versions=try_versions, try_update=try_update, confirm=confirm)
Creates a new app object from the specified applet(s). :param regional_options: Region-specific options for the app. See https://wiki.dnanexus.com/API-Specification-v1.0.0/Apps#API-method:-/app/new for details; this should contain keys for each region the app is to be enabled in, and for the values, a dict containing (at minimum) a key "applet" whose value is an applet ID for that region. :type regional_options: dict
1.86947
2.499772
0.747856
def create_app(applet_id, applet_name, src_dir, publish=False, set_default=False, billTo=None,
               try_versions=None, try_update=True, confirm=True, regional_options=None):
    """
    Creates a new app object from the specified applet.

    .. deprecated:: 0.204.0 Use :func:`create_app_multi_region()` instead.
    """
    # The applet's region is unknown here, so fall back to the legacy
    # {"applet": applet_id} API form that names no region explicitly.
    return _create_app({"applet": applet_id}, applet_name, src_dir,
                       publish=publish, set_default=set_default, billTo=billTo,
                       try_versions=try_versions, try_update=try_update, confirm=confirm)
Creates a new app object from the specified applet. .. deprecated:: 0.204.0 Use :func:`create_app_multi_region()` instead.
3.68574
3.768802
0.977961
def get_enabled_regions(app_spec, from_command_line):
    """
    Returns a list of the regions in which the app should be enabled.

    Also validates that app_spec['regionalOptions'], if supplied, is
    well-formed.

    :param app_spec: app specification
    :type app_spec: dict
    :param from_command_line: The regions specified on the command-line via --region
    :type from_command_line: list or None
    """
    enabled_regions = dxpy.executable_builder.get_enabled_regions(
        'app', app_spec, from_command_line, AppBuilderException)
    # None means "no explicit region list"; an explicit empty list is invalid.
    if enabled_regions is not None and not enabled_regions:
        raise AssertionError("This app should be enabled in at least one region")
    return enabled_regions
Returns a list of the regions in which the app should be enabled. Also validates that app_spec['regionalOptions'], if supplied, is well-formed. :param app_spec: app specification :type app_spec: dict :param from_command_line: The regions specified on the command-line via --region :type from_command_line: list or None
5.098403
6.353869
0.802409
def exit_with_error(msg):
    '''
    :param msg: string message to print before exiting

    Print the error message, as well as a blurb on where to find the job
    workspaces
    '''
    full_msg = "{}\nLocal job workspaces can be found in: {}".format(
        msg, environ.get('DX_TEST_JOB_HOMEDIRS'))
    sys.exit(full_msg)
:param msg: string message to print before exiting Print the error message, as well as a blurb on where to find the job workspaces
12.096168
4.320843
2.799492