code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def validate_monitor_tasks(dependencies, args):
    """Validate that all entries in the supervisor are valid task
    configurations and that all permissions requirements are satisfied.

    Returns True if every supervised config exists in the workspace and
    its referenced method is visible to the caller; False otherwise.
    Re-raises any unexpected exception after logging it.
    """
    # Make a list of all task configurations needed to supervise
    sup_configs = sorted(dependencies.keys())
    try:
        logging.info("Validating supervisor data...")
        # List task configurations in the workspace
        r = fapi.list_workspace_configs(args['project'], args['workspace'])
        fapi._check_response_code(r, 200)
        # Make a dict for easy lookup later
        space_configs = {c["name"]: c for c in r.json()}

        # Also list methods the caller has view permissions for
        r = fapi.list_repository_methods()
        fapi._check_response_code(r, 200)
        # Put in a form that is more easily searchable: namespace/name:snapshot
        repo_methods = {m['namespace'] + '/' + m['name'] + ':' +
                        str(m['snapshotId'])
                        for m in r.json() if m['entityType'] == 'Workflow'}

        valid = True
        for config in sup_configs:
            # Ensure config exists in the workspace
            if config not in space_configs:
                logging.error("No task configuration for " + config +
                              " found in " + args['project'] + "/" +
                              args['workspace'])
                valid = False
            else:
                # Check access permissions for the referenced method
                m = space_configs[config]['methodRepoMethod']
                ref_method = (m['methodNamespace'] + "/" + m['methodName'] +
                              ":" + str(m['methodVersion']))
                if ref_method not in repo_methods:
                    # BUG FIX: corrected "permisson" typo in error message
                    logging.error(config + " -- You don't have permission to "
                                  "run the referenced method: " + ref_method)
                    valid = False
    except Exception as e:
        logging.error("Exception occurred while validating supervisor: " +
                      str(e))
        # BUG FIX: removed unreachable "return False" that followed raise
        raise
    return valid
def recover_and_supervise(recovery_file):
    """Retrieve monitor data from recovery_file and resume monitoring.

    Returns 1 if the recovery file cannot be read/parsed; otherwise
    hands off to supervise_until_complete (no explicit return value).
    """
    try:
        logging.info("Attempting to recover Supervisor data from " +
                     recovery_file)
        with open(recovery_file) as rf:
            recovery_data = json.load(rf)
        monitor_data = recovery_data['monitor_data']
        dependencies = recovery_data['dependencies']
        args = recovery_data['args']
    # BUG FIX: was a bare "except:", which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception
    except Exception:
        logging.error("Could not recover monitor data, exiting...")
        return 1
    logging.info("Data successfully loaded, resuming Supervisor")
    supervise_until_complete(monitor_data, dependencies, args, recovery_file)
def space_list(args):
    """List accessible workspaces, in TSV form: <namespace><TAB>workspace."""
    resp = fapi.list_workspaces()
    fapi._check_response_code(resp, 200)
    # Optional filter: keep only namespaces starting with args.project
    pattern = re.compile('^' + args.project) if args.project else None
    rows = []
    for entry in resp.json():
        namespace = entry['workspace']['namespace']
        if pattern and not pattern.match(namespace):
            continue
        rows.append(namespace + '\t' + entry['workspace']['name'])
    # Case-insensitive sort for easier downstream viewing
    return sorted(rows, key=lambda s: s.lower())
def space_exists(args):
    """Determine if the named space exists in the given project (namespace).

    NOTE: the return value is the INVERSE of UNIX exit status semantics
    (where 0 = good/true, 1 = bad/false), so to check existence in UNIX:
        if ! fissfc space_exists blah ; then
            ...
        fi
    """
    try:
        resp = fapi.get_workspace(args.project, args.workspace)
        fapi._check_response_code(resp, 200)
        exists = True
    except FireCloudServerError as e:
        # 404 means "does not exist"; anything else is a real error
        if e.code != 404:
            raise
        exists = False
    if fcconfig.verbosity:
        status = "DOES" if exists else "DOES NOT"
        eprint('Space <%s> %s exist in project <%s>'
               % (args.workspace, status, args.project))
    return exists
def space_lock(args):
    """Lock a workspace."""
    resp = fapi.lock_workspace(args.project, args.workspace)
    fapi._check_response_code(resp, 204)
    if fcconfig.verbosity:
        eprint('Locked workspace {0}/{1}'.format(args.project,
                                                 args.workspace))
    return 0
def space_unlock(args):
    """Unlock a workspace."""
    resp = fapi.unlock_workspace(args.project, args.workspace)
    fapi._check_response_code(resp, 204)
    if fcconfig.verbosity:
        eprint('Unlocked workspace {0}/{1}'.format(args.project,
                                                   args.workspace))
    return 0
def space_new(args):
    """Create a new workspace."""
    resp = fapi.create_workspace(args.project, args.workspace,
                                 args.authdomain, dict())
    fapi._check_response_code(resp, 201)
    if fcconfig.verbosity:
        eprint(resp.content)
    return 0
def space_info(args):
    """Get metadata for a workspace."""
    resp = fapi.get_workspace(args.project, args.workspace)
    fapi._check_response_code(resp, 200)
    return resp.text
def space_delete(args):
    """Delete a workspace (prompts for confirmation unless --yes given)."""
    message = "WARNING: this will delete workspace: \n\t{0}/{1}".format(
        args.project, args.workspace)
    if not args.yes and not _confirm_prompt(message):
        return 0
    resp = fapi.delete_workspace(args.project, args.workspace)
    # 404 tolerated: deleting an already-absent workspace is a no-op
    fapi._check_response_code(resp, [200, 202, 204, 404])
    if fcconfig.verbosity:
        print('Deleted workspace {0}/{1}'.format(args.project,
                                                 args.workspace))
    return 0
def space_clone(args):
    """Replicate a workspace."""
    # FIXME: add --deep copy option (shallow by default);
    #        add aliasing capability, then make space_copy an alias
    if not args.to_workspace:
        args.to_workspace = args.workspace
    if not args.to_project:
        args.to_project = args.project
    if args.project == args.to_project and \
       args.workspace == args.to_workspace:
        eprint("Error: destination project and namespace must differ from"
               " cloned workspace")
        return 1
    resp = fapi.clone_workspace(args.project, args.workspace,
                                args.to_project, args.to_workspace)
    fapi._check_response_code(resp, 201)
    if fcconfig.verbosity:
        print("{}/{} successfully cloned to {}/{}".format(
            args.project, args.workspace, args.to_project,
            args.to_workspace))
    return 0
def space_acl(args):
    """Retrieve access control list for a workspace."""
    resp = fapi.get_workspace_acl(args.project, args.workspace)
    fapi._check_response_code(resp, 200)
    # Map each user to their access level, in user-sorted insertion order
    return {user: info['accessLevel']
            for user, info in sorted(resp.json()['acl'].items())}
def space_set_acl(args):
    """Assign an ACL role to a list of users for a workspace."""
    acl_updates = [{"email": user, "accessLevel": args.role}
                   for user in args.users]
    resp = fapi.update_workspace_acl(args.project, args.workspace,
                                     acl_updates)
    fapi._check_response_code(resp, 200)
    unrecognized = resp.json()['usersNotFound']
    if unrecognized:
        eprint("Unable to assign role for unrecognized users:")
        for user in unrecognized:
            eprint("\t{0}".format(user['email']))
        return 1
    if fcconfig.verbosity:
        print("Successfully updated {0} role(s)".format(len(acl_updates)))
    return 0
def space_search(args):
    """Search for workspaces matching certain criteria."""
    resp = fapi.list_workspaces()
    fapi._check_response_code(resp, 200)
    # Parse the JSON for workspace + namespace, then filter by search
    # terms: each term is treated as a regular expression
    workspaces = resp.json()
    if args.bucket:
        workspaces = [w for w in workspaces
                      if re.search(args.bucket,
                                   w['workspace']['bucketName'])]
    # FIXME: add more filter terms
    pretty_spaces = []
    for space in workspaces:
        ws = space['workspace']
        # Always show workspace storage id
        pretty_spaces.append(ws['namespace'] + '/' + ws['name'] + '\t' +
                             ws['bucketName'])
    # Case-insensitive sort for easier viewing
    return sorted(pretty_spaces, key=lambda s: s.lower())
def entity_import(args):
    """Upload an entity loadfile."""
    # First line of the TSV is the header; the rest are entity rows
    with open(args.tsvfile) as tsvf:
        headerline = tsvf.readline().strip()
        entity_data = [line.rstrip('\n') for line in tsvf]
    return _batch_load(args.project, args.workspace, headerline,
                       entity_data, args.chunk_size)
def set_export(args):
    """Return a list of lines in TSV form that would suffice to
    reconstitute a container (set) entity, if passed to entity_import.
    The first line in the list is the header; subsequent lines are the
    container members.
    """
    resp = fapi.get_entity(args.project, args.workspace, args.entity_type,
                           args.entity)
    fapi._check_response_code(resp, 200)
    set_type = args.entity_type
    # e.g. "sample_set" contains members under the "samples" attribute
    member_type = set_type.split('_')[0]
    members = resp.json()['attributes'][member_type + 's']['items']
    lines = ["membership:{}_id\t{}_id".format(set_type, member_type)]
    lines.extend("%s\t%s" % (args.entity, member['entityName'])
                 for member in members)
    return lines
def entity_types(args):
    """List entity types in a workspace."""
    resp = fapi.list_entity_types(args.project, args.workspace)
    fapi._check_response_code(resp, 200)
    # NB: returns the dict key view; caller iterates/prints it
    return resp.json().keys()
def entity_list(args):
    """List entities in a workspace, one "<type>\t<name>" row each."""
    resp = fapi.get_entities_with_type(args.project, args.workspace)
    fapi._check_response_code(resp, 200)
    return ['{0}\t{1}'.format(entity['entityType'], entity['name'])
            for entity in resp.json()]
def participant_list(args):
    """List participants within a container."""
    # Case 1: retrieve participants within a named data entity
    if args.entity_type and args.entity:
        # Edge case: a participant "contains" only itself
        if args.entity_type == 'participant':
            return [args.entity.strip()]
        # Otherwise retrieve the container entity
        resp = fapi.get_entity(args.project, args.workspace,
                               args.entity_type, args.entity)
        fapi._check_response_code(resp, 200)
        members = resp.json()['attributes']["participants"]['items']
        return [member['entityName'] for member in members]
    # Case 2: retrieve all participants within the workspace
    return __get_entities(args, "participant", page_size=2000)
def pair_list(args):
    """List pairs within a container."""
    # Case 1: retrieve pairs within a named data entity
    if args.entity_type and args.entity:
        # Edge case: a pair "contains" only itself
        if args.entity_type == 'pair':
            return [args.entity.strip()]
        # Edge case: pairs for a participant must be found the hard way,
        # by iterating over all pairs (see firecloud/discussion/9648)
        if args.entity_type == 'participant':
            entities = _entity_paginator(args.project, args.workspace,
                                         'pair', page_size=2000)
            return [e['name'] for e in entities
                    if e['attributes']['participant']['entityName'] ==
                    args.entity]
        # Otherwise retrieve the container entity
        resp = fapi.get_entity(args.project, args.workspace,
                               args.entity_type, args.entity)
        fapi._check_response_code(resp, 200)
        members = resp.json()['attributes']["pairs"]['items']
        return [member['entityName'] for member in members]
    # Case 2: retrieve all pairs within the workspace
    return __get_entities(args, "pair", page_size=2000)
def sample_list(args):
    """List samples within a container."""
    # Case 1: retrieve samples within a named data entity
    if args.entity_type and args.entity:
        # Edge case: a sample "contains" only itself
        if args.entity_type == 'sample':
            return [args.entity.strip()]
        # Edge case: samples for a participant must be found the hard way,
        # by iterating over all samples (see firecloud/discussion/9648)
        if args.entity_type == 'participant':
            entities = _entity_paginator(args.project, args.workspace,
                                         'sample', page_size=2000)
            return [e['name'] for e in entities
                    if e['attributes']['participant']['entityName'] ==
                    args.entity]
        # Otherwise retrieve the container entity
        resp = fapi.get_entity(args.project, args.workspace,
                               args.entity_type, args.entity)
        fapi._check_response_code(resp, 200)
        if args.entity_type == 'pair':
            # A pair holds exactly two sample references
            pair = resp.json()['attributes']
            members = [pair['case_sample'], pair['control_sample']]
        else:
            members = resp.json()['attributes']["samples"]['items']
        return [member['entityName'] for member in members]
    # Case 2: retrieve all samples within the workspace
    return __get_entities(args, "sample", page_size=2000)
def entity_delete(args):
    """Delete an entity in a workspace (prompts for confirmation unless
    --yes was given)."""
    msg = "WARNING: this will delete {0} {1} in {2}/{3}".format(
        args.entity_type, args.entity, args.project, args.workspace)
    if not (args.yes or _confirm_prompt(msg)):
        return
    json_body = [{"entityType": args.entity_type,
                  "entityName": args.entity}]
    r = fapi.delete_entities(args.project, args.workspace, json_body)
    fapi._check_response_code(r, 204)
    if fcconfig.verbosity:
        # BUG FIX: previously referenced non-existent args.type, raising
        # AttributeError in verbose mode; also fixed "Succesfully" typo
        print("Successfully deleted " + args.entity_type + " " + args.entity)
def meth_new(args):
    """Submit a new workflow (or update) to the methods repository."""
    resp = fapi.update_repository_method(args.namespace, args.method,
                                         args.synopsis, args.wdl, args.doc,
                                         args.comment)
    fapi._check_response_code(resp, 201)
    if fcconfig.verbosity:
        print("Method %s installed to project %s" % (args.method,
                                                     args.namespace))
    return 0
def meth_delete(args):
    """Remove (redact) a method from the method repository."""
    message = "WARNING: this will delete workflow \n\t{0}/{1}:{2}".format(
        args.namespace, args.method, args.snapshot_id)
    if not args.yes and not _confirm_prompt(message):
        return
    resp = fapi.delete_repository_method(args.namespace, args.method,
                                         args.snapshot_id)
    fapi._check_response_code(resp, 200)
    if fcconfig.verbosity:
        print("Method %s removed from project %s" % (args.method,
                                                     args.namespace))
    return 0
def meth_wdl(args):
    """Retrieve WDL for given version of a repository method."""
    resp = fapi.get_repository_method(args.namespace, args.method,
                                      args.snapshot_id, True)
    fapi._check_response_code(resp, 200)
    return resp.text
def meth_acl(args):
    """Retrieve access control list for given version of a repository
    method."""
    resp = fapi.get_repository_method_acl(args.namespace, args.method,
                                          args.snapshot_id)
    fapi._check_response_code(resp, 200)
    entries = sorted(resp.json(), key=lambda entry: entry['user'])
    # NB: intentionally returns a (lazy) map object, consumed by caller
    return map(lambda entry: '{0}\t{1}'.format(entry['user'],
                                               entry['role']), entries)
def meth_set_acl(args):
    """Assign an ACL role to a list of users for a workflow."""
    # Expand any firecloud groups; never modify our own role
    acl_updates = [{"user": user, "role": args.role}
                   for user in set(expand_fc_groups(args.users))
                   if user != fapi.whoami()]
    snap_id = args.snapshot_id
    if not snap_id:
        # Get the latest snapshot_id for this method from the methods repo
        resp = fapi.list_repository_methods(namespace=args.namespace,
                                            name=args.method)
        fapi._check_response_code(resp, 200)
        versions = resp.json()
        if len(versions) == 0:
            if fcconfig.verbosity:
                eprint("method {0}/{1} not found".format(args.namespace,
                                                         args.method))
            return 1
        latest = sorted(versions, key=lambda m: m['snapshotId'])[-1]
        snap_id = latest['snapshotId']
    resp = fapi.update_repository_method_acl(args.namespace, args.method,
                                             snap_id, acl_updates)
    fapi._check_response_code(resp, 200)
    if fcconfig.verbosity:
        print("Updated ACL for {0}/{1}:{2}".format(args.namespace,
                                                   args.method, snap_id))
    return 0
def expand_fc_groups(users):
    """If user is a firecloud group, yield all members of the group.
    Caveat: only group admins may list a group's members; non-groups
    (and inaccessible groups) are yielded unchanged."""
    admin_groups = None  # lazily fetched: group email -> group name
    for user in users:
        if '@' not in user:
            # Bare name: treat as a group name directly
            fcgroup = user
        elif user.lower().endswith('@firecloud.org'):
            if admin_groups is None:
                resp = fapi.get_groups()
                fapi._check_response_code(resp, 200)
                admin_groups = {g['groupEmail'].lower(): g['groupName']
                                for g in resp.json()
                                if g['role'] == 'Admin'}
            if user.lower() not in admin_groups:
                if fcconfig.verbosity:
                    eprint("You do not have access to the members " +
                           "of {}".format(user))
                yield user
                continue
            fcgroup = admin_groups[user.lower()]
        else:
            # Ordinary (non-group) email address: pass through
            yield user
            continue
        resp = fapi.get_group(fcgroup)
        fapi._check_response_code(resp, 200)
        group_data = resp.json()
        for admin in group_data['adminsEmails']:
            yield admin
        for member in group_data['membersEmails']:
            yield member
def meth_list(args):
    """List workflows in the methods repository."""
    resp = fapi.list_repository_methods(namespace=args.namespace,
                                        name=args.method,
                                        snapshotId=args.snapshot_id)
    fapi._check_response_code(resp, 200)
    # One TSV row per method: namespace, name, snapshotId
    results = ['{0}\t{1}\t{2}'.format(m['namespace'], m['name'],
                                      m['snapshotId'])
               for m in resp.json()]
    # Case-insensitive sort for easier viewing
    return sorted(results, key=lambda s: s.lower())
def config_start(args):
    """Invoke a task (method configuration), on given entity in given
    space. Returns a (message, submission_id) tuple."""
    # Try to use call caching (job avoidance)? Flexibly accept a range of
    # answers: booleans are used as-is; strings like "Yes"/"true"/"1"
    # (any case) mean True.
    cache = getattr(args, "cache", True)
    if isinstance(cache, str):
        cache = cache.lower() in ["y", "true", "yes", "t", "1"]
    else:
        # BUG FIX: original expression "cache is True or cache.lower() in
        # [...]" raised AttributeError for boolean False (and any other
        # non-string, non-True value)
        cache = bool(cache)
    if not args.namespace:
        args.namespace = fcconfig.method_ns
    if not args.namespace:
        raise RuntimeError("namespace not provided, or configured by default")
    r = fapi.create_submission(args.project, args.workspace, args.namespace,
                               args.config, args.entity, args.entity_type,
                               args.expression, use_callcache=cache)
    fapi._check_response_code(r, 201)
    submission_id = r.json()['submissionId']
    return ("Started {0}/{1} in {2}/{3}: id={4}".format(
        args.namespace, args.config, args.project, args.workspace,
        submission_id)), submission_id
def config_stop(args):
    """Abort a task (method configuration) by submission ID in given
    space."""
    resp = fapi.abort_submission(args.project, args.workspace,
                                 args.submission_id)
    fapi._check_response_code(resp, 204)
    return ("Aborted {0} in {1}/{2}".format(args.submission_id,
                                            args.project, args.workspace))
def config_list(args):
    """List configuration(s) in the methods repository or a workspace."""
    verbose = fcconfig.verbosity
    if args.workspace:
        if verbose:
            print("Retrieving method configs from space {0}".format(
                args.workspace))
        if not args.project:
            eprint("No project given, and no default project configured")
            return 1
        resp = fapi.list_workspace_configs(args.project, args.workspace)
        fapi._check_response_code(resp, 200)
    else:
        if verbose:
            print("Retrieving method configs from method repository")
        resp = fapi.list_repository_configs(namespace=args.namespace,
                                            name=args.config,
                                            snapshotId=args.snapshot_id)
        fapi._check_response_code(resp, 200)
    results = []
    for cfg in resp.json():
        namespace = cfg['namespace'] or '__EMPTYSTRING__'
        # Ugh: configs in workspaces look different from configs in
        # the method repo
        repo_method = cfg.get('methodRepoMethod', None)
        if repo_method:  # workspace config
            version = repo_method.get('methodVersion', 'unknown')
        else:            # repo config
            version = cfg.get('snapshotId', 'unknown')
        results.append('{0}\t{1}\tsnapshotId:{2}'.format(namespace,
                                                         cfg['name'],
                                                         version))
    # Case-insensitive sort for easier viewing
    return sorted(results, key=lambda s: s.lower())
def config_acl(args):
    """Retrieve access control list for a method configuration."""
    resp = fapi.get_repository_config_acl(args.namespace, args.config,
                                          args.snapshot_id)
    fapi._check_response_code(resp, 200)
    entries = sorted(resp.json(), key=lambda entry: entry['user'])
    # NB: intentionally returns a (lazy) map object, consumed by caller
    return map(lambda entry: '{0}\t{1}'.format(entry['user'],
                                               entry['role']), entries)
def config_set_acl(args):
    """Assign an ACL role to a list of users for a config."""
    # Expand any firecloud groups; never modify our own role
    acl_updates = [{"user": user, "role": args.role}
                   for user in set(expand_fc_groups(args.users))
                   if user != fapi.whoami()]
    snap_id = args.snapshot_id
    if not snap_id:
        # Get the latest snapshot_id for this config from the methods repo
        resp = fapi.list_repository_configs(namespace=args.namespace,
                                            name=args.config)
        fapi._check_response_code(resp, 200)
        versions = resp.json()
        if len(versions) == 0:
            if fcconfig.verbosity:
                eprint("Configuration {0}/{1} not found".format(
                    args.namespace, args.config))
            return 1
        latest = sorted(versions, key=lambda c: c['snapshotId'])[-1]
        snap_id = latest['snapshotId']
    resp = fapi.update_repository_config_acl(args.namespace, args.config,
                                             snap_id, acl_updates)
    fapi._check_response_code(resp, 200)
    if fcconfig.verbosity:
        print("Updated ACL for {0}/{1}:{2}".format(args.namespace,
                                                   args.config, snap_id))
    return 0
def config_get(args):
    """Retrieve a method config from a workspace, send stdout."""
    resp = fapi.get_workspace_config(args.project, args.workspace,
                                     args.namespace, args.config)
    fapi._check_response_code(resp, 200)
    # ensure_ascii=False so unicode content survives in the returned string
    return json.dumps(resp.json(), indent=4, separators=(',', ': '),
                      sort_keys=True, ensure_ascii=False)
def config_wdl(args):
    """Retrieve the WDL for a method config in a workspace, send stdout."""
    resp = fapi.get_workspace_config(args.project, args.workspace,
                                     args.namespace, args.config)
    fapi._check_response_code(resp, 200)
    # Re-point args at the referenced repo method, then delegate
    method = resp.json()["methodRepoMethod"]
    args.namespace = method["methodNamespace"]
    args.method = method["methodName"]
    args.snapshot_id = method["methodVersion"]
    return meth_wdl(args)
def config_diff(args):
    """Compare method configuration definitions across workspaces.
    Ignores methodConfigVersion unless the verbose argument is set."""
    cfg_1 = config_get(args).splitlines()
    cfg_1_name = args.config
    # Re-point args at the second config's coordinates, then fetch it
    args.project = args.Project
    args.workspace = args.Workspace
    if args.Config is not None:
        args.config = args.Config
    if args.Namespace is not None:
        args.namespace = args.Namespace
    cfg_2 = config_get(args).splitlines()
    if not args.verbose:
        cfg_1 = skip_cfg_ver(cfg_1)
        cfg_2 = skip_cfg_ver(cfg_2)
    return list(unified_diff(cfg_1, cfg_2, cfg_1_name, args.config,
                             lineterm=''))
def config_put(args):
    """Install a valid method configuration into a workspace, in one of
    several ways: from a JSON file containing a config definition (both
    file names and objects are supported); as a string representing the
    content of such a JSON file; or as a dict generated from such JSON
    content, e.g via json.loads(). Note that the CLI supports only
    string & filename input.

    Returns True on success; raises ValueError for unsupported input.
    """
    config = args.config
    # BUG FIX: the original called os.path.isfile(config) FIRST, which
    # raises TypeError for the dict and file-object inputs the docstring
    # promises to accept; type-check before probing the filesystem.
    if isinstance(config, dict):
        pass
    elif isinstance(config, str):
        if os.path.isfile(config):
            with open(config, 'r') as fp:
                config = json.loads(fp.read())
        else:
            config = json.loads(config)
    elif hasattr(config, "read"):
        config = json.loads(config.read())
    else:
        raise ValueError('Input method config must be filename, string '
                         'or dict')
    r = fapi.create_workspace_config(args.project, args.workspace, config)
    fapi._check_response_code(r, [201])
    return True
def config_new(args):
    """Attempt to install a new method config into a workspace, by:
    generating a template from a versioned method in the methods repo,
    then launching a local editor (respecting the $EDITOR environment
    variable) to fill in the incomplete input/output fields. Returns
    True if the config was successfully installed, otherwise False."""
    cfg = config_template(args)
    # Iteratively try to edit/install the config; iteration ends on EITHER
    #   - successful config_put() after editing, OR
    #   - leaving the config unchanged in the editor (e.g. :q in VI)
    # FIXME: put a small integer upper bound on the # of loops here
    while True:
        try:
            edited = fccore.edit_text(cfg)
            if edited == cfg:
                eprint("No edits made, method config not installed ...")
                break
            if __EDITME__ in edited:
                eprint("Edit is incomplete, method config not installed ...")
                time.sleep(1)
                continue
            args.config = cfg = edited
            config_put(args)
            return True
        except FireCloudServerError as fce:
            __pretty_print_fc_exception(fce)
    return False
def config_delete(args):
    """Remove a method config from a workspace."""
    resp = fapi.delete_workspace_config(args.project, args.workspace,
                                        args.namespace, args.config)
    fapi._check_response_code(resp, [200, 204])
    # Return the response body only when the server supplied one
    return resp.text if resp.text else None
def config_copy(args):
    """Copy a method config to new name/space/namespace/project (or
    all 4)."""
    if not (args.tospace or args.toname or args.toproject or
            args.tonamespace):
        raise RuntimeError('A new config name OR workspace OR project OR ' +
                           'namespace must be given (or all)')
    resp = fapi.get_workspace_config(args.fromproject, args.fromspace,
                                     args.namespace, args.config)
    fapi._check_response_code(resp, 200)
    copy = resp.json()
    # Any destination coordinate not given defaults to its source value
    args.toname = args.toname or args.config
    args.tonamespace = args.tonamespace or args.namespace
    args.toproject = args.toproject or args.fromproject
    args.tospace = args.tospace or args.fromspace
    copy['name'] = args.toname
    copy['namespace'] = args.tonamespace
    # Instantiate the copy
    r = fapi.overwrite_workspace_config(args.toproject, args.tospace,
                                        args.tonamespace, args.toname, copy)
    fapi._check_response_code(r, 200)
    if fcconfig.verbosity:
        print("Method %s/%s:%s copied to %s/%s:%s" % (
            args.fromproject, args.fromspace, args.config,
            args.toproject, args.tospace, args.toname))
    return 0
def attr_get(args):
    """Return a dict of attribute name/value pairs: if entity name & type
    are specified then attributes will be retrieved from that entity,
    otherwise workspace-level attributes will be returned. By default all
    attributes attached to the given object will be returned, but a
    subset can be selected by specifying a list of attribute names;
    names which refer to a non-existent attribute will be silently
    ignored. A special __header__ entry is added for entity results.
    """
    if args.entity_type and args.entity:
        resp = fapi.get_entity(args.project, args.workspace,
                               args.entity_type, args.entity)
        fapi._check_response_code(resp, 200)
        attrs = resp.json()['attributes']
        # It is wrong for the members of container objects to appear as
        # metadata (attributes) attached to the container, as it conflates
        # fundamentally different things: annotation vs membership (think
        # of a suitcase: ATTRIBUTES are its weight, shipping label and
        # passport stamps, while MEMBERS are the contents inside). It also
        # contradicts the design & docs, which clearly distinguish
        # MEMBERSHIP and UPDATE loadfiles. A change has been requested of
        # the FireCloud dev team (via forum); until implemented we elide
        # "list of members" attributes here (later we may give users a way
        # to request them, if wanted).
        for key in ["samples", "participants", "pairs"]:
            attrs.pop(key, None)
    else:
        resp = fapi.get_workspace(args.project, args.workspace)
        fapi._check_response_code(resp, 200)
        attrs = resp.json()['workspace']['attributes']

    if args.attributes:
        # Return only the requested subset of attributes
        attrs = {k: attrs[k]
                 for k in set(attrs).intersection(args.attributes)}

    if not attrs:
        return {}

    if args.entity:  # Entity attributes
        def textify(thing):
            if isinstance(thing, dict):
                thing = thing.get("items",
                                  thing.get("entityName", "__UNKNOWN__"))
            return "{0}".format(thing)
        result = {args.entity: u'\t'.join(map(textify, attrs.values()))}
        # Add "hidden" header of attribute names, for downstream convenience
        object_id = u'entity:%s_id' % args.entity_type
        result['__header__'] = [object_id] + list(attrs.keys())
        return result
    return attrs  # Workspace attributes
def attr_list(args):
    """Retrieve names of all attributes attached to a given object: an
    entity (if entity type+name is provided) or the workspace (if not)."""
    args.attributes = None  # request ALL attributes from attr_get
    result = attr_get(args)
    names = result.get("__header__", [])
    if names:
        names = names[1:]  # drop the entity:<type>_id column header
    else:
        names = result.keys()  # workspace case: names are the dict keys
    return sorted(names)
def attr_set(args):
    """Set key=value attributes: if entity name & type are specified then
    attributes will be set upon that entity, otherwise the attribute will
    be set at the workspace level."""
    on_entity = bool(args.entity_type and args.entity)
    if on_entity:
        prompt = "Set {0}={1} for {2}:{3} in {4}/{5}?\n[Y\\n]: ".format(
            args.attribute, args.value, args.entity_type, args.entity,
            args.project, args.workspace)
    else:
        prompt = "Set {0}={1} in {2}/{3}?\n[Y\\n]: ".format(
            args.attribute, args.value, args.project, args.workspace)
    if not (args.yes or _confirm_prompt("", prompt)):
        return 0
    update = fapi._attr_set(args.attribute, args.value)
    if on_entity:
        resp = fapi.update_entity(args.project, args.workspace,
                                  args.entity_type, args.entity, [update])
    else:
        resp = fapi.update_workspace_attributes(args.project,
                                                args.workspace, [update])
    fapi._check_response_code(resp, 200)
    return 0
def attr_delete(args):
    """Delete key=value attributes: if entity name & type are specified
    then attributes will be deleted from that entity, otherwise the
    attribute will be removed from the workspace."""
    if args.entity_type and args.entities:
        # There is no attribute-deletion endpoint, so 2 steps are needed:
        # first retrieve the entity_ids and any foreign keys (e.g.
        # participant_id for sample_id); then construct a loadfile which
        # specifies which entities are to have what attributes removed.
        # FireCloud uses the magic keyword __DELETE__ to indicate that an
        # attribute should be deleted from an entity.

        # Step 1: see what entities are present, filter to those requested
        entities = _entity_paginator(args.project, args.workspace,
                                     args.entity_type, page_size=1000,
                                     filter_terms=None,
                                     sort_direction="asc")
        if args.entities:
            entities = [e for e in entities if e['name'] in args.entities]

        # Step 2: construct a loadfile to delete these attributes
        attrs = sorted(args.attributes)
        etype = args.entity_type
        entity_data = []
        for entity_dict in entities:
            row = entity_dict['name']
            # TODO: Fix other types?
            if etype == "sample":
                row += "\t" + \
                       entity_dict['attributes']['participant']['entityName']
            for _ in attrs:
                row += "\t__DELETE__"
            # Improve performance by only updating records that have changed
            entity_data.append(row)
        entity_header = ["entity:" + etype + "_id"]
        if etype == "sample":
            entity_header.append("participant_id")
        entity_header = '\t'.join(entity_header + list(attrs))

        # Confirm removal of attributes from the entities
        message = "WARNING: this will delete these attributes:\n\n" + \
                  ','.join(args.attributes) + "\n\n"
        if args.entities:
            message += 'on these {0}s:\n\n'.format(args.entity_type) + \
                       ', '.join(args.entities)
        else:
            message += 'on all {0}s'.format(args.entity_type)
        message += "\n\nin workspace {0}/{1}\n".format(args.project,
                                                       args.workspace)
        if not args.yes and not _confirm_prompt(message):
            return 0

        # TODO: reconcile with other batch updates
        # Chunk the entities into batches of 500, and upload to FC
        if args.verbose:
            print("Batching " + str(len(entity_data)) +
                  " updates to Firecloud...")
        chunk_len = 500
        total = int(len(entity_data) / chunk_len) + 1
        batch = 0
        for i in range(0, len(entity_data), chunk_len):
            batch += 1
            if args.verbose:
                print("Updating samples {0}-{1}, batch {2}/{3}".format(
                    i + 1, min(i + chunk_len, len(entity_data)),
                    batch, total))
            this_data = entity_header + '\n' + \
                '\n'.join(entity_data[i:i + chunk_len])
            # Push the entity data back to firecloud
            r = fapi.upload_entities(args.project, args.workspace,
                                     this_data)
            fapi._check_response_code(r, 200)
    else:
        # Workspace-level attribute removal
        message = "WARNING: this will delete the following attributes in " + \
                  "{0}/{1}\n\t".format(args.project, args.workspace) + \
                  "\n\t".join(args.attributes)
        if not (args.yes or _confirm_prompt(message)):
            return 0
        updates = [fapi._attr_rem(a) for a in args.attributes]
        r = fapi.update_workspace_attributes(args.project, args.workspace,
                                             updates)
        fapi._check_response_code(r, 200)
    return 0
if not args.to_workspace: args.to_workspace = args.workspace if not args.to_project: args.to_project = args.project if (args.project == args.to_project and args.workspace == args.to_workspace): eprint("destination project and namespace must differ from" " source workspace") return 1 # First get the workspace attributes of the source workspace r = fapi.get_workspace(args.project, args.workspace) fapi._check_response_code(r, 200) # Parse the attributes workspace_attrs = r.json()['workspace']['attributes'] # If we passed attributes, only use those if args.attributes: workspace_attrs = {k:v for k, v in iteritems(workspace_attrs) if k in args.attributes} if len(workspace_attrs) == 0: print("No workspace attributes defined in {0}/{1}".format( args.project, args.workspace)) return 1 message = "This will copy the following workspace attributes to {0}/{1}\n" message = message.format(args.to_project, args.to_workspace) for k, v in sorted(iteritems(workspace_attrs)): message += '\t{0}\t{1}\n'.format(k, v) if not args.yes and not _confirm_prompt(message): return 0 # make the attributes into updates updates = [fapi._attr_set(k,v) for k,v in iteritems(workspace_attrs)] r = fapi.update_workspace_attributes(args.to_project, args.to_workspace, updates) fapi._check_response_code(r, 200) return 0
def attr_copy(args)
Copy workspace attributes between workspaces.
2.722123
2.636168
1.032606
def health(args):
    '''Query the FireCloud health endpoint and return the raw response body.'''
    response = fapi.health()
    fapi._check_response_code(response, 200)
    return response.content
Health FireCloud Server
5.867196
5.701084
1.029137
''' Loop over all sample sets in a workspace, performing a func ''' # Ensure that the requested action is a valid fiss_cmd fiss_func = __cmd_to_func(args.action) if not fiss_func: eprint("invalid FISS cmd '" + args.action + "'") return 1 # First get the sample set names r = fapi.get_entities(args.project, args.workspace, "sample_set") fapi._check_response_code(r, 200) sample_sets = [entity['name'] for entity in r.json()] args.entity_type = "sample_set" for sset in sample_sets: print('\n# {0}::{1}/{2} {3}'.format(args.project, args.workspace, sset, args.action)) args.entity = sset # Note how this code is similar to how args.func is called in # main so it may make sense to try to a common method for both try: result = fiss_func(args) except Exception as e: status = __pretty_print_fc_exception(e) if not args.keep_going: return status printToCLI(result) return 0
def sset_loop(args)
Loop over all sample sets in a workspace, performing a func
6.234228
5.628
1.107716
def monitor(args):
    ''' Retrieve status of jobs submitted from a given workspace, as a list
    of TSV lines sorted by descending order of job submission date'''
    r = fapi.list_submissions(args.project, args.workspace)
    fapi._check_response_code(r, 200)
    statuses = sorted(r.json(), key=lambda k: k['submissionDate'],
                      reverse=True)
    # No submissions: nothing to report.  (Previously statuses[0] raised
    # IndexError here.)
    if not statuses:
        return []
    header = '\t'.join(list(statuses[0].keys()))

    # Removed a dead `expander = lambda ...` that was immediately shadowed
    # by this def of the same name.
    def expander(thing):
        '''Render a cell; dict-valued cells become entityType:entityName.'''
        if isinstance(thing, dict):
            entityType = thing.get("entityType", None)
            if entityType:
                return "{0}:{1}".format(entityType, thing['entityName'])
        return "{0}".format(thing)

    # FIXME: this will generally return different column order between Python 2/3
    return [header] + ['\t'.join(
        map(expander, v.values())) for v in statuses]
Retrieve status of jobs submitted from a given workspace, as a list of TSV lines sorted by descending order of job submission date
5.76896
4.041941
1.427275
''' Run legacy, Firehose-style workflow of workflows''' project = args.project workspace = args.workspace namespace = args.namespace workflow = args.workflow sample_sets = args.sample_sets recovery_file = args.json_checkpoint # If no sample sets are provided, run on all sample sets if not sample_sets: r = fapi.get_entities(args.project, args.workspace, "sample_set") fapi._check_response_code(r, 200) sample_sets = [s['name'] for s in r.json()] message = "Sample Sets ({}):\n\t".format(len(sample_sets)) + \ "\n\t".join(sample_sets) prompt = "\nLaunch workflow in " + project + "/" + workspace + \ " on these sample sets? [Y\\n]: " if not args.yes and not _confirm_prompt(message, prompt): return return supervisor.supervise(project, workspace, namespace, workflow, sample_sets, recovery_file)
def supervise(args)
Run legacy, Firehose-style workflow of workflows
4.690404
3.930053
1.193471
if not args.to_workspace: args.to_workspace = args.workspace if not args.to_project: args.to_project = args.project if (args.project == args.to_project and args.workspace == args.to_workspace): eprint("destination project and namespace must differ from" " source workspace") return 1 if not args.entities: # get a list of entities from source workspace matching entity_type ents = _entity_paginator(args.project, args.workspace, args.entity_type, page_size=500, filter_terms=None, sort_direction='asc') args.entities = [e['name'] for e in ents] prompt = "Copy {0} {1}(s) from {2}/{3} to {4}/{5}?\n[Y\\n]: " prompt = prompt.format(len(args.entities), args.entity_type, args.project, args.workspace, args.to_project, args.to_workspace) if not args.yes and not _confirm_prompt("", prompt): return r = fapi.copy_entities(args.project, args.workspace, args.to_project, args.to_workspace, args.entity_type, args.entities, link_existing_entities=args.link) fapi._check_response_code(r, 201) return 0
def entity_copy(args)
Copy entities from one workspace to another.
3.057877
2.977168
1.027109
def proj_list(args):
    '''Retrieve the list of billing projects accessible to the caller/user,
    and show the level of access granted for each (e.g. Owner, User, ...)'''
    projects = fapi.list_billing_projects()
    fapi._check_response_code(projects, 200)
    projects = sorted(projects.json(), key=lambda d: d['projectName'])
    # Build a concrete list: in Python 3 map() returns an iterator, and
    # list + iterator raises TypeError on the return line below.
    rows = ['{0}\t{1}'.format(p['projectName'], p['role']) for p in projects]
    # FIXME: add username col to output, for when iterating over multiple users
    return ["Project\tRole"] + rows
Retrieve the list of billing projects accessible to the caller/user, and show the level of access granted for each (e.g. Owner, User, ...)
8.146361
4.401787
1.850694
''' Validate a workspace configuration: if an entity was specified (i.e. upon which the configuration should operate), then also validate that the entity has the necessary attributes''' r = fapi.validate_config(args.project, args.workspace, args.namespace, args.config) fapi._check_response_code(r, 200) entity_d = None config_d = r.json() if args.entity: entity_type = config_d['methodConfiguration']['rootEntityType'] entity_r = fapi.get_entity(args.project, args.workspace, entity_type, args.entity) fapi._check_response_code(entity_r, [200,404]) if entity_r.status_code == 404: eprint("Error: No {0} named '{1}'".format(entity_type, args.entity)) return 2 else: entity_d = entity_r.json() # also get the workspace info w = fapi.get_workspace(args.project, args.workspace) fapi._check_response_code(w, 200) workspace_d = w.json() ii, io, ma, mwa = _validate_helper(args, config_d, workspace_d, entity_d) ii_msg = "\nInvalid inputs:" io_msg = "\nInvalid outputs:" ma_msg = "\n{0} {1} doesn't satisfy the following inputs:".format(entity_type, args.entity) if args.entity else "" mwa_msg = "\nWorkspace {0}/{1} doesn't satisfy following inputs:".format(args.project, args.workspace) for errs, msg in zip([ii, io, ma, mwa], [ii_msg, io_msg, ma_msg, mwa_msg]): if errs: print(msg) for inp, val in errs: print("{0} -> {1}".format(inp, val)) if ii + io + ma + mwa: return 1
def config_validate(args)
Validate a workspace configuration: if an entity was specified (i.e. upon which the configuration should operate), then also validate that the entity has the necessary attributes
3.406346
2.830283
1.203535
# 4 ways to have invalid config: invalid_inputs = sorted(config_d["invalidInputs"]) invalid_outputs = sorted(config_d["invalidOutputs"]) # Also insert values for invalid i/o invalid_inputs = [(i, config_d['methodConfiguration']['inputs'][i]) for i in invalid_inputs] invalid_outputs = [(i, config_d['methodConfiguration']['outputs'][i]) for i in invalid_outputs] missing_attrs = [] missing_wksp_attrs = [] # If an entity was provided, also check to see if that entity has the necessary inputs if entity_d: entity_type = config_d['methodConfiguration']['rootEntityType'] # If the attribute is listed here, it has an entry entity_attrs = set(entity_d['attributes']) # Optimization, only get the workspace attrs if the method config has any workspace_attrs = workspace_d['workspace']['attributes'] # So now iterate over the inputs for inp, val in iteritems(config_d['methodConfiguration']['inputs']): # Must be an attribute on the entity if val.startswith("this."): # Normally, the value is of the form 'this.attribute', # but for operations on sets, e.g. one can also do # 'this.samples.attr'. But even in this case, there must be a # 'samples' attribute on the sample set, so checking for the middle # value works as expected. Other pathological cases would've been # caught above by the validation endpoint expected_attr = val.split('.')[1] # 'name' is special, it really means '_id', which everything has if expected_attr == "name": continue if expected_attr not in entity_attrs: missing_attrs.append((inp, val)) if val.startswith("workspace."): # Anything not matching this format will be caught above expected_attr = val.split('.')[1] if expected_attr not in workspace_attrs: missing_wksp_attrs.append((inp, val)) # Anything else is a literal return invalid_inputs, invalid_outputs, missing_attrs, missing_wksp_attrs
def _validate_helper(args, config_d, workspace_d, entity_d=None)
Return FISSFC validation information on config for a certain entity
5.755045
5.65665
1.017395
def _confirm_prompt(message, prompt="\nAre you sure? [y/yes (default: no)]: ",
                    affirmations=("Y", "Yes", "yes", "y")):
    '''Display a message, then confirmation prompt, and return True iff the
    user responds with one of the affirmations.'''
    reply = input(message + prompt)
    return reply in affirmations
Display a message, then confirmation prompt, and return true if the user responds with one of the affirmations.
7.070701
6.089376
1.161154
def _nonempty_project(string):
    '''Argparse validator: reject empty project names.

    Returns the stringified value, or raises ArgumentTypeError when empty.
    '''
    project = str(string)
    if not project:
        raise argparse.ArgumentTypeError(
            "No project provided and no default project configured")
    return project
Argparse validator for ensuring a workspace is provided
4.539819
3.702154
1.226264
def _entity_paginator(namespace, workspace, etype, page_size=500,
                      filter_terms=None, sort_direction="asc"):
    '''Pages through the get_entities_query endpoint to get all entities in
    the workspace without crashing.'''
    # Fetch the first page to learn the total page count
    first = fapi.get_entities_query(namespace, workspace, etype, page=1,
                                    page_size=page_size,
                                    sort_direction=sort_direction,
                                    filter_terms=filter_terms)
    fapi._check_response_code(first, 200)
    body = first.json()
    total_pages = body['resultMetadata']['filteredPageCount']
    all_entities = list(body['results'])

    # Fetch and accumulate every remaining page
    for page in range(2, total_pages + 1):
        resp = fapi.get_entities_query(namespace, workspace, etype, page=page,
                                       page_size=page_size,
                                       sort_direction=sort_direction,
                                       filter_terms=filter_terms)
        fapi._check_response_code(resp, 200)
        all_entities.extend(resp.json()['results'])

    return all_entities
Pages through the get_entities_query endpoint to get all entities in the workspace without crashing.
2.356831
2.351245
1.002376
def __cmd_to_func(cmd):
    '''Return the function object in this module matching cmd, or None if
    cmd does not name a recognized FISS command.'''
    module = sys.modules[__name__]
    candidate = getattr(module, cmd, None)
    # Only attributes tagged with the 'fiss_cmd' marker count as commands
    if candidate and not hasattr(candidate, 'fiss_cmd'):
        candidate = None
    return candidate
Returns the function object in this module matching cmd.
5.232221
4.845909
1.079719
def _valid_headerline(l):
    '''Return True if the given string is a valid loadfile header line.'''
    if not l:
        return False
    columns = l.split('\t')
    head = columns[0].split(':')
    # First column must be exactly "<kind>:<etype>"
    if len(head) != 2:
        return False
    kind, etype = head
    if kind in ('entity', 'update'):
        return etype in ('participant_id', 'participant_set_id',
                         'sample_id', 'sample_set_id',
                         'pair_id', 'pair_set_id')
    if kind == 'membership':
        # membership:sample_set_id sample_id, e.g.
        return len(columns) > 1 and etype.replace('set_', '') == columns[1]
    return False
return true if the given string is a valid loadfile header
3.789483
3.788381
1.000291
if fcconfig.verbosity:
    print("Batching " + str(len(entity_data)) + " updates to Firecloud...")

# Parse the entity type from the first cell, e.g. "entity:sample_id"
# First check that the header is valid
if not _valid_headerline(headerline):
    eprint("Invalid loadfile header:\n" + headerline)
    return 1

# NOTE: "entitie" is deliberate -- the "{1}s" in the progress message
# below pluralizes it to "entities" (and "membership" to "memberships")
update_type = "membership" if headerline.startswith("membership") else "entitie"
etype = headerline.split('\t')[0].split(':')[1].replace("_id", "")

# Split entity_data into chunks
total = int(len(entity_data) / chunk_size) + 1
batch = 0
for i in range(0, len(entity_data), chunk_size):
    batch += 1
    if fcconfig.verbosity:
        print("Updating {0} {1}s {2}-{3}, batch {4}/{5}".format(
            etype, update_type, i+1, min(i+chunk_size, len(entity_data)),
            batch, total))
    this_data = headerline + '\n' + '\n'.join(entity_data[i:i+chunk_size])

    # Now push the entity data to firecloud
    r = fapi.upload_entities(project, workspace, this_data)
    fapi._check_response_code(r, 200)

return 0
Submit a large number of entity updates in batches of chunk_size
3.959737
3.84177
1.030706
def main_as_cli(argv=None):
    '''Use this entry point to call HL fiss funcs as though from the UNIX CLI.
    (see firecloud/tests/highlevel_tests.py:call_cli for usage examples)'''
    try:
        outcome = main(argv)
    except Exception as e:
        outcome = __pretty_print_fc_exception(e)
    # FIXME: we should invert True/False return values to 0/1 here, to comply
    # with UNIX exit code semantics (and avoid problems with make, scripts, etc)
    return printToCLI(outcome)
Use this entry point to call HL fiss funcs as though from the UNIX CLI. (see firecloud/tests/highlevel_tests.py:call_cli for usage examples)
23.271034
6.763853
3.4405
def create_payload(entities):
    '''Create a tsv payload describing entities. A TSV payload consists of
    1 header row describing entity type and attribute names. Each subsequent
    line is an entity_id followed by attribute values separated by the tab
    "\\t" character. This payload can be uploaded to the workspace via
    firecloud.api.upload_entities()'''
    # All entities in one payload must share a single type
    etypes = {e.etype for e in entities}
    if len(etypes) != 1:
        raise ValueError("Can't create payload with " + str(len(etypes)) + " types")

    # Union of attribute names across all entities
    attr_names = set()
    for entity in entities:
        attr_names.update(entity.attrs.keys())
    attr_names = list(attr_names)

    # Header row, then one row per entity (missing attrs become blanks)
    header_cells = ["entity:" + entities[0].etype + "_id"] + attr_names
    lines = ['\t'.join(header_cells)]
    for entity in entities:
        cells = [entity.entity_id] + [entity.attrs.get(a, "")
                                      for a in attr_names]
        lines.append('\t'.join(cells))
    return '\n'.join(lines) + '\n'
Create a tsv payload describing entities. A TSV payload consists of 1 header row describing entity type and attribute names. Each subsequent line is an entity_id followed by attribute values separated by the tab "\\t" character. This payload can be uploaded to the workspace via firecloud.api.upload_entities()
3.059539
2.730352
1.120565
def create_loadfile(entities, f):
    '''Create payload and save to file.'''
    with open(f, 'w') as handle:
        handle.write(Entity.create_payload(entities))
Create payload and save to file.
6.247815
4.086842
1.528764
def needs_gcloud(self):
    '''Returns true if gcloud is unavailable and needed for authentication.'''
    # Default location where the Cloud SDK installs its binaries
    sdk_bin = ['google-cloud-sdk', 'bin']
    if platform.system() != "Windows":
        sdk_bin = os.path.join(os.path.expanduser('~'), *sdk_bin)
    else:
        sdk_bin = os.path.join(os.environ['LOCALAPPDATA'], 'Google',
                               'Cloud SDK', *sdk_bin)
    # gcloud is needed unless we're running on App Engine, the SDK's
    # default bin dir is already on PATH, or a gcloud executable is found
    on_gae = os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/')
    return (not on_gae
            and sdk_bin not in os.environ["PATH"].split(os.pathsep)
            and which('gcloud') is None)
Returns true if gcloud is unavailable and needed for authentication.
2.762044
2.66828
1.03514
for input_file in arguments.input_files: logging.info(input_file) # Generate a temporary file with common.atomic_write( input_file.name, file_factory=common.FileType('wt')) as tf: convert.transform_file(input_file, tf, arguments) if hasattr(input_file, 'close'): input_file.close()
def action(arguments)
Run mogrify. Most of the action is in convert, this just creates a temp file for the output.
6.087412
5.606001
1.085874
def all_unambiguous(sequence_str):
    '''All unambiguous versions of sequence_str'''
    # Cartesian-product expansion: each ambiguous base multiplies the set
    # of partial sequences by its possible concrete bases.
    expansions = [[]]
    for base in sequence_str:
        choices = _AMBIGUOUS_MAP.get(base, base)
        expansions = [prefix + [choice]
                      for prefix in expansions
                      for choice in choices]
    return [''.join(expansion) for expansion in expansions]
All unambiguous versions of sequence_str
3.551231
3.507179
1.012561
def build_parser(parser):
    """Generate a subparser"""
    # NOTE(review): the original help= values were lost (each call read
    # "help=", which is a syntax error).  The strings below are
    # reconstructed descriptions, not the original wording -- confirm
    # against upstream.
    parser.add_argument(
        'sequence_file', type=FileType('r'),
        help="Input sequence file")
    parser.add_argument(
        '--input-qual', type=FileType('r'),
        help="Quality scores associated with the input file")
    parser.add_argument(
        'output_file', type=FileType('w'),
        help="Output file; sequences passing all filters are written here")

    output_group = parser.add_argument_group("Output")
    output_group.add_argument(
        '--report-out', type=FileType('w'), default=sys.stdout,
        help="Write a summary report here (default: stdout)")
    output_group.add_argument(
        '--details-out', type=FileType('w'),
        help="Write per-record filtering details here")
    output_group.add_argument(
        '--no-details-comment', action='store_false', default=True,
        dest='details_comment',
        help="Do not write a comment header to the details file")

    parser.add_argument(
        '--min-mean-quality', metavar='QUALITY', type=float,
        default=DEFAULT_MEAN_SCORE,
        help="Minimum mean quality score to keep a read")
    parser.add_argument(
        '--min-length', metavar='LENGTH', type=int, default=200,
        help="Minimum sequence length to keep")
    parser.add_argument(
        '--max-length', metavar='LENGTH', type=int, default=1000,
        help="Maximum length; longer sequences are truncated")

    window_group = parser.add_argument_group('Quality window options')
    window_group.add_argument(
        '--quality-window-mean-qual', type=float,
        help="Minimum mean quality score within the sliding window")
    window_group.add_argument(
        '--quality-window-prop',
        help="Proportion of the window that must pass (0.0-1.0)",
        default=1.0, type=typed_range(float, 0.0, 1.0))
    window_group.add_argument(
        '--quality-window', type=int, metavar='WINDOW_SIZE', default=0,
        help="Sliding window size used to truncate sequences")

    parser.add_argument(
        '--ambiguous-action', choices=('truncate', 'drop'),
        help="Action to take when an ambiguous base is found")
    parser.add_argument(
        '--max-ambiguous', default=None,
        help="Maximum number of ambiguous bases allowed", type=int)
    parser.add_argument(
        '--pct-ambiguous',
        help="Maximum percentage of ambiguous bases allowed", type=float)

    barcode_group = parser.add_argument_group('Barcode/Primer')
    primer_group = barcode_group.add_mutually_exclusive_group()
    primer_group.add_argument(
        '--primer', help="IUPAC ambiguous primer to require after the barcode")
    primer_group.add_argument(
        '--no-primer', help="Do not require a primer",
        action='store_const', const='', dest='primer')
    barcode_group.add_argument(
        '--barcode-file', help="CSV file of specimen,barcode[,primer] rows",
        type=FileType('r'))
    barcode_group.add_argument(
        '--barcode-header', action='store_true', default=False,
        help="Barcode file contains a header row to skip")
    barcode_group.add_argument(
        '--map-out', help="Write a sequence-id,sample map here",
        type=FileType('w'), metavar='SAMPLE_MAP')
    barcode_group.add_argument(
        '--quoting', help="CSV quoting style for --map-out",
        default='QUOTE_MINIMAL',
        choices=[s for s in dir(csv) if s.startswith('QUOTE_')])
Generate a subparser
2.162974
2.159521
1.001599
def moving_average(iterable, n):
    """From Python collections module documentation
    moving_average([40, 30, 50, 46, 39, 44]) --> 40.0 42.0 45.0 43.0"""
    values = iter(iterable)
    # Pre-load n-1 values, with a leading 0 so the first popleft below
    # subtracts nothing from the running total
    window = collections.deque(itertools.islice(values, n - 1))
    window.appendleft(0)
    running = sum(window)
    for value in values:
        running += value - window.popleft()
        window.append(value)
        yield running / float(n)
From Python collections module documentation moving_average([40, 30, 50, 46, 39, 44]) --> 40.0 42.0 45.0 43.0
2.05707
2.413932
0.852166
def parse_barcode_file(fp, primer=None, header=False):
    """Load label, barcode, primer records from a CSV file.

    Returns a map (trie) from barcode -> label.
    Any additional columns are ignored.
    """
    tr = trie.trie()
    reader = csv.reader(fp)
    if header:
        # Skip header
        next(reader)

    # Skip blank rows
    records = (record for record in reader if record)

    for record in records:
        specimen, barcode = record[:2]
        # Primer may be supplied explicitly or taken from the third column
        if primer is not None:
            pr = primer
        else:
            pr = record[2]
        # Every unambiguous expansion of barcode+primer must be unique
        for sequence in all_unambiguous(barcode + pr):
            if sequence in tr:
                # BUG FIX: the format string was previously passed to
                # ValueError as extra args and never formatted
                raise ValueError(
                    "Duplicate sample: {0}, {1} both have {2}".format(
                        specimen, tr[sequence], sequence))
            logging.info('%s->%s', sequence, specimen)
            tr[sequence] = specimen

    return tr
Load label, barcode, primer records from a CSV file. Returns a map from barcode -> label Any additional columns are ignored
5.032435
5.240089
0.960372
# Builds the read -> filter chain -> write pipeline for quality filtering;
# all filters are applied lazily as generators over the record stream.
if arguments.quality_window_mean_qual and not arguments.quality_window:
    raise ValueError("--quality-window-mean-qual specified without "
                     "--quality-window")

if trie is None or triefind is None:
    raise ValueError(
        'Missing Bio.trie and/or Bio.triefind modules. Cannot continue')

filters = []
input_type = fileformat.from_handle(arguments.sequence_file)
output_type = fileformat.from_handle(arguments.output_file)
with arguments.sequence_file as fp:
    # Paired FASTA+QUAL input, or a single self-describing file
    if arguments.input_qual:
        sequences = QualityIO.PairedFastaQualIterator(
            fp, arguments.input_qual)
    else:
        sequences = SeqIO.parse(fp, input_type)

    listener = RecordEventListener()
    if arguments.details_out:
        rh = RecordReportHandler(arguments.details_out, arguments.argv,
                                 arguments.details_comment)
        rh.register_with(listener)

    # Track read sequences
    sequences = listener.iterable_hook('read', sequences)

    # Add filters
    if arguments.min_mean_quality and input_type == 'fastq':
        qfilter = QualityScoreFilter(arguments.min_mean_quality)
        filters.append(qfilter)
    if arguments.max_length:
        max_length_filter = MaxLengthFilter(arguments.max_length)
        filters.append(max_length_filter)
    if arguments.min_length:
        min_length_filter = MinLengthFilter(arguments.min_length)
        filters.append(min_length_filter)
    if arguments.max_ambiguous is not None:
        max_ambig_filter = MaxAmbiguousFilter(arguments.max_ambiguous)
        filters.append(max_ambig_filter)
    if arguments.pct_ambiguous is not None:
        pct_ambig_filter = PctAmbiguousFilter(arguments.pct_ambiguous)
        filters.append(pct_ambig_filter)
    if arguments.ambiguous_action:
        ambiguous_filter = AmbiguousBaseFilter(arguments.ambiguous_action)
        filters.append(ambiguous_filter)
    if arguments.quality_window:
        min_qual = (arguments.quality_window_mean_qual or
                    arguments.min_mean_quality)
        window_filter = WindowQualityScoreFilter(arguments.quality_window,
                                                 min_qual)
        # Window trimming is inserted FIRST so later filters see trimmed reads
        filters.insert(0, window_filter)

    if arguments.barcode_file:
        with arguments.barcode_file:
            tr = parse_barcode_file(arguments.barcode_file,
                                    arguments.primer,
                                    arguments.barcode_header)
        f = PrimerBarcodeFilter(tr)
        filters.append(f)

        if arguments.map_out:
            barcode_writer = csv.writer(
                arguments.map_out, quoting=getattr(csv, arguments.quoting),
                lineterminator='\n')

            def barcode_handler(record, sample, barcode=None):
                # Emit one sequence-id,sample row per matched barcode
                barcode_writer.writerow((record.id, sample))
            listener.register_handler('found_barcode', barcode_handler)

    # Chain the filters; each wraps the previous generator
    for f in filters:
        f.listener = listener
        sequences = f.filter_records(sequences)

    # Track sequences which passed all filters
    sequences = listener.iterable_hook('write', sequences)

    with arguments.output_file:
        SeqIO.write(sequences, arguments.output_file, output_type)

rpt_rows = (f.report_dict() for f in filters)

# Write report
with arguments.report_out as fp:
    writer = csv.DictWriter(
        fp, BaseFilter.report_fields, lineterminator='\n', delimiter='\t')
    writer.writeheader()
    writer.writerows(rpt_rows)
def action(arguments)
Given parsed arguments, filter input files.
3.124166
3.094833
1.009478
def iterable_hook(self, name, iterable):
    '''Fire an event named ``name`` with each item in iterable,
    passing every item through unchanged.'''
    for item in iterable:
        self(name, item)
        yield item
Fire an event named ``name`` with each item in iterable
8.540811
9.862789
0.865963
assert record.id == self.current_record['sequence_name'] self.current_record['sample'] = sample
def _found_barcode(self, record, sample, barcode=None)
Hook called when barcode is found
8.896487
7.906541
1.125206
for record in records: try: filtered = self.filter_record(record) assert (filtered) # Quick tracking whether the sequence was modified if filtered.seq == record.seq: self.passed_unchanged += 1 else: self.passed_changed += 1 yield filtered except FailedFilter as e: self.failed += 1 v = e.value if self.listener: self.listener( 'failed_filter', record, filter_name=self.name, value=v)
def filter_records(self, records)
Apply the filter to records
4.633291
4.599479
1.007351
def filter_record(self, record):
    '''Filter a single record: pass it through if its mean phred quality
    meets the minimum, otherwise fail with the observed mean.'''
    scores = record.letter_annotations['phred_quality']
    avg = mean(scores)
    if avg < self.min_mean_score:
        raise FailedFilter(avg)
    return record
Filter a single record
5.515133
5.354036
1.030089
quality_scores = record.letter_annotations['phred_quality'] # Simple case - window covers whole sequence if len(record) <= self.window_size: mean_score = mean(quality_scores) if mean_score >= self.min_mean_score: return record else: raise FailedFilter(mean_score) # Find the right clipping point. Start clipping at the beginning of the # sequence, then extend the window to include regions with acceptable # mean quality scores. clip_right = 0 for i, a in enumerate( moving_average(quality_scores, self.window_size)): if a >= self.min_mean_score: clip_right = i + self.window_size else: break if clip_right: return record[:clip_right] else: # First window failed - record fails raise FailedFilter()
def filter_record(self, record)
Filter a single record
5.032946
4.958039
1.015108
def filter_record(self, record):
    '''Filter a record, truncating or dropping at an 'N' '''
    cut = record.seq.find('N')
    if cut == -1:
        # No ambiguous base present
        return record
    if self.action == 'truncate':
        return record[:cut]
    if self.action == 'drop':
        raise FailedFilter()
    # Unknown action: should be unreachable
    assert False
Filter a record, truncating or dropping at an 'N'
5.452857
3.95887
1.377377
def filter_record(self, record):
    '''Filter record, dropping any that don't meet minimum length'''
    if len(record) < self.min_length:
        raise FailedFilter(len(record))
    return record
Filter record, dropping any that don't meet minimum length
7.349077
5.268901
1.394803
def filter_record(self, record):
    '''Filter record, truncating any over some maximum length'''
    limit = self.max_length
    return record[:limit] if len(record) >= limit else record
Filter record, truncating any over some maximum length
3.827832
2.601223
1.471551
is_alignment = True avg_length = None min_length = sys.maxsize max_length = 0 sequence_count = 0 # Get an iterator and analyze the data. with common.FileType('rt')(source_file) as fp: if not file_type: file_type = fileformat.from_handle(fp) for record in SeqIO.parse(fp, file_type): sequence_count += 1 sequence_length = len(record) if max_length != 0: # If even one sequence is not the same length as the others, # we don't consider this an alignment. if sequence_length != max_length: is_alignment = False # Lengths if sequence_length > max_length: max_length = sequence_length if sequence_length < min_length: min_length = sequence_length # Average length if sequence_count == 1: avg_length = float(sequence_length) else: avg_length = avg_length + ((sequence_length - avg_length) / sequence_count) # Handle an empty file: if avg_length is None: min_length = max_length = avg_length = 0 if sequence_count <= 1: is_alignment = False return (source_file, str(is_alignment).upper(), min_length, max_length, avg_length, sequence_count)
def summarize_sequence_file(source_file, file_type=None)
Summarizes a sequence file, returning a tuple containing the name, whether the file is an alignment, minimum sequence length, maximum sequence length, average length, number of sequences.
3.010628
2.780783
1.082655
# Ignore SIGPIPE, for head support common.exit_on_sigpipe() common.exit_on_sigint() handle = arguments.destination_file output_format = arguments.output_format if not output_format: try: output_format = 'align' if handle.isatty() else 'tab' except AttributeError: output_format = 'tab' writer_cls = _WRITERS[output_format] ssf = partial(summarize_sequence_file, file_type = arguments.input_format) # if only one thread, do not use the multithreading so parent process # can be terminated using ctrl+c if arguments.threads > 1: pool = multiprocessing.Pool(processes=arguments.threads) summary = pool.imap(ssf, arguments.source_files) else: summary = (ssf(f) for f in arguments.source_files) with handle: writer = writer_cls(arguments.source_files, summary, handle) writer.write()
def action(arguments)
Given one more more sequence files, determine if the file is an alignment, the maximum sequence length and the total number of sequences. Provides different output formats including tab (tab-delimited), csv and align (aligned as if part of a borderless table).
4.641497
4.535552
1.023359
with tempfile.SpooledTemporaryFile(buffer_size, mode='wb+') as tf: pickler = pickle.Pickler(tf) for record in records: pickler.dump(record) def record_iter(): tf.seek(0) unpickler = pickle.Unpickler(tf) while True: try: yield unpickler.load() except EOFError: break yield record_iter
def _record_buffer(records, buffer_size=DEFAULT_BUFFER_SIZE)
Buffer for transform functions which require multiple passes through data. Value returned by context manager is a function which returns an iterator through records.
2.450219
2.653841
0.923273
def dashes_cleanup(records, prune_chars='.:?~'):
    '''Take an alignment and convert any undesirable characters such as
    ? or ~ to -.'''
    logging.info(
        "Applying _dashes_cleanup: converting any of '{}' to '-'.".format(prune_chars))
    # One str.translate pass replaces every undesirable char with a gap
    table = {ord(ch): '-' for ch in prune_chars}
    for rec in records:
        rec.seq = Seq(str(rec.seq).translate(table), rec.seq.alphabet)
        yield rec
Take an alignment and convert any undesirable characters such as ? or ~ to -.
4.36041
3.779189
1.153795
def deduplicate_sequences(records, out_file):
    '''Remove any duplicate records with identical sequences, keep the
    first instance seen and discard additional occurences.'''
    logging.info('Applying _deduplicate_sequences generator: '
                 'removing any duplicate records with identical sequences.')
    by_checksum = collections.defaultdict(list)
    for record in records:
        # First record with a given SEGUID checksum passes through
        ids = by_checksum[seguid(record.seq)]
        if not ids:
            yield record
        ids.append(record.id)

    # Optionally log which ids shared each sequence
    if out_file is not None:
        with out_file:
            for ids in by_checksum.values():
                out_file.write('%s\n' % (' '.join(ids),))
Remove any duplicate records with identical sequences, keep the first instance seen and discard additional occurences.
4.720343
4.183456
1.128336
def deduplicate_taxa(records):
    '''Remove any duplicate records with identical IDs, keep the first
    instance seen and discard additional occurences.

    IDs of the form "<taxid>|..." are compared by their integer taxid;
    all other IDs are compared verbatim.
    '''
    logging.info('Applying _deduplicate_taxa generator: ' + \
                 'removing any duplicate records with identical IDs.')
    taxa = set()
    for record in records:
        # Default to full ID, split if | is found.
        taxid = record.id
        if '|' in record.id:
            try:
                taxid = int(record.id.split("|")[0])
            except ValueError:
                # If we couldn't parse an integer from the ID, just fall
                # back on the full ID.  (Was a bare except, which also
                # swallowed unrelated errors; logging.warn is deprecated.)
                logging.warning("Unable to parse integer taxid from %s",
                                taxid)
        if taxid in taxa:
            continue
        taxa.add(taxid)
        yield record
Remove any duplicate records with identical IDs, keep the first instance seen and discard additional occurences.
5.125939
4.70483
1.089506
def first_name_capture(records):
    '''Take only the first whitespace-delimited word as the name of the
    sequence.  Essentially removes any extra text from the sequence's
    description.'''
    logging.info('Applying _first_name_capture generator: '
                 'making sure ID only contains the first whitespace-delimited '
                 'word.')
    has_whitespace = re.compile(r'\s+')
    for record in records:
        if has_whitespace.search(record.description):
            # Rebuild the record with an empty description
            yield SeqRecord(record.seq, id=record.id, description="")
        else:
            yield record
Take only the first whitespace-delimited word as the name of the sequence. Essentially removes any extra text from the sequence's description.
7.451004
5.564649
1.338989
def include_from_file(records, handle):
    '''Filter the records, keeping only sequences whose ID is contained
    in the handle (one ID per line).'''
    wanted = frozenset(line.strip() for line in handle)
    for record in records:
        if record.id.strip() in wanted:
            yield record
Filter the records, keeping only sequences whose ID is contained in the handle.
4.092854
3.799107
1.07732
def isolate_region(sequences, start, end, gap_char='-'):
    '''Replace regions before and after start:end with gap chars'''
    # Validate the slice before touching any sequence
    if end <= start:
        raise ValueError("start of slice must precede end ({0} !> {1})".format(
            end, start))

    for sequence in sequences:
        seq = sequence.seq
        left_pad = gap_char * start
        right_pad = gap_char * (len(seq) - end)
        sequence.seq = Seq(left_pad + str(seq[start:end]) + right_pad,
                           alphabet=seq.alphabet)
        yield sequence
Replace regions before and after start:end with gap chars
3.597214
3.567507
1.008327
def drop_columns(records, slices):
    '''Drop all columns present in ``slices`` from records'''
    for record in records:
        # Expand every slice into a concrete set of doomed column indices
        # (renamed from 'slice', which shadowed the builtin)
        doomed = {i for sl in slices
                  for i in range(*sl.indices(len(record)))}
        mask = [i not in doomed for i in range(len(record))]
        record.seq = Seq(''.join(itertools.compress(record.seq, mask)),
                         record.seq.alphabet)
        yield record
Drop all columns present in ``slices`` from records
4.177508
4.250261
0.982883
with _record_buffer(records) as r: try: record = next(i for i in r() if i.id == record_id) except StopIteration: raise ValueError("Record with id {0} not found.".format(record_id)) new_slices = _update_slices(record, slices) for record in multi_cut_sequences(r(), new_slices): yield record
def cut_sequences_relative(records, slices, record_id)
Cuts records to slices, indexed by non-gap positions in record_id
3.944307
4.109336
0.95984
def multi_mask_sequences(records, slices):
    '''Replace characters sliced by slices with gap characters.'''
    for record in records:
        positions = list(range(len(record)))
        # Positions NOT covered by any slice survive; the rest become gaps
        keep = reduce(lambda idx, s: idx - frozenset(positions[s]),
                      slices, frozenset(positions))
        masked = ''.join(base if i in keep else '-'
                         for i, base in enumerate(str(record.seq)))
        # Pass the alphabet through, consistent with drop_columns and
        # isolate_region (previously dropped, losing the record's alphabet)
        record.seq = Seq(masked, record.seq.alphabet)
        yield record
Replace characters sliced by slices with gap characters.
4.049739
3.75843
1.077508
def prune_empty(records):
    """Drop sequences consisting entirely of gap characters ('-').

    An empty sequence is also dropped.
    """
    for record in records:
        if any(ch != '-' for ch in str(record.seq)):
            yield record
Remove any sequences which are entirely gaps ('-')
4.948414
3.071051
1.61131
# Copy the annotations over for k, v in list(old_record.annotations.items()): # Trim if appropriate if isinstance(v, (tuple, list)) and len(v) == len(old_record): assert len(v) == len(old_record) v = v[::-1] new_record.annotations[k] = v # Letter annotations must be lists / tuples / strings of the same # length as the sequence for k, v in list(old_record.letter_annotations.items()): assert len(v) == len(old_record) new_record.letter_annotations[k] = v[::-1]
def _reverse_annotations(old_record, new_record)
Copy annotations from old_record to new_record, reversing any lists / tuples / strings.
3.212094
2.846264
1.12853
def reverse_sequences(records):
    """Yield each record with the order of its sites reversed."""
    logging.info('Applying _reverse_sequences generator: '
                 'reversing the order of sites in sequences.')
    for record in records:
        flipped = SeqRecord(record.seq[::-1], id=record.id,
                            name=record.name,
                            description=record.description)
        # Carry annotations over, reversing per-site ones to match.
        _reverse_annotations(record, flipped)
        yield flipped
Reverse the order of sites in sequences.
4.559299
3.663043
1.244675
def reverse_complement_sequences(records):
    """Yield the reverse complement of each record."""
    logging.info('Applying _reverse_complement_sequences generator: '
                 'transforming sequences into reverse complements.')
    for record in records:
        flipped = SeqRecord(record.seq.reverse_complement(), id=record.id,
                            name=record.name,
                            description=record.description)
        # Carry annotations over, reversing per-site ones to match.
        _reverse_annotations(record, flipped)
        yield flipped
Transform sequences into reverse complements.
4.253955
3.854671
1.103585
def ungap_sequences(records, gap_chars=GAP_TABLE):
    """Strip gap characters from every record."""
    logging.info('Applying _ungap_sequences generator: removing all gap characters')
    yield from (ungap_all(record, gap_chars) for record in records)
Remove gaps from sequences, given an alignment.
9.277992
9.744833
0.952094
old_id = record.id record.id = new_id # At least for FASTA, record ID starts the description record.description = re.sub('^' + re.escape(old_id), new_id, record.description) return record
def _update_id(record, new_id)
Update a record id to new_id, also modifying the ID in record.description
5.970239
5.121632
1.165691
def name_append_suffix(records, suffix):
    """Append *suffix* to every record's ID."""
    logging.info('Applying _name_append_suffix generator: '
                 'Appending suffix ' + suffix + ' to all '
                 'sequence IDs.')
    for record in records:
        # _update_id returns the same record it mutated.
        yield _update_id(record, record.id + suffix)
Given a set of sequences, append a suffix for each sequence's name.
5.679056
5.800223
0.97911
def name_insert_prefix(records, prefix):
    """Insert *prefix* before every record's ID."""
    logging.info('Applying _name_insert_prefix generator: '
                 'Inserting prefix ' + prefix + ' for all '
                 'sequence IDs.')
    for record in records:
        # _update_id returns the same record it mutated.
        yield _update_id(record, prefix + record.id)
Given a set of sequences, insert a prefix for each sequence's name.
6.3369
6.079336
1.042367
def name_include(records, filter_regex):
    """Yield only records whose ID or description matches filter_regex."""
    logging.info('Applying _name_include generator: '
                 'including only IDs matching ' + filter_regex + ' in results.')
    # NOTE(review): matching here is case-sensitive (no re.IGNORECASE),
    # even though external docs suggest case is ignored — confirm intent.
    pattern = re.compile(filter_regex)
    for record in records:
        if pattern.search(record.id) or pattern.search(record.description):
            yield record
Given a set of sequences, filter out any sequences with names that do not match the specified regular expression. Ignore case.
5.466459
5.653746
0.966874
def name_replace(records, search_regex, replace_pattern):
    """Replace occurrences of search_regex with replace_pattern in names.

    If the ID equals the first word of the description (FASTA-like), the
    whole description is transformed and the ID re-derived from its first
    word; otherwise ID and description are transformed independently.
    """
    pattern = re.compile(search_regex)
    for record in records:
        first_word = record.description.split(None, 1)[0]
        if first_word == record.id:
            record.description = pattern.sub(replace_pattern, record.description)
            record.id = record.description.split(None, 1)[0]
        else:
            record.id = pattern.sub(replace_pattern, record.id)
            record.description = pattern.sub(replace_pattern, record.description)
        yield record
Given a set of sequences, replace all occurrences of search_regex with replace_pattern. Ignore case. If the ID and the first word of the description match, assume the description is FASTA-like and apply the transform to the entire description, then set the ID from the first word. If the ID and the first word of the description do not match, apply the transform to each individually.
2.097433
2.003291
1.046994
def seq_include(records, filter_regex):
    """Yield only records whose sequence matches filter_regex."""
    pattern = re.compile(filter_regex)
    yield from (record for record in records
                if pattern.search(str(record.seq)))
Filter any sequences whose seq does not match the filter. Ignore case.
2.480295
2.409734
1.029282