code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
''' >>> find_one_sql('tbl', {'foo': 10, 'bar': 'baz'}) ('SELECT * FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10]) >>> find_one_sql('tbl', {'id': 10}, fields=['foo', 'bar']) ('SELECT foo, bar FROM tbl WHERE id=$1', [10]) ''' keys, values = _split_dict(filter) fields = ', '.join(fields) if fields else '*' where = _pairs(keys) sql = 'SELECT {} FROM {} WHERE {}'.format(fields, table, where) return sql, values
def find_one_sql(table, filter, fields=None)
>>> find_one_sql('tbl', {'foo': 10, 'bar': 'baz'}) ('SELECT * FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10]) >>> find_one_sql('tbl', {'id': 10}, fields=['foo', 'bar']) ('SELECT foo, bar FROM tbl WHERE id=$1', [10])
2.620842
1.807405
1.450057
''' >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}) ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING id', ['bar', 1]) >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning=None) ('INSERT INTO tbl (foo, id) VALUES ($1, $2)', ['bar', 1]) >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning='pk') ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING pk', ['bar', 1]) ''' keys, values = _split_dict(data) sql = 'INSERT INTO {} ({}) VALUES ({}){}'.format( table, ', '.join(keys), ', '.join(_placeholders(data)), ' RETURNING {}'.format(returning) if returning else '') return sql, values
def insert_sql(table, data, returning='id')
>>> insert_sql('tbl', {'foo': 'bar', 'id': 1}) ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING id', ['bar', 1]) >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning=None) ('INSERT INTO tbl (foo, id) VALUES ($1, $2)', ['bar', 1]) >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning='pk') ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING pk', ['bar', 1])
1.866073
1.537099
1.214023
''' >>> update_sql('tbl', {'foo': 'a', 'bar': 1}, {'bar': 2, 'baz': 'b'}) ('UPDATE tbl SET bar=$1, baz=$2 WHERE bar=$3 AND foo=$4', [2, 'b', 1, 'a']) ''' where_keys, where_vals = _split_dict(filter) up_keys, up_vals = _split_dict(updates) changes = _pairs(up_keys, sep=', ') where = _pairs(where_keys, start=len(up_keys) + 1) sql = 'UPDATE {} SET {} WHERE {}'.format( table, changes, where) return sql, up_vals + where_vals
def update_sql(table, filter, updates)
>>> update_sql('tbl', {'foo': 'a', 'bar': 1}, {'bar': 2, 'baz': 'b'}) ('UPDATE tbl SET bar=$1, baz=$2 WHERE bar=$3 AND foo=$4', [2, 'b', 1, 'a'])
3.089096
2.361984
1.307839
''' >>> delete_sql('tbl', {'foo': 10, 'bar': 'baz'}) ('DELETE FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10]) ''' keys, values = _split_dict(filter) where = _pairs(keys) sql = 'DELETE FROM {} WHERE {}'.format(table, where) return sql, values
def delete_sql(table, filter)
>>> delete_sql('tbl', {'foo': 10, 'bar': 'baz'}) ('DELETE FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10])
3.827041
2.364431
1.618589
''' >>> _pairs(['foo', 'bar', 'baz'], sep=', ') 'foo=$1, bar=$2, baz=$3' >>> _pairs(['foo', 'bar', 'baz'], start=2) 'foo=$2 AND bar=$3 AND baz=$4' ''' return sep.join('{}=${}'.format(k, i) for i, k in enumerate(keys, start))
def _pairs(keys, *, start=1, sep=' AND ')
>>> _pairs(['foo', 'bar', 'baz'], sep=', ') 'foo=$1, bar=$2, baz=$3' >>> _pairs(['foo', 'bar', 'baz'], start=2) 'foo=$2 AND bar=$3 AND baz=$4'
2.52553
1.795868
1.406301
'''Split dict into sorted keys and values >>> _split_dict({'b': 2, 'a': 1}) (['a', 'b'], [1, 2]) ''' keys = sorted(dic.keys()) return keys, [dic[k] for k in keys]
def _split_dict(dic)
Split dict into sorted keys and values >>> _split_dict({'b': 2, 'a': 1}) (['a', 'b'], [1, 2])
2.533216
2.012389
1.25881
global _game from .game import Game from .prompt import install_words _game = Game(seed) load_advent_dat(_game) install_words(_game) _game.start() print(_game.output[:-1])
def play(seed=None)
Turn the Python prompt into an Adventure game. With optional the `seed` argument the caller can supply an integer to start the Python random number generator at a known state.
10.255265
11.120769
0.922172
if more: self.output += str(more).upper() self.output += '\n'
def write(self, more)
Append the Unicode representation of `s` to our output.
7.20772
6.180236
1.166253
self.write(s) self.yesno_callback = yesno_callback self.yesno_casual = casual
def yesno(self, s, yesno_callback, casual=False)
Ask a question and prepare to receive a yes-or-no answer.
2.754824
2.664272
1.033988
# For old-fashioned players, accept five-letter truncations like # "inven" instead of insisting on full words like "inventory". for key, value in list(self.vocabulary.items()): if isinstance(key, str) and len(key) > 5: self.vocabulary[key[:5]] = value # Set things going. self.chest_room = self.rooms[114] self.bottle.contents = self.water self.yesno(self.messages[65], self.start2)
def start(self)
Start the game.
11.352956
10.523637
1.078805
if yes: self.write_message(1) self.hints[3].used = True self.lamp_turns = 1000 self.oldloc2 = self.oldloc = self.loc = self.rooms[1] self.dwarves = [ Dwarf(self.rooms[n]) for n in (19, 27, 33, 44, 64) ] self.pirate = Pirate(self.chest_room) treasures = self.treasures self.treasures_not_found = len(treasures) for treasure in treasures: treasure.prop = -1 self.describe_location()
def start2(self, yes)
Display instructions if the user wants them.
6.849324
6.905378
0.991883
self.output = '' self._do_command(words) return self.output
def do_command(self, words)
Parse and act upon the command in the list of strings `words`.
6.366655
5.693194
1.118292
if isinstance(obj, str): savefile = open(obj, 'rb') else: savefile = obj game = pickle.loads(zlib.decompress(savefile.read())) if savefile is not obj: savefile.close() # Reinstate the random number generator. game.random_generator = random.Random() game.random_generator.setstate(game.random_state) del game.random_state return game
def resume(self, obj)
Returns an Adventure game saved to the given file.
3.249295
2.71244
1.197923
data._last_travel = [0, [0]] # x and verbs used by section 3 while True: section_number = int(datafile.readline()) if not section_number: # no further sections break store = globals().get('section%d' % section_number) while True: fields = [ (int(field) if field.lstrip('-').isdigit() else field) for field in datafile.readline().strip().split('\t') ] if fields[0] == -1: # end-of-section marker break store(data, *fields) del data._last_travel # state used by section 3 del data._object # state used by section 5 data.object_list = sorted(set(data.objects.values()), key=attrgetter('n')) #data.room_list = sorted(set(data.rooms.values()), key=attrgetter('n')) for obj in data.object_list: name = obj.names[0] if hasattr(data, name): name = name + '2' # create identifiers like ROD2, PLANT2 setattr(data, name, obj) return data
def parse(data, datafile)
Read the Adventure data file and return a ``Data`` object.
4.94602
4.722623
1.047303
api_data = [] for smtp_key, api_transformer in six.iteritems(self._headers_map): data_to_transform = email_message.extra_headers.pop(smtp_key, None) if data_to_transform is not None: if isinstance(data_to_transform, (list, tuple)): # map each value in the tuple/list for data in data_to_transform: api_data.append((api_transformer[0], api_transformer[1](data))) elif isinstance(data_to_transform, dict): for data in six.iteritems(data_to_transform): api_data.append(api_transformer(data)) else: # we only have one value api_data.append((api_transformer[0], api_transformer[1](data_to_transform))) return api_data
def _map_smtp_headers_to_api_parameters(self, email_message)
Map the values passed in SMTP headers to API-ready 2-item tuples present in HEADERS_MAP header values must be a single string or list or tuple of strings :return: 2-item tuples of the form (api_name, api_values)
2.438564
2.419204
1.008003
if not email_message.recipients(): return False from_email = sanitize_address(email_message.from_email, email_message.encoding) to_recipients = [sanitize_address(addr, email_message.encoding) for addr in email_message.to] try: post_data = [] post_data.append(('to', (",".join(to_recipients)),)) if email_message.bcc: bcc_recipients = [sanitize_address(addr, email_message.encoding) for addr in email_message.bcc] post_data.append(('bcc', (",".join(bcc_recipients)),)) if email_message.cc: cc_recipients = [sanitize_address(addr, email_message.encoding) for addr in email_message.cc] post_data.append(('cc', (",".join(cc_recipients)),)) post_data.append(('text', email_message.body,)) post_data.append(('subject', email_message.subject,)) post_data.append(('from', from_email,)) # get our recipient variables if they were passed in recipient_variables = email_message.extra_headers.pop('recipient_variables', None) if recipient_variables is not None: post_data.append(('recipient-variables', recipient_variables, )) for name, value in self._map_smtp_headers_to_api_parameters(email_message): post_data.append((name, value, )) if hasattr(email_message, 'alternatives') and email_message.alternatives: for alt in email_message.alternatives: if alt[1] == 'text/html': post_data.append(('html', alt[0],)) break # Map Reply-To header if present try: if email_message.reply_to: post_data.append(( "h:Reply-To", ", ".join(map(force_text, email_message.reply_to)), )) except AttributeError: pass if email_message.attachments: for attachment in email_message.attachments: post_data.append(('attachment', (attachment[0], attachment[1],))) content, header = encode_multipart_formdata(post_data) headers = {'Content-Type': header} else: content = post_data headers = None response = requests.post(self._api_url + "messages", auth=("api", self._access_key), data=content, headers=headers) except: if not self.fail_silently: raise return False if response.status_code != 200: if not self.fail_silently: raise MailgunAPIError(response) return False return True
def _send(self, email_message)
A helper method that does the actual sending.
2.156441
2.149504
1.003227
if not email_messages: return num_sent = 0 for message in email_messages: if self._send(message): num_sent += 1 return num_sent
def send_messages(self, email_messages)
Sends one or more EmailMessage objects and returns the number of email messages sent.
2.739498
2.59717
1.054801
context = context or {} context.update({ 'display_name': self.display_name, 'image_url': self.image_url, 'thumbnail_url': self.thumbnail_url or self.image_url, 'description': self.description, 'xblock_id': text_type(self.scope_ids.usage_id), 'alt_text': self.alt_text or self.display_name, }) fragment = self.build_fragment( template='view.html', context=context, css=[ 'view.less.css', URL_FONT_AWESOME_CSS, ], js=[ 'draggabilly.pkgd.js', 'view.js', ], js_init='ImageModalView', ) return fragment
def student_view(self, context=None)
Build the fragment for the default student view
3.765924
3.572155
1.054244
template = 'templates/' + template context = context or {} css = css or [] js = js or [] rendered_template = '' if template: rendered_template = self.loader.render_django_template( template, context=Context(context), i18n_service=self.runtime.service(self, 'i18n'), ) fragment = Fragment(rendered_template) for item in css: if item.startswith('/'): url = item else: item = 'public/' + item url = self.runtime.local_resource_url(self, item) fragment.add_css_url(url) for item in js: item = 'public/' + item url = self.runtime.local_resource_url(self, item) fragment.add_javascript_url(url) if js_init: fragment.initialize_js(js_init) return fragment
def build_fragment( self, template='', context=None, css=None, js=None, js_init=None, )
Creates a fragment for display.
2.613945
2.543315
1.027771
title = file_path title = title.split('/')[-1] title = '.'.join(title.split('.')[:-1]) title = ' '.join(title.split('-')) title = ' '.join([ word.capitalize() for word in title.split(' ') ]) return title
def _parse_title(file_path)
Parse a title from a file name
2.543312
2.386666
1.065634
file_contents = [ ( _parse_title(file_path), _read_file(file_path), ) for file_path in files ] return file_contents
def _read_files(files)
Read the contents of a list of files
4.085413
3.721807
1.097696
pattern = "{directory}/*.xml".format( directory=directory, ) files = glob(pattern) return files
def _find_files(directory)
Find XML files in the directory
5.499239
4.578109
1.201203
module = cls.__module__ module = module.split('.')[0] directory = pkg_resources.resource_filename(module, 'scenarios') files = _find_files(directory) scenarios = _read_files(files) return scenarios
def workbench_scenarios(cls)
Gather scenarios to be displayed in the workbench
3.671159
3.720005
0.986869
global __SESSION global __USER_ID if __SESSION is None: try: __SESSION = AuthorizedSession(google.auth.default(['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email'])[0]) health() __USER_ID = id_token.verify_oauth2_token(__SESSION.credentials.id_token, Request(session=__SESSION))['email'] except (DefaultCredentialsError, RefreshError) as gae: if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'): raise logging.warning("Unable to determine/refresh application credentials") try: subprocess.check_call(['gcloud', 'auth', 'application-default', 'login', '--no-launch-browser']) __SESSION = AuthorizedSession(google.auth.default(['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email'])[0]) except subprocess.CalledProcessError as cpe: if cpe.returncode < 0: logging.exception("%s was terminated by signal %d", cpe.cmd, -cpe.returncode) else: logging.exception("%s returned %d", cpe.cmd, cpe.returncode) raise gae
def _set_session()
Sets global __SESSION and __USER_ID if they haven't been set
2.703346
2.526229
1.070111
_set_session() fiss_headers = {"User-Agent" : FISS_USER_AGENT} if headers is not None: fiss_headers.update(headers) return fiss_headers
def _fiss_agent_header(headers=None)
Return request headers for fiss. Inserts FISS as the User-Agent. Initializes __SESSION if it hasn't been set. Args: headers (dict): Include additional headers as key-value pairs
3.907356
4.52406
0.863683
if type(codes) == int: codes = [codes] if response.status_code not in codes: raise FireCloudServerError(response.status_code, response.content)
def _check_response_code(response, codes)
Throws an exception if the http response is not expected. Can check single integer or list of valid responses. Example usage: >>> r = api.get_workspace("broad-firecloud-testing", "Fake-Bucket") >>> _check_response_code(r, 200) ... FireCloudServerError ...
3.364711
3.126143
1.076314
headers = _fiss_agent_header({"Content-type": "application/json"}) uri = "workspaces/{0}/{1}/entities".format(namespace, workspace) return __get(uri, headers=headers)
def list_entity_types(namespace, workspace)
List the entity types present in a workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name Swagger: https://api.firecloud.org/#!/Entities/getEntityTypes
7.833532
11.368377
0.689063
body = urlencode({"entities" : entity_data}) headers = _fiss_agent_header({ 'Content-type': "application/x-www-form-urlencoded" }) uri = "workspaces/{0}/{1}/importEntities".format(namespace, workspace) return __post(uri, headers=headers, data=body)
def upload_entities(namespace, workspace, entity_data)
Upload entities from tab-delimited string. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name entity_data (str): TSV string describing entites Swagger: https://api.firecloud.org/#!/Entities/importEntities
5.539474
5.714684
0.969341
if isinstance(entities_tsv, string_types): with open(entities_tsv, "r") as tsv: entity_data = tsv.read() elif isinstance(entities_tsv, io.StringIO): entity_data = entities_tsv.getvalue() else: raise ValueError('Unsupported input type.') return upload_entities(namespace, workspace, entity_data)
def upload_entities_tsv(namespace, workspace, entities_tsv)
Upload entities from a tsv loadfile. File-based wrapper for api.upload_entities(). A loadfile is a tab-separated text file with a header row describing entity type and attribute names, followed by rows of entities and their attribute values. Ex: entity:participant_id age alive participant_23 25 Y participant_27 35 N Args: namespace (str): project to which workspace belongs workspace (str): Workspace name entities_tsv (file): FireCloud loadfile, see format above
2.410152
2.494432
0.966212
uri = "workspaces/{0}/{1}/entities/copy".format(to_namespace, to_workspace) body = { "sourceWorkspace": { "namespace": from_namespace, "name": from_workspace }, "entityType": etype, "entityNames": enames } return __post(uri, json=body, params={'linkExistingEntities': str(link_existing_entities).lower()})
def copy_entities(from_namespace, from_workspace, to_namespace, to_workspace, etype, enames, link_existing_entities=False)
Copy entities between workspaces Args: from_namespace (str): project (namespace) to which source workspace belongs from_workspace (str): Source workspace name to_namespace (str): project (namespace) to which target workspace belongs to_workspace (str): Target workspace name etype (str): Entity type enames (list(str)): List of entity names to copy link_existing_entities (boolean): Link all soft conflicts to the entities that already exist. Swagger: https://api.firecloud.org/#!/Entities/copyEntities
2.814073
3.450397
0.81558
uri = "workspaces/{0}/{1}/entities/{2}".format(namespace, workspace, etype) return __get(uri)
def get_entities(namespace, workspace, etype)
List entities of given type in a workspace. Response content will be in JSON format. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name etype (str): Entity type Swagger: https://api.firecloud.org/#!/Entities/getEntities
4.416014
9.199654
0.48002
uri = "workspaces/{0}/{1}/entities/{2}/tsv".format(namespace, workspace, etype) return __get(uri)
def get_entities_tsv(namespace, workspace, etype)
List entities of given type in a workspace as a TSV. Identical to get_entities(), but the response is a TSV. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name etype (str): Entity type Swagger: https://api.firecloud.org/#!/Entities/browserDownloadEntitiesTSV
5.509852
12.355536
0.445942
uri = "workspaces/{0}/{1}/entities/{2}/{3}".format(namespace, workspace, etype, ename) return __get(uri)
def get_entity(namespace, workspace, etype, ename)
Request entity information. Gets entity metadata and attributes. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name etype (str): Entity type ename (str): The entity's unique id Swagger: https://api.firecloud.org/#!/Entities/getEntity
4.469219
10.108876
0.442108
uri = "workspaces/{0}/{1}/entities/delete".format(namespace, workspace) return __post(uri, json=json_body)
def delete_entities(namespace, workspace, json_body)
Delete entities in a workspace. Note: This action is not reversible. Be careful! Args: namespace (str): project to which workspace belongs workspace (str): Workspace name json_body: [ { "entityType": "string", "entityName": "string" } ] Swagger: https://api.firecloud.org/#!/Entities/deleteEntities
4.794464
7.457309
0.642921
uri = "workspaces/{0}/{1}/entities/delete".format(namespace, workspace) if isinstance(ename, string_types): body = [{"entityType":etype, "entityName":ename}] elif isinstance(ename, Iterable): body = [{"entityType":etype, "entityName":i} for i in ename] return __post(uri, json=body)
def delete_entity_type(namespace, workspace, etype, ename)
Delete entities in a workspace. Note: This action is not reversible. Be careful! Args: namespace (str): project to which workspace belongs workspace (str): Workspace name etype (str): Entity type ename (str, or iterable of str): unique entity id(s) Swagger: https://api.firecloud.org/#!/Entities/deleteEntities
3.31129
3.511453
0.942997
# Initial parameters for pagination params = { "page" : page, "pageSize" : page_size, "sortDirection" : sort_direction } if filter_terms: params['filterTerms'] = filter_terms uri = "workspaces/{0}/{1}/entityQuery/{2}".format(namespace,workspace,etype) return __get(uri, params=params)
def get_entities_query(namespace, workspace, etype, page=1, page_size=100, sort_direction="asc", filter_terms=None)
Paginated version of get_entities_with_type. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name Swagger: https://api.firecloud.org/#!/Entities/entityQuery
2.936949
3.441215
0.853463
headers = _fiss_agent_header({"Content-type": "application/json"}) uri = "{0}workspaces/{1}/{2}/entities/{3}/{4}".format(fcconfig.root_url, namespace, workspace, etype, ename) # FIXME: create __patch method, akin to __get, __delete etc return __SESSION.patch(uri, headers=headers, json=updates)
def update_entity(namespace, workspace, etype, ename, updates)
Update entity attributes in a workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name etype (str): Entity type ename (str): Entity name updates (list(dict)): List of updates to entity from _attr_set, e.g. Swagger: https://api.firecloud.org/#!/Entities/update_entity
9.507824
11.283084
0.842662
uri = "workspaces/{0}/{1}/methodconfigs".format(namespace, workspace) return __get(uri, params={'allRepos': allRepos})
def list_workspace_configs(namespace, workspace, allRepos=False)
List method configurations in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name Swagger: https://api.firecloud.org/#!/Method_Configurations/listWorkspaceMethodConfigs DUPLICATE: https://api.firecloud.org/#!/Workspaces/listWorkspaceMethodConfigs
6.171646
9.261141
0.666402
#json_body = { # "namespace" : mnamespace, # "name" : method, # "rootEntityType" : root_etype, # "inputs" : {}, # "outputs" : {}, # "prerequisites" : {} #} uri = "workspaces/{0}/{1}/methodconfigs".format(namespace, workspace) return __post(uri, json=body)
def create_workspace_config(namespace, workspace, body)
Create method configuration in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name body (json) : a filled-in JSON object for the new method config (e.g. see return value of get_workspace_config) Swagger: https://api.firecloud.org/#!/Method_Configurations/postWorkspaceMethodConfig DUPLICATE: https://api.firecloud.org/#!/Workspaces/postWorkspaceMethodConfig
5.257768
5.014183
1.048579
uri = "workspaces/{0}/{1}/method_configs/{2}/{3}".format(namespace, workspace, cnamespace, config) return __delete(uri)
def delete_workspace_config(namespace, workspace, cnamespace, config)
Delete method configuration in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name mnamespace (str): Method namespace method (str): Method name Swagger: https://api.firecloud.org/#!/Method_Configurations/deleteWorkspaceMethodConfig
6.16158
10.783263
0.571402
uri = "workspaces/{0}/{1}/method_configs/{2}/{3}".format(namespace, workspace, cnamespace, config) return __get(uri)
def get_workspace_config(namespace, workspace, cnamespace, config)
Get method configuration in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name cnamespace (str): Config namespace config (str): Config name Swagger: https://api.firecloud.org/#!/Method_Configurations/getWorkspaceMethodConfig
6.662724
8.036458
0.829062
headers = _fiss_agent_header({"Content-type": "application/json"}) uri = "workspaces/{0}/{1}/method_configs/{2}/{3}".format(namespace, workspace, cnamespace, configname) return __put(uri, headers=headers, json=body)
def overwrite_workspace_config(namespace, workspace, cnamespace, configname, body)
Add or overwrite method configuration in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name cnamespace (str): Configuration namespace configname (str): Configuration name body (json): new body (definition) of the method config Swagger: https://api.firecloud.org/#!/Method_Configurations/overwriteWorkspaceMethodConfig
6.123557
6.939252
0.882452
uri = "workspaces/{0}/{1}/method_configs/{2}/{3}".format(namespace, workspace, cnamespace, configname) return __post(uri, json=body)
def update_workspace_config(namespace, workspace, cnamespace, configname, body)
Update method configuration in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name cnamespace (str): Configuration namespace configname (str): Configuration name body (json): new body (definition) of the method config Swagger: https://api.firecloud.org/#!/Method_Configurations/updateWorkspaceMethodConfig
5.858142
7.72817
0.758024
uri = "workspaces/{0}/{1}/method_configs/{2}/{3}/validate".format(namespace, workspace, cnamespace, config) return __get(uri)
def validate_config(namespace, workspace, cnamespace, config)
Get syntax validation for a configuration. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name cnamespace (str): Configuration namespace config (str): Configuration name Swagger: https://api.firecloud.org/#!/Method_Configurations/validate_method_configuration
8.887426
10.385422
0.85576
body = { "namespace" : new_namespace, "name" : new_name, # I have no idea why this is required by FC, but it is... "workspaceName" : { "namespace" : namespace, "name" : workspace } } uri = "workspaces/{0}/{1}/method_configs/{2}/{3}/rename".format(namespace, workspace, cnamespace, config) return __post(uri, json=body)
def rename_workspace_config(namespace, workspace, cnamespace, config, new_namespace, new_name)
Rename a method configuration in a workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name mnamespace (str): Config namespace config (str): Config name new_namespace (str): Updated config namespace new_name (str): Updated method name Swagger: https://api.firecloud.org/#!/Method_Configurations/renameWorkspaceMethodConfig
4.407393
4.696891
0.938364
body = { "configurationNamespace" : from_cnamespace, "configurationName" : from_config, "configurationSnapshotId" : from_snapshot_id, "destinationNamespace" : to_cnamespace, "destinationName" : to_config } uri = "workspaces/{0}/{1}/method_configs/copyFromMethodRepo".format( namespace, workspace) return __post(uri, json=body)
def copy_config_from_repo(namespace, workspace, from_cnamespace, from_config, from_snapshot_id, to_cnamespace, to_config)
Copy a method config from the methods repository to a workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name from_cnamespace (str): Source configuration namespace from_config (str): Source configuration name from_snapshot_id (int): Source configuration snapshot_id to_cnamespace (str): Target configuration namespace to_config (str): Target configuration name Swagger: https://api.firecloud.org/#!/Method_Configurations/copyFromMethodRepo DUPLICATE: https://api.firecloud.org/#!/Method_Repository/copyFromMethodRepo
3.874381
3.300254
1.173965
body = { "configurationNamespace" : to_cnamespace, "configurationName" : to_config, "sourceNamespace" : from_cnamespace, "sourceName" : from_config } uri = "workspaces/{0}/{1}/method_configs/copyToMethodRepo".format( namespace, workspace) return __post(uri, json=body)
def copy_config_to_repo(namespace, workspace, from_cnamespace, from_config, to_cnamespace, to_config)
Copy a method config from a workspace to the methods repository. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name from_cnamespace (str): Source configuration namespace from_config (str): Source configuration name to_cnamespace (str): Target configuration namespace to_config (str): Target configuration name Swagger: https://api.firecloud.org/#!/Method_Configurations/copyToMethodRepo DUPLICATE: https://api.firecloud.org/#!/Method_Repository/copyToMethodRepo
4.366333
3.940883
1.107958
params = {k:v for (k,v) in locals().items() if v is not None} return __get("methods", params=params)
def list_repository_methods(namespace=None, name=None, snapshotId=None)
List method(s) in the methods repository. Args: namespace (str): Method Repository namespace name (str): method name snapshotId (int): method snapshot ID Swagger: https://api.firecloud.org/#!/Method_Repository/listMethodRepositoryMethods
5.588019
8.621756
0.64813
body = { "methodNamespace" : namespace, "methodName" : method, "methodVersion" : int(version) } return __post("template", json=body)
def get_config_template(namespace, method, version)
Get the configuration template for a method. The method should exist in the methods repository. Args: namespace (str): Method's namespace method (str): method name version (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/createMethodTemplate
6.106356
6.519085
0.936689
body = { "methodNamespace" : namespace, "methodName" : method, "methodVersion" : snapshot_id } return __post("inputsOutputs", json=body)
def get_inputs_outputs(namespace, method, snapshot_id)
Get a description of the inputs and outputs for a method. The method should exist in the methods repository. Args: namespace (str): Methods namespace method (str): method name snapshot_id (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/getMethodIO
5.828105
7.780753
0.749041
uri = "configurations/{0}/{1}/{2}".format(namespace, config, snapshot_id) return __get(uri)
def get_repository_config(namespace, config, snapshot_id)
Get a method configuration from the methods repository. Args: namespace (str): Methods namespace config (str): config name snapshot_id (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/getMethodRepositoryConfiguration
5.596405
10.941453
0.511486
uri = "methods/{0}/{1}/{2}?onlyPayload={3}".format(namespace, method, snapshot_id, str(wdl_only).lower()) return __get(uri)
def get_repository_method(namespace, method, snapshot_id, wdl_only=False)
Get a method definition from the method repository. Args: namespace (str): Methods namespace method (str): method name version (int): snapshot_id of the method wdl_only (bool): Exclude metadata Swagger: https://api.firecloud.org/#!/Method_Repository/get_api_methods_namespace_name_snapshotId
6.004138
9.143546
0.656653
with open(wdl, 'r') as wf: wdl_payload = wf.read() if doc is not None: with open (doc, 'r') as df: doc = df.read() body = { "namespace": namespace, "name": method, "entityType": "Workflow", "payload": wdl_payload, "documentation": doc, "synopsis": synopsis, "snapshotComment": comment } return __post("methods", json={key: value for key, value in body.items() if value})
def update_repository_method(namespace, method, synopsis, wdl, doc=None, comment="")
Create/Update workflow definition. FireCloud will create a new snapshot_id for the given workflow. Args: namespace (str): Methods namespace method (str): method name synopsis (str): short (<80 char) description of method wdl (file): Workflow Description Language file doc (file): (Optional) Additional documentation comment (str): (Optional) Comment specific to this snapshot Swagger: https://api.firecloud.org/#!/Method_Repository/post_api_methods
3.45707
3.499871
0.987771
uri = "methods/{0}/{1}/{2}".format(namespace, name, snapshot_id) return __delete(uri)
def delete_repository_method(namespace, name, snapshot_id)
Redacts a method and all of its associated configurations. The method should exist in the methods repository. Args: namespace (str): Methods namespace method (str): method name snapshot_id (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/delete_api_methods_namespace_name_snapshotId
5.162096
8.497038
0.607517
uri = "configurations/{0}/{1}/{2}".format(namespace, name, snapshot_id) return __delete(uri)
def delete_repository_config(namespace, name, snapshot_id)
Redacts a configuration and all of its associated configurations. The configuration should exist in the methods repository. Args: namespace (str): configuration namespace configuration (str): configuration name snapshot_id (int): snapshot_id of the configuration Swagger: https://api.firecloud.org/#!/Method_Repository/delete_api_configurations_namespace_name_snapshotId
5.031746
8.478127
0.593497
uri = "methods/{0}/{1}/{2}/permissions".format(namespace,method,snapshot_id) return __get(uri)
def get_repository_method_acl(namespace, method, snapshot_id)
Get permissions for a method. The method should exist in the methods repository. Args: namespace (str): Methods namespace method (str): method name version (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/getMethodACL
5.863867
11.66965
0.502489
uri = "methods/{0}/{1}/{2}/permissions".format(namespace,method,snapshot_id) return __post(uri, json=acl_updates)
def update_repository_method_acl(namespace, method, snapshot_id, acl_updates)
Set method permissions. The method should exist in the methods repository. Args: namespace (str): Methods namespace method (str): method name snapshot_id (int): snapshot_id of the method acl_updates (list(dict)): List of access control updates Swagger: https://api.firecloud.org/#!/Method_Repository/setMethodACL
5.943749
7.674299
0.774501
uri = "configurations/{0}/{1}/{2}/permissions".format(namespace, config, snapshot_id) return __get(uri)
def get_repository_config_acl(namespace, config, snapshot_id)
Get configuration permissions. The configuration should exist in the methods repository. Args: namespace (str): Configuration namespace config (str): Configuration name snapshot_id (int): snapshot_id of the method Swagger: https://api.firecloud.org/#!/Method_Repository/getConfigACL
7.377395
13.718435
0.537772
uri = "configurations/{0}/{1}/{2}/permissions".format(namespace, config, snapshot_id) return __post(uri, json=acl_updates)
def update_repository_config_acl(namespace, config, snapshot_id, acl_updates)
Set configuration permissions. The configuration should exist in the methods repository. Args: namespace (str): Configuration namespace config (str): Configuration name snapshot_id (int): snapshot_id of the method acl_updates (list(dict)): List of access control updates Swagger: https://api.firecloud.org/#!/Method_Repository/setConfigACL
7.018434
9.50923
0.738065
uri = "workspaces/{0}/{1}/submissions".format(wnamespace, workspace) body = { "methodConfigurationNamespace" : cnamespace, "methodConfigurationName" : config, "entityType" : etype, "entityName" : entity, "useCallCache" : use_callcache } if expression: body['expression'] = expression return __post(uri, json=body)
def create_submission(wnamespace, workspace, cnamespace, config, entity, etype, expression=None, use_callcache=True)
Submit job in FireCloud workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name cnamespace (str): Method configuration namespace config (str): Method configuration name entity (str): Entity to submit job on. Should be the same type as the root entity type of the method config, unless an expression is used etype (str): Entity type of root_entity expression (str): Instead of using entity as the root entity, evaluate the root entity from this expression. use_callcache (bool): use call cache if applicable (default: true) Swagger: https://api.firecloud.org/#!/Submissions/createSubmission
3.297909
3.358622
0.981923
uri = "workspaces/{0}/{1}/submissions/{2}".format(namespace, workspace, submission_id) return __delete(uri)
def abort_submission(namespace, workspace, submission_id)
Abort running job in a workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name submission_id (str): Submission's unique identifier Swagger: https://api.firecloud.org/#!/Submissions/deleteSubmission
5.077279
12.268434
0.413849
uri = "workspaces/{0}/{1}/submissions/{2}".format(namespace, workspace, submission_id) return __get(uri)
def get_submission(namespace, workspace, submission_id)
Request submission information. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name submission_id (str): Submission's unique identifier Swagger: https://api.firecloud.org/#!/Submissions/monitorSubmission
4.753302
12.959605
0.366778
uri = "workspaces/{0}/{1}/submissions/{2}/workflows/{3}".format(namespace, workspace, submission_id, workflow_id) return __get(uri)
def get_workflow_metadata(namespace, workspace, submission_id, workflow_id)
Request the metadata for a workflow in a submission. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name submission_id (str): Submission's unique identifier workflow_id (str): Workflow's unique identifier. Swagger: https://api.firecloud.org/#!/Submissions/workflowMetadata
3.784556
8.741958
0.432919
uri = "workspaces/{0}/{1}/".format(namespace, workspace) uri += "submissions/{0}/workflows/{1}/outputs".format(submission_id, workflow_id) return __get(uri)
def get_workflow_outputs(namespace, workspace, submission_id, workflow_id)
Request the outputs for a workflow in a submission. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name submission_id (str): Submission's unique identifier workflow_id (str): Workflow's unique identifier. Swagger: https://api.firecloud.org/#!/Submissions/workflowOutputsInSubmission
3.50757
6.206892
0.565109
if not attributes: attributes = dict() body = { "namespace": namespace, "name": name, "attributes": attributes } if authorizationDomain: authDomain = [{"membersGroupName": authorizationDomain}] else: authDomain = [] body["authorizationDomain"] = authDomain return __post("workspaces", json=body)
def create_workspace(namespace, name, authorizationDomain="", attributes=None)
Create a new FireCloud Workspace. Args: namespace (str): project to which workspace belongs name (str): Workspace name protected (bool): If True, this workspace is protected by dbGaP credentials. This option is only available if your FireCloud account is linked to your NIH account. attributes (dict): Workspace attributes as key value pairs Swagger: https://api.firecloud.org/#!/Workspaces/createWorkspace
4.091576
4.482445
0.9128
uri = "{0}workspaces/{1}/{2}/acl?inviteUsersNotFound={3}".format(fcconfig.root_url, namespace, workspace, str(invite_users_not_found).lower()) headers = _fiss_agent_header({"Content-type": "application/json"}) # FIXME: create __patch method, akin to __get, __delete etc return __SESSION.patch(uri, headers=headers, data=json.dumps(acl_updates))
def update_workspace_acl(namespace, workspace, acl_updates, invite_users_not_found=False)
Update workspace access control list. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name acl_updates (list(dict)): Acl updates as dicts with two keys: "email" - Firecloud user email "accessLevel" - one of "OWNER", "READER", "WRITER", "NO ACCESS" Example: {"email":"user1@mail.com", "accessLevel":"WRITER"} invite_users_not_found (bool): true to invite unregistered users, false to ignore Swagger: https://api.firecloud.org/#!/Workspaces/updateWorkspaceACL
8.123955
8.55609
0.949494
if authorizationDomain: if isinstance(authorizationDomain, string_types): authDomain = [{"membersGroupName": authorizationDomain}] else: authDomain = [{"membersGroupName": authDomain} for authDomain in authorizationDomain] else: authDomain = [] body = { "namespace": to_namespace, "name": to_workspace, "attributes": dict(), "authorizationDomain": authDomain, } uri = "workspaces/{0}/{1}/clone".format(from_namespace, from_workspace) return __post(uri, json=body)
def clone_workspace(from_namespace, from_workspace, to_namespace, to_workspace, authorizationDomain="")
Clone a FireCloud workspace. A clone is a shallow copy of a FireCloud workspace, enabling easy sharing of data, such as TCGA data, without duplication. Args: from_namespace (str): project (namespace) to which source workspace belongs from_workspace (str): Source workspace's name to_namespace (str): project to which target workspace belongs to_workspace (str): Target workspace's name authorizationDomain: (str) required authorization domains Swagger: https://api.firecloud.org/#!/Workspaces/cloneWorkspace
3.403107
3.465734
0.98193
headers = _fiss_agent_header({"Content-type": "application/json"}) uri = "{0}workspaces/{1}/{2}/updateAttributes".format(fcconfig.root_url, namespace, workspace) body = json.dumps(attrs) # FIXME: create __patch method, akin to __get, __delete etc return __SESSION.patch(uri, headers=headers, data=body)
def update_workspace_attributes(namespace, workspace, attrs)
Update or remove workspace attributes. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name attrs (list(dict)): List of update operations for workspace attributes. Use the helper dictionary construction functions to create these: _attr_set() : Set/Update attribute _attr_rem() : Remove attribute _attr_ladd() : Add list member to attribute _attr_lrem() : Remove list member from attribute Swagger: https://api.firecloud.org/#!/Workspaces/updateAttributes
10.341503
11.93841
0.866238
uri = "groups/{0}/{1}/{2}".format(group, role, email) return __put(uri)
def add_user_to_group(group, role, email)
Add a user to a group the caller owns Args: group (str): Group name role (str) : Role of user for group; either 'member' or 'admin' email (str): Email of user or group to add Swagger: https://api.firecloud.org/#!/Groups/addUserToGroup
5.589357
11.998446
0.46584
uri = "groups/{0}/{1}/{2}".format(group, role, email) return __delete(uri)
def remove_user_from_group(group, role, email)
Remove a user from a group the caller owns Args: group (str): Group name role (str) : Role of user for group; either 'member' or 'admin' email (str): Email of user or group to remove Swagger: https://api.firecloud.org/#!/Groups/removeUserFromGroup
4.684332
10.301043
0.454743
#print_("DEV PLUGIN: Loaded commands") prsr = subparsers.add_parser( 'upload', description='Copy the file or directory into the given') prsr.add_argument('workspace', help='Workspace name') prsr.add_argument('source', help='File or directory to upload') prsr.add_argument('-s', '--show', action='store_true', help="Show the gsutil command, but don't run it") dest_help = 'Destination relative to the bucket root. ' dest_help += 'If omitted the file will be placed in the root directory' prsr.add_argument('-d', '--destination', help=dest_help) prsr.set_defaults(func=upload)
def register_commands(self, subparsers)
Add commands to a list of subparsers. This will be called by Fissfc to add additional command targets from this plugin. Each command added should follow the pattern: parser = subparsers.add_parser('cmd', ...) parser.add_argument(...) ... parser.set_defaults(func=do_my_cmd) where do_my_cmd is a function that takes one argument "args": def do_my_cmd(args): pass
5.175791
5.680583
0.911137
r = fapi.update_workflow(namespace, name, synopsis, wdl, documentation, api_url) fapi._check_response_code(r, 201) d = r.json() return Method(namespace, name, d["snapshotId"])
def new(namespace, name, wdl, synopsis, documentation=None, api_url=fapi.PROD_API_ROOT)
Create new FireCloud method. If the namespace + name already exists, a new snapshot is created. Args: namespace (str): Method namespace for this method name (str): Method name wdl (file): WDL description synopsis (str): Short description of task documentation (file): Extra documentation for method
4.477047
5.542793
0.807724
r = fapi.get_config_template(self.namespace, self.name, self.snapshot_id, self.api_url) fapi._check_response_code(r, 200) return r.json()
def template(self)
Return a method template for this method.
6.065407
6.123729
0.990476
r = fapi.get_inputs_outputs(self.namespace, self.name, self.snapshot_id, self.api_url) fapi._check_response_code(r, 200) return r.json()
def inputs_outputs(self)
Get information on method inputs & outputs.
5.883956
5.265218
1.117514
r = fapi.get_repository_method_acl( self.namespace, self.name, self.snapshot_id, self.api_url) fapi._check_response_code(r, 200) return r.json()
def acl(self)
Get the access control list for this method.
5.999015
5.344354
1.122496
acl_updates = [{"user": user, "role": role} for user in users] r = fapi.update_repository_method_acl( self.namespace, self.name, self.snapshot_id, acl_updates, self.api_url ) fapi._check_response_code(r, 200)
def set_acl(self, role, users)
Set permissions for this method. Args: role (str): Access level one of {one of "OWNER", "READER", "WRITER", "NO ACCESS"} users (list(str)): List of users to give role to
4.867137
5.152627
0.944593
if which('gcloud') is None: gcloud_path = os.path.join(os.path.expanduser('~'), 'google-cloud-sdk', 'bin') env_path = os.getenv('PATH') if os.path.isdir(gcloud_path): if env_path is not None: os.environ['PATH'] = gcloud_path + os.pathsep + env_path else: os.environ['PATH'] = gcloud_path
def __ensure_gcloud()
The *NIX installer is not guaranteed to add the google cloud sdk to the user's PATH (the Windows installer does). This ensures that if the default directory for the executables exists, it is added to the PATH for the duration of this package's use.
1.99114
1.889343
1.05388
''' Read initial configuration state, from named config files; store this state within a config dictionary (which may be nested) whose keys may also be referenced as attributes (safely, defaulting to None if unset). A config object may be passed in, as a way of accumulating or overwriting configuration state; if one is NOT passed, the default config obj is used ''' local_config = config config = __fcconfig cfgparser = configparser.SafeConfigParser() filenames = list() # Give personal/user followed by current working directory configuration the first say filenames.append(os.path.join(os.path.expanduser('~'), config_profile)) filenames.append(os.path.join(os.getcwd(), config_profile)) if files: if isinstance(files, string_types): filenames.append(files) elif isinstance(files, Iterable): for f in files: if isinstance(f, IOBase): f = f.name filenames.append(f) cfgparser.read(filenames) # [DEFAULT] defines common variables for interpolation/substitution in # other sections, and are stored at the root level of the config object for keyval in cfgparser.items('DEFAULT'): #print("config_parse: adding config variable %s=%s" % (keyval[0], str(keyval[1]))) __fcconfig[keyval[0]] = keyval[1] for section in cfgparser.sections(): config[section] = attrdict() for option in cfgparser.options(section): # DEFAULT vars ALSO behave as though they were defined in every # section, but we purposely skip them here so that each section # reflects only the options explicitly defined in that section if not config[option]: config[section][option] = cfgparser.get(section, option) config.verbosity = int(config.verbosity) if not config.root_url.endswith('/'): config.root_url += '/' if os.path.isfile(config.credentials): os.environ[environment_vars.CREDENTIALS] = config.credentials # if local_config override options with passed options if local_config is not None: for key, value in local_config.items(): config[key] = value # if any explict config options are passed override. for key, value in kwargs.items(): config[key] = value return config
def config_parse(files=None, config=None, config_profile=".fissconfig", **kwargs)
Read initial configuration state, from named config files; store this state within a config dictionary (which may be nested) whose keys may also be referenced as attributes (safely, defaulting to None if unset). A config object may be passed in, as a way of accumulating or overwriting configuration state; if one is NOT passed, the default config obj is used
5.605304
3.707769
1.511772
r = fapi.create_workspace(namespace, name, protected, attributes, api_url) fapi._check_response_code(r, 201) return Workspace(namespace, name, api_url)
def new(namespace, name, protected=False, attributes=dict(), api_url=fapi.PROD_API_ROOT)
Create a new FireCloud workspace. Returns: Workspace: A new FireCloud workspace Raises: FireCloudServerError: API call failed.
3.083229
3.597633
0.857016
r = fapi.get_workspace(self.namespace, self.name, self.api_url) fapi._check_response_code(r, 200) self.data = r.json() return self
def refresh(self)
Reload workspace metadata from firecloud. Workspace metadata is cached in the data attribute of a Workspace, and may become stale, requiring a refresh().
5.376301
3.921556
1.370961
r = fapi.delete_workspace(self.namespace, self.name) fapi._check_response_code(r, 202)
def delete(self)
Delete the workspace from FireCloud. Note: This action cannot be undone. Be careful!
6.403953
5.04702
1.268858
r = fapi.lock_workspace(self.namespace, self.name, self.api_url) fapi._check_response_code(r, 204) self.data['workspace']['isLocked'] = True return self
def lock(self)
Lock this Workspace. This causes the workspace to behave in a read-only way, regardless of access permissions.
6.418757
6.261473
1.025119
r = fapi.unlock_workspace(self.namespace, self.name, self.api_url) fapi._check_response_code(r, 204) self.data['workspace']['isLocked'] = False return self
def unlock(self)
Unlock this Workspace.
6.360489
5.460395
1.164841
update = [fapi._attr_up(attr, value)] r = fapi.update_workspace_attributes(self.namespace, self.name, update, self.api_url) fapi._check_response_code(r, 200)
def update_attribute(self, attr, value)
Set the value of a workspace attribute.
6.690888
5.512777
1.213705
update = [fapi._attr_rem(attr)] r = fapi.update_workspace_attributes(self.namespace, self.name, update, self.api_url) self.data["workspace"]["attributes"].pop(attr, None) fapi._check_response_code(r, 200)
def remove_attribute(self, attr)
Remove attribute from a workspace. Args: attr (str): attribute name
7.389754
8.110847
0.911095
r = fapi.upload_entities_tsv(self.namespace, self.name, self.tsv_file, self.api_url) fapi._check_response_code(r, 201)
def import_tsv(self, tsv_file)
Upload entity data to workspace from tsv loadfile. Args: tsv_file (file): Tab-delimited file of entity data
7.054965
7.126657
0.98994
r = fapi.get_entity(self.namespace, self.name, etype, entity_id, self.api_url) fapi._check_response_code(r, 200) dresp = r.json() return Entity(etype, entity_id, dresp['attributes'])
def get_entity(self, etype, entity_id)
Return entity in this workspace. Args: etype (str): Entity type entity_id (str): Entity name/unique id
4.825043
5.57717
0.865142
r = fapi.delete_entity(self.namespace, self.name, etype, entity_id, self.api_url) fapi._check_response_code(r, 202)
def delete_entity(self, etype, entity_id)
Delete an entity in this workspace. Args: etype (str): Entity type entity_id (str): Entity name/unique id
5.262965
7.292327
0.721713
edata = Entity.create_payload(entities) r = fapi.upload_entities(self.namespace, self.name, edata, self.api_url) fapi._check_response_code(r, 201)
def import_entities(self, entities)
Upload entity objects. Args: entities: iterable of firecloud.Entity objects.
7.07711
7.210711
0.981472
if etype not in {"sample", "pair", "participant"}: raise ValueError("Unsupported entity type:" + str(etype)) payload = "membership:" + etype + "_set_id\t" + etype + "_id\n" for e in entities: if e.etype != etype: msg = "Entity type '" + e.etype + "' does not match " msg += "set type '" + etype + "'" raise ValueError(msg) payload += set_id + '\t' + e.entity_id + '\n' r = fapi.upload_entities(self.namespace, self.name, payload, self.api_url) fapi._check_response_code(r, 201)
def create_set(self, set_id, etype, entities)
Create a set of entities and upload to FireCloud. Args etype (str): one of {"sample, "pair", "participant"} entities: iterable of firecloud.Entity objects.
4.533764
3.902619
1.161724
r = fapi.get_submissions(self.namespace, self.name, self.api_url) fapi._check_response_code(r, 200) return r.json()
def submissions(self)
List job submissions in workspace.
5.340859
4.53833
1.176834
r = fapi.get_entity_types(self.namespace, self.name, self.api_url) fapi._check_response_code(r, 200) return r.json().keys()
def entity_types(self)
List entity types in workspace.
5.309985
4.60181
1.15389
r = fapi.get_entities_with_type(self.namespace, self.name, self.api_url) fapi._check_response_code(r, 200) edicts = r.json() return [Entity(e['entityType'], e['name'], e['attributes']) for e in edicts]
def entities(self)
List all entities in workspace.
5.110106
4.604702
1.109758
r = fapi.get_entities(self.namespace, self.name, etype, self.api_url) fapi._check_response_code(r, 200) return [Entity(e['entityType'], e['name'], e['attributes']) for e in r.json()]
def __get_entities(self, etype)
Helper to get entities for a given type.
4.881608
4.456866
1.095301
r = fapi.copy_entities(from_namespace, from_workspace, self.namespace, self.name, etype, enames, self.api_url) fapi._check_response_code(r, 201)
def copy_entities(self, from_namespace, from_workspace, etype, enames)
Copy entities from another workspace. Args: from_namespace (str): Source workspace namespace from_workspace (str): Source workspace name etype (str): Entity type enames (list(str)): List of entity names to copy
3.801906
4.998372
0.760629
raise NotImplementedError r = fapi.get_configs(self.namespace, self.name, self.api_url) fapi._check_response_code(r, 200) cdata = r.json() configs = [] for c in cdata: cnamespace = c['namespace'] cname = c['name'] root_etype = c['rootEntityType'] method_namespace = c['methodRepoMethod']['methodNamespace'] method_name = c['methodRepoMethod']['methodName'] method_version = c['methodRepoMethod']['methodVersion']
def configs(self)
Get method configurations in a workspace.
4.493305
4.0814
1.100922
r = fapi.get_workspace_acl(self.namespace, self.name, self.api_url) fapi._check_response_code(r, 200) return r.json()
def acl(self)
Get the access control list for this workspace.
5.161176
3.749751
1.376405
r = fapi.clone_workspace(self.namespace, self.name, to_namespace, to_name, self.api_url) fapi._check_response_code(r, 201) return Workspace(to_namespace, to_name, self.api_url)
def clone(self, to_namespace, to_name)
Clone this workspace. Args: to_namespace (str): Target workspace namespace to_name (str): Target workspace name
3.429074
3.415239
1.004051
# Get arguments logging.info("Initializing FireCloud Supervisor...") logging.info("Saving recovery checkpoints to " + recovery_file) # Parse workflow description # these three objects must be saved in order to recover the supervisor args = { 'project' : project, 'workspace': workspace, 'namespace': namespace, 'workflow' : workflow, 'sample_sets': sample_sets } monitor_data, dependencies = init_supervisor_data(workflow, sample_sets) recovery_data = { 'args' : args, 'monitor_data' : monitor_data, 'dependencies' : dependencies } # Monitor loop. Keep going until all nodes have been evaluated supervise_until_complete(monitor_data, dependencies, args, recovery_file)
def supervise(project, workspace, namespace, workflow, sample_sets, recovery_file)
Supervise submission of jobs from a Firehose-style workflow of workflows
5.521318
5.76918
0.957037
with open(dotfile) as wf: graph_data = wf.read() graph = pydot.graph_from_dot_data(graph_data)[0] nodes = [n.get_name().strip('"') for n in graph.get_nodes()] monitor_data = dict() dependencies = {n:[] for n in nodes} # Initialize empty list of dependencies for n in nodes: monitor_data[n] = dict() for sset in sample_sets: monitor_data[n][sset] = { 'state' : "Not Started", 'evaluated' : False, 'succeeded' : False } edges = graph.get_edges() # Iterate over the edges, and get the dependency information for each node for e in edges: source = e.get_source().strip('"') dest = e.get_destination().strip('"') dep = e.get_attributes() dep['upstream_task'] = source dependencies[dest].append(dep) return monitor_data, dependencies
def init_supervisor_data(dotfile, sample_sets)
Parse a workflow description written in DOT (like Firehose)
3.036683
2.961992
1.025216