Columns:
  source_code    string (length 45 to 7.24k)
  function_name  string (length 1 to 111)
  messages       list
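Each messages value follows the same chat layout: a fixed system prompt asking for a function name, the anonymized source (with the function name replaced by x) as the user turn, and the original function name as the assistant turn. Below is a minimal sketch of how one such record could be assembled from the first two columns; the build_record helper is an illustrative assumption, not part of the dataset itself.

import json

SYSTEM_PROMPT = ("Given the source code of a python function, "
                 "suggest a fitting name for the function.")

def build_record(source_code: str, function_name: str) -> dict:
    # Mirrors the (source_code, function_name, messages) row layout shown in the preview below.
    return {
        "source_code": source_code,
        "function_name": function_name,
        "messages": [
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": source_code},
            {"role": "assistant", "content": function_name},
        ],
    }

# Example using the first row of the preview:
example = build_record(
    "def x(zero_ok=False, more_ok=True, **kwargs):\n"
    "    return _find_one(find_projects, zero_ok=zero_ok, more_ok=more_ok, **kwargs)\n",
    "find_one_project",
)
print(json.dumps(example, indent=2))

The rows that follow list each record in the order source_code, function_name, messages.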
def x(zero_ok=False, more_ok=True, **kwargs): return _find_one(find_projects, zero_ok=zero_ok, more_ok=more_ok, **kwargs)
find_one_project
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(zero_ok=False, more_ok=True, **kwargs):\n return _find_one(find_projects, zero_ok=zero_ok, more_ok=more_ok, **kwargs)\n", "role": "user" }, { "content": "find_one_project", "role": "assistant" } ]
def x(zero_ok=False, more_ok=True, **kwargs): return _find_one(find_apps, zero_ok=zero_ok, more_ok=more_ok, **kwargs)
find_one_app
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(zero_ok=False, more_ok=True, **kwargs):\n return _find_one(find_apps, zero_ok=zero_ok, more_ok=more_ok, **kwargs)\n", "role": "user" }, { "content": "find_one_app", "role": "assistant" } ]
def x(api_method, org_id, query, first_page_size=100): if 'limit' not in query: query['limit'] = min(first_page_size, 1000) while True: resp = api_method(org_id, query) for result in resp['results']: yield result if resp['next'] is not None: query['starting'] = resp['next'] query['limit'] = min(query['limit'] * 2, 1000) else: break
org_find
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(api_method, org_id, query, first_page_size=100):\n if 'limit' not in query:\n query['limit'] = min(first_page_size, 1000)\n while True:\n resp = api_method(org_id, query)\n for result in resp['results']:\n yield result\n if resp['next'] is not None:\n query['starting'] = resp['next']\n query['limit'] = min(query['limit'] * 2, 1000)\n else:\n break\n", "role": "user" }, { "content": "org_find", "role": "assistant" } ]
def x(org_id=None, level=None, describe=False): query = {} if level is not None: query['level'] = level query['describe'] = describe return _org_find(dxpy.api.org_find_members, org_id, query)
org_find_members
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(org_id=None, level=None, describe=False):\n query = {}\n if level is not None:\n query['level'] = level\n query['describe'] = describe\n return _org_find(dxpy.api.org_find_members, org_id, query)\n", "role": "user" }, { "content": "org_find_members", "role": "assistant" } ]
def x(org_id=None, name=None, name_mode='exact', ids=None, properties=None, tags=None, describe=False, public=None, created_after=None, created_before=None, region=None, containsPHI=None): query = {} if name is not None: if name_mode == 'exact': query['name'] = name elif name_mode == 'glob': query['name'] = {'glob': name} elif name_mode == 'regexp': query['name'] = {'regexp': name} else: raise DXError( 'org_find_projects: Unexpected value found for argument name_mode' ) if ids is not None: query['id'] = ids if properties is not None: if len(properties.keys()) == 1: query['properties'] = properties else: query['properties'] = {'$and': [{k: v} for k, v in properties. iteritems()]} if tags is not None: if len(tags) == 1: query['tags'] = tags[0] else: query['tags'] = {'$and': tags} query['describe'] = describe if public is not None: query['public'] = public if created_after is not None or created_before is not None: query['created'] = {} if created_after is not None: query['created']['after'] = dxpy.utils.normalize_time_input( created_after) if created_before is not None: query['created']['before'] = dxpy.utils.normalize_time_input( created_before) if region is not None: query['region'] = region if containsPHI is not None: query['containsPHI'] = containsPHI return _org_find(dxpy.api.org_find_projects, org_id, query)
org_find_projects
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(org_id=None, name=None, name_mode='exact', ids=None, properties=None,\n tags=None, describe=False, public=None, created_after=None,\n created_before=None, region=None, containsPHI=None):\n query = {}\n if name is not None:\n if name_mode == 'exact':\n query['name'] = name\n elif name_mode == 'glob':\n query['name'] = {'glob': name}\n elif name_mode == 'regexp':\n query['name'] = {'regexp': name}\n else:\n raise DXError(\n 'org_find_projects: Unexpected value found for argument name_mode'\n )\n if ids is not None:\n query['id'] = ids\n if properties is not None:\n if len(properties.keys()) == 1:\n query['properties'] = properties\n else:\n query['properties'] = {'$and': [{k: v} for k, v in properties.\n iteritems()]}\n if tags is not None:\n if len(tags) == 1:\n query['tags'] = tags[0]\n else:\n query['tags'] = {'$and': tags}\n query['describe'] = describe\n if public is not None:\n query['public'] = public\n if created_after is not None or created_before is not None:\n query['created'] = {}\n if created_after is not None:\n query['created']['after'] = dxpy.utils.normalize_time_input(\n created_after)\n if created_before is not None:\n query['created']['before'] = dxpy.utils.normalize_time_input(\n created_before)\n if region is not None:\n query['region'] = region\n if containsPHI is not None:\n query['containsPHI'] = containsPHI\n return _org_find(dxpy.api.org_find_projects, org_id, query)\n", "role": "user" }, { "content": "org_find_projects", "role": "assistant" } ]
def x(org_id, name=None, name_mode='exact', category=None, all_versions= None, published=None, created_by=None, developer=None, authorized_user= None, created_after=None, created_before=None, modified_after=None, modified_before=None, describe=False, limit=None, return_handler=False, first_page_size=100, **kwargs): query = {} if name is not None: if name_mode == 'exact': query['name'] = name elif name_mode == 'glob': query['name'] = {'glob': name} elif name_mode == 'regexp': query['name'] = {'regexp': name} else: raise DXError( 'find_apps: Unexpected value found for argument name_mode') if category is not None: query['category'] = category if all_versions is not None: query['allVersions'] = all_versions if published is not None: query['published'] = published if created_by is not None: query['createdBy'] = created_by if developer is not None: query['developer'] = developer if authorized_user is not None: query['authorizedUser'] = authorized_user if modified_after is not None or modified_before is not None: query['modified'] = {} if modified_after is not None: query['modified']['after'] = dxpy.utils.normalize_time_input( modified_after) if modified_before is not None: query['modified']['before'] = dxpy.utils.normalize_time_input( modified_before) if created_after is not None or created_before is not None: query['created'] = {} if created_after is not None: query['created']['after'] = dxpy.utils.normalize_time_input( created_after) if created_before is not None: query['created']['before'] = dxpy.utils.normalize_time_input( created_before) if describe is not None and describe is not False: query['describe'] = describe if limit is not None: query['limit'] = limit return _org_find(dxpy.api.org_find_apps, org_id, query)
org_find_apps
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(org_id, name=None, name_mode='exact', category=None, all_versions=\n None, published=None, created_by=None, developer=None, authorized_user=\n None, created_after=None, created_before=None, modified_after=None,\n modified_before=None, describe=False, limit=None, return_handler=False,\n first_page_size=100, **kwargs):\n query = {}\n if name is not None:\n if name_mode == 'exact':\n query['name'] = name\n elif name_mode == 'glob':\n query['name'] = {'glob': name}\n elif name_mode == 'regexp':\n query['name'] = {'regexp': name}\n else:\n raise DXError(\n 'find_apps: Unexpected value found for argument name_mode')\n if category is not None:\n query['category'] = category\n if all_versions is not None:\n query['allVersions'] = all_versions\n if published is not None:\n query['published'] = published\n if created_by is not None:\n query['createdBy'] = created_by\n if developer is not None:\n query['developer'] = developer\n if authorized_user is not None:\n query['authorizedUser'] = authorized_user\n if modified_after is not None or modified_before is not None:\n query['modified'] = {}\n if modified_after is not None:\n query['modified']['after'] = dxpy.utils.normalize_time_input(\n modified_after)\n if modified_before is not None:\n query['modified']['before'] = dxpy.utils.normalize_time_input(\n modified_before)\n if created_after is not None or created_before is not None:\n query['created'] = {}\n if created_after is not None:\n query['created']['after'] = dxpy.utils.normalize_time_input(\n created_after)\n if created_before is not None:\n query['created']['before'] = dxpy.utils.normalize_time_input(\n created_before)\n if describe is not None and describe is not False:\n query['describe'] = describe\n if limit is not None:\n query['limit'] = limit\n return _org_find(dxpy.api.org_find_apps, org_id, query)\n", "role": "user" }, { "content": "org_find_apps", "role": "assistant" } ]
def x(query, first_page_size=10): return _find(dxpy.api.system_find_orgs, query, limit=None, return_handler=False, first_page_size=first_page_size)
find_orgs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(query, first_page_size=10):\n return _find(dxpy.api.system_find_orgs, query, limit=None,\n return_handler=False, first_page_size=first_page_size)\n", "role": "user" }, { "content": "find_orgs", "role": "assistant" } ]
def x(idir, dirs): file_load_utils.ensure_dir(idir) for d in dirs: file_load_utils.ensure_dir(os.path.join(idir, d))
create_dirs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(idir, dirs):\n file_load_utils.ensure_dir(idir)\n for d in dirs:\n file_load_utils.ensure_dir(os.path.join(idir, d))\n", "role": "user" }, { "content": "create_dirs", "role": "assistant" } ]
def x(filtered_inputs): file_key_descs, _ignore = file_load_utils.analyze_bash_vars(file_load_utils .get_input_json_file(), None) flattened_dict = {} def x(key, value, dict_): if key not in dict_: dict_[key] = value for input_ in filtered_inputs: if input_ not in file_key_descs: continue input_var_dict = file_key_descs[input_] add_if_no_collision(input_ + '_path', input_var_dict['path'], flattened_dict) add_if_no_collision(input_ + '_name', input_var_dict['basename'], flattened_dict) add_if_no_collision(input_ + '_prefix', input_var_dict['prefix'], flattened_dict) return flattened_dict
gen_helper_dict
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(filtered_inputs):\n file_key_descs, _ignore = file_load_utils.analyze_bash_vars(file_load_utils\n .get_input_json_file(), None)\n flattened_dict = {}\n\n def x(key, value, dict_):\n if key not in dict_:\n dict_[key] = value\n for input_ in filtered_inputs:\n if input_ not in file_key_descs:\n continue\n input_var_dict = file_key_descs[input_]\n add_if_no_collision(input_ + '_path', input_var_dict['path'],\n flattened_dict)\n add_if_no_collision(input_ + '_name', input_var_dict['basename'],\n flattened_dict)\n add_if_no_collision(input_ + '_prefix', input_var_dict['prefix'],\n flattened_dict)\n return flattened_dict\n", "role": "user" }, { "content": "gen_helper_dict", "role": "assistant" } ]
def x(max_threads, num_cores, mem_available_mb): return min(max_threads, num_cores, max(int(mem_available_mb / 1200), 1))
get_num_parallel_threads
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(max_threads, num_cores, mem_available_mb):\n return min(max_threads, num_cores, max(int(mem_available_mb / 1200), 1))\n", "role": "user" }, { "content": "get_num_parallel_threads", "role": "assistant" } ]
def x(exclude=None, parallel=False, max_threads=8): idir = file_load_utils.get_input_dir() try: job_input_file = file_load_utils.get_input_json_file() dirs, inputs, rest = file_load_utils.get_job_input_filenames( job_input_file) except IOError: msg = 'Error: Could not find the input json file: {0}.\n'.format( job_input_file) msg += ( ' This function should only be called from within a running job.' ) print(msg) raise dirs_to_create = [] for d in dirs: keep = True if exclude is not None and d is not None: if d.split('/')[0] in exclude: keep = False if keep: dirs_to_create.append(d) _create_dirs(idir, dirs_to_create) if exclude: inputs = file_load_utils.filter_dict(inputs, exclude) to_download = [] for ival_list in inputs.values(): to_download.extend(ival_list) if parallel: total_mem = psutil.virtual_memory().total >> 20 num_cores = multiprocessing.cpu_count() max_num_parallel_downloads = _get_num_parallel_threads(max_threads, num_cores, total_mem) sys.stderr.write('Downloading files using {} threads'.format( max_num_parallel_downloads)) _parallel_file_download(to_download, idir, max_num_parallel_downloads) else: _sequential_file_download(to_download, idir) helper_vars = _gen_helper_dict(inputs) return helper_vars
download_all_inputs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(exclude=None, parallel=False, max_threads=8):\n idir = file_load_utils.get_input_dir()\n try:\n job_input_file = file_load_utils.get_input_json_file()\n dirs, inputs, rest = file_load_utils.get_job_input_filenames(\n job_input_file)\n except IOError:\n msg = 'Error: Could not find the input json file: {0}.\\n'.format(\n job_input_file)\n msg += (\n ' This function should only be called from within a running job.'\n )\n print(msg)\n raise\n dirs_to_create = []\n for d in dirs:\n keep = True\n if exclude is not None and d is not None:\n if d.split('/')[0] in exclude:\n keep = False\n if keep:\n dirs_to_create.append(d)\n _create_dirs(idir, dirs_to_create)\n if exclude:\n inputs = file_load_utils.filter_dict(inputs, exclude)\n to_download = []\n for ival_list in inputs.values():\n to_download.extend(ival_list)\n if parallel:\n total_mem = psutil.virtual_memory().total >> 20\n num_cores = multiprocessing.cpu_count()\n max_num_parallel_downloads = _get_num_parallel_threads(max_threads,\n num_cores, total_mem)\n sys.stderr.write('Downloading files using {} threads'.format(\n max_num_parallel_downloads))\n _parallel_file_download(to_download, idir, max_num_parallel_downloads)\n else:\n _sequential_file_download(to_download, idir)\n helper_vars = _gen_helper_dict(inputs)\n return helper_vars\n", "role": "user" }, { "content": "download_all_inputs", "role": "assistant" } ]
def x(string, ignored_prefix=' '): paragraphs = string.split('\n\n') refilled_paragraphs = [(fill(paragraph) if not paragraph.startswith( ignored_prefix) else paragraph) for paragraph in paragraphs] return '\n\n'.join(refilled_paragraphs).strip('\n')
refill_paragraphs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(string, ignored_prefix=' '):\n paragraphs = string.split('\\n\\n')\n refilled_paragraphs = [(fill(paragraph) if not paragraph.startswith(\n ignored_prefix) else paragraph) for paragraph in paragraphs]\n return '\\n\\n'.join(refilled_paragraphs).strip('\\n')\n", "role": "user" }, { "content": "refill_paragraphs", "role": "assistant" } ]
def x(args, results): if args.json: print(json.dumps(list(results), indent=4)) elif args.brief: for result in results: print(result['id']) elif args.func.__name__ in ('find_projects', 'org_find_projects'): _format_find_projects_results(results) elif args.func.__name__ in 'org_find_members': _format_find_org_members_results(results) elif args.func.__name__ in 'org_find_apps': _format_find_apps_results(results, verbose=args.verbose)
format_find_results
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args, results):\n if args.json:\n print(json.dumps(list(results), indent=4))\n elif args.brief:\n for result in results:\n print(result['id'])\n elif args.func.__name__ in ('find_projects', 'org_find_projects'):\n _format_find_projects_results(results)\n elif args.func.__name__ in 'org_find_members':\n _format_find_org_members_results(results)\n elif args.func.__name__ in 'org_find_apps':\n _format_find_apps_results(results, verbose=args.verbose)\n", "role": "user" }, { "content": "format_find_results", "role": "assistant" } ]
def x(**kwargs): draw_lines() draw_histogram() draw_bar_chart() destination = '-r /report' if use_html: generate_html() command = 'dx-build-report-html {h} {d}'.format(h=html_filename, d= destination) else: command = 'dx-build-report-html {l} {b} {h} {d}'.format(l= lines_filename, b=bars_filename, h=histogram_filename, d= destination) sub_output = json.loads(subprocess.check_output(command, shell=True)) output = {} output['report'] = dxpy.dxlink(sub_output['recordId']) return output
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(**kwargs):\n draw_lines()\n draw_histogram()\n draw_bar_chart()\n destination = '-r /report'\n if use_html:\n generate_html()\n command = 'dx-build-report-html {h} {d}'.format(h=html_filename, d=\n destination)\n else:\n command = 'dx-build-report-html {l} {b} {h} {d}'.format(l=\n lines_filename, b=bars_filename, h=histogram_filename, d=\n destination)\n sub_output = json.loads(subprocess.check_output(command, shell=True))\n output = {}\n output['report'] = dxpy.dxlink(sub_output['recordId'])\n return output\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(): mu, sigma = 100, 15 x = mu + sigma * numpy.random.randn(10000) fig = pyplot.figure() ax = fig.add_subplot(111) n, bins, patches = ax.hist(x, 50, normed=1, facecolor='green', alpha=0.75) ax.set_xlabel('Random number generated') ax.set_ylabel('Probability') ax.set_xlim(40, 160) ax.set_ylim(0, 0.03) ax.grid(True) pyplot.savefig(histogram_filename)
draw_histogram
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n mu, sigma = 100, 15\n x = mu + sigma * numpy.random.randn(10000)\n fig = pyplot.figure()\n ax = fig.add_subplot(111)\n n, bins, patches = ax.hist(x, 50, normed=1, facecolor='green', alpha=0.75)\n ax.set_xlabel('Random number generated')\n ax.set_ylabel('Probability')\n ax.set_xlim(40, 160)\n ax.set_ylim(0, 0.03)\n ax.grid(True)\n pyplot.savefig(histogram_filename)\n", "role": "user" }, { "content": "draw_histogram", "role": "assistant" } ]
def x(): N = 5 menMeans = 20, 35, 30, 35, 27 menStd = 2, 3, 4, 1, 2 ind = numpy.arange(N) width = 0.35 fig = pyplot.figure() ax = fig.add_subplot(111) rects1 = ax.bar(ind, menMeans, width, color='r', yerr=menStd) womenMeans = 25, 32, 34, 20, 25 womenStd = 3, 5, 2, 3, 3 rects2 = ax.bar(ind + width, womenMeans, width, color='y', yerr=womenStd) ax.set_ylabel('Scores') ax.set_title('Scores by group and color') ax.set_xticks(ind + width) ax.set_xticklabels(('G1', 'G2', 'G3', 'G4', 'G5')) ax.legend((rects1[0], rects2[0]), ('Red', 'Yellow')) def x(rects): for rect in rects: height = rect.get_height() ax.text(rect.get_x() + rect.get_width() / 2.0, 1.05 * height, '%d' % int(height), ha='center', va='bottom') autolabel(rects1) autolabel(rects2) pyplot.savefig(bars_filename)
draw_bar_chart
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n N = 5\n menMeans = 20, 35, 30, 35, 27\n menStd = 2, 3, 4, 1, 2\n ind = numpy.arange(N)\n width = 0.35\n fig = pyplot.figure()\n ax = fig.add_subplot(111)\n rects1 = ax.bar(ind, menMeans, width, color='r', yerr=menStd)\n womenMeans = 25, 32, 34, 20, 25\n womenStd = 3, 5, 2, 3, 3\n rects2 = ax.bar(ind + width, womenMeans, width, color='y', yerr=womenStd)\n ax.set_ylabel('Scores')\n ax.set_title('Scores by group and color')\n ax.set_xticks(ind + width)\n ax.set_xticklabels(('G1', 'G2', 'G3', 'G4', 'G5'))\n ax.legend((rects1[0], rects2[0]), ('Red', 'Yellow'))\n\n def x(rects):\n for rect in rects:\n height = rect.get_height()\n ax.text(rect.get_x() + rect.get_width() / 2.0, 1.05 * height, \n '%d' % int(height), ha='center', va='bottom')\n autolabel(rects1)\n autolabel(rects2)\n pyplot.savefig(bars_filename)\n", "role": "user" }, { "content": "draw_bar_chart", "role": "assistant" } ]
def x(): r = numpy.random.randn(200) fig = pyplot.figure() ax = fig.add_subplot(111) ax.plot(r) ax.grid(True) pyplot.savefig(lines_filename)
draw_lines
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n r = numpy.random.randn(200)\n fig = pyplot.figure()\n ax = fig.add_subplot(111)\n ax.plot(r)\n ax.grid(True)\n pyplot.savefig(lines_filename)\n", "role": "user" }, { "content": "draw_lines", "role": "assistant" } ]
def x(): html_file = open(html_filename, 'w') html_file.write('<html><body>') html_file.write('<h1>Here are some graphs for you!</h1>') for image in [lines_filename, bars_filename, histogram_filename]: html_file.write("<div><h2>{0}</h2><img src='{0}' /></div>".format( image)) html_file.write('</body></html>') html_file.close()
generate_html
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n html_file = open(html_filename, 'w')\n html_file.write('<html><body>')\n html_file.write('<h1>Here are some graphs for you!</h1>')\n for image in [lines_filename, bars_filename, histogram_filename]:\n html_file.write(\"<div><h2>{0}</h2><img src='{0}' /></div>\".format(\n image))\n html_file.write('</body></html>')\n html_file.close()\n", "role": "user" }, { "content": "generate_html", "role": "assistant" } ]
def x(o): def x(s): return u''.join([(c if c in ASCII_PRINTABLE else '?') for c in s]) if USING_PYTHON2: try: return unicode(o) except: try: s = str(o) try: return s.decode('utf-8') except: return clean(s[:2048] ) + u' [Raw error message: ' + unicode(s.encode( 'hex'), 'utf-8') + u']' except: return u'(Unable to decode Python exception message)' else: return str(o)
safe_unicode
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(o):\n\n def x(s):\n return u''.join([(c if c in ASCII_PRINTABLE else '?') for c in s])\n if USING_PYTHON2:\n try:\n return unicode(o)\n except:\n try:\n s = str(o)\n try:\n return s.decode('utf-8')\n except:\n return clean(s[:2048]\n ) + u' [Raw error message: ' + unicode(s.encode(\n 'hex'), 'utf-8') + u']'\n except:\n return u'(Unable to decode Python exception message)'\n else:\n return str(o)\n", "role": "user" }, { "content": "safe_unicode", "role": "assistant" } ]
def x(e): if isinstance(e, dxpy.AppError): return _safe_unicode(e) if USING_PYTHON2: return unicode(e.__class__.__name__, 'utf-8') + ': ' + _safe_unicode(e) else: return e.__class__.__name__ + ': ' + _safe_unicode(e)
format_exception_message
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(e):\n if isinstance(e, dxpy.AppError):\n return _safe_unicode(e)\n if USING_PYTHON2:\n return unicode(e.__class__.__name__, 'utf-8') + ': ' + _safe_unicode(e)\n else:\n return e.__class__.__name__ + ': ' + _safe_unicode(e)\n", "role": "user" }, { "content": "format_exception_message", "role": "assistant" } ]
def x(function_name=None, function_input=None): global RUN_COUNT RUN_COUNT += 1 dx_working_dir = os.getcwd() if dxpy.JOB_ID is not None: logging.basicConfig() try: logging.getLogger().addHandler(dxpy.DXLogHandler()) except dxpy.exceptions.DXError: print( 'TODO: FIXME: the EE client should die if logging is not available' ) job = dxpy.describe(dxpy.JOB_ID) else: if function_name is None: function_name = os.environ.get('DX_TEST_FUNCTION', 'main') if function_input is None: with open('job_input.json', 'r') as fh: function_input = json.load(fh) job = {'function': function_name, 'input': function_input} with open('job_error_reserved_space', 'w') as fh: fh.write( """This file contains reserved space for writing job errors in case the filesystem becomes full. """ + ' ' * 1024 * 64) print('Invoking', job.get('function'), 'with', job.get('input')) try: result = ENTRY_POINT_TABLE[job['function']](**job['input']) except dxpy.AppError as e: save_error(e, dx_working_dir, error_type='AppError') raise except Exception as e: save_error(e, dx_working_dir) raise if result is not None: os.chdir(dx_working_dir) if USING_PYTHON2: with open('job_output.json', 'wb') as fh: json.dump(result, fh, indent=2, cls=DXJSONEncoder) fh.write(b'\n') else: with open('job_output.json', 'w') as fh: json.dump(result, fh, indent=2, cls=DXJSONEncoder) fh.write('\n') return result
run
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(function_name=None, function_input=None):\n global RUN_COUNT\n RUN_COUNT += 1\n dx_working_dir = os.getcwd()\n if dxpy.JOB_ID is not None:\n logging.basicConfig()\n try:\n logging.getLogger().addHandler(dxpy.DXLogHandler())\n except dxpy.exceptions.DXError:\n print(\n 'TODO: FIXME: the EE client should die if logging is not available'\n )\n job = dxpy.describe(dxpy.JOB_ID)\n else:\n if function_name is None:\n function_name = os.environ.get('DX_TEST_FUNCTION', 'main')\n if function_input is None:\n with open('job_input.json', 'r') as fh:\n function_input = json.load(fh)\n job = {'function': function_name, 'input': function_input}\n with open('job_error_reserved_space', 'w') as fh:\n fh.write(\n \"\"\"This file contains reserved space for writing job errors in case the filesystem becomes full.\n\"\"\"\n + ' ' * 1024 * 64)\n print('Invoking', job.get('function'), 'with', job.get('input'))\n try:\n result = ENTRY_POINT_TABLE[job['function']](**job['input'])\n except dxpy.AppError as e:\n save_error(e, dx_working_dir, error_type='AppError')\n raise\n except Exception as e:\n save_error(e, dx_working_dir)\n raise\n if result is not None:\n os.chdir(dx_working_dir)\n if USING_PYTHON2:\n with open('job_output.json', 'wb') as fh:\n json.dump(result, fh, indent=2, cls=DXJSONEncoder)\n fh.write(b'\\n')\n else:\n with open('job_output.json', 'w') as fh:\n json.dump(result, fh, indent=2, cls=DXJSONEncoder)\n fh.write('\\n')\n return result\n", "role": "user" }, { "content": "run", "role": "assistant" } ]
def x(entry_point_name): def x(f): ENTRY_POINT_TABLE[entry_point_name] = f @wraps(f) def x(*args, **kwargs): return f(*args, **kwargs) return wrapped_f return wrap
entry_point
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(entry_point_name):\n\n def x(f):\n ENTRY_POINT_TABLE[entry_point_name] = f\n\n @wraps(f)\n def x(*args, **kwargs):\n return f(*args, **kwargs)\n return wrapped_f\n return wrap\n", "role": "user" }, { "content": "entry_point", "role": "assistant" } ]
def x(path): project, _folderpath, entity_result = try_call(resolve_existing_path, path, expected='entity') try: if entity_result is None or not entity_result['id'].startswith( 'workflow-'): raise DXCLIError('Could not resolve "' + path + '" to a workflow object') except: err_exit() return entity_result['id'], project
get_workflow_id_and_project
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(path):\n project, _folderpath, entity_result = try_call(resolve_existing_path,\n path, expected='entity')\n try:\n if entity_result is None or not entity_result['id'].startswith(\n 'workflow-'):\n raise DXCLIError('Could not resolve \"' + path +\n '\" to a workflow object')\n except:\n err_exit()\n return entity_result['id'], project\n", "role": "user" }, { "content": "get_workflow_id_and_project", "role": "assistant" } ]
def x(authserver_host=None, authserver_port=None): authserver = get_auth_server_name(authserver_host, authserver_port) return DXHTTPRequest(authserver + '/system/getUserInfo', {}, prepend_srv=False)
user_info
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(authserver_host=None, authserver_port=None):\n authserver = get_auth_server_name(authserver_host, authserver_port)\n return DXHTTPRequest(authserver + '/system/getUserInfo', {},\n prepend_srv=False)\n", "role": "user" }, { "content": "user_info", "role": "assistant" } ]
def x(job_homedir=None): if job_homedir is not None: home_dir = job_homedir else: home_dir = environ.get('HOME') idir = os.path.join(home_dir, 'in') return idir
get_input_dir
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(job_homedir=None):\n if job_homedir is not None:\n home_dir = job_homedir\n else:\n home_dir = environ.get('HOME')\n idir = os.path.join(home_dir, 'in')\n return idir\n", "role": "user" }, { "content": "get_input_dir", "role": "assistant" } ]
def x(job_homedir=None): if job_homedir is not None: home_dir = job_homedir else: home_dir = environ.get('HOME') odir = os.path.join(home_dir, 'out') return odir
get_output_dir
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(job_homedir=None):\n if job_homedir is not None:\n home_dir = job_homedir\n else:\n home_dir = environ.get('HOME')\n odir = os.path.join(home_dir, 'out')\n return odir\n", "role": "user" }, { "content": "get_output_dir", "role": "assistant" } ]
def x(): path = get_output_json_file() try: os.remove(path) except OSError as e: if e.errno == errno.ENOENT: pass else: raise
rm_output_json_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n path = get_output_json_file()\n try:\n os.remove(path)\n except OSError as e:\n if e.errno == errno.ENOENT:\n pass\n else:\n raise\n", "role": "user" }, { "content": "rm_output_json_file", "role": "assistant" } ]
def x(path): if not os.path.exists(path): os.mkdir(path) elif os.path.isfile(path): raise Exception( 'Path %s already exists, and it is a file, not a directory' % path)
ensure_dir
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(path):\n if not os.path.exists(path):\n os.mkdir(path)\n elif os.path.isfile(path):\n raise Exception(\n 'Path %s already exists, and it is a file, not a directory' % path)\n", "role": "user" }, { "content": "ensure_dir", "role": "assistant" } ]
def x(fname): bad_filenames = ['.', '..'] if fname in bad_filenames: raise DXError('Invalid filename {}'.format(fname)) return fname.replace('/', '%2F')
make_unix_filename
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(fname):\n bad_filenames = ['.', '..']\n if fname in bad_filenames:\n raise DXError('Invalid filename {}'.format(fname))\n return fname.replace('/', '%2F')\n", "role": "user" }, { "content": "make_unix_filename", "role": "assistant" } ]
def x(job_input_file): def x(): with open(job_input_file) as fh: job_input = json.load(fh) return job_input job_input = get_input_hash() files = collections.defaultdict(list) dirs = [] def x(iname, subdir, value): if not dxpy.is_dxlink(value): return handler = dxpy.get_handler(value) if not isinstance(handler, dxpy.DXFile): return filename = make_unix_filename(handler.name) trg_dir = iname if subdir is not None: trg_dir = os.path.join(trg_dir, subdir) files[iname].append({'trg_fname': os.path.join(trg_dir, filename), 'handler': handler, 'src_file_id': handler.id}) dirs.append(trg_dir) def x(input_name, links): num_files = len(links) if num_files == 0: return num_digits = len(str(num_files - 1)) dirs.append(input_name) for i, link in enumerate(links): subdir = str(i).zfill(num_digits) add_file(input_name, subdir, link) for input_name, value in list(job_input.items()): if isinstance(value, list): add_file_array(input_name, value) else: add_file(input_name, None, value) rest_hash = {key: val for key, val in list(job_input.items()) if key not in files} return dirs, files, rest_hash
get_job_input_filenames
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(job_input_file):\n\n def x():\n with open(job_input_file) as fh:\n job_input = json.load(fh)\n return job_input\n job_input = get_input_hash()\n files = collections.defaultdict(list)\n dirs = []\n\n def x(iname, subdir, value):\n if not dxpy.is_dxlink(value):\n return\n handler = dxpy.get_handler(value)\n if not isinstance(handler, dxpy.DXFile):\n return\n filename = make_unix_filename(handler.name)\n trg_dir = iname\n if subdir is not None:\n trg_dir = os.path.join(trg_dir, subdir)\n files[iname].append({'trg_fname': os.path.join(trg_dir, filename),\n 'handler': handler, 'src_file_id': handler.id})\n dirs.append(trg_dir)\n\n def x(input_name, links):\n num_files = len(links)\n if num_files == 0:\n return\n num_digits = len(str(num_files - 1))\n dirs.append(input_name)\n for i, link in enumerate(links):\n subdir = str(i).zfill(num_digits)\n add_file(input_name, subdir, link)\n for input_name, value in list(job_input.items()):\n if isinstance(value, list):\n add_file_array(input_name, value)\n else:\n add_file(input_name, None, value)\n rest_hash = {key: val for key, val in list(job_input.items()) if key not in\n files}\n return dirs, files, rest_hash\n", "role": "user" }, { "content": "get_job_input_filenames", "role": "assistant" } ]
def x(): input_spec = None if 'DX_JOB_ID' in environ: job_desc = dxpy.describe(dxpy.JOB_ID) if job_desc['function'] == 'main': desc = dxpy.describe(job_desc.get('app', job_desc.get('applet'))) if 'inputSpec' in desc: input_spec = desc['inputSpec'] elif 'DX_TEST_DXAPP_JSON' in environ: path_to_dxapp_json = environ['DX_TEST_DXAPP_JSON'] with open(path_to_dxapp_json) as fd: dxapp_json = json.load(fd) input_spec = dxapp_json.get('inputSpec') if input_spec is None: return {} def x(pattern): return '*' in pattern patterns_dict = {} for spec in input_spec: name = spec['name'] if 'patterns' in spec: patterns_dict[name] = [] for p in spec['patterns']: if is_legal_pattern(p): patterns_dict[name].append(p) return patterns_dict
get_input_spec_patterns
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n input_spec = None\n if 'DX_JOB_ID' in environ:\n job_desc = dxpy.describe(dxpy.JOB_ID)\n if job_desc['function'] == 'main':\n desc = dxpy.describe(job_desc.get('app', job_desc.get('applet')))\n if 'inputSpec' in desc:\n input_spec = desc['inputSpec']\n elif 'DX_TEST_DXAPP_JSON' in environ:\n path_to_dxapp_json = environ['DX_TEST_DXAPP_JSON']\n with open(path_to_dxapp_json) as fd:\n dxapp_json = json.load(fd)\n input_spec = dxapp_json.get('inputSpec')\n if input_spec is None:\n return {}\n\n def x(pattern):\n return '*' in pattern\n patterns_dict = {}\n for spec in input_spec:\n name = spec['name']\n if 'patterns' in spec:\n patterns_dict[name] = []\n for p in spec['patterns']:\n if is_legal_pattern(p):\n patterns_dict[name].append(p)\n return patterns_dict\n", "role": "user" }, { "content": "get_input_spec_patterns", "role": "assistant" } ]
def x(job_input_file, job_homedir): _, file_entries, rest_hash = get_job_input_filenames(job_input_file) patterns_dict = get_input_spec_patterns() def x(basename, key): best_prefix = None patterns = patterns_dict.get(key) if patterns is not None: for pattern in patterns: if fnmatch.fnmatch(basename, pattern): _, _, right_piece = pattern.rpartition('*') best_prefix = choose_shorter_string(best_prefix, basename[:-len(right_piece)]) if best_prefix is not None: return best_prefix else: parts = os.path.splitext(basename) if parts[1] == '.gz': parts = os.path.splitext(parts[0]) return parts[0] def x(): return {'handler': [], 'basename': [], 'prefix': [], 'path': []} file_key_descs = collections.defaultdict(factory) rel_home_dir = get_input_dir(job_homedir) for key, entries in list(file_entries.items()): for entry in entries: filename = entry['trg_fname'] basename = os.path.basename(filename) prefix = get_prefix(basename, key) k_desc = file_key_descs[key] k_desc['handler'].append(entry['handler']) k_desc['basename'].append(basename) k_desc['prefix'].append(prefix) k_desc['path'].append(os.path.join(rel_home_dir, filename)) return file_key_descs, rest_hash
analyze_bash_vars
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(job_input_file, job_homedir):\n _, file_entries, rest_hash = get_job_input_filenames(job_input_file)\n patterns_dict = get_input_spec_patterns()\n\n def x(basename, key):\n best_prefix = None\n patterns = patterns_dict.get(key)\n if patterns is not None:\n for pattern in patterns:\n if fnmatch.fnmatch(basename, pattern):\n _, _, right_piece = pattern.rpartition('*')\n best_prefix = choose_shorter_string(best_prefix,\n basename[:-len(right_piece)])\n if best_prefix is not None:\n return best_prefix\n else:\n parts = os.path.splitext(basename)\n if parts[1] == '.gz':\n parts = os.path.splitext(parts[0])\n return parts[0]\n\n def x():\n return {'handler': [], 'basename': [], 'prefix': [], 'path': []}\n file_key_descs = collections.defaultdict(factory)\n rel_home_dir = get_input_dir(job_homedir)\n for key, entries in list(file_entries.items()):\n for entry in entries:\n filename = entry['trg_fname']\n basename = os.path.basename(filename)\n prefix = get_prefix(basename, key)\n k_desc = file_key_descs[key]\n k_desc['handler'].append(entry['handler'])\n k_desc['basename'].append(basename)\n k_desc['prefix'].append(prefix)\n k_desc['path'].append(os.path.join(rel_home_dir, filename))\n return file_key_descs, rest_hash\n", "role": "user" }, { "content": "analyze_bash_vars", "role": "assistant" } ]
def x(job_input_file, job_homedir=None, check_name_collision=True): file_key_descs, rest_hash = analyze_bash_vars(job_input_file, job_homedir) def x(elem): result = None if isinstance(elem, basestring): result = elem elif isinstance(elem, dxpy.DXFile): result = json.dumps(dxpy.dxlink(elem)) else: result = json.dumps(elem) return pipes.quote(result) def x(val): if isinstance(val, list): string = ' '.join([string_of_elem(vitem) for vitem in val]) return '( {} )'.format(string) else: return string_of_elem(val) var_defs_hash = {} def x(key, val): if check_name_collision: if key not in environ and key not in var_defs_hash: var_defs_hash[key] = val else: sys.stderr.write(dxpy.utils.printing.fill( 'Creating environment variable ({}) would cause a name collision' .format(key)) + '\n') else: var_defs_hash[key] = val for key, desc in list(rest_hash.items()): gen_text_line_and_name_collision(key, string_of_value(desc)) for file_key, desc in list(file_key_descs.items()): gen_text_line_and_name_collision(file_key, string_of_value(desc[ 'handler'])) gen_text_line_and_name_collision(file_key + '_name', string_of_value(desc['basename'])) gen_text_line_and_name_collision(file_key + '_prefix', string_of_value(desc['prefix'])) gen_text_line_and_name_collision(file_key + '_path', string_of_value(desc['path'])) return var_defs_hash
gen_bash_vars
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(job_input_file, job_homedir=None, check_name_collision=True):\n file_key_descs, rest_hash = analyze_bash_vars(job_input_file, job_homedir)\n\n def x(elem):\n result = None\n if isinstance(elem, basestring):\n result = elem\n elif isinstance(elem, dxpy.DXFile):\n result = json.dumps(dxpy.dxlink(elem))\n else:\n result = json.dumps(elem)\n return pipes.quote(result)\n\n def x(val):\n if isinstance(val, list):\n string = ' '.join([string_of_elem(vitem) for vitem in val])\n return '( {} )'.format(string)\n else:\n return string_of_elem(val)\n var_defs_hash = {}\n\n def x(key, val):\n if check_name_collision:\n if key not in environ and key not in var_defs_hash:\n var_defs_hash[key] = val\n else:\n sys.stderr.write(dxpy.utils.printing.fill(\n 'Creating environment variable ({}) would cause a name collision'\n .format(key)) + '\\n')\n else:\n var_defs_hash[key] = val\n for key, desc in list(rest_hash.items()):\n gen_text_line_and_name_collision(key, string_of_value(desc))\n for file_key, desc in list(file_key_descs.items()):\n gen_text_line_and_name_collision(file_key, string_of_value(desc[\n 'handler']))\n gen_text_line_and_name_collision(file_key + '_name',\n string_of_value(desc['basename']))\n gen_text_line_and_name_collision(file_key + '_prefix',\n string_of_value(desc['prefix']))\n gen_text_line_and_name_collision(file_key + '_path',\n string_of_value(desc['path']))\n return var_defs_hash\n", "role": "user" }, { "content": "gen_bash_vars", "role": "assistant" } ]
def x(futures, print_traceback=False): while True: try: future = next(concurrent.futures.as_completed(futures, timeout= THREAD_TIMEOUT_MAX)) break except concurrent.futures.TimeoutError: pass except KeyboardInterrupt: if print_traceback: traceback.print_stack() else: print('') os._exit(os.EX_IOERR) return future
wait_for_a_future
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(futures, print_traceback=False):\n while True:\n try:\n future = next(concurrent.futures.as_completed(futures, timeout=\n THREAD_TIMEOUT_MAX))\n break\n except concurrent.futures.TimeoutError:\n pass\n except KeyboardInterrupt:\n if print_traceback:\n traceback.print_stack()\n else:\n print('')\n os._exit(os.EX_IOERR)\n return future\n", "role": "user" }, { "content": "wait_for_a_future", "role": "assistant" } ]
def x(futures, print_traceback=False): try: while True: waited_futures = concurrent.futures.wait(futures, timeout=60) if len(waited_futures.not_done) == 0: break except KeyboardInterrupt: if print_traceback: traceback.print_stack() else: print('') os._exit(os.EX_IOERR)
wait_for_all_futures
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(futures, print_traceback=False):\n try:\n while True:\n waited_futures = concurrent.futures.wait(futures, timeout=60)\n if len(waited_futures.not_done) == 0:\n break\n except KeyboardInterrupt:\n if print_traceback:\n traceback.print_stack()\n else:\n print('')\n os._exit(os.EX_IOERR)\n", "role": "user" }, { "content": "wait_for_all_futures", "role": "assistant" } ]
def x(request_iterator, thread_pool, max_active_tasks=None, do_first_task_sequentially=True): tasks_in_progress = collections.deque() if max_active_tasks is None: max_active_tasks = cpu_count() def x(task_iterator, executor, futures_queue): retval = next(task_iterator, None) if retval is None: return False task_callable, task_args, task_kwargs = retval task_future = executor.submit(task_callable, *task_args, **task_kwargs) futures_queue.append(task_future) return True def x(tasks_in_progress): future = tasks_in_progress.popleft() try: result = future.result(timeout=THREAD_TIMEOUT_MAX) except KeyboardInterrupt: print('') os._exit(os.EX_IOERR) return result if do_first_task_sequentially: task_callable, task_args, task_kwargs = next(request_iterator) yield task_callable(*task_args, **task_kwargs) for _i in range(max_active_tasks): retval = submit_task(request_iterator, thread_pool, tasks_in_progress) if not retval: break while len(tasks_in_progress) > 0: result = next_result(tasks_in_progress) submit_task(request_iterator, thread_pool, tasks_in_progress) yield result del result
response_iterator
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(request_iterator, thread_pool, max_active_tasks=None,\n do_first_task_sequentially=True):\n tasks_in_progress = collections.deque()\n if max_active_tasks is None:\n max_active_tasks = cpu_count()\n\n def x(task_iterator, executor, futures_queue):\n retval = next(task_iterator, None)\n if retval is None:\n return False\n task_callable, task_args, task_kwargs = retval\n task_future = executor.submit(task_callable, *task_args, **task_kwargs)\n futures_queue.append(task_future)\n return True\n\n def x(tasks_in_progress):\n future = tasks_in_progress.popleft()\n try:\n result = future.result(timeout=THREAD_TIMEOUT_MAX)\n except KeyboardInterrupt:\n print('')\n os._exit(os.EX_IOERR)\n return result\n if do_first_task_sequentially:\n task_callable, task_args, task_kwargs = next(request_iterator)\n yield task_callable(*task_args, **task_kwargs)\n for _i in range(max_active_tasks):\n retval = submit_task(request_iterator, thread_pool, tasks_in_progress)\n if not retval:\n break\n while len(tasks_in_progress) > 0:\n result = next_result(tasks_in_progress)\n submit_task(request_iterator, thread_pool, tasks_in_progress)\n yield result\n del result\n", "role": "user" }, { "content": "response_iterator", "role": "assistant" } ]
def x(t, future=False, default_unit='ms'): error_msg = ( 'Error: Expected an int timestamp, a date format (e.g. YYYY-MM-DD), or an int with a single-letter suffix (s=seconds, m=minutes, h=hours, d=days, w=weeks, M=months, y=years; e.g. "-10d" indicates 10 days ago); but got {t}' ) if isinstance(t, basestring) and t.isdigit(): t = int(t) if isinstance(t, basestring): try: t = normalize_timedelta(t) except ValueError: try: t = int(time.mktime(dateutil.parser.parse(t).timetuple()) * 1000) assert t > 0 except (ValueError, OverflowError, AssertionError): raise ValueError(error_msg.format(t=t)) elif isinstance(t, numbers.Integral): units_multipliers = {'ms': 1, 's': 1000} if default_unit not in units_multipliers: raise ValueError("Expected default_unit to be one of 's' or 'ms'") t = t * units_multipliers[default_unit] else: raise ValueError(error_msg.format(t=t)) now = int(time.time() * 1000) if t < 0 or future and t < now: t += now return t
normalize_time_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(t, future=False, default_unit='ms'):\n error_msg = (\n 'Error: Expected an int timestamp, a date format (e.g. YYYY-MM-DD), or an int with a single-letter suffix (s=seconds, m=minutes, h=hours, d=days, w=weeks, M=months, y=years; e.g. \"-10d\" indicates 10 days ago); but got {t}'\n )\n if isinstance(t, basestring) and t.isdigit():\n t = int(t)\n if isinstance(t, basestring):\n try:\n t = normalize_timedelta(t)\n except ValueError:\n try:\n t = int(time.mktime(dateutil.parser.parse(t).timetuple()) *\n 1000)\n assert t > 0\n except (ValueError, OverflowError, AssertionError):\n raise ValueError(error_msg.format(t=t))\n elif isinstance(t, numbers.Integral):\n units_multipliers = {'ms': 1, 's': 1000}\n if default_unit not in units_multipliers:\n raise ValueError(\"Expected default_unit to be one of 's' or 'ms'\")\n t = t * units_multipliers[default_unit]\n else:\n raise ValueError(error_msg.format(t=t))\n now = int(time.time() * 1000)\n if t < 0 or future and t < now:\n t += now\n return t\n", "role": "user" }, { "content": "normalize_time_input", "role": "assistant" } ]
def x(timedelta): try: return int(timedelta) * 1000 except ValueError as e: t, suffix = timedelta[:-1], timedelta[-1:] suffix_multipliers = {'s': 1000, 'm': 1000 * 60, 'h': 1000 * 60 * 60, 'd': 1000 * 60 * 60 * 24, 'w': 1000 * 60 * 60 * 24 * 7, 'M': 1000 * 60 * 60 * 24 * 30, 'y': 1000 * 60 * 60 * 24 * 365} if suffix not in suffix_multipliers: raise ValueError() return int(t) * suffix_multipliers[suffix]
normalize_timedelta
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(timedelta):\n try:\n return int(timedelta) * 1000\n except ValueError as e:\n t, suffix = timedelta[:-1], timedelta[-1:]\n suffix_multipliers = {'s': 1000, 'm': 1000 * 60, 'h': 1000 * 60 * \n 60, 'd': 1000 * 60 * 60 * 24, 'w': 1000 * 60 * 60 * 24 * 7, 'M':\n 1000 * 60 * 60 * 24 * 30, 'y': 1000 * 60 * 60 * 24 * 365}\n if suffix not in suffix_multipliers:\n raise ValueError()\n return int(t) * suffix_multipliers[suffix]\n", "role": "user" }, { "content": "normalize_timedelta", "role": "assistant" } ]
def x(ordered_pairs): d = {} for k, v in ordered_pairs: if k in d: raise ValueError('duplicate key: %r' % (k,)) else: d[k] = v return d
dict_raise_on_duplicates
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(ordered_pairs):\n d = {}\n for k, v in ordered_pairs:\n if k in d:\n raise ValueError('duplicate key: %r' % (k,))\n else:\n d[k] = v\n return d\n", "role": "user" }, { "content": "dict_raise_on_duplicates", "role": "assistant" } ]
def x(input_params): input_cp = input_params.copy() if len(input_cp.get('nonce', '')) == 0: input_cp['nonce'] = str(Nonce()) return input_cp
update_nonce
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(input_params):\n input_cp = input_params.copy()\n if len(input_cp.get('nonce', '')) == 0:\n input_cp['nonce'] = str(Nonce())\n return input_cp\n", "role": "user" }, { "content": "update_nonce", "role": "assistant" } ]
def x(json_spec, src_dir): if 'description' not in json_spec: readme_filename = None for filename in ('README.md', 'Readme.md', 'readme.md'): if os.path.exists(os.path.join(src_dir, filename)): readme_filename = filename break if readme_filename is not None: with open(os.path.join(src_dir, readme_filename)) as fh: json_spec['description'] = fh.read() if 'developerNotes' not in json_spec: for filename in ('README.developer.md', 'Readme.developer.md', 'readme.developer.md'): if os.path.exists(os.path.join(src_dir, filename)): with open(os.path.join(src_dir, filename)) as fh: json_spec['developerNotes'] = fh.read() break
inline_documentation_files
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, src_dir):\n if 'description' not in json_spec:\n readme_filename = None\n for filename in ('README.md', 'Readme.md', 'readme.md'):\n if os.path.exists(os.path.join(src_dir, filename)):\n readme_filename = filename\n break\n if readme_filename is not None:\n with open(os.path.join(src_dir, readme_filename)) as fh:\n json_spec['description'] = fh.read()\n if 'developerNotes' not in json_spec:\n for filename in ('README.developer.md', 'Readme.developer.md',\n 'readme.developer.md'):\n if os.path.exists(os.path.join(src_dir, filename)):\n with open(os.path.join(src_dir, filename)) as fh:\n json_spec['developerNotes'] = fh.read()\n break\n", "role": "user" }, { "content": "inline_documentation_files", "role": "assistant" } ]
def x(prefixed_name): assert prefixed_name.startswith('app-') or prefixed_name.startswith( 'globalworkflow-') if prefixed_name.partition('-')[0] == 'app': exception_type = dxpy.app_builder.AppBuilderException describe_method = dxpy.api.app_describe exception_msg = ( 'An app with the given name already exists and you are not a developer of that app' ) else: exception_type = dxpy.workflow_builder.WorkflowBuilderException describe_method = dxpy.api.global_workflow_describe exception_msg = ( 'A global workflow with the given name already exists and you are not a developer of that workflow' ) name_already_exists = True is_developer = False version = None executable_id = None FoundExecutable = collections.namedtuple('FoundExecutable', ['name', 'version', 'id']) try: describe_output = describe_method(prefixed_name, input_params={ 'fields': {'isDeveloperFor': True, 'version': True, 'id': True}}) is_developer = describe_output['isDeveloperFor'] version = describe_output['version'] executable_id = describe_output['id'] except dxpy.exceptions.DXAPIError as e: if e.name == 'ResourceNotFound': name_already_exists = False elif e.name == 'PermissionDenied': raise exception_type(exception_msg) else: raise e if not name_already_exists: return FoundExecutable(name=None, version=None, id=None) name_without_prefix = prefixed_name.partition('-')[2] if not is_developer: raise exception_type('You are not a developer for {n}'.format(n= name_without_prefix)) return FoundExecutable(name=name_without_prefix, version=version, id= executable_id)
verify_developer_rights
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(prefixed_name):\n assert prefixed_name.startswith('app-') or prefixed_name.startswith(\n 'globalworkflow-')\n if prefixed_name.partition('-')[0] == 'app':\n exception_type = dxpy.app_builder.AppBuilderException\n describe_method = dxpy.api.app_describe\n exception_msg = (\n 'An app with the given name already exists and you are not a developer of that app'\n )\n else:\n exception_type = dxpy.workflow_builder.WorkflowBuilderException\n describe_method = dxpy.api.global_workflow_describe\n exception_msg = (\n 'A global workflow with the given name already exists and you are not a developer of that workflow'\n )\n name_already_exists = True\n is_developer = False\n version = None\n executable_id = None\n FoundExecutable = collections.namedtuple('FoundExecutable', ['name',\n 'version', 'id'])\n try:\n describe_output = describe_method(prefixed_name, input_params={\n 'fields': {'isDeveloperFor': True, 'version': True, 'id': True}})\n is_developer = describe_output['isDeveloperFor']\n version = describe_output['version']\n executable_id = describe_output['id']\n except dxpy.exceptions.DXAPIError as e:\n if e.name == 'ResourceNotFound':\n name_already_exists = False\n elif e.name == 'PermissionDenied':\n raise exception_type(exception_msg)\n else:\n raise e\n if not name_already_exists:\n return FoundExecutable(name=None, version=None, id=None)\n name_without_prefix = prefixed_name.partition('-')[2]\n if not is_developer:\n raise exception_type('You are not a developer for {n}'.format(n=\n name_without_prefix))\n return FoundExecutable(name=name_without_prefix, version=version, id=\n executable_id)\n", "role": "user" }, { "content": "verify_developer_rights", "role": "assistant" } ]
def x(from_spec, from_command_line, builder_exception): if from_spec is None or from_command_line is None: return if set(from_spec) != set(from_command_line): raise builder_exception( "--region and the 'regionalOptions' key in the JSON file do not agree" )
assert_consistent_regions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(from_spec, from_command_line, builder_exception):\n if from_spec is None or from_command_line is None:\n return\n if set(from_spec) != set(from_command_line):\n raise builder_exception(\n \"--region and the 'regionalOptions' key in the JSON file do not agree\"\n )\n", "role": "user" }, { "content": "assert_consistent_regions", "role": "assistant" } ]
def x(exec_type, json_spec, executable_builder_exeception): reg_options_spec = json_spec.get('regionalOptions') json_fn = 'dxapp.json' if exec_type == 'app' else 'dxworkflow.json' if not isinstance(reg_options_spec, dict): raise executable_builder_exeception( "The field 'regionalOptions' in must be a mapping") if not reg_options_spec: raise executable_builder_exeception( "The field 'regionalOptions' in " + json_fn + ' must be a non-empty mapping') regional_options_list = list(reg_options_spec.items()) for region, opts_for_region in regional_options_list: if not isinstance(opts_for_region, dict): raise executable_builder_exeception( "The field 'regionalOptions['" + region + "']' in " + json_fn + ' must be a mapping') if set(opts_for_region.keys()) != set(regional_options_list[0][1]. keys()): if set(opts_for_region.keys()) - set(regional_options_list[0][1 ].keys()): with_key, without_key = region, regional_options_list[0][0] key_name = next(iter(set(opts_for_region.keys()) - set( regional_options_list[0][1].keys()))) else: with_key, without_key = regional_options_list[0][0], region key_name = next(iter(set(regional_options_list[0][1].keys() ) - set(opts_for_region.keys()))) raise executable_builder_exeception( 'All regions in regionalOptions must specify the same options; ' + '%s was given for %s but not for %s' % (key_name, with_key, without_key)) if exec_type == 'app': for key in opts_for_region: if key in json_spec.get('runSpec', {}): raise executable_builder_exeception(key + ' cannot be given in both runSpec and in regional options for ' + region)
assert_consistent_reg_options
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(exec_type, json_spec, executable_builder_exeception):\n reg_options_spec = json_spec.get('regionalOptions')\n json_fn = 'dxapp.json' if exec_type == 'app' else 'dxworkflow.json'\n if not isinstance(reg_options_spec, dict):\n raise executable_builder_exeception(\n \"The field 'regionalOptions' in must be a mapping\")\n if not reg_options_spec:\n raise executable_builder_exeception(\n \"The field 'regionalOptions' in \" + json_fn +\n ' must be a non-empty mapping')\n regional_options_list = list(reg_options_spec.items())\n for region, opts_for_region in regional_options_list:\n if not isinstance(opts_for_region, dict):\n raise executable_builder_exeception(\n \"The field 'regionalOptions['\" + region + \"']' in \" +\n json_fn + ' must be a mapping')\n if set(opts_for_region.keys()) != set(regional_options_list[0][1].\n keys()):\n if set(opts_for_region.keys()) - set(regional_options_list[0][1\n ].keys()):\n with_key, without_key = region, regional_options_list[0][0]\n key_name = next(iter(set(opts_for_region.keys()) - set(\n regional_options_list[0][1].keys())))\n else:\n with_key, without_key = regional_options_list[0][0], region\n key_name = next(iter(set(regional_options_list[0][1].keys()\n ) - set(opts_for_region.keys())))\n raise executable_builder_exeception(\n 'All regions in regionalOptions must specify the same options; '\n + '%s was given for %s but not for %s' % (key_name,\n with_key, without_key))\n if exec_type == 'app':\n for key in opts_for_region:\n if key in json_spec.get('runSpec', {}):\n raise executable_builder_exeception(key +\n ' cannot be given in both runSpec and in regional options for '\n + region)\n", "role": "user" }, { "content": "assert_consistent_reg_options", "role": "assistant" } ]
def x(exec_type, json_spec, from_command_line, executable_builder_exeception): from_spec = json_spec.get('regionalOptions') if from_spec is not None: assert_consistent_reg_options(exec_type, json_spec, executable_builder_exeception) assert_consistent_regions(from_spec, from_command_line, executable_builder_exeception) enabled_regions = None if from_spec is not None: enabled_regions = from_spec.keys() elif from_command_line is not None: enabled_regions = from_command_line return enabled_regions
get_enabled_regions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(exec_type, json_spec, from_command_line, executable_builder_exeception):\n from_spec = json_spec.get('regionalOptions')\n if from_spec is not None:\n assert_consistent_reg_options(exec_type, json_spec,\n executable_builder_exeception)\n assert_consistent_regions(from_spec, from_command_line,\n executable_builder_exeception)\n enabled_regions = None\n if from_spec is not None:\n enabled_regions = from_spec.keys()\n elif from_command_line is not None:\n enabled_regions = from_command_line\n return enabled_regions\n", "role": "user" }, { "content": "get_enabled_regions", "role": "assistant" } ]
def x(self, fields=None, **kwargs): describe_input = {} if fields is not None: describe_input['fields'] = fields self._desc = dxpy.api.analysis_describe(self._dxid, describe_input, ** kwargs) return self._desc
describe
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, fields=None, **kwargs):\n describe_input = {}\n if fields is not None:\n describe_input['fields'] = fields\n self._desc = dxpy.api.analysis_describe(self._dxid, describe_input, **\n kwargs)\n return self._desc\n", "role": "user" }, { "content": "describe", "role": "assistant" } ]
def x(self, tags, **kwargs): dxpy.api.analysis_add_tags(self._dxid, {'tags': tags}, **kwargs)
add_tags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, tags, **kwargs):\n dxpy.api.analysis_add_tags(self._dxid, {'tags': tags}, **kwargs)\n", "role": "user" }, { "content": "add_tags", "role": "assistant" } ]
def x(self, tags, **kwargs): dxpy.api.analysis_remove_tags(self._dxid, {'tags': tags}, **kwargs)
remove_tags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, tags, **kwargs):\n dxpy.api.analysis_remove_tags(self._dxid, {'tags': tags}, **kwargs)\n", "role": "user" }, { "content": "remove_tags", "role": "assistant" } ]
def x(self, properties, **kwargs): dxpy.api.analysis_set_properties(self._dxid, {'properties': properties}, **kwargs)
set_properties
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, properties, **kwargs):\n dxpy.api.analysis_set_properties(self._dxid, {'properties': properties},\n **kwargs)\n", "role": "user" }, { "content": "set_properties", "role": "assistant" } ]
def x(self, field, index=None, metadata=None): link = {'$dnanexus_link': {'analysis': self._dxid, 'field': field}} if index is not None: link['$dnanexus_link']['index'] = index if metadata is not None: link['$dnanexus_link']['metadata'] = metadata return link
get_output_ref
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, field, index=None, metadata=None):\n link = {'$dnanexus_link': {'analysis': self._dxid, 'field': field}}\n if index is not None:\n link['$dnanexus_link']['index'] = index\n if metadata is not None:\n link['$dnanexus_link']['metadata'] = metadata\n return link\n", "role": "user" }, { "content": "get_output_ref", "role": "assistant" } ]
def x(app_json, publish=False): for input_field in app_json.get('inputSpec', []): for suggestion in input_field.get('suggestions', []): if 'project' in suggestion: try: project = dxpy.api.project_describe(suggestion[ 'project'], {'permissions': True}) if 'PUBLIC' not in project['permissions'] and publish: logger.warn('Project {name} NOT PUBLIC!'.format( name=project['name'])) except dxpy.exceptions.DXAPIError as e: if e.code == 404: logger.warn( 'Suggested project {name} does not exist, or not accessible by user' .format(name=suggestion['project'])) if 'path' in suggestion: try: check_folder_exists(suggestion['project'], suggestion['path'], '') except ResolutionError as e: logger.warn( 'Folder {path} could not be found in project {project}' .format(path=suggestion['path'], project= suggestion['project'])) if '$dnanexus_link' in suggestion: if suggestion['$dnanexus_link'].startswith(('file-', 'record-') ): try: dnanexus_link = dxpy.describe(suggestion[ '$dnanexus_link']) except dxpy.exceptions.DXAPIError as e: if e.code == 404: raise dxpy.app_builder.AppBuilderException( 'Suggested object {name} could not be found' .format(name=suggestion['$dnanexus_link'])) except Exception as e: raise dxpy.app_builder.AppBuilderException(str(e)) if 'value' in suggestion and isinstance(suggestion['value'], dict): if '$dnanexus_link' in suggestion['value']: if isinstance(suggestion['value']['$dnanexus_link'], dict): if 'project' in suggestion['value']['$dnanexus_link']: try: dxpy.api.project_describe(suggestion[ 'value']['$dnanexus_link']['project']) except dxpy.exceptions.DXAPIError as e: if e.code == 404: logger.warn( 'Suggested project {name} does not exist, or not accessible by user' .format(name=suggestion['value'][ '$dnanexus_link']['project'])) elif isinstance(suggestion['value']['$dnanexus_link'], basestring): if suggestion['value']['$dnanexus_link'].startswith(( 'file-', 'record-')): try: dnanexus_link = dxpy.describe(suggestion[ 'value']['$dnanexus_link']) except dxpy.exceptions.DXAPIError as e: if e.code == 404: raise dxpy.app_builder.AppBuilderException( 'Suggested object {name} could not be found' .format(name=suggestion['value'][ '$dnanexus_link'])) except Exception as e: raise dxpy.app_builder.AppBuilderException(str (e))
check_suggestions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(app_json, publish=False):\n for input_field in app_json.get('inputSpec', []):\n for suggestion in input_field.get('suggestions', []):\n if 'project' in suggestion:\n try:\n project = dxpy.api.project_describe(suggestion[\n 'project'], {'permissions': True})\n if 'PUBLIC' not in project['permissions'] and publish:\n logger.warn('Project {name} NOT PUBLIC!'.format(\n name=project['name']))\n except dxpy.exceptions.DXAPIError as e:\n if e.code == 404:\n logger.warn(\n 'Suggested project {name} does not exist, or not accessible by user'\n .format(name=suggestion['project']))\n if 'path' in suggestion:\n try:\n check_folder_exists(suggestion['project'],\n suggestion['path'], '')\n except ResolutionError as e:\n logger.warn(\n 'Folder {path} could not be found in project {project}'\n .format(path=suggestion['path'], project=\n suggestion['project']))\n if '$dnanexus_link' in suggestion:\n if suggestion['$dnanexus_link'].startswith(('file-', 'record-')\n ):\n try:\n dnanexus_link = dxpy.describe(suggestion[\n '$dnanexus_link'])\n except dxpy.exceptions.DXAPIError as e:\n if e.code == 404:\n raise dxpy.app_builder.AppBuilderException(\n 'Suggested object {name} could not be found'\n .format(name=suggestion['$dnanexus_link']))\n except Exception as e:\n raise dxpy.app_builder.AppBuilderException(str(e))\n if 'value' in suggestion and isinstance(suggestion['value'], dict):\n if '$dnanexus_link' in suggestion['value']:\n if isinstance(suggestion['value']['$dnanexus_link'], dict):\n if 'project' in suggestion['value']['$dnanexus_link']:\n try:\n dxpy.api.project_describe(suggestion[\n 'value']['$dnanexus_link']['project'])\n except dxpy.exceptions.DXAPIError as e:\n if e.code == 404:\n logger.warn(\n 'Suggested project {name} does not exist, or not accessible by user'\n .format(name=suggestion['value'][\n '$dnanexus_link']['project']))\n elif isinstance(suggestion['value']['$dnanexus_link'],\n basestring):\n if suggestion['value']['$dnanexus_link'].startswith((\n 'file-', 'record-')):\n try:\n dnanexus_link = dxpy.describe(suggestion[\n 'value']['$dnanexus_link'])\n except dxpy.exceptions.DXAPIError as e:\n if e.code == 404:\n raise dxpy.app_builder.AppBuilderException(\n 'Suggested object {name} could not be found'\n .format(name=suggestion['value'][\n '$dnanexus_link']))\n except Exception as e:\n raise dxpy.app_builder.AppBuilderException(str\n (e))\n", "role": "user" }, { "content": "check_suggestions", "role": "assistant" } ]
def x(dxapp_json_filename, mode): def x(dirname): for basename in ['README.md', 'Readme.md', 'readme.md']: if os.path.exists(os.path.join(dirname, basename)): return os.path.join(dirname, basename) return None app_spec = json.load(open(dxapp_json_filename)) dirname = os.path.basename(os.path.dirname(os.path.abspath( dxapp_json_filename))) if mode == 'app': if 'title' not in app_spec: logger.warn( 'app is missing a title, please add one in the "title" field of dxapp.json' ) if 'summary' in app_spec: if app_spec['summary'].endswith('.'): logger.warn( 'summary "%s" should be a short phrase not ending in a period' % (app_spec['summary'],)) else: logger.warn( 'app is missing a summary, please add one in the "summary" field of dxapp.json' ) readme_filename = _find_readme(os.path.dirname(dxapp_json_filename)) if 'description' in app_spec: if readme_filename: raise dxpy.app_builder.AppBuilderException( 'Description was provided both in Readme.md and in the "description" field of {file}. Please consolidate content in Readme.md and remove the "description" field.' .format(file=dxapp_json_filename)) if not app_spec['description'].strip().endswith('.'): logger.warn( '"description" field should be written in complete sentences and end with a period' ) elif readme_filename is None: logger.warn( 'app is missing a description, please supply one in README.md') if 'categories' in app_spec: for category in app_spec['categories']: if category not in APP_CATEGORIES: logger.warn('app has unrecognized category "%s"' % ( category,)) if category == 'Import': if 'title' in app_spec and not app_spec['title'].endswith( 'Importer'): logger.warn('title "%s" should end in "Importer"' % (app_spec['title'],)) if category == 'Export': if 'title' in app_spec and not app_spec['title'].endswith( 'Exporter'): logger.warn('title "%s" should end in "Exporter"' % (app_spec['title'],)) if 'name' in app_spec: if app_spec['name'] != app_spec['name'].lower(): logger.warn('name "%s" should be all lowercase' % (app_spec[ 'name'],)) if dirname != app_spec['name']: logger.warn( 'app name "%s" does not match containing directory "%s"' % (app_spec['name'], dirname)) else: logger.warn( 'app is missing a name, please add one in the "name" field of dxapp.json' ) if 'version' in app_spec: if not dxpy.executable_builder.GLOBAL_EXEC_VERSION_RE.match(app_spec ['version']): logger.warn( '"version" %s should be semver compliant (e.g. of the form X.Y.Z)' % (app_spec['version'],)) if 'inputSpec' in app_spec: for i, input_field in enumerate(app_spec['inputSpec']): if not re.match('^[a-zA-Z_][0-9a-zA-Z_]*$', input_field['name']): logger.error( 'input %d has illegal name "%s" (must match ^[a-zA-Z_][0-9a-zA-Z_]*$)' % (i, input_field['name'])) else: logger.warn( 'dxapp.json contains no input specification (inputSpec). Your applet will not be usable as an ' + 'app, runnable from the GUI, or composable using workflows.') if 'outputSpec' in app_spec: for i, output_field in enumerate(app_spec['outputSpec']): if not re.match('^[a-zA-Z_][0-9a-zA-Z_]*$', output_field['name']): logger.error( 'output %d has illegal name "%s" (must match ^[a-zA-Z_][0-9a-zA-Z_]*$)' % (i, output_field['name'])) else: logger.warn( 'dxapp.json contains no output specification (outputSpec). Your applet will not be usable as an ' + 'app, runnable from the GUI, or composable using workflows.')
lint
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(dxapp_json_filename, mode):\n\n    def x(dirname):\n        for basename in ['README.md', 'Readme.md', 'readme.md']:\n            if os.path.exists(os.path.join(dirname, basename)):\n                return os.path.join(dirname, basename)\n        return None\n    app_spec = json.load(open(dxapp_json_filename))\n    dirname = os.path.basename(os.path.dirname(os.path.abspath(\n        dxapp_json_filename)))\n    if mode == 'app':\n        if 'title' not in app_spec:\n            logger.warn(\n                'app is missing a title, please add one in the \"title\" field of dxapp.json'\n                )\n        if 'summary' in app_spec:\n            if app_spec['summary'].endswith('.'):\n                logger.warn(\n                    'summary \"%s\" should be a short phrase not ending in a period'\n                     % (app_spec['summary'],))\n        else:\n            logger.warn(\n                'app is missing a summary, please add one in the \"summary\" field of dxapp.json'\n                )\n        readme_filename = _find_readme(os.path.dirname(dxapp_json_filename))\n        if 'description' in app_spec:\n            if readme_filename:\n                raise dxpy.app_builder.AppBuilderException(\n                    'Description was provided both in Readme.md and in the \"description\" field of {file}. Please consolidate content in Readme.md and remove the \"description\" field.'\n                    .format(file=dxapp_json_filename))\n            if not app_spec['description'].strip().endswith('.'):\n                logger.warn(\n                    '\"description\" field should be written in complete sentences and end with a period'\n                    )\n        elif readme_filename is None:\n            logger.warn(\n                'app is missing a description, please supply one in README.md')\n        if 'categories' in app_spec:\n            for category in app_spec['categories']:\n                if category not in APP_CATEGORIES:\n                    logger.warn('app has unrecognized category \"%s\"' % (\n                        category,))\n                if category == 'Import':\n                    if 'title' in app_spec and not app_spec['title'].endswith(\n                        'Importer'):\n                        logger.warn('title \"%s\" should end in \"Importer\"' %\n                            (app_spec['title'],))\n                if category == 'Export':\n                    if 'title' in app_spec and not app_spec['title'].endswith(\n                        'Exporter'):\n                        logger.warn('title \"%s\" should end in \"Exporter\"' %\n                            (app_spec['title'],))\n    if 'name' in app_spec:\n        if app_spec['name'] != app_spec['name'].lower():\n            logger.warn('name \"%s\" should be all lowercase' % (app_spec[\n                'name'],))\n        if dirname != app_spec['name']:\n            logger.warn(\n                'app name \"%s\" does not match containing directory \"%s\"' %\n                (app_spec['name'], dirname))\n    else:\n        logger.warn(\n            'app is missing a name, please add one in the \"name\" field of dxapp.json'\n            )\n    if 'version' in app_spec:\n        if not dxpy.executable_builder.GLOBAL_EXEC_VERSION_RE.match(app_spec\n            ['version']):\n            logger.warn(\n                '\"version\" %s should be semver compliant (e.g. of the form X.Y.Z)'\n                % (app_spec['version'],))\n    if 'inputSpec' in app_spec:\n        for i, input_field in enumerate(app_spec['inputSpec']):\n            if not re.match('^[a-zA-Z_][0-9a-zA-Z_]*$', input_field['name']):\n                logger.error(\n                    'input %d has illegal name \"%s\" (must match ^[a-zA-Z_][0-9a-zA-Z_]*$)'\n                     % (i, input_field['name']))\n    else:\n        logger.warn(\n            'dxapp.json contains no input specification (inputSpec). Your applet will not be usable as an '\n            + 'app, runnable from the GUI, or composable using workflows.')\n    if 'outputSpec' in app_spec:\n        for i, output_field in enumerate(app_spec['outputSpec']):\n            if not re.match('^[a-zA-Z_][0-9a-zA-Z_]*$', output_field['name']):\n                logger.error(\n                    'output %d has illegal name \"%s\" (must match ^[a-zA-Z_][0-9a-zA-Z_]*$)'\n                    % (i, output_field['name']))\n    else:\n        logger.warn(\n            'dxapp.json contains no output specification (outputSpec). Your applet will not be usable as an '\n            + 'app, runnable from the GUI, or composable using workflows.')\n", "role": "user" }, { "content": "lint", "role": "assistant" } ]
def x(code, lang, temp_dir, enforce=True): if lang == 'python2.7': temp_basename = 'inlined_code_from_dxapp_json.py' elif lang == 'bash': temp_basename = 'inlined_code_from_dxapp_json.sh' else: raise ValueError('lang must be one of "python2.7" or "bash"') with open(os.path.join(temp_dir, temp_basename), 'w') as ofile: ofile.write(code) _check_file_syntax(os.path.join(temp_dir, temp_basename), temp_dir, override_lang=lang, enforce=enforce)
check_syntax
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(code, lang, temp_dir, enforce=True):\n if lang == 'python2.7':\n temp_basename = 'inlined_code_from_dxapp_json.py'\n elif lang == 'bash':\n temp_basename = 'inlined_code_from_dxapp_json.sh'\n else:\n raise ValueError('lang must be one of \"python2.7\" or \"bash\"')\n with open(os.path.join(temp_dir, temp_basename), 'w') as ofile:\n ofile.write(code)\n _check_file_syntax(os.path.join(temp_dir, temp_basename), temp_dir,\n override_lang=lang, enforce=enforce)\n", "role": "user" }, { "content": "check_syntax", "role": "assistant" } ]
def x(filename, temp_dir, override_lang=None, enforce=True): def x(filename): pyc_path = os.path.join(temp_dir, os.path.basename(filename) + '.pyc') try: if USING_PYTHON2: filename = filename.encode(sys.getfilesystemencoding()) py_compile.compile(filename, cfile=pyc_path, doraise=True) finally: try: os.unlink(pyc_path) except OSError: pass def x(filename): if platform.system() == 'Windows': logging.warn( 'Skipping bash syntax check due to unavailability of bash on Windows.' ) else: subprocess.check_output(['/bin/bash', '-n', filename], stderr= subprocess.STDOUT) if override_lang == 'python2.7': checker_fn = check_python elif override_lang == 'bash': checker_fn = check_bash elif filename.endswith('.py'): checker_fn = check_python elif filename.endswith('.sh'): checker_fn = check_bash else: return open(filename) try: checker_fn(filename) except subprocess.CalledProcessError as e: print(filename + ' has a syntax error! Interpreter output:', file= sys.stderr) for line in e.output.strip('\n').split('\n'): print(' ' + line.rstrip('\n'), file=sys.stderr) if enforce: raise DXSyntaxError(filename + ' has a syntax error') except py_compile.PyCompileError as e: print(filename + ' has a syntax error! Interpreter output:', file= sys.stderr) print(' ' + e.msg.strip(), file=sys.stderr) if enforce: raise DXSyntaxError(e.msg.strip())
check_file_syntax
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(filename, temp_dir, override_lang=None, enforce=True):\n\n def x(filename):\n pyc_path = os.path.join(temp_dir, os.path.basename(filename) + '.pyc')\n try:\n if USING_PYTHON2:\n filename = filename.encode(sys.getfilesystemencoding())\n py_compile.compile(filename, cfile=pyc_path, doraise=True)\n finally:\n try:\n os.unlink(pyc_path)\n except OSError:\n pass\n\n def x(filename):\n if platform.system() == 'Windows':\n logging.warn(\n 'Skipping bash syntax check due to unavailability of bash on Windows.'\n )\n else:\n subprocess.check_output(['/bin/bash', '-n', filename], stderr=\n subprocess.STDOUT)\n if override_lang == 'python2.7':\n checker_fn = check_python\n elif override_lang == 'bash':\n checker_fn = check_bash\n elif filename.endswith('.py'):\n checker_fn = check_python\n elif filename.endswith('.sh'):\n checker_fn = check_bash\n else:\n return\n open(filename)\n try:\n checker_fn(filename)\n except subprocess.CalledProcessError as e:\n print(filename + ' has a syntax error! Interpreter output:', file=\n sys.stderr)\n for line in e.output.strip('\\n').split('\\n'):\n print(' ' + line.rstrip('\\n'), file=sys.stderr)\n if enforce:\n raise DXSyntaxError(filename + ' has a syntax error')\n except py_compile.PyCompileError as e:\n print(filename + ' has a syntax error! Interpreter output:', file=\n sys.stderr)\n print(' ' + e.msg.strip(), file=sys.stderr)\n if enforce:\n raise DXSyntaxError(e.msg.strip())\n", "role": "user" }, { "content": "check_file_syntax", "role": "assistant" } ]
def x(src_dir, temp_dir, mode, enforce=True): _lint(os.path.join(src_dir, 'dxapp.json'), mode) manifest = json.load(open(os.path.join(src_dir, 'dxapp.json'))) if 'runSpec' in manifest: if 'interpreter' not in manifest['runSpec']: raise dxpy.app_builder.AppBuilderException( 'runSpec.interpreter field was not present') if 'release' not in manifest['runSpec' ] or 'distribution' not in manifest['runSpec']: warn_message = ( 'runSpec.distribution or runSpec.release was not present. These fields ' ) warn_message += ( 'will be required in a future version of the API. Recommended value ' ) warn_message += ( 'for distribution is "Ubuntu" and release - "14.04".') logger.warn(warn_message) if manifest['runSpec']['interpreter'] in ['python2.7', 'bash']: if 'file' in manifest['runSpec']: entry_point_file = os.path.abspath(os.path.join(src_dir, manifest['runSpec']['file'])) try: _check_file_syntax(entry_point_file, temp_dir, override_lang=manifest['runSpec']['interpreter'], enforce=enforce) except IOError as e: raise dxpy.app_builder.AppBuilderException( 'Could not open runSpec.file=%r. The problem was: %s' % (entry_point_file, e)) except DXSyntaxError: raise dxpy.app_builder.AppBuilderException( 'Entry point file %s has syntax errors, see above for details. Rerun with --no-check-syntax to proceed anyway.' % (entry_point_file,)) elif 'code' in manifest['runSpec']: try: _check_syntax(manifest['runSpec']['code'], manifest[ 'runSpec']['interpreter'], temp_dir, enforce=enforce) except DXSyntaxError: raise dxpy.app_builder.AppBuilderException( 'Code in runSpec.code has syntax errors, see above for details. Rerun with --no-check-syntax to proceed anyway.' ) cluster_bootstrap_scripts = ( _get_all_cluster_bootstrap_script_names(manifest)) for filename in cluster_bootstrap_scripts: abs_filename = os.path.abspath(os.path.join(src_dir, filename)) try: _check_file_syntax(abs_filename, temp_dir, override_lang=manifest['runSpec']['interpreter'], enforce=enforce) except IOError as e: raise dxpy.app_builder.AppBuilderException( 'Could not open cluster bootstrap script %r. The problem was: %s' % (abs_filename, e)) except DXSyntaxError: raise dxpy.app_builder.AppBuilderException( 'Code in cluster bootstrapScript %r has syntax errors, see above for details. Rerun with --no-check-syntax to proceed anyway.' % filename) if 'execDepends' in manifest['runSpec']: if not isinstance(manifest['runSpec']['execDepends'], list): raise dxpy.app_builder.AppBuilderException( 'Expected runSpec.execDepends to be an array. Rerun with --no-check-syntax to proceed anyway.' ) if not all(isinstance(dep, dict) for dep in manifest['runSpec'] ['execDepends']): raise dxpy.app_builder.AppBuilderException( 'Expected runSpec.execDepends to be an array of hashes. Rerun with --no-check-syntax to proceed anyway.' ) if any(dep.get('package_manager', 'apt') != 'apt' for dep in manifest['runSpec']['execDepends']): if not isinstance(manifest.get('access'), dict ) or 'network' not in manifest['access']: msg = '\n'.join([ 'runSpec.execDepends specifies non-APT dependencies, but no network access spec is given.' , 'Add {"access": {"network": ["*"]}} to allow dependencies to install.' , 'See https://wiki.dnanexus.com/Developer-Tutorials/Request-Additional-App-Resources#Network-Access.' , 'Rerun with --no-check-syntax to proceed anyway.']) raise dxpy.app_builder.AppBuilderException(msg) if 'authorizedUsers' in manifest: if not isinstance(manifest['authorizedUsers'], list) or isinstance( manifest['authorizedUsers'], basestring): raise dxpy.app_builder.AppBuilderException( 'Expected authorizedUsers to be a list of strings') for thing in manifest['authorizedUsers']: if thing != 'PUBLIC' and (not isinstance(thing, basestring) or not re.match('^(org-|user-)', thing)): raise dxpy.app_builder.AppBuilderException( 'authorizedUsers field contains an entry which is not either the string "PUBLIC" or a user or org ID' ) if 'pricingPolicy' in manifest: error_message = ( '"pricingPolicy" at the top level is not accepted. It must be specified ' ) error_message += ( 'under the "regionalOptions" field in all enabled regions of the app' ) raise dxpy.app_builder.AppBuilderException(error_message) files_with_problems = [] for dirpath, dirnames, filenames in os.walk(os.path.abspath(os.path. join(src_dir, 'resources'))): for filename in filenames: if not filename.startswith('._'): try: _check_file_syntax(os.path.join(dirpath, filename), temp_dir, enforce=True) except IOError as e: raise dxpy.app_builder.AppBuilderException( 'Could not open file in resources directory %r. The problem was: %s' % (os.path.join(dirpath, filename), e)) except DXSyntaxError: files_with_problems.append(os.path.join(dirpath, filename)) if files_with_problems: files_str = files_with_problems[0] if len(files_with_problems ) == 1 else files_with_problems[0] + ' and ' + str(len( files_with_problems) - 1) + ' other file' + ('s' if len( files_with_problems) > 2 else '') logging.warn('%s contained syntax errors, see above for details' % (files_str,))
verify_app_source_dir_impl
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(src_dir, temp_dir, mode, enforce=True):\n    _lint(os.path.join(src_dir, 'dxapp.json'), mode)\n    manifest = json.load(open(os.path.join(src_dir, 'dxapp.json')))\n    if 'runSpec' in manifest:\n        if 'interpreter' not in manifest['runSpec']:\n            raise dxpy.app_builder.AppBuilderException(\n                'runSpec.interpreter field was not present')\n        if 'release' not in manifest['runSpec'\n            ] or 'distribution' not in manifest['runSpec']:\n            warn_message = (\n                'runSpec.distribution or runSpec.release was not present. These fields '\n                )\n            warn_message += (\n                'will be required in a future version of the API. Recommended value '\n                )\n            warn_message += (\n                'for distribution is \"Ubuntu\" and release - \"14.04\".')\n            logger.warn(warn_message)\n        if manifest['runSpec']['interpreter'] in ['python2.7', 'bash']:\n            if 'file' in manifest['runSpec']:\n                entry_point_file = os.path.abspath(os.path.join(src_dir,\n                    manifest['runSpec']['file']))\n                try:\n                    _check_file_syntax(entry_point_file, temp_dir,\n                        override_lang=manifest['runSpec']['interpreter'],\n                        enforce=enforce)\n                except IOError as e:\n                    raise dxpy.app_builder.AppBuilderException(\n                        'Could not open runSpec.file=%r. The problem was: %s' %\n                        (entry_point_file, e))\n                except DXSyntaxError:\n                    raise dxpy.app_builder.AppBuilderException(\n                        'Entry point file %s has syntax errors, see above for details. Rerun with --no-check-syntax to proceed anyway.'\n                        % (entry_point_file,))\n            elif 'code' in manifest['runSpec']:\n                try:\n                    _check_syntax(manifest['runSpec']['code'], manifest[\n                        'runSpec']['interpreter'], temp_dir, enforce=enforce)\n                except DXSyntaxError:\n                    raise dxpy.app_builder.AppBuilderException(\n                        'Code in runSpec.code has syntax errors, see above for details. Rerun with --no-check-syntax to proceed anyway.'\n                        )\n            cluster_bootstrap_scripts = (\n                _get_all_cluster_bootstrap_script_names(manifest))\n            for filename in cluster_bootstrap_scripts:\n                abs_filename = os.path.abspath(os.path.join(src_dir, filename))\n                try:\n                    _check_file_syntax(abs_filename, temp_dir,\n                        override_lang=manifest['runSpec']['interpreter'],\n                        enforce=enforce)\n                except IOError as e:\n                    raise dxpy.app_builder.AppBuilderException(\n                        'Could not open cluster bootstrap script %r. The problem was: %s'\n                        % (abs_filename, e))\n                except DXSyntaxError:\n                    raise dxpy.app_builder.AppBuilderException(\n                        'Code in cluster bootstrapScript %r has syntax errors, see above for details. Rerun with --no-check-syntax to proceed anyway.'\n                        % filename)\n        if 'execDepends' in manifest['runSpec']:\n            if not isinstance(manifest['runSpec']['execDepends'], list):\n                raise dxpy.app_builder.AppBuilderException(\n                    'Expected runSpec.execDepends to be an array. Rerun with --no-check-syntax to proceed anyway.'\n                    )\n            if not all(isinstance(dep, dict) for dep in manifest['runSpec']\n                ['execDepends']):\n                raise dxpy.app_builder.AppBuilderException(\n                    'Expected runSpec.execDepends to be an array of hashes. Rerun with --no-check-syntax to proceed anyway.'\n                    )\n            if any(dep.get('package_manager', 'apt') != 'apt' for dep in\n                manifest['runSpec']['execDepends']):\n                if not isinstance(manifest.get('access'), dict\n                    ) or 'network' not in manifest['access']:\n                    msg = '\\n'.join([\n                        'runSpec.execDepends specifies non-APT dependencies, but no network access spec is given.'\n                        ,\n                        'Add {\"access\": {\"network\": [\"*\"]}} to allow dependencies to install.'\n                        ,\n                        'See https://wiki.dnanexus.com/Developer-Tutorials/Request-Additional-App-Resources#Network-Access.'\n                        , 'Rerun with --no-check-syntax to proceed anyway.'])\n                    raise dxpy.app_builder.AppBuilderException(msg)\n    if 'authorizedUsers' in manifest:\n        if not isinstance(manifest['authorizedUsers'], list) or isinstance(\n            manifest['authorizedUsers'], basestring):\n            raise dxpy.app_builder.AppBuilderException(\n                'Expected authorizedUsers to be a list of strings')\n        for thing in manifest['authorizedUsers']:\n            if thing != 'PUBLIC' and (not isinstance(thing, basestring) or \n                not re.match('^(org-|user-)', thing)):\n                raise dxpy.app_builder.AppBuilderException(\n                    'authorizedUsers field contains an entry which is not either the string \"PUBLIC\" or a user or org ID'\n                    )\n    if 'pricingPolicy' in manifest:\n        error_message = (\n            '\"pricingPolicy\" at the top level is not accepted. It must be specified '\n            )\n        error_message += (\n            'under the \"regionalOptions\" field in all enabled regions of the app'\n            )\n        raise dxpy.app_builder.AppBuilderException(error_message)\n    files_with_problems = []\n    for dirpath, dirnames, filenames in os.walk(os.path.abspath(os.path.\n        join(src_dir, 'resources'))):\n        for filename in filenames:\n            if not filename.startswith('._'):\n                try:\n                    _check_file_syntax(os.path.join(dirpath, filename),\n                        temp_dir, enforce=True)\n                except IOError as e:\n                    raise dxpy.app_builder.AppBuilderException(\n                        'Could not open file in resources directory %r. The problem was: %s'\n                        % (os.path.join(dirpath, filename), e))\n                except DXSyntaxError:\n                    files_with_problems.append(os.path.join(dirpath, filename))\n    if files_with_problems:\n        files_str = files_with_problems[0] if len(files_with_problems\n            ) == 1 else files_with_problems[0] + ' and ' + str(len(\n            files_with_problems) - 1) + ' other file' + ('s' if len(\n            files_with_problems) > 2 else '')\n        logging.warn('%s contained syntax errors, see above for details' %\n            (files_str,))\n", "role": "user" }, { "content": "verify_app_source_dir_impl", "role": "assistant" } ]
def x(src_dir, mode, enforce=True): temp_dir = tempfile.mkdtemp(prefix='dx-build_tmp') try: _verify_app_source_dir_impl(src_dir, temp_dir, mode, enforce=enforce) finally: shutil.rmtree(temp_dir)
verify_app_source_dir
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(src_dir, mode, enforce=True):\n temp_dir = tempfile.mkdtemp(prefix='dx-build_tmp')\n try:\n _verify_app_source_dir_impl(src_dir, temp_dir, mode, enforce=enforce)\n finally:\n shutil.rmtree(temp_dir)\n", "role": "user" }, { "content": "verify_app_source_dir", "role": "assistant" } ]
def x(src_dir): if not os.path.isdir(src_dir): parser.error('%s is not a directory' % src_dir) if not os.path.exists(os.path.join(src_dir, 'dxapp.json')): raise dxpy.app_builder.AppBuilderException( 'Directory %s does not contain dxapp.json: not a valid DNAnexus app source directory' % src_dir) with open(os.path.join(src_dir, 'dxapp.json')) as app_desc: try: return json_load_raise_on_duplicates(app_desc) except Exception as e: raise dxpy.app_builder.AppBuilderException( 'Could not parse dxapp.json file as JSON: ' + str(e.args))
parse_app_spec
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(src_dir):\n if not os.path.isdir(src_dir):\n parser.error('%s is not a directory' % src_dir)\n if not os.path.exists(os.path.join(src_dir, 'dxapp.json')):\n raise dxpy.app_builder.AppBuilderException(\n 'Directory %s does not contain dxapp.json: not a valid DNAnexus app source directory'\n % src_dir)\n with open(os.path.join(src_dir, 'dxapp.json')) as app_desc:\n try:\n return json_load_raise_on_duplicates(app_desc)\n except Exception as e:\n raise dxpy.app_builder.AppBuilderException(\n 'Could not parse dxapp.json file as JSON: ' + str(e.args))\n", "role": "user" }, { "content": "parse_app_spec", "role": "assistant" } ]
def x(args, extra_args): if not args.remote: try: output = build_and_upload_locally(args.src_dir, args.mode, overwrite=args.overwrite, archive=args.archive, publish= args.publish, destination_override=args.destination, version_override=args.version_override, bill_to_override= args.bill_to, use_temp_build_project=args. use_temp_build_project, do_parallel_build=args. parallel_build, do_version_autonumbering=args. version_autonumbering, do_try_update=args.update, dx_toolkit_autodep=args.dx_toolkit_autodep, do_check_syntax =args.check_syntax, ensure_upload=args.ensure_upload, force_symlinks=args.force_symlinks, dry_run=args.dry_run, confirm=args.confirm, return_object_dump=args.json, region= args.region, **extra_args) if output is not None and args.run is None: print(json.dumps(output)) except dxpy.app_builder.AppBuilderException as e: print('Error: %s' % (e.args,), file=sys.stderr) sys.exit(3) except dxpy.exceptions.DXAPIError as e: print('Error: %s' % (e,), file=sys.stderr) sys.exit(3) if args.dry_run: return None return output['id'] else: try: app_json = _parse_app_spec(args.src_dir) _check_suggestions(app_json, publish=args.publish) _verify_app_source_dir(args.src_dir, args.mode) if args.mode == 'app' and not args.dry_run: dxpy.executable_builder.verify_developer_rights('app-' + app_json['name']) except dxpy.app_builder.AppBuilderException as e: print('Error: %s' % (e.args,), file=sys.stderr) sys.exit(3) if args.dry_run: parser.error('--remote cannot be combined with --dry-run') if args.overwrite: parser.error('--remote cannot be combined with --overwrite/-f') if args.archive: parser.error('--remote cannot be combined with --archive/-a') if args.json: parser.error('--remote cannot be combined with --json') if not args.use_temp_build_project: parser.error( '--remote cannot be combined with --no-temp-build-project') if isinstance(args.region, list) and len(args.region) > 1: parser.error( '--region can only be specified once for remote builds') region = args.region[0] if args.region is not None else None more_kwargs = {} if args.version_override: more_kwargs['version_override'] = args.version_override if args.bill_to: more_kwargs['bill_to_override'] = args.bill_to if not args.version_autonumbering: more_kwargs['do_version_autonumbering'] = False if not args.update: more_kwargs['do_try_update'] = False if not args.parallel_build: more_kwargs['do_parallel_build'] = False if not args.check_syntax: more_kwargs['do_check_syntax'] = False return _build_app_remote(args.mode, args.src_dir, destination_override=args.destination, publish=args.publish, dx_toolkit_autodep=args.dx_toolkit_autodep, region=region, watch=args.watch, **more_kwargs)
build_app
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args, extra_args):\n if not args.remote:\n try:\n output = build_and_upload_locally(args.src_dir, args.mode,\n overwrite=args.overwrite, archive=args.archive, publish=\n args.publish, destination_override=args.destination,\n version_override=args.version_override, bill_to_override=\n args.bill_to, use_temp_build_project=args.\n use_temp_build_project, do_parallel_build=args.\n parallel_build, do_version_autonumbering=args.\n version_autonumbering, do_try_update=args.update,\n dx_toolkit_autodep=args.dx_toolkit_autodep, do_check_syntax\n =args.check_syntax, ensure_upload=args.ensure_upload,\n force_symlinks=args.force_symlinks, dry_run=args.dry_run,\n confirm=args.confirm, return_object_dump=args.json, region=\n args.region, **extra_args)\n if output is not None and args.run is None:\n print(json.dumps(output))\n except dxpy.app_builder.AppBuilderException as e:\n print('Error: %s' % (e.args,), file=sys.stderr)\n sys.exit(3)\n except dxpy.exceptions.DXAPIError as e:\n print('Error: %s' % (e,), file=sys.stderr)\n sys.exit(3)\n if args.dry_run:\n return None\n return output['id']\n else:\n try:\n app_json = _parse_app_spec(args.src_dir)\n _check_suggestions(app_json, publish=args.publish)\n _verify_app_source_dir(args.src_dir, args.mode)\n if args.mode == 'app' and not args.dry_run:\n dxpy.executable_builder.verify_developer_rights('app-' +\n app_json['name'])\n except dxpy.app_builder.AppBuilderException as e:\n print('Error: %s' % (e.args,), file=sys.stderr)\n sys.exit(3)\n if args.dry_run:\n parser.error('--remote cannot be combined with --dry-run')\n if args.overwrite:\n parser.error('--remote cannot be combined with --overwrite/-f')\n if args.archive:\n parser.error('--remote cannot be combined with --archive/-a')\n if args.json:\n parser.error('--remote cannot be combined with --json')\n if not args.use_temp_build_project:\n parser.error(\n '--remote cannot be combined with --no-temp-build-project')\n if isinstance(args.region, list) and len(args.region) > 1:\n parser.error(\n '--region can only be specified once for remote builds')\n region = args.region[0] if args.region is not None else None\n more_kwargs = {}\n if args.version_override:\n more_kwargs['version_override'] = args.version_override\n if args.bill_to:\n more_kwargs['bill_to_override'] = args.bill_to\n if not args.version_autonumbering:\n more_kwargs['do_version_autonumbering'] = False\n if not args.update:\n more_kwargs['do_try_update'] = False\n if not args.parallel_build:\n more_kwargs['do_parallel_build'] = False\n if not args.check_syntax:\n more_kwargs['do_check_syntax'] = False\n return _build_app_remote(args.mode, args.src_dir,\n destination_override=args.destination, publish=args.publish,\n dx_toolkit_autodep=args.dx_toolkit_autodep, region=region,\n watch=args.watch, **more_kwargs)\n", "role": "user" }, { "content": "build_app", "role": "assistant" } ]
def x(**kwargs): if len(sys.argv) > 0: if sys.argv[0].endswith('dx-build-app'): logging.warn( 'Warning: dx-build-app has been replaced with "dx build --create-app". Please update your scripts.' ) elif sys.argv[0].endswith('dx-build-applet'): logging.warn( 'Warning: dx-build-applet has been replaced with "dx build". Please update your scripts.' ) exit(0)
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(**kwargs):\n if len(sys.argv) > 0:\n if sys.argv[0].endswith('dx-build-app'):\n logging.warn(\n 'Warning: dx-build-app has been replaced with \"dx build --create-app\". Please update your scripts.'\n )\n elif sys.argv[0].endswith('dx-build-applet'):\n logging.warn(\n 'Warning: dx-build-applet has been replaced with \"dx build\". Please update your scripts.'\n )\n exit(0)\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(self, dxid=None, name=None, alias=None): self._dxid = None self._name = None self._alias = None if dxid is not None: if name is not None or alias is not None: raise DXError( 'Did not expect name or alias to be given if dxid is given') verify_string_dxid(dxid, self._class) self._dxid = dxid elif name is not None: self._name = name if not isinstance(name, basestring): raise DXError('App name needs to be a string: %r' % (name,)) if alias is not None: if not isinstance(alias, basestring): raise DXError('App alias needs to be a string: %r' % (alias,)) self._alias = alias else: self._alias = 'default'
set_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dxid=None, name=None, alias=None):\n self._dxid = None\n self._name = None\n self._alias = None\n if dxid is not None:\n if name is not None or alias is not None:\n raise DXError(\n 'Did not expect name or alias to be given if dxid is given')\n verify_string_dxid(dxid, self._class)\n self._dxid = dxid\n elif name is not None:\n self._name = name\n if not isinstance(name, basestring):\n raise DXError('App name needs to be a string: %r' % (name,))\n if alias is not None:\n if not isinstance(alias, basestring):\n raise DXError('App alias needs to be a string: %r' % (alias,))\n self._alias = alias\n else:\n self._alias = 'default'\n", "role": "user" }, { "content": "set_id", "role": "assistant" } ]
def x(self): if self._dxid is not None: return self._dxid else: return 'app-' + self._name + '/' + self._alias
get_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self._dxid is not None:\n return self._dxid\n else:\n return 'app-' + self._name + '/' + self._alias\n", "role": "user" }, { "content": "get_id", "role": "assistant" } ]
def x(self, **kwargs): dx_hash = {} if 'applet' not in kwargs and 'initializeFrom' not in kwargs: raise DXError( '%s: One of the keyword arguments %s and %s is required' % ( self.__class__.__name__, 'applet', 'initializeFrom')) for field in ['version']: if field not in kwargs: raise DXError('%s: Keyword argument %s is required' % (self. __class__.__name__, field)) dx_hash[field] = kwargs[field] del kwargs[field] for field in ('initializeFrom', 'applet', 'name', 'title', 'summary', 'description', 'billing', 'access', 'resources'): if field in kwargs: dx_hash[field] = kwargs[field] del kwargs[field] if 'bill_to' in kwargs: dx_hash['billTo'] = kwargs['bill_to'] del kwargs['bill_to'] resp = dxpy.api.app_new(dx_hash, **kwargs) self.set_id(dxid=resp['id'])
new
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **kwargs):\n dx_hash = {}\n if 'applet' not in kwargs and 'initializeFrom' not in kwargs:\n raise DXError(\n '%s: One of the keyword arguments %s and %s is required' % (\n self.__class__.__name__, 'applet', 'initializeFrom'))\n for field in ['version']:\n if field not in kwargs:\n raise DXError('%s: Keyword argument %s is required' % (self.\n __class__.__name__, field))\n dx_hash[field] = kwargs[field]\n del kwargs[field]\n for field in ('initializeFrom', 'applet', 'name', 'title', 'summary',\n 'description', 'billing', 'access', 'resources'):\n if field in kwargs:\n dx_hash[field] = kwargs[field]\n del kwargs[field]\n if 'bill_to' in kwargs:\n dx_hash['billTo'] = kwargs['bill_to']\n del kwargs['bill_to']\n resp = dxpy.api.app_new(dx_hash, **kwargs)\n self.set_id(dxid=resp['id'])\n", "role": "user" }, { "content": "new", "role": "assistant" } ]
def x(self, fields=None, **kwargs): describe_input = {} if fields: describe_input['fields'] = fields if self._dxid is not None: self._desc = dxpy.api.app_describe(self._dxid, input_params= describe_input, **kwargs) else: self._desc = dxpy.api.app_describe('app-' + self._name, alias=self. _alias, input_params=describe_input, **kwargs) return self._desc
describe
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, fields=None, **kwargs):\n describe_input = {}\n if fields:\n describe_input['fields'] = fields\n if self._dxid is not None:\n self._desc = dxpy.api.app_describe(self._dxid, input_params=\n describe_input, **kwargs)\n else:\n self._desc = dxpy.api.app_describe('app-' + self._name, alias=self.\n _alias, input_params=describe_input, **kwargs)\n return self._desc\n", "role": "user" }, { "content": "describe", "role": "assistant" } ]
def x(self, **kwargs): updates = {} for field in ('applet', 'billing', 'access', 'resources', 'details'): if field in kwargs: updates[field] = kwargs[field] del kwargs[field] if self._dxid is not None: resp = dxpy.api.app_update(self._dxid, input_params=updates, **kwargs) else: resp = dxpy.api.app_update('app-' + self._name, alias=self._alias, input_params=updates, **kwargs)
update
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **kwargs):\n updates = {}\n for field in ('applet', 'billing', 'access', 'resources', 'details'):\n if field in kwargs:\n updates[field] = kwargs[field]\n del kwargs[field]\n if self._dxid is not None:\n resp = dxpy.api.app_update(self._dxid, input_params=updates, **kwargs)\n else:\n resp = dxpy.api.app_update('app-' + self._name, alias=self._alias,\n input_params=updates, **kwargs)\n", "role": "user" }, { "content": "update", "role": "assistant" } ]
def x(self, tags, **kwargs): if self._dxid is not None: return dxpy.api.app_add_tags(self._dxid, input_params={'tags': tags }, **kwargs) else: return dxpy.api.app_add_tags('app-' + self._name, alias=self._alias, input_params={'tags': tags}, **kwargs)
add_tags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, tags, **kwargs):\n if self._dxid is not None:\n return dxpy.api.app_add_tags(self._dxid, input_params={'tags': tags\n }, **kwargs)\n else:\n return dxpy.api.app_add_tags('app-' + self._name, alias=self._alias,\n input_params={'tags': tags}, **kwargs)\n", "role": "user" }, { "content": "add_tags", "role": "assistant" } ]
def x(self, tags, **kwargs): if self._dxid is not None: return dxpy.api.app_remove_tags(self._dxid, input_params={'tags': tags}, **kwargs) else: return dxpy.api.app_remove_tags('app-' + self._name, alias=self. _alias, input_params={'tags': tags}, **kwargs)
remove_tags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, tags, **kwargs):\n if self._dxid is not None:\n return dxpy.api.app_remove_tags(self._dxid, input_params={'tags':\n tags}, **kwargs)\n else:\n return dxpy.api.app_remove_tags('app-' + self._name, alias=self.\n _alias, input_params={'tags': tags}, **kwargs)\n", "role": "user" }, { "content": "remove_tags", "role": "assistant" } ]
def x(self, **kwargs): if self._dxid is not None: return dxpy.api.app_install(self._dxid, **kwargs) else: return dxpy.api.app_install('app-' + self._name, alias=self._alias, **kwargs)
install
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **kwargs):\n if self._dxid is not None:\n return dxpy.api.app_install(self._dxid, **kwargs)\n else:\n return dxpy.api.app_install('app-' + self._name, alias=self._alias,\n **kwargs)\n", "role": "user" }, { "content": "install", "role": "assistant" } ]
def x(self, **kwargs): if self._dxid is not None: return dxpy.api.app_uninstall(self._dxid, **kwargs) else: return dxpy.api.app_uninstall('app-' + self._name, alias=self. _alias, **kwargs)
uninstall
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **kwargs):\n if self._dxid is not None:\n return dxpy.api.app_uninstall(self._dxid, **kwargs)\n else:\n return dxpy.api.app_uninstall('app-' + self._name, alias=self.\n _alias, **kwargs)\n", "role": "user" }, { "content": "uninstall", "role": "assistant" } ]
def x(self, **kwargs): if self._dxid is not None: return dxpy.api.app_get(self._dxid, **kwargs) else: return dxpy.api.app_get('app-' + self._name, alias=self._alias, ** kwargs)
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **kwargs):\n if self._dxid is not None:\n return dxpy.api.app_get(self._dxid, **kwargs)\n else:\n return dxpy.api.app_get('app-' + self._name, alias=self._alias, **\n kwargs)\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, **kwargs): if self._dxid is not None: return dxpy.api.app_delete(self._dxid, **kwargs) else: return dxpy.api.app_delete('app-' + self._name, alias=self._alias, **kwargs)
delete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **kwargs):\n if self._dxid is not None:\n return dxpy.api.app_delete(self._dxid, **kwargs)\n else:\n return dxpy.api.app_delete('app-' + self._name, alias=self._alias,\n **kwargs)\n", "role": "user" }, { "content": "delete", "role": "assistant" } ]
def x(self, app_input, *args, **kwargs): return super(DXApp, self).run(app_input, *args, **kwargs)
run
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, app_input, *args, **kwargs):\n return super(DXApp, self).run(app_input, *args, **kwargs)\n", "role": "user" }, { "content": "run", "role": "assistant" } ]
def x(src_dir, json_file_name, parser): if not os.path.isdir(src_dir): parser.error('{} is not a directory'.format(src_dir)) if not os.path.exists(os.path.join(src_dir, json_file_name)): raise WorkflowBuilderException( 'Directory {} does not contain dxworkflow.json: not a valid DNAnexus workflow source directory' .format(src_dir)) with open(os.path.join(src_dir, json_file_name)) as desc: try: return json_load_raise_on_duplicates(desc) except Exception as e: raise WorkflowBuilderException( 'Could not parse {} file as JSON: {}'.format(json_file_name, e.args))
parse_executable_spec
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(src_dir, json_file_name, parser):\n if not os.path.isdir(src_dir):\n parser.error('{} is not a directory'.format(src_dir))\n if not os.path.exists(os.path.join(src_dir, json_file_name)):\n raise WorkflowBuilderException(\n 'Directory {} does not contain dxworkflow.json: not a valid DNAnexus workflow source directory'\n .format(src_dir))\n with open(os.path.join(src_dir, json_file_name)) as desc:\n try:\n return json_load_raise_on_duplicates(desc)\n except Exception as e:\n raise WorkflowBuilderException(\n 'Could not parse {} file as JSON: {}'.format(json_file_name,\n e.args))\n", "role": "user" }, { "content": "parse_executable_spec", "role": "assistant" } ]
def x(json_spec, args, build_project_id=None): if build_project_id: return build_project_id if 'project' in json_spec: return json_spec['project'] if dxpy.WORKSPACE_ID: return dxpy.WORKSPACE_ID error_msg = ( "Can't create a workflow without specifying a destination project; ") error_msg += ( 'please use the -d/--destination flag to explicitly specify a project') raise WorkflowBuilderException(error_msg)
get_destination_project
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, args, build_project_id=None):\n if build_project_id:\n return build_project_id\n if 'project' in json_spec:\n return json_spec['project']\n if dxpy.WORKSPACE_ID:\n return dxpy.WORKSPACE_ID\n error_msg = (\n \"Can't create a workflow without specifying a destination project; \")\n error_msg += (\n 'please use the -d/--destination flag to explicitly specify a project')\n raise WorkflowBuilderException(error_msg)\n", "role": "user" }, { "content": "get_destination_project", "role": "assistant" } ]
def x(json_spec, folder_name=None): dest_folder = folder_name or json_spec.get('folder') or '/' if not dest_folder.endswith('/'): dest_folder = dest_folder + '/' return dest_folder
get_destination_folder
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, folder_name=None):\n dest_folder = folder_name or json_spec.get('folder') or '/'\n if not dest_folder.endswith('/'):\n dest_folder = dest_folder + '/'\n return dest_folder\n", "role": "user" }, { "content": "get_destination_folder", "role": "assistant" } ]
def x(global_workflow_id, categories_to_set): assert isinstance(categories_to_set, list) existing_categories = dxpy.api.global_workflow_list_categories( global_workflow_id)['categories'] categories_to_add = set(categories_to_set).difference(set( existing_categories)) categories_to_remove = set(existing_categories).difference(set( categories_to_set)) if categories_to_add: dxpy.api.global_workflow_add_categories(global_workflow_id, input_params={'categories': list(categories_to_add)}) if categories_to_remove: dxpy.api.global_workflow_remove_categories(global_workflow_id, input_params={'categories': list(categories_to_remove)})
set_categories_on_workflow
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(global_workflow_id, categories_to_set):\n assert isinstance(categories_to_set, list)\n existing_categories = dxpy.api.global_workflow_list_categories(\n global_workflow_id)['categories']\n categories_to_add = set(categories_to_set).difference(set(\n existing_categories))\n categories_to_remove = set(existing_categories).difference(set(\n categories_to_set))\n if categories_to_add:\n dxpy.api.global_workflow_add_categories(global_workflow_id,\n input_params={'categories': list(categories_to_add)})\n if categories_to_remove:\n dxpy.api.global_workflow_remove_categories(global_workflow_id,\n input_params={'categories': list(categories_to_remove)})\n", "role": "user" }, { "content": "set_categories_on_workflow", "role": "assistant" } ]
def x(json_spec, name=None, version=None): requested_name = json_spec['name'] requested_version = json_spec['version'] if requested_name == name and requested_version == version: return True else: try: desc_output = dxpy.api.global_workflow_describe( 'globalworkflow-' + json_spec['name'], alias=json_spec[ 'version'], input_params={'fields': {'name': True, 'version': True}}) return desc_output['name'] == json_spec['name'] and desc_output[ 'version'] == json_spec['version'] except dxpy.exceptions.DXAPIError: return False except: raise
version_exists
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, name=None, version=None):\n requested_name = json_spec['name']\n requested_version = json_spec['version']\n if requested_name == name and requested_version == version:\n return True\n else:\n try:\n desc_output = dxpy.api.global_workflow_describe(\n 'globalworkflow-' + json_spec['name'], alias=json_spec[\n 'version'], input_params={'fields': {'name': True,\n 'version': True}})\n return desc_output['name'] == json_spec['name'] and desc_output[\n 'version'] == json_spec['version']\n except dxpy.exceptions.DXAPIError:\n return False\n except:\n raise\n", "role": "user" }, { "content": "version_exists", "role": "assistant" } ]
def x(stages, ignore_reuse_stages):
    if not isinstance(ignore_reuse_stages, list):
        raise WorkflowBuilderException(
            '"IgnoreReuse" must be a list of strings - stage IDs or "*"')
    ignore_reuse_set = set(ignore_reuse_stages)
    if '*' in ignore_reuse_set and len(ignore_reuse_set) == 1:
        return
    stage_ids = set([stage.get('id') for stage in stages])
    for ignored in ignore_reuse_set:
        if ignored not in stage_ids:
            raise WorkflowBuilderException(
                'Stage with ID {} not found. Add a matching "id" for the stage you wish to set ignoreReuse for'
                .format(ignored))
validate_ignore_reuse
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(stages, ignore_reuse_stages):\n    if not isinstance(ignore_reuse_stages, list):\n        raise WorkflowBuilderException(\n            '\"IgnoreReuse\" must be a list of strings - stage IDs or \"*\"')\n    ignore_reuse_set = set(ignore_reuse_stages)\n    if '*' in ignore_reuse_set and len(ignore_reuse_set) == 1:\n        return\n    stage_ids = set([stage.get('id') for stage in stages])\n    for ignored in ignore_reuse_set:\n        if ignored not in stage_ids:\n            raise WorkflowBuilderException(\n                'Stage with ID {} not found. Add a matching \"id\" for the stage you wish to set ignoreReuse for'\n                .format(ignored))\n", "role": "user" }, { "content": "validate_ignore_reuse", "role": "assistant" } ]
def x(stages): if not isinstance(stages, list): raise WorkflowBuilderException( 'Stages must be specified as a list of dictionaries') validated_stages = [] for index, stage in enumerate(stages): validated_stages.append(_get_validated_stage(stage, index)) return validated_stages
get_validated_stages
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(stages):\n if not isinstance(stages, list):\n raise WorkflowBuilderException(\n 'Stages must be specified as a list of dictionaries')\n validated_stages = []\n for index, stage in enumerate(stages):\n validated_stages.append(_get_validated_stage(stage, index))\n return validated_stages\n", "role": "user" }, { "content": "get_validated_stages", "role": "assistant" } ]
def x(json_spec, args): validated = {} override_project_id, override_folder, override_workflow_name = (dxpy. executable_builder.get_parsed_destination(args.destination)) validated['project'] = _get_destination_project(json_spec, args, override_project_id) validated['folder'] = _get_destination_folder(json_spec, override_folder) workflow_name = _get_workflow_name(json_spec, override_workflow_name) if not workflow_name: print('Warning: workflow name is not specified') else: validated['name'] = workflow_name return validated
validate_json_for_regular_workflow
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, args):\n validated = {}\n override_project_id, override_folder, override_workflow_name = (dxpy.\n executable_builder.get_parsed_destination(args.destination))\n validated['project'] = _get_destination_project(json_spec, args,\n override_project_id)\n validated['folder'] = _get_destination_folder(json_spec, override_folder)\n workflow_name = _get_workflow_name(json_spec, override_workflow_name)\n if not workflow_name:\n print('Warning: workflow name is not specified')\n else:\n validated['name'] = workflow_name\n return validated\n", "role": "user" }, { "content": "validate_json_for_regular_workflow", "role": "assistant" } ]
def x(json_spec, args): if 'name' not in json_spec: raise WorkflowBuilderException( "dxworkflow.json contains no 'name' field, but it is required to build a global workflow" ) if not dxpy.executable_builder.GLOBAL_EXEC_NAME_RE.match(json_spec['name'] ): raise WorkflowBuilderException( 'The name of your workflow must match /^[a-zA-Z0-9._-]+$/') if json_spec['name'] != json_spec['name'].lower(): logger.warn('workflow name "{}" should be all lowercase'.format( json_spec['name'])) if 'version' not in json_spec: raise WorkflowBuilderException( "dxworkflow.json contains no 'version' field, but it is required to build a global workflow" ) if not dxpy.executable_builder.GLOBAL_EXEC_VERSION_RE.match(json_spec[ 'version']): logger.warn( '"version" {} should be semver compliant (e.g. of the form X.Y.Z)' .format(json_spec['version'])) if 'details' in json_spec: if not isinstance(json_spec['details'], dict): raise WorkflowBuilderException( 'The field "details" must be a dictionary') if 'regionalOptions' in json_spec: if not (isinstance(json_spec['regionalOptions'], dict) and json_spec['regionalOptions'] and all([isinstance(i, dict) for i in json_spec['regionalOptions'].values()])): raise WorkflowBuilderException( 'The field "regionalOptions" must be a non-empty dictionary whose values are dictionaries' ) if args.bill_to: json_spec['billTo'] = args.bill_to
validate_json_for_global_workflow
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, args):\n if 'name' not in json_spec:\n raise WorkflowBuilderException(\n \"dxworkflow.json contains no 'name' field, but it is required to build a global workflow\"\n )\n if not dxpy.executable_builder.GLOBAL_EXEC_NAME_RE.match(json_spec['name']\n ):\n raise WorkflowBuilderException(\n 'The name of your workflow must match /^[a-zA-Z0-9._-]+$/')\n if json_spec['name'] != json_spec['name'].lower():\n logger.warn('workflow name \"{}\" should be all lowercase'.format(\n json_spec['name']))\n if 'version' not in json_spec:\n raise WorkflowBuilderException(\n \"dxworkflow.json contains no 'version' field, but it is required to build a global workflow\"\n )\n if not dxpy.executable_builder.GLOBAL_EXEC_VERSION_RE.match(json_spec[\n 'version']):\n logger.warn(\n '\"version\" {} should be semver compliant (e.g. of the form X.Y.Z)'\n .format(json_spec['version']))\n if 'details' in json_spec:\n if not isinstance(json_spec['details'], dict):\n raise WorkflowBuilderException(\n 'The field \"details\" must be a dictionary')\n if 'regionalOptions' in json_spec:\n if not (isinstance(json_spec['regionalOptions'], dict) and\n json_spec['regionalOptions'] and all([isinstance(i, dict) for i in\n json_spec['regionalOptions'].values()])):\n raise WorkflowBuilderException(\n 'The field \"regionalOptions\" must be a non-empty dictionary whose values are dictionaries'\n )\n if args.bill_to:\n json_spec['billTo'] = args.bill_to\n", "role": "user" }, { "content": "validate_json_for_global_workflow", "role": "assistant" } ]
def x(json_spec, args): if not json_spec: return if not args: return validated_spec = copy.deepcopy(json_spec) unsupported_keys = _get_unsupported_keys(validated_spec.keys(), SUPPORTED_KEYS) if len(unsupported_keys) > 0: logger.warn( 'Warning: the following root level fields are not supported and will be ignored: {}' .format(', '.join(unsupported_keys))) if 'stages' in validated_spec: validated_spec['stages'] = _get_validated_stages(validated_spec[ 'stages']) if 'name' in validated_spec: if args.src_dir != validated_spec['name']: logger.warn( 'workflow name "%s" does not match containing directory "%s"' % (validated_spec['name'], args.src_dir)) if 'ignoreReuse' in validated_spec: validate_ignore_reuse(validated_spec['stages'], validated_spec[ 'ignoreReuse']) validated_documentation_fields = _get_validated_json_for_build_or_update( validated_spec, args) validated_spec.update(validated_documentation_fields) if args.mode == 'workflow': validated = _validate_json_for_regular_workflow(json_spec, args) validated_spec.update(validated) if args.mode == 'globalworkflow': _validate_json_for_global_workflow(validated_spec, args) return validated_spec
get_validated_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, args):\n if not json_spec:\n return\n if not args:\n return\n validated_spec = copy.deepcopy(json_spec)\n unsupported_keys = _get_unsupported_keys(validated_spec.keys(),\n SUPPORTED_KEYS)\n if len(unsupported_keys) > 0:\n logger.warn(\n 'Warning: the following root level fields are not supported and will be ignored: {}'\n .format(', '.join(unsupported_keys)))\n if 'stages' in validated_spec:\n validated_spec['stages'] = _get_validated_stages(validated_spec[\n 'stages'])\n if 'name' in validated_spec:\n if args.src_dir != validated_spec['name']:\n logger.warn(\n 'workflow name \"%s\" does not match containing directory \"%s\"' %\n (validated_spec['name'], args.src_dir))\n if 'ignoreReuse' in validated_spec:\n validate_ignore_reuse(validated_spec['stages'], validated_spec[\n 'ignoreReuse'])\n validated_documentation_fields = _get_validated_json_for_build_or_update(\n validated_spec, args)\n validated_spec.update(validated_documentation_fields)\n if args.mode == 'workflow':\n validated = _validate_json_for_regular_workflow(json_spec, args)\n validated_spec.update(validated)\n if args.mode == 'globalworkflow':\n _validate_json_for_global_workflow(validated_spec, args)\n return validated_spec\n", "role": "user" }, { "content": "get_validated_json", "role": "assistant" } ]
def x(json_spec, args): validated = copy.deepcopy(json_spec) dxpy.executable_builder.inline_documentation_files(validated, args.src_dir) if 'title' not in json_spec: logger.warn( "dxworkflow.json is missing a title, please add one in the 'title' field" ) if 'summary' not in json_spec: logger.warn( "dxworkflow.json is missing a summary, please add one in the 'summary' field" ) elif json_spec['summary'].endswith('.'): logger.warn( 'summary {} should be a short phrase not ending in a period'. format(json_spec['summary'])) return validated
get_validated_json_for_build_or_update
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, args):\n validated = copy.deepcopy(json_spec)\n dxpy.executable_builder.inline_documentation_files(validated, args.src_dir)\n if 'title' not in json_spec:\n logger.warn(\n \"dxworkflow.json is missing a title, please add one in the 'title' field\"\n )\n if 'summary' not in json_spec:\n logger.warn(\n \"dxworkflow.json is missing a summary, please add one in the 'summary' field\"\n )\n elif json_spec['summary'].endswith('.'):\n logger.warn(\n 'summary {} should be a short phrase not ending in a period'.\n format(json_spec['summary']))\n return validated\n", "role": "user" }, { "content": "get_validated_json_for_build_or_update", "role": "assistant" } ]
def x(workflow_enabled_regions, workflow_spec): executables = [i.get('executable') for i in workflow_spec.get('stages')] for exect in executables: if exect.startswith('applet-') and len(workflow_enabled_regions) > 1: raise WorkflowBuilderException( 'Building a global workflow with applets in more than one region is not yet supported.' ) elif exect.startswith('app-'): app_regional_options = dxpy.api.app_describe(exect, input_params={'fields': {'regionalOptions': True}}) app_regions = set(app_regional_options['regionalOptions'].keys()) if not workflow_enabled_regions.issubset(app_regions): additional_workflow_regions = (workflow_enabled_regions - app_regions) mesg = ( 'The app {} is enabled in regions {} while the global workflow in {}.' .format(exect, ', '.join(app_regions), ', '.join( workflow_enabled_regions))) mesg += ' The workflow will not be able to run in {}.'.format( ', '.join(additional_workflow_regions)) mesg += ( ' If you are a developer of the app, you can enable the app in {} to run the workflow in that region(s).' .format(', '.join(additional_workflow_regions))) logger.warn(mesg) elif exect.startswith('workflow-'): inner_workflow_spec = dxpy.api.workflow_describe(exect) _assert_executable_regions_match(workflow_enabled_regions, inner_workflow_spec) elif exect.startswith('globalworkflow-'): raise WorkflowBuilderException( 'Building a global workflow with nested global workflows is not yet supported' )
assert_executable_regions_match
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(workflow_enabled_regions, workflow_spec):\n executables = [i.get('executable') for i in workflow_spec.get('stages')]\n for exect in executables:\n if exect.startswith('applet-') and len(workflow_enabled_regions) > 1:\n raise WorkflowBuilderException(\n 'Building a global workflow with applets in more than one region is not yet supported.'\n )\n elif exect.startswith('app-'):\n app_regional_options = dxpy.api.app_describe(exect,\n input_params={'fields': {'regionalOptions': True}})\n app_regions = set(app_regional_options['regionalOptions'].keys())\n if not workflow_enabled_regions.issubset(app_regions):\n additional_workflow_regions = (workflow_enabled_regions -\n app_regions)\n mesg = (\n 'The app {} is enabled in regions {} while the global workflow in {}.'\n .format(exect, ', '.join(app_regions), ', '.join(\n workflow_enabled_regions)))\n mesg += ' The workflow will not be able to run in {}.'.format(\n ', '.join(additional_workflow_regions))\n mesg += (\n ' If you are a developer of the app, you can enable the app in {} to run the workflow in that region(s).'\n .format(', '.join(additional_workflow_regions)))\n logger.warn(mesg)\n elif exect.startswith('workflow-'):\n inner_workflow_spec = dxpy.api.workflow_describe(exect)\n _assert_executable_regions_match(workflow_enabled_regions,\n inner_workflow_spec)\n elif exect.startswith('globalworkflow-'):\n raise WorkflowBuilderException(\n 'Building a global workflow with nested global workflows is not yet supported'\n )\n", "role": "user" }, { "content": "assert_executable_regions_match", "role": "assistant" } ]
def x(json_spec): workflow_id = dxpy.api.workflow_new(json_spec)['id'] dxpy.api.workflow_close(workflow_id) return workflow_id
build_regular_workflow
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec):\n workflow_id = dxpy.api.workflow_new(json_spec)['id']\n dxpy.api.workflow_close(workflow_id)\n return workflow_id\n", "role": "user" }, { "content": "build_regular_workflow", "role": "assistant" } ]
def x(json_spec, from_command_line): enabled_regions = dxpy.executable_builder.get_enabled_regions( 'globalworkflow', json_spec, from_command_line, WorkflowBuilderException) if not enabled_regions: enabled_regions = [] if not dxpy.WORKSPACE_ID: msg = ( "A context project must be selected to enable a workflow in the project's region." ) msg += ( " You can use 'dx select' to select a project. Otherwise you can use --region option" ) msg += ( ' to select a region in which the workflow should be enabled') raise WorkflowBuilderException(msg) region = dxpy.api.project_describe(dxpy.WORKSPACE_ID, input_params= {'fields': {'region': True}})['region'] enabled_regions.append(region) if not enabled_regions: raise AssertionError( 'This workflow should be enabled in at least one region') return set(enabled_regions)
get_validated_enabled_regions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, from_command_line):\n enabled_regions = dxpy.executable_builder.get_enabled_regions(\n 'globalworkflow', json_spec, from_command_line,\n WorkflowBuilderException)\n if not enabled_regions:\n enabled_regions = []\n if not dxpy.WORKSPACE_ID:\n msg = (\n \"A context project must be selected to enable a workflow in the project's region.\"\n )\n msg += (\n \" You can use 'dx select' to select a project. Otherwise you can use --region option\"\n )\n msg += (\n ' to select a region in which the workflow should be enabled')\n raise WorkflowBuilderException(msg)\n region = dxpy.api.project_describe(dxpy.WORKSPACE_ID, input_params=\n {'fields': {'region': True}})['region']\n enabled_regions.append(region)\n if not enabled_regions:\n raise AssertionError(\n 'This workflow should be enabled in at least one region')\n return set(enabled_regions)\n", "role": "user" }, { "content": "get_validated_enabled_regions", "role": "assistant" } ]
def x(enabled_regions, args): projects_by_region = {} for region in enabled_regions: try: project_input = {'name': 'Temporary build project for dx build global workflow', 'region': region} if args.bill_to: project_input['billTo'] = args.bill_to temp_project = dxpy.api.project_new(project_input)['id'] projects_by_region[region] = temp_project logger.debug('Created temporary project {} to build in'.format( temp_project)) except: if projects_by_region: dxpy.executable_builder.delete_temporary_projects( projects_by_region.values()) err_exit() return projects_by_region
create_temporary_projects
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(enabled_regions, args):\n projects_by_region = {}\n for region in enabled_regions:\n try:\n project_input = {'name':\n 'Temporary build project for dx build global workflow',\n 'region': region}\n if args.bill_to:\n project_input['billTo'] = args.bill_to\n temp_project = dxpy.api.project_new(project_input)['id']\n projects_by_region[region] = temp_project\n logger.debug('Created temporary project {} to build in'.format(\n temp_project))\n except:\n if projects_by_region:\n dxpy.executable_builder.delete_temporary_projects(\n projects_by_region.values())\n err_exit()\n return projects_by_region\n", "role": "user" }, { "content": "create_temporary_projects", "role": "assistant" } ]
def x(enabled_regions, json_spec, args): projects_by_region = _create_temporary_projects(enabled_regions, args) workflows_by_region = {} try: for region, project in projects_by_region.items(): json_spec['project'] = project workflow_id = _build_regular_workflow(json_spec) logger.debug('Created workflow ' + workflow_id + ' successfully') workflows_by_region[region] = workflow_id except: if projects_by_region: dxpy.executable_builder.delete_temporary_projects( projects_by_region.values()) raise return workflows_by_region, projects_by_region
build_underlying_workflows
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(enabled_regions, json_spec, args):\n projects_by_region = _create_temporary_projects(enabled_regions, args)\n workflows_by_region = {}\n try:\n for region, project in projects_by_region.items():\n json_spec['project'] = project\n workflow_id = _build_regular_workflow(json_spec)\n logger.debug('Created workflow ' + workflow_id + ' successfully')\n workflows_by_region[region] = workflow_id\n except:\n if projects_by_region:\n dxpy.executable_builder.delete_temporary_projects(\n projects_by_region.values())\n raise\n return workflows_by_region, projects_by_region\n", "role": "user" }, { "content": "build_underlying_workflows", "role": "assistant" } ]
def x(json_spec, args): enabled_regions = _get_validated_enabled_regions(json_spec, args.region) _assert_executable_regions_match(enabled_regions, json_spec) workflows_by_region, projects_by_region = {}, {} try: workflows_by_region, projects_by_region = _build_underlying_workflows( enabled_regions, json_spec, args) regional_options = {} for region, workflow_id in workflows_by_region.items(): regional_options[region] = {'workflow': workflow_id} json_spec.update({'regionalOptions': regional_options}) gwf_provided_keys = GLOBALWF_SUPPORTED_KEYS.intersection(set( json_spec.keys())) gwf_final_json = dict((k, v) for k, v in json_spec.items() if k in gwf_provided_keys) print_spec = copy.deepcopy(gwf_final_json) if 'description' in gwf_final_json: del print_spec['description'] if 'developerNotes' in gwf_final_json: del print_spec['developerNotes'] logger.info('Will create global workflow with spec: {}'.format(json .dumps(print_spec))) global_workflow_id = dxpy.api.global_workflow_new(gwf_final_json)['id'] logger.info('Uploaded global workflow {n}/{v} successfully'.format( n=gwf_final_json['name'], v=gwf_final_json['version'])) logger.info('You can publish this workflow with:') logger.info(' dx publish {n}/{v}'.format(n=gwf_final_json['name'], v=gwf_final_json['version'])) finally: if projects_by_region: dxpy.executable_builder.delete_temporary_projects( projects_by_region.values()) try: _set_categories_on_workflow(global_workflow_id, gwf_final_json.get( 'categories', [])) except: logger.warn( 'The workflow {n}/{v} was created but setting categories failed' .format(n=gwf_final_json['name'], v=gwf_final_json['version'])) raise return global_workflow_id
build_global_workflow
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, args):\n enabled_regions = _get_validated_enabled_regions(json_spec, args.region)\n _assert_executable_regions_match(enabled_regions, json_spec)\n workflows_by_region, projects_by_region = {}, {}\n try:\n workflows_by_region, projects_by_region = _build_underlying_workflows(\n enabled_regions, json_spec, args)\n regional_options = {}\n for region, workflow_id in workflows_by_region.items():\n regional_options[region] = {'workflow': workflow_id}\n json_spec.update({'regionalOptions': regional_options})\n gwf_provided_keys = GLOBALWF_SUPPORTED_KEYS.intersection(set(\n json_spec.keys()))\n gwf_final_json = dict((k, v) for k, v in json_spec.items() if k in\n gwf_provided_keys)\n print_spec = copy.deepcopy(gwf_final_json)\n if 'description' in gwf_final_json:\n del print_spec['description']\n if 'developerNotes' in gwf_final_json:\n del print_spec['developerNotes']\n logger.info('Will create global workflow with spec: {}'.format(json\n .dumps(print_spec)))\n global_workflow_id = dxpy.api.global_workflow_new(gwf_final_json)['id']\n logger.info('Uploaded global workflow {n}/{v} successfully'.format(\n n=gwf_final_json['name'], v=gwf_final_json['version']))\n logger.info('You can publish this workflow with:')\n logger.info(' dx publish {n}/{v}'.format(n=gwf_final_json['name'],\n v=gwf_final_json['version']))\n finally:\n if projects_by_region:\n dxpy.executable_builder.delete_temporary_projects(\n projects_by_region.values())\n try:\n _set_categories_on_workflow(global_workflow_id, gwf_final_json.get(\n 'categories', []))\n except:\n logger.warn(\n 'The workflow {n}/{v} was created but setting categories failed'\n .format(n=gwf_final_json['name'], v=gwf_final_json['version']))\n raise\n return global_workflow_id\n", "role": "user" }, { "content": "build_global_workflow", "role": "assistant" } ]
def x(json_spec, args): try: if args.mode == 'workflow': json_spec = _get_validated_json(json_spec, args) workflow_id = _build_regular_workflow(json_spec) elif args.mode == 'globalworkflow': existing_workflow = (dxpy.executable_builder. verify_developer_rights('globalworkflow-' + json_spec['name'])) if existing_workflow and _version_exists(json_spec, existing_workflow.name, existing_workflow.version): workflow_id = _update_global_workflow(json_spec, args, existing_workflow.id) else: json_spec = _get_validated_json(json_spec, args) workflow_id = _build_global_workflow(json_spec, args) else: raise WorkflowBuilderException('Unrecognized workflow type: {}' .format(args.mode)) except dxpy.exceptions.DXAPIError as e: raise e return workflow_id
build_or_update_workflow
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_spec, args):\n try:\n if args.mode == 'workflow':\n json_spec = _get_validated_json(json_spec, args)\n workflow_id = _build_regular_workflow(json_spec)\n elif args.mode == 'globalworkflow':\n existing_workflow = (dxpy.executable_builder.\n verify_developer_rights('globalworkflow-' + json_spec['name']))\n if existing_workflow and _version_exists(json_spec,\n existing_workflow.name, existing_workflow.version):\n workflow_id = _update_global_workflow(json_spec, args,\n existing_workflow.id)\n else:\n json_spec = _get_validated_json(json_spec, args)\n workflow_id = _build_global_workflow(json_spec, args)\n else:\n raise WorkflowBuilderException('Unrecognized workflow type: {}'\n .format(args.mode))\n except dxpy.exceptions.DXAPIError as e:\n raise e\n return workflow_id\n", "role": "user" }, { "content": "build_or_update_workflow", "role": "assistant" } ]
def x(args, parser): if args is None: raise Exception('arguments not provided') try: json_spec = _parse_executable_spec(args.src_dir, 'dxworkflow.json', parser) workflow_id = _build_or_update_workflow(json_spec, args) _print_output(workflow_id, args) except WorkflowBuilderException as e: print('Error: %s' % (e.args,), file=sys.stderr) sys.exit(3)
build
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args, parser):\n if args is None:\n raise Exception('arguments not provided')\n try:\n json_spec = _parse_executable_spec(args.src_dir, 'dxworkflow.json',\n parser)\n workflow_id = _build_or_update_workflow(json_spec, args)\n _print_output(workflow_id, args)\n except WorkflowBuilderException as e:\n print('Error: %s' % (e.args,), file=sys.stderr)\n sys.exit(3)\n", "role": "user" }, { "content": "build", "role": "assistant" } ]
def x(args, dest_path, dx_dest, dest_proj): if len(args.sources) != 1: raise DXCLIError('The destination folder does not exist') last_slash_pos = get_last_pos_of_char('/', dest_path) if last_slash_pos == 0: dest_folder = '/' else: dest_folder = dest_path[:last_slash_pos] dest_name = dest_path[last_slash_pos + 1:].replace('\\/', '/') try: dx_dest.list_folder(folder=dest_folder, only='folders') except dxpy.DXAPIError as details: if details.code == requests.codes['not_found']: raise DXCLIError('The destination folder does not exist') else: raise except: err_exit() src_proj, src_path, src_results = try_call(resolve_existing_path, args. sources[0], allow_mult=True, all_mult=args.all) if src_proj == dest_proj: if is_hashid(args.sources[0]): raise DXCLIError(fill( 'Error: You must specify a source project for ' + args. sources[0])) else: raise DXCLIError(fill( 'A source path and the destination path resolved to the ' + 'same project or container. Please specify different source ' + 'and destination containers, e.g.') + """ dx cp source-project:source-id-or-path dest-project:dest-path""" ) if src_results is None: try: contents = dxpy.api.project_list_folder(src_proj, {'folder': src_path, 'includeHidden': True}) dxpy.api.project_new_folder(dest_proj, {'folder': dest_path}) exists = dxpy.api.project_clone(src_proj, {'folders': contents[ 'folders'], 'objects': [result['id'] for result in contents ['objects']], 'project': dest_proj, 'destination': dest_path})[ 'exists'] if len(exists) > 0: print(fill( 'The following objects already existed in the destination ' + 'container and were not copied:') + '\n ' + '\n '. join(exists)) return except: err_exit() else: try: exists = dxpy.api.project_clone(src_proj, {'objects': [result[ 'id'] for result in src_results], 'project': dest_proj, 'destination': dest_folder})['exists'] if len(exists) > 0: print(fill( 'The following objects already existed in the destination ' + 'container and were not copied:') + '\n ' + '\n '. join(exists)) for result in src_results: if result['id'] not in exists: dxpy.DXHTTPRequest('/' + result['id'] + '/rename', { 'project': dest_proj, 'name': dest_name}) return except: err_exit()
cp_to_noexistent_destination
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args, dest_path, dx_dest, dest_proj):\n if len(args.sources) != 1:\n raise DXCLIError('The destination folder does not exist')\n last_slash_pos = get_last_pos_of_char('/', dest_path)\n if last_slash_pos == 0:\n dest_folder = '/'\n else:\n dest_folder = dest_path[:last_slash_pos]\n dest_name = dest_path[last_slash_pos + 1:].replace('\\\\/', '/')\n try:\n dx_dest.list_folder(folder=dest_folder, only='folders')\n except dxpy.DXAPIError as details:\n if details.code == requests.codes['not_found']:\n raise DXCLIError('The destination folder does not exist')\n else:\n raise\n except:\n err_exit()\n src_proj, src_path, src_results = try_call(resolve_existing_path, args.\n sources[0], allow_mult=True, all_mult=args.all)\n if src_proj == dest_proj:\n if is_hashid(args.sources[0]):\n raise DXCLIError(fill(\n 'Error: You must specify a source project for ' + args.\n sources[0]))\n else:\n raise DXCLIError(fill(\n 'A source path and the destination path resolved to the ' +\n 'same project or container. Please specify different source '\n + 'and destination containers, e.g.') +\n \"\"\"\n dx cp source-project:source-id-or-path dest-project:dest-path\"\"\"\n )\n if src_results is None:\n try:\n contents = dxpy.api.project_list_folder(src_proj, {'folder':\n src_path, 'includeHidden': True})\n dxpy.api.project_new_folder(dest_proj, {'folder': dest_path})\n exists = dxpy.api.project_clone(src_proj, {'folders': contents[\n 'folders'], 'objects': [result['id'] for result in contents\n ['objects']], 'project': dest_proj, 'destination': dest_path})[\n 'exists']\n if len(exists) > 0:\n print(fill(\n 'The following objects already existed in the destination '\n + 'container and were not copied:') + '\\n ' + '\\n '.\n join(exists))\n return\n except:\n err_exit()\n else:\n try:\n exists = dxpy.api.project_clone(src_proj, {'objects': [result[\n 'id'] for result in src_results], 'project': dest_proj,\n 'destination': dest_folder})['exists']\n if len(exists) > 0:\n print(fill(\n 'The following objects already existed in the destination '\n + 'container and were not copied:') + '\\n ' + '\\n '.\n join(exists))\n for result in src_results:\n if result['id'] not in exists:\n dxpy.DXHTTPRequest('/' + result['id'] + '/rename', {\n 'project': dest_proj, 'name': dest_name})\n return\n except:\n err_exit()\n", "role": "user" }, { "content": "cp_to_noexistent_destination", "role": "assistant" } ]
def x(num_bytes): B = num_bytes KB = float(1024) MB = float(KB * 1024) GB = float(MB * 1024) TB = float(GB * 1024) if B < KB: return '{0} {1}'.format(B, 'bytes' if B != 1 else 'byte') elif KB <= B < MB: return '{0:.2f} KiB'.format(B / KB) elif MB <= B < GB: return '{0:.2f} MiB'.format(B / MB) elif GB <= B < TB: return '{0:.2f} GiB'.format(B / GB) elif TB <= B: return '{0:.2f} TiB'.format(B / TB)
readable_part_size
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(num_bytes):\n B = num_bytes\n KB = float(1024)\n MB = float(KB * 1024)\n GB = float(MB * 1024)\n TB = float(GB * 1024)\n if B < KB:\n return '{0} {1}'.format(B, 'bytes' if B != 1 else 'byte')\n elif KB <= B < MB:\n return '{0:.2f} KiB'.format(B / KB)\n elif MB <= B < GB:\n return '{0:.2f} MiB'.format(B / MB)\n elif GB <= B < TB:\n return '{0:.2f} GiB'.format(B / GB)\n elif TB <= B:\n return '{0:.2f} TiB'.format(B / TB)\n", "role": "user" }, { "content": "readable_part_size", "role": "assistant" } ]
def x(self, dx_hash, media_type=None, **kwargs): if media_type is not None: dx_hash['media'] = media_type resp = dxpy.api.file_new(dx_hash, **kwargs) self.set_ids(resp['id'], dx_hash['project'])
new
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dx_hash, media_type=None, **kwargs):\n if media_type is not None:\n dx_hash['media'] = media_type\n resp = dxpy.api.file_new(dx_hash, **kwargs)\n self.set_ids(resp['id'], dx_hash['project'])\n", "role": "user" }, { "content": "new", "role": "assistant" } ]
def x(self, dxid, project=None): if self._dxid is not None: self.flush() DXDataObject.set_ids(self, dxid, project) self._pos = 0 self._file_length = None self._cur_part = 1 self._num_uploaded_parts = 0
set_ids
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dxid, project=None):\n if self._dxid is not None:\n self.flush()\n DXDataObject.set_ids(self, dxid, project)\n self._pos = 0\n self._file_length = None\n self._cur_part = 1\n self._num_uploaded_parts = 0\n", "role": "user" }, { "content": "set_ids", "role": "assistant" } ]
def x(self, offset, from_what=os.SEEK_SET): if from_what == os.SEEK_SET: reference_pos = 0 elif from_what == os.SEEK_CUR: reference_pos = self._pos elif from_what == os.SEEK_END: if self._file_length == None: desc = self.describe() self._file_length = int(desc['size']) reference_pos = self._file_length else: raise DXFileError('Invalid value supplied for from_what') orig_pos = self._pos self._pos = reference_pos + offset in_buf = False orig_buf_pos = self._read_buf.tell() if offset < orig_pos: if orig_buf_pos > orig_pos - offset: in_buf = True else: buf_len = dxpy.utils.string_buffer_length(self._read_buf) if buf_len - orig_buf_pos > offset - orig_pos: in_buf = True if in_buf: self._read_buf.seek(orig_buf_pos - orig_pos + offset) elif offset == orig_pos: pass else: self._read_buf = BytesIO() self._request_iterator, self._response_iterator = None, None
seek
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, offset, from_what=os.SEEK_SET):\n if from_what == os.SEEK_SET:\n reference_pos = 0\n elif from_what == os.SEEK_CUR:\n reference_pos = self._pos\n elif from_what == os.SEEK_END:\n if self._file_length == None:\n desc = self.describe()\n self._file_length = int(desc['size'])\n reference_pos = self._file_length\n else:\n raise DXFileError('Invalid value supplied for from_what')\n orig_pos = self._pos\n self._pos = reference_pos + offset\n in_buf = False\n orig_buf_pos = self._read_buf.tell()\n if offset < orig_pos:\n if orig_buf_pos > orig_pos - offset:\n in_buf = True\n else:\n buf_len = dxpy.utils.string_buffer_length(self._read_buf)\n if buf_len - orig_buf_pos > offset - orig_pos:\n in_buf = True\n if in_buf:\n self._read_buf.seek(orig_buf_pos - orig_pos + offset)\n elif offset == orig_pos:\n pass\n else:\n self._read_buf = BytesIO()\n self._request_iterator, self._response_iterator = None, None\n", "role": "user" }, { "content": "seek", "role": "assistant" } ]
def x(self, multithread=True, **kwargs): if self._write_buf.tell() > 0: data = self._write_buf.getvalue() self._write_buf = BytesIO() if multithread: self._async_upload_part_request(data, index=self._cur_part, ** kwargs) else: self.upload_part(data, self._cur_part, **kwargs) self._cur_part += 1 if len(self._http_threadpool_futures) > 0: dxpy.utils.wait_for_all_futures(self._http_threadpool_futures) try: for future in self._http_threadpool_futures: if future.exception() != None: raise future.exception() finally: self._http_threadpool_futures = set()
flush
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, multithread=True, **kwargs):\n if self._write_buf.tell() > 0:\n data = self._write_buf.getvalue()\n self._write_buf = BytesIO()\n if multithread:\n self._async_upload_part_request(data, index=self._cur_part, **\n kwargs)\n else:\n self.upload_part(data, self._cur_part, **kwargs)\n self._cur_part += 1\n if len(self._http_threadpool_futures) > 0:\n dxpy.utils.wait_for_all_futures(self._http_threadpool_futures)\n try:\n for future in self._http_threadpool_futures:\n if future.exception() != None:\n raise future.exception()\n finally:\n self._http_threadpool_futures = set()\n", "role": "user" }, { "content": "flush", "role": "assistant" } ]
def x(self, data, multithread=True, **kwargs):
    if not USING_PYTHON2:
        assert isinstance(data, bytes)
    self._ensure_write_bufsize(**kwargs)

    def write_request(data_for_write_req):
        if multithread:
            self._async_upload_part_request(data_for_write_req, index=self.
                _cur_part, **kwargs)
        else:
            self.upload_part(data_for_write_req, self._cur_part, **kwargs)
        self._cur_part += 1
    if self._write_buf.tell() == 0 and self._write_bufsize == len(data):
        write_request(data)
        return
    remaining_space = self._write_bufsize - self._write_buf.tell()
    if len(data) <= remaining_space:
        self._write_buf.write(data)
    else:
        self._write_buf.write(data[:remaining_space])
        temp_data = self._write_buf.getvalue()
        self._write_buf = BytesIO()
        write_request(temp_data)
        self.write(data[remaining_space:], **kwargs)
write2
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data, multithread=True, **kwargs):\n    if not USING_PYTHON2:\n        assert isinstance(data, bytes)\n    self._ensure_write_bufsize(**kwargs)\n\n    def write_request(data_for_write_req):\n        if multithread:\n            self._async_upload_part_request(data_for_write_req, index=self.\n                _cur_part, **kwargs)\n        else:\n            self.upload_part(data_for_write_req, self._cur_part, **kwargs)\n        self._cur_part += 1\n    if self._write_buf.tell() == 0 and self._write_bufsize == len(data):\n        write_request(data)\n        return\n    remaining_space = self._write_bufsize - self._write_buf.tell()\n    if len(data) <= remaining_space:\n        self._write_buf.write(data)\n    else:\n        self._write_buf.write(data[:remaining_space])\n        temp_data = self._write_buf.getvalue()\n        self._write_buf = BytesIO()\n        write_request(temp_data)\n        self.write(data[remaining_space:], **kwargs)\n", "role": "user" }, { "content": "write2", "role": "assistant" } ]
def x(self, data, multithread=True, **kwargs): if USING_PYTHON2: self._write2(data, multithread=multithread, **kwargs) else: if isinstance(data, str): bt = data.encode('utf-8') elif isinstance(data, bytearray): bt = bytes(data) elif isinstance(data, bytes): bt = data elif isinstance(data, mmap.mmap): bt = bytes(data) else: raise DXFileError('Invalid type {} for write data argument'. format(type(data))) assert isinstance(bt, bytes) self._write2(bt, multithread=multithread, **kwargs)
write
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data, multithread=True, **kwargs):\n if USING_PYTHON2:\n self._write2(data, multithread=multithread, **kwargs)\n else:\n if isinstance(data, str):\n bt = data.encode('utf-8')\n elif isinstance(data, bytearray):\n bt = bytes(data)\n elif isinstance(data, bytes):\n bt = data\n elif isinstance(data, mmap.mmap):\n bt = bytes(data)\n else:\n raise DXFileError('Invalid type {} for write data argument'.\n format(type(data)))\n assert isinstance(bt, bytes)\n self._write2(bt, multithread=multithread, **kwargs)\n", "role": "user" }, { "content": "write", "role": "assistant" } ]
def x(self, block=False, **kwargs): self.flush(**kwargs) self._ensure_write_bufsize(**kwargs) if self._num_uploaded_parts == 0 and self._empty_last_part_allowed: try: if USING_PYTHON2: self.upload_part('', 1, **kwargs) else: self.upload_part(b'', 1, **kwargs) except dxpy.exceptions.InvalidState: pass if 'report_progress_fn' in kwargs: del kwargs['report_progress_fn'] dxpy.api.file_close(self._dxid, **kwargs) if block: self._wait_on_close(**kwargs)
close
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, block=False, **kwargs):\n self.flush(**kwargs)\n self._ensure_write_bufsize(**kwargs)\n if self._num_uploaded_parts == 0 and self._empty_last_part_allowed:\n try:\n if USING_PYTHON2:\n self.upload_part('', 1, **kwargs)\n else:\n self.upload_part(b'', 1, **kwargs)\n except dxpy.exceptions.InvalidState:\n pass\n if 'report_progress_fn' in kwargs:\n del kwargs['report_progress_fn']\n dxpy.api.file_close(self._dxid, **kwargs)\n if block:\n self._wait_on_close(**kwargs)\n", "role": "user" }, { "content": "close", "role": "assistant" } ]
def x(self, data, index=None, display_progress=False, report_progress_fn=
    None, **kwargs):
    if not USING_PYTHON2:
        assert isinstance(data, bytes)
    req_input = {}
    if index is not None:
        req_input['index'] = int(index)
    md5 = hashlib.md5()
    if hasattr(data, 'seek') and hasattr(data, 'tell'):
        rewind_input_buffer_offset = data.tell()
        while True:
            bytes_read = data.read(MD5_READ_CHUNK_SIZE)
            if bytes_read:
                md5.update(bytes_read)
            else:
                break
        data.seek(rewind_input_buffer_offset)
    else:
        md5.update(data)
    req_input['md5'] = md5.hexdigest()
    req_input['size'] = len(data)

    def get_upload_url_and_headers():
        if 'max_retries' not in kwargs:
            kwargs['max_retries'] = dxpy.DEFAULT_RETRIES
        elif kwargs['max_retries'] > 0:
            kwargs['max_retries'] -= 1
        if 'timeout' not in kwargs:
            kwargs['timeout'] = FILE_REQUEST_TIMEOUT
        resp = dxpy.api.file_upload(self._dxid, req_input, **kwargs)
        url = resp['url']
        return url, _validate_headers(resp.get('headers', {}))
    dxpy.DXHTTPRequest(get_upload_url_and_headers, data, jsonify_data=False,
        prepend_srv=False, always_retry=True, timeout=FILE_REQUEST_TIMEOUT,
        auth=None, method='PUT')
    self._num_uploaded_parts += 1
    if display_progress:
        warn('.')
    if report_progress_fn is not None:
        report_progress_fn(self, len(data))
upload_part
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data, index=None, display_progress=False, report_progress_fn=\n    None, **kwargs):\n    if not USING_PYTHON2:\n        assert isinstance(data, bytes)\n    req_input = {}\n    if index is not None:\n        req_input['index'] = int(index)\n    md5 = hashlib.md5()\n    if hasattr(data, 'seek') and hasattr(data, 'tell'):\n        rewind_input_buffer_offset = data.tell()\n        while True:\n            bytes_read = data.read(MD5_READ_CHUNK_SIZE)\n            if bytes_read:\n                md5.update(bytes_read)\n            else:\n                break\n        data.seek(rewind_input_buffer_offset)\n    else:\n        md5.update(data)\n    req_input['md5'] = md5.hexdigest()\n    req_input['size'] = len(data)\n\n    def get_upload_url_and_headers():\n        if 'max_retries' not in kwargs:\n            kwargs['max_retries'] = dxpy.DEFAULT_RETRIES\n        elif kwargs['max_retries'] > 0:\n            kwargs['max_retries'] -= 1\n        if 'timeout' not in kwargs:\n            kwargs['timeout'] = FILE_REQUEST_TIMEOUT\n        resp = dxpy.api.file_upload(self._dxid, req_input, **kwargs)\n        url = resp['url']\n        return url, _validate_headers(resp.get('headers', {}))\n    dxpy.DXHTTPRequest(get_upload_url_and_headers, data, jsonify_data=False,\n        prepend_srv=False, always_retry=True, timeout=FILE_REQUEST_TIMEOUT,\n        auth=None, method='PUT')\n    self._num_uploaded_parts += 1\n    if display_progress:\n        warn('.')\n    if report_progress_fn is not None:\n        report_progress_fn(self, len(data))\n", "role": "user" }, { "content": "upload_part", "role": "assistant" } ]