Columns:
source_code: string (lengths 45–7.24k)
function_name: string (lengths 1–111)
messages: list
def x(self, table, column, join=None, where=None, insert=False, ifnone=None):
    select_result = self.select(table=table, columns=[column], join=join,
        where=where, limit=1)
    if self.debug:
        return select_result
    result = select_result[0] if select_result else None
    if result:
        return result[0 if self.cursorclass is pymysql.cursors.Cursor else column]
    if ifnone:
        raise ValueError(ifnone)
    if insert:
        if any([isinstance(d, dict) for d in where.values()]):
            raise ValueError(
                "The where parameter in get() doesn't support nested condition with insert==True."
            )
        return self.insert(table=table, value=where)
    return None
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table, column, join=None, where=None, insert=False, ifnone=None):\n select_result = self.select(table=table, columns=[column], join=join,\n where=where, limit=1)\n if self.debug:\n return select_result\n result = select_result[0] if select_result else None\n if result:\n return result[0 if self.cursorclass is pymysql.cursors.Cursor else\n column]\n if ifnone:\n raise ValueError(ifnone)\n if insert:\n if any([isinstance(d, dict) for d in where.values()]):\n raise ValueError(\n \"The where parameter in get() doesn't support nested condition with insert==True.\"\n )\n return self.insert(table=table, value=where)\n return None\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, table, value, ignore=False, commit=True):
    value_q, _args = self._value_parser(value, columnname=False)
    _sql = ''.join(['INSERT', ' IGNORE' if ignore else '', ' INTO ',
        self._backtick(table), ' (', self._backtick_columns(value),
        ') VALUES (', value_q, ');'])
    if self.debug:
        return self.cur.mogrify(_sql, _args)
    self.cur.execute(_sql, _args)
    if commit:
        self.conn.commit()
    return self.cur.lastrowid
insert
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table, value, ignore=False, commit=True):\n value_q, _args = self._value_parser(value, columnname=False)\n _sql = ''.join(['INSERT', ' IGNORE' if ignore else '', ' INTO ', self.\n _backtick(table), ' (', self._backtick_columns(value), ') VALUES (',\n value_q, ');'])\n if self.debug:\n return self.cur.mogrify(_sql, _args)\n self.cur.execute(_sql, _args)\n if commit:\n self.conn.commit()\n return self.cur.lastrowid\n", "role": "user" }, { "content": "insert", "role": "assistant" } ]
def x(self, table, value, update_columns=None, commit=True):
    if not isinstance(value, dict):
        raise TypeError('Input value should be a dictionary')
    if not update_columns:
        update_columns = value.keys()
    value_q, _args = self._value_parser(value, columnname=False)
    _sql = ''.join(['INSERT INTO ', self._backtick(table), ' (',
        self._backtick_columns(value), ') VALUES ', '(', value_q, ') ',
        'ON DUPLICATE KEY UPDATE ',
        ', '.join(['='.join([k, 'VALUES(' + k + ')']) for k in update_columns]),
        ';'])
    if self.debug:
        return self.cur.mogrify(_sql, _args)
    self.cur.execute(_sql, _args)
    if commit:
        self.conn.commit()
    return self.cur.lastrowid
upsert
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table, value, update_columns=None, commit=True):\n if not isinstance(value, dict):\n raise TypeError('Input value should be a dictionary')\n if not update_columns:\n update_columns = value.keys()\n value_q, _args = self._value_parser(value, columnname=False)\n _sql = ''.join(['INSERT INTO ', self._backtick(table), ' (', self.\n _backtick_columns(value), ') VALUES ', '(', value_q, ') ',\n 'ON DUPLICATE KEY UPDATE ', ', '.join(['='.join([k, 'VALUES(' + k +\n ')']) for k in update_columns]), ';'])\n if self.debug:\n return self.cur.mogrify(_sql, _args)\n self.cur.execute(_sql, _args)\n if commit:\n self.conn.commit()\n return self.cur.lastrowid\n", "role": "user" }, { "content": "upsert", "role": "assistant" } ]
def x(self, table, columns, value, ignore=False, commit=True):
    if not isinstance(value, (list, tuple)):
        raise TypeError('Input value should be a list or tuple')
    _sql = ''.join(['INSERT', ' IGNORE' if ignore else '', ' INTO ',
        self._backtick(table), ' (', self._backtick_columns(columns),
        ') VALUES (', ', '.join(['%s'] * len(columns)), ')'])
    _args = tuple(value)
    _sql_full = ''.join(['INSERT', ' IGNORE' if ignore else '', ' INTO ',
        self._backtick(table), ' (', self._backtick_columns(columns),
        ') VALUES ',
        ', '.join([''.join(['(', ', '.join(['%s'] * len(columns)), ')'])] * len(_args)),
        ';'])
    _args_flattened = [item for sublist in _args for item in sublist]
    if self.debug:
        return self.cur.mogrify(_sql_full, _args_flattened)
    self.cur.executemany(_sql, _args)
    if commit:
        self.conn.commit()
    return self.cur.lastrowid
insertmany
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table, columns, value, ignore=False, commit=True):\n if not isinstance(value, (list, tuple)):\n raise TypeError('Input value should be a list or tuple')\n _sql = ''.join(['INSERT', ' IGNORE' if ignore else '', ' INTO ', self.\n _backtick(table), ' (', self._backtick_columns(columns),\n ') VALUES (', ', '.join(['%s'] * len(columns)), ')'])\n _args = tuple(value)\n _sql_full = ''.join(['INSERT', ' IGNORE' if ignore else '', ' INTO ',\n self._backtick(table), ' (', self._backtick_columns(columns),\n ') VALUES ', ', '.join([''.join(['(', ', '.join(['%s'] * len(\n columns)), ')'])] * len(_args)), ';'])\n _args_flattened = [item for sublist in _args for item in sublist]\n if self.debug:\n return self.cur.mogrify(_sql_full, _args_flattened)\n self.cur.executemany(_sql, _args)\n if commit:\n self.conn.commit()\n return self.cur.lastrowid\n", "role": "user" }, { "content": "insertmany", "role": "assistant" } ]
def x(self, table, value, where, join=None, commit=True):
    value_q, _value_args = self._value_parser(value, columnname=True)
    where_q, _where_args = self._where_parser(where)
    _sql = ''.join(['UPDATE ',
        self._tablename_parser(table)['formatted_tablename'],
        self._join_parser(join), ' SET ', value_q, where_q, ';'])
    _args = _value_args + _where_args
    if self.debug:
        return self.cur.mogrify(_sql, _args)
    result = self.cur.execute(_sql, _args)
    if commit:
        self.commit()
    return result
update
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table, value, where, join=None, commit=True):\n value_q, _value_args = self._value_parser(value, columnname=True)\n where_q, _where_args = self._where_parser(where)\n _sql = ''.join(['UPDATE ', self._tablename_parser(table)[\n 'formatted_tablename'], self._join_parser(join), ' SET ', value_q,\n where_q, ';'])\n _args = _value_args + _where_args\n if self.debug:\n return self.cur.mogrify(_sql, _args)\n result = self.cur.execute(_sql, _args)\n if commit:\n self.commit()\n return result\n", "role": "user" }, { "content": "update", "role": "assistant" } ]
def x(self, table, where=None, commit=True):
    where_q, _args = self._where_parser(where)
    alias = self._tablename_parser(table)['alias']
    _sql = ''.join(['DELETE ', alias + ' ' if alias else '', 'FROM ',
        self._tablename_parser(table)['formatted_tablename'], where_q, ';'])
    if self.debug:
        return self.cur.mogrify(_sql, _args)
    result = self.cur.execute(_sql, _args)
    if commit:
        self.commit()
    return result
delete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table, where=None, commit=True):\n where_q, _args = self._where_parser(where)\n alias = self._tablename_parser(table)['alias']\n _sql = ''.join(['DELETE ', alias + ' ' if alias else '', 'FROM ', self.\n _tablename_parser(table)['formatted_tablename'], where_q, ';'])\n if self.debug:\n return self.cur.mogrify(_sql, _args)\n result = self.cur.execute(_sql, _args)\n if commit:\n self.commit()\n return result\n", "role": "user" }, { "content": "delete", "role": "assistant" } ]
def x(txt):
    rall = re.search('^([\\s])+$', txt)
    if rall:
        tmp = txt.split('\n', 1)
        if len(tmp) == 2:
            return tmp[0], '\n' + tmp[1]
        else:
            return '', tmp[0]
    left = ''
    rlm = re.search('^([\\s])+', txt)
    if rlm:
        left = rlm.group(0)
    right = ''
    rrm = re.search('([\\s])+$', txt)
    if rrm:
        right = rrm.group(0)
    return left, right
get_whitespace
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(txt):\n rall = re.search('^([\\\\s])+$', txt)\n if rall:\n tmp = txt.split('\\n', 1)\n if len(tmp) == 2:\n return tmp[0], '\\n' + tmp[1]\n else:\n return '', tmp[0]\n left = ''\n rlm = re.search('^([\\\\s])+', txt)\n if rlm:\n left = rlm.group(0)\n right = ''\n rrm = re.search('([\\\\s])+$', txt)\n if rrm:\n right = rrm.group(0)\n return left, right\n", "role": "user" }, { "content": "get_whitespace", "role": "assistant" } ]
def x(self):
    name_ws = []
    value_ws = []
    for entry in self._entries:
        name_ws.append(get_whitespace(entry.name))
        if entry.value != '':
            value_ws.append(get_whitespace(entry._value))
    if len(value_ws) >= 1:
        value_ws = most_common(value_ws)
    else:
        value_ws = '', ' '
    if len(name_ws) >= 1:
        name_ws = most_common(name_ws)
    else:
        name_ws = ' ', ''
    return name_ws, value_ws
find_whitespace_pattern
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n name_ws = []\n value_ws = []\n for entry in self._entries:\n name_ws.append(get_whitespace(entry.name))\n if entry.value != '':\n value_ws.append(get_whitespace(entry._value))\n if len(value_ws) >= 1:\n value_ws = most_common(value_ws)\n else:\n value_ws = '', ' '\n if len(name_ws) >= 1:\n name_ws = most_common(name_ws)\n else:\n name_ws = ' ', ''\n return name_ws, value_ws\n", "role": "user" }, { "content": "find_whitespace_pattern", "role": "assistant" } ]
def x(self, project_name, date):
    return os.path.join(self.cache_path, '%s_%s.json' % (project_name,
        date.strftime('%Y%m%d')))
path_for_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, project_name, date):\n return os.path.join(self.cache_path, '%s_%s.json' % (project_name, date\n .strftime('%Y%m%d')))\n", "role": "user" }, { "content": "path_for_file", "role": "assistant" } ]
def x(self, project, date):
    fpath = self._path_for_file(project, date)
    logger.debug('Cache GET project=%s date=%s - path=%s', project,
        date.strftime('%Y-%m-%d'), fpath)
    try:
        with open(fpath, 'r') as fh:
            data = json.loads(fh.read())
    except:
        logger.debug('Error getting from cache for project=%s date=%s',
            project, date.strftime('%Y-%m-%d'))
        return None
    data['cache_metadata']['date'] = datetime.strptime(
        data['cache_metadata']['date'], '%Y%m%d')
    data['cache_metadata']['updated'] = datetime.fromtimestamp(
        data['cache_metadata']['updated'])
    return data
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, project, date):\n fpath = self._path_for_file(project, date)\n logger.debug('Cache GET project=%s date=%s - path=%s', project, date.\n strftime('%Y-%m-%d'), fpath)\n try:\n with open(fpath, 'r') as fh:\n data = json.loads(fh.read())\n except:\n logger.debug('Error getting from cache for project=%s date=%s',\n project, date.strftime('%Y-%m-%d'))\n return None\n data['cache_metadata']['date'] = datetime.strptime(data[\n 'cache_metadata']['date'], '%Y%m%d')\n data['cache_metadata']['updated'] = datetime.fromtimestamp(data[\n 'cache_metadata']['updated'])\n return data\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, project, date, data, data_ts):
    data['cache_metadata'] = {'project': project,
        'date': date.strftime('%Y%m%d'), 'updated': time.time(),
        'version': VERSION, 'data_ts': data_ts}
    fpath = self._path_for_file(project, date)
    logger.debug('Cache SET project=%s date=%s - path=%s', project,
        date.strftime('%Y-%m-%d'), fpath)
    with open(fpath, 'w') as fh:
        fh.write(json.dumps(data))
set
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, project, date, data, data_ts):\n data['cache_metadata'] = {'project': project, 'date': date.strftime(\n '%Y%m%d'), 'updated': time.time(), 'version': VERSION, 'data_ts':\n data_ts}\n fpath = self._path_for_file(project, date)\n logger.debug('Cache SET project=%s date=%s - path=%s', project, date.\n strftime('%Y-%m-%d'), fpath)\n with open(fpath, 'w') as fh:\n fh.write(json.dumps(data))\n", "role": "user" }, { "content": "set", "role": "assistant" } ]
def x(self, project):
    file_re = re.compile('^%s_([0-9]{8})\\.json$' % project)
    all_dates = []
    for f in os.listdir(self.cache_path):
        if not os.path.isfile(os.path.join(self.cache_path, f)):
            continue
        m = file_re.match(f)
        if m is None:
            continue
        all_dates.append(datetime.strptime(m.group(1), '%Y%m%d'))
    return sorted(all_dates)
get_dates_for_project
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, project):\n file_re = re.compile('^%s_([0-9]{8})\\\\.json$' % project)\n all_dates = []\n for f in os.listdir(self.cache_path):\n if not os.path.isfile(os.path.join(self.cache_path, f)):\n continue\n m = file_re.match(f)\n if m is None:\n continue\n all_dates.append(datetime.strptime(m.group(1), '%Y%m%d'))\n return sorted(all_dates)\n", "role": "user" }, { "content": "get_dates_for_project", "role": "assistant" } ]
def x(argv):
    p = argparse.ArgumentParser(description=
        'pypi-download-stats - Calculate detailed download stats and generate HTML and badges for PyPI packages - <%s>'
        % PROJECT_URL, prog='pypi-download-stats')
    p.add_argument('-V', '--version', action='version',
        version='%(prog)s ' + VERSION)
    p.add_argument('-v', '--verbose', dest='verbose', action='count',
        default=0, help='verbose output. specify twice for debug-level output.')
    m = p.add_mutually_exclusive_group()
    m.add_argument('-Q', '--no-query', dest='query', action='store_false',
        default=True, help='do not query; just generate output from cached data')
    m.add_argument('-G', '--no-generate', dest='generate', action='store_false',
        default=True, help='do not generate output; just query data and cache results')
    p.add_argument('-o', '--out-dir', dest='out_dir', action='store', type=str,
        default='./pypi-stats', help='output directory (default: ./pypi-stats')
    p.add_argument('-p', '--project-id', dest='project_id', action='store',
        type=str, default=None, help=
        'ProjectID for your Google Cloud user, if not using service account credentials JSON file')
    p.add_argument('-c', '--cache-dir', dest='cache_dir', action='store',
        type=str, default='./pypi-stats-cache',
        help='stats cache directory (default: ./pypi-stats-cache)')
    p.add_argument('-B', '--backfill-num-days', dest='backfill_days', type=int,
        action='store', default=7, help=
        'number of days of historical data to backfill, if missing (defaut: 7). Note this may incur BigQuery charges. Set to -1 to backfill all available history.')
    g = p.add_mutually_exclusive_group()
    g.add_argument('-P', '--project', dest='PROJECT', action='append',
        type=str, help=
        'project name to query/generate stats for (can be specified more than once; this will reduce query cost for multiple projects)')
    g.add_argument('-U', '--user', dest='user', action='store', type=str,
        help='Run for all PyPI projects owned by the specifieduser.')
    args = p.parse_args(argv)
    return args
parse_args
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(argv):\n p = argparse.ArgumentParser(description=\n 'pypi-download-stats - Calculate detailed download stats and generate HTML and badges for PyPI packages - <%s>'\n % PROJECT_URL, prog='pypi-download-stats')\n p.add_argument('-V', '--version', action='version', version='%(prog)s ' +\n VERSION)\n p.add_argument('-v', '--verbose', dest='verbose', action='count',\n default=0, help='verbose output. specify twice for debug-level output.'\n )\n m = p.add_mutually_exclusive_group()\n m.add_argument('-Q', '--no-query', dest='query', action='store_false',\n default=True, help=\n 'do not query; just generate output from cached data')\n m.add_argument('-G', '--no-generate', dest='generate', action=\n 'store_false', default=True, help=\n 'do not generate output; just query data and cache results')\n p.add_argument('-o', '--out-dir', dest='out_dir', action='store', type=\n str, default='./pypi-stats', help=\n 'output directory (default: ./pypi-stats')\n p.add_argument('-p', '--project-id', dest='project_id', action='store',\n type=str, default=None, help=\n 'ProjectID for your Google Cloud user, if not using service account credentials JSON file'\n )\n p.add_argument('-c', '--cache-dir', dest='cache_dir', action='store',\n type=str, default='./pypi-stats-cache', help=\n 'stats cache directory (default: ./pypi-stats-cache)')\n p.add_argument('-B', '--backfill-num-days', dest='backfill_days', type=\n int, action='store', default=7, help=\n 'number of days of historical data to backfill, if missing (defaut: 7). Note this may incur BigQuery charges. Set to -1 to backfill all available history.'\n )\n g = p.add_mutually_exclusive_group()\n g.add_argument('-P', '--project', dest='PROJECT', action='append', type\n =str, help=\n 'project name to query/generate stats for (can be specified more than once; this will reduce query cost for multiple projects)'\n )\n g.add_argument('-U', '--user', dest='user', action='store', type=str,\n help='Run for all PyPI projects owned by the specifieduser.')\n args = p.parse_args(argv)\n return args\n", "role": "user" }, { "content": "parse_args", "role": "assistant" } ]
def x(level, format):
    formatter = logging.Formatter(fmt=format)
    logger.handlers[0].setFormatter(formatter)
    logger.setLevel(level)
set_log_level_format
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(level, format):\n formatter = logging.Formatter(fmt=format)\n logger.handlers[0].setFormatter(formatter)\n logger.setLevel(level)\n", "role": "user" }, { "content": "set_log_level_format", "role": "assistant" } ]
def x(username):
    client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
    pkgs = client.user_packages(username)
    return [x[1] for x in pkgs]
pypi_get_projects_for_user
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(username):\n client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')\n pkgs = client.user_packages(username)\n return [x[1] for x in pkgs]\n", "role": "user" }, { "content": "pypi_get_projects_for_user", "role": "assistant" } ]
def x(args=None):
    if args is None:
        args = parse_args(sys.argv[1:])
    if args.verbose > 1:
        set_log_debug()
    elif args.verbose == 1:
        set_log_info()
    outpath = os.path.abspath(os.path.expanduser(args.out_dir))
    cachepath = os.path.abspath(os.path.expanduser(args.cache_dir))
    cache = DiskDataCache(cache_path=cachepath)
    if args.user:
        args.PROJECT = _pypi_get_projects_for_user(args.user)
    if args.query:
        DataQuery(args.project_id, args.PROJECT, cache).run_queries(
            backfill_num_days=args.backfill_days)
    else:
        logger.warning(
            'Query disabled by command-line flag; operating on cached data only.')
    if not args.generate:
        logger.warning(
            'Output generation disabled by command-line flag; exiting now.')
        raise SystemExit(0)
    for proj in args.PROJECT:
        logger.info('Generating output for: %s', proj)
        stats = ProjectStats(proj, cache)
        outdir = os.path.join(outpath, proj)
        OutputGenerator(proj, stats, outdir).generate()
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args=None):\n if args is None:\n args = parse_args(sys.argv[1:])\n if args.verbose > 1:\n set_log_debug()\n elif args.verbose == 1:\n set_log_info()\n outpath = os.path.abspath(os.path.expanduser(args.out_dir))\n cachepath = os.path.abspath(os.path.expanduser(args.cache_dir))\n cache = DiskDataCache(cache_path=cachepath)\n if args.user:\n args.PROJECT = _pypi_get_projects_for_user(args.user)\n if args.query:\n DataQuery(args.project_id, args.PROJECT, cache).run_queries(\n backfill_num_days=args.backfill_days)\n else:\n logger.warning(\n 'Query disabled by command-line flag; operating on cached data only.'\n )\n if not args.generate:\n logger.warning(\n 'Output generation disabled by command-line flag; exiting now.')\n raise SystemExit(0)\n for proj in args.PROJECT:\n logger.info('Generating output for: %s', proj)\n stats = ProjectStats(proj, cache)\n outdir = os.path.join(outpath, proj)\n OutputGenerator(proj, stats, outdir).generate()\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(self):
    logger.debug('Generating graph for %s', self._graph_id)
    tools = [PanTool(), BoxZoomTool(), WheelZoomTool(), SaveTool(),
        ResetTool(), ResizeTool()]
    try:
        g = Area(self._data, x='Date', y=self._y_series_names,
            title=self._title, stack=True, xlabel='Date', ylabel='Downloads',
            tools=tools, plot_height=400, plot_width=800,
            toolbar_location='above', legend=False)
    except Exception as ex:
        logger.error('Error generating %s graph', self._graph_id)
        logger.error('Data: %s', self._data)
        logger.error('y=%s', self._y_series_names)
        raise ex
    lines = []
    legend_parts = []
    for renderer in g.select(GlyphRenderer):
        if not isinstance(renderer.glyph, Patches):
            continue
        series_name = renderer.data_source.data['series'][0]
        logger.debug('Adding line for Patches %s (series: %s)', renderer,
            series_name)
        line = self._line_for_patches(self._data, g, renderer, series_name)
        if line is not None:
            lines.append(line)
            legend_parts.append((series_name, [line]))
    g.add_tools(HoverTool(tooltips=[(self._y_name, '@SeriesName'),
        ('Date', '@FmtDate'), ('Downloads', '@Downloads')],
        renderers=lines, line_policy='nearest'))
    legend = Legend(legends=legend_parts, location=(0, 0))
    g.add_layout(legend, 'right')
    return components(g)
generate_graph
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n logger.debug('Generating graph for %s', self._graph_id)\n tools = [PanTool(), BoxZoomTool(), WheelZoomTool(), SaveTool(),\n ResetTool(), ResizeTool()]\n try:\n g = Area(self._data, x='Date', y=self._y_series_names, title=self.\n _title, stack=True, xlabel='Date', ylabel='Downloads', tools=\n tools, plot_height=400, plot_width=800, toolbar_location=\n 'above', legend=False)\n except Exception as ex:\n logger.error('Error generating %s graph', self._graph_id)\n logger.error('Data: %s', self._data)\n logger.error('y=%s', self._y_series_names)\n raise ex\n lines = []\n legend_parts = []\n for renderer in g.select(GlyphRenderer):\n if not isinstance(renderer.glyph, Patches):\n continue\n series_name = renderer.data_source.data['series'][0]\n logger.debug('Adding line for Patches %s (series: %s)', renderer,\n series_name)\n line = self._line_for_patches(self._data, g, renderer, series_name)\n if line is not None:\n lines.append(line)\n legend_parts.append((series_name, [line]))\n g.add_tools(HoverTool(tooltips=[(self._y_name, '@SeriesName'), ('Date',\n '@FmtDate'), ('Downloads', '@Downloads')], renderers=lines,\n line_policy='nearest'))\n legend = Legend(legends=legend_parts, location=(0, 0))\n g.add_layout(legend, 'right')\n return components(g)\n", "role": "user" }, { "content": "generate_graph", "role": "assistant" } ]
def x(self, data, chart, renderer, series_name):
    xvals = deepcopy(renderer.data_source.data['x_values'][0])
    yvals = deepcopy(renderer.data_source.data['y_values'][0])
    line_color = renderer.glyph.fill_color
    orig_xvals = [x for x in xvals]
    orig_yvals = [y for y in yvals]
    new_xvals = [x for x in xvals]
    new_yvals = [y for y in yvals]
    xvals = new_xvals[1:]
    yvals = new_yvals[1:]
    last_idx = None
    for idx, val in enumerate(xvals):
        if yvals[idx + 1] == 0 and xvals[idx + 1] == xvals[idx]:
            last_idx = idx
            break
    if last_idx is None:
        logger.error(
            'Unable to find top line of patch (x_values=%s y_values=%s',
            orig_xvals, orig_yvals)
        return None
    xvals = xvals[:last_idx + 1]
    yvals = yvals[:last_idx + 1]
    lastx = xvals[-1]
    xvals[-1] = lastx - 1000
    xvals.append(lastx)
    yvals.append(yvals[-1])
    download_counts = [data[series_name][y] for y in range(0, len(yvals) - 1)]
    download_counts.append(download_counts[-1])
    data2 = {'x': xvals, 'y': yvals,
        'SeriesName': [series_name for _ in yvals],
        'FmtDate': [self.datetime64_to_formatted_date(x) for x in xvals],
        'Downloads': download_counts}
    data2['FmtDate'][-2] = data2['FmtDate'][-1]
    line_ds = ColumnDataSource(data2)
    line = Line(x='x', y='y', line_color=line_color)
    lineglyph = chart.add_glyph(line_ds, line)
    return lineglyph
line_for_patches
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data, chart, renderer, series_name):\n xvals = deepcopy(renderer.data_source.data['x_values'][0])\n yvals = deepcopy(renderer.data_source.data['y_values'][0])\n line_color = renderer.glyph.fill_color\n orig_xvals = [x for x in xvals]\n orig_yvals = [y for y in yvals]\n new_xvals = [x for x in xvals]\n new_yvals = [y for y in yvals]\n xvals = new_xvals[1:]\n yvals = new_yvals[1:]\n last_idx = None\n for idx, val in enumerate(xvals):\n if yvals[idx + 1] == 0 and xvals[idx + 1] == xvals[idx]:\n last_idx = idx\n break\n if last_idx is None:\n logger.error(\n 'Unable to find top line of patch (x_values=%s y_values=%s',\n orig_xvals, orig_yvals)\n return None\n xvals = xvals[:last_idx + 1]\n yvals = yvals[:last_idx + 1]\n lastx = xvals[-1]\n xvals[-1] = lastx - 1000\n xvals.append(lastx)\n yvals.append(yvals[-1])\n download_counts = [data[series_name][y] for y in range(0, len(yvals) - 1)]\n download_counts.append(download_counts[-1])\n data2 = {'x': xvals, 'y': yvals, 'SeriesName': [series_name for _ in\n yvals], 'FmtDate': [self.datetime64_to_formatted_date(x) for x in\n xvals], 'Downloads': download_counts}\n data2['FmtDate'][-2] = data2['FmtDate'][-1]\n line_ds = ColumnDataSource(data2)\n line = Line(x='x', y='y', line_color=line_color)\n lineglyph = chart.add_glyph(line_ds, line)\n return lineglyph\n", "role": "user" }, { "content": "line_for_patches", "role": "assistant" } ]
def x(self):
    all_dates = self.cache.get_dates_for_project(self.project_name)
    dates = []
    last_date = None
    for val in sorted(all_dates):
        if last_date is None:
            last_date = val
            continue
        if val - last_date > timedelta(hours=48):
            logger.warning(
                'Last cache date was %s, current date is %s; delta is too large. Starting cache date series at current date.',
                last_date, val)
            dates = []
        last_date = val
        dates.append(val)
    for idx, cache_date in enumerate(dates):
        data = self._cache_get(cache_date)
        if not self._is_empty_cache_record(data):
            logger.debug('First cache date with data: %s', cache_date)
            return dates[idx:]
    return dates
get_cache_dates
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n all_dates = self.cache.get_dates_for_project(self.project_name)\n dates = []\n last_date = None\n for val in sorted(all_dates):\n if last_date is None:\n last_date = val\n continue\n if val - last_date > timedelta(hours=48):\n logger.warning(\n 'Last cache date was %s, current date is %s; delta is too large. Starting cache date series at current date.'\n , last_date, val)\n dates = []\n last_date = val\n dates.append(val)\n for idx, cache_date in enumerate(dates):\n data = self._cache_get(cache_date)\n if not self._is_empty_cache_record(data):\n logger.debug('First cache date with data: %s', cache_date)\n return dates[idx:]\n return dates\n", "role": "user" }, { "content": "get_cache_dates", "role": "assistant" } ]
def x(self, rec):
    for k in ['by_version', 'by_file_type', 'by_installer',
            'by_implementation', 'by_system', 'by_distro', 'by_country']:
        if k in rec and len(rec[k]) > 0:
            return False
    return True
is_empty_cache_record
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, rec):\n for k in ['by_version', 'by_file_type', 'by_installer',\n 'by_implementation', 'by_system', 'by_distro', 'by_country']:\n if k in rec and len(rec[k]) > 0:\n return False\n return True\n", "role": "user" }, { "content": "is_empty_cache_record", "role": "assistant" } ]
def x(self, date):
    if date in self.cache_data:
        logger.debug('Using class-cached data for date %s',
            date.strftime('%Y-%m-%d'))
        return self.cache_data[date]
    logger.debug('Getting data from cache for date %s',
        date.strftime('%Y-%m-%d'))
    data = self.cache.get(self.project_name, date)
    self.cache_data[date] = data
    return data
cache_get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, date):\n if date in self.cache_data:\n logger.debug('Using class-cached data for date %s', date.strftime(\n '%Y-%m-%d'))\n return self.cache_data[date]\n logger.debug('Getting data from cache for date %s', date.strftime(\n '%Y-%m-%d'))\n data = self.cache.get(self.project_name, date)\n self.cache_data[date] = data\n return data\n", "role": "user" }, { "content": "cache_get", "role": "assistant" } ]
def x(k1, k2):
    k1 = ProjectStats._column_value(k1)
    k2 = ProjectStats._column_value(k2)
    if k1 == 'unknown' and k2 == 'unknown':
        return 'unknown'
    return '%s %s' % (k1, k2)
compound_column_value
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(k1, k2):\n k1 = ProjectStats._column_value(k1)\n k2 = ProjectStats._column_value(k2)\n if k1 == 'unknown' and k2 == 'unknown':\n return 'unknown'\n return '%s %s' % (k1, k2)\n", "role": "user" }, { "content": "compound_column_value", "role": "assistant" } ]
def x(ver, num_components=2):
    parts = ver.split('.')
    if len(parts) <= num_components:
        return ver
    return '.'.join(parts[:num_components])
shorten_version
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(ver, num_components=2):\n parts = ver.split('.')\n if len(parts) <= num_components:\n return ver\n return '.'.join(parts[:num_components])\n", "role": "user" }, { "content": "shorten_version", "role": "assistant" } ]
def x(self):
    ret = {}
    for cache_date in self.cache_dates:
        data = self._cache_get(cache_date)
        if len(data['by_version']) == 0:
            data['by_version'] = {'other': 0}
        ret[cache_date] = data['by_version']
    return ret
per_version_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n ret = {}\n for cache_date in self.cache_dates:\n data = self._cache_get(cache_date)\n if len(data['by_version']) == 0:\n data['by_version'] = {'other': 0}\n ret[cache_date] = data['by_version']\n return ret\n", "role": "user" }, { "content": "per_version_data", "role": "assistant" } ]
def x(self):
    ret = {}
    for cache_date in self.cache_dates:
        data = self._cache_get(cache_date)
        if len(data['by_file_type']) == 0:
            data['by_file_type'] = {'other': 0}
        ret[cache_date] = data['by_file_type']
    return ret
per_file_type_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n ret = {}\n for cache_date in self.cache_dates:\n data = self._cache_get(cache_date)\n if len(data['by_file_type']) == 0:\n data['by_file_type'] = {'other': 0}\n ret[cache_date] = data['by_file_type']\n return ret\n", "role": "user" }, { "content": "per_file_type_data", "role": "assistant" } ]
def x(self):
    ret = {}
    for cache_date in self.cache_dates:
        data = self._cache_get(cache_date)
        ret[cache_date] = {}
        for inst_name, inst_data in data['by_installer'].items():
            for inst_ver, count in inst_data.items():
                k = self._compound_column_value(inst_name,
                    self._shorten_version(inst_ver))
                ret[cache_date][k] = count
        if len(ret[cache_date]) == 0:
            ret[cache_date]['unknown'] = 0
    return ret
per_installer_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n ret = {}\n for cache_date in self.cache_dates:\n data = self._cache_get(cache_date)\n ret[cache_date] = {}\n for inst_name, inst_data in data['by_installer'].items():\n for inst_ver, count in inst_data.items():\n k = self._compound_column_value(inst_name, self.\n _shorten_version(inst_ver))\n ret[cache_date][k] = count\n if len(ret[cache_date]) == 0:\n ret[cache_date]['unknown'] = 0\n return ret\n", "role": "user" }, { "content": "per_installer_data", "role": "assistant" } ]
def x(self):
    ret = {}
    for cache_date in self.cache_dates:
        data = self._cache_get(cache_date)
        ret[cache_date] = {}
        for impl_name, impl_data in data['by_implementation'].items():
            for impl_ver, count in impl_data.items():
                k = self._compound_column_value(impl_name,
                    self._shorten_version(impl_ver))
                ret[cache_date][k] = count
        if len(ret[cache_date]) == 0:
            ret[cache_date]['unknown'] = 0
    return ret
per_implementation_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n ret = {}\n for cache_date in self.cache_dates:\n data = self._cache_get(cache_date)\n ret[cache_date] = {}\n for impl_name, impl_data in data['by_implementation'].items():\n for impl_ver, count in impl_data.items():\n k = self._compound_column_value(impl_name, self.\n _shorten_version(impl_ver))\n ret[cache_date][k] = count\n if len(ret[cache_date]) == 0:\n ret[cache_date]['unknown'] = 0\n return ret\n", "role": "user" }, { "content": "per_implementation_data", "role": "assistant" } ]
def x(self):
    ret = {}
    for cache_date in self.cache_dates:
        data = self._cache_get(cache_date)
        ret[cache_date] = {self._column_value(x): data['by_system'][x]
            for x in data['by_system']}
        if len(ret[cache_date]) == 0:
            ret[cache_date]['unknown'] = 0
    return ret
per_system_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n ret = {}\n for cache_date in self.cache_dates:\n data = self._cache_get(cache_date)\n ret[cache_date] = {self._column_value(x): data['by_system'][x] for\n x in data['by_system']}\n if len(ret[cache_date]) == 0:\n ret[cache_date]['unknown'] = 0\n return ret\n", "role": "user" }, { "content": "per_system_data", "role": "assistant" } ]
def x(self):
    ret = {}
    for cache_date in self.cache_dates:
        data = self._cache_get(cache_date)
        ret[cache_date] = {}
        for cc, count in data['by_country'].items():
            k = '%s (%s)' % (self._alpha2_to_country(cc), cc)
            ret[cache_date][k] = count
        if len(ret[cache_date]) == 0:
            ret[cache_date]['unknown'] = 0
    return ret
per_country_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n ret = {}\n for cache_date in self.cache_dates:\n data = self._cache_get(cache_date)\n ret[cache_date] = {}\n for cc, count in data['by_country'].items():\n k = '%s (%s)' % (self._alpha2_to_country(cc), cc)\n ret[cache_date][k] = count\n if len(ret[cache_date]) == 0:\n ret[cache_date]['unknown'] = 0\n return ret\n", "role": "user" }, { "content": "per_country_data", "role": "assistant" } ]
def x(self):
    ret = {}
    for cache_date in self.cache_dates:
        data = self._cache_get(cache_date)
        ret[cache_date] = {}
        for distro_name, distro_data in data['by_distro'].items():
            if distro_name.lower() == 'red hat enterprise linux server':
                distro_name = 'RHEL'
            for distro_ver, count in distro_data.items():
                ver = self._shorten_version(distro_ver, num_components=1)
                if distro_name.lower() == 'os x':
                    ver = self._shorten_version(distro_ver, num_components=2)
                k = self._compound_column_value(distro_name, ver)
                ret[cache_date][k] = count
        if len(ret[cache_date]) == 0:
            ret[cache_date]['unknown'] = 0
    return ret
per_distro_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n ret = {}\n for cache_date in self.cache_dates:\n data = self._cache_get(cache_date)\n ret[cache_date] = {}\n for distro_name, distro_data in data['by_distro'].items():\n if distro_name.lower() == 'red hat enterprise linux server':\n distro_name = 'RHEL'\n for distro_ver, count in distro_data.items():\n ver = self._shorten_version(distro_ver, num_components=1)\n if distro_name.lower() == 'os x':\n ver = self._shorten_version(distro_ver, num_components=2)\n k = self._compound_column_value(distro_name, ver)\n ret[cache_date][k] = count\n if len(ret[cache_date]) == 0:\n ret[cache_date]['unknown'] = 0\n return ret\n", "role": "user" }, { "content": "per_distro_data", "role": "assistant" } ]
def x(self):
    count, num_days = self._downloads_for_num_days(7)
    res = ceil(count / num_days)
    logger.debug('Downloads per day = (%d / %d) = %d', count, num_days, res)
    return res
downloads_per_day
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n count, num_days = self._downloads_for_num_days(7)\n res = ceil(count / num_days)\n logger.debug('Downloads per day = (%d / %d) = %d', count, num_days, res)\n return res\n", "role": "user" }, { "content": "downloads_per_day", "role": "assistant" } ]
def x(self):
    if len(self.cache_dates) < 7:
        logger.error(
            'Only have %d days of data; cannot calculate downloads per week',
            len(self.cache_dates))
        return None
    count, _ = self._downloads_for_num_days(7)
    logger.debug('Downloads per week = %d', count)
    return count
downloads_per_week
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if len(self.cache_dates) < 7:\n logger.error(\n 'Only have %d days of data; cannot calculate downloads per week',\n len(self.cache_dates))\n return None\n count, _ = self._downloads_for_num_days(7)\n logger.debug('Downloads per week = %d', count)\n return count\n", "role": "user" }, { "content": "downloads_per_week", "role": "assistant" } ]
def x(self, num_days):
    logger.debug('Getting download total for last %d days', num_days)
    dates = self.cache_dates
    logger.debug('Cache has %d days of data', len(dates))
    if len(dates) > num_days:
        dates = dates[-1 * num_days:]
    logger.debug('Looking at last %d days of data', len(dates))
    dl_sum = 0
    for cache_date in dates:
        data = self._cache_get(cache_date)
        dl_sum += sum(data['by_version'].values())
    logger.debug('Sum of download counts: %d', dl_sum)
    return dl_sum, len(dates)
downloads_for_num_days
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, num_days):\n logger.debug('Getting download total for last %d days', num_days)\n dates = self.cache_dates\n logger.debug('Cache has %d days of data', len(dates))\n if len(dates) > num_days:\n dates = dates[-1 * num_days:]\n logger.debug('Looking at last %d days of data', len(dates))\n dl_sum = 0\n for cache_date in dates:\n data = self._cache_get(cache_date)\n dl_sum += sum(data['by_version'].values())\n logger.debug('Sum of download counts: %d', dl_sum)\n return dl_sum, len(dates)\n", "role": "user" }, { "content": "downloads_for_num_days", "role": "assistant" } ]
def x(self):
    fpath = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', None)
    if fpath is None:
        raise Exception(
            'ERROR: No project ID specified, and GOOGLE_APPLICATION_CREDENTIALS env var is not set')
    fpath = os.path.abspath(os.path.expanduser(fpath))
    logger.debug('Reading credentials file at %s to get project_id', fpath)
    with open(fpath, 'r') as fh:
        cred_data = json.loads(fh.read())
    return cred_data['project_id']
get_project_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n fpath = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', None)\n if fpath is None:\n raise Exception(\n 'ERROR: No project ID specified, and GOOGLE_APPLICATION_CREDENTIALS env var is not set'\n )\n fpath = os.path.abspath(os.path.expanduser(fpath))\n logger.debug('Reading credentials file at %s to get project_id', fpath)\n with open(fpath, 'r') as fh:\n cred_data = json.loads(fh.read())\n return cred_data['project_id']\n", "role": "user" }, { "content": "get_project_id", "role": "assistant" } ]
def x(self):
    logger.debug('Getting Google Credentials')
    credentials = GoogleCredentials.get_application_default()
    logger.debug('Building BigQuery service instance')
    bigquery_service = build('bigquery', 'v2', credentials=credentials)
    return bigquery_service
get_bigquery_service
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n logger.debug('Getting Google Credentials')\n credentials = GoogleCredentials.get_application_default()\n logger.debug('Building BigQuery service instance')\n bigquery_service = build('bigquery', 'v2', credentials=credentials)\n return bigquery_service\n", "role": "user" }, { "content": "get_bigquery_service", "role": "assistant" } ]
def x(self):
    all_table_names = []
    logger.info('Querying for all tables in dataset')
    tables = self.service.tables()
    request = tables.list(projectId=self._PROJECT_ID,
        datasetId=self._DATASET_ID)
    while request is not None:
        response = request.execute()
        if 'tables' not in response:
            response['tables'] = []
        for table in response['tables']:
            if table['type'] != 'TABLE':
                logger.debug('Skipping %s (type=%s)',
                    table['tableReference']['tableId'], table['type'])
                continue
            if not self._table_re.match(table['tableReference']['tableId']):
                logger.debug('Skipping table with non-matching name: %s',
                    table['tableReference']['tableId'])
                continue
            all_table_names.append(table['tableReference']['tableId'])
        request = tables.list_next(previous_request=request,
            previous_response=response)
    return sorted(all_table_names)
get_download_table_ids
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n all_table_names = []\n logger.info('Querying for all tables in dataset')\n tables = self.service.tables()\n request = tables.list(projectId=self._PROJECT_ID, datasetId=self.\n _DATASET_ID)\n while request is not None:\n response = request.execute()\n if 'tables' not in response:\n response['tables'] = []\n for table in response['tables']:\n if table['type'] != 'TABLE':\n logger.debug('Skipping %s (type=%s)', table[\n 'tableReference']['tableId'], table['type'])\n continue\n if not self._table_re.match(table['tableReference']['tableId']):\n logger.debug('Skipping table with non-matching name: %s',\n table['tableReference']['tableId'])\n continue\n all_table_names.append(table['tableReference']['tableId'])\n request = tables.list_next(previous_request=request,\n previous_response=response)\n return sorted(all_table_names)\n", "role": "user" }, { "content": "get_download_table_ids", "role": "assistant" } ]
def x(self, table_name):
    m = self._table_re.match(table_name)
    dt = datetime.strptime(m.group(1), '%Y%m%d')
    return dt
datetime_for_table_name
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table_name):\n m = self._table_re.match(table_name)\n dt = datetime.strptime(m.group(1), '%Y%m%d')\n return dt\n", "role": "user" }, { "content": "datetime_for_table_name", "role": "assistant" } ]
def x(self, query):
    query_request = self.service.jobs()
    logger.debug('Running query: %s', query)
    start = datetime.now()
    resp = query_request.query(projectId=self.project_id,
        body={'query': query}).execute()
    duration = datetime.now() - start
    logger.debug('Query response (in %s): %s', duration, resp)
    if not resp['jobComplete']:
        logger.error('Error: query reported job not complete!')
    if int(resp['totalRows']) == 0:
        return []
    if int(resp['totalRows']) != len(resp['rows']):
        logger.error(
            'Error: query reported %s total rows, but only returned %d',
            resp['totalRows'], len(resp['rows']))
    data = []
    fields = [f['name'] for f in resp['schema']['fields']]
    for row in resp['rows']:
        d = {}
        for idx, val in enumerate(row['f']):
            d[fields[idx]] = val['v']
        data.append(d)
    return data
run_query
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, query):\n query_request = self.service.jobs()\n logger.debug('Running query: %s', query)\n start = datetime.now()\n resp = query_request.query(projectId=self.project_id, body={'query': query}\n ).execute()\n duration = datetime.now() - start\n logger.debug('Query response (in %s): %s', duration, resp)\n if not resp['jobComplete']:\n logger.error('Error: query reported job not complete!')\n if int(resp['totalRows']) == 0:\n return []\n if int(resp['totalRows']) != len(resp['rows']):\n logger.error(\n 'Error: query reported %s total rows, but only returned %d',\n resp['totalRows'], len(resp['rows']))\n data = []\n fields = [f['name'] for f in resp['schema']['fields']]\n for row in resp['rows']:\n d = {}\n for idx, val in enumerate(row['f']):\n d[fields[idx]] = val['v']\n data.append(d)\n return data\n", "role": "user" }, { "content": "run_query", "role": "assistant" } ]
def x(self, table_name):
    logger.debug('Querying for newest timestamp in table %s', table_name)
    q = ('SELECT TIMESTAMP_TO_SEC(MAX(timestamp)) AS max_ts %s;' %
        self._from_for_table(table_name))
    res = self._run_query(q)
    ts = int(res[0]['max_ts'])
    logger.debug('Newest timestamp in table %s: %s', table_name, ts)
    return ts
get_newest_ts_in_table
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table_name):\n logger.debug('Querying for newest timestamp in table %s', table_name)\n q = ('SELECT TIMESTAMP_TO_SEC(MAX(timestamp)) AS max_ts %s;' % self.\n _from_for_table(table_name))\n res = self._run_query(q)\n ts = int(res[0]['max_ts'])\n logger.debug('Newest timestamp in table %s: %s', table_name, ts)\n return ts\n", "role": "user" }, { "content": "get_newest_ts_in_table", "role": "assistant" } ]
def x(self, table_name):
    logger.info('Querying for downloads by installer in table %s', table_name)
    q = (
        'SELECT file.project, details.installer.name, details.installer.version, COUNT(*) as dl_count %s %s GROUP BY file.project, details.installer.name, details.installer.version;'
        % (self._from_for_table(table_name), self._where_for_projects))
    res = self._run_query(q)
    result = self._dict_for_projects()
    for row in res:
        proj = result[row['file_project']]
        iname = row['details_installer_name']
        iver = row['details_installer_version']
        if iname not in proj:
            proj[iname] = {}
        if iver not in proj[iname]:
            proj[iname][iver] = 0
        proj[iname][iver] += int(row['dl_count'])
    return result
query_by_installer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table_name):\n logger.info('Querying for downloads by installer in table %s', table_name)\n q = (\n 'SELECT file.project, details.installer.name, details.installer.version, COUNT(*) as dl_count %s %s GROUP BY file.project, details.installer.name, details.installer.version;'\n % (self._from_for_table(table_name), self._where_for_projects))\n res = self._run_query(q)\n result = self._dict_for_projects()\n for row in res:\n proj = result[row['file_project']]\n iname = row['details_installer_name']\n iver = row['details_installer_version']\n if iname not in proj:\n proj[iname] = {}\n if iver not in proj[iname]:\n proj[iname][iver] = 0\n proj[iname][iver] += int(row['dl_count'])\n return result\n", "role": "user" }, { "content": "query_by_installer", "role": "assistant" } ]
def x(self, table_name):
    logger.info('Querying for downloads by system in table %s', table_name)
    q = (
        'SELECT file.project, details.system.name, COUNT(*) as dl_count %s %s GROUP BY file.project, details.system.name;'
        % (self._from_for_table(table_name), self._where_for_projects))
    res = self._run_query(q)
    result = self._dict_for_projects()
    for row in res:
        system = row['details_system_name']
        result[row['file_project']][system] = int(row['dl_count'])
    return result
query_by_system
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table_name):\n logger.info('Querying for downloads by system in table %s', table_name)\n q = (\n 'SELECT file.project, details.system.name, COUNT(*) as dl_count %s %s GROUP BY file.project, details.system.name;'\n % (self._from_for_table(table_name), self._where_for_projects))\n res = self._run_query(q)\n result = self._dict_for_projects()\n for row in res:\n system = row['details_system_name']\n result[row['file_project']][system] = int(row['dl_count'])\n return result\n", "role": "user" }, { "content": "query_by_system", "role": "assistant" } ]
def x(self, table_name):
    logger.info('Querying for downloads by distro in table %s', table_name)
    q = (
        'SELECT file.project, details.distro.name, details.distro.version, COUNT(*) as dl_count %s %s GROUP BY file.project, details.distro.name, details.distro.version;'
        % (self._from_for_table(table_name), self._where_for_projects))
    res = self._run_query(q)
    result = self._dict_for_projects()
    for row in res:
        proj = result[row['file_project']]
        dname = row['details_distro_name']
        dver = row['details_distro_version']
        if dname not in proj:
            proj[dname] = {}
        if dver not in proj[dname]:
            proj[dname][dver] = 0
        proj[dname][dver] += int(row['dl_count'])
    return result
query_by_distro
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table_name):\n logger.info('Querying for downloads by distro in table %s', table_name)\n q = (\n 'SELECT file.project, details.distro.name, details.distro.version, COUNT(*) as dl_count %s %s GROUP BY file.project, details.distro.name, details.distro.version;'\n % (self._from_for_table(table_name), self._where_for_projects))\n res = self._run_query(q)\n result = self._dict_for_projects()\n for row in res:\n proj = result[row['file_project']]\n dname = row['details_distro_name']\n dver = row['details_distro_version']\n if dname not in proj:\n proj[dname] = {}\n if dver not in proj[dname]:\n proj[dname][dver] = 0\n proj[dname][dver] += int(row['dl_count'])\n return result\n", "role": "user" }, { "content": "query_by_distro", "role": "assistant" } ]
def x(self, table_name):
    table_date = self._datetime_for_table_name(table_name)
    logger.info('Running all queries for date table: %s (%s)', table_name,
        table_date.strftime('%Y-%m-%d'))
    final = self._dict_for_projects()
    try:
        data_timestamp = self._get_newest_ts_in_table(table_name)
    except HttpError as exc:
        try:
            content = json.loads(exc.content.decode('utf-8'))
            if content['error']['message'].startswith('Not found: Table'):
                logger.error('Table %s not found; no data for that day',
                    table_name)
                return
        except:
            pass
        raise exc
    for name, func in {'by_version': self._query_by_version,
            'by_file_type': self._query_by_file_type,
            'by_installer': self._query_by_installer,
            'by_implementation': self._query_by_implementation,
            'by_system': self._query_by_system,
            'by_distro': self._query_by_distro,
            'by_country': self._query_by_country_code}.items():
        tmp = func(table_name)
        for proj_name in tmp:
            final[proj_name][name] = tmp[proj_name]
    for proj_name in final:
        self.cache.set(proj_name, table_date, final[proj_name], data_timestamp)
query_one_table
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, table_name):\n table_date = self._datetime_for_table_name(table_name)\n logger.info('Running all queries for date table: %s (%s)', table_name,\n table_date.strftime('%Y-%m-%d'))\n final = self._dict_for_projects()\n try:\n data_timestamp = self._get_newest_ts_in_table(table_name)\n except HttpError as exc:\n try:\n content = json.loads(exc.content.decode('utf-8'))\n if content['error']['message'].startswith('Not found: Table'):\n logger.error('Table %s not found; no data for that day',\n table_name)\n return\n except:\n pass\n raise exc\n for name, func in {'by_version': self._query_by_version, 'by_file_type':\n self._query_by_file_type, 'by_installer': self._query_by_installer,\n 'by_implementation': self._query_by_implementation, 'by_system':\n self._query_by_system, 'by_distro': self._query_by_distro,\n 'by_country': self._query_by_country_code}.items():\n tmp = func(table_name)\n for proj_name in tmp:\n final[proj_name][name] = tmp[proj_name]\n for proj_name in final:\n self.cache.set(proj_name, table_date, final[proj_name], data_timestamp)\n", "role": "user" }, { "content": "query_one_table", "role": "assistant" } ]
def x(self, dt):
    for p in self.projects:
        if self.cache.get(p, dt) is None:
            return False
    return True
have_cache_for_date
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, dt):\n for p in self.projects:\n if self.cache.get(p, dt) is None:\n return False\n return True\n", "role": "user" }, { "content": "have_cache_for_date", "role": "assistant" } ]
def x(self, num_days, available_table_names):
    if num_days == -1:
        logger.info('Backfilling all available history')
        start_table = available_table_names[1]
    else:
        logger.info('Backfilling %d days of history', num_days)
        start_table = available_table_names[-1 * num_days]
    start_date = self._datetime_for_table_name(start_table)
    end_table = available_table_names[-3]
    end_date = self._datetime_for_table_name(end_table)
    logger.debug('Backfilling history from %s (%s) to %s (%s)', start_table,
        start_date.strftime('%Y-%m-%d'), end_table,
        end_date.strftime('%Y-%m-%d'))
    for days in range((end_date - start_date).days + 1):
        backfill_dt = start_date + timedelta(days=days)
        if self._have_cache_for_date(backfill_dt):
            logger.info('Cache present for all projects for %s; skipping',
                backfill_dt.strftime('%Y-%m-%d'))
            continue
        backfill_table = self._table_name_for_datetime(backfill_dt)
        logger.info('Backfilling %s (%s)', backfill_table,
            backfill_dt.strftime('%Y-%m-%d'))
        self.query_one_table(backfill_table)
backfill_history
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, num_days, available_table_names):\n if num_days == -1:\n logger.info('Backfilling all available history')\n start_table = available_table_names[1]\n else:\n logger.info('Backfilling %d days of history', num_days)\n start_table = available_table_names[-1 * num_days]\n start_date = self._datetime_for_table_name(start_table)\n end_table = available_table_names[-3]\n end_date = self._datetime_for_table_name(end_table)\n logger.debug('Backfilling history from %s (%s) to %s (%s)', start_table,\n start_date.strftime('%Y-%m-%d'), end_table, end_date.strftime(\n '%Y-%m-%d'))\n for days in range((end_date - start_date).days + 1):\n backfill_dt = start_date + timedelta(days=days)\n if self._have_cache_for_date(backfill_dt):\n logger.info('Cache present for all projects for %s; skipping',\n backfill_dt.strftime('%Y-%m-%d'))\n continue\n backfill_table = self._table_name_for_datetime(backfill_dt)\n logger.info('Backfilling %s (%s)', backfill_table, backfill_dt.\n strftime('%Y-%m-%d'))\n self.query_one_table(backfill_table)\n", "role": "user" }, { "content": "backfill_history", "role": "assistant" } ]
def x(self, backfill_num_days=7):
    available_tables = self._get_download_table_ids()
    logger.debug('Found %d available download tables: %s',
        len(available_tables), available_tables)
    today_table = available_tables[-1]
    yesterday_table = available_tables[-2]
    self.query_one_table(today_table)
    self.query_one_table(yesterday_table)
    self.backfill_history(backfill_num_days, available_tables)
run_queries
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, backfill_num_days=7):\n available_tables = self._get_download_table_ids()\n logger.debug('Found %d available download tables: %s', len(\n available_tables), available_tables)\n today_table = available_tables[-1]\n yesterday_table = available_tables[-2]\n self.query_one_table(today_table)\n self.query_one_table(yesterday_table)\n self.backfill_history(backfill_num_days, available_tables)\n", "role": "user" }, { "content": "run_queries", "role": "assistant" } ]
def x(data): keys = set() for dt, d in data.items(): for k in d: keys.add(k) return sorted([x for x in keys])
filter_data_columns
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(data):\n keys = set()\n for dt, d in data.items():\n for k in d:\n keys.add(k)\n return sorted([x for x in keys])\n", "role": "user" }, { "content": "filter_data_columns", "role": "assistant" } ]
def x(self): logger.debug('Generating templated HTML') env = Environment(loader=PackageLoader('pypi_download_stats', 'templates'), extensions=['jinja2.ext.loopcontrols']) env.filters['format_date_long'] = filter_format_date_long env.filters['format_date_ymd'] = filter_format_date_ymd env.filters['data_columns'] = filter_data_columns template = env.get_template('base.html') logger.debug('Rendering template') html = template.render(project=self.project_name, cache_date=self. _stats.as_of_datetime, user=getuser(), host=platform_node(), version=VERSION, proj_url=PROJECT_URL, graphs=self._graphs, graph_keys=self.GRAPH_KEYS, resources=Resources(mode='inline'). render(), badges=self._badges) logger.debug('Template rendered') return html
generate_html
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n logger.debug('Generating templated HTML')\n env = Environment(loader=PackageLoader('pypi_download_stats',\n 'templates'), extensions=['jinja2.ext.loopcontrols'])\n env.filters['format_date_long'] = filter_format_date_long\n env.filters['format_date_ymd'] = filter_format_date_ymd\n env.filters['data_columns'] = filter_data_columns\n template = env.get_template('base.html')\n logger.debug('Rendering template')\n html = template.render(project=self.project_name, cache_date=self.\n _stats.as_of_datetime, user=getuser(), host=platform_node(),\n version=VERSION, proj_url=PROJECT_URL, graphs=self._graphs,\n graph_keys=self.GRAPH_KEYS, resources=Resources(mode='inline').\n render(), badges=self._badges)\n logger.debug('Template rendered')\n return html\n", "role": "user" }, { "content": "generate_html", "role": "assistant" } ]
def x(self, data): labels = [] keys = set() for date in data: for k in data[date]: keys.add(k) out_data = {} for k in keys: out_data[k] = [] for data_date, data_dict in sorted(data.items()): labels.append(data_date) for k in out_data: if k in data_dict: out_data[k].append(data_dict[k]) else: out_data[k].append(0) return out_data, labels
data_dict_to_bokeh_chart_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data):\n labels = []\n keys = set()\n for date in data:\n for k in data[date]:\n keys.add(k)\n out_data = {}\n for k in keys:\n out_data[k] = []\n for data_date, data_dict in sorted(data.items()):\n labels.append(data_date)\n for k in out_data:\n if k in data_dict:\n out_data[k].append(data_dict[k])\n else:\n out_data[k].append(0)\n return out_data, labels\n", "role": "user" }, { "content": "data_dict_to_bokeh_chart_data", "role": "assistant" } ]
def x(self, data): if len(data.keys()) <= 10: logger.debug('Data has less than 10 keys; not limiting') return data avgs = {} for k in data: if len(data[k]) <= 7: vals = data[k] else: vals = data[k][-7:] avgs[k] = sum(vals) / len(vals) final_data = {} other = [] count = 0 for k in sorted(avgs, key=avgs.get, reverse=True): if count < 10: final_data[k] = data[k] logger.debug( 'Keeping data series %s (average over last 7 days of data: %d', k, avgs[k]) else: logger.debug( "Adding data series %s to 'other' (average over last 7 days of data: %d" , k, avgs[k]) other.append(data[k]) count += 1 final_data['other'] = [sum(series) for series in zip(*other)] return final_data
limit_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data):\n if len(data.keys()) <= 10:\n logger.debug('Data has less than 10 keys; not limiting')\n return data\n avgs = {}\n for k in data:\n if len(data[k]) <= 7:\n vals = data[k]\n else:\n vals = data[k][-7:]\n avgs[k] = sum(vals) / len(vals)\n final_data = {}\n other = []\n count = 0\n for k in sorted(avgs, key=avgs.get, reverse=True):\n if count < 10:\n final_data[k] = data[k]\n logger.debug(\n 'Keeping data series %s (average over last 7 days of data: %d',\n k, avgs[k])\n else:\n logger.debug(\n \"Adding data series %s to 'other' (average over last 7 days of data: %d\"\n , k, avgs[k])\n other.append(data[k])\n count += 1\n final_data['other'] = [sum(series) for series in zip(*other)]\n return final_data\n", "role": "user" }, { "content": "limit_data", "role": "assistant" } ]
def x(self, name, title, stats_data, y_name): logger.debug('Generating chart data for %s graph', name) orig_data, labels = self._data_dict_to_bokeh_chart_data(stats_data) data = self._limit_data(orig_data) logger.debug('Generating %s graph', name) script, div = FancyAreaGraph(name, '%s %s' % (self.project_name, title), data, labels, y_name).generate_graph() logger.debug('%s graph generated', name) self._graphs[name] = {'title': title, 'script': script, 'div': div, 'raw_data': stats_data}
generate_graph
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name, title, stats_data, y_name):\n logger.debug('Generating chart data for %s graph', name)\n orig_data, labels = self._data_dict_to_bokeh_chart_data(stats_data)\n data = self._limit_data(orig_data)\n logger.debug('Generating %s graph', name)\n script, div = FancyAreaGraph(name, '%s %s' % (self.project_name, title),\n data, labels, y_name).generate_graph()\n logger.debug('%s graph generated', name)\n self._graphs[name] = {'title': title, 'script': script, 'div': div,\n 'raw_data': stats_data}\n", "role": "user" }, { "content": "generate_graph", "role": "assistant" } ]
def x(self): daycount = self._stats.downloads_per_day day = self._generate_badge('Downloads', '%d/day' % daycount) self._badges['per-day'] = day weekcount = self._stats.downloads_per_week if weekcount is None: return week = self._generate_badge('Downloads', '%d/week' % weekcount) self._badges['per-week'] = week monthcount = self._stats.downloads_per_month if monthcount is None: return month = self._generate_badge('Downloads', '%d/month' % monthcount) self._badges['per-month'] = month
generate_badges
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n daycount = self._stats.downloads_per_day\n day = self._generate_badge('Downloads', '%d/day' % daycount)\n self._badges['per-day'] = day\n weekcount = self._stats.downloads_per_week\n if weekcount is None:\n return\n week = self._generate_badge('Downloads', '%d/week' % weekcount)\n self._badges['per-week'] = week\n monthcount = self._stats.downloads_per_month\n if monthcount is None:\n return\n month = self._generate_badge('Downloads', '%d/month' % monthcount)\n self._badges['per-month'] = month\n", "role": "user" }, { "content": "generate_badges", "role": "assistant" } ]
def x(self, subject, status): url = ( 'https://img.shields.io/badge/%s-%s-brightgreen.svg?style=flat&maxAge=3600' % (subject, status)) logger.debug('Getting badge for %s => %s (%s)', subject, status, url) res = requests.get(url) if res.status_code != 200: raise Exception('Error: got status %s for shields.io badge: %s', res.status_code, res.text) logger.debug('Got %d character response from shields.io', len(res.text)) return res.text
generate_badge
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, subject, status):\n url = (\n 'https://img.shields.io/badge/%s-%s-brightgreen.svg?style=flat&maxAge=3600'\n % (subject, status))\n logger.debug('Getting badge for %s => %s (%s)', subject, status, url)\n res = requests.get(url)\n if res.status_code != 200:\n raise Exception('Error: got status %s for shields.io badge: %s',\n res.status_code, res.text)\n logger.debug('Got %d character response from shields.io', len(res.text))\n return res.text\n", "role": "user" }, { "content": "generate_badge", "role": "assistant" } ]
def x(self): logger.info('Generating graphs') self._generate_graph('by-version', 'Downloads by Version', self._stats. per_version_data, 'Version') self._generate_graph('by-file-type', 'Downloads by File Type', self. _stats.per_file_type_data, 'File Type') self._generate_graph('by-installer', 'Downloads by Installer', self. _stats.per_installer_data, 'Installer') self._generate_graph('by-implementation', 'Downloads by Python Implementation/Version', self._stats. per_implementation_data, 'Implementation/Version') self._generate_graph('by-system', 'Downloads by System Type', self. _stats.per_system_data, 'System') self._generate_graph('by-country', 'Downloads by Country', self._stats. per_country_data, 'Country') self._generate_graph('by-distro', 'Downloads by Distro', self._stats. per_distro_data, 'Distro') self._generate_badges() logger.info('Generating HTML') html = self._generate_html() html_path = os.path.join(self.output_dir, 'index.html') with open(html_path, 'wb') as fh: fh.write(html.encode('utf-8')) logger.info('HTML report written to %s', html_path) logger.info('Writing SVG badges') for name, svg in self._badges.items(): path = os.path.join(self.output_dir, '%s.svg' % name) with open(path, 'w') as fh: fh.write(svg) logger.info('%s badge written to: %s', name, path)
generate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n logger.info('Generating graphs')\n self._generate_graph('by-version', 'Downloads by Version', self._stats.\n per_version_data, 'Version')\n self._generate_graph('by-file-type', 'Downloads by File Type', self.\n _stats.per_file_type_data, 'File Type')\n self._generate_graph('by-installer', 'Downloads by Installer', self.\n _stats.per_installer_data, 'Installer')\n self._generate_graph('by-implementation',\n 'Downloads by Python Implementation/Version', self._stats.\n per_implementation_data, 'Implementation/Version')\n self._generate_graph('by-system', 'Downloads by System Type', self.\n _stats.per_system_data, 'System')\n self._generate_graph('by-country', 'Downloads by Country', self._stats.\n per_country_data, 'Country')\n self._generate_graph('by-distro', 'Downloads by Distro', self._stats.\n per_distro_data, 'Distro')\n self._generate_badges()\n logger.info('Generating HTML')\n html = self._generate_html()\n html_path = os.path.join(self.output_dir, 'index.html')\n with open(html_path, 'wb') as fh:\n fh.write(html.encode('utf-8'))\n logger.info('HTML report written to %s', html_path)\n logger.info('Writing SVG badges')\n for name, svg in self._badges.items():\n path = os.path.join(self.output_dir, '%s.svg' % name)\n with open(path, 'w') as fh:\n fh.write(svg)\n logger.info('%s badge written to: %s', name, path)\n", "role": "user" }, { "content": "generate", "role": "assistant" } ]
def x(desired_format, datetime_instance=None, *args, **kwargs): for strf, exp in datetime_regex.datetime.format.items(): desired_format = exp.sub(strf, desired_format) if not datetime_instance: datetime_instance = now() return datetime_instance.strftime(desired_format.format(*args, **kwargs))
datetime_format
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(desired_format, datetime_instance=None, *args, **kwargs):\n for strf, exp in datetime_regex.datetime.format.items():\n desired_format = exp.sub(strf, desired_format)\n if not datetime_instance:\n datetime_instance = now()\n return datetime_instance.strftime(desired_format.format(*args, **kwargs))\n", "role": "user" }, { "content": "datetime_format", "role": "assistant" } ]
def x(iso_string): try: assert datetime_regex.datetime.datetime.match(iso_string).groups()[0] except (ValueError, AssertionError, IndexError, AttributeError): raise TypeError('String is not in ISO format') try: return datetime.datetime.strptime(iso_string, '%Y-%m-%dT%H:%M:%S.%f') except ValueError: return datetime.datetime.strptime(iso_string, '%Y-%m-%dT%H:%M:%S')
datetime_from_iso
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(iso_string):\n try:\n assert datetime_regex.datetime.datetime.match(iso_string).groups()[0]\n except (ValueError, AssertionError, IndexError, AttributeError):\n raise TypeError('String is not in ISO format')\n try:\n return datetime.datetime.strptime(iso_string, '%Y-%m-%dT%H:%M:%S.%f')\n except ValueError:\n return datetime.datetime.strptime(iso_string, '%Y-%m-%dT%H:%M:%S')\n", "role": "user" }, { "content": "datetime_from_iso", "role": "assistant" } ]
def x(utc=False, tz=None): return datetime.datetime.utcnow() if utc else datetime.datetime.now(tz=tz)
now
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(utc=False, tz=None):\n return datetime.datetime.utcnow() if utc else datetime.datetime.now(tz=tz)\n", "role": "user" }, { "content": "now", "role": "assistant" } ]
def x(command, input=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=None, copy_local_env=False, **kwargs): if copy_local_env: env = os.environ.copy() env.update(kwargs.get('env', {})) else: env = kwargs.get('env') if sys.version_info >= (3, 5): return subprocess.run(command, input=input, stdout=stdout, stderr= stderr, timeout=timeout, env=env, **kwargs) class CompletedProcess(object): """A backwards compatible near clone of subprocess.CompletedProcess""" def x(self, args, returncode, stdout=None, stderr=None): self.args = args self.returncode = returncode self.stdout = stdout self.stderr = stderr def x(self): args = ['args={0!r}'.format(self.args), 'returncode={0!r}'. format(self.returncode), 'stdout={0!r}'.format(self.stdout) if self.stdout else '', 'stderr={0!r}'.format(self.stderr) if self.stderr else ''] return '{0}({1})'.format(type(self).__name__, ', '.join(filter( None, args))) def x(self): if self.returncode: if python_version < (2, 7): raise subprocess.CalledProcessError(self.returncode, self.args) raise subprocess.CalledProcessError(self.returncode, self. args, self.stdout) proc = subprocess.Popen(command, stdout=stdout, stderr=stderr, env=env, **kwargs) if PY3: out, err = proc.communicate(input=input, timeout=timeout) else: if timeout: raise NotImplementedError('Timeout is only available on Python 3') out, err = proc.communicate(input=input) return CompletedProcess(command, proc.returncode, out, err)
run
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(command, input=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n timeout=None, copy_local_env=False, **kwargs):\n if copy_local_env:\n env = os.environ.copy()\n env.update(kwargs.get('env', {}))\n else:\n env = kwargs.get('env')\n if sys.version_info >= (3, 5):\n return subprocess.run(command, input=input, stdout=stdout, stderr=\n stderr, timeout=timeout, env=env, **kwargs)\n\n\n class CompletedProcess(object):\n \"\"\"A backwards compatible near clone of subprocess.CompletedProcess\"\"\"\n\n def x(self, args, returncode, stdout=None, stderr=None):\n self.args = args\n self.returncode = returncode\n self.stdout = stdout\n self.stderr = stderr\n\n def x(self):\n args = ['args={0!r}'.format(self.args), 'returncode={0!r}'.\n format(self.returncode), 'stdout={0!r}'.format(self.stdout) if\n self.stdout else '', 'stderr={0!r}'.format(self.stderr) if\n self.stderr else '']\n return '{0}({1})'.format(type(self).__name__, ', '.join(filter(\n None, args)))\n\n def x(self):\n if self.returncode:\n if python_version < (2, 7):\n raise subprocess.CalledProcessError(self.returncode,\n self.args)\n raise subprocess.CalledProcessError(self.returncode, self.\n args, self.stdout)\n proc = subprocess.Popen(command, stdout=stdout, stderr=stderr, env=env,\n **kwargs)\n if PY3:\n out, err = proc.communicate(input=input, timeout=timeout)\n else:\n if timeout:\n raise NotImplementedError('Timeout is only available on Python 3')\n out, err = proc.communicate(input=input)\n return CompletedProcess(command, proc.returncode, out, err)\n", "role": "user" }, { "content": "run", "role": "assistant" } ]
def x(target, iterable, threaded=True, processes=4, asynchronous=False, target_kwargs=None): my_pool = pool.ThreadPool if threaded else pool.Pool if target_kwargs: target = partial(target, **target_kwargs if target_kwargs else None) p = my_pool(processes) try: results = p.map_async(target, iterable) if asynchronous else p.map( target, iterable) finally: p.close() p.join() return results
run_in_pool
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(target, iterable, threaded=True, processes=4, asynchronous=False,\n target_kwargs=None):\n my_pool = pool.ThreadPool if threaded else pool.Pool\n if target_kwargs:\n target = partial(target, **target_kwargs if target_kwargs else None)\n p = my_pool(processes)\n try:\n results = p.map_async(target, iterable) if asynchronous else p.map(\n target, iterable)\n finally:\n p.close()\n p.join()\n return results\n", "role": "user" }, { "content": "run_in_pool", "role": "assistant" } ]
def x(dictionary, level=0, sep='| '): return ''.join(['{0}{1}\n{2}'.format(sep * level, k, tree_view(v, level + 1, sep=sep) if isinstance(v, dict) else '') for k, v in dictionary. items()])
tree_view
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(dictionary, level=0, sep='| '):\n return ''.join(['{0}{1}\\n{2}'.format(sep * level, k, tree_view(v, level +\n 1, sep=sep) if isinstance(v, dict) else '') for k, v in dictionary.\n items()])\n", "role": "user" }, { "content": "tree_view", "role": "assistant" } ]
def x(self, in_dict=None): in_dict = in_dict if in_dict else self out_dict = dict() for k, v in in_dict.items(): if isinstance(v, Namespace): v = v.to_dict() out_dict[k] = v return out_dict
to_dict
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, in_dict=None):\n in_dict = in_dict if in_dict else self\n out_dict = dict()\n for k, v in in_dict.items():\n if isinstance(v, Namespace):\n v = v.to_dict()\n out_dict[k] = v\n return out_dict\n", "role": "user" }, { "content": "to_dict", "role": "assistant" } ]
def x(self, item, default=None, spliter=',', strip=True, mod=None): try: item = self.__getattr__(item) except AttributeError as err: if default is not None: return default raise err if strip: item = item.lstrip('[').rstrip(']') out = [(x.strip() if strip else x) for x in item.split(spliter)] if mod: return list(map(mod, out)) return out
list
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, item, default=None, spliter=',', strip=True, mod=None):\n try:\n item = self.__getattr__(item)\n except AttributeError as err:\n if default is not None:\n return default\n raise err\n if strip:\n item = item.lstrip('[').rstrip(']')\n out = [(x.strip() if strip else x) for x in item.split(spliter)]\n if mod:\n return list(map(mod, out))\n return out\n", "role": "user" }, { "content": "list", "role": "assistant" } ]
def x(url, save_to_file=True, save_dir='.', filename=None, block_size=64000, overwrite=False, quiet=False): if save_to_file: if not filename: filename = safe_filename(url.split('/')[-1]) if not filename: filename = 'downloaded_at_{}.file'.format(time.time()) save_location = os.path.abspath(os.path.join(save_dir, filename)) if os.path.exists(save_location) and not overwrite: logger.error('File {0} already exists'.format(save_location)) return False else: save_location = 'memory' try: request = urlopen(url) except ValueError as err: if not quiet and 'unknown url type' in str(err): logger.error( 'Please make sure URL is formatted correctly and starts with http:// or other protocol' ) raise err except Exception as err: if not quiet: logger.error('Could not download {0} - {1}'.format(url, err)) raise err try: kb_size = int(request.headers['Content-Length']) / 1024 except Exception as err: if not quiet: logger.debug('Could not determine file size - {0}'.format(err)) file_size = '(unknown size)' else: file_size = '({0:.1f} {1})'.format(*((kb_size, 'KB') if kb_size < 9999 else (kb_size / 1024, 'MB'))) if not quiet: logger.info('Downloading {0} {1} to {2}'.format(url, file_size, save_location)) if save_to_file: with open(save_location, 'wb') as f: while True: buffer = request.read(block_size) if not buffer: break f.write(buffer) return save_location else: return request.read()
download
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(url, save_to_file=True, save_dir='.', filename=None, block_size=64000,\n overwrite=False, quiet=False):\n if save_to_file:\n if not filename:\n filename = safe_filename(url.split('/')[-1])\n if not filename:\n filename = 'downloaded_at_{}.file'.format(time.time())\n save_location = os.path.abspath(os.path.join(save_dir, filename))\n if os.path.exists(save_location) and not overwrite:\n logger.error('File {0} already exists'.format(save_location))\n return False\n else:\n save_location = 'memory'\n try:\n request = urlopen(url)\n except ValueError as err:\n if not quiet and 'unknown url type' in str(err):\n logger.error(\n 'Please make sure URL is formatted correctly and starts with http:// or other protocol'\n )\n raise err\n except Exception as err:\n if not quiet:\n logger.error('Could not download {0} - {1}'.format(url, err))\n raise err\n try:\n kb_size = int(request.headers['Content-Length']) / 1024\n except Exception as err:\n if not quiet:\n logger.debug('Could not determine file size - {0}'.format(err))\n file_size = '(unknown size)'\n else:\n file_size = '({0:.1f} {1})'.format(*((kb_size, 'KB') if kb_size < \n 9999 else (kb_size / 1024, 'MB')))\n if not quiet:\n logger.info('Downloading {0} {1} to {2}'.format(url, file_size,\n save_location))\n if save_to_file:\n with open(save_location, 'wb') as f:\n while True:\n buffer = request.read(block_size)\n if not buffer:\n break\n f.write(buffer)\n return save_location\n else:\n return request.read()\n", "role": "user" }, { "content": "download", "role": "assistant" } ]
def x(url, port=None, ipv6=False, connect_type=socket.SOCK_STREAM, proto= socket.IPPROTO_TCP, flags=0): try: results = socket.getaddrinfo(url, port, socket.AF_INET if not ipv6 else socket.AF_INET6, connect_type, proto, flags) except socket.gaierror: logger.exception('Could not resolve hostname') return [] return list(set([result[-1][0] for result in results]))
url_to_ips
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(url, port=None, ipv6=False, connect_type=socket.SOCK_STREAM, proto=\n socket.IPPROTO_TCP, flags=0):\n try:\n results = socket.getaddrinfo(url, port, socket.AF_INET if not ipv6 else\n socket.AF_INET6, connect_type, proto, flags)\n except socket.gaierror:\n logger.exception('Could not resolve hostname')\n return []\n return list(set([result[-1][0] for result in results]))\n", "role": "user" }, { "content": "url_to_ips", "role": "assistant" } ]
def x(ip_addr): try: return socket.gethostbyaddr(ip_addr)[0] except (socket.gaierror, socket.herror): logger.exception('Could not resolve hostname')
ip_to_url
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(ip_addr):\n try:\n return socket.gethostbyaddr(ip_addr)[0]\n except (socket.gaierror, socket.herror):\n logger.exception('Could not resolve hostname')\n", "role": "user" }, { "content": "ip_to_url", "role": "assistant" } ]
def x(self): self._process = threading.Thread(target=self._background_runner) self._process.start()
start
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self._process = threading.Thread(target=self._background_runner)\n self._process.start()\n", "role": "user" }, { "content": "start", "role": "assistant" } ]
def x(stream=sys.stderr, level=logging.INFO, log_format=log_formats.easy_read): sh = logging.StreamHandler(stream) sh.setLevel(level) sh.setFormatter(logging.Formatter(log_format)) return sh
get_stream_handler
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(stream=sys.stderr, level=logging.INFO, log_format=log_formats.easy_read):\n sh = logging.StreamHandler(stream)\n sh.setLevel(level)\n sh.setFormatter(logging.Formatter(log_format))\n return sh\n", "role": "user" }, { "content": "get_stream_handler", "role": "assistant" } ]
def x(file_path='out.log', level=logging.INFO, log_format=log_formats. easy_read, handler=logging.FileHandler, **handler_kwargs): fh = handler(file_path, **handler_kwargs) fh.setLevel(level) fh.setFormatter(logging.Formatter(log_format)) return fh
get_file_handler
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(file_path='out.log', level=logging.INFO, log_format=log_formats.\n easy_read, handler=logging.FileHandler, **handler_kwargs):\n fh = handler(file_path, **handler_kwargs)\n fh.setLevel(level)\n fh.setFormatter(logging.Formatter(log_format))\n return fh\n", "role": "user" }, { "content": "get_file_handler", "role": "assistant" } ]
def x(module_name=None, level=logging.INFO, stream=sys.stderr, file_path= None, log_format=log_formats.easy_read, suppress_warning=True): new_logger = logging.getLogger(module_name) if stream: new_logger.addHandler(get_stream_handler(stream, level, log_format)) elif not file_path and suppress_warning and not new_logger.handlers: new_logger.addHandler(logging.NullHandler()) if file_path: new_logger.addHandler(get_file_handler(file_path, level, log_format)) if level > 0: new_logger.setLevel(level) return new_logger
setup_logger
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(module_name=None, level=logging.INFO, stream=sys.stderr, file_path=\n None, log_format=log_formats.easy_read, suppress_warning=True):\n new_logger = logging.getLogger(module_name)\n if stream:\n new_logger.addHandler(get_stream_handler(stream, level, log_format))\n elif not file_path and suppress_warning and not new_logger.handlers:\n new_logger.addHandler(logging.NullHandler())\n if file_path:\n new_logger.addHandler(get_file_handler(file_path, level, log_format))\n if level > 0:\n new_logger.setLevel(level)\n return new_logger\n", "role": "user" }, { "content": "setup_logger", "role": "assistant" } ]
def x(logger=None, stream=sys.stderr, level=logging.INFO, log_format= log_formats.easy_read): if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) logger.addHandler(get_stream_handler(stream, level, log_format))
add_stream_handler
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger=None, stream=sys.stderr, level=logging.INFO, log_format=\n log_formats.easy_read):\n if not isinstance(logger, logging.Logger):\n logger = logging.getLogger(logger)\n logger.addHandler(get_stream_handler(stream, level, log_format))\n", "role": "user" }, { "content": "add_stream_handler", "role": "assistant" } ]
def x(logger=None, file_path='out.log', level=logging.INFO, log_format= log_formats.easy_read): if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) logger.addHandler(get_file_handler(file_path, level, log_format))
add_file_handler
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger=None, file_path='out.log', level=logging.INFO, log_format=\n log_formats.easy_read):\n if not isinstance(logger, logging.Logger):\n logger = logging.getLogger(logger)\n logger.addHandler(get_file_handler(file_path, level, log_format))\n", "role": "user" }, { "content": "add_file_handler", "role": "assistant" } ]
def x(logger=None, file_path='out.log', level=logging.INFO, log_format= log_formats.easy_read, max_bytes=10 * sizes.mb, backup_count=5, ** handler_kwargs): if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) logger.addHandler(get_file_handler(file_path, level, log_format, handler=RotatingFileHandler, maxBytes=max_bytes, backupCount= backup_count, **handler_kwargs))
add_rotating_file_handler
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger=None, file_path='out.log', level=logging.INFO, log_format=\n log_formats.easy_read, max_bytes=10 * sizes.mb, backup_count=5, **\n handler_kwargs):\n if not isinstance(logger, logging.Logger):\n logger = logging.getLogger(logger)\n logger.addHandler(get_file_handler(file_path, level, log_format,\n handler=RotatingFileHandler, maxBytes=max_bytes, backupCount=\n backup_count, **handler_kwargs))\n", "role": "user" }, { "content": "add_rotating_file_handler", "role": "assistant" } ]
def x(logger=None, file_path='out.log', level=logging.INFO, log_format= log_formats.easy_read, when='w0', interval=1, backup_count=5, ** handler_kwargs): if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) logger.addHandler(get_file_handler(file_path, level, log_format, handler=TimedRotatingFileHandler, when=when, interval=interval, backupCount=backup_count, **handler_kwargs))
add_timed_rotating_file_handler
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger=None, file_path='out.log', level=logging.INFO, log_format=\n log_formats.easy_read, when='w0', interval=1, backup_count=5, **\n handler_kwargs):\n if not isinstance(logger, logging.Logger):\n logger = logging.getLogger(logger)\n logger.addHandler(get_file_handler(file_path, level, log_format,\n handler=TimedRotatingFileHandler, when=when, interval=interval,\n backupCount=backup_count, **handler_kwargs))\n", "role": "user" }, { "content": "add_timed_rotating_file_handler", "role": "assistant" } ]
def x(logger=None): if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) new_handlers = [] for handler in logger.handlers: if isinstance(handler, logging.FileHandler) or isinstance(handler, logging.NullHandler) or isinstance(handler, logging.Handler ) and not isinstance(handler, logging.StreamHandler): new_handlers.append(handler) logger.handlers = new_handlers
remove_stream_handlers
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger=None):\n if not isinstance(logger, logging.Logger):\n logger = logging.getLogger(logger)\n new_handlers = []\n for handler in logger.handlers:\n if isinstance(handler, logging.FileHandler) or isinstance(handler,\n logging.NullHandler) or isinstance(handler, logging.Handler\n ) and not isinstance(handler, logging.StreamHandler):\n new_handlers.append(handler)\n logger.handlers = new_handlers\n", "role": "user" }, { "content": "remove_stream_handlers", "role": "assistant" } ]
def x(logger=None): if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) new_handlers = [] for handler in logger.handlers: if isinstance(handler, logging.FileHandler): handler.close() else: new_handlers.append(handler) logger.handlers = new_handlers
remove_file_handlers
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger=None):\n if not isinstance(logger, logging.Logger):\n logger = logging.getLogger(logger)\n new_handlers = []\n for handler in logger.handlers:\n if isinstance(handler, logging.FileHandler):\n handler.close()\n else:\n new_handlers.append(handler)\n logger.handlers = new_handlers\n", "role": "user" }, { "content": "remove_file_handlers", "role": "assistant" } ]
def x(logger=None): if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) remove_file_handlers(logger) logger.handlers = []
remove_all_handlers
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger=None):\n if not isinstance(logger, logging.Logger):\n logger = logging.getLogger(logger)\n remove_file_handlers(logger)\n logger.handlers = []\n", "role": "user" }, { "content": "remove_all_handlers", "role": "assistant" } ]
def x(logger=None, level=logging.DEBUG): if not isinstance(logger, logging.Logger): logger = logging.getLogger(logger) logger.setLevel(level) for handler in logger.handlers: handler.level = level
change_logger_levels
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger=None, level=logging.DEBUG):\n if not isinstance(logger, logging.Logger):\n logger = logging.getLogger(logger)\n logger.setLevel(level)\n for handler in logger.handlers:\n handler.level = level\n", "role": "user" }, { "content": "change_logger_levels", "role": "assistant" } ]
def x(hide_children=False, hide_reusables=False): return [logger for logger in logging.Logger.manager.loggerDict.keys() if not (hide_reusables and 'reusables' in logger) and not ( hide_children and '.' in logger)]
get_registered_loggers
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(hide_children=False, hide_reusables=False):\n return [logger for logger in logging.Logger.manager.loggerDict.keys() if\n not (hide_reusables and 'reusables' in logger) and not (\n hide_children and '.' in logger)]\n", "role": "user" }, { "content": "get_registered_loggers", "role": "assistant" } ]
def x(max_retries=10, wait=0, alt_return='-no_alt_return-', exception= Exception, error_text=None): def x(func): @wraps(func) def x(*args, **kwargs): msg = (error_text if error_text else "No result was unique for function '{func}'") if not error_text: msg = _add_args(msg, *args, **kwargs) for i in range(max_retries): value = func(*args, **kwargs) if value not in unique_cache[func.__name__]: unique_cache[func.__name__].append(value) return value if wait: time.sleep(wait) else: if alt_return != '-no_alt_return-': return alt_return raise exception(msg.format(func=func.__name__, args=args, kwargs=kwargs)) return wrapper return func_wrap
unique
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(max_retries=10, wait=0, alt_return='-no_alt_return-', exception=\n Exception, error_text=None):\n\n def x(func):\n\n @wraps(func)\n def x(*args, **kwargs):\n msg = (error_text if error_text else\n \"No result was unique for function '{func}'\")\n if not error_text:\n msg = _add_args(msg, *args, **kwargs)\n for i in range(max_retries):\n value = func(*args, **kwargs)\n if value not in unique_cache[func.__name__]:\n unique_cache[func.__name__].append(value)\n return value\n if wait:\n time.sleep(wait)\n else:\n if alt_return != '-no_alt_return-':\n return alt_return\n raise exception(msg.format(func=func.__name__, args=args,\n kwargs=kwargs))\n return wrapper\n return func_wrap\n", "role": "user" }, { "content": "unique", "role": "assistant" } ]
def x(lock=g_lock): def x(func): @wraps(func) def x(*args, **kwargs): with lock: return func(*args, **kwargs) return wrapper return func_wrapper
lock_it
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(lock=g_lock):\n\n def x(func):\n\n @wraps(func)\n def x(*args, **kwargs):\n with lock:\n return func(*args, **kwargs)\n return wrapper\n return func_wrapper\n", "role": "user" }, { "content": "lock_it", "role": "assistant" } ]
def x(log=None, message=None, append=None): def x(func): @wraps(func) def x(*args, **kwargs): msg = (message if message else "Function '{func}' took a total of {seconds} seconds") if not message: msg = _add_args(msg, *args, **kwargs) time_func = time.perf_counter if python_version >= (3, 3 ) else time.time start_time = time_func() try: return func(*args, **kwargs) finally: total_time = time_func() - start_time time_string = msg.format(func=func.__name__, seconds= total_time, args=args, kwargs=kwargs) if log: my_logger = logging.getLogger(log) if isinstance(log, str ) else logger my_logger.info(time_string) else: print(time_string) if isinstance(append, list): append.append(total_time) return wrapper return func_wrapper
time_it
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(log=None, message=None, append=None):\n\n def x(func):\n\n @wraps(func)\n def x(*args, **kwargs):\n msg = (message if message else\n \"Function '{func}' took a total of {seconds} seconds\")\n if not message:\n msg = _add_args(msg, *args, **kwargs)\n time_func = time.perf_counter if python_version >= (3, 3\n ) else time.time\n start_time = time_func()\n try:\n return func(*args, **kwargs)\n finally:\n total_time = time_func() - start_time\n time_string = msg.format(func=func.__name__, seconds=\n total_time, args=args, kwargs=kwargs)\n if log:\n my_logger = logging.getLogger(log) if isinstance(log, str\n ) else logger\n my_logger.info(time_string)\n else:\n print(time_string)\n if isinstance(append, list):\n append.append(total_time)\n return wrapper\n return func_wrapper\n", "role": "user" }, { "content": "time_it", "role": "assistant" } ]
def x(queue=g_queue, **put_args): def x(func): @wraps(func) def x(*args, **kwargs): queue.put(func(*args, **kwargs), **put_args) return wrapper return func_wrapper
queue_it
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(queue=g_queue, **put_args):\n\n def x(func):\n\n @wraps(func)\n def x(*args, **kwargs):\n queue.put(func(*args, **kwargs), **put_args)\n return wrapper\n return func_wrapper\n", "role": "user" }, { "content": "queue_it", "role": "assistant" } ]
def x(log='reusables', message=None, exceptions=(Exception,), level=logging .ERROR, show_traceback=True): def x(func): @wraps(func) def x(*args, **kwargs): msg = message if message else "Exception in '{func}': {err}" if not message: msg = _add_args(msg, *args, **kwargs) try: return func(*args, **kwargs) except exceptions as err: my_logger = logging.getLogger(log) if isinstance(log, str ) else log my_logger.log(level, msg.format(func=func.__name__, err=str (err), args=args, kwargs=kwargs), exc_info=show_traceback) raise err return wrapper return func_wrapper
log_exception
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(log='reusables', message=None, exceptions=(Exception,), level=logging\n .ERROR, show_traceback=True):\n\n def x(func):\n\n @wraps(func)\n def x(*args, **kwargs):\n msg = message if message else \"Exception in '{func}': {err}\"\n if not message:\n msg = _add_args(msg, *args, **kwargs)\n try:\n return func(*args, **kwargs)\n except exceptions as err:\n my_logger = logging.getLogger(log) if isinstance(log, str\n ) else log\n my_logger.log(level, msg.format(func=func.__name__, err=str\n (err), args=args, kwargs=kwargs), exc_info=show_traceback)\n raise err\n return wrapper\n return func_wrapper\n", "role": "user" }, { "content": "log_exception", "role": "assistant" } ]
def x(exceptions=(Exception,), default=None, handler=None): def x(func): @wraps(func) def x(*args, **kwargs): try: return func(*args, **kwargs) except exceptions as err: if handler: return handler(err, func, *args, **kwargs) return default return wrapper return func_wrapper
catch_it
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(exceptions=(Exception,), default=None, handler=None):\n\n def x(func):\n\n @wraps(func)\n def x(*args, **kwargs):\n try:\n return func(*args, **kwargs)\n except exceptions as err:\n if handler:\n return handler(err, func, *args, **kwargs)\n return default\n return wrapper\n return func_wrapper\n", "role": "user" }, { "content": "catch_it", "role": "assistant" } ]
def x(exceptions=(Exception,), tries=10, wait=0, handler=None, raised_exception=ReusablesError, raised_message=None): def x(func): @wraps(func) def x(*args, **kwargs): msg = (raised_message if raised_message else "Max retries exceeded for function '{func}'") if not raised_message: msg = _add_args(msg, *args, **kwargs) try: result = func(*args, **kwargs) except exceptions: if tries: if wait: time.sleep(wait) return retry_it(exceptions=exceptions, tries=tries - 1, handler=handler, wait=wait)(func)(*args, **kwargs) if raised_exception: exc = raised_exception(msg.format(func=func.__name__, args=args, kwargs=kwargs)) exc.__cause__ = None raise exc else: if handler: if not handler(result): return retry_it(exceptions=exceptions, tries=tries - 1, handler=handler, wait=wait)(func)(*args, ** kwargs) return result return wrapper return func_wrapper
retry_it
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(exceptions=(Exception,), tries=10, wait=0, handler=None,\n raised_exception=ReusablesError, raised_message=None):\n\n def x(func):\n\n @wraps(func)\n def x(*args, **kwargs):\n msg = (raised_message if raised_message else\n \"Max retries exceeded for function '{func}'\")\n if not raised_message:\n msg = _add_args(msg, *args, **kwargs)\n try:\n result = func(*args, **kwargs)\n except exceptions:\n if tries:\n if wait:\n time.sleep(wait)\n return retry_it(exceptions=exceptions, tries=tries - 1,\n handler=handler, wait=wait)(func)(*args, **kwargs)\n if raised_exception:\n exc = raised_exception(msg.format(func=func.__name__,\n args=args, kwargs=kwargs))\n exc.__cause__ = None\n raise exc\n else:\n if handler:\n if not handler(result):\n return retry_it(exceptions=exceptions, tries=tries -\n 1, handler=handler, wait=wait)(func)(*args, **\n kwargs)\n return result\n return wrapper\n return func_wrapper\n", "role": "user" }, { "content": "retry_it", "role": "assistant" } ]
def x(archive_file, path='.', delete_on_success=False, enable_rar=False): if not os.path.exists(archive_file) or not os.path.getsize(archive_file): logger.error('File {0} unextractable'.format(archive_file)) raise OSError('File does not exist or has zero size') arch = None if zipfile.is_zipfile(archive_file): logger.debug('File {0} detected as a zip file'.format(archive_file)) arch = zipfile.ZipFile(archive_file) elif tarfile.is_tarfile(archive_file): logger.debug('File {0} detected as a tar file'.format(archive_file)) arch = tarfile.open(archive_file) elif enable_rar: import rarfile if rarfile.is_rarfile(archive_file): logger.debug('File {0} detected as a rar file'.format(archive_file) ) arch = rarfile.RarFile(archive_file) if not arch: raise TypeError('File is not a known archive') logger.debug('Extracting files to {0}'.format(path)) try: arch.extractall(path=path) finally: arch.close() if delete_on_success: logger.debug('Archive {0} will now be deleted'.format(archive_file)) os.unlink(archive_file) return os.path.abspath(path)
extract
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(archive_file, path='.', delete_on_success=False, enable_rar=False):\n if not os.path.exists(archive_file) or not os.path.getsize(archive_file):\n logger.error('File {0} unextractable'.format(archive_file))\n raise OSError('File does not exist or has zero size')\n arch = None\n if zipfile.is_zipfile(archive_file):\n logger.debug('File {0} detected as a zip file'.format(archive_file))\n arch = zipfile.ZipFile(archive_file)\n elif tarfile.is_tarfile(archive_file):\n logger.debug('File {0} detected as a tar file'.format(archive_file))\n arch = tarfile.open(archive_file)\n elif enable_rar:\n import rarfile\n if rarfile.is_rarfile(archive_file):\n logger.debug('File {0} detected as a rar file'.format(archive_file)\n )\n arch = rarfile.RarFile(archive_file)\n if not arch:\n raise TypeError('File is not a known archive')\n logger.debug('Extracting files to {0}'.format(path))\n try:\n arch.extractall(path=path)\n finally:\n arch.close()\n if delete_on_success:\n logger.debug('Archive {0} will now be deleted'.format(archive_file))\n os.unlink(archive_file)\n return os.path.abspath(path)\n", "role": "user" }, { "content": "extract", "role": "assistant" } ]
def x(files_to_archive, name='archive.zip', archive_type=None, overwrite= False, store=False, depth=None, err_non_exist=True, allow_zip_64=True, **tarfile_kwargs): if not isinstance(files_to_archive, (list, tuple)): files_to_archive = [files_to_archive] if not archive_type: if name.lower().endswith('zip'): archive_type = 'zip' elif name.lower().endswith('gz'): archive_type = 'gz' elif name.lower().endswith('z2'): archive_type = 'bz2' elif name.lower().endswith('tar'): archive_type = 'tar' else: err_msg = 'Could not determine archive type based off {0}'.format( name) logger.error(err_msg) raise ValueError(err_msg) logger.debug('{0} file detected for {1}'.format(archive_type, name)) elif archive_type not in ('tar', 'gz', 'bz2', 'zip'): err_msg = 'archive_type must be zip, gz, bz2, or gz, was {0}'.format( archive_type) logger.error(err_msg) raise ValueError(err_msg) if not overwrite and os.path.exists(name): err_msg = 'File {0} exists and overwrite not specified'.format(name) logger.error(err_msg) raise OSError(err_msg) if archive_type == 'zip': arch = zipfile.ZipFile(name, 'w', zipfile.ZIP_STORED if store else zipfile.ZIP_DEFLATED, allowZip64=allow_zip_64) write = arch.write elif archive_type in ('tar', 'gz', 'bz2'): mode = archive_type if archive_type != 'tar' else '' arch = tarfile.open(name, 'w:{0}'.format(mode), **tarfile_kwargs) write = arch.add else: raise ValueError('archive_type must be zip, gz, bz2, or gz') try: for file_path in files_to_archive: if os.path.isfile(file_path): if err_non_exist and not os.path.exists(file_path): raise OSError('File {0} does not exist'.format(file_path)) write(file_path) elif os.path.isdir(file_path): for nf in find_files(file_path, abspath=False, depth=depth): write(nf) except (Exception, KeyboardInterrupt) as err: logger.exception('Could not archive {0}'.format(files_to_archive)) try: arch.close() finally: os.unlink(name) raise err else: arch.close() return os.path.abspath(name)
archive
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(files_to_archive, name='archive.zip', archive_type=None, overwrite=\n False, store=False, depth=None, err_non_exist=True, allow_zip_64=True,\n **tarfile_kwargs):\n if not isinstance(files_to_archive, (list, tuple)):\n files_to_archive = [files_to_archive]\n if not archive_type:\n if name.lower().endswith('zip'):\n archive_type = 'zip'\n elif name.lower().endswith('gz'):\n archive_type = 'gz'\n elif name.lower().endswith('z2'):\n archive_type = 'bz2'\n elif name.lower().endswith('tar'):\n archive_type = 'tar'\n else:\n err_msg = 'Could not determine archive type based off {0}'.format(\n name)\n logger.error(err_msg)\n raise ValueError(err_msg)\n logger.debug('{0} file detected for {1}'.format(archive_type, name))\n elif archive_type not in ('tar', 'gz', 'bz2', 'zip'):\n err_msg = 'archive_type must be zip, gz, bz2, or gz, was {0}'.format(\n archive_type)\n logger.error(err_msg)\n raise ValueError(err_msg)\n if not overwrite and os.path.exists(name):\n err_msg = 'File {0} exists and overwrite not specified'.format(name)\n logger.error(err_msg)\n raise OSError(err_msg)\n if archive_type == 'zip':\n arch = zipfile.ZipFile(name, 'w', zipfile.ZIP_STORED if store else\n zipfile.ZIP_DEFLATED, allowZip64=allow_zip_64)\n write = arch.write\n elif archive_type in ('tar', 'gz', 'bz2'):\n mode = archive_type if archive_type != 'tar' else ''\n arch = tarfile.open(name, 'w:{0}'.format(mode), **tarfile_kwargs)\n write = arch.add\n else:\n raise ValueError('archive_type must be zip, gz, bz2, or gz')\n try:\n for file_path in files_to_archive:\n if os.path.isfile(file_path):\n if err_non_exist and not os.path.exists(file_path):\n raise OSError('File {0} does not exist'.format(file_path))\n write(file_path)\n elif os.path.isdir(file_path):\n for nf in find_files(file_path, abspath=False, depth=depth):\n write(nf)\n except (Exception, KeyboardInterrupt) as err:\n logger.exception('Could not archive {0}'.format(files_to_archive))\n try:\n arch.close()\n finally:\n os.unlink(name)\n raise err\n else:\n arch.close()\n return os.path.abspath(name)\n", "role": "user" }, { "content": "archive", "role": "assistant" } ]
def x(my_list, csv_file): if PY3: csv_handler = open(csv_file, 'w', newline='') else: csv_handler = open(csv_file, 'wb') try: writer = csv.writer(csv_handler, delimiter=',', quoting=csv.QUOTE_ALL) writer.writerows(my_list) finally: csv_handler.close()
list_to_csv
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(my_list, csv_file):\n if PY3:\n csv_handler = open(csv_file, 'w', newline='')\n else:\n csv_handler = open(csv_file, 'wb')\n try:\n writer = csv.writer(csv_handler, delimiter=',', quoting=csv.QUOTE_ALL)\n writer.writerows(my_list)\n finally:\n csv_handler.close()\n", "role": "user" }, { "content": "list_to_csv", "role": "assistant" } ]
def x(csv_file): with open(csv_file, 'r' if PY3 else 'rb') as f: return list(csv.reader(f))
csv_to_list
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(csv_file):\n with open(csv_file, 'r' if PY3 else 'rb') as f:\n return list(csv.reader(f))\n", "role": "user" }, { "content": "csv_to_list", "role": "assistant" } ]
def x(json_file, **kwargs): with open(json_file) as f: return json.load(f, **kwargs)
load_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(json_file, **kwargs):\n with open(json_file) as f:\n return json.load(f, **kwargs)\n", "role": "user" }, { "content": "load_json", "role": "assistant" } ]
def x(data, json_file, indent=4, **kwargs): with open(json_file, 'w') as f: json.dump(data, f, indent=indent, **kwargs)
save_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(data, json_file, indent=4, **kwargs):\n with open(json_file, 'w') as f:\n json.dump(data, f, indent=indent, **kwargs)\n", "role": "user" }, { "content": "save_json", "role": "assistant" } ]
def x(config_file=None, auto_find=False, verify=True, **cfg_options): if not config_file: config_file = [] cfg_parser = ConfigParser.ConfigParser(**cfg_options) cfg_files = [] if config_file: if not isinstance(config_file, (list, tuple)): if isinstance(config_file, str): cfg_files.append(config_file) else: raise TypeError('config_files must be a list or a string') else: cfg_files.extend(config_file) if auto_find: cfg_files.extend(find_files_list(current_root if isinstance( auto_find, bool) else auto_find, ext=('.cfg', '.config', '.ini'))) logger.info('config files to be used: {0}'.format(cfg_files)) if verify: cfg_parser.read([cfg for cfg in cfg_files if os.path.exists(cfg)]) else: cfg_parser.read(cfg_files) return dict((section, dict(cfg_parser.items(section))) for section in cfg_parser.sections())
config_dict
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(config_file=None, auto_find=False, verify=True, **cfg_options):\n if not config_file:\n config_file = []\n cfg_parser = ConfigParser.ConfigParser(**cfg_options)\n cfg_files = []\n if config_file:\n if not isinstance(config_file, (list, tuple)):\n if isinstance(config_file, str):\n cfg_files.append(config_file)\n else:\n raise TypeError('config_files must be a list or a string')\n else:\n cfg_files.extend(config_file)\n if auto_find:\n cfg_files.extend(find_files_list(current_root if isinstance(\n auto_find, bool) else auto_find, ext=('.cfg', '.config', '.ini')))\n logger.info('config files to be used: {0}'.format(cfg_files))\n if verify:\n cfg_parser.read([cfg for cfg in cfg_files if os.path.exists(cfg)])\n else:\n cfg_parser.read(cfg_files)\n return dict((section, dict(cfg_parser.items(section))) for section in\n cfg_parser.sections())\n", "role": "user" }, { "content": "config_dict", "role": "assistant" } ]
def x(config_file=None, auto_find=False, verify=True, **cfg_options): return ConfigNamespace(**config_dict(config_file, auto_find, verify, ** cfg_options))
config_namespace
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(config_file=None, auto_find=False, verify=True, **cfg_options):\n return ConfigNamespace(**config_dict(config_file, auto_find, verify, **\n cfg_options))\n", "role": "user" }, { "content": "config_namespace", "role": "assistant" } ]
def x(directory, enable_scandir=False, **kwargs): walk = os.walk if python_version < (3, 5) and enable_scandir: import scandir walk = scandir.walk return walk(directory, **kwargs)
walk
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(directory, enable_scandir=False, **kwargs):\n walk = os.walk\n if python_version < (3, 5) and enable_scandir:\n import scandir\n walk = scandir.walk\n return walk(directory, **kwargs)\n", "role": "user" }, { "content": "walk", "role": "assistant" } ]
def x(directory, enable_scandir=False): if not os.path.exists(directory): raise OSError('Directory does not exist') if not os.path.isdir(directory): raise OSError('Path is not a directory') full_list = [] for root, dirs, files in _walk(directory, enable_scandir=enable_scandir): full_list.extend([(os.path.join(root, d).lstrip(directory) + os.sep ) for d in dirs]) tree = {os.path.basename(directory): {}} for item in full_list: separated = item.split(os.sep) is_dir = separated[-1:] == [''] if is_dir: separated = separated[:-1] parent = tree[os.path.basename(directory)] for index, path in enumerate(separated): if path in parent: parent = parent[path] continue else: parent[path] = dict() parent = parent[path] return tree
os_tree
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(directory, enable_scandir=False):\n if not os.path.exists(directory):\n raise OSError('Directory does not exist')\n if not os.path.isdir(directory):\n raise OSError('Path is not a directory')\n full_list = []\n for root, dirs, files in _walk(directory, enable_scandir=enable_scandir):\n full_list.extend([(os.path.join(root, d).lstrip(directory) + os.sep\n ) for d in dirs])\n tree = {os.path.basename(directory): {}}\n for item in full_list:\n separated = item.split(os.sep)\n is_dir = separated[-1:] == ['']\n if is_dir:\n separated = separated[:-1]\n parent = tree[os.path.basename(directory)]\n for index, path in enumerate(separated):\n if path in parent:\n parent = parent[path]\n continue\n else:\n parent[path] = dict()\n parent = parent[path]\n return tree\n", "role": "user" }, { "content": "os_tree", "role": "assistant" } ]
def x(path, hash_type='md5', block_size=65536, hex_digest=True): hashed = hashlib.new(hash_type) with open(path, 'rb') as infile: buf = infile.read(block_size) while len(buf) > 0: hashed.update(buf) buf = infile.read(block_size) return hashed.hexdigest() if hex_digest else hashed.digest()
file_hash
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(path, hash_type='md5', block_size=65536, hex_digest=True):\n hashed = hashlib.new(hash_type)\n with open(path, 'rb') as infile:\n buf = infile.read(block_size)\n while len(buf) > 0:\n hashed.update(buf)\n buf = infile.read(block_size)\n return hashed.hexdigest() if hex_digest else hashed.digest()\n", "role": "user" }, { "content": "file_hash", "role": "assistant" } ]
def x(directory='.', ext=None, name=None, match_case=False, disable_glob=
    False, depth=None, abspath=False, enable_scandir=False):
    if ext or not name:
        disable_glob = True
    if not disable_glob:
        disable_glob = not glob.has_magic(name)
    if ext and isinstance(ext, str):
        ext = [ext]
    elif ext and not isinstance(ext, (list, tuple)):
        raise TypeError('extension must be either one extension or a list')
    if abspath:
        directory = os.path.abspath(directory)
    starting_depth = directory.count(os.sep)
    for root, dirs, files in _walk(directory, enable_scandir=enable_scandir):
        if depth and root.count(os.sep) - starting_depth >= depth:
            continue
        if not disable_glob:
            if match_case:
                raise ValueError(
                    'Cannot use glob and match case, please either disable glob or not set match_case'
                    )
            glob_generator = glob.iglob(os.path.join(root, name))
            for item in glob_generator:
                yield item
            continue
        for file_name in files:
            if ext:
                for end in ext:
                    if file_name.lower().endswith(end.lower() if not
                        match_case else end):
                        break
                else:
                    continue
            if name:
                if match_case and name not in file_name:
                    continue
                elif name.lower() not in file_name.lower():
                    continue
            yield os.path.join(root, file_name)
find_files
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(directory='.', ext=None, name=None, match_case=False, disable_glob=\n False, depth=None, abspath=False, enable_scandir=False):\n if ext or not name:\n disable_glob = True\n if not disable_glob:\n disable_glob = not glob.has_magic(name)\n if ext and isinstance(ext, str):\n ext = [ext]\n elif ext and not isinstance(ext, (list, tuple)):\n raise TypeError('extension must be either one extension or a list')\n if abspath:\n directory = os.path.abspath(directory)\n starting_depth = directory.count(os.sep)\n for root, dirs, files in _walk(directory, enable_scandir=enable_scandir):\n if depth and root.count(os.sep) - starting_depth >= depth:\n continue\n if not disable_glob:\n if match_case:\n raise ValueError(\n 'Cannot use glob and match case, please either disable glob or not set match_case'\n )\n glob_generator = glob.iglob(os.path.join(root, name))\n for item in glob_generator:\n yield item\n continue\n for file_name in files:\n if ext:\n for end in ext:\n if file_name.lower().endswith(end.lower() if not\n match_case else end):\n break\n else:\n continue\n if name:\n if match_case and name not in file_name:\n continue\n elif name.lower() not in file_name.lower():\n continue\n yield os.path.join(root, file_name)\n", "role": "user" }, { "content": "find_files", "role": "assistant" } ]
def x(root_directory, dry_run=False, ignore_errors=True, enable_scandir=False):
    listdir = os.listdir
    if python_version < (3, 5) and enable_scandir:
        import scandir as _scandir

        def x(directory):
            return list(_scandir.scandir(directory))
    directory_list = []
    for root, directories, files in _walk(root_directory, enable_scandir=
        enable_scandir, topdown=False):
        if not directories and not files and os.path.exists(root
            ) and root != root_directory and os.path.isdir(root):
            directory_list.append(root)
            if not dry_run:
                try:
                    os.rmdir(root)
                except OSError as err:
                    if ignore_errors:
                        logger.info('{0} could not be deleted'.format(root))
                    else:
                        raise err
        elif directories and not files:
            for directory in directories:
                directory = join_paths(root, directory, strict=True)
                if os.path.exists(directory) and os.path.isdir(directory
                    ) and not listdir(directory):
                    directory_list.append(directory)
                    if not dry_run:
                        try:
                            os.rmdir(directory)
                        except OSError as err:
                            if ignore_errors:
                                logger.info('{0} could not be deleted'.
                                    format(directory))
                            else:
                                raise err
    return directory_list
remove_empty_directories
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(root_directory, dry_run=False, ignore_errors=True, enable_scandir=False):\n listdir = os.listdir\n if python_version < (3, 5) and enable_scandir:\n import scandir as _scandir\n\n def x(directory):\n return list(_scandir.scandir(directory))\n directory_list = []\n for root, directories, files in _walk(root_directory, enable_scandir=\n enable_scandir, topdown=False):\n if not directories and not files and os.path.exists(root\n ) and root != root_directory and os.path.isdir(root):\n directory_list.append(root)\n if not dry_run:\n try:\n os.rmdir(root)\n except OSError as err:\n if ignore_errors:\n logger.info('{0} could not be deleted'.format(root))\n else:\n raise err\n elif directories and not files:\n for directory in directories:\n directory = join_paths(root, directory, strict=True)\n if os.path.exists(directory) and os.path.isdir(directory\n ) and not listdir(directory):\n directory_list.append(directory)\n if not dry_run:\n try:\n os.rmdir(directory)\n except OSError as err:\n if ignore_errors:\n logger.info('{0} could not be deleted'.\n format(directory))\n else:\n raise err\n return directory_list\n", "role": "user" }, { "content": "remove_empty_directories", "role": "assistant" } ]
def x(root_directory, dry_run=False, ignore_errors=True, enable_scandir=False):
    file_list = []
    for root, directories, files in _walk(root_directory, enable_scandir=
        enable_scandir):
        for file_name in files:
            file_path = join_paths(root, file_name, strict=True)
            if os.path.isfile(file_path) and not os.path.getsize(file_path):
                if file_hash(file_path) == variables.hashes.empty_file.md5:
                    file_list.append(file_path)
    file_list = sorted(set(file_list))
    if not dry_run:
        for afile in file_list:
            try:
                os.unlink(afile)
            except OSError as err:
                if ignore_errors:
                    logger.info('File {0} could not be deleted'.format(afile))
                else:
                    raise err
    return file_list
remove_empty_files
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(root_directory, dry_run=False, ignore_errors=True, enable_scandir=False):\n file_list = []\n for root, directories, files in _walk(root_directory, enable_scandir=\n enable_scandir):\n for file_name in files:\n file_path = join_paths(root, file_name, strict=True)\n if os.path.isfile(file_path) and not os.path.getsize(file_path):\n if file_hash(file_path) == variables.hashes.empty_file.md5:\n file_list.append(file_path)\n file_list = sorted(set(file_list))\n if not dry_run:\n for afile in file_list:\n try:\n os.unlink(afile)\n except OSError as err:\n if ignore_errors:\n logger.info('File {0} could not be deleted'.format(afile))\n else:\n raise err\n return file_list\n", "role": "user" }, { "content": "remove_empty_files", "role": "assistant" } ]
def x(file_path, directory='.', enable_scandir=False):
    size = os.path.getsize(file_path)
    if size == 0:
        for empty_file in remove_empty_files(directory, dry_run=True):
            yield empty_file
    else:
        with open(file_path, 'rb') as f:
            first_twenty = f.read(20)
        file_sha256 = file_hash(file_path, 'sha256')
        for root, directories, files in _walk(directory, enable_scandir=
            enable_scandir):
            for each_file in files:
                test_file = os.path.join(root, each_file)
                if os.path.getsize(test_file) == size:
                    try:
                        with open(test_file, 'rb') as f:
                            test_first_twenty = f.read(20)
                    except OSError:
                        logger.warning('Could not open file to compare - {0}'
                            .format(test_file))
                    else:
                        if first_twenty == test_first_twenty:
                            if file_hash(test_file, 'sha256') == file_sha256:
                                yield os.path.abspath(test_file)
dup_finder
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(file_path, directory='.', enable_scandir=False):\n size = os.path.getsize(file_path)\n if size == 0:\n for empty_file in remove_empty_files(directory, dry_run=True):\n yield empty_file\n else:\n with open(file_path, 'rb') as f:\n first_twenty = f.read(20)\n file_sha256 = file_hash(file_path, 'sha256')\n for root, directories, files in _walk(directory, enable_scandir=\n enable_scandir):\n for each_file in files:\n test_file = os.path.join(root, each_file)\n if os.path.getsize(test_file) == size:\n try:\n with open(test_file, 'rb') as f:\n test_first_twenty = f.read(20)\n except OSError:\n logger.warning('Could not open file to compare - {0}'\n .format(test_file))\n else:\n if first_twenty == test_first_twenty:\n if file_hash(test_file, 'sha256') == file_sha256:\n yield os.path.abspath(test_file)\n", "role": "user" }, { "content": "dup_finder", "role": "assistant" } ]