Columns:
  source_code    string (lengths 45 to 7.24k)
  function_name  string (lengths 1 to 111)
  messages       list
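For orientation, here is a minimal sketch of how rows with this schema could be loaded and inspected with the Hugging Face datasets library. The repository id below is a placeholder, not the actual dataset path; the field access simply mirrors the three columns listed above.

# Minimal sketch (assumption: the dataset is hosted on the Hugging Face Hub;
# "user/function-naming" is a hypothetical id, not the real repository path).
from datasets import load_dataset

ds = load_dataset("user/function-naming", split="train")

row = ds[0]
print(row["function_name"])            # target name, e.g. "get_download_url"
print(row["source_code"][:120])        # function body with its name replaced by `x`
for message in row["messages"]:        # chat-style triple: system, user, assistant
    print(message["role"], "->", message["content"][:60])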
def x(self, duration=None, preauthenticated=False, filename=None, project=
    None, **kwargs):
    args = {'preauthenticated': preauthenticated}
    if duration is not None:
        args['duration'] = duration
    if filename is not None:
        args['filename'] = filename
    if project is None and 'DX_JOB_ID' not in os.environ:
        project_from_handler = self.get_proj_id()
        if project_from_handler and object_exists_in_project(self.get_id(),
                project_from_handler):
            project = project_from_handler
    if project is not None and project is not DXFile.NO_PROJECT_HINT:
        args['project'] = project
    if '_DX_DUMP_BILLED_PROJECT' in os.environ:
        with open(os.environ['_DX_DUMP_BILLED_PROJECT'], 'w') as fd:
            if project is not None and project != DXFile.NO_PROJECT_HINT:
                fd.write(project)
    with self._url_download_mutex:
        if (self._download_url is None or
                self._download_url_expires < time.time()):
            if 'timeout' not in kwargs:
                kwargs['timeout'] = FILE_REQUEST_TIMEOUT
            resp = dxpy.api.file_download(self._dxid, args, **kwargs)
            self._download_url = resp['url']
            self._download_url_headers = _validate_headers(resp.get(
                'headers', {}))
            if preauthenticated:
                self._download_url_expires = resp['expires'] / 1000 - 60
            else:
                self._download_url_expires = 32503680000
        retval_download_url = self._download_url
        retval_download_url_headers = copy.copy(self._download_url_headers)
    return retval_download_url, retval_download_url_headers
get_download_url
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, duration=None, preauthenticated=False, filename=None, project=\n None, **kwargs):\n args = {'preauthenticated': preauthenticated}\n if duration is not None:\n args['duration'] = duration\n if filename is not None:\n args['filename'] = filename\n if project is None and 'DX_JOB_ID' not in os.environ:\n project_from_handler = self.get_proj_id()\n if project_from_handler and object_exists_in_project(self.get_id(),\n project_from_handler):\n project = project_from_handler\n if project is not None and project is not DXFile.NO_PROJECT_HINT:\n args['project'] = project\n if '_DX_DUMP_BILLED_PROJECT' in os.environ:\n with open(os.environ['_DX_DUMP_BILLED_PROJECT'], 'w') as fd:\n if project is not None and project != DXFile.NO_PROJECT_HINT:\n fd.write(project)\n with self._url_download_mutex:\n if (self._download_url is None or self._download_url_expires < time\n .time()):\n if 'timeout' not in kwargs:\n kwargs['timeout'] = FILE_REQUEST_TIMEOUT\n resp = dxpy.api.file_download(self._dxid, args, **kwargs)\n self._download_url = resp['url']\n self._download_url_headers = _validate_headers(resp.get(\n 'headers', {}))\n if preauthenticated:\n self._download_url_expires = resp['expires'] / 1000 - 60\n else:\n self._download_url_expires = 32503680000\n retval_download_url = self._download_url\n retval_download_url_headers = copy.copy(self._download_url_headers)\n return retval_download_url, retval_download_url_headers\n", "role": "user" }, { "content": "get_download_url", "role": "assistant" } ]
def x(self, length=None, use_compression=None, project=None, **kwargs):
    if self._file_length == None:
        desc = self.describe(**kwargs)
        if desc['state'] != 'closed':
            raise DXFileError(
                'Cannot read from file until it is in the closed state')
        self._file_length = int(desc['size'])
    get_first_chunk_sequentially = (self._file_length > 128 * 1024 and
        self._pos == 0 and dxpy.JOB_ID)
    if self._pos == self._file_length:
        return b''
    if length == None or length > self._file_length - self._pos:
        length = self._file_length - self._pos
    buf = self._read_buf
    buf_remaining_bytes = dxpy.utils.string_buffer_length(buf) - buf.tell()
    if length <= buf_remaining_bytes:
        self._pos += length
        return buf.read(length)
    else:
        orig_buf_pos = buf.tell()
        orig_file_pos = self._pos
        buf.seek(0, os.SEEK_END)
        self._pos += buf_remaining_bytes
        while self._pos < orig_file_pos + length:
            remaining_len = orig_file_pos + length - self._pos
            if self._response_iterator is None:
                self._request_iterator = self._generate_read_requests(
                    start_pos=self._pos, project=project, **kwargs)
            content = self._next_response_content(
                get_first_chunk_sequentially=get_first_chunk_sequentially)
            if len(content) < remaining_len:
                buf.write(content)
                self._pos += len(content)
            else:
                buf.write(content[:remaining_len])
                self._pos += remaining_len
                self._read_buf = BytesIO()
                self._read_buf.write(content[remaining_len:])
                self._read_buf.seek(0)
        buf.seek(orig_buf_pos)
        return buf.read()
read2
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, length=None, use_compression=None, project=None, **kwargs):\n if self._file_length == None:\n desc = self.describe(**kwargs)\n if desc['state'] != 'closed':\n raise DXFileError(\n 'Cannot read from file until it is in the closed state')\n self._file_length = int(desc['size'])\n get_first_chunk_sequentially = (self._file_length > 128 * 1024 and self\n ._pos == 0 and dxpy.JOB_ID)\n if self._pos == self._file_length:\n return b''\n if length == None or length > self._file_length - self._pos:\n length = self._file_length - self._pos\n buf = self._read_buf\n buf_remaining_bytes = dxpy.utils.string_buffer_length(buf) - buf.tell()\n if length <= buf_remaining_bytes:\n self._pos += length\n return buf.read(length)\n else:\n orig_buf_pos = buf.tell()\n orig_file_pos = self._pos\n buf.seek(0, os.SEEK_END)\n self._pos += buf_remaining_bytes\n while self._pos < orig_file_pos + length:\n remaining_len = orig_file_pos + length - self._pos\n if self._response_iterator is None:\n self._request_iterator = self._generate_read_requests(start_pos\n =self._pos, project=project, **kwargs)\n content = self._next_response_content(get_first_chunk_sequentially\n =get_first_chunk_sequentially)\n if len(content) < remaining_len:\n buf.write(content)\n self._pos += len(content)\n else:\n buf.write(content[:remaining_len])\n self._pos += remaining_len\n self._read_buf = BytesIO()\n self._read_buf.write(content[remaining_len:])\n self._read_buf.seek(0)\n buf.seek(orig_buf_pos)\n return buf.read()\n", "role": "user" }, { "content": "read2", "role": "assistant" } ]
def x(job_id, local_port, remote_port):
    cmd = ['dx', 'ssh', '--suppress-running-check', job_id, '-o',
        'StrictHostKeyChecking no']
    cmd += ['-f', '-L', '{0}:localhost:{1}'.format(local_port, remote_port),
        '-N']
    subprocess.check_call(cmd)
setup_ssh_tunnel
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(job_id, local_port, remote_port):\n cmd = ['dx', 'ssh', '--suppress-running-check', job_id, '-o',\n 'StrictHostKeyChecking no']\n cmd += ['-f', '-L', '{0}:localhost:{1}'.format(local_port, remote_port),\n '-N']\n subprocess.check_call(cmd)\n", "role": "user" }, { "content": "setup_ssh_tunnel", "role": "assistant" } ]
def x(job_id):
    sys.stdout.write('Waiting for server in {0} to initialize ...'.format(
        job_id))
    sys.stdout.flush()
    desc = dxpy.describe(job_id)
    while SERVER_READY_TAG not in desc['tags'] and desc['state'] != 'failed':
        time.sleep(SLEEP_PERIOD)
        sys.stdout.write('.')
        sys.stdout.flush()
        desc = dxpy.describe(job_id)
    if desc['state'] == 'failed':
        msg = RED('Error:') + ' Server failed to run.\n'
        msg += 'You may want to check the job logs by running:'
        msg += BOLD('dx watch {0}'.format(job_id))
        err_exit(msg)
poll_for_server_running
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(job_id):\n sys.stdout.write('Waiting for server in {0} to initialize ...'.format(\n job_id))\n sys.stdout.flush()\n desc = dxpy.describe(job_id)\n while SERVER_READY_TAG not in desc['tags'] and desc['state'] != 'failed':\n time.sleep(SLEEP_PERIOD)\n sys.stdout.write('.')\n sys.stdout.flush()\n desc = dxpy.describe(job_id)\n if desc['state'] == 'failed':\n msg = RED('Error:') + ' Server failed to run.\\n'\n msg += 'You may want to check the job logs by running:'\n msg += BOLD('dx watch {0}'.format(job_id))\n err_exit(msg)\n", "role": "user" }, { "content": "poll_for_server_running", "role": "assistant" } ]
def x(cmd):
    if platform == 'linux' or platform == 'linux2':
        cmd = ['xdg-open', cmd]
    elif platform == 'darwin':
        cmd = ['open', cmd]
    elif platform == 'win32':
        cmd = ['start', cmd]
    subprocess.check_call(cmd)
multi_platform_open
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cmd):\n if platform == 'linux' or platform == 'linux2':\n cmd = ['xdg-open', cmd]\n elif platform == 'darwin':\n cmd = ['open', cmd]\n elif platform == 'win32':\n cmd = ['start', cmd]\n subprocess.check_call(cmd)\n", "role": "user" }, { "content": "multi_platform_open", "role": "assistant" } ]
def x():
    notebook_apps = dxpy.find_apps(name=NOTEBOOK_APP, all_versions=True)
    versions = [str(dxpy.describe(app['id'])['version']) for app in
        notebook_apps]
    return versions
get_notebook_app_versions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n notebook_apps = dxpy.find_apps(name=NOTEBOOK_APP, all_versions=True)\n versions = [str(dxpy.describe(app['id'])['version']) for app in\n notebook_apps]\n return versions\n", "role": "user" }, { "content": "get_notebook_app_versions", "role": "assistant" } ]
def x(args, ssh_config_check):
    ssh_config_check()
    if args.only_check_config:
        return
    if args.version is not None:
        executable = get_app_from_path('app-{0}/{1}'.format(NOTEBOOK_APP,
            args.version))
        if executable is not None and 'id' in executable:
            executable = executable['id']
        else:
            msg = RED('Warning:'
                ) + ' Invalid notebook version: {0}\nValid versions are: '.format(
                args.version)
            msg += BOLD('{0}'.format(str(get_notebook_app_versions())))
            err_exit(msg)
    else:
        executable = 'app-{0}'.format(NOTEBOOK_APP)
    cmd = ['dx', 'run', executable, '-inotebook_type={0}'.format(
        args.notebook_type)]
    cmd += ['-iinput_files={0}'.format(f) for f in args.notebook_files]
    cmd += ['-itimeout={0}'.format(args.timeout), '-y', '--brief',
        '--allow-ssh', '--instance-type', args.instance_type]
    if args.spark:
        cmd += ['-iinstall_spark=true']
    if args.snapshot:
        cmd += ['-isnapshot={0}'.format(args.snapshot)]
    job_id = subprocess.check_output(cmd).strip()
    poll_for_server_running(job_id)
    if args.notebook_type in {'jupyter', 'jupyter_lab', 'jupyter_notebook'}:
        remote_port = 8888
    setup_ssh_tunnel(job_id, args.port, remote_port)
    if args.open_server:
        multi_platform_open('http://localhost:{0}'.format(args.port))
        print(
            'A web browser should have opened to connect you to your notebook.'
            )
    print(
        'If no browser appears, or if you need to reopen a browser at any point, you should be able to point your browser to http://localhost:{0}'
        .format(args.port))
run_notebook
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args, ssh_config_check):\n ssh_config_check()\n if args.only_check_config:\n return\n if args.version is not None:\n executable = get_app_from_path('app-{0}/{1}'.format(NOTEBOOK_APP,\n args.version))\n if executable is not None and 'id' in executable:\n executable = executable['id']\n else:\n msg = RED('Warning:'\n ) + ' Invalid notebook version: {0}\\nValid versions are: '.format(\n args.version)\n msg += BOLD('{0}'.format(str(get_notebook_app_versions())))\n err_exit(msg)\n else:\n executable = 'app-{0}'.format(NOTEBOOK_APP)\n cmd = ['dx', 'run', executable, '-inotebook_type={0}'.format(args.\n notebook_type)]\n cmd += ['-iinput_files={0}'.format(f) for f in args.notebook_files]\n cmd += ['-itimeout={0}'.format(args.timeout), '-y', '--brief',\n '--allow-ssh', '--instance-type', args.instance_type]\n if args.spark:\n cmd += ['-iinstall_spark=true']\n if args.snapshot:\n cmd += ['-isnapshot={0}'.format(args.snapshot)]\n job_id = subprocess.check_output(cmd).strip()\n poll_for_server_running(job_id)\n if args.notebook_type in {'jupyter', 'jupyter_lab', 'jupyter_notebook'}:\n remote_port = 8888\n setup_ssh_tunnel(job_id, args.port, remote_port)\n if args.open_server:\n multi_platform_open('http://localhost:{0}'.format(args.port))\n print(\n 'A web browser should have opened to connect you to your notebook.'\n )\n print(\n 'If no browser appears, or if you need to reopen a browser at any point, you should be able to point your browser to http://localhost:{0}'\n .format(args.port))\n", "role": "user" }, { "content": "run_notebook", "role": "assistant" } ]
def x(word, known_words):
    NWORDS = _train(known_words)
    candidates = _known([word], NWORDS) or _known(_edits1(word), NWORDS
        ) or _known_edits2(word, NWORDS) or [word]
    return max(candidates, key=NWORDS.get)
correct
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(word, known_words):\n NWORDS = _train(known_words)\n candidates = _known([word], NWORDS) or _known(_edits1(word), NWORDS\n ) or _known_edits2(word, NWORDS) or [word]\n return max(candidates, key=NWORDS.get)\n", "role": "user" }, { "content": "correct", "role": "assistant" } ]
def x(cls, job_config):
    params = job_config.input_reader_params
    shard_count = job_config.shard_count
    query_spec = cls._get_query_spec(params)
    if not property_range.should_shard_by_property_range(query_spec.filters):
        return super(ModelDatastoreInputReader, cls).split_input(job_config)
    p_range = property_range.PropertyRange(query_spec.filters,
        query_spec.model_class_path)
    p_ranges = p_range.split(shard_count)
    if query_spec.ns:
        ns_range = namespace_range.NamespaceRange(namespace_start=
            query_spec.ns, namespace_end=query_spec.ns, _app=query_spec.app)
        ns_ranges = [copy.copy(ns_range) for _ in p_ranges]
    else:
        ns_keys = namespace_range.get_namespace_keys(query_spec.app,
            cls.MAX_NAMESPACES_FOR_KEY_SHARD + 1)
        if not ns_keys:
            return
        if len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD:
            ns_ranges = [namespace_range.NamespaceRange(_app=query_spec.app
                ) for _ in p_ranges]
        else:
            ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,
                contiguous=False, can_query=lambda : True, _app=query_spec.app)
            p_ranges = [copy.copy(p_range) for _ in ns_ranges]
    assert len(p_ranges) == len(ns_ranges)
    iters = [db_iters.RangeIteratorFactory.create_property_range_iterator(p,
        ns, query_spec) for p, ns in zip(p_ranges, ns_ranges)]
    return [cls(i) for i in iters]
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_config):\n params = job_config.input_reader_params\n shard_count = job_config.shard_count\n query_spec = cls._get_query_spec(params)\n if not property_range.should_shard_by_property_range(query_spec.filters):\n return super(ModelDatastoreInputReader, cls).split_input(job_config)\n p_range = property_range.PropertyRange(query_spec.filters, query_spec.\n model_class_path)\n p_ranges = p_range.split(shard_count)\n if query_spec.ns:\n ns_range = namespace_range.NamespaceRange(namespace_start=\n query_spec.ns, namespace_end=query_spec.ns, _app=query_spec.app)\n ns_ranges = [copy.copy(ns_range) for _ in p_ranges]\n else:\n ns_keys = namespace_range.get_namespace_keys(query_spec.app, cls.\n MAX_NAMESPACES_FOR_KEY_SHARD + 1)\n if not ns_keys:\n return\n if len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD:\n ns_ranges = [namespace_range.NamespaceRange(_app=query_spec.app\n ) for _ in p_ranges]\n else:\n ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,\n contiguous=False, can_query=lambda : True, _app=query_spec.app)\n p_ranges = [copy.copy(p_range) for _ in ns_ranges]\n assert len(p_ranges) == len(ns_ranges)\n iters = [db_iters.RangeIteratorFactory.create_property_range_iterator(p,\n ns, query_spec) for p, ns in zip(p_ranges, ns_ranges)]\n return [cls(i) for i in iters]\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(value):
    if ndb is not None and isinstance(value, ndb.Model):
        return None
    if getattr(value, '_populate_internal_entity', None):
        return value._populate_internal_entity()
    return value
normalize_entity
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(value):\n if ndb is not None and isinstance(value, ndb.Model):\n return None\n if getattr(value, '_populate_internal_entity', None):\n return value._populate_internal_entity()\n return value\n", "role": "user" }, { "content": "normalize_entity", "role": "assistant" } ]
def x(value):
    if ndb is not None and isinstance(value, (ndb.Model, ndb.Key)):
        return None
    if getattr(value, 'key', None):
        return value.key()
    elif isinstance(value, basestring):
        return datastore.Key(value)
    else:
        return value
normalize_key
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(value):\n if ndb is not None and isinstance(value, (ndb.Model, ndb.Key)):\n return None\n if getattr(value, 'key', None):\n return value.key()\n elif isinstance(value, basestring):\n return datastore.Key(value)\n else:\n return value\n", "role": "user" }, { "content": "normalize_key", "role": "assistant" } ]
def x(self, item):
    if self.should_flush():
        self.flush()
    self.items.append(item)
append
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, item):\n if self.should_flush():\n self.flush()\n self.items.append(item)\n", "role": "user" }, { "content": "append", "role": "assistant" } ]
def x(self, entity):
    actual_entity = _normalize_entity(entity)
    if actual_entity is None:
        return self.ndb_put(entity)
    self.puts.append(actual_entity)
put
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, entity):\n actual_entity = _normalize_entity(entity)\n if actual_entity is None:\n return self.ndb_put(entity)\n self.puts.append(actual_entity)\n", "role": "user" }, { "content": "put", "role": "assistant" } ]
def x(self, entity):
    assert ndb is not None and isinstance(entity, ndb.Model)
    self.ndb_puts.append(entity)
ndb_put
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, entity):\n assert ndb is not None and isinstance(entity, ndb.Model)\n self.ndb_puts.append(entity)\n", "role": "user" }, { "content": "ndb_put", "role": "assistant" } ]
def x(self, entity):
    key = _normalize_key(entity)
    if key is None:
        return self.ndb_delete(entity)
    self.deletes.append(key)
delete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, entity):\n key = _normalize_key(entity)\n if key is None:\n return self.ndb_delete(entity)\n self.deletes.append(key)\n", "role": "user" }, { "content": "delete", "role": "assistant" } ]
def x(self, entity_or_key):
    if ndb is not None and isinstance(entity_or_key, ndb.Model):
        key = entity_or_key.key
    else:
        key = entity_or_key
    self.ndb_deletes.append(key)
ndb_delete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, entity_or_key):\n if ndb is not None and isinstance(entity_or_key, ndb.Model):\n key = entity_or_key.key\n else:\n key = entity_or_key\n self.ndb_deletes.append(key)\n", "role": "user" }, { "content": "ndb_delete", "role": "assistant" } ]
def x(self):
    self.puts.flush()
    self.deletes.flush()
    self.ndb_puts.flush()
    self.ndb_deletes.flush()
flush
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self.puts.flush()\n self.deletes.flush()\n self.ndb_puts.flush()\n self.ndb_deletes.flush()\n", "role": "user" }, { "content": "flush", "role": "assistant" } ]
def x(self, items, options):
    datastore.Put(items, config=self._create_config(options))
flush_puts
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, items, options):\n datastore.Put(items, config=self._create_config(options))\n", "role": "user" }, { "content": "flush_puts", "role": "assistant" } ]
def x(self, items, options):
    datastore.Delete(items, config=self._create_config(options))
flush_deletes
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, items, options):\n datastore.Delete(items, config=self._create_config(options))\n", "role": "user" }, { "content": "flush_deletes", "role": "assistant" } ]
def x(self, items, options):
    assert ndb is not None
    ndb.put_multi(items, config=self._create_config(options))
flush_ndb_puts
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, items, options):\n assert ndb is not None\n ndb.put_multi(items, config=self._create_config(options))\n", "role": "user" }, { "content": "flush_ndb_puts", "role": "assistant" } ]
def x(self, items, options):
    assert ndb is not None
    ndb.delete_multi(items, config=self._create_config(options))
flush_ndb_deletes
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, items, options):\n assert ndb is not None\n ndb.delete_multi(items, config=self._create_config(options))\n", "role": "user" }, { "content": "flush_ndb_deletes", "role": "assistant" } ]
def x(self, options):
    return datastore.CreateConfig(deadline=options['deadline'],
        force_writes=self.force_writes)
create_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, options):\n return datastore.CreateConfig(deadline=options['deadline'],\n force_writes=self.force_writes)\n", "role": "user" }, { "content": "create_config", "role": "assistant" } ]
def x(self, counter_name, delta=1):
    self._shard_state.counters_map.increment(counter_name, delta)
increment
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, counter_name, delta=1):\n self._shard_state.counters_map.increment(counter_name, delta)\n", "role": "user" }, { "content": "increment", "role": "assistant" } ]
def x(self):
    cursor = self._cursor
    if self._query is not None:
        if isinstance(self._query, db.Query):
            cursor = self._query.cursor()
        else:
            cursor = self._query.cursor_after()
    if cursor is None or isinstance(cursor, basestring):
        cursor_object = False
    else:
        cursor_object = True
        cursor = cursor.to_websafe_string()
    return {'property_range': self._property_range.to_json(),
        'query_spec': self._query_spec.to_json(), 'cursor': cursor,
        'ns_range': self._ns_range.to_json_object(),
        'name': self.__class__.__name__, 'cursor_object': cursor_object}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n cursor = self._cursor\n if self._query is not None:\n if isinstance(self._query, db.Query):\n cursor = self._query.cursor()\n else:\n cursor = self._query.cursor_after()\n if cursor is None or isinstance(cursor, basestring):\n cursor_object = False\n else:\n cursor_object = True\n cursor = cursor.to_websafe_string()\n return {'property_range': self._property_range.to_json(), 'query_spec':\n self._query_spec.to_json(), 'cursor': cursor, 'ns_range': self.\n _ns_range.to_json_object(), 'name': self.__class__.__name__,\n 'cursor_object': cursor_object}\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json):
    obj = cls(property_range.PropertyRange.from_json(json['property_range']),
        namespace_range.NamespaceRange.from_json_object(json['ns_range']),
        model.QuerySpec.from_json(json['query_spec']))
    cursor = json['cursor']
    if cursor and json['cursor_object']:
        obj._cursor = datastore_query.Cursor.from_websafe_string(cursor)
    else:
        obj._cursor = cursor
    return obj
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n obj = cls(property_range.PropertyRange.from_json(json['property_range']\n ), namespace_range.NamespaceRange.from_json_object(json['ns_range']\n ), model.QuerySpec.from_json(json['query_spec']))\n cursor = json['cursor']\n if cursor and json['cursor_object']:\n obj._cursor = datastore_query.Cursor.from_websafe_string(cursor)\n else:\n obj._cursor = cursor\n return obj\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(self):
    json = {'name': self.__class__.__name__, 'num_ranges': len(self._iters)}
    for i in xrange(len(self._iters)):
        json_item = self._iters[i].to_json()
        query_spec = json_item['query_spec']
        item_name = json_item['name']
        del json_item['query_spec']
        del json_item['name']
        json[str(i)] = json_item
    json['query_spec'] = query_spec
    json['item_name'] = item_name
    return json
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n json = {'name': self.__class__.__name__, 'num_ranges': len(self._iters)}\n for i in xrange(len(self._iters)):\n json_item = self._iters[i].to_json()\n query_spec = json_item['query_spec']\n item_name = json_item['name']\n del json_item['query_spec']\n del json_item['name']\n json[str(i)] = json_item\n json['query_spec'] = query_spec\n json['item_name'] = item_name\n return json\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json):
    num_ranges = int(json['num_ranges'])
    query_spec = json['query_spec']
    item_name = json['item_name']
    p_range_iters = []
    for i in xrange(num_ranges):
        json_item = json[str(i)]
        json_item['query_spec'] = query_spec
        json_item['name'] = item_name
        p_range_iters.append(_PropertyRangeModelIterator.from_json(json_item))
    obj = cls(p_range_iters)
    return obj
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n num_ranges = int(json['num_ranges'])\n query_spec = json['query_spec']\n item_name = json['item_name']\n p_range_iters = []\n for i in xrange(num_ranges):\n json_item = json[str(i)]\n json_item['query_spec'] = query_spec\n json_item['name'] = item_name\n p_range_iters.append(_PropertyRangeModelIterator.from_json(json_item))\n obj = cls(p_range_iters)\n return obj\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(self):
    current_iter = None
    if self._current_iter:
        current_iter = self._current_iter.to_json()
    return {'key_ranges': self._key_ranges.to_json(),
        'query_spec': self._query_spec.to_json(),
        'current_iter': current_iter,
        'key_range_iter_cls': self._key_range_iter_cls.__name__,
        'name': self.__class__.__name__}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n current_iter = None\n if self._current_iter:\n current_iter = self._current_iter.to_json()\n return {'key_ranges': self._key_ranges.to_json(), 'query_spec': self.\n _query_spec.to_json(), 'current_iter': current_iter,\n 'key_range_iter_cls': self._key_range_iter_cls.__name__, 'name':\n self.__class__.__name__}\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json):
    key_range_iter_cls = _KEY_RANGE_ITERATORS[json['key_range_iter_cls']]
    obj = cls(key_ranges.KeyRangesFactory.from_json(json['key_ranges']),
        model.QuerySpec.from_json(json['query_spec']), key_range_iter_cls)
    current_iter = None
    if json['current_iter']:
        current_iter = key_range_iter_cls.from_json(json['current_iter'])
    obj._current_iter = current_iter
    return obj
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n key_range_iter_cls = _KEY_RANGE_ITERATORS[json['key_range_iter_cls']]\n obj = cls(key_ranges.KeyRangesFactory.from_json(json['key_ranges']),\n model.QuerySpec.from_json(json['query_spec']), key_range_iter_cls)\n current_iter = None\n if json['current_iter']:\n current_iter = key_range_iter_cls.from_json(json['current_iter'])\n obj._current_iter = current_iter\n return obj\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(self):
    cursor = self._get_cursor()
    cursor_object = False
    if cursor and isinstance(cursor, datastore_query.Cursor):
        cursor = cursor.to_websafe_string()
        cursor_object = True
    return {'key_range': self._key_range.to_json(),
        'query_spec': self._query_spec.to_json(), 'cursor': cursor,
        'cursor_object': cursor_object}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n cursor = self._get_cursor()\n cursor_object = False\n if cursor and isinstance(cursor, datastore_query.Cursor):\n cursor = cursor.to_websafe_string()\n cursor_object = True\n return {'key_range': self._key_range.to_json(), 'query_spec': self.\n _query_spec.to_json(), 'cursor': cursor, 'cursor_object': cursor_object\n }\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json):
    obj = cls(key_range.KeyRange.from_json(json['key_range']),
        model.QuerySpec.from_json(json['query_spec']))
    cursor = json['cursor']
    if cursor and json['cursor_object']:
        obj._cursor = datastore_query.Cursor.from_websafe_string(cursor)
    else:
        obj._cursor = cursor
    return obj
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n obj = cls(key_range.KeyRange.from_json(json['key_range']), model.\n QuerySpec.from_json(json['query_spec']))\n cursor = json['cursor']\n if cursor and json['cursor_object']:\n obj._cursor = datastore_query.Cursor.from_websafe_string(cursor)\n else:\n obj._cursor = cursor\n return obj\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(status_file=__file__):
    checked = set()
    yaml = _find_mapreduce_yaml(os.path.dirname(status_file), checked)
    if not yaml:
        yaml = _find_mapreduce_yaml(os.getcwd(), checked)
    return yaml
find_mapreduce_yaml
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(status_file=__file__):\n checked = set()\n yaml = _find_mapreduce_yaml(os.path.dirname(status_file), checked)\n if not yaml:\n yaml = _find_mapreduce_yaml(os.getcwd(), checked)\n return yaml\n", "role": "user" }, { "content": "find_mapreduce_yaml", "role": "assistant" } ]
def x(start, checked):
    dir = start
    while dir not in checked:
        checked.add(dir)
        for mr_yaml_name in MR_YAML_NAMES:
            yaml_path = os.path.join(dir, mr_yaml_name)
            if os.path.exists(yaml_path):
                return yaml_path
        dir = os.path.dirname(dir)
    return None
find_mapreduce_yaml
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(start, checked):\n dir = start\n while dir not in checked:\n checked.add(dir)\n for mr_yaml_name in MR_YAML_NAMES:\n yaml_path = os.path.join(dir, mr_yaml_name)\n if os.path.exists(yaml_path):\n return yaml_path\n dir = os.path.dirname(dir)\n return None\n", "role": "user" }, { "content": "find_mapreduce_yaml", "role": "assistant" } ]
def x(parse=parse_mapreduce_yaml):
    mr_yaml_path = find_mapreduce_yaml()
    if not mr_yaml_path:
        raise errors.MissingYamlError()
    mr_yaml_file = open(mr_yaml_path)
    try:
        return parse(mr_yaml_file.read())
    finally:
        mr_yaml_file.close()
get_mapreduce_yaml
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(parse=parse_mapreduce_yaml):\n mr_yaml_path = find_mapreduce_yaml()\n if not mr_yaml_path:\n raise errors.MissingYamlError()\n mr_yaml_file = open(mr_yaml_path)\n try:\n return parse(mr_yaml_file.read())\n finally:\n mr_yaml_file.close()\n", "role": "user" }, { "content": "get_mapreduce_yaml", "role": "assistant" } ]
def x(mapreduce_yaml):
    all_configs = []
    for config in mapreduce_yaml.mapreduce:
        out = {'name': config.name,
            'mapper_input_reader': config.mapper.input_reader,
            'mapper_handler': config.mapper.handler}
        if config.mapper.params_validator:
            out['mapper_params_validator'] = config.mapper.params_validator
        if config.mapper.params:
            param_defaults = {}
            for param in config.mapper.params:
                param_defaults[param.name] = param.default or param.value
            out['mapper_params'] = param_defaults
        if config.params:
            param_defaults = {}
            for param in config.params:
                param_defaults[param.name] = param.default or param.value
            out['params'] = param_defaults
        if config.mapper.output_writer:
            out['mapper_output_writer'] = config.mapper.output_writer
        all_configs.append(out)
    return all_configs
to_dict
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(mapreduce_yaml):\n all_configs = []\n for config in mapreduce_yaml.mapreduce:\n out = {'name': config.name, 'mapper_input_reader': config.mapper.\n input_reader, 'mapper_handler': config.mapper.handler}\n if config.mapper.params_validator:\n out['mapper_params_validator'] = config.mapper.params_validator\n if config.mapper.params:\n param_defaults = {}\n for param in config.mapper.params:\n param_defaults[param.name] = param.default or param.value\n out['mapper_params'] = param_defaults\n if config.params:\n param_defaults = {}\n for param in config.params:\n param_defaults[param.name] = param.default or param.value\n out['params'] = param_defaults\n if config.mapper.output_writer:\n out['mapper_output_writer'] = config.mapper.output_writer\n all_configs.append(out)\n return all_configs\n", "role": "user" }, { "content": "to_dict", "role": "assistant" } ]
def x(cls, job_config):
    if job_config.input_reader_cls != cls:
        raise errors.BadReaderParamsError(
            'Expect input reader class %r, got %r.' % (cls,
            job_config.input_reader_cls))
validate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_config):\n if job_config.input_reader_cls != cls:\n raise errors.BadReaderParamsError(\n 'Expect input reader class %r, got %r.' % (cls, job_config.\n input_reader_cls))\n", "role": "user" }, { "content": "validate", "role": "assistant" } ]
def x(records):
    ctx = context.get()
    l = len(records)
    key_records = [None] * l
    logging.debug('Parsing')
    for i in range(l):
        proto = kv_pb.KeyValue()
        proto.ParseFromString(records[i])
        key_records[i] = proto.key(), records[i]
    logging.debug('Sorting')
    key_records.sort(cmp=_compare_keys)
    logging.debug('Writing')
    mapper_spec = ctx.mapreduce_spec.mapper
    params = input_readers._get_params(mapper_spec)
    bucket_name = params.get('bucket_name')
    filename = (ctx.mapreduce_spec.name + '/' + ctx.mapreduce_id +
        '/output-' + ctx.shard_id + '-' + str(int(time.time())))
    full_filename = '/%s/%s' % (bucket_name, filename)
    filehandle = cloudstorage.open(full_filename, mode='w')
    with output_writers.GCSRecordsPool(filehandle, ctx=ctx) as pool:
        for key_record in key_records:
            pool.append(key_record[1])
    logging.debug('Finalizing')
    filehandle.close()
    entity = _OutputFile(key_name=full_filename,
        parent=_OutputFile.get_root_key(ctx.mapreduce_id))
    entity.put()
sort_records_map
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(records):\n ctx = context.get()\n l = len(records)\n key_records = [None] * l\n logging.debug('Parsing')\n for i in range(l):\n proto = kv_pb.KeyValue()\n proto.ParseFromString(records[i])\n key_records[i] = proto.key(), records[i]\n logging.debug('Sorting')\n key_records.sort(cmp=_compare_keys)\n logging.debug('Writing')\n mapper_spec = ctx.mapreduce_spec.mapper\n params = input_readers._get_params(mapper_spec)\n bucket_name = params.get('bucket_name')\n filename = (ctx.mapreduce_spec.name + '/' + ctx.mapreduce_id +\n '/output-' + ctx.shard_id + '-' + str(int(time.time())))\n full_filename = '/%s/%s' % (bucket_name, filename)\n filehandle = cloudstorage.open(full_filename, mode='w')\n with output_writers.GCSRecordsPool(filehandle, ctx=ctx) as pool:\n for key_record in key_records:\n pool.append(key_record[1])\n logging.debug('Finalizing')\n filehandle.close()\n entity = _OutputFile(key_name=full_filename, parent=_OutputFile.\n get_root_key(ctx.mapreduce_id))\n entity.put()\n", "role": "user" }, { "content": "sort_records_map", "role": "assistant" } ]
def x(key, values, partial):
    proto = kv_pb.KeyValues()
    proto.set_key(key)
    proto.value_list().extend(values)
    yield proto.Encode()
merge_map
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(key, values, partial):\n proto = kv_pb.KeyValues()\n proto.set_key(key)\n proto.value_list().extend(values)\n yield proto.Encode()\n", "role": "user" }, { "content": "merge_map", "role": "assistant" } ]
def x(binary_record):
    proto = kv_pb.KeyValue()
    proto.ParseFromString(binary_record)
    yield proto.key(), proto.value()
hashing_map
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(binary_record):\n proto = kv_pb.KeyValue()\n proto.ParseFromString(binary_record)\n yield proto.key(), proto.value()\n", "role": "user" }, { "content": "hashing_map", "role": "assistant" } ]
def x(cls, mapper_spec):
    filelists = mapper_spec.params[cls.FILES_PARAM]
    max_values_count = mapper_spec.params.get(cls.MAX_VALUES_COUNT_PARAM, -1)
    max_values_size = mapper_spec.params.get(cls.MAX_VALUES_SIZE_PARAM, -1)
    return [cls([0] * len(files), max_values_count, max_values_size) for
        files in filelists]
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n filelists = mapper_spec.params[cls.FILES_PARAM]\n max_values_count = mapper_spec.params.get(cls.MAX_VALUES_COUNT_PARAM, -1)\n max_values_size = mapper_spec.params.get(cls.MAX_VALUES_SIZE_PARAM, -1)\n return [cls([0] * len(files), max_values_count, max_values_size) for\n files in filelists]\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(cls, mapper_spec):
    if mapper_spec.input_reader_class() != cls:
        raise errors.BadReaderParamsError('Input reader class mismatch')
    params = mapper_spec.params
    if cls.FILES_PARAM not in params:
        raise errors.BadReaderParamsError('Missing files parameter.')
validate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n if mapper_spec.input_reader_class() != cls:\n raise errors.BadReaderParamsError('Input reader class mismatch')\n params = mapper_spec.params\n if cls.FILES_PARAM not in params:\n raise errors.BadReaderParamsError('Missing files parameter.')\n", "role": "user" }, { "content": "validate", "role": "assistant" } ]
def x(cls, mapper_spec):
    if mapper_spec.output_writer_class() != cls:
        raise errors.BadWriterParamsError('Output writer class mismatch')
    params = output_writers._get_params(mapper_spec)
    if cls.BUCKET_NAME_PARAM not in params:
        raise errors.BadWriterParamsError(
            '%s is required for the _HashingGCSOutputWriter' %
            cls.BUCKET_NAME_PARAM)
validate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n if mapper_spec.output_writer_class() != cls:\n raise errors.BadWriterParamsError('Output writer class mismatch')\n params = output_writers._get_params(mapper_spec)\n if cls.BUCKET_NAME_PARAM not in params:\n raise errors.BadWriterParamsError(\n '%s is required for the _HashingGCSOutputWriter' % cls.\n BUCKET_NAME_PARAM)\n", "role": "user" }, { "content": "validate", "role": "assistant" } ]
def x(self):
    for pool in self._pools:
        if pool is not None:
            pool.flush(True)
    return {'filehandles': pickle.dumps(self._filehandles)}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n for pool in self._pools:\n if pool is not None:\n pool.flush(True)\n return {'filehandles': pickle.dumps(self._filehandles)}\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, mr_spec, shard_number, shard_attempt, _writer_state=None):
    mapper_spec = mr_spec.mapper
    params = output_writers._get_params(mapper_spec)
    bucket_name = params.get(cls.BUCKET_NAME_PARAM)
    shards = mapper_spec.shard_count
    filehandles = []
    filename = (mr_spec.name + '/' + mr_spec.mapreduce_id + '/shard-' +
        str(shard_number) + '-bucket-')
    for i in range(shards):
        full_filename = '/%s/%s%d' % (bucket_name, filename, i)
        filehandles.append(cloudstorage.open(full_filename, mode='w'))
    return cls(filehandles)
create
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mr_spec, shard_number, shard_attempt, _writer_state=None):\n mapper_spec = mr_spec.mapper\n params = output_writers._get_params(mapper_spec)\n bucket_name = params.get(cls.BUCKET_NAME_PARAM)\n shards = mapper_spec.shard_count\n filehandles = []\n filename = mr_spec.name + '/' + mr_spec.mapreduce_id + '/shard-' + str(\n shard_number) + '-bucket-'\n for i in range(shards):\n full_filename = '/%s/%s%d' % (bucket_name, filename, i)\n filehandles.append(cloudstorage.open(full_filename, mode='w'))\n return cls(filehandles)\n", "role": "user" }, { "content": "create", "role": "assistant" } ]
def x(cls, mapreduce_state):
    shards = mapreduce_state.mapreduce_spec.mapper.shard_count
    filenames = []
    for _ in range(shards):
        filenames.append([None] * shards)
    shard_states = model.ShardState.find_all_by_mapreduce_state(
        mapreduce_state)
    for x, shard_state in enumerate(shard_states):
        shard_filenames = shard_state.writer_state['shard_filenames']
        for y in range(shards):
            filenames[y][x] = shard_filenames[y]
    return filenames
get_filenames
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_state):\n shards = mapreduce_state.mapreduce_spec.mapper.shard_count\n filenames = []\n for _ in range(shards):\n filenames.append([None] * shards)\n shard_states = model.ShardState.find_all_by_mapreduce_state(mapreduce_state\n )\n for x, shard_state in enumerate(shard_states):\n shard_filenames = shard_state.writer_state['shard_filenames']\n for y in range(shards):\n filenames[y][x] = shard_filenames[y]\n return filenames\n", "role": "user" }, { "content": "get_filenames", "role": "assistant" } ]
def x(self, ctx, shard_state):
    filenames = []
    for filehandle in self._filehandles:
        filenames.append(filehandle.name)
        filehandle.close()
    shard_state.writer_state = {'shard_filenames': filenames}
finalize
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, ctx, shard_state):\n filenames = []\n for filehandle in self._filehandles:\n filenames.append(filehandle.name)\n filehandle.close()\n shard_state.writer_state = {'shard_filenames': filenames}\n", "role": "user" }, { "content": "finalize", "role": "assistant" } ]
def x(self, data):
    ctx = context.get()
    if len(data) != 2:
        logging.error('Got bad tuple of length %d (2-tuple expected): %s',
            len(data), data)
    try:
        key = str(data[0])
        value = str(data[1])
    except TypeError:
        logging.error('Expecting a tuple, but got %s: %s',
            data.__class__.__name__, data)
    file_index = key.__hash__() % len(self._filehandles)
    pool = self._pools[file_index]
    if pool is None:
        filehandle = self._filehandles[file_index]
        pool = output_writers.GCSRecordsPool(filehandle=filehandle, ctx=ctx)
        self._pools[file_index] = pool
    proto = kv_pb.KeyValue()
    proto.set_key(key)
    proto.set_value(value)
    pool.append(proto.Encode())
write
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data):\n ctx = context.get()\n if len(data) != 2:\n logging.error('Got bad tuple of length %d (2-tuple expected): %s',\n len(data), data)\n try:\n key = str(data[0])\n value = str(data[1])\n except TypeError:\n logging.error('Expecting a tuple, but got %s: %s', data.__class__.\n __name__, data)\n file_index = key.__hash__() % len(self._filehandles)\n pool = self._pools[file_index]\n if pool is None:\n filehandle = self._filehandles[file_index]\n pool = output_writers.GCSRecordsPool(filehandle=filehandle, ctx=ctx)\n self._pools[file_index] = pool\n proto = kv_pb.KeyValue()\n proto.set_key(key)\n proto.set_value(value)\n pool.append(proto.Encode())\n", "role": "user" }, { "content": "write", "role": "assistant" } ]
def x(self):
    reader_params = self.input_reader_cls.params_to_json(
        self.input_reader_params)
    return {'input_reader': reader_params,
        'output_writer': self.output_writer_params}
get_mapper_params
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n reader_params = self.input_reader_cls.params_to_json(self.\n input_reader_params)\n return {'input_reader': reader_params, 'output_writer': self.\n output_writer_params}\n", "role": "user" }, { "content": "get_mapper_params", "role": "assistant" } ]
def x(self):
    from mapreduce import model
    return model.MapperSpec(handler_spec=util._obj_to_path(self.mapper),
        input_reader_spec=util._obj_to_path(self.input_reader_cls),
        params=self._get_mapper_params(), shard_count=self.shard_count,
        output_writer_spec=util._obj_to_path(self.output_writer_cls))
get_mapper_spec
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n from mapreduce import model\n return model.MapperSpec(handler_spec=util._obj_to_path(self.mapper),\n input_reader_spec=util._obj_to_path(self.input_reader_cls), params=\n self._get_mapper_params(), shard_count=self.shard_count,\n output_writer_spec=util._obj_to_path(self.output_writer_cls))\n", "role": "user" }, { "content": "get_mapper_spec", "role": "assistant" } ]
def x(self):
    return {'force_writes': self._force_writes,
        'done_callback': self.done_callback_url,
        'user_params': self.user_params,
        'shard_max_attempts': self.shard_max_attempts,
        'task_max_attempts': self._task_max_attempts,
        'task_max_data_processing_attempts':
            self._task_max_data_processing_attempts,
        'queue_name': self.queue_name,
        'base_path': self._base_path,
        'app_id': self._app,
        'api_version': self._api_version}
get_mr_params
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return {'force_writes': self._force_writes, 'done_callback': self.\n done_callback_url, 'user_params': self.user_params,\n 'shard_max_attempts': self.shard_max_attempts, 'task_max_attempts':\n self._task_max_attempts, 'task_max_data_processing_attempts': self.\n _task_max_data_processing_attempts, 'queue_name': self.queue_name,\n 'base_path': self._base_path, 'app_id': self._app, 'api_version':\n self._api_version}\n", "role": "user" }, { "content": "get_mr_params", "role": "assistant" } ]
def x(cls):
    cfg = cls(_lenient=True)
    mr_params = cfg._get_mr_params()
    mr_params['api_version'] = 0
    return mr_params
get_default_mr_params
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls):\n cfg = cls(_lenient=True)\n mr_params = cfg._get_mr_params()\n mr_params['api_version'] = 0\n return mr_params\n", "role": "user" }, { "content": "get_default_mr_params", "role": "assistant" } ]
def x(cls, mr_spec, queue_name):
    mapper_spec = mr_spec.mapper
    api_version = mr_spec.params.get('api_version', 0)
    old_api = api_version == 0
    input_reader_cls = mapper_spec.input_reader_class()
    input_reader_params = input_readers._get_params(mapper_spec)
    if issubclass(input_reader_cls, input_reader.InputReader):
        input_reader_params = input_reader_cls.params_from_json(
            input_reader_params)
    output_writer_cls = mapper_spec.output_writer_class()
    output_writer_params = output_writers._get_params(mapper_spec)
    return cls(_lenient=old_api, job_name=mr_spec.name,
        job_id=mr_spec.mapreduce_id,
        mapper=util.for_name(mapper_spec.handler_spec),
        input_reader_cls=input_reader_cls,
        input_reader_params=input_reader_params,
        output_writer_cls=output_writer_cls,
        output_writer_params=output_writer_params,
        shard_count=mapper_spec.shard_count, queue_name=queue_name,
        user_params=mr_spec.params.get('user_params'),
        shard_max_attempts=mr_spec.params.get('shard_max_attempts'),
        done_callback_url=mr_spec.params.get('done_callback'),
        _force_writes=mr_spec.params.get('force_writes'),
        _base_path=mr_spec.params['base_path'],
        _task_max_attempts=mr_spec.params.get('task_max_attempts'),
        _task_max_data_processing_attempts=mr_spec.params.get(
            'task_max_data_processing_attempts'),
        _hooks_cls=util.for_name(mr_spec.hooks_class_name),
        _app=mr_spec.params.get('app_id'), _api_version=api_version)
to_map_job_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mr_spec, queue_name):\n mapper_spec = mr_spec.mapper\n api_version = mr_spec.params.get('api_version', 0)\n old_api = api_version == 0\n input_reader_cls = mapper_spec.input_reader_class()\n input_reader_params = input_readers._get_params(mapper_spec)\n if issubclass(input_reader_cls, input_reader.InputReader):\n input_reader_params = input_reader_cls.params_from_json(\n input_reader_params)\n output_writer_cls = mapper_spec.output_writer_class()\n output_writer_params = output_writers._get_params(mapper_spec)\n return cls(_lenient=old_api, job_name=mr_spec.name, job_id=mr_spec.\n mapreduce_id, mapper=util.for_name(mapper_spec.handler_spec),\n input_reader_cls=input_reader_cls, input_reader_params=\n input_reader_params, output_writer_cls=output_writer_cls,\n output_writer_params=output_writer_params, shard_count=mapper_spec.\n shard_count, queue_name=queue_name, user_params=mr_spec.params.get(\n 'user_params'), shard_max_attempts=mr_spec.params.get(\n 'shard_max_attempts'), done_callback_url=mr_spec.params.get(\n 'done_callback'), _force_writes=mr_spec.params.get('force_writes'),\n _base_path=mr_spec.params['base_path'], _task_max_attempts=mr_spec.\n params.get('task_max_attempts'), _task_max_data_processing_attempts\n =mr_spec.params.get('task_max_data_processing_attempts'),\n _hooks_cls=util.for_name(mr_spec.hooks_class_name), _app=mr_spec.\n params.get('app_id'), _api_version=api_version)\n", "role": "user" }, { "content": "to_map_job_config", "role": "assistant" } ]
def x(self, record_type, data):
    length = len(data)
    crc = crc32c.crc_update(crc32c.CRC_INIT, [record_type])
    crc = crc32c.crc_update(crc, data)
    crc = crc32c.crc_finalize(crc)
    self.__writer.write(struct.pack(_HEADER_FORMAT, _mask_crc(crc), length,
        record_type))
    self.__writer.write(data)
    self.__position += _HEADER_LENGTH + length
write_record
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, record_type, data):\n length = len(data)\n crc = crc32c.crc_update(crc32c.CRC_INIT, [record_type])\n crc = crc32c.crc_update(crc, data)\n crc = crc32c.crc_finalize(crc)\n self.__writer.write(struct.pack(_HEADER_FORMAT, _mask_crc(crc), length,\n record_type))\n self.__writer.write(data)\n self.__position += _HEADER_LENGTH + length\n", "role": "user" }, { "content": "write_record", "role": "assistant" } ]
def x(self, data):
    block_remaining = _BLOCK_SIZE - self.__position % _BLOCK_SIZE
    if block_remaining < _HEADER_LENGTH:
        self.__writer.write('\x00' * block_remaining)
        self.__position += block_remaining
        block_remaining = _BLOCK_SIZE
    if block_remaining < len(data) + _HEADER_LENGTH:
        first_chunk = data[:block_remaining - _HEADER_LENGTH]
        self.__write_record(_RECORD_TYPE_FIRST, first_chunk)
        data = data[len(first_chunk):]
        while True:
            block_remaining = _BLOCK_SIZE - self.__position % _BLOCK_SIZE
            if block_remaining >= len(data) + _HEADER_LENGTH:
                self.__write_record(_RECORD_TYPE_LAST, data)
                break
            else:
                chunk = data[:block_remaining - _HEADER_LENGTH]
                self.__write_record(_RECORD_TYPE_MIDDLE, chunk)
                data = data[len(chunk):]
    else:
        self.__write_record(_RECORD_TYPE_FULL, data)
write
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data):\n block_remaining = _BLOCK_SIZE - self.__position % _BLOCK_SIZE\n if block_remaining < _HEADER_LENGTH:\n self.__writer.write('\\x00' * block_remaining)\n self.__position += block_remaining\n block_remaining = _BLOCK_SIZE\n if block_remaining < len(data) + _HEADER_LENGTH:\n first_chunk = data[:block_remaining - _HEADER_LENGTH]\n self.__write_record(_RECORD_TYPE_FIRST, first_chunk)\n data = data[len(first_chunk):]\n while True:\n block_remaining = _BLOCK_SIZE - self.__position % _BLOCK_SIZE\n if block_remaining >= len(data) + _HEADER_LENGTH:\n self.__write_record(_RECORD_TYPE_LAST, data)\n break\n else:\n chunk = data[:block_remaining - _HEADER_LENGTH]\n self.__write_record(_RECORD_TYPE_MIDDLE, chunk)\n data = data[len(chunk):]\n else:\n self.__write_record(_RECORD_TYPE_FULL, data)\n", "role": "user" }, { "content": "write", "role": "assistant" } ]
def x(self):
    pad_length = _BLOCK_SIZE - self.__position % _BLOCK_SIZE
    if pad_length and pad_length != _BLOCK_SIZE:
        self.__writer.write('\x00' * pad_length)
        self.__position += pad_length
pad_block
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n pad_length = _BLOCK_SIZE - self.__position % _BLOCK_SIZE\n if pad_length and pad_length != _BLOCK_SIZE:\n self.__writer.write('\\x00' * pad_length)\n self.__position += pad_length\n", "role": "user" }, { "content": "pad_block", "role": "assistant" } ]
def x(self):
    block_remaining = _BLOCK_SIZE - self.__reader.tell() % _BLOCK_SIZE
    if block_remaining < _HEADER_LENGTH:
        return '', _RECORD_TYPE_NONE
    header = self.__reader.read(_HEADER_LENGTH)
    if len(header) != _HEADER_LENGTH:
        raise EOFError('Read %s bytes instead of %s' % (len(header),
            _HEADER_LENGTH))
    masked_crc, length, record_type = struct.unpack(_HEADER_FORMAT, header)
    crc = _unmask_crc(masked_crc)
    if length + _HEADER_LENGTH > block_remaining:
        raise errors.InvalidRecordError('Length is too big')
    data = self.__reader.read(length)
    if len(data) != length:
        raise EOFError('Not enough data read. Expected: %s but got %s' % (
            length, len(data)))
    if record_type == _RECORD_TYPE_NONE:
        return '', record_type
    actual_crc = crc32c.crc_update(crc32c.CRC_INIT, [record_type])
    actual_crc = crc32c.crc_update(actual_crc, data)
    actual_crc = crc32c.crc_finalize(actual_crc)
    if actual_crc != crc:
        raise errors.InvalidRecordError('Data crc does not match')
    return data, record_type
try_read_record
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n block_remaining = _BLOCK_SIZE - self.__reader.tell() % _BLOCK_SIZE\n if block_remaining < _HEADER_LENGTH:\n return '', _RECORD_TYPE_NONE\n header = self.__reader.read(_HEADER_LENGTH)\n if len(header) != _HEADER_LENGTH:\n raise EOFError('Read %s bytes instead of %s' % (len(header),\n _HEADER_LENGTH))\n masked_crc, length, record_type = struct.unpack(_HEADER_FORMAT, header)\n crc = _unmask_crc(masked_crc)\n if length + _HEADER_LENGTH > block_remaining:\n raise errors.InvalidRecordError('Length is too big')\n data = self.__reader.read(length)\n if len(data) != length:\n raise EOFError('Not enough data read. Expected: %s but got %s' % (\n length, len(data)))\n if record_type == _RECORD_TYPE_NONE:\n return '', record_type\n actual_crc = crc32c.crc_update(crc32c.CRC_INIT, [record_type])\n actual_crc = crc32c.crc_update(actual_crc, data)\n actual_crc = crc32c.crc_finalize(actual_crc)\n if actual_crc != crc:\n raise errors.InvalidRecordError('Data crc does not match')\n return data, record_type\n", "role": "user" }, { "content": "try_read_record", "role": "assistant" } ]
def x(self): pad_length = _BLOCK_SIZE - self.__reader.tell() % _BLOCK_SIZE if pad_length and pad_length != _BLOCK_SIZE: data = self.__reader.read(pad_length) if len(data) != pad_length: raise EOFError('Read %d bytes instead of %d' % (len(data), pad_length))
sync
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n pad_length = _BLOCK_SIZE - self.__reader.tell() % _BLOCK_SIZE\n if pad_length and pad_length != _BLOCK_SIZE:\n data = self.__reader.read(pad_length)\n if len(data) != pad_length:\n raise EOFError('Read %d bytes instead of %d' % (len(data),\n pad_length))\n", "role": "user" }, { "content": "sync", "role": "assistant" } ]
def x(self, job_name, handler_spec, input_reader_spec, output_writer_spec= None, params=None, shards=None, base_path=None): if shards is None: shards = parameters.config.SHARD_COUNT if base_path is None: base_path = parameters.config.BASE_PATH mapreduce_id = control.start_map(job_name, handler_spec, input_reader_spec, params or {}, mapreduce_parameters={ 'done_callback': self.get_callback_url(), 'done_callback_method': 'GET', 'pipeline_id': self.pipeline_id, 'base_path': base_path}, shard_count=shards, output_writer_spec=output_writer_spec, queue_name=self.queue_name) self.fill(self.outputs.job_id, mapreduce_id) self.set_status(console_url='%s/detail?mapreduce_id=%s' % (base_path, mapreduce_id))
run
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, job_name, handler_spec, input_reader_spec, output_writer_spec=\n None, params=None, shards=None, base_path=None):\n if shards is None:\n shards = parameters.config.SHARD_COUNT\n if base_path is None:\n base_path = parameters.config.BASE_PATH\n mapreduce_id = control.start_map(job_name, handler_spec,\n input_reader_spec, params or {}, mapreduce_parameters={\n 'done_callback': self.get_callback_url(), 'done_callback_method':\n 'GET', 'pipeline_id': self.pipeline_id, 'base_path': base_path},\n shard_count=shards, output_writer_spec=output_writer_spec,\n queue_name=self.queue_name)\n self.fill(self.outputs.job_id, mapreduce_id)\n self.set_status(console_url='%s/detail?mapreduce_id=%s' % (base_path,\n mapreduce_id))\n", "role": "user" }, { "content": "run", "role": "assistant" } ]
def x(self): if self.was_aborted: return mapreduce_id = self.outputs.job_id.value mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id) if mapreduce_state.result_status != model.MapreduceState.RESULT_SUCCESS: self.retry('Job %s had status %s' % (mapreduce_id, mapreduce_state. result_status)) return mapper_spec = mapreduce_state.mapreduce_spec.mapper outputs = [] output_writer_class = mapper_spec.output_writer_class() if (output_writer_class and mapreduce_state.result_status == model. MapreduceState.RESULT_SUCCESS): outputs = output_writer_class.get_filenames(mapreduce_state) self.fill(self.outputs.result_status, mapreduce_state.result_status) self.fill(self.outputs.counters, mapreduce_state.counters_map.to_dict()) self.complete(outputs)
callback
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self.was_aborted:\n return\n mapreduce_id = self.outputs.job_id.value\n mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)\n if mapreduce_state.result_status != model.MapreduceState.RESULT_SUCCESS:\n self.retry('Job %s had status %s' % (mapreduce_id, mapreduce_state.\n result_status))\n return\n mapper_spec = mapreduce_state.mapreduce_spec.mapper\n outputs = []\n output_writer_class = mapper_spec.output_writer_class()\n if (output_writer_class and mapreduce_state.result_status == model.\n MapreduceState.RESULT_SUCCESS):\n outputs = output_writer_class.get_filenames(mapreduce_state)\n self.fill(self.outputs.result_status, mapreduce_state.result_status)\n self.fill(self.outputs.counters, mapreduce_state.counters_map.to_dict())\n self.complete(outputs)\n", "role": "user" }, { "content": "callback", "role": "assistant" } ]
def x(name, handler_spec, reader_spec, mapper_parameters, shard_count=None, output_writer_spec=None, mapreduce_parameters=None, base_path=None, queue_name=None, eta=None, countdown=None, hooks_class_name=None, _app= None, in_xg_transaction=False): if shard_count is None: shard_count = parameters.config.SHARD_COUNT if mapper_parameters: mapper_parameters = dict(mapper_parameters) mr_params = map_job.JobConfig._get_default_mr_params() if mapreduce_parameters: mr_params.update(mapreduce_parameters) if base_path: mr_params['base_path'] = base_path mr_params['queue_name'] = util.get_queue_name(queue_name) mapper_spec = model.MapperSpec(handler_spec, reader_spec, mapper_parameters, shard_count, output_writer_spec=output_writer_spec) if in_xg_transaction and not db.is_in_transaction(): logging.warning( 'Expects an opened xg transaction to start mapreduce when transactional is True.' ) return handlers.StartJobHandler._start_map(name, mapper_spec, mr_params, queue_name=mr_params['queue_name'], eta=eta, countdown=countdown, hooks_class_name=hooks_class_name, _app=_app, in_xg_transaction= in_xg_transaction)
start_map
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(name, handler_spec, reader_spec, mapper_parameters, shard_count=None,\n output_writer_spec=None, mapreduce_parameters=None, base_path=None,\n queue_name=None, eta=None, countdown=None, hooks_class_name=None, _app=\n None, in_xg_transaction=False):\n if shard_count is None:\n shard_count = parameters.config.SHARD_COUNT\n if mapper_parameters:\n mapper_parameters = dict(mapper_parameters)\n mr_params = map_job.JobConfig._get_default_mr_params()\n if mapreduce_parameters:\n mr_params.update(mapreduce_parameters)\n if base_path:\n mr_params['base_path'] = base_path\n mr_params['queue_name'] = util.get_queue_name(queue_name)\n mapper_spec = model.MapperSpec(handler_spec, reader_spec,\n mapper_parameters, shard_count, output_writer_spec=output_writer_spec)\n if in_xg_transaction and not db.is_in_transaction():\n logging.warning(\n 'Expects an opened xg transaction to start mapreduce when transactional is True.'\n )\n return handlers.StartJobHandler._start_map(name, mapper_spec, mr_params,\n queue_name=mr_params['queue_name'], eta=eta, countdown=countdown,\n hooks_class_name=hooks_class_name, _app=_app, in_xg_transaction=\n in_xg_transaction)\n", "role": "user" }, { "content": "start_map", "role": "assistant" } ]
def x(self): self.__update_state() if self._state.active: return self.RUNNING else: return self._state.result_status
get_status
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self.__update_state()\n if self._state.active:\n return self.RUNNING\n else:\n return self._state.result_status\n", "role": "user" }, { "content": "get_status", "role": "assistant" } ]
def x(self, counter_name, default=0): self.__update_state() return self._state.counters_map.get(counter_name, default)
get_counter
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, counter_name, default=0):\n self.__update_state()\n return self._state.counters_map.get(counter_name, default)\n", "role": "user" }, { "content": "get_counter", "role": "assistant" } ]
def x(self): assert self.SUCCESS == self.get_status() ss = model.ShardState.find_all_by_mapreduce_state(self._state) for s in ss: yield iter(s.writer_state.get('outs', []))
get_outputs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n assert self.SUCCESS == self.get_status()\n ss = model.ShardState.find_all_by_mapreduce_state(self._state)\n for s in ss:\n yield iter(s.writer_state.get('outs', []))\n", "role": "user" }, { "content": "get_outputs", "role": "assistant" } ]
def x(cls, job_config, in_xg_transaction=False): cls.__validate_job_config(job_config) mapper_spec = job_config._get_mapper_spec() mapreduce_params = job_config._get_mr_params() mapreduce_spec = model.MapreduceSpec(job_config.job_name, job_config. job_id, mapper_spec.to_json(), mapreduce_params, util._obj_to_path( job_config._hooks_cls)) if in_xg_transaction: propagation = db.MANDATORY else: propagation = db.INDEPENDENT state = None @db.transactional(propagation=propagation) def x(): state = cls.__create_and_save_state(job_config, mapreduce_spec) cls.__add_kickoff_task(job_config, mapreduce_spec) return state state = _txn() return cls(state)
submit
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_config, in_xg_transaction=False):\n cls.__validate_job_config(job_config)\n mapper_spec = job_config._get_mapper_spec()\n mapreduce_params = job_config._get_mr_params()\n mapreduce_spec = model.MapreduceSpec(job_config.job_name, job_config.\n job_id, mapper_spec.to_json(), mapreduce_params, util._obj_to_path(\n job_config._hooks_cls))\n if in_xg_transaction:\n propagation = db.MANDATORY\n else:\n propagation = db.INDEPENDENT\n state = None\n\n @db.transactional(propagation=propagation)\n def x():\n state = cls.__create_and_save_state(job_config, mapreduce_spec)\n cls.__add_kickoff_task(job_config, mapreduce_spec)\n return state\n state = _txn()\n return cls(state)\n", "role": "user" }, { "content": "submit", "role": "assistant" } ]
def x(self): if self._state.active: self._state = self.__get_state_by_id(self.job_config.job_id)
update_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self._state.active:\n self._state = self.__get_state_by_id(self.job_config.job_id)\n", "role": "user" }, { "content": "update_state", "role": "assistant" } ]
def x(cls, job_id): state = model.MapreduceState.get_by_job_id(job_id) if state is None: raise ValueError('Job state for job %s is missing.' % job_id) return state
get_state_by_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_id):\n state = model.MapreduceState.get_by_job_id(job_id)\n if state is None:\n raise ValueError('Job state for job %s is missing.' % job_id)\n return state\n", "role": "user" }, { "content": "get_state_by_id", "role": "assistant" } ]
def x(cls, job_config, mapreduce_spec): state = model.MapreduceState.create_new(job_config.job_id) state.mapreduce_spec = mapreduce_spec state.active = True state.active_shards = 0 state.app_id = job_config._app config = datastore_rpc.Configuration(force_writes=job_config._force_writes) state.put(config=config) return state
create_and_save_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_config, mapreduce_spec):\n state = model.MapreduceState.create_new(job_config.job_id)\n state.mapreduce_spec = mapreduce_spec\n state.active = True\n state.active_shards = 0\n state.app_id = job_config._app\n config = datastore_rpc.Configuration(force_writes=job_config._force_writes)\n state.put(config=config)\n return state\n", "role": "user" }, { "content": "create_and_save_state", "role": "assistant" } ]
def x(cls, job_config, mapreduce_spec): params = {'mapreduce_id': job_config.job_id} kickoff_task = taskqueue.Task(url=job_config._base_path + '/kickoffjob_callback/' + job_config.job_id, headers=util. _get_task_headers(job_config.job_id), params=params) if job_config._hooks_cls: hooks = job_config._hooks_cls(mapreduce_spec) try: hooks.enqueue_kickoff_task(kickoff_task, job_config.queue_name) return except NotImplementedError: pass kickoff_task.add(job_config.queue_name, transactional=True)
add_kickoff_task
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_config, mapreduce_spec):\n params = {'mapreduce_id': job_config.job_id}\n kickoff_task = taskqueue.Task(url=job_config._base_path +\n '/kickoffjob_callback/' + job_config.job_id, headers=util.\n _get_task_headers(job_config.job_id), params=params)\n if job_config._hooks_cls:\n hooks = job_config._hooks_cls(mapreduce_spec)\n try:\n hooks.enqueue_kickoff_task(kickoff_task, job_config.queue_name)\n return\n except NotImplementedError:\n pass\n kickoff_task.add(job_config.queue_name, transactional=True)\n", "role": "user" }, { "content": "add_kickoff_task", "role": "assistant" } ]
def x(self): _json = self.to_json() try: return json.dumps(_json, sort_keys=True, cls=JsonEncoder) except: logging.exception('Could not serialize JSON: %r', _json) raise
to_json_str
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n _json = self.to_json()\n try:\n return json.dumps(_json, sort_keys=True, cls=JsonEncoder)\n except:\n logging.exception('Could not serialize JSON: %r', _json)\n raise\n", "role": "user" }, { "content": "to_json_str", "role": "assistant" } ]
def x(cls, json_str): return cls.from_json(json.loads(json_str, cls=JsonDecoder))
from_json_str
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json_str):\n return cls.from_json(json.loads(json_str, cls=JsonDecoder))\n", "role": "user" }, { "content": "from_json_str", "role": "assistant" } ]
def x(self, model_instance): value = super(JsonProperty, self).get_value_for_datastore(model_instance) if not value: return None json_value = value if not isinstance(value, dict): json_value = value.to_json() if not json_value: return None return datastore_types.Text(json.dumps(json_value, sort_keys=True, cls= JsonEncoder))
get_value_for_datastore
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, model_instance):\n value = super(JsonProperty, self).get_value_for_datastore(model_instance)\n if not value:\n return None\n json_value = value\n if not isinstance(value, dict):\n json_value = value.to_json()\n if not json_value:\n return None\n return datastore_types.Text(json.dumps(json_value, sort_keys=True, cls=\n JsonEncoder))\n", "role": "user" }, { "content": "get_value_for_datastore", "role": "assistant" } ]
def x(self, value): if value is None: return None _json = json.loads(value, cls=JsonDecoder) if self.data_type == dict: return _json return self.data_type.from_json(_json)
make_value_from_datastore
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value):\n if value is None:\n return None\n _json = json.loads(value, cls=JsonDecoder)\n if self.data_type == dict:\n return _json\n return self.data_type.from_json(_json)\n", "role": "user" }, { "content": "make_value_from_datastore", "role": "assistant" } ]
def x(self, value): if value is not None and not isinstance(value, self.data_type): raise datastore_errors.BadValueError( 'Property %s must be convertible to a %s instance (%s)' % (self .name, self.data_type, value)) return super(JsonProperty, self).validate(value)
validate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value):\n if value is not None and not isinstance(value, self.data_type):\n raise datastore_errors.BadValueError(\n 'Property %s must be convertible to a %s instance (%s)' % (self\n .name, self.data_type, value))\n return super(JsonProperty, self).validate(value)\n", "role": "user" }, { "content": "validate", "role": "assistant" } ]
def x(self, queue_name, transactional=False): task = self.to_task() task.add(queue_name, transactional)
add
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, queue_name, transactional=False):\n task = self.to_task()\n task.add(queue_name, transactional)\n", "role": "user" }, { "content": "add", "role": "assistant" } ]
def x(self): return taskqueue.Task(url=self.url, payload=self._payload, name=self. name, eta=self.eta, countdown=self.countdown, headers=self._headers)
to_task
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return taskqueue.Task(url=self.url, payload=self._payload, name=self.\n name, eta=self.eta, countdown=self.countdown, headers=self._headers)\n", "role": "user" }, { "content": "to_task", "role": "assistant" } ]
def x(cls, request): if request.headers.get(cls.PAYLOAD_VERSION_HEADER) != cls.PAYLOAD_VERSION: raise DeprecationWarning( 'Task is generated by an older incompatible version of mapreduce. Please kill this job manually' ) return cls._decode_payload(request.body)
decode_payload
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, request):\n if request.headers.get(cls.PAYLOAD_VERSION_HEADER) != cls.PAYLOAD_VERSION:\n raise DeprecationWarning(\n 'Task is generated by an older incompatible version of mapreduce. Please kill this job manually'\n )\n return cls._decode_payload(request.body)\n", "role": "user" }, { "content": "decode_payload", "role": "assistant" } ]
def x(self, counter_name, delta): current_value = self.counters.get(counter_name, 0) new_value = current_value + delta self.counters[counter_name] = new_value return new_value
increment
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, counter_name, delta):\n current_value = self.counters.get(counter_name, 0)\n new_value = current_value + delta\n self.counters[counter_name] = new_value\n return new_value\n", "role": "user" }, { "content": "increment", "role": "assistant" } ]
def x(self, counters_map): for counter_name in counters_map.counters: self.increment(counter_name, counters_map.counters[counter_name])
add_map
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, counters_map):\n for counter_name in counters_map.counters:\n self.increment(counter_name, counters_map.counters[counter_name])\n", "role": "user" }, { "content": "add_map", "role": "assistant" } ]
def x(self, counters_map): for counter_name in counters_map.counters: self.increment(counter_name, -counters_map.counters[counter_name])
sub_map
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, counters_map):\n for counter_name in counters_map.counters:\n self.increment(counter_name, -counters_map.counters[counter_name])\n", "role": "user" }, { "content": "sub_map", "role": "assistant" } ]
def x(self): result = {'mapper_handler_spec': self.handler_spec, 'mapper_input_reader': self.input_reader_spec, 'mapper_params': self.params, 'mapper_shard_count': self.shard_count} if self.output_writer_spec: result['mapper_output_writer'] = self.output_writer_spec return result
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n result = {'mapper_handler_spec': self.handler_spec,\n 'mapper_input_reader': self.input_reader_spec, 'mapper_params':\n self.params, 'mapper_shard_count': self.shard_count}\n if self.output_writer_spec:\n result['mapper_output_writer'] = self.output_writer_spec\n return result\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(self): if self.__hooks is None and self.hooks_class_name is not None: hooks_class = util.for_name(self.hooks_class_name) if not isinstance(hooks_class, type): raise ValueError( 'hooks_class_name must refer to a class, got %s' % type( hooks_class).__name__) if not issubclass(hooks_class, hooks.Hooks): raise ValueError( 'hooks_class_name must refer to a hooks.Hooks subclass') self.__hooks = hooks_class(self) return self.__hooks
get_hooks
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self.__hooks is None and self.hooks_class_name is not None:\n hooks_class = util.for_name(self.hooks_class_name)\n if not isinstance(hooks_class, type):\n raise ValueError(\n 'hooks_class_name must refer to a class, got %s' % type(\n hooks_class).__name__)\n if not issubclass(hooks_class, hooks.Hooks):\n raise ValueError(\n 'hooks_class_name must refer to a hooks.Hooks subclass')\n self.__hooks = hooks_class(self)\n return self.__hooks\n", "role": "user" }, { "content": "get_hooks", "role": "assistant" } ]
def x(self): mapper_spec = self.mapper.to_json() return {'name': self.name, 'mapreduce_id': self.mapreduce_id, 'mapper_spec': mapper_spec, 'params': self.params, 'hooks_class_name': self.hooks_class_name}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n mapper_spec = self.mapper.to_json()\n return {'name': self.name, 'mapreduce_id': self.mapreduce_id,\n 'mapper_spec': mapper_spec, 'params': self.params,\n 'hooks_class_name': self.hooks_class_name}\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json): mapreduce_spec = cls(json['name'], json['mapreduce_id'], json[ 'mapper_spec'], json.get('params'), json.get('hooks_class_name')) return mapreduce_spec
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n mapreduce_spec = cls(json['name'], json['mapreduce_id'], json[\n 'mapper_spec'], json.get('params'), json.get('hooks_class_name'))\n return mapreduce_spec\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(cls, mr_id): key = 'GAE-MR-spec: %s' % mr_id spec_json = memcache.get(key) if spec_json: return cls.from_json(spec_json) state = MapreduceState.get_by_job_id(mr_id) spec = state.mapreduce_spec spec_json = spec.to_json() memcache.set(key, spec_json) return spec
get_mapreduce_spec
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mr_id):\n key = 'GAE-MR-spec: %s' % mr_id\n spec_json = memcache.get(key)\n if spec_json:\n return cls.from_json(spec_json)\n state = MapreduceState.get_by_job_id(mr_id)\n spec = state.mapreduce_spec\n spec_json = spec.to_json()\n memcache.set(key, spec_json)\n return spec\n", "role": "user" }, { "content": "get_mapreduce_spec", "role": "assistant" } ]
def x(cls, mapreduce_id): return db.Key.from_path(cls.kind(), str(mapreduce_id))
get_key_by_job_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_id):\n return db.Key.from_path(cls.kind(), str(mapreduce_id))\n", "role": "user" }, { "content": "get_key_by_job_id", "role": "assistant" } ]
def x(self, shards_processed, shards_status): chart = google_chart_api.BarChart() def x(status_to_filter): return [(count if status == status_to_filter else 0) for count, status in zip(shards_processed, shards_status)] if shards_status: chart.stacked = True chart.AddBars(filter_status('unknown'), color='404040') chart.AddBars(filter_status('success'), color='00ac42') chart.AddBars(filter_status('running'), color='3636a9') chart.AddBars(filter_status('aborted'), color='e29e24') chart.AddBars(filter_status('failed'), color='f6350f') else: chart.AddBars(shards_processed) shard_count = len(shards_processed) if shard_count > 95: pixels_per_shard = 700.0 / shard_count bar_thickness = int(pixels_per_shard * 0.9) chart.style = bar_chart.BarChartStyle(bar_thickness=bar_thickness, bar_gap=0.1, use_fractional_gap_spacing=True) if shards_processed and shard_count <= 95: stride_length = max(1, shard_count / 16) chart.bottom.labels = [] for x in xrange(shard_count): if x % stride_length == 0 or x == shard_count - 1: chart.bottom.labels.append(x) else: chart.bottom.labels.append('') chart.left.labels = ['0', str(max(shards_processed))] chart.left.min = 0 self.chart_width = min(700, max(300, shard_count * 20)) self.chart_url = chart.display.Url(self.chart_width, 200)
set_processed_counts
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shards_processed, shards_status):\n chart = google_chart_api.BarChart()\n\n def x(status_to_filter):\n return [(count if status == status_to_filter else 0) for count,\n status in zip(shards_processed, shards_status)]\n if shards_status:\n chart.stacked = True\n chart.AddBars(filter_status('unknown'), color='404040')\n chart.AddBars(filter_status('success'), color='00ac42')\n chart.AddBars(filter_status('running'), color='3636a9')\n chart.AddBars(filter_status('aborted'), color='e29e24')\n chart.AddBars(filter_status('failed'), color='f6350f')\n else:\n chart.AddBars(shards_processed)\n shard_count = len(shards_processed)\n if shard_count > 95:\n pixels_per_shard = 700.0 / shard_count\n bar_thickness = int(pixels_per_shard * 0.9)\n chart.style = bar_chart.BarChartStyle(bar_thickness=bar_thickness,\n bar_gap=0.1, use_fractional_gap_spacing=True)\n if shards_processed and shard_count <= 95:\n stride_length = max(1, shard_count / 16)\n chart.bottom.labels = []\n for x in xrange(shard_count):\n if x % stride_length == 0 or x == shard_count - 1:\n chart.bottom.labels.append(x)\n else:\n chart.bottom.labels.append('')\n chart.left.labels = ['0', str(max(shards_processed))]\n chart.left.min = 0\n self.chart_width = min(700, max(300, shard_count * 20))\n self.chart_url = chart.display.Url(self.chart_width, 200)\n", "role": "user" }, { "content": "set_processed_counts", "role": "assistant" } ]
def x(mapreduce_id=None, gettime=datetime.datetime.now): if not mapreduce_id: mapreduce_id = MapreduceState.new_mapreduce_id() state = MapreduceState(key_name=mapreduce_id, last_poll_time=gettime()) state.set_processed_counts([], []) return state
create_new
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(mapreduce_id=None, gettime=datetime.datetime.now):\n if not mapreduce_id:\n mapreduce_id = MapreduceState.new_mapreduce_id()\n state = MapreduceState(key_name=mapreduce_id, last_poll_time=gettime())\n state.set_processed_counts([], [])\n return state\n", "role": "user" }, { "content": "create_new", "role": "assistant" } ]
def x(self, output_writer): self.input_reader = self.initial_input_reader self.slice_id = 0 self.retries += 1 self.output_writer = output_writer self.handler = self.mapreduce_spec.mapper.handler
reset_for_retry
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, output_writer):\n self.input_reader = self.initial_input_reader\n self.slice_id = 0\n self.retries += 1\n self.output_writer = output_writer\n self.handler = self.mapreduce_spec.mapper.handler\n", "role": "user" }, { "content": "reset_for_retry", "role": "assistant" } ]
def x(self, recovery_slice=False): if recovery_slice: self.slice_id += 2 self.input_reader = self.input_reader.from_json(self._input_reader_json ) else: self.slice_id += 1
advance_for_next_slice
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, recovery_slice=False):\n if recovery_slice:\n self.slice_id += 2\n self.input_reader = self.input_reader.from_json(self._input_reader_json\n )\n else:\n self.slice_id += 1\n", "role": "user" }, { "content": "advance_for_next_slice", "role": "assistant" } ]
def x(self): result = {'mapreduce_spec': self.mapreduce_spec.to_json_str(), 'shard_id': self.shard_id, 'slice_id': str(self.slice_id), 'input_reader_state': self.input_reader.to_json_str(), 'initial_input_reader_state': self.initial_input_reader.to_json_str (), 'retries': str(self.retries)} if self.output_writer: result['output_writer_state'] = self.output_writer.to_json_str() serialized_handler = util.try_serialize_handler(self.handler) if serialized_handler: result['serialized_handler'] = serialized_handler return result
to_dict
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n result = {'mapreduce_spec': self.mapreduce_spec.to_json_str(),\n 'shard_id': self.shard_id, 'slice_id': str(self.slice_id),\n 'input_reader_state': self.input_reader.to_json_str(),\n 'initial_input_reader_state': self.initial_input_reader.to_json_str\n (), 'retries': str(self.retries)}\n if self.output_writer:\n result['output_writer_state'] = self.output_writer.to_json_str()\n serialized_handler = util.try_serialize_handler(self.handler)\n if serialized_handler:\n result['serialized_handler'] = serialized_handler\n return result\n", "role": "user" }, { "content": "to_dict", "role": "assistant" } ]
def x(cls, request): mapreduce_spec = MapreduceSpec.from_json_str(request.get('mapreduce_spec')) mapper_spec = mapreduce_spec.mapper input_reader_spec_dict = json.loads(request.get('input_reader_state'), cls=json_util.JsonDecoder) input_reader = mapper_spec.input_reader_class().from_json( input_reader_spec_dict) initial_input_reader_spec_dict = json.loads(request.get( 'initial_input_reader_state'), cls=json_util.JsonDecoder) initial_input_reader = mapper_spec.input_reader_class().from_json( initial_input_reader_spec_dict) output_writer = None if mapper_spec.output_writer_class(): output_writer = mapper_spec.output_writer_class().from_json(json. loads(request.get('output_writer_state', '{}'), cls=json_util. JsonDecoder)) assert isinstance(output_writer, mapper_spec.output_writer_class() ), '%s.from_json returned an instance of wrong class: %s' % ( mapper_spec.output_writer_class(), output_writer.__class__) handler = util.try_deserialize_handler(request.get('serialized_handler')) if not handler: handler = mapreduce_spec.mapper.handler return cls(mapreduce_spec.params['base_path'], mapreduce_spec, str( request.get('shard_id')), int(request.get('slice_id')), input_reader, initial_input_reader, output_writer=output_writer, retries=int(request.get('retries')), handler=handler)
from_request
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, request):\n mapreduce_spec = MapreduceSpec.from_json_str(request.get('mapreduce_spec'))\n mapper_spec = mapreduce_spec.mapper\n input_reader_spec_dict = json.loads(request.get('input_reader_state'),\n cls=json_util.JsonDecoder)\n input_reader = mapper_spec.input_reader_class().from_json(\n input_reader_spec_dict)\n initial_input_reader_spec_dict = json.loads(request.get(\n 'initial_input_reader_state'), cls=json_util.JsonDecoder)\n initial_input_reader = mapper_spec.input_reader_class().from_json(\n initial_input_reader_spec_dict)\n output_writer = None\n if mapper_spec.output_writer_class():\n output_writer = mapper_spec.output_writer_class().from_json(json.\n loads(request.get('output_writer_state', '{}'), cls=json_util.\n JsonDecoder))\n assert isinstance(output_writer, mapper_spec.output_writer_class()\n ), '%s.from_json returned an instance of wrong class: %s' % (\n mapper_spec.output_writer_class(), output_writer.__class__)\n handler = util.try_deserialize_handler(request.get('serialized_handler'))\n if not handler:\n handler = mapreduce_spec.mapper.handler\n return cls(mapreduce_spec.params['base_path'], mapreduce_spec, str(\n request.get('shard_id')), int(request.get('slice_id')),\n input_reader, initial_input_reader, output_writer=output_writer,\n retries=int(request.get('retries')), handler=handler)\n", "role": "user" }, { "content": "from_request", "role": "assistant" } ]
def x(self): self.retries += 1 self.last_work_item = '' self.active = True self.result_status = None self.input_finished = False self.counters_map = CountersMap() self.slice_id = 0 self.slice_start_time = None self.slice_request_id = None self.slice_retries = 0 self.acquired_once = False
reset_for_retry
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self.retries += 1\n self.last_work_item = ''\n self.active = True\n self.result_status = None\n self.input_finished = False\n self.counters_map = CountersMap()\n self.slice_id = 0\n self.slice_start_time = None\n self.slice_request_id = None\n self.slice_retries = 0\n self.acquired_once = False\n", "role": "user" }, { "content": "reset_for_retry", "role": "assistant" } ]
def x(self, recovery_slice=False): self.slice_start_time = None self.slice_request_id = None self.slice_retries = 0 self.acquired_once = False if recovery_slice: self.slice_id += 2 else: self.slice_id += 1
advance_for_next_slice
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, recovery_slice=False):\n self.slice_start_time = None\n self.slice_request_id = None\n self.slice_retries = 0\n self.acquired_once = False\n if recovery_slice:\n self.slice_id += 2\n else:\n self.slice_id += 1\n", "role": "user" }, { "content": "advance_for_next_slice", "role": "assistant" } ]
def x(self, other_state): for prop in self.properties().values(): setattr(self, prop.name, getattr(other_state, prop.name))
copy_from
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, other_state):\n for prop in self.properties().values():\n setattr(self, prop.name, getattr(other_state, prop.name))\n", "role": "user" }, { "content": "copy_from", "role": "assistant" } ]
def x(cls, mapreduce_state): keys = cls.calculate_keys_by_mapreduce_state(mapreduce_state) i = 0 while i < len(keys): @db.non_transactional def x(i): return db.get(keys[i:i + cls._MAX_STATES_IN_MEMORY]) states = no_tx_get(i) for s in states: i += 1 if s is not None: yield s
find_all_by_mapreduce_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_state):\n keys = cls.calculate_keys_by_mapreduce_state(mapreduce_state)\n i = 0\n while i < len(keys):\n\n @db.non_transactional\n def x(i):\n return db.get(keys[i:i + cls._MAX_STATES_IN_MEMORY])\n states = no_tx_get(i)\n for s in states:\n i += 1\n if s is not None:\n yield s\n", "role": "user" }, { "content": "find_all_by_mapreduce_state", "role": "assistant" } ]
def x(cls, mapreduce_state): if mapreduce_state is None: return [] keys = [] for i in range(mapreduce_state.mapreduce_spec.mapper.shard_count): shard_id = cls.shard_id_from_number(mapreduce_state.key().name(), i) keys.append(cls.get_key_by_shard_id(shard_id)) return keys
calculate_keys_by_mapreduce_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_state):\n if mapreduce_state is None:\n return []\n keys = []\n for i in range(mapreduce_state.mapreduce_spec.mapper.shard_count):\n shard_id = cls.shard_id_from_number(mapreduce_state.key().name(), i)\n keys.append(cls.get_key_by_shard_id(shard_id))\n return keys\n", "role": "user" }, { "content": "calculate_keys_by_mapreduce_state", "role": "assistant" } ]
def x(cls, mapreduce_id, shard_number): shard_id = cls.shard_id_from_number(mapreduce_id, shard_number) state = cls(key_name=shard_id, mapreduce_id=mapreduce_id) return state
create_new
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_id, shard_number):\n shard_id = cls.shard_id_from_number(mapreduce_id, shard_number)\n state = cls(key_name=shard_id, mapreduce_id=mapreduce_id)\n return state\n", "role": "user" }, { "content": "create_new", "role": "assistant" } ]
def x(cls, mapreduce_id): return db.Key.from_path(cls.kind(), '%s:%s' % (mapreduce_id, cls._KEY_NAME) )
get_key_by_job_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_id):\n return db.Key.from_path(cls.kind(), '%s:%s' % (mapreduce_id, cls._KEY_NAME)\n )\n", "role": "user" }, { "content": "get_key_by_job_id", "role": "assistant" } ]
def x(cls, mapreduce_id, **kwargs): cls(key_name='%s:%s' % (mapreduce_id, cls._KEY_NAME), command=cls.ABORT ).put(**kwargs)
abort
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_id, **kwargs):\n cls(key_name='%s:%s' % (mapreduce_id, cls._KEY_NAME), command=cls.ABORT\n ).put(**kwargs)\n", "role": "user" }, { "content": "abort", "role": "assistant" } ]
def x(cls, job_config): super(DatastoreInputReader, cls).validate(job_config) params = job_config.input_reader_params entity_kind = params[cls.ENTITY_KIND_PARAM] if '.' in entity_kind: logging.warning( '. detected in entity kind %s specified for reader %s.Assuming entity kind contains the dot.' , entity_kind, cls.__name__) if cls.FILTERS_PARAM in params: filters = params[cls.FILTERS_PARAM] for f in filters: if f[1] != '=': raise errors.BadReaderParamsError( 'Only equality filters are supported: %s', f)
validate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_config):\n super(DatastoreInputReader, cls).validate(job_config)\n params = job_config.input_reader_params\n entity_kind = params[cls.ENTITY_KIND_PARAM]\n if '.' in entity_kind:\n logging.warning(\n '. detected in entity kind %s specified for reader %s.Assuming entity kind contains the dot.'\n , entity_kind, cls.__name__)\n if cls.FILTERS_PARAM in params:\n filters = params[cls.FILTERS_PARAM]\n for f in filters:\n if f[1] != '=':\n raise errors.BadReaderParamsError(\n 'Only equality filters are supported: %s', f)\n", "role": "user" }, { "content": "validate", "role": "assistant" } ]