source_code: string (lengths 45 – 7.24k)
function_name: string (lengths 1 – 111)
messages: list
def x(self, request, response):
    super(TaskQueueHandler, self).initialize(request, response)
    if 'X-AppEngine-QueueName' not in self.request.headers:
        logging.error(self.request.headers)
        logging.error('Task queue handler received non-task queue request')
        self.response.set_status(403, message=
            'Task queue handler received non-task queue request')
        return
    if self.task_retry_count() + 1 > parameters.config.TASK_MAX_ATTEMPTS:
        logging.error(
            'Task %s has been attempted %s times. Dropping it permanently.',
            self.request.headers['X-AppEngine-TaskName'], self.
            task_retry_count() + 1)
        self._drop_gracefully()
        return
    try:
        self._preprocess()
        self._preprocess_success = True
    except:
        self._preprocess_success = False
        logging.error('Preprocess task %s failed. Dropping it permanently.',
            self.request.headers['X-AppEngine-TaskName'])
        self._drop_gracefully()
initialize
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, request, response):\n super(TaskQueueHandler, self).initialize(request, response)\n if 'X-AppEngine-QueueName' not in self.request.headers:\n logging.error(self.request.headers)\n logging.error('Task queue handler received non-task queue request')\n self.response.set_status(403, message=\n 'Task queue handler received non-task queue request')\n return\n if self.task_retry_count() + 1 > parameters.config.TASK_MAX_ATTEMPTS:\n logging.error(\n 'Task %s has been attempted %s times. Dropping it permanently.',\n self.request.headers['X-AppEngine-TaskName'], self.\n task_retry_count() + 1)\n self._drop_gracefully()\n return\n try:\n self._preprocess()\n self._preprocess_success = True\n except:\n self._preprocess_success = False\n logging.error('Preprocess task %s failed. Dropping it permanently.',\n self.request.headers['X-AppEngine-TaskName'])\n self._drop_gracefully()\n", "role": "user" }, { "content": "initialize", "role": "assistant" } ]
def x(self):
    self.response.set_status(httplib.SERVICE_UNAVAILABLE, 'Retry task')
    self.response.clear()
retry_task
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self.response.set_status(httplib.SERVICE_UNAVAILABLE, 'Retry task')\n self.response.clear()\n", "role": "user" }, { "content": "retry_task", "role": "assistant" } ]
def x(self):
    path = self.request.path
    base_path = path[:path.rfind('/')]
    if not base_path.endswith('/command'):
        raise BadRequestPathError(
            'Json handlers should have /command path prefix')
    return base_path[:base_path.rfind('/')]
base_path
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n path = self.request.path\n base_path = path[:path.rfind('/')]\n if not base_path.endswith('/command'):\n raise BadRequestPathError(\n 'Json handlers should have /command path prefix')\n return base_path[:base_path.rfind('/')]\n", "role": "user" }, { "content": "base_path", "role": "assistant" } ]
def x(self, n):
    if self._EOF:
        return ''
    while self._seg_index <= self._last_seg_index:
        result = self._read_from_seg(n)
        if result != '':
            return result
        else:
            self._next_seg()
    self._EOF = True
    return ''
read
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, n):\n if self._EOF:\n return ''\n while self._seg_index <= self._last_seg_index:\n result = self._read_from_seg(n)\n if result != '':\n return result\n else:\n self._next_seg()\n self._EOF = True\n return ''\n", "role": "user" }, { "content": "read", "role": "assistant" } ]
def x(self):
    if self._seg:
        self._seg.close()
    self._seg_index += 1
    if self._seg_index > self._last_seg_index:
        self._seg = None
        return
    filename = self._seg_prefix + str(self._seg_index)
    stat = cloudstorage.stat(filename)
    writer = output_writers._GoogleCloudStorageOutputWriter
    if writer._VALID_LENGTH not in stat.metadata:
        raise ValueError('Expect %s in metadata for file %s.' % (writer.
            _VALID_LENGTH, filename))
    self._seg_valid_length = int(stat.metadata[writer._VALID_LENGTH])
    if self._seg_valid_length > stat.st_size:
        raise ValueError(
            'Valid length %s is too big for file %s of length %s' % (self.
            _seg_valid_length, filename, stat.st_size))
    self._seg = cloudstorage.open(filename)
next_seg
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self._seg:\n self._seg.close()\n self._seg_index += 1\n if self._seg_index > self._last_seg_index:\n self._seg = None\n return\n filename = self._seg_prefix + str(self._seg_index)\n stat = cloudstorage.stat(filename)\n writer = output_writers._GoogleCloudStorageOutputWriter\n if writer._VALID_LENGTH not in stat.metadata:\n raise ValueError('Expect %s in metadata for file %s.' % (writer.\n _VALID_LENGTH, filename))\n self._seg_valid_length = int(stat.metadata[writer._VALID_LENGTH])\n if self._seg_valid_length > stat.st_size:\n raise ValueError(\n 'Valid length %s is too big for file %s of length %s' % (self.\n _seg_valid_length, filename, stat.st_size))\n self._seg = cloudstorage.open(filename)\n", "role": "user" }, { "content": "next_seg", "role": "assistant" } ]
def x(self, n):
    result = self._seg.read(size=n)
    if result == '':
        return result
    offset = self._seg.tell()
    if offset > self._seg_valid_length:
        extra = offset - self._seg_valid_length
        result = result[:-1 * extra]
    self._offset += len(result)
    return result
read_from_seg
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, n):\n result = self._seg.read(size=n)\n if result == '':\n return result\n offset = self._seg.tell()\n if offset > self._seg_valid_length:\n extra = offset - self._seg_valid_length\n result = result[:-1 * extra]\n self._offset += len(result)\n return result\n", "role": "user" }, { "content": "read_from_seg", "role": "assistant" } ]
def x(gettime=time.time):
    now_descending = int((_FUTURE_TIME - gettime()) * 100)
    request_id_hash = os.environ.get('REQUEST_ID_HASH')
    if not request_id_hash:
        request_id_hash = str(random.getrandbits(32))
    return '%d%s' % (now_descending, request_id_hash)
get_descending_key
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(gettime=time.time):\n now_descending = int((_FUTURE_TIME - gettime()) * 100)\n request_id_hash = os.environ.get('REQUEST_ID_HASH')\n if not request_id_hash:\n request_id_hash = str(random.getrandbits(32))\n return '%d%s' % (now_descending, request_id_hash)\n", "role": "user" }, { "content": "get_descending_key", "role": "assistant" } ]
def x():
    version = os.environ['CURRENT_VERSION_ID'].split('.')[0]
    default_host = os.environ['DEFAULT_VERSION_HOSTNAME']
    module = os.environ['CURRENT_MODULE_ID']
    if os.environ['CURRENT_MODULE_ID'] == 'default':
        return '%s.%s' % (version, default_host)
    return '%s.%s.%s' % (version, module, default_host)
get_task_host
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n version = os.environ['CURRENT_VERSION_ID'].split('.')[0]\n default_host = os.environ['DEFAULT_VERSION_HOSTNAME']\n module = os.environ['CURRENT_MODULE_ID']\n if os.environ['CURRENT_MODULE_ID'] == 'default':\n return '%s.%s' % (version, default_host)\n return '%s.%s.%s' % (version, module, default_host)\n", "role": "user" }, { "content": "get_task_host", "role": "assistant" } ]
def x(queue_name):
    if queue_name:
        return queue_name
    queue_name = os.environ.get('HTTP_X_APPENGINE_QUEUENAME', parameters.
        config.QUEUE_NAME)
    if len(queue_name) > 1 and queue_name[0:2] == '__':
        return parameters.config.QUEUE_NAME
    else:
        return queue_name
get_queue_name
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(queue_name):\n if queue_name:\n return queue_name\n queue_name = os.environ.get('HTTP_X_APPENGINE_QUEUENAME', parameters.\n config.QUEUE_NAME)\n if len(queue_name) > 1 and queue_name[0:2] == '__':\n return parameters.config.QUEUE_NAME\n else:\n return queue_name\n", "role": "user" }, { "content": "get_queue_name", "role": "assistant" } ]
def x(td):
    secs = td.seconds + td.days * 24 * 3600
    if td.microseconds:
        secs += 1
    return secs
total_seconds
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(td):\n secs = td.seconds + td.days * 24 * 3600\n if td.microseconds:\n secs += 1\n return secs\n", "role": "user" }, { "content": "total_seconds", "role": "assistant" } ]
def x(fq_name):
    resolved_name = for_name(fq_name)
    if isinstance(resolved_name, (type, types.ClassType)):
        return resolved_name()
    elif isinstance(resolved_name, types.MethodType):
        return getattr(resolved_name.im_class(), resolved_name.__name__)
    else:
        return resolved_name
handler_for_name
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(fq_name):\n resolved_name = for_name(fq_name)\n if isinstance(resolved_name, (type, types.ClassType)):\n return resolved_name()\n elif isinstance(resolved_name, types.MethodType):\n return getattr(resolved_name.im_class(), resolved_name.__name__)\n else:\n return resolved_name\n", "role": "user" }, { "content": "handler_for_name", "role": "assistant" } ]
def x(handler):
    if isinstance(handler, types.InstanceType) or (isinstance(handler,
        object) and not inspect.isfunction(handler) and not inspect.
        ismethod(handler)) and hasattr(handler, '__call__'):
        return pickle.dumps(handler)
    return None
try_serialize_handler
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(handler):\n if isinstance(handler, types.InstanceType) or (isinstance(handler,\n object) and not inspect.isfunction(handler) and not inspect.\n ismethod(handler)) and hasattr(handler, '__call__'):\n return pickle.dumps(handler)\n return None\n", "role": "user" }, { "content": "try_serialize_handler", "role": "assistant" } ]
def x(obj):
    if isinstance(obj, types.GeneratorType):
        return True
    CO_GENERATOR = 32
    return bool((inspect.isfunction(obj) or inspect.ismethod(obj)) and obj.
        func_code.co_flags & CO_GENERATOR)
is_generator
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(obj):\n if isinstance(obj, types.GeneratorType):\n return True\n CO_GENERATOR = 32\n return bool((inspect.isfunction(obj) or inspect.ismethod(obj)) and obj.\n func_code.co_flags & CO_GENERATOR)\n", "role": "user" }, { "content": "is_generator", "role": "assistant" } ]
def x(obj):
    if type(obj) is str:
        TRUTH_VALUE_SET = ['true', '1', 'yes', 't', 'on']
        return obj.lower() in TRUTH_VALUE_SET
    else:
        return bool(obj)
parse_bool
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(obj):\n if type(obj) is str:\n TRUTH_VALUE_SET = ['true', '1', 'yes', 't', 'on']\n return obj.lower() in TRUTH_VALUE_SET\n else:\n return bool(obj)\n", "role": "user" }, { "content": "parse_bool", "role": "assistant" } ]
def x(mapreduce_spec):
    force_writes = parse_bool(mapreduce_spec.params.get('force_writes',
        'false'))
    if force_writes:
        return datastore_rpc.Configuration(force_writes=force_writes)
    else:
        return datastore_rpc.Configuration()
create_datastore_write_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(mapreduce_spec):\n force_writes = parse_bool(mapreduce_spec.params.get('force_writes',\n 'false'))\n if force_writes:\n return datastore_rpc.Configuration(force_writes=force_writes)\n else:\n return datastore_rpc.Configuration()\n", "role": "user" }, { "content": "create_datastore_write_config", "role": "assistant" } ]
def x():
    ndb_ctx = ndb.get_context()
    ndb_ctx.set_cache_policy(lambda key: False)
    ndb_ctx.set_memcache_policy(lambda key: False)
set_ndb_cache_policy
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n ndb_ctx = ndb.get_context()\n ndb_ctx.set_cache_policy(lambda key: False)\n ndb_ctx.set_memcache_policy(lambda key: False)\n", "role": "user" }, { "content": "set_ndb_cache_policy", "role": "assistant" } ]
def x(obj):
    if obj is None:
        return obj
    if inspect.isclass(obj) or inspect.isfunction(obj):
        fetched = getattr(sys.modules[obj.__module__], obj.__name__, None)
        if fetched is None:
            raise ValueError(
                'Object %r must be defined on the top level of a module.' % obj
                )
        return '%s.%s' % (obj.__module__, obj.__name__)
    raise TypeError('Unexpected type %s.' % type(obj))
obj_to_path
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(obj):\n if obj is None:\n return obj\n if inspect.isclass(obj) or inspect.isfunction(obj):\n fetched = getattr(sys.modules[obj.__module__], obj.__name__, None)\n if fetched is None:\n raise ValueError(\n 'Object %r must be defined on the top level of a module.' % obj\n )\n return '%s.%s' % (obj.__module__, obj.__name__)\n raise TypeError('Unexpected type %s.' % type(obj))\n", "role": "user" }, { "content": "obj_to_path", "role": "assistant" } ]
def x(prefix, items):
    items_no_prefix = []
    for item in items:
        if item.startswith(prefix):
            items_no_prefix.append(item[len(prefix):])
        else:
            items_no_prefix.append(item)
    return items_no_prefix
strip_prefix_from_items
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(prefix, items):\n items_no_prefix = []\n for item in items:\n if item.startswith(prefix):\n items_no_prefix.append(item[len(prefix):])\n else:\n items_no_prefix.append(item)\n return items_no_prefix\n", "role": "user" }, { "content": "strip_prefix_from_items", "role": "assistant" } ]
def x(hooks, method, task, queue_name):
    if hooks is not None:
        try:
            getattr(hooks, method)(task, queue_name)
        except NotImplementedError:
            return False
        return True
    return False
run_task_hook
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(hooks, method, task, queue_name):\n if hooks is not None:\n try:\n getattr(hooks, method)(task, queue_name)\n except NotImplementedError:\n return False\n return True\n return False\n", "role": "user" }, { "content": "run_task_hook", "role": "assistant" } ]
def x(self):
    shard_id = self.request.headers[util._MR_SHARD_ID_TASK_HEADER]
    mr_id = self.request.headers[util._MR_ID_TASK_HEADER]
    shard_state, mr_state = db.get([model.ShardState.get_key_by_shard_id(
        shard_id), model.MapreduceState.get_key_by_job_id(mr_id)])
    if shard_state and shard_state.active:
        shard_state.set_for_failure()
        config = util.create_datastore_write_config(mr_state.mapreduce_spec)
        shard_state.put(config=config)
drop_gracefully
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n shard_id = self.request.headers[util._MR_SHARD_ID_TASK_HEADER]\n mr_id = self.request.headers[util._MR_ID_TASK_HEADER]\n shard_state, mr_state = db.get([model.ShardState.get_key_by_shard_id(\n shard_id), model.MapreduceState.get_key_by_job_id(mr_id)])\n if shard_state and shard_state.active:\n shard_state.set_for_failure()\n config = util.create_datastore_write_config(mr_state.mapreduce_spec)\n shard_state.put(config=config)\n", "role": "user" }, { "content": "drop_gracefully", "role": "assistant" } ]
def x(self, shard_state, tstate):
    if not shard_state:
        logging.warning(
            'State not found for shard %s; Possible spurious task execution. Dropping this task.'
            , tstate.shard_id)
        return self._TASK_DIRECTIVE.DROP_TASK
    if not shard_state.active:
        logging.warning(
            'Shard %s is not active. Possible spurious task execution. Dropping this task.'
            , tstate.shard_id)
        logging.warning(str(shard_state))
        return self._TASK_DIRECTIVE.DROP_TASK
    if shard_state.retries > tstate.retries:
        logging.warning(
            'Got shard %s from previous shard retry %s. Possible spurious task execution. Dropping this task.'
            , tstate.shard_id, tstate.retries)
        logging.warning(str(shard_state))
        return self._TASK_DIRECTIVE.DROP_TASK
    elif shard_state.retries < tstate.retries:
        logging.warning(
            'ShardState for %s is behind slice. Waiting for it to catch up',
            shard_state.shard_id)
        return self._TASK_DIRECTIVE.RETRY_TASK
    if shard_state.slice_id > tstate.slice_id:
        logging.warning('Task %s-%s is behind ShardState %s. Dropping task.',
            tstate.shard_id, tstate.slice_id, shard_state.slice_id)
        return self._TASK_DIRECTIVE.DROP_TASK
    elif shard_state.slice_id < tstate.slice_id:
        logging.warning(
            'Task %s-%s is ahead of ShardState %s. Waiting for it to catch up.'
            , tstate.shard_id, tstate.slice_id, shard_state.slice_id)
        return self._TASK_DIRECTIVE.RETRY_TASK
    if shard_state.slice_start_time:
        countdown = self._wait_time(shard_state, parameters._LEASE_DURATION_SEC
            )
        if countdown > 0:
            logging.warning(
                'Last retry of slice %s-%s may be still running.Will try again in %s seconds'
                , tstate.shard_id, tstate.slice_id, countdown)
            time.sleep(countdown)
            return self._TASK_DIRECTIVE.RETRY_TASK
        elif self._wait_time(shard_state, parameters._MAX_LEASE_DURATION_SEC):
            if not self._has_old_request_ended(shard_state):
                logging.warning(
                    'Last retry of slice %s-%s is still in flight with request_id %s. Will try again later.'
                    , tstate.shard_id, tstate.slice_id, shard_state.
                    slice_request_id)
                return self._TASK_DIRECTIVE.RETRY_TASK
            else:
                logging.warning(
                    'Last retry of slice %s-%s has no log entry and hastimed out after %s seconds'
                    , tstate.shard_id, tstate.slice_id, parameters.
                    _MAX_LEASE_DURATION_SEC)
    config = util.create_datastore_write_config(tstate.mapreduce_spec)

    @db.transactional(retries=5)
    def x():
        fresh_state = model.ShardState.get_by_shard_id(tstate.shard_id)
        if not fresh_state:
            logging.warning('ShardState missing.')
            raise db.Rollback()
        if (fresh_state.active and fresh_state.slice_id == shard_state.
            slice_id and fresh_state.slice_start_time == shard_state.
            slice_start_time):
            shard_state.slice_start_time = datetime.datetime.now()
            shard_state.slice_request_id = os.environ.get('REQUEST_LOG_ID')
            shard_state.acquired_once = True
            shard_state.put(config=config)
            return self._TASK_DIRECTIVE.PROCEED_TASK
        else:
            logging.warning(
                'Contention on slice %s-%s execution. Will retry again.',
                tstate.shard_id, tstate.slice_id)
            time.sleep(random.randrange(1, 5))
            return self._TASK_DIRECTIVE.RETRY_TASK
    return _tx()
try_acquire_lease
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shard_state, tstate):\n if not shard_state:\n logging.warning(\n 'State not found for shard %s; Possible spurious task execution. Dropping this task.'\n , tstate.shard_id)\n return self._TASK_DIRECTIVE.DROP_TASK\n if not shard_state.active:\n logging.warning(\n 'Shard %s is not active. Possible spurious task execution. Dropping this task.'\n , tstate.shard_id)\n logging.warning(str(shard_state))\n return self._TASK_DIRECTIVE.DROP_TASK\n if shard_state.retries > tstate.retries:\n logging.warning(\n 'Got shard %s from previous shard retry %s. Possible spurious task execution. Dropping this task.'\n , tstate.shard_id, tstate.retries)\n logging.warning(str(shard_state))\n return self._TASK_DIRECTIVE.DROP_TASK\n elif shard_state.retries < tstate.retries:\n logging.warning(\n 'ShardState for %s is behind slice. Waiting for it to catch up',\n shard_state.shard_id)\n return self._TASK_DIRECTIVE.RETRY_TASK\n if shard_state.slice_id > tstate.slice_id:\n logging.warning('Task %s-%s is behind ShardState %s. Dropping task.',\n tstate.shard_id, tstate.slice_id, shard_state.slice_id)\n return self._TASK_DIRECTIVE.DROP_TASK\n elif shard_state.slice_id < tstate.slice_id:\n logging.warning(\n 'Task %s-%s is ahead of ShardState %s. Waiting for it to catch up.'\n , tstate.shard_id, tstate.slice_id, shard_state.slice_id)\n return self._TASK_DIRECTIVE.RETRY_TASK\n if shard_state.slice_start_time:\n countdown = self._wait_time(shard_state, parameters._LEASE_DURATION_SEC\n )\n if countdown > 0:\n logging.warning(\n 'Last retry of slice %s-%s may be still running.Will try again in %s seconds'\n , tstate.shard_id, tstate.slice_id, countdown)\n time.sleep(countdown)\n return self._TASK_DIRECTIVE.RETRY_TASK\n elif self._wait_time(shard_state, parameters._MAX_LEASE_DURATION_SEC):\n if not self._has_old_request_ended(shard_state):\n logging.warning(\n 'Last retry of slice %s-%s is still in flight with request_id %s. Will try again later.'\n , tstate.shard_id, tstate.slice_id, shard_state.\n slice_request_id)\n return self._TASK_DIRECTIVE.RETRY_TASK\n else:\n logging.warning(\n 'Last retry of slice %s-%s has no log entry and hastimed out after %s seconds'\n , tstate.shard_id, tstate.slice_id, parameters.\n _MAX_LEASE_DURATION_SEC)\n config = util.create_datastore_write_config(tstate.mapreduce_spec)\n\n @db.transactional(retries=5)\n def x():\n fresh_state = model.ShardState.get_by_shard_id(tstate.shard_id)\n if not fresh_state:\n logging.warning('ShardState missing.')\n raise db.Rollback()\n if (fresh_state.active and fresh_state.slice_id == shard_state.\n slice_id and fresh_state.slice_start_time == shard_state.\n slice_start_time):\n shard_state.slice_start_time = datetime.datetime.now()\n shard_state.slice_request_id = os.environ.get('REQUEST_LOG_ID')\n shard_state.acquired_once = True\n shard_state.put(config=config)\n return self._TASK_DIRECTIVE.PROCEED_TASK\n else:\n logging.warning(\n 'Contention on slice %s-%s execution. Will retry again.',\n tstate.shard_id, tstate.slice_id)\n time.sleep(random.randrange(1, 5))\n return self._TASK_DIRECTIVE.RETRY_TASK\n return _tx()\n", "role": "user" }, { "content": "try_acquire_lease", "role": "assistant" } ]
def x(self, shard_state):
    assert shard_state.slice_start_time is not None
    assert shard_state.slice_request_id is not None
    request_ids = [shard_state.slice_request_id]
    logs = None
    try:
        logs = list(logservice.fetch(request_ids=request_ids))
    except (apiproxy_errors.FeatureNotEnabledError, apiproxy_errors.
        CapabilityDisabledError) as e:
        logging.warning('Ignoring exception: %s', e)
    if not logs or not logs[0].finished:
        return False
    return True
has_old_request_ended
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shard_state):\n assert shard_state.slice_start_time is not None\n assert shard_state.slice_request_id is not None\n request_ids = [shard_state.slice_request_id]\n logs = None\n try:\n logs = list(logservice.fetch(request_ids=request_ids))\n except (apiproxy_errors.FeatureNotEnabledError, apiproxy_errors.\n CapabilityDisabledError) as e:\n logging.warning('Ignoring exception: %s', e)\n if not logs or not logs[0].finished:\n return False\n return True\n", "role": "user" }, { "content": "has_old_request_ended", "role": "assistant" } ]
def x(self, shard_state, secs, now=datetime.datetime.now):
    assert shard_state.slice_start_time is not None
    delta = now() - shard_state.slice_start_time
    duration = datetime.timedelta(seconds=secs)
    if delta < duration:
        return util.total_seconds(duration - delta)
    else:
        return 0
wait_time
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shard_state, secs, now=datetime.datetime.now):\n assert shard_state.slice_start_time is not None\n delta = now() - shard_state.slice_start_time\n duration = datetime.timedelta(seconds=secs)\n if delta < duration:\n return util.total_seconds(duration - delta)\n else:\n return 0\n", "role": "user" }, { "content": "wait_time", "role": "assistant" } ]
def x(self, obj, slice_id, last_slice=False, begin_slice=True, shard_ctx=
    None, slice_ctx=None):
    if obj is None or not isinstance(obj, shard_life_cycle._ShardLifeCycle):
        return
    shard_context = shard_ctx or self.shard_context
    slice_context = slice_ctx or self.slice_context
    if begin_slice:
        if slice_id == 0:
            obj.begin_shard(shard_context)
        obj.begin_slice(slice_context)
    else:
        obj.end_slice(slice_context)
        if last_slice:
            obj.end_shard(shard_context)
maintain_LC
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, obj, slice_id, last_slice=False, begin_slice=True, shard_ctx=\n None, slice_ctx=None):\n if obj is None or not isinstance(obj, shard_life_cycle._ShardLifeCycle):\n return\n shard_context = shard_ctx or self.shard_context\n slice_context = slice_ctx or self.slice_context\n if begin_slice:\n if slice_id == 0:\n obj.begin_shard(shard_context)\n obj.begin_slice(slice_context)\n else:\n obj.end_slice(slice_context)\n if last_slice:\n obj.end_shard(shard_context)\n", "role": "user" }, { "content": "maintain_LC", "role": "assistant" } ]
def x(self, shard_state, tstate, task_directive):
    task_directive = self._set_state(shard_state, tstate, task_directive)
    self._save_state_and_schedule_next(shard_state, tstate, task_directive)
    context.Context._set(None)
return
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shard_state, tstate, task_directive):\n task_directive = self._set_state(shard_state, tstate, task_directive)\n self._save_state_and_schedule_next(shard_state, tstate, task_directive)\n context.Context._set(None)\n", "role": "user" }, { "content": "return", "role": "assistant" } ]
def x(self, input_reader, shard_state, tstate, ctx):
    processing_limit = self._processing_limit(tstate.mapreduce_spec)
    if processing_limit == 0:
        return
    finished_shard = True
    iterator = iter(input_reader)
    while True:
        try:
            entity = iterator.next()
        except StopIteration:
            break
        if isinstance(entity, db.Model):
            shard_state.last_work_item = repr(entity.key())
        elif isinstance(entity, ndb.Model):
            shard_state.last_work_item = repr(entity.key)
        else:
            shard_state.last_work_item = repr(entity)[:100]
        processing_limit -= 1
        if not self._process_datum(entity, input_reader, ctx, tstate):
            finished_shard = False
            break
        elif processing_limit == 0:
            finished_shard = False
            break
    self.slice_context.incr(context.COUNTER_MAPPER_WALLTIME_MS, int((self.
        _time() - self._start_time) * 1000))
    return finished_shard
process_inputs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, input_reader, shard_state, tstate, ctx):\n processing_limit = self._processing_limit(tstate.mapreduce_spec)\n if processing_limit == 0:\n return\n finished_shard = True\n iterator = iter(input_reader)\n while True:\n try:\n entity = iterator.next()\n except StopIteration:\n break\n if isinstance(entity, db.Model):\n shard_state.last_work_item = repr(entity.key())\n elif isinstance(entity, ndb.Model):\n shard_state.last_work_item = repr(entity.key)\n else:\n shard_state.last_work_item = repr(entity)[:100]\n processing_limit -= 1\n if not self._process_datum(entity, input_reader, ctx, tstate):\n finished_shard = False\n break\n elif processing_limit == 0:\n finished_shard = False\n break\n self.slice_context.incr(context.COUNTER_MAPPER_WALLTIME_MS, int((self.\n _time() - self._start_time) * 1000))\n return finished_shard\n", "role": "user" }, { "content": "process_inputs", "role": "assistant" } ]
def x(self, data, input_reader, ctx, transient_shard_state):
    if data is not input_readers.ALLOW_CHECKPOINT:
        self.slice_context.incr(context.COUNTER_MAPPER_CALLS)
        handler = transient_shard_state.handler
        if isinstance(handler, map_job.Mapper):
            handler(self.slice_context, data)
        else:
            if input_reader.expand_parameters:
                result = handler(*data)
            else:
                result = handler(data)
            if util.is_generator(result):
                for output in result:
                    if isinstance(output, operation.Operation):
                        output(ctx)
                    else:
                        output_writer = transient_shard_state.output_writer
                        if not output_writer:
                            logging.warning(
                                'Handler yielded %s, but no output writer is set.'
                                , output)
                        else:
                            output_writer.write(output)
    if self._time(
        ) - self._start_time >= parameters.config._SLICE_DURATION_SEC:
        return False
    return True
process_datum
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data, input_reader, ctx, transient_shard_state):\n if data is not input_readers.ALLOW_CHECKPOINT:\n self.slice_context.incr(context.COUNTER_MAPPER_CALLS)\n handler = transient_shard_state.handler\n if isinstance(handler, map_job.Mapper):\n handler(self.slice_context, data)\n else:\n if input_reader.expand_parameters:\n result = handler(*data)\n else:\n result = handler(data)\n if util.is_generator(result):\n for output in result:\n if isinstance(output, operation.Operation):\n output(ctx)\n else:\n output_writer = transient_shard_state.output_writer\n if not output_writer:\n logging.warning(\n 'Handler yielded %s, but no output writer is set.'\n , output)\n else:\n output_writer.write(output)\n if self._time(\n ) - self._start_time >= parameters.config._SLICE_DURATION_SEC:\n return False\n return True\n", "role": "user" }, { "content": "process_datum", "role": "assistant" } ]
def x(self, shard_state, tstate, task_directive):
    if task_directive in (self._TASK_DIRECTIVE.RETRY_TASK, self.
        _TASK_DIRECTIVE.DROP_TASK):
        return task_directive
    if task_directive == self._TASK_DIRECTIVE.ABORT_SHARD:
        shard_state.set_for_abort()
        return task_directive
    if task_directive == self._TASK_DIRECTIVE.PROCEED_TASK:
        shard_state.advance_for_next_slice()
        tstate.advance_for_next_slice()
        return task_directive
    if task_directive == self._TASK_DIRECTIVE.RECOVER_SLICE:
        tstate.advance_for_next_slice(recovery_slice=True)
        shard_state.advance_for_next_slice(recovery_slice=True)
        return task_directive
    if task_directive == self._TASK_DIRECTIVE.RETRY_SLICE:
        task_directive = self._attempt_slice_retry(shard_state, tstate)
    if task_directive == self._TASK_DIRECTIVE.RETRY_SHARD:
        task_directive = self._attempt_shard_retry(shard_state, tstate)
    if task_directive == self._TASK_DIRECTIVE.FAIL_TASK:
        shard_state.set_for_failure()
    return task_directive
set_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shard_state, tstate, task_directive):\n if task_directive in (self._TASK_DIRECTIVE.RETRY_TASK, self.\n _TASK_DIRECTIVE.DROP_TASK):\n return task_directive\n if task_directive == self._TASK_DIRECTIVE.ABORT_SHARD:\n shard_state.set_for_abort()\n return task_directive\n if task_directive == self._TASK_DIRECTIVE.PROCEED_TASK:\n shard_state.advance_for_next_slice()\n tstate.advance_for_next_slice()\n return task_directive\n if task_directive == self._TASK_DIRECTIVE.RECOVER_SLICE:\n tstate.advance_for_next_slice(recovery_slice=True)\n shard_state.advance_for_next_slice(recovery_slice=True)\n return task_directive\n if task_directive == self._TASK_DIRECTIVE.RETRY_SLICE:\n task_directive = self._attempt_slice_retry(shard_state, tstate)\n if task_directive == self._TASK_DIRECTIVE.RETRY_SHARD:\n task_directive = self._attempt_shard_retry(shard_state, tstate)\n if task_directive == self._TASK_DIRECTIVE.FAIL_TASK:\n shard_state.set_for_failure()\n return task_directive\n", "role": "user" }, { "content": "set_state", "role": "assistant" } ]
def x(self, shard_state, tstate):
    mapper_spec = tstate.mapreduce_spec.mapper
    if not (tstate.output_writer and tstate.output_writer.
        _supports_slice_recovery(mapper_spec)):
        return self._TASK_DIRECTIVE.PROCEED_TASK
    tstate.output_writer = tstate.output_writer._recover(tstate.
        mapreduce_spec, shard_state.shard_number, shard_state.retries + 1)
    return self._TASK_DIRECTIVE.RECOVER_SLICE
attempt_slice_recovery
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shard_state, tstate):\n mapper_spec = tstate.mapreduce_spec.mapper\n if not (tstate.output_writer and tstate.output_writer.\n _supports_slice_recovery(mapper_spec)):\n return self._TASK_DIRECTIVE.PROCEED_TASK\n tstate.output_writer = tstate.output_writer._recover(tstate.\n mapreduce_spec, shard_state.shard_number, shard_state.retries + 1)\n return self._TASK_DIRECTIVE.RECOVER_SLICE\n", "role": "user" }, { "content": "attempt_slice_recovery", "role": "assistant" } ]
def x(self, shard_state, tstate):
    shard_attempts = shard_state.retries + 1
    if shard_attempts >= parameters.config.SHARD_MAX_ATTEMPTS:
        logging.warning('Shard attempt %s exceeded %s max attempts.',
            shard_attempts, parameters.config.SHARD_MAX_ATTEMPTS)
        return self._TASK_DIRECTIVE.FAIL_TASK
    if tstate.output_writer and not tstate.output_writer._supports_shard_retry(
        tstate):
        logging.warning('Output writer %s does not support shard retry.',
            tstate.output_writer.__class__.__name__)
        return self._TASK_DIRECTIVE.FAIL_TASK
    shard_state.reset_for_retry()
    logging.warning('Shard %s attempt %s failed with up to %s attempts.',
        shard_state.shard_id, shard_state.retries, parameters.config.
        SHARD_MAX_ATTEMPTS)
    output_writer = None
    if tstate.output_writer:
        output_writer = tstate.output_writer.create(tstate.mapreduce_spec,
            shard_state.shard_number, shard_attempts + 1)
    tstate.reset_for_retry(output_writer)
    return self._TASK_DIRECTIVE.RETRY_SHARD
attempt_shard_retry
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shard_state, tstate):\n shard_attempts = shard_state.retries + 1\n if shard_attempts >= parameters.config.SHARD_MAX_ATTEMPTS:\n logging.warning('Shard attempt %s exceeded %s max attempts.',\n shard_attempts, parameters.config.SHARD_MAX_ATTEMPTS)\n return self._TASK_DIRECTIVE.FAIL_TASK\n if tstate.output_writer and not tstate.output_writer._supports_shard_retry(\n tstate):\n logging.warning('Output writer %s does not support shard retry.',\n tstate.output_writer.__class__.__name__)\n return self._TASK_DIRECTIVE.FAIL_TASK\n shard_state.reset_for_retry()\n logging.warning('Shard %s attempt %s failed with up to %s attempts.',\n shard_state.shard_id, shard_state.retries, parameters.config.\n SHARD_MAX_ATTEMPTS)\n output_writer = None\n if tstate.output_writer:\n output_writer = tstate.output_writer.create(tstate.mapreduce_spec,\n shard_state.shard_number, shard_attempts + 1)\n tstate.reset_for_retry(output_writer)\n return self._TASK_DIRECTIVE.RETRY_SHARD\n", "role": "user" }, { "content": "attempt_shard_retry", "role": "assistant" } ]
def x(self, shard_state, tstate):
    if (shard_state.slice_retries + 1 < parameters.config.
        TASK_MAX_DATA_PROCESSING_ATTEMPTS):
        logging.warning(
            'Slice %s %s failed for the %s of up to %s attempts (%s of %s taskqueue execution attempts). Will retry now.'
            , tstate.shard_id, tstate.slice_id, shard_state.slice_retries +
            1, parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS, self.
            task_retry_count() + 1, parameters.config.TASK_MAX_ATTEMPTS)
        sys.exc_clear()
        self._try_free_lease(shard_state, slice_retry=True)
        return self._TASK_DIRECTIVE.RETRY_SLICE
    if parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS > 0:
        logging.warning('Slice attempt %s exceeded %s max attempts.', self.
            task_retry_count() + 1, parameters.config.
            TASK_MAX_DATA_PROCESSING_ATTEMPTS)
    return self._TASK_DIRECTIVE.RETRY_SHARD
attempt_slice_retry
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, shard_state, tstate):\n if (shard_state.slice_retries + 1 < parameters.config.\n TASK_MAX_DATA_PROCESSING_ATTEMPTS):\n logging.warning(\n 'Slice %s %s failed for the %s of up to %s attempts (%s of %s taskqueue execution attempts). Will retry now.'\n , tstate.shard_id, tstate.slice_id, shard_state.slice_retries +\n 1, parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS, self.\n task_retry_count() + 1, parameters.config.TASK_MAX_ATTEMPTS)\n sys.exc_clear()\n self._try_free_lease(shard_state, slice_retry=True)\n return self._TASK_DIRECTIVE.RETRY_SLICE\n if parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS > 0:\n logging.warning('Slice attempt %s exceeded %s max attempts.', self.\n task_retry_count() + 1, parameters.config.\n TASK_MAX_DATA_PROCESSING_ATTEMPTS)\n return self._TASK_DIRECTIVE.RETRY_SHARD\n", "role": "user" }, { "content": "attempt_slice_retry", "role": "assistant" } ]
def x(self, spec):
    countdown = 0
    if self._processing_limit(spec) != -1:
        countdown = max(int(parameters.config._SLICE_DURATION_SEC - (self.
            _time() - self._start_time)), 0)
    return countdown
get_countdown_for_next_slice
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, spec):\n countdown = 0\n if self._processing_limit(spec) != -1:\n countdown = max(int(parameters.config._SLICE_DURATION_SEC - (self.\n _time() - self._start_time)), 0)\n return countdown\n", "role": "user" }, { "content": "get_countdown_for_next_slice", "role": "assistant" } ]
def x(cls, tstate, shard_state, eta=None, countdown=None):
    base_path = tstate.base_path
    task_name = MapperWorkerCallbackHandler.get_task_name(tstate.shard_id,
        tstate.slice_id, tstate.retries)
    headers = util._get_task_headers(tstate.mapreduce_spec.mapreduce_id)
    headers[util._MR_SHARD_ID_TASK_HEADER] = tstate.shard_id
    worker_task = model.HugeTask(url=base_path + '/worker_callback/' +
        tstate.shard_id, params=tstate.to_dict(), name=task_name, eta=eta,
        countdown=countdown, parent=shard_state, headers=headers)
    return worker_task
state_to_task
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, tstate, shard_state, eta=None, countdown=None):\n base_path = tstate.base_path\n task_name = MapperWorkerCallbackHandler.get_task_name(tstate.shard_id,\n tstate.slice_id, tstate.retries)\n headers = util._get_task_headers(tstate.mapreduce_spec.mapreduce_id)\n headers[util._MR_SHARD_ID_TASK_HEADER] = tstate.shard_id\n worker_task = model.HugeTask(url=base_path + '/worker_callback/' +\n tstate.shard_id, params=tstate.to_dict(), name=task_name, eta=eta,\n countdown=countdown, parent=shard_state, headers=headers)\n return worker_task\n", "role": "user" }, { "content": "state_to_task", "role": "assistant" } ]
def x(self, spec):
    processing_rate = float(spec.mapper.params.get('processing_rate', 0))
    slice_processing_limit = -1
    if processing_rate > 0:
        slice_processing_limit = int(math.ceil(parameters.config.
            _SLICE_DURATION_SEC * processing_rate / int(spec.mapper.
            shard_count)))
    return slice_processing_limit
processing_limit
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, spec):\n processing_rate = float(spec.mapper.params.get('processing_rate', 0))\n slice_processing_limit = -1\n if processing_rate > 0:\n slice_processing_limit = int(math.ceil(parameters.config.\n _SLICE_DURATION_SEC * processing_rate / int(spec.mapper.\n shard_count)))\n return slice_processing_limit\n", "role": "user" }, { "content": "processing_limit", "role": "assistant" } ]
def x(cls, shard_state, tstate, queue_name=None, eta=None, countdown=None):
    queue_name = queue_name or os.environ.get('HTTP_X_APPENGINE_QUEUENAME',
        'default')
    task = cls._state_to_task(tstate, shard_state, eta, countdown)
    cls._add_task(task, tstate.mapreduce_spec, queue_name)
schedule_slice
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, shard_state, tstate, queue_name=None, eta=None, countdown=None):\n queue_name = queue_name or os.environ.get('HTTP_X_APPENGINE_QUEUENAME',\n 'default')\n task = cls._state_to_task(tstate, shard_state, eta, countdown)\n cls._add_task(task, tstate.mapreduce_spec, queue_name)\n", "role": "user" }, { "content": "schedule_slice", "role": "assistant" } ]
def x(self):
    mr_id = self.request.headers[util._MR_ID_TASK_HEADER]
    state = model.MapreduceState.get_by_job_id(mr_id)
    if not state or not state.active:
        return
    state.active = False
    state.result_status = model.MapreduceState.RESULT_FAILED
    config = util.create_datastore_write_config(state.mapreduce_spec)
    puts = []
    for ss in model.ShardState.find_all_by_mapreduce_state(state):
        if ss.active:
            ss.set_for_failure()
            puts.append(ss)
            if len(puts) > model.ShardState._MAX_STATES_IN_MEMORY:
                db.put(puts, config=config)
                puts = []
    db.put(puts, config=config)
    db.put(state, config=config)
drop_gracefully
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n mr_id = self.request.headers[util._MR_ID_TASK_HEADER]\n state = model.MapreduceState.get_by_job_id(mr_id)\n if not state or not state.active:\n return\n state.active = False\n state.result_status = model.MapreduceState.RESULT_FAILED\n config = util.create_datastore_write_config(state.mapreduce_spec)\n puts = []\n for ss in model.ShardState.find_all_by_mapreduce_state(state):\n if ss.active:\n ss.set_for_failure()\n puts.append(ss)\n if len(puts) > model.ShardState._MAX_STATES_IN_MEMORY:\n db.put(puts, config=config)\n puts = []\n db.put(puts, config=config)\n db.put(state, config=config)\n", "role": "user" }, { "content": "drop_gracefully", "role": "assistant" } ]
def x(self):
    spec = model.MapreduceSpec.from_json_str(self.request.get('mapreduce_spec')
        )
    state, control = db.get([model.MapreduceState.get_key_by_job_id(spec.
        mapreduce_id), model.MapreduceControl.get_key_by_job_id(spec.
        mapreduce_id)])
    if not state:
        logging.warning(
            "State not found for MR '%s'; dropping controller task.", spec.
            mapreduce_id)
        return
    if not state.active:
        logging.warning(
            'MR %r is not active. Looks like spurious controller task execution.'
            , spec.mapreduce_id)
        self._clean_up_mr(spec)
        return
    shard_states = model.ShardState.find_all_by_mapreduce_state(state)
    self._update_state_from_shard_states(state, shard_states, control)
    if state.active:
        ControllerCallbackHandler.reschedule(state, spec, self.serial_id() + 1)
handle
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n spec = model.MapreduceSpec.from_json_str(self.request.get('mapreduce_spec')\n )\n state, control = db.get([model.MapreduceState.get_key_by_job_id(spec.\n mapreduce_id), model.MapreduceControl.get_key_by_job_id(spec.\n mapreduce_id)])\n if not state:\n logging.warning(\n \"State not found for MR '%s'; dropping controller task.\", spec.\n mapreduce_id)\n return\n if not state.active:\n logging.warning(\n 'MR %r is not active. Looks like spurious controller task execution.'\n , spec.mapreduce_id)\n self._clean_up_mr(spec)\n return\n shard_states = model.ShardState.find_all_by_mapreduce_state(state)\n self._update_state_from_shard_states(state, shard_states, control)\n if state.active:\n ControllerCallbackHandler.reschedule(state, spec, self.serial_id() + 1)\n", "role": "user" }, { "content": "handle", "role": "assistant" } ]
def x(self, state, shard_states, control):
    state.active_shards, state.aborted_shards, state.failed_shards = 0, 0, 0
    total_shards = 0
    processed_counts = []
    processed_status = []
    state.counters_map.clear()
    for s in shard_states:
        total_shards += 1
        status = 'unknown'
        if s.active:
            state.active_shards += 1
            status = 'running'
        if s.result_status == model.ShardState.RESULT_SUCCESS:
            status = 'success'
        elif s.result_status == model.ShardState.RESULT_ABORTED:
            state.aborted_shards += 1
            status = 'aborted'
        elif s.result_status == model.ShardState.RESULT_FAILED:
            state.failed_shards += 1
            status = 'failed'
        state.counters_map.add_map(s.counters_map)
        processed_counts.append(s.counters_map.get(context.
            COUNTER_MAPPER_CALLS))
        processed_status.append(status)
    state.set_processed_counts(processed_counts, processed_status)
    state.last_poll_time = datetime.datetime.utcfromtimestamp(self._time())
    spec = state.mapreduce_spec
    if total_shards != spec.mapper.shard_count:
        logging.error(
            "Found %d shard states. Expect %d. Issuing abort command to job '%s'"
            , total_shards, spec.mapper.shard_count, spec.mapreduce_id)
        model.MapreduceControl.abort(spec.mapreduce_id)
    state.active = bool(state.active_shards)
    if not control and (state.failed_shards or state.aborted_shards):
        model.MapreduceControl.abort(spec.mapreduce_id)
    if not state.active:
        if state.failed_shards or not total_shards:
            state.result_status = model.MapreduceState.RESULT_FAILED
        elif state.aborted_shards:
            state.result_status = model.MapreduceState.RESULT_ABORTED
        else:
            state.result_status = model.MapreduceState.RESULT_SUCCESS
        self._finalize_outputs(spec, state)
        self._finalize_job(spec, state)
    else:

        @db.transactional(retries=5)
        def x():
            fresh_state = model.MapreduceState.get_by_job_id(spec.mapreduce_id)
            if not fresh_state.active:
                logging.warning(
                    'Job %s is not active. Looks like spurious task execution. Dropping controller task.'
                    , spec.mapreduce_id)
                return
            config = util.create_datastore_write_config(spec)
            state.put(config=config)
        _put_state()
update_state_from_shard_states
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, state, shard_states, control):\n state.active_shards, state.aborted_shards, state.failed_shards = 0, 0, 0\n total_shards = 0\n processed_counts = []\n processed_status = []\n state.counters_map.clear()\n for s in shard_states:\n total_shards += 1\n status = 'unknown'\n if s.active:\n state.active_shards += 1\n status = 'running'\n if s.result_status == model.ShardState.RESULT_SUCCESS:\n status = 'success'\n elif s.result_status == model.ShardState.RESULT_ABORTED:\n state.aborted_shards += 1\n status = 'aborted'\n elif s.result_status == model.ShardState.RESULT_FAILED:\n state.failed_shards += 1\n status = 'failed'\n state.counters_map.add_map(s.counters_map)\n processed_counts.append(s.counters_map.get(context.\n COUNTER_MAPPER_CALLS))\n processed_status.append(status)\n state.set_processed_counts(processed_counts, processed_status)\n state.last_poll_time = datetime.datetime.utcfromtimestamp(self._time())\n spec = state.mapreduce_spec\n if total_shards != spec.mapper.shard_count:\n logging.error(\n \"Found %d shard states. Expect %d. Issuing abort command to job '%s'\"\n , total_shards, spec.mapper.shard_count, spec.mapreduce_id)\n model.MapreduceControl.abort(spec.mapreduce_id)\n state.active = bool(state.active_shards)\n if not control and (state.failed_shards or state.aborted_shards):\n model.MapreduceControl.abort(spec.mapreduce_id)\n if not state.active:\n if state.failed_shards or not total_shards:\n state.result_status = model.MapreduceState.RESULT_FAILED\n elif state.aborted_shards:\n state.result_status = model.MapreduceState.RESULT_ABORTED\n else:\n state.result_status = model.MapreduceState.RESULT_SUCCESS\n self._finalize_outputs(spec, state)\n self._finalize_job(spec, state)\n else:\n\n @db.transactional(retries=5)\n def x():\n fresh_state = model.MapreduceState.get_by_job_id(spec.mapreduce_id)\n if not fresh_state.active:\n logging.warning(\n 'Job %s is not active. Looks like spurious task execution. Dropping controller task.'\n , spec.mapreduce_id)\n return\n config = util.create_datastore_write_config(spec)\n state.put(config=config)\n _put_state()\n", "role": "user" }, { "content": "update_state_from_shard_states", "role": "assistant" } ]
def x(cls, mapreduce_spec, mapreduce_state):
    if mapreduce_spec.mapper.output_writer_class(
        ) and mapreduce_state.result_status == model.MapreduceState.RESULT_SUCCESS:
        mapreduce_spec.mapper.output_writer_class().finalize_job(
            mapreduce_state)
finalize_outputs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_spec, mapreduce_state):\n if mapreduce_spec.mapper.output_writer_class(\n ) and mapreduce_state.result_status == model.MapreduceState.RESULT_SUCCESS:\n mapreduce_spec.mapper.output_writer_class().finalize_job(\n mapreduce_state)\n", "role": "user" }, { "content": "finalize_outputs", "role": "assistant" } ]
def x(cls, mapreduce_spec, mapreduce_state):
    config = util.create_datastore_write_config(mapreduce_spec)
    queue_name = util.get_queue_name(mapreduce_spec.params.get(model.
        MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE))
    done_callback = mapreduce_spec.params.get(model.MapreduceSpec.
        PARAM_DONE_CALLBACK)
    done_task = None
    if done_callback:
        done_task = taskqueue.Task(url=done_callback, headers=util.
            _get_task_headers(mapreduce_spec.mapreduce_id, util.
            CALLBACK_MR_ID_TASK_HEADER), method=mapreduce_spec.params.get(
            'done_callback_method', 'POST'))

    @db.transactional(retries=5)
    def x():
        fresh_state = model.MapreduceState.get_by_job_id(mapreduce_spec.
            mapreduce_id)
        if not fresh_state.active:
            logging.warning(
                'Job %s is not active. Looks like spurious task execution. Dropping task.'
                , mapreduce_spec.mapreduce_id)
            return
        mapreduce_state.put(config=config)
        if done_task and not _run_task_hook(mapreduce_spec.get_hooks(),
            'enqueue_done_task', done_task, queue_name):
            done_task.add(queue_name, transactional=True)
    _put_state()
    logging.info("Final result for job '%s' is '%s'", mapreduce_spec.
        mapreduce_id, mapreduce_state.result_status)
    cls._clean_up_mr(mapreduce_spec)
finalize_job
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_spec, mapreduce_state):\n config = util.create_datastore_write_config(mapreduce_spec)\n queue_name = util.get_queue_name(mapreduce_spec.params.get(model.\n MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE))\n done_callback = mapreduce_spec.params.get(model.MapreduceSpec.\n PARAM_DONE_CALLBACK)\n done_task = None\n if done_callback:\n done_task = taskqueue.Task(url=done_callback, headers=util.\n _get_task_headers(mapreduce_spec.mapreduce_id, util.\n CALLBACK_MR_ID_TASK_HEADER), method=mapreduce_spec.params.get(\n 'done_callback_method', 'POST'))\n\n @db.transactional(retries=5)\n def x():\n fresh_state = model.MapreduceState.get_by_job_id(mapreduce_spec.\n mapreduce_id)\n if not fresh_state.active:\n logging.warning(\n 'Job %s is not active. Looks like spurious task execution. Dropping task.'\n , mapreduce_spec.mapreduce_id)\n return\n mapreduce_state.put(config=config)\n if done_task and not _run_task_hook(mapreduce_spec.get_hooks(),\n 'enqueue_done_task', done_task, queue_name):\n done_task.add(queue_name, transactional=True)\n _put_state()\n logging.info(\"Final result for job '%s' is '%s'\", mapreduce_spec.\n mapreduce_id, mapreduce_state.result_status)\n cls._clean_up_mr(mapreduce_spec)\n", "role": "user" }, { "content": "finalize_job", "role": "assistant" } ]
def x(self):
    mr_id = self.request.get('mapreduce_id')
    logging.info('Processing kickoff for job %s', mr_id)
    state = model.MapreduceState.get_by_job_id(mr_id)
    if not self._check_mr_state(state, mr_id):
        return
    readers, serialized_readers_entity = self._get_input_readers(state)
    if readers is None:
        logging.warning('Found no mapper input data to process.')
        state.active = False
        state.result_status = model.MapreduceState.RESULT_SUCCESS
        ControllerCallbackHandler._finalize_job(state.mapreduce_spec, state)
        return False
    self._setup_output_writer(state)
    result = self._save_states(state, serialized_readers_entity)
    if result is None:
        readers, _ = self._get_input_readers(state)
    elif not result:
        return
    queue_name = self.request.headers.get('X-AppEngine-QueueName')
    KickOffJobHandler._schedule_shards(state.mapreduce_spec, readers,
        queue_name, state.mapreduce_spec.params['base_path'], state)
    ControllerCallbackHandler.reschedule(state, state.mapreduce_spec,
        serial_id=0, queue_name=queue_name)
handle
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n mr_id = self.request.get('mapreduce_id')\n logging.info('Processing kickoff for job %s', mr_id)\n state = model.MapreduceState.get_by_job_id(mr_id)\n if not self._check_mr_state(state, mr_id):\n return\n readers, serialized_readers_entity = self._get_input_readers(state)\n if readers is None:\n logging.warning('Found no mapper input data to process.')\n state.active = False\n state.result_status = model.MapreduceState.RESULT_SUCCESS\n ControllerCallbackHandler._finalize_job(state.mapreduce_spec, state)\n return False\n self._setup_output_writer(state)\n result = self._save_states(state, serialized_readers_entity)\n if result is None:\n readers, _ = self._get_input_readers(state)\n elif not result:\n return\n queue_name = self.request.headers.get('X-AppEngine-QueueName')\n KickOffJobHandler._schedule_shards(state.mapreduce_spec, readers,\n queue_name, state.mapreduce_spec.params['base_path'], state)\n ControllerCallbackHandler.reschedule(state, state.mapreduce_spec,\n serial_id=0, queue_name=queue_name)\n", "role": "user" }, { "content": "handle", "role": "assistant" } ]
def x(self):
    mr_id = self.request.get('mapreduce_id')
    logging.error('Failed to kick off job %s', mr_id)
    state = model.MapreduceState.get_by_job_id(mr_id)
    if not self._check_mr_state(state, mr_id):
        return
    config = util.create_datastore_write_config(state.mapreduce_spec)
    model.MapreduceControl.abort(mr_id, config=config)
    state.active = False
    state.result_status = model.MapreduceState.RESULT_FAILED
    ControllerCallbackHandler._finalize_job(state.mapreduce_spec, state)
drop_gracefully
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n mr_id = self.request.get('mapreduce_id')\n logging.error('Failed to kick off job %s', mr_id)\n state = model.MapreduceState.get_by_job_id(mr_id)\n if not self._check_mr_state(state, mr_id):\n return\n config = util.create_datastore_write_config(state.mapreduce_spec)\n model.MapreduceControl.abort(mr_id, config=config)\n state.active = False\n state.result_status = model.MapreduceState.RESULT_FAILED\n ControllerCallbackHandler._finalize_job(state.mapreduce_spec, state)\n", "role": "user" }, { "content": "drop_gracefully", "role": "assistant" } ]
def x(self, state):
    serialized_input_readers_key = (self._SERIALIZED_INPUT_READERS_KEY %
        state.key().id_or_name())
    serialized_input_readers = model._HugeTaskPayload.get_by_key_name(
        serialized_input_readers_key, parent=state)
    input_reader_class = state.mapreduce_spec.mapper.input_reader_class()
    split_param = state.mapreduce_spec.mapper
    if issubclass(input_reader_class, map_job.InputReader):
        split_param = map_job.JobConfig._to_map_job_config(state.
            mapreduce_spec, os.environ.get('HTTP_X_APPENGINE_QUEUENAME'))
    if serialized_input_readers is None:
        readers = input_reader_class.split_input(split_param)
    else:
        readers = [input_reader_class.from_json_str(_json) for _json in
            json.loads(zlib.decompress(serialized_input_readers.payload))]
    if not readers:
        return None, None
    state.mapreduce_spec.mapper.shard_count = len(readers)
    state.active_shards = len(readers)
    if serialized_input_readers is None:
        serialized_input_readers = model._HugeTaskPayload(key_name=
            serialized_input_readers_key, parent=state)
        readers_json_str = [i.to_json_str() for i in readers]
        serialized_input_readers.payload = zlib.compress(json.dumps(
            readers_json_str))
    return readers, serialized_input_readers
get_input_readers
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, state):\n serialized_input_readers_key = (self._SERIALIZED_INPUT_READERS_KEY %\n state.key().id_or_name())\n serialized_input_readers = model._HugeTaskPayload.get_by_key_name(\n serialized_input_readers_key, parent=state)\n input_reader_class = state.mapreduce_spec.mapper.input_reader_class()\n split_param = state.mapreduce_spec.mapper\n if issubclass(input_reader_class, map_job.InputReader):\n split_param = map_job.JobConfig._to_map_job_config(state.\n mapreduce_spec, os.environ.get('HTTP_X_APPENGINE_QUEUENAME'))\n if serialized_input_readers is None:\n readers = input_reader_class.split_input(split_param)\n else:\n readers = [input_reader_class.from_json_str(_json) for _json in\n json.loads(zlib.decompress(serialized_input_readers.payload))]\n if not readers:\n return None, None\n state.mapreduce_spec.mapper.shard_count = len(readers)\n state.active_shards = len(readers)\n if serialized_input_readers is None:\n serialized_input_readers = model._HugeTaskPayload(key_name=\n serialized_input_readers_key, parent=state)\n readers_json_str = [i.to_json_str() for i in readers]\n serialized_input_readers.payload = zlib.compress(json.dumps(\n readers_json_str))\n return readers, serialized_input_readers\n", "role": "user" }, { "content": "get_input_readers", "role": "assistant" } ]
def x(self, state, serialized_readers_entity):
    mr_id = state.key().id_or_name()
    fresh_state = model.MapreduceState.get_by_job_id(mr_id)
    if not self._check_mr_state(fresh_state, mr_id):
        return False
    if fresh_state.active_shards != 0:
        logging.warning(
            'Mapreduce %s already has active shards. Looks like spurious task execution.'
            , mr_id)
        return None
    config = util.create_datastore_write_config(state.mapreduce_spec)
    db.put([state, serialized_readers_entity], config=config)
    return True
save_states
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, state, serialized_readers_entity):\n mr_id = state.key().id_or_name()\n fresh_state = model.MapreduceState.get_by_job_id(mr_id)\n if not self._check_mr_state(fresh_state, mr_id):\n return False\n if fresh_state.active_shards != 0:\n logging.warning(\n 'Mapreduce %s already has active shards. Looks like spurious task execution.'\n , mr_id)\n return None\n config = util.create_datastore_write_config(state.mapreduce_spec)\n db.put([state, serialized_readers_entity], config=config)\n return True\n", "role": "user" }, { "content": "save_states", "role": "assistant" } ]
def x(cls, spec, readers, queue_name, base_path, mr_state):
    shard_states = []
    for shard_number, input_reader in enumerate(readers):
        shard_state = model.ShardState.create_new(spec.mapreduce_id,
            shard_number)
        shard_state.shard_description = str(input_reader)
        shard_states.append(shard_state)
    existing_shard_states = db.get(shard.key() for shard in shard_states)
    existing_shard_keys = set(shard.key() for shard in
        existing_shard_states if shard is not None)
    db.put((shard for shard in shard_states if shard.key() not in
        existing_shard_keys), config=util.create_datastore_write_config(spec))
    writer_class = spec.mapper.output_writer_class()
    writers = [None] * len(readers)
    if writer_class:
        for shard_number, shard_state in enumerate(shard_states):
            writers[shard_number] = writer_class.create(mr_state.
                mapreduce_spec, shard_state.shard_number, shard_state.
                retries + 1, mr_state.writer_state)
    for shard_number, (input_reader, output_writer) in enumerate(zip(
            readers, writers)):
        shard_id = model.ShardState.shard_id_from_number(spec.mapreduce_id,
            shard_number)
        task = MapperWorkerCallbackHandler._state_to_task(model.
            TransientShardState(base_path, spec, shard_id, 0, input_reader,
            input_reader, output_writer=output_writer, handler=spec.mapper.
            handler), shard_states[shard_number])
        MapperWorkerCallbackHandler._add_task(task, spec, queue_name)
schedule_shards
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, spec, readers, queue_name, base_path, mr_state):\n shard_states = []\n for shard_number, input_reader in enumerate(readers):\n shard_state = model.ShardState.create_new(spec.mapreduce_id,\n shard_number)\n shard_state.shard_description = str(input_reader)\n shard_states.append(shard_state)\n existing_shard_states = db.get(shard.key() for shard in shard_states)\n existing_shard_keys = set(shard.key() for shard in\n existing_shard_states if shard is not None)\n db.put((shard for shard in shard_states if shard.key() not in\n existing_shard_keys), config=util.create_datastore_write_config(spec))\n writer_class = spec.mapper.output_writer_class()\n writers = [None] * len(readers)\n if writer_class:\n for shard_number, shard_state in enumerate(shard_states):\n writers[shard_number] = writer_class.create(mr_state.\n mapreduce_spec, shard_state.shard_number, shard_state.\n retries + 1, mr_state.writer_state)\n for shard_number, (input_reader, output_writer) in enumerate(zip(\n readers, writers)):\n shard_id = model.ShardState.shard_id_from_number(spec.mapreduce_id,\n shard_number)\n task = MapperWorkerCallbackHandler._state_to_task(model.\n TransientShardState(base_path, spec, shard_id, 0, input_reader,\n input_reader, output_writer=output_writer, handler=spec.mapper.\n handler), shard_states[shard_number])\n MapperWorkerCallbackHandler._add_task(task, spec, queue_name)\n", "role": "user" }, { "content": "schedule_shards", "role": "assistant" } ]
def x(cls, state, mr_id):
    if state is None:
        logging.warning('Mapreduce State for job %s is missing. Dropping Task.'
            , mr_id)
        return False
    if not state.active:
        logging.warning(
            'Mapreduce %s is not active. Looks like spurious task execution. Dropping Task.'
            , mr_id)
        return False
    return True
check_mr_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, state, mr_id):\n if state is None:\n logging.warning('Mapreduce State for job %s is missing. Dropping Task.'\n , mr_id)\n return False\n if not state.active:\n logging.warning(\n 'Mapreduce %s is not active. Looks like spurious task execution. Dropping Task.'\n , mr_id)\n return False\n return True\n", "role": "user" }, { "content": "check_mr_state", "role": "assistant" } ]
def x(self):
    mapreduce_name = self._get_required_param('name')
    mapper_input_reader_spec = self._get_required_param('mapper_input_reader')
    mapper_handler_spec = self._get_required_param('mapper_handler')
    mapper_output_writer_spec = self.request.get('mapper_output_writer')
    mapper_params = self._get_params('mapper_params_validator',
        'mapper_params.')
    params = self._get_params('params_validator', 'params.')
    mr_params = map_job.JobConfig._get_default_mr_params()
    mr_params.update(params)
    if 'queue_name' in mapper_params:
        mr_params['queue_name'] = mapper_params['queue_name']
    mapper_params['processing_rate'] = int(mapper_params.get(
        'processing_rate') or parameters.config.PROCESSING_RATE_PER_SEC)
    mapper_spec = model.MapperSpec(mapper_handler_spec,
        mapper_input_reader_spec, mapper_params, int(mapper_params.get(
        'shard_count', parameters.config.SHARD_COUNT)), output_writer_spec=
        mapper_output_writer_spec)
    mapreduce_id = self._start_map(mapreduce_name, mapper_spec, mr_params,
        queue_name=mr_params['queue_name'], _app=mapper_params.get('_app'))
    self.json_response['mapreduce_id'] = mapreduce_id
handle
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n mapreduce_name = self._get_required_param('name')\n mapper_input_reader_spec = self._get_required_param('mapper_input_reader')\n mapper_handler_spec = self._get_required_param('mapper_handler')\n mapper_output_writer_spec = self.request.get('mapper_output_writer')\n mapper_params = self._get_params('mapper_params_validator',\n 'mapper_params.')\n params = self._get_params('params_validator', 'params.')\n mr_params = map_job.JobConfig._get_default_mr_params()\n mr_params.update(params)\n if 'queue_name' in mapper_params:\n mr_params['queue_name'] = mapper_params['queue_name']\n mapper_params['processing_rate'] = int(mapper_params.get(\n 'processing_rate') or parameters.config.PROCESSING_RATE_PER_SEC)\n mapper_spec = model.MapperSpec(mapper_handler_spec,\n mapper_input_reader_spec, mapper_params, int(mapper_params.get(\n 'shard_count', parameters.config.SHARD_COUNT)), output_writer_spec=\n mapper_output_writer_spec)\n mapreduce_id = self._start_map(mapreduce_name, mapper_spec, mr_params,\n queue_name=mr_params['queue_name'], _app=mapper_params.get('_app'))\n self.json_response['mapreduce_id'] = mapreduce_id\n", "role": "user" }, { "content": "handle", "role": "assistant" } ]
def x(self, validator_parameter, name_prefix):
    params_validator = self.request.get(validator_parameter)
    user_params = {}
    for key in self.request.arguments():
        if key.startswith(name_prefix):
            values = self.request.get_all(key)
            adjusted_key = key[len(name_prefix):]
            if len(values) == 1:
                user_params[adjusted_key] = values[0]
            else:
                user_params[adjusted_key] = values
    if params_validator:
        resolved_validator = util.for_name(params_validator)
        resolved_validator(user_params)
    return user_params
get_params
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, validator_parameter, name_prefix):\n params_validator = self.request.get(validator_parameter)\n user_params = {}\n for key in self.request.arguments():\n if key.startswith(name_prefix):\n values = self.request.get_all(key)\n adjusted_key = key[len(name_prefix):]\n if len(values) == 1:\n user_params[adjusted_key] = values[0]\n else:\n user_params[adjusted_key] = values\n if params_validator:\n resolved_validator = util.for_name(params_validator)\n resolved_validator(user_params)\n return user_params\n", "role": "user" }, { "content": "get_params", "role": "assistant" } ]
def x(self, param_name):
    value = self.request.get(param_name)
    if not value:
        raise errors.NotEnoughArgumentsError(param_name + ' not specified')
    return value
get_required_param
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, param_name):\n value = self.request.get(param_name)\n if not value:\n raise errors.NotEnoughArgumentsError(param_name + ' not specified')\n return value\n", "role": "user" }, { "content": "get_required_param", "role": "assistant" } ]
def x(cls, name, mapper_spec, mapreduce_params, queue_name, eta=None,
    countdown=None, hooks_class_name=None, _app=None, in_xg_transaction=False):
    mapper_input_reader_class = mapper_spec.input_reader_class()
    mapper_input_reader_class.validate(mapper_spec)
    mapper_output_writer_class = mapper_spec.output_writer_class()
    if mapper_output_writer_class:
        mapper_output_writer_class.validate(mapper_spec)
    mapreduce_id = model.MapreduceState.new_mapreduce_id()
    mapreduce_spec = model.MapreduceSpec(name, mapreduce_id, mapper_spec.
        to_json(), mapreduce_params, hooks_class_name)
    ctx = context.Context(mapreduce_spec, None)
    context.Context._set(ctx)
    try:
        mapper_spec.handler
    finally:
        context.Context._set(None)
    if in_xg_transaction:
        propagation = db.MANDATORY
    else:
        propagation = db.INDEPENDENT

    @db.transactional(propagation=propagation)
    def x():
        cls._create_and_save_state(mapreduce_spec, _app)
        cls._add_kickoff_task(mapreduce_params['base_path'], mapreduce_spec,
            eta, countdown, queue_name)
    _txn()
    return mapreduce_id
start_map
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, name, mapper_spec, mapreduce_params, queue_name, eta=None,\n countdown=None, hooks_class_name=None, _app=None, in_xg_transaction=False):\n mapper_input_reader_class = mapper_spec.input_reader_class()\n mapper_input_reader_class.validate(mapper_spec)\n mapper_output_writer_class = mapper_spec.output_writer_class()\n if mapper_output_writer_class:\n mapper_output_writer_class.validate(mapper_spec)\n mapreduce_id = model.MapreduceState.new_mapreduce_id()\n mapreduce_spec = model.MapreduceSpec(name, mapreduce_id, mapper_spec.\n to_json(), mapreduce_params, hooks_class_name)\n ctx = context.Context(mapreduce_spec, None)\n context.Context._set(ctx)\n try:\n mapper_spec.handler\n finally:\n context.Context._set(None)\n if in_xg_transaction:\n propagation = db.MANDATORY\n else:\n propagation = db.INDEPENDENT\n\n @db.transactional(propagation=propagation)\n def x():\n cls._create_and_save_state(mapreduce_spec, _app)\n cls._add_kickoff_task(mapreduce_params['base_path'], mapreduce_spec,\n eta, countdown, queue_name)\n _txn()\n return mapreduce_id\n", "role": "user" }, { "content": "start_map", "role": "assistant" } ]
def x(cls, mapreduce_spec, _app):
    state = model.MapreduceState.create_new(mapreduce_spec.mapreduce_id)
    state.mapreduce_spec = mapreduce_spec
    state.active = True
    state.active_shards = 0
    if _app:
        state.app_id = _app
    config = util.create_datastore_write_config(mapreduce_spec)
    state.put(config=config)
    return state
create_and_save_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapreduce_spec, _app):\n state = model.MapreduceState.create_new(mapreduce_spec.mapreduce_id)\n state.mapreduce_spec = mapreduce_spec\n state.active = True\n state.active_shards = 0\n if _app:\n state.app_id = _app\n config = util.create_datastore_write_config(mapreduce_spec)\n state.put(config=config)\n return state\n", "role": "user" }, { "content": "create_and_save_state", "role": "assistant" } ]
def x(cls, base_path, mapreduce_spec, eta, countdown, queue_name):
    params = {'mapreduce_id': mapreduce_spec.mapreduce_id}
    kickoff_task = taskqueue.Task(url=base_path + '/kickoffjob_callback/' +
        mapreduce_spec.mapreduce_id, headers=util._get_task_headers(
        mapreduce_spec.mapreduce_id), params=params, eta=eta, countdown=
        countdown)
    hooks = mapreduce_spec.get_hooks()
    if hooks is not None:
        try:
            hooks.enqueue_kickoff_task(kickoff_task, queue_name)
            return
        except NotImplementedError:
            pass
    kickoff_task.add(queue_name, transactional=True)
add_kickoff_task
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, base_path, mapreduce_spec, eta, countdown, queue_name):\n params = {'mapreduce_id': mapreduce_spec.mapreduce_id}\n kickoff_task = taskqueue.Task(url=base_path + '/kickoffjob_callback/' +\n mapreduce_spec.mapreduce_id, headers=util._get_task_headers(\n mapreduce_spec.mapreduce_id), params=params, eta=eta, countdown=\n countdown)\n hooks = mapreduce_spec.get_hooks()\n if hooks is not None:\n try:\n hooks.enqueue_kickoff_task(kickoff_task, queue_name)\n return\n except NotImplementedError:\n pass\n kickoff_task.add(queue_name, transactional=True)\n", "role": "user" }, { "content": "add_kickoff_task", "role": "assistant" } ]
def x(mapper_spec, allowed_keys=None, allow_old=True):
    if 'input_reader' not in mapper_spec.params:
        message = (
            "Input reader's parameters should be specified in input_reader subdictionary."
            )
        if not allow_old or allowed_keys:
            raise errors.BadReaderParamsError(message)
        params = mapper_spec.params
        params = dict((str(n), v) for n, v in params.iteritems())
    else:
        if not isinstance(mapper_spec.params.get('input_reader'), dict):
            raise errors.BadReaderParamsError(
                'Input reader parameters should be a dictionary')
        params = mapper_spec.params.get('input_reader')
        params = dict((str(n), v) for n, v in params.iteritems())
        if allowed_keys:
            params_diff = set(params.keys()) - allowed_keys
            if params_diff:
                raise errors.BadReaderParamsError(
                    'Invalid input_reader parameters: %s' % ','.join(
                    params_diff))
    return params
get_params
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(mapper_spec, allowed_keys=None, allow_old=True):\n if 'input_reader' not in mapper_spec.params:\n message = (\n \"Input reader's parameters should be specified in input_reader subdictionary.\"\n )\n if not allow_old or allowed_keys:\n raise errors.BadReaderParamsError(message)\n params = mapper_spec.params\n params = dict((str(n), v) for n, v in params.iteritems())\n else:\n if not isinstance(mapper_spec.params.get('input_reader'), dict):\n raise errors.BadReaderParamsError(\n 'Input reader parameters should be a dictionary')\n params = mapper_spec.params.get('input_reader')\n params = dict((str(n), v) for n, v in params.iteritems())\n if allowed_keys:\n params_diff = set(params.keys()) - allowed_keys\n if params_diff:\n raise errors.BadReaderParamsError(\n 'Invalid input_reader parameters: %s' % ','.join(\n params_diff))\n return params\n", "role": "user" }, { "content": "get_params", "role": "assistant" } ]
def x(cls, sorted_keys, shard_count):
    assert len(sorted_keys) >= shard_count
    index_stride = len(sorted_keys) / float(shard_count)
    return [sorted_keys[int(round(index_stride * i))] for i in range(1,
        shard_count)]
choose_split_points
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, sorted_keys, shard_count):\n assert len(sorted_keys) >= shard_count\n index_stride = len(sorted_keys) / float(shard_count)\n return [sorted_keys[int(round(index_stride * i))] for i in range(1,\n shard_count)]\n", "role": "user" }, { "content": "choose_split_points", "role": "assistant" } ]
def x(cls, mapper_spec):
    super(RawDatastoreInputReader, cls).validate(mapper_spec)
    params = _get_params(mapper_spec)
    entity_kind = params[cls.ENTITY_KIND_PARAM]
    if '.' in entity_kind:
        logging.warning(
            '. detected in entity kind %s specified for reader %s.Assuming entity kind contains the dot.'
            , entity_kind, cls.__name__)
    if cls.FILTERS_PARAM in params:
        filters = params[cls.FILTERS_PARAM]
        for f in filters:
            if f[1] != '=':
                raise BadReaderParamsError(
                    'Only equality filters are supported: %s', f)
validate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n super(RawDatastoreInputReader, cls).validate(mapper_spec)\n params = _get_params(mapper_spec)\n entity_kind = params[cls.ENTITY_KIND_PARAM]\n if '.' in entity_kind:\n logging.warning(\n '. detected in entity kind %s specified for reader %s.Assuming entity kind contains the dot.'\n , entity_kind, cls.__name__)\n if cls.FILTERS_PARAM in params:\n filters = params[cls.FILTERS_PARAM]\n for f in filters:\n if f[1] != '=':\n raise BadReaderParamsError(\n 'Only equality filters are supported: %s', f)\n", "role": "user" }, { "content": "validate", "role": "assistant" } ]
def x(cls, mapper_spec):
    shard_count = mapper_spec.shard_count
    query_spec = cls._get_query_spec(mapper_spec)
    if not property_range.should_shard_by_property_range(query_spec.filters):
        return super(DatastoreInputReader, cls).split_input(mapper_spec)
    oversplit_factor = query_spec.oversplit_factor
    oversplit_shard_count = oversplit_factor * shard_count
    p_range = property_range.PropertyRange(query_spec.filters, query_spec.
        model_class_path)
    p_ranges = p_range.split(oversplit_shard_count)
    if query_spec.ns is not None:
        ns_range = namespace_range.NamespaceRange(namespace_start=
            query_spec.ns, namespace_end=query_spec.ns, _app=query_spec.app)
        ns_ranges = [copy.copy(ns_range) for _ in p_ranges]
    else:
        ns_keys = namespace_range.get_namespace_keys(query_spec.app, cls.
            MAX_NAMESPACES_FOR_KEY_SHARD + 1)
        if not ns_keys:
            return
        if len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD:
            ns_ranges = [namespace_range.NamespaceRange(_app=query_spec.app
                ) for _ in p_ranges]
        else:
            ns_ranges = namespace_range.NamespaceRange.split(n=
                oversplit_shard_count, contiguous=False, can_query=lambda :
                True, _app=query_spec.app)
            p_ranges = [copy.copy(p_range) for _ in ns_ranges]
    assert len(p_ranges) == len(ns_ranges)
    iters = [db_iters.RangeIteratorFactory.create_property_range_iterator(p,
        ns, query_spec) for p, ns in zip(p_ranges, ns_ranges)]
    if len(iters) > shard_count:
        iters = [db_iters.RangeIteratorFactory.
            create_multi_property_range_iterator([iters[i] for i in xrange(
            start_index, len(iters), shard_count)]) for start_index in
            xrange(shard_count)]
    return [cls(i) for i in iters]
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n shard_count = mapper_spec.shard_count\n query_spec = cls._get_query_spec(mapper_spec)\n if not property_range.should_shard_by_property_range(query_spec.filters):\n return super(DatastoreInputReader, cls).split_input(mapper_spec)\n oversplit_factor = query_spec.oversplit_factor\n oversplit_shard_count = oversplit_factor * shard_count\n p_range = property_range.PropertyRange(query_spec.filters, query_spec.\n model_class_path)\n p_ranges = p_range.split(oversplit_shard_count)\n if query_spec.ns is not None:\n ns_range = namespace_range.NamespaceRange(namespace_start=\n query_spec.ns, namespace_end=query_spec.ns, _app=query_spec.app)\n ns_ranges = [copy.copy(ns_range) for _ in p_ranges]\n else:\n ns_keys = namespace_range.get_namespace_keys(query_spec.app, cls.\n MAX_NAMESPACES_FOR_KEY_SHARD + 1)\n if not ns_keys:\n return\n if len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD:\n ns_ranges = [namespace_range.NamespaceRange(_app=query_spec.app\n ) for _ in p_ranges]\n else:\n ns_ranges = namespace_range.NamespaceRange.split(n=\n oversplit_shard_count, contiguous=False, can_query=lambda :\n True, _app=query_spec.app)\n p_ranges = [copy.copy(p_range) for _ in ns_ranges]\n assert len(p_ranges) == len(ns_ranges)\n iters = [db_iters.RangeIteratorFactory.create_property_range_iterator(p,\n ns, query_spec) for p, ns in zip(p_ranges, ns_ranges)]\n if len(iters) > shard_count:\n iters = [db_iters.RangeIteratorFactory.\n create_multi_property_range_iterator([iters[i] for i in xrange(\n start_index, len(iters), shard_count)]) for start_index in\n xrange(shard_count)]\n return [cls(i) for i in iters]\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(self):
    while True:
        if self._current_key_range is None:
            if self._key_ranges:
                self._current_key_range = self._key_ranges.pop()
                continue
            else:
                break
        for key, o in self._iter_key_range(copy.deepcopy(self.
                _current_key_range)):
            self._current_key_range.advance(key)
            yield o
        self._current_key_range = None
iter_key_ranges
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n while True:\n if self._current_key_range is None:\n if self._key_ranges:\n self._current_key_range = self._key_ranges.pop()\n continue\n else:\n break\n for key, o in self._iter_key_range(copy.deepcopy(self.\n _current_key_range)):\n self._current_key_range.advance(key)\n yield o\n self._current_key_range = None\n", "role": "user" }, { "content": "iter_key_ranges", "role": "assistant" } ]
def x(self):
    while True:
        if self._current_key_range is None:
            query = self._ns_range.make_datastore_query()
            namespace_result = query.Get(1)
            if not namespace_result:
                break
            namespace = namespace_result[0].name() or ''
            self._current_key_range = key_range.KeyRange(namespace=
                namespace, _app=self._ns_range.app)
            yield ALLOW_CHECKPOINT
        for key, o in self._iter_key_range(copy.deepcopy(self.
                _current_key_range)):
            self._current_key_range.advance(key)
            yield o
        if (self._ns_range.is_single_namespace or self._current_key_range.
                namespace == self._ns_range.namespace_end):
            break
        self._ns_range = self._ns_range.with_start_after(self.
            _current_key_range.namespace)
        self._current_key_range = None
iter_ns_range
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n while True:\n if self._current_key_range is None:\n query = self._ns_range.make_datastore_query()\n namespace_result = query.Get(1)\n if not namespace_result:\n break\n namespace = namespace_result[0].name() or ''\n self._current_key_range = key_range.KeyRange(namespace=\n namespace, _app=self._ns_range.app)\n yield ALLOW_CHECKPOINT\n for key, o in self._iter_key_range(copy.deepcopy(self.\n _current_key_range)):\n self._current_key_range.advance(key)\n yield o\n if (self._ns_range.is_single_namespace or self._current_key_range.\n namespace == self._ns_range.namespace_end):\n break\n self._ns_range = self._ns_range.with_start_after(self.\n _current_key_range.namespace)\n self._current_key_range = None\n", "role": "user" }, { "content": "iter_ns_range", "role": "assistant" } ]
def x(cls, app, namespace, entity_kind, shard_count):
    raw_entity_kind = cls._get_raw_entity_kind(entity_kind)
    if shard_count == 1:
        return [key_range.KeyRange(namespace=namespace, _app=app)]
    ds_query = datastore.Query(kind=raw_entity_kind, namespace=namespace,
        _app=app, keys_only=True)
    ds_query.Order('__scatter__')
    random_keys = ds_query.Get(shard_count * cls._OVERSAMPLING_FACTOR)
    if not random_keys:
        return [key_range.KeyRange(namespace=namespace, _app=app)] + [None] * (
            shard_count - 1)
    random_keys.sort()
    if len(random_keys) >= shard_count:
        random_keys = cls._choose_split_points(random_keys, shard_count)
    key_ranges = []
    key_ranges.append(key_range.KeyRange(key_start=None, key_end=
        random_keys[0], direction=key_range.KeyRange.ASC, include_start=
        False, include_end=False, namespace=namespace, _app=app))
    for i in range(0, len(random_keys) - 1):
        key_ranges.append(key_range.KeyRange(key_start=random_keys[i],
            key_end=random_keys[i + 1], direction=key_range.KeyRange.ASC,
            include_start=True, include_end=False, namespace=namespace,
            _app=app))
    key_ranges.append(key_range.KeyRange(key_start=random_keys[-1], key_end
        =None, direction=key_range.KeyRange.ASC, include_start=True,
        include_end=False, namespace=namespace, _app=app))
    if len(key_ranges) < shard_count:
        key_ranges += [None] * (shard_count - len(key_ranges))
    return key_ranges
split_input_from_namespace
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, app, namespace, entity_kind, shard_count):\n raw_entity_kind = cls._get_raw_entity_kind(entity_kind)\n if shard_count == 1:\n return [key_range.KeyRange(namespace=namespace, _app=app)]\n ds_query = datastore.Query(kind=raw_entity_kind, namespace=namespace,\n _app=app, keys_only=True)\n ds_query.Order('__scatter__')\n random_keys = ds_query.Get(shard_count * cls._OVERSAMPLING_FACTOR)\n if not random_keys:\n return [key_range.KeyRange(namespace=namespace, _app=app)] + [None] * (\n shard_count - 1)\n random_keys.sort()\n if len(random_keys) >= shard_count:\n random_keys = cls._choose_split_points(random_keys, shard_count)\n key_ranges = []\n key_ranges.append(key_range.KeyRange(key_start=None, key_end=\n random_keys[0], direction=key_range.KeyRange.ASC, include_start=\n False, include_end=False, namespace=namespace, _app=app))\n for i in range(0, len(random_keys) - 1):\n key_ranges.append(key_range.KeyRange(key_start=random_keys[i],\n key_end=random_keys[i + 1], direction=key_range.KeyRange.ASC,\n include_start=True, include_end=False, namespace=namespace,\n _app=app))\n key_ranges.append(key_range.KeyRange(key_start=random_keys[-1], key_end\n =None, direction=key_range.KeyRange.ASC, include_start=True,\n include_end=False, namespace=namespace, _app=app))\n if len(key_ranges) < shard_count:\n key_ranges += [None] * (shard_count - len(key_ranges))\n return key_ranges\n", "role": "user" }, { "content": "split_input_from_namespace", "role": "assistant" } ]
def x(cls, app, namespaces, entity_kind_name, params, shard_count):
    key_ranges = []
    for namespace in namespaces:
        key_ranges.extend(cls._split_input_from_namespace(app, namespace,
            entity_kind_name, shard_count))
    shared_ranges = [[] for _ in range(shard_count)]
    for i, k_range in enumerate(key_ranges):
        shared_ranges[i % shard_count].append(k_range)
    batch_size = int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE))
    return [cls(entity_kind_name, key_ranges=key_ranges, ns_range=None,
        batch_size=batch_size) for key_ranges in shared_ranges if key_ranges]
split_input_from_params
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, app, namespaces, entity_kind_name, params, shard_count):\n key_ranges = []\n for namespace in namespaces:\n key_ranges.extend(cls._split_input_from_namespace(app, namespace,\n entity_kind_name, shard_count))\n shared_ranges = [[] for _ in range(shard_count)]\n for i, k_range in enumerate(key_ranges):\n shared_ranges[i % shard_count].append(k_range)\n batch_size = int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE))\n return [cls(entity_kind_name, key_ranges=key_ranges, ns_range=None,\n batch_size=batch_size) for key_ranges in shared_ranges if key_ranges]\n", "role": "user" }, { "content": "split_input_from_params", "role": "assistant" } ]
def x(cls, mapper_spec):
    params = _get_params(mapper_spec)
    entity_kind_name = params[cls.ENTITY_KIND_PARAM]
    batch_size = int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE))
    shard_count = mapper_spec.shard_count
    namespace = params.get(cls.NAMESPACE_PARAM)
    app = params.get(cls._APP_PARAM)
    filters = params.get(cls.FILTERS_PARAM)
    if namespace is None:
        namespace_query = datastore.Query('__namespace__', keys_only=True,
            _app=app)
        namespace_keys = namespace_query.Get(limit=cls.
            MAX_NAMESPACES_FOR_KEY_SHARD + 1)
        if len(namespace_keys) > cls.MAX_NAMESPACES_FOR_KEY_SHARD:
            ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,
                contiguous=True, _app=app)
            return [cls(entity_kind_name, key_ranges=None, ns_range=
                ns_range, batch_size=batch_size, filters=filters) for
                ns_range in ns_ranges]
        elif not namespace_keys:
            return [cls(entity_kind_name, key_ranges=None, ns_range=
                namespace_range.NamespaceRange(_app=app), batch_size=
                shard_count, filters=filters)]
        else:
            namespaces = [(namespace_key.name() or '') for namespace_key in
                namespace_keys]
    else:
        namespaces = [namespace]
    readers = cls._split_input_from_params(app, namespaces,
        entity_kind_name, params, shard_count)
    if filters:
        for reader in readers:
            reader._filters = filters
    return readers
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n params = _get_params(mapper_spec)\n entity_kind_name = params[cls.ENTITY_KIND_PARAM]\n batch_size = int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE))\n shard_count = mapper_spec.shard_count\n namespace = params.get(cls.NAMESPACE_PARAM)\n app = params.get(cls._APP_PARAM)\n filters = params.get(cls.FILTERS_PARAM)\n if namespace is None:\n namespace_query = datastore.Query('__namespace__', keys_only=True,\n _app=app)\n namespace_keys = namespace_query.Get(limit=cls.\n MAX_NAMESPACES_FOR_KEY_SHARD + 1)\n if len(namespace_keys) > cls.MAX_NAMESPACES_FOR_KEY_SHARD:\n ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,\n contiguous=True, _app=app)\n return [cls(entity_kind_name, key_ranges=None, ns_range=\n ns_range, batch_size=batch_size, filters=filters) for\n ns_range in ns_ranges]\n elif not namespace_keys:\n return [cls(entity_kind_name, key_ranges=None, ns_range=\n namespace_range.NamespaceRange(_app=app), batch_size=\n shard_count, filters=filters)]\n else:\n namespaces = [(namespace_key.name() or '') for namespace_key in\n namespace_keys]\n else:\n namespaces = [namespace]\n readers = cls._split_input_from_params(app, namespaces,\n entity_kind_name, params, shard_count)\n if filters:\n for reader in readers:\n reader._filters = filters\n return readers\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(self):
    if self._key_ranges is None:
        key_ranges_json = None
    else:
        key_ranges_json = []
        for k in self._key_ranges:
            if k:
                key_ranges_json.append(k.to_json())
            else:
                key_ranges_json.append(None)
    if self._ns_range is None:
        namespace_range_json = None
    else:
        namespace_range_json = self._ns_range.to_json_object()
    if self._current_key_range is None:
        current_key_range_json = None
    else:
        current_key_range_json = self._current_key_range.to_json()
    json_dict = {self.KEY_RANGE_PARAM: key_ranges_json, self.
        NAMESPACE_RANGE_PARAM: namespace_range_json, self.
        CURRENT_KEY_RANGE_PARAM: current_key_range_json, self.
        ENTITY_KIND_PARAM: self._entity_kind, self.BATCH_SIZE_PARAM: self.
        _batch_size, self.FILTERS_PARAM: self._filters}
    return json_dict
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self._key_ranges is None:\n key_ranges_json = None\n else:\n key_ranges_json = []\n for k in self._key_ranges:\n if k:\n key_ranges_json.append(k.to_json())\n else:\n key_ranges_json.append(None)\n if self._ns_range is None:\n namespace_range_json = None\n else:\n namespace_range_json = self._ns_range.to_json_object()\n if self._current_key_range is None:\n current_key_range_json = None\n else:\n current_key_range_json = self._current_key_range.to_json()\n json_dict = {self.KEY_RANGE_PARAM: key_ranges_json, self.\n NAMESPACE_RANGE_PARAM: namespace_range_json, self.\n CURRENT_KEY_RANGE_PARAM: current_key_range_json, self.\n ENTITY_KIND_PARAM: self._entity_kind, self.BATCH_SIZE_PARAM: self.\n _batch_size, self.FILTERS_PARAM: self._filters}\n return json_dict\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json):
    if json[cls.KEY_RANGE_PARAM] is None:
        key_ranges = None
    else:
        key_ranges = []
        for k in json[cls.KEY_RANGE_PARAM]:
            if k:
                key_ranges.append(key_range.KeyRange.from_json(k))
            else:
                key_ranges.append(None)
    if json[cls.NAMESPACE_RANGE_PARAM] is None:
        ns_range = None
    else:
        ns_range = namespace_range.NamespaceRange.from_json_object(json[cls
            .NAMESPACE_RANGE_PARAM])
    if json[cls.CURRENT_KEY_RANGE_PARAM] is None:
        current_key_range = None
    else:
        current_key_range = key_range.KeyRange.from_json(json[cls.
            CURRENT_KEY_RANGE_PARAM])
    return cls(json[cls.ENTITY_KIND_PARAM], key_ranges, ns_range, json[cls.
        BATCH_SIZE_PARAM], current_key_range, filters=json.get(cls.
        FILTERS_PARAM))
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n if json[cls.KEY_RANGE_PARAM] is None:\n key_ranges = None\n else:\n key_ranges = []\n for k in json[cls.KEY_RANGE_PARAM]:\n if k:\n key_ranges.append(key_range.KeyRange.from_json(k))\n else:\n key_ranges.append(None)\n if json[cls.NAMESPACE_RANGE_PARAM] is None:\n ns_range = None\n else:\n ns_range = namespace_range.NamespaceRange.from_json_object(json[cls\n .NAMESPACE_RANGE_PARAM])\n if json[cls.CURRENT_KEY_RANGE_PARAM] is None:\n current_key_range = None\n else:\n current_key_range = key_range.KeyRange.from_json(json[cls.\n CURRENT_KEY_RANGE_PARAM])\n return cls(json[cls.ENTITY_KIND_PARAM], key_ranges, ns_range, json[cls.\n BATCH_SIZE_PARAM], current_key_range, filters=json.get(cls.\n FILTERS_PARAM))\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(self):
    self._has_iterated = True
    if self._read_before_start:
        self._blob_reader.readline()
        self._read_before_start = False
    start_position = self._blob_reader.tell()
    if start_position > self._end_position:
        raise StopIteration()
    line = self._blob_reader.readline()
    if not line:
        raise StopIteration()
    return start_position, line.rstrip('\n')
next
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self._has_iterated = True\n if self._read_before_start:\n self._blob_reader.readline()\n self._read_before_start = False\n start_position = self._blob_reader.tell()\n if start_position > self._end_position:\n raise StopIteration()\n line = self._blob_reader.readline()\n if not line:\n raise StopIteration()\n return start_position, line.rstrip('\\n')\n", "role": "user" }, { "content": "next", "role": "assistant" } ]
def x(self):
    new_pos = self._blob_reader.tell()
    if self._has_iterated:
        new_pos -= 1
    return {self.BLOB_KEY_PARAM: self._blob_key, self.
        INITIAL_POSITION_PARAM: new_pos, self.END_POSITION_PARAM: self.
        _end_position}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n new_pos = self._blob_reader.tell()\n if self._has_iterated:\n new_pos -= 1\n return {self.BLOB_KEY_PARAM: self._blob_key, self.\n INITIAL_POSITION_PARAM: new_pos, self.END_POSITION_PARAM: self.\n _end_position}\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json):
    return cls(json[cls.BLOB_KEY_PARAM], json[cls.INITIAL_POSITION_PARAM],
        json[cls.END_POSITION_PARAM])
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n return cls(json[cls.BLOB_KEY_PARAM], json[cls.INITIAL_POSITION_PARAM],\n json[cls.END_POSITION_PARAM])\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(cls, mapper_spec):
    if mapper_spec.input_reader_class() != cls:
        raise BadReaderParamsError('Mapper input reader class mismatch')
    params = _get_params(mapper_spec)
    if cls.BLOB_KEYS_PARAM not in params:
        raise BadReaderParamsError("Must specify 'blob_keys' for mapper input")
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
        blob_keys = blob_keys.split(',')
    if len(blob_keys) > cls._MAX_BLOB_KEYS_COUNT:
        raise BadReaderParamsError("Too many 'blob_keys' for mapper input")
    if not blob_keys:
        raise BadReaderParamsError("No 'blob_keys' specified for mapper input")
    for blob_key in blob_keys:
        blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
        if not blob_info:
            raise BadReaderParamsError('Could not find blobinfo for key %s' %
                blob_key)
validate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n if mapper_spec.input_reader_class() != cls:\n raise BadReaderParamsError('Mapper input reader class mismatch')\n params = _get_params(mapper_spec)\n if cls.BLOB_KEYS_PARAM not in params:\n raise BadReaderParamsError(\"Must specify 'blob_keys' for mapper input\")\n blob_keys = params[cls.BLOB_KEYS_PARAM]\n if isinstance(blob_keys, basestring):\n blob_keys = blob_keys.split(',')\n if len(blob_keys) > cls._MAX_BLOB_KEYS_COUNT:\n raise BadReaderParamsError(\"Too many 'blob_keys' for mapper input\")\n if not blob_keys:\n raise BadReaderParamsError(\"No 'blob_keys' specified for mapper input\")\n for blob_key in blob_keys:\n blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))\n if not blob_info:\n raise BadReaderParamsError('Could not find blobinfo for key %s' %\n blob_key)\n", "role": "user" }, { "content": "validate", "role": "assistant" } ]
def x(cls, mapper_spec):
    params = _get_params(mapper_spec)
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
        blob_keys = blob_keys.split(',')
    blob_sizes = {}
    for blob_key in blob_keys:
        blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
        blob_sizes[blob_key] = blob_info.size
    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)
    shards_per_blob = shard_count // len(blob_keys)
    if shards_per_blob == 0:
        shards_per_blob = 1
    chunks = []
    for blob_key, blob_size in blob_sizes.items():
        blob_chunk_size = blob_size // shards_per_blob
        for i in xrange(shards_per_blob - 1):
            chunks.append(BlobstoreLineInputReader.from_json({cls.
                BLOB_KEY_PARAM: blob_key, cls.INITIAL_POSITION_PARAM:
                blob_chunk_size * i, cls.END_POSITION_PARAM:
                blob_chunk_size * (i + 1)}))
        chunks.append(BlobstoreLineInputReader.from_json({cls.
            BLOB_KEY_PARAM: blob_key, cls.INITIAL_POSITION_PARAM:
            blob_chunk_size * (shards_per_blob - 1), cls.END_POSITION_PARAM:
            blob_size}))
    return chunks
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n params = _get_params(mapper_spec)\n blob_keys = params[cls.BLOB_KEYS_PARAM]\n if isinstance(blob_keys, basestring):\n blob_keys = blob_keys.split(',')\n blob_sizes = {}\n for blob_key in blob_keys:\n blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))\n blob_sizes[blob_key] = blob_info.size\n shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)\n shards_per_blob = shard_count // len(blob_keys)\n if shards_per_blob == 0:\n shards_per_blob = 1\n chunks = []\n for blob_key, blob_size in blob_sizes.items():\n blob_chunk_size = blob_size // shards_per_blob\n for i in xrange(shards_per_blob - 1):\n chunks.append(BlobstoreLineInputReader.from_json({cls.\n BLOB_KEY_PARAM: blob_key, cls.INITIAL_POSITION_PARAM: \n blob_chunk_size * i, cls.END_POSITION_PARAM: \n blob_chunk_size * (i + 1)}))\n chunks.append(BlobstoreLineInputReader.from_json({cls.\n BLOB_KEY_PARAM: blob_key, cls.INITIAL_POSITION_PARAM: \n blob_chunk_size * (shards_per_blob - 1), cls.END_POSITION_PARAM:\n blob_size}))\n return chunks\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(self):
    if not self._zip:
        self._zip = zipfile.ZipFile(self._reader(self._blob_key))
        self._entries = self._zip.infolist()[self._start_index:self._end_index]
        self._entries.reverse()
    if not self._entries:
        raise StopIteration()
    entry = self._entries.pop()
    self._start_index += 1
    return entry, lambda : self._read(entry)
next
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if not self._zip:\n self._zip = zipfile.ZipFile(self._reader(self._blob_key))\n self._entries = self._zip.infolist()[self._start_index:self._end_index]\n self._entries.reverse()\n if not self._entries:\n raise StopIteration()\n entry = self._entries.pop()\n self._start_index += 1\n return entry, lambda : self._read(entry)\n", "role": "user" }, { "content": "next", "role": "assistant" } ]
def x(self, entry):
    start_time = time.time()
    content = self._zip.read(entry.filename)
    ctx = context.get()
    if ctx:
        operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)
        operation.counters.Increment(COUNTER_IO_READ_MSEC, int((time.time() -
            start_time) * 1000))(ctx)
    return content
read
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, entry):\n start_time = time.time()\n content = self._zip.read(entry.filename)\n ctx = context.get()\n if ctx:\n operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)\n operation.counters.Increment(COUNTER_IO_READ_MSEC, int((time.time() -\n start_time) * 1000))(ctx)\n return content\n", "role": "user" }, { "content": "read", "role": "assistant" } ]
def x(cls, json):
    return cls(json[cls.BLOB_KEY_PARAM], json[cls.START_INDEX_PARAM], json[
        cls.END_INDEX_PARAM])
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n return cls(json[cls.BLOB_KEY_PARAM], json[cls.START_INDEX_PARAM], json[\n cls.END_INDEX_PARAM])\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(self):
    return {self.BLOB_KEY_PARAM: self._blob_key, self.START_INDEX_PARAM:
        self._start_index, self.END_INDEX_PARAM: self._end_index}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return {self.BLOB_KEY_PARAM: self._blob_key, self.START_INDEX_PARAM:\n self._start_index, self.END_INDEX_PARAM: self._end_index}\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, mapper_spec):
    if mapper_spec.input_reader_class() != cls:
        raise BadReaderParamsError('Mapper input reader class mismatch')
    params = _get_params(mapper_spec)
    if cls.BLOB_KEY_PARAM not in params:
        raise BadReaderParamsError("Must specify 'blob_key' for mapper input")
    blob_key = params[cls.BLOB_KEY_PARAM]
    blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
    if not blob_info:
        raise BadReaderParamsError('Could not find blobinfo for key %s' %
            blob_key)
validate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n if mapper_spec.input_reader_class() != cls:\n raise BadReaderParamsError('Mapper input reader class mismatch')\n params = _get_params(mapper_spec)\n if cls.BLOB_KEY_PARAM not in params:\n raise BadReaderParamsError(\"Must specify 'blob_key' for mapper input\")\n blob_key = params[cls.BLOB_KEY_PARAM]\n blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))\n if not blob_info:\n raise BadReaderParamsError('Could not find blobinfo for key %s' %\n blob_key)\n", "role": "user" }, { "content": "validate", "role": "assistant" } ]
def x(cls, mapper_spec, _reader=blobstore.BlobReader):
    params = _get_params(mapper_spec)
    blob_key = params[cls.BLOB_KEY_PARAM]
    zip_input = zipfile.ZipFile(_reader(blob_key))
    zfiles = zip_input.infolist()
    total_size = sum(x.file_size for x in zfiles)
    num_shards = min(mapper_spec.shard_count, cls._MAX_SHARD_COUNT)
    size_per_shard = total_size // num_shards
    shard_start_indexes = [0]
    current_shard_size = 0
    for i, fileinfo in enumerate(zfiles):
        current_shard_size += fileinfo.file_size
        if current_shard_size >= size_per_shard:
            shard_start_indexes.append(i + 1)
            current_shard_size = 0
    if shard_start_indexes[-1] != len(zfiles):
        shard_start_indexes.append(len(zfiles))
    return [cls(blob_key, start_index, end_index, _reader) for start_index,
        end_index in zip(shard_start_indexes, shard_start_indexes[1:])]
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec, _reader=blobstore.BlobReader):\n params = _get_params(mapper_spec)\n blob_key = params[cls.BLOB_KEY_PARAM]\n zip_input = zipfile.ZipFile(_reader(blob_key))\n zfiles = zip_input.infolist()\n total_size = sum(x.file_size for x in zfiles)\n num_shards = min(mapper_spec.shard_count, cls._MAX_SHARD_COUNT)\n size_per_shard = total_size // num_shards\n shard_start_indexes = [0]\n current_shard_size = 0\n for i, fileinfo in enumerate(zfiles):\n current_shard_size += fileinfo.file_size\n if current_shard_size >= size_per_shard:\n shard_start_indexes.append(i + 1)\n current_shard_size = 0\n if shard_start_indexes[-1] != len(zfiles):\n shard_start_indexes.append(len(zfiles))\n return [cls(blob_key, start_index, end_index, _reader) for start_index,\n end_index in zip(shard_start_indexes, shard_start_indexes[1:])]\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(cls, mapper_spec, _reader=blobstore.BlobReader):
    params = _get_params(mapper_spec)
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
        blob_keys = blob_keys.split(',')
    blob_files = {}
    total_size = 0
    for blob_key in blob_keys:
        zip_input = zipfile.ZipFile(_reader(blob_key))
        blob_files[blob_key] = zip_input.infolist()
        total_size += sum(x.file_size for x in blob_files[blob_key])
    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)
    size_per_shard = total_size // shard_count
    readers = []
    for blob_key in blob_keys:
        bfiles = blob_files[blob_key]
        current_shard_size = 0
        start_file_index = 0
        next_file_index = 0
        for fileinfo in bfiles:
            next_file_index += 1
            current_shard_size += fileinfo.file_size
            if current_shard_size >= size_per_shard:
                readers.append(cls(blob_key, start_file_index,
                    next_file_index, 0, _reader))
                current_shard_size = 0
                start_file_index = next_file_index
        if current_shard_size != 0:
            readers.append(cls(blob_key, start_file_index, next_file_index,
                0, _reader))
    return readers
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec, _reader=blobstore.BlobReader):\n params = _get_params(mapper_spec)\n blob_keys = params[cls.BLOB_KEYS_PARAM]\n if isinstance(blob_keys, basestring):\n blob_keys = blob_keys.split(',')\n blob_files = {}\n total_size = 0\n for blob_key in blob_keys:\n zip_input = zipfile.ZipFile(_reader(blob_key))\n blob_files[blob_key] = zip_input.infolist()\n total_size += sum(x.file_size for x in blob_files[blob_key])\n shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)\n size_per_shard = total_size // shard_count\n readers = []\n for blob_key in blob_keys:\n bfiles = blob_files[blob_key]\n current_shard_size = 0\n start_file_index = 0\n next_file_index = 0\n for fileinfo in bfiles:\n next_file_index += 1\n current_shard_size += fileinfo.file_size\n if current_shard_size >= size_per_shard:\n readers.append(cls(blob_key, start_file_index,\n next_file_index, 0, _reader))\n current_shard_size = 0\n start_file_index = next_file_index\n if current_shard_size != 0:\n readers.append(cls(blob_key, start_file_index, next_file_index,\n 0, _reader))\n return readers\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(self):
    if not self._filestream:
        if not self._zip:
            self._zip = zipfile.ZipFile(self._reader(self._blob_key))
            self._entries = self._zip.infolist()[self._start_file_index:
                self._end_file_index]
            self._entries.reverse()
        if not self._entries:
            raise StopIteration()
        entry = self._entries.pop()
        value = self._zip.read(entry.filename)
        self._filestream = StringIO.StringIO(value)
        if self._initial_offset:
            self._filestream.seek(self._initial_offset)
            self._filestream.readline()
    start_position = self._filestream.tell()
    line = self._filestream.readline()
    if not line:
        self._filestream.close()
        self._filestream = None
        self._start_file_index += 1
        self._initial_offset = 0
        return self.next()
    return (self._blob_key, self._start_file_index, start_position
        ), line.rstrip('\n')
next
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if not self._filestream:\n if not self._zip:\n self._zip = zipfile.ZipFile(self._reader(self._blob_key))\n self._entries = self._zip.infolist()[self._start_file_index:\n self._end_file_index]\n self._entries.reverse()\n if not self._entries:\n raise StopIteration()\n entry = self._entries.pop()\n value = self._zip.read(entry.filename)\n self._filestream = StringIO.StringIO(value)\n if self._initial_offset:\n self._filestream.seek(self._initial_offset)\n self._filestream.readline()\n start_position = self._filestream.tell()\n line = self._filestream.readline()\n if not line:\n self._filestream.close()\n self._filestream = None\n self._start_file_index += 1\n self._initial_offset = 0\n return self.next()\n return (self._blob_key, self._start_file_index, start_position\n ), line.rstrip('\\n')\n", "role": "user" }, { "content": "next", "role": "assistant" } ]
def x(self):
    if self._filestream:
        offset = self._filestream.tell()
        if offset:
            offset -= 1
    else:
        offset = self._initial_offset
    return offset
next_offset
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self._filestream:\n offset = self._filestream.tell()\n if offset:\n offset -= 1\n else:\n offset = self._initial_offset\n return offset\n", "role": "user" }, { "content": "next_offset", "role": "assistant" } ]
def x(self):
    return {self.BLOB_KEY_PARAM: self._blob_key, self.
        START_FILE_INDEX_PARAM: self._start_file_index, self.
        END_FILE_INDEX_PARAM: self._end_file_index, self.OFFSET_PARAM: self
        ._next_offset()}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return {self.BLOB_KEY_PARAM: self._blob_key, self.\n START_FILE_INDEX_PARAM: self._start_file_index, self.\n END_FILE_INDEX_PARAM: self._end_file_index, self.OFFSET_PARAM: self\n ._next_offset()}\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json, _reader=blobstore.BlobReader):
    return cls(json[cls.BLOB_KEY_PARAM], json[cls.START_FILE_INDEX_PARAM],
        json[cls.END_FILE_INDEX_PARAM], json[cls.OFFSET_PARAM], _reader)
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json, _reader=blobstore.BlobReader):\n return cls(json[cls.BLOB_KEY_PARAM], json[cls.START_FILE_INDEX_PARAM],\n json[cls.END_FILE_INDEX_PARAM], json[cls.OFFSET_PARAM], _reader)\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(self):
    return {self.NAMESPACE_RANGE_PARAM: self.ns_range.to_json_object(),
        self.BATCH_SIZE_PARAM: self._batch_size}
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return {self.NAMESPACE_RANGE_PARAM: self.ns_range.to_json_object(),\n self.BATCH_SIZE_PARAM: self._batch_size}\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json):
    return cls(namespace_range.NamespaceRange.from_json_object(json[cls.
        NAMESPACE_RANGE_PARAM]), json[cls.BATCH_SIZE_PARAM])
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n return cls(namespace_range.NamespaceRange.from_json_object(json[cls.\n NAMESPACE_RANGE_PARAM]), json[cls.BATCH_SIZE_PARAM])\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(cls, mapper_spec):
    batch_size = int(_get_params(mapper_spec).get(cls.BATCH_SIZE_PARAM, cls
        ._BATCH_SIZE))
    shard_count = mapper_spec.shard_count
    namespace_ranges = namespace_range.NamespaceRange.split(shard_count,
        contiguous=True)
    return [NamespaceInputReader(ns_range, batch_size) for ns_range in
        namespace_ranges]
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n batch_size = int(_get_params(mapper_spec).get(cls.BATCH_SIZE_PARAM, cls\n ._BATCH_SIZE))\n shard_count = mapper_spec.shard_count\n namespace_ranges = namespace_range.NamespaceRange.split(shard_count,\n contiguous=True)\n return [NamespaceInputReader(ns_range, batch_size) for ns_range in\n namespace_ranges]\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(cls, json):
    params = dict((str(k), v) for k, v in json.iteritems() if k in cls._PARAMS)
    if cls._OFFSET_PARAM in params:
        params[cls._OFFSET_PARAM] = base64.b64decode(params[cls._OFFSET_PARAM])
    return cls(**params)
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n params = dict((str(k), v) for k, v in json.iteritems() if k in cls._PARAMS)\n if cls._OFFSET_PARAM in params:\n params[cls._OFFSET_PARAM] = base64.b64decode(params[cls._OFFSET_PARAM])\n return cls(**params)\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(self):
    params = dict(self.__params)
    if self._PROTOTYPE_REQUEST_PARAM in params:
        prototype_request = params[self._PROTOTYPE_REQUEST_PARAM]
        params[self._PROTOTYPE_REQUEST_PARAM] = prototype_request.Encode()
    if self._OFFSET_PARAM in params:
        params[self._OFFSET_PARAM] = base64.b64encode(params[self.
            _OFFSET_PARAM])
    return params
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n params = dict(self.__params)\n if self._PROTOTYPE_REQUEST_PARAM in params:\n prototype_request = params[self._PROTOTYPE_REQUEST_PARAM]\n params[self._PROTOTYPE_REQUEST_PARAM] = prototype_request.Encode()\n if self._OFFSET_PARAM in params:\n params[self._OFFSET_PARAM] = base64.b64encode(params[self.\n _OFFSET_PARAM])\n return params\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, mapper_spec):
    params = _get_params(mapper_spec)
    shard_count = mapper_spec.shard_count
    start_time = params[cls.START_TIME_PARAM]
    end_time = params[cls.END_TIME_PARAM]
    seconds_per_shard = (end_time - start_time) / shard_count
    shards = []
    for _ in xrange(shard_count - 1):
        params[cls.END_TIME_PARAM] = params[cls.START_TIME_PARAM
            ] + seconds_per_shard
        shards.append(LogInputReader(**params))
        params[cls.START_TIME_PARAM] = params[cls.END_TIME_PARAM]
    params[cls.END_TIME_PARAM] = end_time
    return shards + [LogInputReader(**params)]
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n params = _get_params(mapper_spec)\n shard_count = mapper_spec.shard_count\n start_time = params[cls.START_TIME_PARAM]\n end_time = params[cls.END_TIME_PARAM]\n seconds_per_shard = (end_time - start_time) / shard_count\n shards = []\n for _ in xrange(shard_count - 1):\n params[cls.END_TIME_PARAM] = params[cls.START_TIME_PARAM\n ] + seconds_per_shard\n shards.append(LogInputReader(**params))\n params[cls.START_TIME_PARAM] = params[cls.END_TIME_PARAM]\n params[cls.END_TIME_PARAM] = end_time\n return shards + [LogInputReader(**params)]\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(self):
    while True:
        if self._bucket_iter:
            try:
                return self._bucket_iter.next().filename
            except StopIteration:
                self._bucket_iter = None
                self._bucket = None
        if self._index >= len(self._filenames):
            return
        filename = self._filenames[self._index]
        self._index += 1
        if self._delimiter is None or not filename.endswith(self._delimiter):
            return filename
        self._bucket = cloudstorage.listbucket(filename, delimiter=self.
            _delimiter)
        self._bucket_iter = iter(self._bucket)
next_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n while True:\n if self._bucket_iter:\n try:\n return self._bucket_iter.next().filename\n except StopIteration:\n self._bucket_iter = None\n self._bucket = None\n if self._index >= len(self._filenames):\n return\n filename = self._filenames[self._index]\n self._index += 1\n if self._delimiter is None or not filename.endswith(self._delimiter):\n return filename\n self._bucket = cloudstorage.listbucket(filename, delimiter=self.\n _delimiter)\n self._bucket_iter = iter(self._bucket)\n", "role": "user" }, { "content": "next_file", "role": "assistant" } ]
def x(cls, mapper_spec): reader_spec = cls.get_params(mapper_spec, allow_old=False) bucket = reader_spec[cls.BUCKET_NAME_PARAM] filenames = reader_spec[cls.OBJECT_NAMES_PARAM] delimiter = reader_spec.get(cls.DELIMITER_PARAM) account_id = reader_spec.get(cls._ACCOUNT_ID_PARAM) buffer_size = reader_spec.get(cls.BUFFER_SIZE_PARAM) fail_on_missing_input = reader_spec.get(cls.FAIL_ON_MISSING_INPUT) all_filenames = [] for filename in filenames: if filename.endswith('*'): all_filenames.extend([file_stat.filename for file_stat in cloudstorage.listbucket('/' + bucket + '/' + filename[:-1], delimiter=delimiter, _account_id=account_id)]) else: all_filenames.append('/%s/%s' % (bucket, filename)) readers = [] for shard in range(0, mapper_spec.shard_count): shard_filenames = all_filenames[shard::mapper_spec.shard_count] if shard_filenames: reader = cls(shard_filenames, buffer_size=buffer_size, _account_id=account_id, delimiter=delimiter) reader._fail_on_missing_input = fail_on_missing_input readers.append(reader) return readers
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, mapper_spec):\n reader_spec = cls.get_params(mapper_spec, allow_old=False)\n bucket = reader_spec[cls.BUCKET_NAME_PARAM]\n filenames = reader_spec[cls.OBJECT_NAMES_PARAM]\n delimiter = reader_spec.get(cls.DELIMITER_PARAM)\n account_id = reader_spec.get(cls._ACCOUNT_ID_PARAM)\n buffer_size = reader_spec.get(cls.BUFFER_SIZE_PARAM)\n fail_on_missing_input = reader_spec.get(cls.FAIL_ON_MISSING_INPUT)\n all_filenames = []\n for filename in filenames:\n if filename.endswith('*'):\n all_filenames.extend([file_stat.filename for file_stat in\n cloudstorage.listbucket('/' + bucket + '/' + filename[:-1],\n delimiter=delimiter, _account_id=account_id)])\n else:\n all_filenames.append('/%s/%s' % (bucket, filename))\n readers = []\n for shard in range(0, mapper_spec.shard_count):\n shard_filenames = all_filenames[shard::mapper_spec.shard_count]\n if shard_filenames:\n reader = cls(shard_filenames, buffer_size=buffer_size,\n _account_id=account_id, delimiter=delimiter)\n reader._fail_on_missing_input = fail_on_missing_input\n readers.append(reader)\n return readers\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(self): options = {} if self._buffer_size: options['read_buffer_size'] = self._buffer_size if self._account_id: options['_account_id'] = self._account_id while True: filename = self._next_file() if filename is None: raise StopIteration() try: start_time = time.time() handle = cloudstorage.open(filename, **options) ctx = context.get() if ctx: operation.counters.Increment(COUNTER_IO_READ_MSEC, int(( time.time() - start_time) * 1000))(ctx) return handle except cloudstorage.NotFoundError: if getattr(self, '_fail_on_missing_input', False): raise errors.FailJobError( 'File missing in GCS, aborting: %s' % filename) logging.warning('File %s may have been removed. Skipping file.', filename)
next
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n options = {}\n if self._buffer_size:\n options['read_buffer_size'] = self._buffer_size\n if self._account_id:\n options['_account_id'] = self._account_id\n while True:\n filename = self._next_file()\n if filename is None:\n raise StopIteration()\n try:\n start_time = time.time()\n handle = cloudstorage.open(filename, **options)\n ctx = context.get()\n if ctx:\n operation.counters.Increment(COUNTER_IO_READ_MSEC, int((\n time.time() - start_time) * 1000))(ctx)\n return handle\n except cloudstorage.NotFoundError:\n if getattr(self, '_fail_on_missing_input', False):\n raise errors.FailJobError(\n 'File missing in GCS, aborting: %s' % filename)\n logging.warning('File %s may have been removed. Skipping file.',\n filename)\n", "role": "user" }, { "content": "next", "role": "assistant" } ]
def x(self): while True: if not hasattr(self, '_cur_handle') or self._cur_handle is None: self._cur_handle = super(_GoogleCloudStorageRecordInputReader, self ).next() if not hasattr(self, '_record_reader') or self._record_reader is None: self._record_reader = records.RecordsReader(self._cur_handle) try: start_time = time.time() content = self._record_reader.read() ctx = context.get() if ctx: operation.counters.Increment(COUNTER_IO_READ_BYTES, len( content))(ctx) operation.counters.Increment(COUNTER_IO_READ_MSEC, int(( time.time() - start_time) * 1000))(ctx) return content except EOFError: self._cur_handle = None self._record_reader = None
next
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n while True:\n if not hasattr(self, '_cur_handle') or self._cur_handle is None:\n self._cur_handle = super(_GoogleCloudStorageRecordInputReader, self\n ).next()\n if not hasattr(self, '_record_reader') or self._record_reader is None:\n self._record_reader = records.RecordsReader(self._cur_handle)\n try:\n start_time = time.time()\n content = self._record_reader.read()\n ctx = context.get()\n if ctx:\n operation.counters.Increment(COUNTER_IO_READ_BYTES, len(\n content))(ctx)\n operation.counters.Increment(COUNTER_IO_READ_MSEC, int((\n time.time() - start_time) * 1000))(ctx)\n return content\n except EOFError:\n self._cur_handle = None\n self._record_reader = None\n", "role": "user" }, { "content": "next", "role": "assistant" } ]
def x(self): result = super(_ReducerReader, self).to_json() result['current_key'] = self.encode_data(self.current_key) result['current_values'] = self.encode_data(self.current_values) return result
to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n result = super(_ReducerReader, self).to_json()\n result['current_key'] = self.encode_data(self.current_key)\n result['current_values'] = self.encode_data(self.current_values)\n return result\n", "role": "user" }, { "content": "to_json", "role": "assistant" } ]
def x(cls, json): result = super(_ReducerReader, cls).from_json(json) result.current_key = _ReducerReader.decode_data(json['current_key']) result.current_values = _ReducerReader.decode_data(json['current_values']) return result
from_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, json):\n result = super(_ReducerReader, cls).from_json(json)\n result.current_key = _ReducerReader.decode_data(json['current_key'])\n result.current_values = _ReducerReader.decode_data(json['current_values'])\n return result\n", "role": "user" }, { "content": "from_json", "role": "assistant" } ]
def x(self, counter_name, delta=1): self._state.counters_map.increment(counter_name, delta)
incr
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, counter_name, delta=1):\n self._state.counters_map.increment(counter_name, delta)\n", "role": "user" }, { "content": "incr", "role": "assistant" } ]
def x(self, counter_name, default=0): return self._state.counters_map.get(counter_name, default)
counter
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, counter_name, default=0):\n return self._state.counters_map.get(counter_name, default)\n", "role": "user" }, { "content": "counter", "role": "assistant" } ]
def x(self, value): if not self._tstate.output_writer: logging.error('emit is called, but no output writer is set.') return self._tstate.output_writer.write(value)
emit
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value):\n if not self._tstate.output_writer:\n logging.error('emit is called, but no output writer is set.')\n return\n self._tstate.output_writer.write(value)\n", "role": "user" }, { "content": "emit", "role": "assistant" } ]
def x(cls, job_config): reader_params = job_config.input_reader_params bucket = reader_params[cls.BUCKET_NAME_PARAM] filenames = reader_params[cls.OBJECT_NAMES_PARAM] delimiter = reader_params.get(cls.DELIMITER_PARAM) account_id = reader_params.get(cls._ACCOUNT_ID_PARAM) buffer_size = reader_params.get(cls.BUFFER_SIZE_PARAM) path_filter = reader_params.get(cls.PATH_FILTER_PARAM) all_filenames = [] for filename in filenames: if filename.endswith('*'): all_filenames.extend([file_stat.filename for file_stat in cloudstorage.listbucket('/' + bucket + '/' + filename[:-1], delimiter=delimiter, _account_id=account_id)]) else: all_filenames.append('/%s/%s' % (bucket, filename)) readers = [] for shard in range(0, job_config.shard_count): shard_filenames = all_filenames[shard::job_config.shard_count] if shard_filenames: readers.append(cls(shard_filenames, buffer_size=buffer_size, _account_id=account_id, delimiter=delimiter, path_filter= path_filter)) return readers
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_config):\n reader_params = job_config.input_reader_params\n bucket = reader_params[cls.BUCKET_NAME_PARAM]\n filenames = reader_params[cls.OBJECT_NAMES_PARAM]\n delimiter = reader_params.get(cls.DELIMITER_PARAM)\n account_id = reader_params.get(cls._ACCOUNT_ID_PARAM)\n buffer_size = reader_params.get(cls.BUFFER_SIZE_PARAM)\n path_filter = reader_params.get(cls.PATH_FILTER_PARAM)\n all_filenames = []\n for filename in filenames:\n if filename.endswith('*'):\n all_filenames.extend([file_stat.filename for file_stat in\n cloudstorage.listbucket('/' + bucket + '/' + filename[:-1],\n delimiter=delimiter, _account_id=account_id)])\n else:\n all_filenames.append('/%s/%s' % (bucket, filename))\n readers = []\n for shard in range(0, job_config.shard_count):\n shard_filenames = all_filenames[shard::job_config.shard_count]\n if shard_filenames:\n readers.append(cls(shard_filenames, buffer_size=buffer_size,\n _account_id=account_id, delimiter=delimiter, path_filter=\n path_filter))\n return readers\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]
def x(self):
    options = {}
    if self._buffer_size:
        options['read_buffer_size'] = self._buffer_size
    if self._account_id:
        options['_account_id'] = self._account_id
    while True:
        filename = self._next_file()
        if filename is None:
            raise StopIteration()
        if self._path_filter and not self._path_filter.accept(self.
            _slice_ctx, filename):
            continue
        try:
            start_time = time.time()
            handle = cloudstorage.open(filename, **options)
            self._slice_ctx.incr(self.COUNTER_IO_READ_MSEC, int((time.time() -
                start_time) * 1000))
            self._slice_ctx.incr(self.COUNTER_FILE_READ)
            return handle
        except cloudstorage.NotFoundError:
            logging.warning('File %s may have been removed. Skipping file.',
                filename)
            self._slice_ctx.incr(self.COUNTER_FILE_MISSING)
next
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n options = {}\n if self._buffer_size:\n options['read_buffer_size'] = self._buffer_size\n if self._account_id:\n options['_account_id'] = self._account_id\n while True:\n filename = self._next_file()\n if filename is None:\n raise StopIteration()\n if self._path_filter and not self._path_filter.accept(self.\n _slice_ctx, filename):\n continue\n try:\n start_time = time.time()\n handle = cloudstorage.open(filename, **options)\n self._slice_ctx.incr(self.COUNTER_IO_READ_MSEC, int(time.time() -\n start_time) * 1000)\n self._slice_ctx.incr(self.COUNTER_FILE_READ)\n return handle\n except cloudstorage.NotFoundError:\n logging.warning('File %s may have been removed. Skipping file.',\n filename)\n self._slice_ctx.incr(self.COUNTER_FILE_MISSING)\n", "role": "user" }, { "content": "next", "role": "assistant" } ]
def x(cls, params): params_cp = dict(params) if cls.PATH_FILTER_PARAM in params_cp: path_filter = params_cp[cls.PATH_FILTER_PARAM] params_cp[cls.PATH_FILTER_PARAM] = pickle.dumps(path_filter) return params_cp
params_to_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, params):\n params_cp = dict(params)\n if cls.PATH_FILTER_PARAM in params_cp:\n path_filter = params_cp[cls.PATH_FILTER_PARAM]\n params_cp[cls.PATH_FILTER_PARAM] = pickle.dumps(path_filter)\n return params_cp\n", "role": "user" }, { "content": "params_to_json", "role": "assistant" } ]
def x(self):
    while True:
        if not hasattr(self, '_cur_handle') or self._cur_handle is None:
            self._cur_handle = super(GCSRecordInputReader, self).next()
        if not hasattr(self, '_record_reader') or self._record_reader is None:
            self._record_reader = records.RecordsReader(self._cur_handle)
        try:
            start_time = time.time()
            content = self._record_reader.read()
            self._slice_ctx.incr(self.COUNTER_IO_READ_BYTE, len(content))
            self._slice_ctx.incr(self.COUNTER_IO_READ_MSEC, int((time.time() -
                start_time) * 1000))
            return content
        except EOFError:
            self._cur_handle = None
            self._record_reader = None
next
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n while True:\n if not hasattr(self, '_cur_handle') or self._cur_handle is None:\n self._cur_handle = super(GCSRecordInputReader, self).next()\n if not hasattr(self, '_record_reader') or self._record_reader is None:\n self._record_reader = records.RecordsReader(self._cur_handle)\n try:\n start_time = time.time()\n content = self._record_reader.read()\n self._slice_ctx.incr(self.COUNTER_IO_READ_BYTE, len(content))\n self._slice_ctx.incr(self.COUNTER_IO_READ_MSEC, int(time.time() -\n start_time) * 1000)\n return content\n except EOFError:\n self._cur_handle = None\n self._record_reader = None\n", "role": "user" }, { "content": "next", "role": "assistant" } ]
def x(cls, params): entity_kind = params[cls.ENTITY_KIND_PARAM] filters = params.get(cls.FILTERS_PARAM) app = params.get(cls._APP_PARAM) ns = params.get(cls.NAMESPACE_PARAM) return model.QuerySpec(entity_kind=cls._get_raw_entity_kind(entity_kind ), keys_only=bool(params.get(cls.KEYS_ONLY_PARAM, False)), filters= filters, batch_size=int(params.get(cls.BATCH_SIZE_PARAM, cls. _BATCH_SIZE)), model_class_path=entity_kind, app=app, ns=ns)
get_query_spec
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, params):\n entity_kind = params[cls.ENTITY_KIND_PARAM]\n filters = params.get(cls.FILTERS_PARAM)\n app = params.get(cls._APP_PARAM)\n ns = params.get(cls.NAMESPACE_PARAM)\n return model.QuerySpec(entity_kind=cls._get_raw_entity_kind(entity_kind\n ), keys_only=bool(params.get(cls.KEYS_ONLY_PARAM, False)), filters=\n filters, batch_size=int(params.get(cls.BATCH_SIZE_PARAM, cls.\n _BATCH_SIZE)), model_class_path=entity_kind, app=app, ns=ns)\n", "role": "user" }, { "content": "get_query_spec", "role": "assistant" } ]
def x(cls, job_config): shard_count = job_config.shard_count params = job_config.input_reader_params query_spec = cls._get_query_spec(params) namespaces = None if query_spec.ns is not None: k_ranges = cls._to_key_ranges_by_shard(query_spec.app, [query_spec. ns], shard_count, query_spec) else: ns_keys = namespace_range.get_namespace_keys(query_spec.app, cls. MAX_NAMESPACES_FOR_KEY_SHARD + 1) if not ns_keys: return elif len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD: namespaces = [(ns_key.name() or '') for ns_key in ns_keys] k_ranges = cls._to_key_ranges_by_shard(query_spec.app, namespaces, shard_count, query_spec) else: ns_ranges = namespace_range.NamespaceRange.split(n=shard_count, contiguous=False, can_query=lambda : True, _app=query_spec.app) k_ranges = [key_ranges.KeyRangesFactory.create_from_ns_range( ns_range) for ns_range in ns_ranges] iters = [db_iters.RangeIteratorFactory.create_key_ranges_iterator(r, query_spec, cls._KEY_RANGE_ITER_CLS) for r in k_ranges] return [cls(i) for i in iters]
split_input
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, job_config):\n shard_count = job_config.shard_count\n params = job_config.input_reader_params\n query_spec = cls._get_query_spec(params)\n namespaces = None\n if query_spec.ns is not None:\n k_ranges = cls._to_key_ranges_by_shard(query_spec.app, [query_spec.\n ns], shard_count, query_spec)\n else:\n ns_keys = namespace_range.get_namespace_keys(query_spec.app, cls.\n MAX_NAMESPACES_FOR_KEY_SHARD + 1)\n if not ns_keys:\n return\n elif len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD:\n namespaces = [(ns_key.name() or '') for ns_key in ns_keys]\n k_ranges = cls._to_key_ranges_by_shard(query_spec.app,\n namespaces, shard_count, query_spec)\n else:\n ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,\n contiguous=False, can_query=lambda : True, _app=query_spec.app)\n k_ranges = [key_ranges.KeyRangesFactory.create_from_ns_range(\n ns_range) for ns_range in ns_ranges]\n iters = [db_iters.RangeIteratorFactory.create_key_ranges_iterator(r,\n query_spec, cls._KEY_RANGE_ITER_CLS) for r in k_ranges]\n return [cls(i) for i in iters]\n", "role": "user" }, { "content": "split_input", "role": "assistant" } ]