source_code: string (length 45 to 7.24k)
function_name: string (length 1 to 111)
messages: list
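A minimal sketch (not part of the dataset itself) of how the two scalar columns map onto the chat-style "messages" list stored in each row below. The system prompt text is copied from the rows; the helper name `row_to_messages` and the example row values are illustrative only.

```python
# Sketch: build the three-turn conversation stored in the `messages` column
# from a row's `source_code` and `function_name` values.

SYSTEM_PROMPT = ("Given the source code of a python function, "
                 "suggest a fitting name for the function.")

def row_to_messages(source_code: str, function_name: str) -> list[dict]:
    """Return the system/user/assistant turns used by this dataset."""
    return [
        {"role": "system", "content": SYSTEM_PROMPT},
        {"role": "user", "content": source_code},
        {"role": "assistant", "content": function_name},
    ]

if __name__ == "__main__":
    # Hypothetical row values, shown only to illustrate the record shape.
    example = row_to_messages(
        "def x(a, b):\n    return a + b\n",  # anonymized function body
        "add",                                # target name to predict
    )
    print(example)
```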
def x(self, key, value): if key in self._position: raise KeyError('%s is already in the queue' % repr(key)) self[key] = value
additem
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, key, value):\n if key in self._position:\n raise KeyError('%s is already in the queue' % repr(key))\n self[key] = value\n", "role": "user" }, { "content": "additem", "role": "assistant" } ]
def x(self, key, value, node_factory=_Node): heap = self._heap position = self._position precedes = self._precedes prio = self._keyfn(value) if self._keyfn else value node = node_factory(key, value, prio) if key in self: raise KeyError('%s is already in the queue' % repr(key)) if heap and precedes(heap[0].prio, node.prio): node, heap[0] = heap[0], node position[key] = 0 del position[node.key] self._sink(0) return node.key, node.value
pushpopitem
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, key, value, node_factory=_Node):\n heap = self._heap\n position = self._position\n precedes = self._precedes\n prio = self._keyfn(value) if self._keyfn else value\n node = node_factory(key, value, prio)\n if key in self:\n raise KeyError('%s is already in the queue' % repr(key))\n if heap and precedes(heap[0].prio, node.prio):\n node, heap[0] = heap[0], node\n position[key] = 0\n del position[node.key]\n self._sink(0)\n return node.key, node.value\n", "role": "user" }, { "content": "pushpopitem", "role": "assistant" } ]
def x(self, key, new_val): if key not in self._position: raise KeyError(key) self[key] = new_val
updateitem
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, key, new_val):\n if key not in self._position:\n raise KeyError(key)\n self[key] = new_val\n", "role": "user" }, { "content": "updateitem", "role": "assistant" } ]
def x(self, key, new_key): heap = self._heap position = self._position if new_key in self: raise KeyError('%s is already in the queue' % repr(new_key)) pos = position.pop(key) position[new_key] = pos heap[pos].key = new_key
replace_key
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, key, new_key):\n heap = self._heap\n position = self._position\n if new_key in self:\n raise KeyError('%s is already in the queue' % repr(new_key))\n pos = position.pop(key)\n position[new_key] = pos\n heap[pos].key = new_key\n", "role": "user" }, { "content": "replace_key", "role": "assistant" } ]
def x(self, key1, key2): heap = self._heap position = self._position if key1 not in self or key2 not in self: raise KeyError pos1, pos2 = position[key1], position[key2] heap[pos1].key, heap[pos2].key = key2, key1 position[key1], position[key2] = pos2, pos1
swap_priority
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, key1, key2):\n heap = self._heap\n position = self._position\n if key1 not in self or key2 not in self:\n raise KeyError\n pos1, pos2 = position[key1], position[key2]\n heap[pos1].key, heap[pos2].key = key2, key1\n position[key1], position[key2] = pos2, pos1\n", "role": "user" }, { "content": "swap_priority", "role": "assistant" } ]
def x(self, key=__marker): if key is self.__marker: n = len(self._heap) for pos in reversed(range(n // 2)): self._sink(pos) else: try: pos = self._position[key] except KeyError: raise KeyError(key) self._reheapify(pos)
heapify
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, key=__marker):\n if key is self.__marker:\n n = len(self._heap)\n for pos in reversed(range(n // 2)):\n self._sink(pos)\n else:\n try:\n pos = self._position[key]\n except KeyError:\n raise KeyError(key)\n self._reheapify(pos)\n", "role": "user" }, { "content": "heapify", "role": "assistant" } ]
def x(package_name): version_file_path = helpers.package_file_path('_version.py', package_name) return os.path.isfile(version_file_path)
package_has_version_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(package_name):\n version_file_path = helpers.package_file_path('_version.py', package_name)\n return os.path.isfile(version_file_path)\n", "role": "user" }, { "content": "package_has_version_file", "role": "assistant" } ]
def x(): setup_py_content = helpers.get_file_content('setup.py') ret = helpers.value_of_named_argument_in_function('name', 'setup', setup_py_content, resolve_varname=True) if ret and ret[0] == ret[-1] in ('"', "'"): ret = ret[1:-1] return ret
get_project_name
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n setup_py_content = helpers.get_file_content('setup.py')\n ret = helpers.value_of_named_argument_in_function('name', 'setup',\n setup_py_content, resolve_varname=True)\n if ret and ret[0] == ret[-1] in ('\"', \"'\"):\n ret = ret[1:-1]\n return ret\n", "role": "user" }, { "content": "get_project_name", "role": "assistant" } ]
def x(package_name, ignore_cache=False): if ignore_cache: with microcache.temporarily_disabled(): found = helpers.regex_in_package_file(VERSION_SET_REGEX, '_version.py', package_name, return_match=True) else: found = helpers.regex_in_package_file(VERSION_SET_REGEX, '_version.py', package_name, return_match=True) if found is None: raise ProjectError('found {}, but __version__ is not defined') current_version = found['version'] return current_version
get_version
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(package_name, ignore_cache=False):\n if ignore_cache:\n with microcache.temporarily_disabled():\n found = helpers.regex_in_package_file(VERSION_SET_REGEX,\n '_version.py', package_name, return_match=True)\n else:\n found = helpers.regex_in_package_file(VERSION_SET_REGEX,\n '_version.py', package_name, return_match=True)\n if found is None:\n raise ProjectError('found {}, but __version__ is not defined')\n current_version = found['version']\n return current_version\n", "role": "user" }, { "content": "get_version", "role": "assistant" } ]
def x(package_name, version_str): current_version = get_version(package_name) version_file_path = helpers.package_file_path('_version.py', package_name) version_file_content = helpers.get_file_content(version_file_path) version_file_content = version_file_content.replace(current_version, version_str) with open(version_file_path, 'w') as version_file: version_file.write(version_file_content)
set_version
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(package_name, version_str):\n current_version = get_version(package_name)\n version_file_path = helpers.package_file_path('_version.py', package_name)\n version_file_content = helpers.get_file_content(version_file_path)\n version_file_content = version_file_content.replace(current_version,\n version_str)\n with open(version_file_path, 'w') as version_file:\n version_file.write(version_file_content)\n", "role": "user" }, { "content": "set_version", "role": "assistant" } ]
def x(version_str): try: packaging.version.Version(version_str) except packaging.version.InvalidVersion: return False return True
version_is_valid
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(version_str):\n try:\n packaging.version.Version(version_str)\n except packaging.version.InvalidVersion:\n return False\n return True\n", "role": "user" }, { "content": "version_is_valid", "role": "assistant" } ]
def x(project_name, index_url, requests_verify=True): url = '/'.join((index_url, project_name, 'json')) response = requests.get(url, verify=requests_verify) if response.status_code == 200: return response.json()['releases'].keys() return None
get_uploaded_versions_warehouse
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(project_name, index_url, requests_verify=True):\n url = '/'.join((index_url, project_name, 'json'))\n response = requests.get(url, verify=requests_verify)\n if response.status_code == 200:\n return response.json()['releases'].keys()\n return None\n", "role": "user" }, { "content": "get_uploaded_versions_warehouse", "role": "assistant" } ]
def x(project_name, index_url, requests_verify=True): api_url = index_url for suffix in ('/pypi', '/pypi/', '/simple', '/simple/'): if api_url.endswith(suffix): api_url = api_url[:len(suffix) * -1] + '/api/package' break url = '/'.join((api_url, project_name)) response = requests.get(url, verify=requests_verify) if response.status_code == 200: return [p['version'] for p in response.json()['packages']] return None
get_uploaded_versions_pypicloud
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(project_name, index_url, requests_verify=True):\n api_url = index_url\n for suffix in ('/pypi', '/pypi/', '/simple', '/simple/'):\n if api_url.endswith(suffix):\n api_url = api_url[:len(suffix) * -1] + '/api/package'\n break\n url = '/'.join((api_url, project_name))\n response = requests.get(url, verify=requests_verify)\n if response.status_code == 200:\n return [p['version'] for p in response.json()['packages']]\n return None\n", "role": "user" }, { "content": "get_uploaded_versions_pypicloud", "role": "assistant" } ]
def x(project_name, version_str, index_url, requests_verify=True): all_versions = _get_uploaded_versions(project_name, index_url, requests_verify) return version_str in all_versions
version_already_uploaded
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(project_name, version_str, index_url, requests_verify=True):\n all_versions = _get_uploaded_versions(project_name, index_url,\n requests_verify)\n return version_str in all_versions\n", "role": "user" }, { "content": "version_already_uploaded", "role": "assistant" } ]
def x(): project_files = os.listdir('.') for filename in project_files: if filename.lower() == 'readme': raise ProjectError('found {} in project directory...'.format( filename) + 'not sure what to do with it, refusing to convert') elif filename.lower() == 'readme.rst': raise ProjectError('found {} in project directory...'.format( filename) + 'refusing to overwrite') for filename in project_files: if filename.lower() == 'readme.md': rst_filename = 'README.rst' logger.info('converting {} to {}'.format(filename, rst_filename)) try: rst_content = pypandoc.convert(filename, 'rst') with open('README.rst', 'w') as rst_file: rst_file.write(rst_content) return except OSError as e: raise ProjectError( 'could not convert readme to rst due to pypandoc error:' + os.linesep + str(e)) raise ProjectError('could not find any README.md file to convert')
convert_readme_to_rst
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n project_files = os.listdir('.')\n for filename in project_files:\n if filename.lower() == 'readme':\n raise ProjectError('found {} in project directory...'.format(\n filename) + 'not sure what to do with it, refusing to convert')\n elif filename.lower() == 'readme.rst':\n raise ProjectError('found {} in project directory...'.format(\n filename) + 'refusing to overwrite')\n for filename in project_files:\n if filename.lower() == 'readme.md':\n rst_filename = 'README.rst'\n logger.info('converting {} to {}'.format(filename, rst_filename))\n try:\n rst_content = pypandoc.convert(filename, 'rst')\n with open('README.rst', 'w') as rst_file:\n rst_file.write(rst_content)\n return\n except OSError as e:\n raise ProjectError(\n 'could not convert readme to rst due to pypandoc error:' +\n os.linesep + str(e))\n raise ProjectError('could not find any README.md file to convert')\n", "role": "user" }, { "content": "convert_readme_to_rst", "role": "assistant" } ]
def x(package_name): if not os.path.isdir('dist'): return [] return [os.path.join('dist', filename) for filename in os.listdir('dist')]
get_packaged_files
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(package_name):\n if not os.path.isdir('dist'):\n return []\n return [os.path.join('dist', filename) for filename in os.listdir('dist')]\n", "role": "user" }, { "content": "get_packaged_files", "role": "assistant" } ]
def x(package_name): dist_files = os.listdir('dist') versions = set() for filename in dist_files: version = funcy.re_find('{}-(.+).tar.gz'.format(package_name), filename ) if version: versions.add(version) return len(versions) > 1
multiple_packaged_versions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(package_name):\n dist_files = os.listdir('dist')\n versions = set()\n for filename in dist_files:\n version = funcy.re_find('{}-(.+).tar.gz'.format(package_name), filename\n )\n if version:\n versions.add(version)\n return len(versions) > 1\n", "role": "user" }, { "content": "multiple_packaged_versions", "role": "assistant" } ]
def x(self, radius, smoothing, threshold, stride, start_time, end_time): neighbor_x = self.x[::stride, ::stride] neighbor_y = self.y[::stride, ::stride] neighbor_kd_tree = cKDTree(np.vstack((neighbor_x.ravel(), neighbor_y. ravel())).T) neighbor_prob = np.zeros((self.data.shape[0], neighbor_x.shape[0], neighbor_x.shape[1])) print('Forecast Hours: {0}-{1}'.format(start_time, end_time)) for m in range(len(self.members)): period_max = self.data[m, start_time:end_time, :, :].max(axis=0) valid_i, valid_j = np.where(period_max >= threshold) print(self.members[m], len(valid_i)) if len(valid_i) > 0: var_kd_tree = cKDTree(np.vstack((self.x[valid_i, valid_j], self .y[valid_i, valid_j])).T) exceed_points = np.unique(np.concatenate(var_kd_tree. query_ball_tree(neighbor_kd_tree, radius))).astype(int) exceed_i, exceed_j = np.unravel_index(exceed_points, neighbor_x .shape) neighbor_prob[m][exceed_i, exceed_j] = 1 if smoothing > 0: neighbor_prob[m] = gaussian_filter(neighbor_prob[m], smoothing, mode='constant') return neighbor_prob
period_neighborhood_probability
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, radius, smoothing, threshold, stride, start_time, end_time):\n neighbor_x = self.x[::stride, ::stride]\n neighbor_y = self.y[::stride, ::stride]\n neighbor_kd_tree = cKDTree(np.vstack((neighbor_x.ravel(), neighbor_y.\n ravel())).T)\n neighbor_prob = np.zeros((self.data.shape[0], neighbor_x.shape[0],\n neighbor_x.shape[1]))\n print('Forecast Hours: {0}-{1}'.format(start_time, end_time))\n for m in range(len(self.members)):\n period_max = self.data[m, start_time:end_time, :, :].max(axis=0)\n valid_i, valid_j = np.where(period_max >= threshold)\n print(self.members[m], len(valid_i))\n if len(valid_i) > 0:\n var_kd_tree = cKDTree(np.vstack((self.x[valid_i, valid_j], self\n .y[valid_i, valid_j])).T)\n exceed_points = np.unique(np.concatenate(var_kd_tree.\n query_ball_tree(neighbor_kd_tree, radius))).astype(int)\n exceed_i, exceed_j = np.unravel_index(exceed_points, neighbor_x\n .shape)\n neighbor_prob[m][exceed_i, exceed_j] = 1\n if smoothing > 0:\n neighbor_prob[m] = gaussian_filter(neighbor_prob[m],\n smoothing, mode='constant')\n return neighbor_prob\n", "role": "user" }, { "content": "period_neighborhood_probability", "role": "assistant" } ]
def x(retorno): if '|' not in retorno: raise ErroRespostaSATInvalida( 'Resposta nao possui pipes separando os campos: {!r}'.format( retorno)) resposta = _RespostaParcial(*retorno.split('|')[:2]) for faixa, construtor in _RESPOSTAS_POSSIVEIS: if int(resposta.EEEEE) in xrange(faixa, faixa + 1000): return construtor(retorno) return RespostaConsultarNumeroSessao._pos_analise(retorno)
analisar
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n if '|' not in retorno:\n raise ErroRespostaSATInvalida(\n 'Resposta nao possui pipes separando os campos: {!r}'.format(\n retorno))\n resposta = _RespostaParcial(*retorno.split('|')[:2])\n for faixa, construtor in _RESPOSTAS_POSSIVEIS:\n if int(resposta.EEEEE) in xrange(faixa, faixa + 1000):\n return construtor(retorno)\n return RespostaConsultarNumeroSessao._pos_analise(retorno)\n", "role": "user" }, { "content": "analisar", "role": "assistant" } ]
def x(retorno, classe_resposta=RespostaSAT, campos=RespostaSAT.CAMPOS, campos_alternativos=[], funcao=None, manter_verbatim=True): if '|' not in retorno: raise ErroRespostaSATInvalida( 'Resposta nao possui pipes separando os campos: "%s"' % as_ascii(retorno)) partes = retorno.split('|') if len(partes) != len(campos): for relacao_alternativa in campos_alternativos: if len(partes) == len(relacao_alternativa): relacao_campos = relacao_alternativa break else: raise ErroRespostaSATInvalida( 'Resposta nao possui o numero esperado de campos. Esperados %d campos, mas contem %d: "%s"' % (len(campos), len(partes), as_ascii(retorno))) else: relacao_campos = campos resultado = {} def x(sequence): for index, value in enumerate(sequence): yield index, value[0], value[1] for indice, campo, conversor in _enumerate(relacao_campos): resultado[campo] = conversor(partes[indice]) resposta = classe_resposta(**resultado) resposta.atributos.funcao = funcao resposta.atributos.verbatim = retorno if manter_verbatim else None return resposta
analisar_retorno
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno, classe_resposta=RespostaSAT, campos=RespostaSAT.CAMPOS,\n campos_alternativos=[], funcao=None, manter_verbatim=True):\n if '|' not in retorno:\n raise ErroRespostaSATInvalida(\n 'Resposta nao possui pipes separando os campos: \"%s\"' %\n as_ascii(retorno))\n partes = retorno.split('|')\n if len(partes) != len(campos):\n for relacao_alternativa in campos_alternativos:\n if len(partes) == len(relacao_alternativa):\n relacao_campos = relacao_alternativa\n break\n else:\n raise ErroRespostaSATInvalida(\n 'Resposta nao possui o numero esperado de campos. Esperados %d campos, mas contem %d: \"%s\"'\n % (len(campos), len(partes), as_ascii(retorno)))\n else:\n relacao_campos = campos\n resultado = {}\n\n def x(sequence):\n for index, value in enumerate(sequence):\n yield index, value[0], value[1]\n for indice, campo, conversor in _enumerate(relacao_campos):\n resultado[campo] = conversor(partes[indice])\n resposta = classe_resposta(**resultado)\n resposta.atributos.funcao = funcao\n resposta.atributos.verbatim = retorno if manter_verbatim else None\n return resposta\n", "role": "user" }, { "content": "analisar_retorno", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao= 'ComunicarCertificadoICPBRASIL') if resposta.EEEEE not in ('05000',): raise ExcecaoRespostaSAT(resposta) return resposta
comunicar_certificado_icpbrasil
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao=\n 'ComunicarCertificadoICPBRASIL')\n if resposta.EEEEE not in ('05000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "comunicar_certificado_icpbrasil", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao='ConsultarSAT') if resposta.EEEEE not in ('08000',): raise ExcecaoRespostaSAT(resposta) return resposta
consultar_sat
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao='ConsultarSAT')\n if resposta.EEEEE not in ('08000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "consultar_sat", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao= 'ConfigurarInterfaceDeRede') if resposta.EEEEE not in ('12000',): raise ExcecaoRespostaSAT(resposta) return resposta
configurar_interface_de_rede
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao=\n 'ConfigurarInterfaceDeRede')\n if resposta.EEEEE not in ('12000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "configurar_interface_de_rede", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao= 'AssociarAssinatura') if resposta.EEEEE not in ('13000',): raise ExcecaoRespostaSAT(resposta) return resposta
associar_assinatura
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao=\n 'AssociarAssinatura')\n if resposta.EEEEE not in ('13000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "associar_assinatura", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao= 'AtualizarSoftwareSAT') if resposta.EEEEE not in ('14000',): raise ExcecaoRespostaSAT(resposta) return resposta
atualizar_software_sat
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao=\n 'AtualizarSoftwareSAT')\n if resposta.EEEEE not in ('14000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "atualizar_software_sat", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao='BloquearSAT') if resposta.EEEEE not in ('16000',): raise ExcecaoRespostaSAT(resposta) return resposta
bloquear_sat
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao='BloquearSAT')\n if resposta.EEEEE not in ('16000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "bloquear_sat", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao= 'DesbloquearSAT') if resposta.EEEEE not in ('17000',): raise ExcecaoRespostaSAT(resposta) return resposta
desbloquear_sat
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao=\n 'DesbloquearSAT')\n if resposta.EEEEE not in ('17000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "desbloquear_sat", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao= 'TrocarCodigoDeAtivacao') if resposta.EEEEE not in ('18000',): raise ExcecaoRespostaSAT(resposta) return resposta
trocar_codigo_de_ativacao
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao=\n 'TrocarCodigoDeAtivacao')\n if resposta.EEEEE not in ('18000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "trocar_codigo_de_ativacao", "role": "assistant" } ]
def x(self): if self.ensemble_name.upper() == 'SSEF': if self.variable[0:2] == 'rh': pressure_level = self.variable[2:] relh_vars = ['sph', 'tmp'] relh_vals = {} for var in relh_vars: mg = SSEFModelGrid(self.member_name, self.run_date, var + pressure_level, self.start_date, self.end_date, self. path, single_step=self.single_step) relh_vals[var], units = mg.load_data() mg.close() self.data = relative_humidity_pressure_level(relh_vals['tmp'], relh_vals['sph'], float(pressure_level) * 100) self.units = '%' elif self.variable == 'melth': input_vars = ['hgtsfc', 'hgt700', 'hgt500', 'tmp700', 'tmp500'] input_vals = {} for var in input_vars: mg = SSEFModelGrid(self.member_name, self.run_date, var, self.start_date, self.end_date, self.path, single_step= self.single_step) input_vals[var], units = mg.load_data() mg.close() self.data = melting_layer_height(input_vals['hgtsfc'], input_vals['hgt700'], input_vals['hgt500'], input_vals[ 'tmp700'], input_vals['tmp500']) self.units = 'm' else: mg = SSEFModelGrid(self.member_name, self.run_date, self. variable, self.start_date, self.end_date, self.path, single_step=self.single_step) self.data, self.units = mg.load_data() mg.close() elif self.ensemble_name.upper() == 'NCAR': mg = NCARModelGrid(self.member_name, self.run_date, self.variable, self.start_date, self.end_date, self.path, single_step=self. single_step) self.data, self.units = mg.load_data() mg.close() elif self.ensemble_name.upper() == 'HREFV2': proj_dict, grid_dict = read_ncar_map_file(self.map_file) mapping_data = make_proj_grids(proj_dict, grid_dict) mg = HREFv2ModelGrid(self.member_name, self.run_date, self.variable, self.start_date, self.end_date, self.path, mapping_data, self. sector_ind_path, single_step=self.single_step) self.data, self.units = mg.load_data() elif self.ensemble_name.upper() == 'VSE': mg = VSEModelGrid(self.member_name, self.run_date, self.variable, self.start_date, self.end_date, self.path, single_step=self. single_step) self.data, self.units = mg.load_data() mg.close() elif self.ensemble_name.upper() == 'HRRR': mg = HRRRModelGrid(self.run_date, self.variable, self.start_date, self.end_date, self.path) self.data, self.units = mg.load_data() mg.close() elif self.ensemble_name.upper() == 'NCARSTORM': mg = NCARStormEventModelGrid(self.run_date, self.variable, self. start_date, self.end_date, self.path) self.data, self.units = mg.load_data() mg.close() else: print(self.ensemble_name + ' not supported.')
load_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self.ensemble_name.upper() == 'SSEF':\n if self.variable[0:2] == 'rh':\n pressure_level = self.variable[2:]\n relh_vars = ['sph', 'tmp']\n relh_vals = {}\n for var in relh_vars:\n mg = SSEFModelGrid(self.member_name, self.run_date, var +\n pressure_level, self.start_date, self.end_date, self.\n path, single_step=self.single_step)\n relh_vals[var], units = mg.load_data()\n mg.close()\n self.data = relative_humidity_pressure_level(relh_vals['tmp'],\n relh_vals['sph'], float(pressure_level) * 100)\n self.units = '%'\n elif self.variable == 'melth':\n input_vars = ['hgtsfc', 'hgt700', 'hgt500', 'tmp700', 'tmp500']\n input_vals = {}\n for var in input_vars:\n mg = SSEFModelGrid(self.member_name, self.run_date, var,\n self.start_date, self.end_date, self.path, single_step=\n self.single_step)\n input_vals[var], units = mg.load_data()\n mg.close()\n self.data = melting_layer_height(input_vals['hgtsfc'],\n input_vals['hgt700'], input_vals['hgt500'], input_vals[\n 'tmp700'], input_vals['tmp500'])\n self.units = 'm'\n else:\n mg = SSEFModelGrid(self.member_name, self.run_date, self.\n variable, self.start_date, self.end_date, self.path,\n single_step=self.single_step)\n self.data, self.units = mg.load_data()\n mg.close()\n elif self.ensemble_name.upper() == 'NCAR':\n mg = NCARModelGrid(self.member_name, self.run_date, self.variable,\n self.start_date, self.end_date, self.path, single_step=self.\n single_step)\n self.data, self.units = mg.load_data()\n mg.close()\n elif self.ensemble_name.upper() == 'HREFV2':\n proj_dict, grid_dict = read_ncar_map_file(self.map_file)\n mapping_data = make_proj_grids(proj_dict, grid_dict)\n mg = HREFv2ModelGrid(self.member_name, self.run_date, self.variable,\n self.start_date, self.end_date, self.path, mapping_data, self.\n sector_ind_path, single_step=self.single_step)\n self.data, self.units = mg.load_data()\n elif self.ensemble_name.upper() == 'VSE':\n mg = VSEModelGrid(self.member_name, self.run_date, self.variable,\n self.start_date, self.end_date, self.path, single_step=self.\n single_step)\n self.data, self.units = mg.load_data()\n mg.close()\n elif self.ensemble_name.upper() == 'HRRR':\n mg = HRRRModelGrid(self.run_date, self.variable, self.start_date,\n self.end_date, self.path)\n self.data, self.units = mg.load_data()\n mg.close()\n elif self.ensemble_name.upper() == 'NCARSTORM':\n mg = NCARStormEventModelGrid(self.run_date, self.variable, self.\n start_date, self.end_date, self.path)\n self.data, self.units = mg.load_data()\n mg.close()\n else:\n print(self.ensemble_name + ' not supported.')\n", "role": "user" }, { "content": "load_data", "role": "assistant" } ]
def x(self, map_file): if self.ensemble_name.upper() == 'SSEF': proj_dict, grid_dict = read_arps_map_file(map_file) self.dx = int(grid_dict['dx']) mapping_data = make_proj_grids(proj_dict, grid_dict) for m, v in mapping_data.items(): setattr(self, m, v) self.i, self.j = np.indices(self.lon.shape) self.proj = get_proj_obj(proj_dict) elif self.ensemble_name.upper() in ['NCAR', 'NCARSTORM', 'HRRR', 'VSE', 'HREFV2']: proj_dict, grid_dict = read_ncar_map_file(map_file) if self.member_name[0:7] == '1km_pbl': grid_dict['dx'] = 1000 grid_dict['dy'] = 1000 grid_dict['sw_lon'] = 258.697 grid_dict['sw_lat'] = 23.999 grid_dict['ne_lon'] = 282.868269206236 grid_dict['ne_lat'] = 36.4822338520542 self.dx = int(grid_dict['dx']) mapping_data = make_proj_grids(proj_dict, grid_dict) for m, v in mapping_data.items(): setattr(self, m, v) self.i, self.j = np.indices(self.lon.shape) self.proj = get_proj_obj(proj_dict)
load_map_info
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, map_file):\n if self.ensemble_name.upper() == 'SSEF':\n proj_dict, grid_dict = read_arps_map_file(map_file)\n self.dx = int(grid_dict['dx'])\n mapping_data = make_proj_grids(proj_dict, grid_dict)\n for m, v in mapping_data.items():\n setattr(self, m, v)\n self.i, self.j = np.indices(self.lon.shape)\n self.proj = get_proj_obj(proj_dict)\n elif self.ensemble_name.upper() in ['NCAR', 'NCARSTORM', 'HRRR', 'VSE',\n 'HREFV2']:\n proj_dict, grid_dict = read_ncar_map_file(map_file)\n if self.member_name[0:7] == '1km_pbl':\n grid_dict['dx'] = 1000\n grid_dict['dy'] = 1000\n grid_dict['sw_lon'] = 258.697\n grid_dict['sw_lat'] = 23.999\n grid_dict['ne_lon'] = 282.868269206236\n grid_dict['ne_lat'] = 36.4822338520542\n self.dx = int(grid_dict['dx'])\n mapping_data = make_proj_grids(proj_dict, grid_dict)\n for m, v in mapping_data.items():\n setattr(self, m, v)\n self.i, self.j = np.indices(self.lon.shape)\n self.proj = get_proj_obj(proj_dict)\n", "role": "user" }, { "content": "load_map_info", "role": "assistant" } ]
def x(filename): json_file = open(filename) data = json.load(json_file) json_file.close() times = data['properties']['times'] main_data = dict(timesteps=[], masks=[], x=[], y=[], i=[], j=[]) attribute_data = dict() for feature in data['features']: for main_name in main_data.keys(): main_data[main_name].append(np.array(feature['properties'][ main_name])) for k, v in feature['properties']['attributes'].items(): if k not in attribute_data.keys(): attribute_data[k] = [np.array(v)] else: attribute_data[k].append(np.array(v)) kwargs = {} for kw in ['dx', 'step', 'u', 'v']: if kw in data['properties'].keys(): kwargs[kw] = data['properties'][kw] sto = STObject(main_data['timesteps'], main_data['masks'], main_data[ 'x'], main_data['y'], main_data['i'], main_data['j'], times[0], times[-1], **kwargs) for k, v in attribute_data.items(): sto.attributes[k] = v return sto
read_geojson
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(filename):\n json_file = open(filename)\n data = json.load(json_file)\n json_file.close()\n times = data['properties']['times']\n main_data = dict(timesteps=[], masks=[], x=[], y=[], i=[], j=[])\n attribute_data = dict()\n for feature in data['features']:\n for main_name in main_data.keys():\n main_data[main_name].append(np.array(feature['properties'][\n main_name]))\n for k, v in feature['properties']['attributes'].items():\n if k not in attribute_data.keys():\n attribute_data[k] = [np.array(v)]\n else:\n attribute_data[k].append(np.array(v))\n kwargs = {}\n for kw in ['dx', 'step', 'u', 'v']:\n if kw in data['properties'].keys():\n kwargs[kw] = data['properties'][kw]\n sto = STObject(main_data['timesteps'], main_data['masks'], main_data[\n 'x'], main_data['y'], main_data['i'], main_data['j'], times[0],\n times[-1], **kwargs)\n for k, v in attribute_data.items():\n sto.attributes[k] = v\n return sto\n", "role": "user" }, { "content": "read_geojson", "role": "assistant" } ]
def x(self, time): if self.start_time <= time <= self.end_time: diff = time - self.start_time valid = np.flatnonzero(self.masks[diff] != 0) if valid.size > 0: com_x = 1.0 / self.timesteps[diff].ravel()[valid].sum() * np.sum( self.timesteps[diff].ravel()[valid] * self.x[diff].ravel()[ valid]) com_y = 1.0 / self.timesteps[diff].ravel()[valid].sum() * np.sum( self.timesteps[diff].ravel()[valid] * self.y[diff].ravel()[ valid]) else: com_x = np.mean(self.x[diff]) com_y = np.mean(self.y[diff]) else: com_x = None com_y = None return com_x, com_y
center_of_mass
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, time):\n if self.start_time <= time <= self.end_time:\n diff = time - self.start_time\n valid = np.flatnonzero(self.masks[diff] != 0)\n if valid.size > 0:\n com_x = 1.0 / self.timesteps[diff].ravel()[valid].sum() * np.sum(\n self.timesteps[diff].ravel()[valid] * self.x[diff].ravel()[\n valid])\n com_y = 1.0 / self.timesteps[diff].ravel()[valid].sum() * np.sum(\n self.timesteps[diff].ravel()[valid] * self.y[diff].ravel()[\n valid])\n else:\n com_x = np.mean(self.x[diff])\n com_y = np.mean(self.y[diff])\n else:\n com_x = None\n com_y = None\n return com_x, com_y\n", "role": "user" }, { "content": "center_of_mass", "role": "assistant" } ]
def x(self): traj = np.zeros((2, self.times.size)) for t, time in enumerate(self.times): traj[:, t] = self.center_of_mass(time) return traj
trajectory
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n traj = np.zeros((2, self.times.size))\n for t, time in enumerate(self.times):\n traj[:, t] = self.center_of_mass(time)\n return traj\n", "role": "user" }, { "content": "trajectory", "role": "assistant" } ]
def x(self, time): if self.start_time <= time <= self.end_time: diff = time - self.start_time return self.i[diff][0, 0], self.j[diff][0, 0] else: return -1, -1
get_corner
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, time):\n if self.start_time <= time <= self.end_time:\n diff = time - self.start_time\n return self.i[diff][0, 0], self.j[diff][0, 0]\n else:\n return -1, -1\n", "role": "user" }, { "content": "get_corner", "role": "assistant" } ]
def x(self, time): if self.start_time <= time <= self.end_time: return self.masks[time - self.start_time].sum() else: return 0
size
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, time):\n if self.start_time <= time <= self.end_time:\n return self.masks[time - self.start_time].sum()\n else:\n return 0\n", "role": "user" }, { "content": "size", "role": "assistant" } ]
def x(self): sizes = np.array([m.sum() for m in self.masks]) return sizes.max()
max_size
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n sizes = np.array([m.sum() for m in self.masks])\n return sizes.max()\n", "role": "user" }, { "content": "max_size", "role": "assistant" } ]
def x(self, time): ti = np.where(time == self.times)[0][0] return self.timesteps[ti].max()
max_intensity
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, time):\n ti = np.where(time == self.times)[0][0]\n return self.timesteps[ti].max()\n", "role": "user" }, { "content": "max_intensity", "role": "assistant" } ]
def x(self, step): self.timesteps.extend(step.timesteps) self.masks.extend(step.masks) self.x.extend(step.x) self.y.extend(step.y) self.i.extend(step.i) self.j.extend(step.j) self.end_time = step.end_time self.times = np.arange(self.start_time, self.end_time + self.step, self .step) self.u = np.concatenate((self.u, step.u)) self.v = np.concatenate((self.v, step.v)) for attr in self.attributes.keys(): if attr in step.attributes.keys(): self.attributes[attr].extend(step.attributes[attr])
extend
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, step):\n self.timesteps.extend(step.timesteps)\n self.masks.extend(step.masks)\n self.x.extend(step.x)\n self.y.extend(step.y)\n self.i.extend(step.i)\n self.j.extend(step.j)\n self.end_time = step.end_time\n self.times = np.arange(self.start_time, self.end_time + self.step, self\n .step)\n self.u = np.concatenate((self.u, step.u))\n self.v = np.concatenate((self.v, step.v))\n for attr in self.attributes.keys():\n if attr in step.attributes.keys():\n self.attributes[attr].extend(step.attributes[attr])\n", "role": "user" }, { "content": "extend", "role": "assistant" } ]
def x(self, time): ti = np.where(time == self.times)[0][0] com_x, com_y = self.center_of_mass(time) padded_mask = np.pad(self.masks[ti], 1, 'constant', constant_values=0) chull = convex_hull_image(padded_mask) boundary_image = find_boundaries(chull, mode='inner', background=0) boundary_image = boundary_image[1:-1, 1:-1] boundary_x = self.x[ti].ravel()[boundary_image.ravel()] boundary_y = self.y[ti].ravel()[boundary_image.ravel()] r = np.sqrt((boundary_x - com_x) ** 2 + (boundary_y - com_y) ** 2) theta = np.arctan2(boundary_y - com_y, boundary_x - com_x ) * 180.0 / np.pi + 360 polar_coords = np.array([(r[x], theta[x]) for x in range(r.size)], dtype=[('r', 'f4'), ('theta', 'f4')]) coord_order = np.argsort(polar_coords, order=['theta', 'r']) ordered_coords = np.vstack([boundary_x[coord_order], boundary_y[ coord_order]]) return ordered_coords
boundary_polygon
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, time):\n ti = np.where(time == self.times)[0][0]\n com_x, com_y = self.center_of_mass(time)\n padded_mask = np.pad(self.masks[ti], 1, 'constant', constant_values=0)\n chull = convex_hull_image(padded_mask)\n boundary_image = find_boundaries(chull, mode='inner', background=0)\n boundary_image = boundary_image[1:-1, 1:-1]\n boundary_x = self.x[ti].ravel()[boundary_image.ravel()]\n boundary_y = self.y[ti].ravel()[boundary_image.ravel()]\n r = np.sqrt((boundary_x - com_x) ** 2 + (boundary_y - com_y) ** 2)\n theta = np.arctan2(boundary_y - com_y, boundary_x - com_x\n ) * 180.0 / np.pi + 360\n polar_coords = np.array([(r[x], theta[x]) for x in range(r.size)],\n dtype=[('r', 'f4'), ('theta', 'f4')])\n coord_order = np.argsort(polar_coords, order=['theta', 'r'])\n ordered_coords = np.vstack([boundary_x[coord_order], boundary_y[\n coord_order]])\n return ordered_coords\n", "role": "user" }, { "content": "boundary_polygon", "role": "assistant" } ]
def x(self, time, intensity_grid, max_u, max_v): ti = np.where(time == self.times)[0][0] mask_vals = np.where(self.masks[ti].ravel() == 1) i_vals = self.i[ti].ravel()[mask_vals] j_vals = self.j[ti].ravel()[mask_vals] obj_vals = self.timesteps[ti].ravel()[mask_vals] u_shifts = np.arange(-max_u, max_u + 1) v_shifts = np.arange(-max_v, max_v + 1) min_error = 99999999999.0 best_u = 0 best_v = 0 for u in u_shifts: j_shift = j_vals - u for v in v_shifts: i_shift = i_vals - v if np.all((0 <= i_shift) & (i_shift < intensity_grid.shape[0]) & (0 <= j_shift) & (j_shift < intensity_grid.shape[1])): shift_vals = intensity_grid[i_shift, j_shift] else: shift_vals = np.zeros(i_shift.shape) error = np.abs(shift_vals - obj_vals).mean() if error < min_error: min_error = error best_u = u * self.dx best_v = v * self.dx self.u[ti] = best_u self.v[ti] = best_v return best_u, best_v, min_error
estimate_motion
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, time, intensity_grid, max_u, max_v):\n ti = np.where(time == self.times)[0][0]\n mask_vals = np.where(self.masks[ti].ravel() == 1)\n i_vals = self.i[ti].ravel()[mask_vals]\n j_vals = self.j[ti].ravel()[mask_vals]\n obj_vals = self.timesteps[ti].ravel()[mask_vals]\n u_shifts = np.arange(-max_u, max_u + 1)\n v_shifts = np.arange(-max_v, max_v + 1)\n min_error = 99999999999.0\n best_u = 0\n best_v = 0\n for u in u_shifts:\n j_shift = j_vals - u\n for v in v_shifts:\n i_shift = i_vals - v\n if np.all((0 <= i_shift) & (i_shift < intensity_grid.shape[0]) &\n (0 <= j_shift) & (j_shift < intensity_grid.shape[1])):\n shift_vals = intensity_grid[i_shift, j_shift]\n else:\n shift_vals = np.zeros(i_shift.shape)\n error = np.abs(shift_vals - obj_vals).mean()\n if error < min_error:\n min_error = error\n best_u = u * self.dx\n best_v = v * self.dx\n self.u[ti] = best_u\n self.v[ti] = best_v\n return best_u, best_v, min_error\n", "role": "user" }, { "content": "estimate_motion", "role": "assistant" } ]
def x(self, time, other_object, other_time): ti = np.where(time == self.times)[0][0] ma = np.where(self.masks[ti].ravel() == 1) oti = np.where(other_time == other_object.times)[0] obj_coords = np.zeros(self.masks[ti].sum(), dtype=[('x', int), ('y', int)]) other_obj_coords = np.zeros(other_object.masks[oti].sum(), dtype=[('x', int), ('y', int)]) obj_coords['x'] = self.i[ti].ravel()[ma] obj_coords['y'] = self.j[ti].ravel()[ma] other_obj_coords['x'] = other_object.i[oti][other_object.masks[oti] == 1] other_obj_coords['y'] = other_object.j[oti][other_object.masks[oti] == 1] return float(np.intersect1d(obj_coords, other_obj_coords).size ) / np.maximum(self.masks[ti].sum(), other_object.masks[oti].sum())
count_overlap
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, time, other_object, other_time):\n ti = np.where(time == self.times)[0][0]\n ma = np.where(self.masks[ti].ravel() == 1)\n oti = np.where(other_time == other_object.times)[0]\n obj_coords = np.zeros(self.masks[ti].sum(), dtype=[('x', int), ('y', int)])\n other_obj_coords = np.zeros(other_object.masks[oti].sum(), dtype=[('x',\n int), ('y', int)])\n obj_coords['x'] = self.i[ti].ravel()[ma]\n obj_coords['y'] = self.j[ti].ravel()[ma]\n other_obj_coords['x'] = other_object.i[oti][other_object.masks[oti] == 1]\n other_obj_coords['y'] = other_object.j[oti][other_object.masks[oti] == 1]\n return float(np.intersect1d(obj_coords, other_obj_coords).size\n ) / np.maximum(self.masks[ti].sum(), other_object.masks[oti].sum())\n", "role": "user" }, { "content": "count_overlap", "role": "assistant" } ]
def x(self, model_grid, potential=False, future=False): if potential: var_name = model_grid.variable + '-potential' timesteps = np.arange(self.start_time - 1, self.end_time) elif future: var_name = model_grid.variable + '-future' timesteps = np.arange(self.start_time + 1, self.end_time + 2) else: var_name = model_grid.variable timesteps = np.arange(self.start_time, self.end_time + 1) self.attributes[var_name] = [] for ti, t in enumerate(timesteps): self.attributes[var_name].append(model_grid.data[t - model_grid. start_hour, self.i[ti], self.j[ti]])
extract_attribute_grid
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, model_grid, potential=False, future=False):\n if potential:\n var_name = model_grid.variable + '-potential'\n timesteps = np.arange(self.start_time - 1, self.end_time)\n elif future:\n var_name = model_grid.variable + '-future'\n timesteps = np.arange(self.start_time + 1, self.end_time + 2)\n else:\n var_name = model_grid.variable\n timesteps = np.arange(self.start_time, self.end_time + 1)\n self.attributes[var_name] = []\n for ti, t in enumerate(timesteps):\n self.attributes[var_name].append(model_grid.data[t - model_grid.\n start_hour, self.i[ti], self.j[ti]])\n", "role": "user" }, { "content": "extract_attribute_grid", "role": "assistant" } ]
def x(self, data_array, var_name): if var_name not in self.attributes.keys(): self.attributes[var_name] = [] for t in range(self.times.size): self.attributes[var_name].append(data_array[self.i[t], self.j[t]])
extract_attribute_array
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data_array, var_name):\n if var_name not in self.attributes.keys():\n self.attributes[var_name] = []\n for t in range(self.times.size):\n self.attributes[var_name].append(data_array[self.i[t], self.j[t]])\n", "role": "user" }, { "content": "extract_attribute_array", "role": "assistant" } ]
def x(self, model_grid): var_name = model_grid.variable + '-tendency' self.attributes[var_name] = [] timesteps = np.arange(self.start_time, self.end_time + 1) for ti, t in enumerate(timesteps): t_index = t - model_grid.start_hour self.attributes[var_name].append(model_grid.data[t_index, self.i[ti ], self.j[ti]] - model_grid.data[t_index - 1, self.i[ti], self. j[ti]])
extract_tendency_grid
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, model_grid):\n var_name = model_grid.variable + '-tendency'\n self.attributes[var_name] = []\n timesteps = np.arange(self.start_time, self.end_time + 1)\n for ti, t in enumerate(timesteps):\n t_index = t - model_grid.start_hour\n self.attributes[var_name].append(model_grid.data[t_index, self.i[ti\n ], self.j[ti]] - model_grid.data[t_index - 1, self.i[ti], self.\n j[ti]])\n", "role": "user" }, { "content": "extract_tendency_grid", "role": "assistant" } ]
def x(self, statistic_name): stats = {} for var, grids in self.attributes.items(): if len(grids) > 1: stats[var] = getattr(np.array([getattr(np.ma.array(x, mask=self .masks[t] == 0), statistic_name)() for t, x in enumerate( grids)]), statistic_name)() else: stats[var] = getattr(np.ma.array(grids[0], mask=self.masks[0] == 0), statistic_name)() return stats
calc_attribute_statistics
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, statistic_name):\n stats = {}\n for var, grids in self.attributes.items():\n if len(grids) > 1:\n stats[var] = getattr(np.array([getattr(np.ma.array(x, mask=self\n .masks[t] == 0), statistic_name)() for t, x in enumerate(\n grids)]), statistic_name)()\n else:\n stats[var] = getattr(np.ma.array(grids[0], mask=self.masks[0] ==\n 0), statistic_name)()\n return stats\n", "role": "user" }, { "content": "calc_attribute_statistics", "role": "assistant" } ]
def x(self, attribute, statistic, time): ti = np.where(self.times == time)[0][0] ma = np.where(self.masks[ti].ravel() == 1) if statistic in ['mean', 'max', 'min', 'std', 'ptp']: stat_val = getattr(self.attributes[attribute][ti].ravel()[ma], statistic)() elif statistic == 'median': stat_val = np.median(self.attributes[attribute][ti].ravel()[ma]) elif statistic == 'skew': stat_val = np.mean(self.attributes[attribute][ti].ravel()[ma] ) - np.median(self.attributes[attribute][ti].ravel()[ma]) elif 'percentile' in statistic: per = int(statistic.split('_')[1]) stat_val = np.percentile(self.attributes[attribute][ti].ravel()[ma], per) elif 'dt' in statistic: stat_name = statistic[:-3] if ti == 0: stat_val = 0 else: stat_val = self.calc_attribute_statistic(attribute, stat_name, time ) - self.calc_attribute_statistic(attribute, stat_name, time - 1) else: stat_val = np.nan return stat_val
calc_attribute_statistic
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, attribute, statistic, time):\n ti = np.where(self.times == time)[0][0]\n ma = np.where(self.masks[ti].ravel() == 1)\n if statistic in ['mean', 'max', 'min', 'std', 'ptp']:\n stat_val = getattr(self.attributes[attribute][ti].ravel()[ma],\n statistic)()\n elif statistic == 'median':\n stat_val = np.median(self.attributes[attribute][ti].ravel()[ma])\n elif statistic == 'skew':\n stat_val = np.mean(self.attributes[attribute][ti].ravel()[ma]\n ) - np.median(self.attributes[attribute][ti].ravel()[ma])\n elif 'percentile' in statistic:\n per = int(statistic.split('_')[1])\n stat_val = np.percentile(self.attributes[attribute][ti].ravel()[ma],\n per)\n elif 'dt' in statistic:\n stat_name = statistic[:-3]\n if ti == 0:\n stat_val = 0\n else:\n stat_val = self.calc_attribute_statistic(attribute, stat_name, time\n ) - self.calc_attribute_statistic(attribute, stat_name, \n time - 1)\n else:\n stat_val = np.nan\n return stat_val\n", "role": "user" }, { "content": "calc_attribute_statistic", "role": "assistant" } ]
def x(self, statistic, time): ti = np.where(self.times == time)[0][0] ma = np.where(self.masks[ti].ravel() == 1) if statistic in ['mean', 'max', 'min', 'std', 'ptp']: stat_val = getattr(self.timesteps[ti].ravel()[ma], statistic)() elif statistic == 'median': stat_val = np.median(self.timesteps[ti].ravel()[ma]) elif 'percentile' in statistic: per = int(statistic.split('_')[1]) stat_val = np.percentile(self.timesteps[ti].ravel()[ma], per) elif 'dt' in statistic: stat_name = statistic[:-3] if ti == 0: stat_val = 0 else: stat_val = self.calc_timestep_statistic(stat_name, time ) - self.calc_timestep_statistic(stat_name, time - 1) else: stat_val = np.nan return stat_val
calc_timestep_statistic
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, statistic, time):\n ti = np.where(self.times == time)[0][0]\n ma = np.where(self.masks[ti].ravel() == 1)\n if statistic in ['mean', 'max', 'min', 'std', 'ptp']:\n stat_val = getattr(self.timesteps[ti].ravel()[ma], statistic)()\n elif statistic == 'median':\n stat_val = np.median(self.timesteps[ti].ravel()[ma])\n elif 'percentile' in statistic:\n per = int(statistic.split('_')[1])\n stat_val = np.percentile(self.timesteps[ti].ravel()[ma], per)\n elif 'dt' in statistic:\n stat_name = statistic[:-3]\n if ti == 0:\n stat_val = 0\n else:\n stat_val = self.calc_timestep_statistic(stat_name, time\n ) - self.calc_timestep_statistic(stat_name, time - 1)\n else:\n stat_val = np.nan\n return stat_val\n", "role": "user" }, { "content": "calc_timestep_statistic", "role": "assistant" } ]
def x(self, stat_names): stats = {} try: all_props = [regionprops(m) for m in self.masks] except TypeError: print(self.masks) exit() for stat in stat_names: stats[stat] = np.mean([p[0][stat] for p in all_props]) return stats
calc_shape_statistics
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, stat_names):\n stats = {}\n try:\n all_props = [regionprops(m) for m in self.masks]\n except TypeError:\n print(self.masks)\n exit()\n for stat in stat_names:\n stats[stat] = np.mean([p[0][stat] for p in all_props])\n return stats\n", "role": "user" }, { "content": "calc_shape_statistics", "role": "assistant" } ]
def x(self, stat_names, time): ti = np.where(self.times == time)[0][0] props = regionprops(self.masks[ti], self.timesteps[ti])[0] shape_stats = [] for stat_name in stat_names: if 'moments_hu' in stat_name: hu_index = int(stat_name.split('_')[-1]) hu_name = '_'.join(stat_name.split('_')[:-1]) hu_val = np.log(props[hu_name][hu_index]) if np.isnan(hu_val): shape_stats.append(0) else: shape_stats.append(hu_val) else: shape_stats.append(props[stat_name]) return shape_stats
calc_shape_step
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, stat_names, time):\n ti = np.where(self.times == time)[0][0]\n props = regionprops(self.masks[ti], self.timesteps[ti])[0]\n shape_stats = []\n for stat_name in stat_names:\n if 'moments_hu' in stat_name:\n hu_index = int(stat_name.split('_')[-1])\n hu_name = '_'.join(stat_name.split('_')[:-1])\n hu_val = np.log(props[hu_name][hu_index])\n if np.isnan(hu_val):\n shape_stats.append(0)\n else:\n shape_stats.append(hu_val)\n else:\n shape_stats.append(props[stat_name])\n return shape_stats\n", "role": "user" }, { "content": "calc_shape_step", "role": "assistant" } ]
def x(self, filename, proj, metadata=None): if metadata is None: metadata = {} json_obj = {'type': 'FeatureCollection', 'features': [], 'properties': {}} json_obj['properties']['times'] = self.times.tolist() json_obj['properties']['dx'] = self.dx json_obj['properties']['step'] = self.step json_obj['properties']['u'] = self.u.tolist() json_obj['properties']['v'] = self.v.tolist() for k, v in metadata.items(): json_obj['properties'][k] = v for t, time in enumerate(self.times): feature = {'type': 'Feature', 'geometry': {'type': 'Polygon'}, 'properties': {}} boundary_coords = self.boundary_polygon(time) lonlat = np.vstack(proj(boundary_coords[0], boundary_coords[1], inverse=True)) lonlat_list = lonlat.T.tolist() if len(lonlat_list) > 0: lonlat_list.append(lonlat_list[0]) feature['geometry']['coordinates'] = [lonlat_list] for attr in ['timesteps', 'masks', 'x', 'y', 'i', 'j']: feature['properties'][attr] = getattr(self, attr)[t].tolist() feature['properties']['attributes'] = {} for attr_name, steps in self.attributes.items(): feature['properties']['attributes'][attr_name] = steps[t].tolist() json_obj['features'].append(feature) file_obj = open(filename, 'w') json.dump(json_obj, file_obj, indent=1, sort_keys=True) file_obj.close() return
to_geojson
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, filename, proj, metadata=None):\n if metadata is None:\n metadata = {}\n json_obj = {'type': 'FeatureCollection', 'features': [], 'properties': {}}\n json_obj['properties']['times'] = self.times.tolist()\n json_obj['properties']['dx'] = self.dx\n json_obj['properties']['step'] = self.step\n json_obj['properties']['u'] = self.u.tolist()\n json_obj['properties']['v'] = self.v.tolist()\n for k, v in metadata.items():\n json_obj['properties'][k] = v\n for t, time in enumerate(self.times):\n feature = {'type': 'Feature', 'geometry': {'type': 'Polygon'},\n 'properties': {}}\n boundary_coords = self.boundary_polygon(time)\n lonlat = np.vstack(proj(boundary_coords[0], boundary_coords[1],\n inverse=True))\n lonlat_list = lonlat.T.tolist()\n if len(lonlat_list) > 0:\n lonlat_list.append(lonlat_list[0])\n feature['geometry']['coordinates'] = [lonlat_list]\n for attr in ['timesteps', 'masks', 'x', 'y', 'i', 'j']:\n feature['properties'][attr] = getattr(self, attr)[t].tolist()\n feature['properties']['attributes'] = {}\n for attr_name, steps in self.attributes.items():\n feature['properties']['attributes'][attr_name] = steps[t].tolist()\n json_obj['features'].append(feature)\n file_obj = open(filename, 'w')\n json.dump(json_obj, file_obj, indent=1, sort_keys=True)\n file_obj.close()\n return\n", "role": "user" }, { "content": "to_geojson", "role": "assistant" } ]
def x(self, properties): with self.__library_storage_lock: self.__library_storage = properties self.__write_properties(None)
rewrite_properties
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, properties):\n with self.__library_storage_lock:\n self.__library_storage = properties\n self.__write_properties(None)\n", "role": "user" }, { "content": "rewrite_properties", "role": "assistant" } ]
def x(self, v=None): if v is None: v = self.estopping hist = self.hist trace = self.trace(v) ins = None if self._base._probability_calibration is not None: node = hist[-1] node.normalize() X = np.array([x.full_array() for x in node.hy]).T y = np.array(self._base._y_klass.full_array()) mask = np.ones(X.shape[0], dtype=np.bool) mask[np.array(self._base._mask_ts.index)] = False ins = self._base._probability_calibration().fit(X[mask], y[mask]) if self._classifier: nclasses = self._labels.shape[0] else: nclasses = None m = Model(trace, hist, nvar=self._base._nvar, classifier=self. _classifier, labels=self._labels, probability_calibration=ins, nclasses=nclasses) return m
model
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, v=None):\n if v is None:\n v = self.estopping\n hist = self.hist\n trace = self.trace(v)\n ins = None\n if self._base._probability_calibration is not None:\n node = hist[-1]\n node.normalize()\n X = np.array([x.full_array() for x in node.hy]).T\n y = np.array(self._base._y_klass.full_array())\n mask = np.ones(X.shape[0], dtype=np.bool)\n mask[np.array(self._base._mask_ts.index)] = False\n ins = self._base._probability_calibration().fit(X[mask], y[mask])\n if self._classifier:\n nclasses = self._labels.shape[0]\n else:\n nclasses = None\n m = Model(trace, hist, nvar=self._base._nvar, classifier=self.\n _classifier, labels=self._labels, probability_calibration=ins,\n nclasses=nclasses)\n return m\n", "role": "user" }, { "content": "model", "role": "assistant" } ]
def x(self, n): trace_map = {} self._trace(n, trace_map) s = list(trace_map.keys()) s.sort() return s
trace
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, n):\n trace_map = {}\n self._trace(n, trace_map)\n s = list(trace_map.keys())\n s.sort()\n return s\n", "role": "user" }, { "content": "trace", "role": "assistant" } ]
def x(self, negative=False): if self.generation <= self._random_generations and not negative: return self.random_selection() if not self._negative_selection and negative: return self.random_selection(negative=negative) vars = self.random() fit = [(k, self.population[x].fitness) for k, x in enumerate(vars)] if negative: fit = min(fit, key=lambda x: x[1]) else: fit = max(fit, key=lambda x: x[1]) index = fit[0] return vars[index]
tournament
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, negative=False):\n if self.generation <= self._random_generations and not negative:\n return self.random_selection()\n if not self._negative_selection and negative:\n return self.random_selection(negative=negative)\n vars = self.random()\n fit = [(k, self.population[x].fitness) for k, x in enumerate(vars)]\n if negative:\n fit = min(fit, key=lambda x: x[1])\n else:\n fit = max(fit, key=lambda x: x[1])\n index = fit[0]\n return vars[index]\n", "role": "user" }, { "content": "tournament", "role": "assistant" } ]
def x(self): base = self._base if base._share_inputs: used_inputs_var = SelectNumbers([x for x in range(base.nvar)]) used_inputs_naive = used_inputs_var if base._pr_variable == 0: used_inputs_var = SelectNumbers([]) used_inputs_naive = SelectNumbers([x for x in range(base.nvar)]) elif base._pr_variable == 1: used_inputs_var = SelectNumbers([x for x in range(base.nvar)]) used_inputs_naive = SelectNumbers([]) else: used_inputs_var = SelectNumbers([x for x in range(base.nvar)]) used_inputs_naive = SelectNumbers([x for x in range(base.nvar)]) nb_input = Inputs(base, used_inputs_naive, functions=base._input_functions) while base._all_inputs and not base.stopping_criteria_tl( ) or self.popsize < base.popsize and not base.stopping_criteria(): if base._all_inputs and used_inputs_var.empty( ) and used_inputs_naive.empty(): base._init_popsize = self.popsize break if nb_input.use_all_variables(): v = nb_input.all_variables() if v is None: continue elif not used_inputs_var.empty() and np.random.random( ) < base._pr_variable: v = self.variable_input(used_inputs_var) if v is None: used_inputs_var.pos = used_inputs_var.size continue elif not used_inputs_naive.empty(): v = nb_input.input() if not used_inputs_var.empty() and used_inputs_naive.empty(): base._pr_variable = 1 if v is None: used_inputs_naive.pos = used_inputs_naive.size if not used_inputs_var.empty(): base._pr_variable = 1 continue else: gen = self.generation self.generation = 0 v = base.random_offspring() self.generation = gen self.add(v)
create_population
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n base = self._base\n if base._share_inputs:\n used_inputs_var = SelectNumbers([x for x in range(base.nvar)])\n used_inputs_naive = used_inputs_var\n if base._pr_variable == 0:\n used_inputs_var = SelectNumbers([])\n used_inputs_naive = SelectNumbers([x for x in range(base.nvar)])\n elif base._pr_variable == 1:\n used_inputs_var = SelectNumbers([x for x in range(base.nvar)])\n used_inputs_naive = SelectNumbers([])\n else:\n used_inputs_var = SelectNumbers([x for x in range(base.nvar)])\n used_inputs_naive = SelectNumbers([x for x in range(base.nvar)])\n nb_input = Inputs(base, used_inputs_naive, functions=base._input_functions)\n while base._all_inputs and not base.stopping_criteria_tl(\n ) or self.popsize < base.popsize and not base.stopping_criteria():\n if base._all_inputs and used_inputs_var.empty(\n ) and used_inputs_naive.empty():\n base._init_popsize = self.popsize\n break\n if nb_input.use_all_variables():\n v = nb_input.all_variables()\n if v is None:\n continue\n elif not used_inputs_var.empty() and np.random.random(\n ) < base._pr_variable:\n v = self.variable_input(used_inputs_var)\n if v is None:\n used_inputs_var.pos = used_inputs_var.size\n continue\n elif not used_inputs_naive.empty():\n v = nb_input.input()\n if not used_inputs_var.empty() and used_inputs_naive.empty():\n base._pr_variable = 1\n if v is None:\n used_inputs_naive.pos = used_inputs_naive.size\n if not used_inputs_var.empty():\n base._pr_variable = 1\n continue\n else:\n gen = self.generation\n self.generation = 0\n v = base.random_offspring()\n self.generation = gen\n self.add(v)\n", "role": "user" }, { "content": "create_population", "role": "assistant" } ]
def x(self, v): self.population.append(v) self._current_popsize += 1 v.position = len(self._hist) self._hist.append(v) self.bsf = v self.estopping = v self._density += self.get_density(v)
add
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, v):\n self.population.append(v)\n self._current_popsize += 1\n v.position = len(self._hist)\n self._hist.append(v)\n self.bsf = v\n self.estopping = v\n self._density += self.get_density(v)\n", "role": "user" }, { "content": "add", "role": "assistant" } ]
def x(self, v): if self.popsize < self._popsize: return self.add(v) k = self.tournament(negative=True) self.clean(self.population[k]) self.population[k] = v v.position = len(self._hist) self._hist.append(v) self.bsf = v self.estopping = v self._inds_replace += 1 self._density += self.get_density(v) if self._inds_replace == self._popsize: self._inds_replace = 0 self.generation += 1 gc.collect()
replace
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, v):\n if self.popsize < self._popsize:\n return self.add(v)\n k = self.tournament(negative=True)\n self.clean(self.population[k])\n self.population[k] = v\n v.position = len(self._hist)\n self._hist.append(v)\n self.bsf = v\n self.estopping = v\n self._inds_replace += 1\n self._density += self.get_density(v)\n if self._inds_replace == self._popsize:\n self._inds_replace = 0\n self.generation += 1\n gc.collect()\n", "role": "user" }, { "content": "replace", "role": "assistant" } ]
def x(directory_path): if os.path.exists(directory_path): if not os.path.isdir(directory_path): raise OSError('Path is not a directory:', directory_path) else: os.makedirs(directory_path)
make_directory_if_needed
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(directory_path):\n if os.path.exists(directory_path):\n if not os.path.isdir(directory_path):\n raise OSError('Path is not a directory:', directory_path)\n else:\n os.makedirs(directory_path)\n", "role": "user" }, { "content": "make_directory_if_needed", "role": "assistant" } ]
def x(): args = docopt.docopt(__doc__) task_list = args['<task>'] if not task_list or 'help' in task_list or args['--help']: print(__doc__.format(version=_version.__version__, config_files= config.CONFIG_LOCATIONS)) return 0 level_str = args['--log-level'] try: level_const = getattr(logging, level_str.upper()) logging.basicConfig(level=level_const) if level_const == logging.DEBUG: workdir.options.debug = True except LookupError: logging.basicConfig() logger.error('received invalid log level: ' + level_str) return 1 for task in task_list: if task not in ORDERED_TASKS: logger.info('starting task: check') logger.error('received invalid task: ' + task) return 1 for task in CHECK_TASKS: if task in task_list: task_check(args) break if 'package' in task_list and not args['--release-version']: logger.error('--release-version is required for the package task') return 1 config_dict = _get_config_or_die(calling_task='hatchery', required_params=['auto_push_tag']) if config_dict['auto_push_tag'] and 'upload' in task_list: logger.info('adding task: tag (auto_push_tag==True)') task_list.append('tag') for task in ORDERED_TASKS: if task in task_list and task != 'check': logger.info('starting task: ' + task) globals()['task_' + task](args) logger.info("all's well that ends well...hatchery out") return 0
hatchery
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n args = docopt.docopt(__doc__)\n task_list = args['<task>']\n if not task_list or 'help' in task_list or args['--help']:\n print(__doc__.format(version=_version.__version__, config_files=\n config.CONFIG_LOCATIONS))\n return 0\n level_str = args['--log-level']\n try:\n level_const = getattr(logging, level_str.upper())\n logging.basicConfig(level=level_const)\n if level_const == logging.DEBUG:\n workdir.options.debug = True\n except LookupError:\n logging.basicConfig()\n logger.error('received invalid log level: ' + level_str)\n return 1\n for task in task_list:\n if task not in ORDERED_TASKS:\n logger.info('starting task: check')\n logger.error('received invalid task: ' + task)\n return 1\n for task in CHECK_TASKS:\n if task in task_list:\n task_check(args)\n break\n if 'package' in task_list and not args['--release-version']:\n logger.error('--release-version is required for the package task')\n return 1\n config_dict = _get_config_or_die(calling_task='hatchery',\n required_params=['auto_push_tag'])\n if config_dict['auto_push_tag'] and 'upload' in task_list:\n logger.info('adding task: tag (auto_push_tag==True)')\n task_list.append('tag')\n for task in ORDERED_TASKS:\n if task in task_list and task != 'check':\n logger.info('starting task: ' + task)\n globals()['task_' + task](args)\n logger.info(\"all's well that ends well...hatchery out\")\n return 0\n", "role": "user" }, { "content": "hatchery", "role": "assistant" } ]
def x(cmd_args, suppress_output=False): if not funcy.is_list(cmd_args) and not funcy.is_tuple(cmd_args): cmd_args = shlex.split(cmd_args) logger.info('executing `{}`'.format(' '.join(cmd_args))) call_request = CallRequest(cmd_args, suppress_output=suppress_output) call_result = call_request.run() if call_result.exitval: logger.error('`{}` returned error code {}'.format(' '.join(cmd_args ), call_result.exitval)) return call_result
call
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cmd_args, suppress_output=False):\n if not funcy.is_list(cmd_args) and not funcy.is_tuple(cmd_args):\n cmd_args = shlex.split(cmd_args)\n logger.info('executing `{}`'.format(' '.join(cmd_args)))\n call_request = CallRequest(cmd_args, suppress_output=suppress_output)\n call_result = call_request.run()\n if call_result.exitval:\n logger.error('`{}` returned error code {}'.format(' '.join(cmd_args\n ), call_result.exitval))\n return call_result\n", "role": "user" }, { "content": "call", "role": "assistant" } ]
def x(cmd_args, suppress_output=False): if not funcy.is_list(cmd_args) and not funcy.is_tuple(cmd_args): cmd_args = shlex.split(cmd_args) cmd_args = [sys.executable, 'setup.py'] + [x for x in cmd_args] return call(cmd_args, suppress_output=suppress_output)
setup
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cmd_args, suppress_output=False):\n if not funcy.is_list(cmd_args) and not funcy.is_tuple(cmd_args):\n cmd_args = shlex.split(cmd_args)\n cmd_args = [sys.executable, 'setup.py'] + [x for x in cmd_args]\n return call(cmd_args, suppress_output=suppress_output)\n", "role": "user" }, { "content": "setup", "role": "assistant" } ]
def x(self): data = [] valid_dates = [] mrms_files = np.array(sorted(os.listdir(self.path + self.variable + '/'))) mrms_file_dates = np.array([m_file.split('_')[-2].split('-')[0] for m_file in mrms_files]) old_mrms_file = None file_obj = None for t in range(self.all_dates.shape[0]): file_index = np.where(mrms_file_dates == self.all_dates[t].strftime ('%Y%m%d'))[0] if len(file_index) > 0: mrms_file = mrms_files[file_index][0] if mrms_file is not None: if file_obj is not None: file_obj.close() file_obj = Dataset(self.path + self.variable + '/' + mrms_file) if 'time' in file_obj.variables.keys(): time_var = 'time' else: time_var = 'date' file_valid_dates = pd.DatetimeIndex(num2date(file_obj. variables[time_var][:], file_obj.variables[time_var].units) ) else: file_valid_dates = pd.DatetimeIndex([]) time_index = np.where(file_valid_dates.values == self.all_dates .values[t])[0] if len(time_index) > 0: data.append(file_obj.variables[self.variable][time_index[0]]) valid_dates.append(self.all_dates[t]) if file_obj is not None: file_obj.close() self.data = np.array(data) self.data[self.data < 0] = 0 self.data[self.data > 150] = 150 self.valid_dates = pd.DatetimeIndex(valid_dates)
load_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n data = []\n valid_dates = []\n mrms_files = np.array(sorted(os.listdir(self.path + self.variable + '/')))\n mrms_file_dates = np.array([m_file.split('_')[-2].split('-')[0] for\n m_file in mrms_files])\n old_mrms_file = None\n file_obj = None\n for t in range(self.all_dates.shape[0]):\n file_index = np.where(mrms_file_dates == self.all_dates[t].strftime\n ('%Y%m%d'))[0]\n if len(file_index) > 0:\n mrms_file = mrms_files[file_index][0]\n if mrms_file is not None:\n if file_obj is not None:\n file_obj.close()\n file_obj = Dataset(self.path + self.variable + '/' + mrms_file)\n if 'time' in file_obj.variables.keys():\n time_var = 'time'\n else:\n time_var = 'date'\n file_valid_dates = pd.DatetimeIndex(num2date(file_obj.\n variables[time_var][:], file_obj.variables[time_var].units)\n )\n else:\n file_valid_dates = pd.DatetimeIndex([])\n time_index = np.where(file_valid_dates.values == self.all_dates\n .values[t])[0]\n if len(time_index) > 0:\n data.append(file_obj.variables[self.variable][time_index[0]])\n valid_dates.append(self.all_dates[t])\n if file_obj is not None:\n file_obj.close()\n self.data = np.array(data)\n self.data[self.data < 0] = 0\n self.data[self.data > 150] = 150\n self.valid_dates = pd.DatetimeIndex(valid_dates)\n", "role": "user" }, { "content": "load_data", "role": "assistant" } ]
def x(data, data_min, data_max, out_min=0.0, out_max=100.0): return (out_max - out_min) / (data_max - data_min) * (data - data_min ) + out_min
rescale_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(data, data_min, data_max, out_min=0.0, out_max=100.0):\n return (out_max - out_min) / (data_max - data_min) * (data - data_min\n ) + out_min\n", "role": "user" }, { "content": "rescale_data", "role": "assistant" } ]
def x(self, input_grid): marked = self.find_local_maxima(input_grid) marked = np.where(marked >= 0, 1, 0) markers = splabel(marked)[0] return markers
label
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, input_grid):\n marked = self.find_local_maxima(input_grid)\n marked = np.where(marked >= 0, 1, 0)\n markers = splabel(marked)[0]\n return markers\n", "role": "user" }, { "content": "label", "role": "assistant" } ]
def x(self, input_grid): pixels, q_data = self.quantize(input_grid) centers = OrderedDict() for p in pixels.keys(): centers[p] = [] marked = np.ones(q_data.shape, dtype=int) * self.UNMARKED MIN_INFL = int(np.round(1 + 0.5 * np.sqrt(self.max_size))) MAX_INFL = 2 * MIN_INFL marked_so_far = [] for b in sorted(pixels.keys(), reverse=True): infl_dist = MIN_INFL + int(np.round(float(b) / self.max_bin * ( MAX_INFL - MIN_INFL))) for p in pixels[b]: if marked[p] == self.UNMARKED: ok = False del marked_so_far[:] for (i, j), v in np.ndenumerate(marked[p[0] - infl_dist:p[0 ] + infl_dist + 1, p[1] - infl_dist:p[1] + infl_dist + 1]): if v == self.UNMARKED: ok = True marked[i - infl_dist + p[0], j - infl_dist + p[1]] = b marked_so_far.append((i - infl_dist + p[0], j - infl_dist + p[1])) else: ok = False break if ok: centers[b].append(p) else: for m in marked_so_far: marked[m] = self.UNMARKED marked[:, :] = self.UNMARKED deferred_from_last = [] deferred_to_next = [] for delta in range(0, self.delta + 1): for b in sorted(centers.keys(), reverse=True): bin_lower = b - delta deferred_from_last[:] = deferred_to_next[:] del deferred_to_next[:] foothills = [] n_centers = len(centers[b]) tot_centers = n_centers + len(deferred_from_last) for i in range(tot_centers): if i < n_centers: center = centers[b][i] else: center = deferred_from_last[i - n_centers] if bin_lower < 0: bin_lower = 0 if marked[center] == self.UNMARKED: captured = self.set_maximum(q_data, marked, center, bin_lower, foothills) if not captured: deferred_to_next.append(center) else: pass self.remove_foothills(q_data, marked, b, bin_lower, centers, foothills) del deferred_from_last[:] del deferred_to_next[:] return marked
find_local_maxima
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, input_grid):\n pixels, q_data = self.quantize(input_grid)\n centers = OrderedDict()\n for p in pixels.keys():\n centers[p] = []\n marked = np.ones(q_data.shape, dtype=int) * self.UNMARKED\n MIN_INFL = int(np.round(1 + 0.5 * np.sqrt(self.max_size)))\n MAX_INFL = 2 * MIN_INFL\n marked_so_far = []\n for b in sorted(pixels.keys(), reverse=True):\n infl_dist = MIN_INFL + int(np.round(float(b) / self.max_bin * (\n MAX_INFL - MIN_INFL)))\n for p in pixels[b]:\n if marked[p] == self.UNMARKED:\n ok = False\n del marked_so_far[:]\n for (i, j), v in np.ndenumerate(marked[p[0] - infl_dist:p[0\n ] + infl_dist + 1, p[1] - infl_dist:p[1] + infl_dist + 1]):\n if v == self.UNMARKED:\n ok = True\n marked[i - infl_dist + p[0], j - infl_dist + p[1]] = b\n marked_so_far.append((i - infl_dist + p[0], j -\n infl_dist + p[1]))\n else:\n ok = False\n break\n if ok:\n centers[b].append(p)\n else:\n for m in marked_so_far:\n marked[m] = self.UNMARKED\n marked[:, :] = self.UNMARKED\n deferred_from_last = []\n deferred_to_next = []\n for delta in range(0, self.delta + 1):\n for b in sorted(centers.keys(), reverse=True):\n bin_lower = b - delta\n deferred_from_last[:] = deferred_to_next[:]\n del deferred_to_next[:]\n foothills = []\n n_centers = len(centers[b])\n tot_centers = n_centers + len(deferred_from_last)\n for i in range(tot_centers):\n if i < n_centers:\n center = centers[b][i]\n else:\n center = deferred_from_last[i - n_centers]\n if bin_lower < 0:\n bin_lower = 0\n if marked[center] == self.UNMARKED:\n captured = self.set_maximum(q_data, marked, center,\n bin_lower, foothills)\n if not captured:\n deferred_to_next.append(center)\n else:\n pass\n self.remove_foothills(q_data, marked, b, bin_lower, centers,\n foothills)\n del deferred_from_last[:]\n del deferred_to_next[:]\n return marked\n", "role": "user" }, { "content": "find_local_maxima", "role": "assistant" } ]
def x(self, q_data, marked, center, bin_lower, foothills): as_bin = [] as_glob = [] marked_so_far = [] will_be_considered_again = False as_bin.append(center) center_data = q_data[center] while len(as_bin) > 0: p = as_bin.pop(-1) if marked[p] != self.UNMARKED: continue marked[p] = q_data[center] marked_so_far.append(p) for index, val in np.ndenumerate(marked[p[0] - 1:p[0] + 2, p[1] - 1 :p[1] + 2]): if val == self.UNMARKED: pixel = index[0] - 1 + p[0], index[1] - 1 + p[1] p_data = q_data[pixel] if (not will_be_considered_again and p_data >= 0 and p_data < center_data): will_be_considered_again = True if p_data >= bin_lower and np.abs(center_data - p_data ) <= self.delta: as_bin.append(pixel) elif p_data >= 0: as_glob.append(pixel) if bin_lower == 0: will_be_considered_again = False big_enough = len(marked_so_far) >= self.max_size if big_enough: foothills.append((center, as_glob)) elif will_be_considered_again: for m in marked_so_far: marked[m] = self.UNMARKED del as_bin[:] del as_glob[:] del marked_so_far[:] return big_enough or not will_be_considered_again
set_maximum
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, q_data, marked, center, bin_lower, foothills):\n as_bin = []\n as_glob = []\n marked_so_far = []\n will_be_considered_again = False\n as_bin.append(center)\n center_data = q_data[center]\n while len(as_bin) > 0:\n p = as_bin.pop(-1)\n if marked[p] != self.UNMARKED:\n continue\n marked[p] = q_data[center]\n marked_so_far.append(p)\n for index, val in np.ndenumerate(marked[p[0] - 1:p[0] + 2, p[1] - 1\n :p[1] + 2]):\n if val == self.UNMARKED:\n pixel = index[0] - 1 + p[0], index[1] - 1 + p[1]\n p_data = q_data[pixel]\n if (not will_be_considered_again and p_data >= 0 and p_data <\n center_data):\n will_be_considered_again = True\n if p_data >= bin_lower and np.abs(center_data - p_data\n ) <= self.delta:\n as_bin.append(pixel)\n elif p_data >= 0:\n as_glob.append(pixel)\n if bin_lower == 0:\n will_be_considered_again = False\n big_enough = len(marked_so_far) >= self.max_size\n if big_enough:\n foothills.append((center, as_glob))\n elif will_be_considered_again:\n for m in marked_so_far:\n marked[m] = self.UNMARKED\n del as_bin[:]\n del as_glob[:]\n del marked_so_far[:]\n return big_enough or not will_be_considered_again\n", "role": "user" }, { "content": "set_maximum", "role": "assistant" } ]
def x(self, q_data, marked, bin_num, bin_lower, centers, foothills): hills = [] for foot in foothills: center = foot[0] hills[:] = foot[1][:] while len(hills) > 0: pt = hills.pop(-1) marked[pt] = self.GLOBBED for s_index, val in np.ndenumerate(marked[pt[0] - 1:pt[0] + 2, pt[1] - 1:pt[1] + 2]): index = s_index[0] - 1 + pt[0], s_index[1] - 1 + pt[1] if val == self.UNMARKED: if q_data[index] >= 0 and q_data[index] < bin_lower and ( q_data[index] <= q_data[pt] or self.is_closest( index, center, centers, bin_num)): hills.append(index) del foothills[:]
remove_foothills
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, q_data, marked, bin_num, bin_lower, centers, foothills):\n hills = []\n for foot in foothills:\n center = foot[0]\n hills[:] = foot[1][:]\n while len(hills) > 0:\n pt = hills.pop(-1)\n marked[pt] = self.GLOBBED\n for s_index, val in np.ndenumerate(marked[pt[0] - 1:pt[0] + 2, \n pt[1] - 1:pt[1] + 2]):\n index = s_index[0] - 1 + pt[0], s_index[1] - 1 + pt[1]\n if val == self.UNMARKED:\n if q_data[index] >= 0 and q_data[index] < bin_lower and (\n q_data[index] <= q_data[pt] or self.is_closest(\n index, center, centers, bin_num)):\n hills.append(index)\n del foothills[:]\n", "role": "user" }, { "content": "remove_foothills", "role": "assistant" } ]
def x(self, input_grid): pixels = {} for i in range(self.max_bin + 1): pixels[i] = [] data = (np.array(input_grid, dtype=int) - self.min_thresh ) / self.data_increment data[data < 0] = -1 data[data > self.max_bin] = self.max_bin good_points = np.where(data >= 0) for g in np.arange(good_points[0].shape[0]): pixels[data[good_points[0][g], good_points[1][g]]].append(( good_points[0][g], good_points[1][g])) return pixels, data
quantize
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, input_grid):\n pixels = {}\n for i in range(self.max_bin + 1):\n pixels[i] = []\n data = (np.array(input_grid, dtype=int) - self.min_thresh\n ) / self.data_increment\n data[data < 0] = -1\n data[data > self.max_bin] = self.max_bin\n good_points = np.where(data >= 0)\n for g in np.arange(good_points[0].shape[0]):\n pixels[data[good_points[0][g], good_points[1][g]]].append((\n good_points[0][g], good_points[1][g]))\n return pixels, data\n", "role": "user" }, { "content": "quantize", "role": "assistant" } ]
def x(self): file_name = [] r = requests.get('%s/users/%s/gists' % (BASE_URL, self.user), headers= self.gist.header) r_text = json.loads(r.text) limit = len(r.json()) if r.status_code == 200: for g, no in zip(r_text, range(0, limit)): for key, value in r.json()[no]['files'].iteritems(): file_name.append(value['filename']) return file_name raise Exception('Username not found')
listall
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n file_name = []\n r = requests.get('%s/users/%s/gists' % (BASE_URL, self.user), headers=\n self.gist.header)\n r_text = json.loads(r.text)\n limit = len(r.json())\n if r.status_code == 200:\n for g, no in zip(r_text, range(0, limit)):\n for key, value in r.json()[no]['files'].iteritems():\n file_name.append(value['filename'])\n return file_name\n raise Exception('Username not found')\n", "role": "user" }, { "content": "listall", "role": "assistant" } ]
def x(self, **args): self.gist_name = '' if 'name' in args: self.gist_name = args['name'] self.gist_id = self.getMyID(self.gist_name) elif 'id' in args: self.gist_id = args['id'] else: raise Exception( "Either provide authenticated user's Unambigious Gistname or any unique Gistid" ) if self.gist_id: r = requests.get('%s' % BASE_URL + '/gists/%s' % self.gist_id, headers=self.gist.header) if r.status_code == 200: r_text = json.loads(r.text) if self.gist_name != '': content = r.json()['files'][self.gist_name]['content'] else: for key, value in r.json()['files'].iteritems(): content = r.json()['files'][value['filename']]['content'] return content raise Exception('No such gist found')
content
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **args):\n self.gist_name = ''\n if 'name' in args:\n self.gist_name = args['name']\n self.gist_id = self.getMyID(self.gist_name)\n elif 'id' in args:\n self.gist_id = args['id']\n else:\n raise Exception(\n \"Either provide authenticated user's Unambigious Gistname or any unique Gistid\"\n )\n if self.gist_id:\n r = requests.get('%s' % BASE_URL + '/gists/%s' % self.gist_id,\n headers=self.gist.header)\n if r.status_code == 200:\n r_text = json.loads(r.text)\n if self.gist_name != '':\n content = r.json()['files'][self.gist_name]['content']\n else:\n for key, value in r.json()['files'].iteritems():\n content = r.json()['files'][value['filename']]['content']\n return content\n raise Exception('No such gist found')\n", "role": "user" }, { "content": "content", "role": "assistant" } ]
def x(self, **args): self.gist_name = '' if 'description' in args: self.description = args['description'] else: self.description = '' if 'name' in args and 'id' in args: self.gist_name = args['name'] self.gist_id = args['id'] elif 'name' in args: self.gist_name = args['name'] self.gist_id = self.getMyID(self.gist_name) elif 'id' in args: self.gist_id = args['id'] else: raise Exception('Gist Name/ID must be provided') if 'content' in args: self.content = args['content'] else: raise Exception("Gist content can't be empty") if self.gist_name == '': self.gist_name = self.getgist(id=self.gist_id) data = {'description': self.description, 'files': {self.gist_name: {'content': self.content}}} else: data = {'description': self.description, 'files': {self.gist_name: {'content': self.content}}} if self.gist_id: r = requests.patch('%s/gists/%s' % (BASE_URL, self.gist_id), headers=self.gist.header, data=json.dumps(data)) if r.status_code == 200: r_text = json.loads(r.text) response = {'updated_content': self.content, 'created_at': r. json()['created_at'], 'comments': r.json()['comments']} return response raise Exception('No such gist found')
edit
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **args):\n self.gist_name = ''\n if 'description' in args:\n self.description = args['description']\n else:\n self.description = ''\n if 'name' in args and 'id' in args:\n self.gist_name = args['name']\n self.gist_id = args['id']\n elif 'name' in args:\n self.gist_name = args['name']\n self.gist_id = self.getMyID(self.gist_name)\n elif 'id' in args:\n self.gist_id = args['id']\n else:\n raise Exception('Gist Name/ID must be provided')\n if 'content' in args:\n self.content = args['content']\n else:\n raise Exception(\"Gist content can't be empty\")\n if self.gist_name == '':\n self.gist_name = self.getgist(id=self.gist_id)\n data = {'description': self.description, 'files': {self.gist_name:\n {'content': self.content}}}\n else:\n data = {'description': self.description, 'files': {self.gist_name:\n {'content': self.content}}}\n if self.gist_id:\n r = requests.patch('%s/gists/%s' % (BASE_URL, self.gist_id),\n headers=self.gist.header, data=json.dumps(data))\n if r.status_code == 200:\n r_text = json.loads(r.text)\n response = {'updated_content': self.content, 'created_at': r.\n json()['created_at'], 'comments': r.json()['comments']}\n return response\n raise Exception('No such gist found')\n", "role": "user" }, { "content": "edit", "role": "assistant" } ]
def x(self, **args): if 'name' in args: self.gist_name = args['name'] self.gist_id = self.getMyID(self.gist_name) elif 'id' in args: self.gist_id = args['id'] else: raise Exception('Provide GistName to delete') url = 'gists' if self.gist_id: r = requests.delete('%s/%s/%s' % (BASE_URL, url, self.gist_id), headers=self.gist.header) if r.status_code == 204: response = {'id': self.gist_id} return response raise Exception('Can not delete gist')
delete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **args):\n if 'name' in args:\n self.gist_name = args['name']\n self.gist_id = self.getMyID(self.gist_name)\n elif 'id' in args:\n self.gist_id = args['id']\n else:\n raise Exception('Provide GistName to delete')\n url = 'gists'\n if self.gist_id:\n r = requests.delete('%s/%s/%s' % (BASE_URL, url, self.gist_id),\n headers=self.gist.header)\n if r.status_code == 204:\n response = {'id': self.gist_id}\n return response\n raise Exception('Can not delete gist')\n", "role": "user" }, { "content": "delete", "role": "assistant" } ]
def x(self, **args): ids = [] r = requests.get('%s/gists/starred' % BASE_URL, headers=self.gist.header) if 'limit' in args: limit = args['limit'] else: limit = len(r.json()) if r.status_code == 200: for g in range(0, limit): ids.append('%s/%s/%s' % (GIST_URL, r.json()[g]['user']['login'], r.json()[g]['id'])) return ids raise Exception('Username not found')
starred
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **args):\n ids = []\n r = requests.get('%s/gists/starred' % BASE_URL, headers=self.gist.header)\n if 'limit' in args:\n limit = args['limit']\n else:\n limit = len(r.json())\n if r.status_code == 200:\n for g in range(0, limit):\n ids.append('%s/%s/%s' % (GIST_URL, r.json()[g]['user']['login'],\n r.json()[g]['id']))\n return ids\n raise Exception('Username not found')\n", "role": "user" }, { "content": "starred", "role": "assistant" } ]
def x(self, **args): if 'name' in args: self.gist_name = args['name'] self.gist_id = self.getMyID(self.gist_name) elif 'id' in args: self.gist_id = args['id'] else: raise Exception('Gist Name/ID must be provided') if self.gist_id: r = requests.get('%s/gists/%s' % (BASE_URL, self.gist_id), headers= self.gist.header) if r.status_code == 200: content = {'Github-User': r.json()['user']['login'], 'GistID': r.json()['id'], 'Gist-Link': '%s/%s/%s' % (GIST_URL, self. gist.username, r.json()['id']), 'Clone-Link': '%s/%s.git' % (GIST_URL, r.json()['id']), 'Embed-Script': '<script src="%s/%s/%s.js"</script>' % (GIST_URL, self.gist .username, r.json()['id'])} return content raise Exception('No such gist found')
links
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, **args):\n if 'name' in args:\n self.gist_name = args['name']\n self.gist_id = self.getMyID(self.gist_name)\n elif 'id' in args:\n self.gist_id = args['id']\n else:\n raise Exception('Gist Name/ID must be provided')\n if self.gist_id:\n r = requests.get('%s/gists/%s' % (BASE_URL, self.gist_id), headers=\n self.gist.header)\n if r.status_code == 200:\n content = {'Github-User': r.json()['user']['login'], 'GistID':\n r.json()['id'], 'Gist-Link': '%s/%s/%s' % (GIST_URL, self.\n gist.username, r.json()['id']), 'Clone-Link': '%s/%s.git' %\n (GIST_URL, r.json()['id']), 'Embed-Script': \n '<script src=\"%s/%s/%s.js\"</script>' % (GIST_URL, self.gist\n .username, r.json()['id'])}\n return content\n raise Exception('No such gist found')\n", "role": "user" }, { "content": "links", "role": "assistant" } ]
def x(self): run_date_str = self.run_date.strftime('%Y%m%d') forecast_file = (self.forecast_path + '{0}/{1}_{2}_{3}_consensus_{0}.nc'.format(run_date_str, self. ensemble_name, self.model_name, self.forecast_variable)) print('Forecast file: ' + forecast_file) forecast_data = Dataset(forecast_file) for size_threshold in self.size_thresholds: for smoothing_radius in self.smoothing_radii: for neighbor_radius in self.neighbor_radii: hour_var = 'neighbor_prob_r_{0:d}_s_{1:d}_{2}_{3:0.2f}'.format( neighbor_radius, smoothing_radius, self. forecast_variable, float(size_threshold)) period_var = ( 'neighbor_prob_{0:d}-hour_r_{1:d}_s_{2:d}_{3}_{4:0.2f}' .format(self.end_hour - self.start_hour + 1, neighbor_radius, smoothing_radius, self. forecast_variable, float(size_threshold))) print('Loading forecasts {0} {1} {2} {3} {4}'.format(self. run_date, self.model_name, self.forecast_variable, size_threshold, smoothing_radius)) if hour_var in forecast_data.variables.keys(): self.hourly_forecasts[hour_var] = forecast_data.variables[ hour_var][:] if period_var in forecast_data.variables.keys(): self.period_forecasts[period_var ] = forecast_data.variables[period_var][:] forecast_data.close()
load_forecasts
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n run_date_str = self.run_date.strftime('%Y%m%d')\n forecast_file = (self.forecast_path +\n '{0}/{1}_{2}_{3}_consensus_{0}.nc'.format(run_date_str, self.\n ensemble_name, self.model_name, self.forecast_variable))\n print('Forecast file: ' + forecast_file)\n forecast_data = Dataset(forecast_file)\n for size_threshold in self.size_thresholds:\n for smoothing_radius in self.smoothing_radii:\n for neighbor_radius in self.neighbor_radii:\n hour_var = 'neighbor_prob_r_{0:d}_s_{1:d}_{2}_{3:0.2f}'.format(\n neighbor_radius, smoothing_radius, self.\n forecast_variable, float(size_threshold))\n period_var = (\n 'neighbor_prob_{0:d}-hour_r_{1:d}_s_{2:d}_{3}_{4:0.2f}'\n .format(self.end_hour - self.start_hour + 1,\n neighbor_radius, smoothing_radius, self.\n forecast_variable, float(size_threshold)))\n print('Loading forecasts {0} {1} {2} {3} {4}'.format(self.\n run_date, self.model_name, self.forecast_variable,\n size_threshold, smoothing_radius))\n if hour_var in forecast_data.variables.keys():\n self.hourly_forecasts[hour_var] = forecast_data.variables[\n hour_var][:]\n if period_var in forecast_data.variables.keys():\n self.period_forecasts[period_var\n ] = forecast_data.variables[period_var][:]\n forecast_data.close()\n", "role": "user" }, { "content": "load_forecasts", "role": "assistant" } ]
def x(self, mask_threshold=0.5): print('Loading obs ', self.run_date, self.model_name, self. forecast_variable) start_date = self.run_date + timedelta(hours=self.start_hour) end_date = self.run_date + timedelta(hours=self.end_hour) mrms_grid = MRMSGrid(start_date, end_date, self.mrms_variable, self. mrms_path) mrms_grid.load_data() if len(mrms_grid.data) > 0: self.raw_obs[self.mrms_variable] = np.where(mrms_grid.data > 100, 100, mrms_grid.data) self.period_obs[self.mrms_variable] = self.raw_obs[self.mrms_variable ].max(axis=0) if self.obs_mask: mask_grid = MRMSGrid(start_date, end_date, self.mask_variable, self.mrms_path) mask_grid.load_data() self.raw_obs[self.mask_variable] = np.where(mask_grid.data >= mask_threshold, 1, 0) self.period_obs[self.mask_variable] = self.raw_obs[self. mask_variable].max(axis=0)
load_obs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, mask_threshold=0.5):\n print('Loading obs ', self.run_date, self.model_name, self.\n forecast_variable)\n start_date = self.run_date + timedelta(hours=self.start_hour)\n end_date = self.run_date + timedelta(hours=self.end_hour)\n mrms_grid = MRMSGrid(start_date, end_date, self.mrms_variable, self.\n mrms_path)\n mrms_grid.load_data()\n if len(mrms_grid.data) > 0:\n self.raw_obs[self.mrms_variable] = np.where(mrms_grid.data > 100, \n 100, mrms_grid.data)\n self.period_obs[self.mrms_variable] = self.raw_obs[self.mrms_variable\n ].max(axis=0)\n if self.obs_mask:\n mask_grid = MRMSGrid(start_date, end_date, self.mask_variable,\n self.mrms_path)\n mask_grid.load_data()\n self.raw_obs[self.mask_variable] = np.where(mask_grid.data >=\n mask_threshold, 1, 0)\n self.period_obs[self.mask_variable] = self.raw_obs[self.\n mask_variable].max(axis=0)\n", "role": "user" }, { "content": "load_obs", "role": "assistant" } ]
def x(self): coord_file = Dataset(self.coordinate_file) if 'lon' in coord_file.variables.keys(): self.coordinates['lon'] = coord_file.variables['lon'][:] self.coordinates['lat'] = coord_file.variables['lat'][:] else: self.coordinates['lon'] = coord_file.variables['XLONG'][0] self.coordinates['lat'] = coord_file.variables['XLAT'][0] coord_file.close()
load_coordinates
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n coord_file = Dataset(self.coordinate_file)\n if 'lon' in coord_file.variables.keys():\n self.coordinates['lon'] = coord_file.variables['lon'][:]\n self.coordinates['lat'] = coord_file.variables['lat'][:]\n else:\n self.coordinates['lon'] = coord_file.variables['XLONG'][0]\n self.coordinates['lat'] = coord_file.variables['XLAT'][0]\n coord_file.close()\n", "role": "user" }, { "content": "load_coordinates", "role": "assistant" } ]
def x(self): score_columns = ['Run_Date', 'Forecast_Hour', 'Ensemble Name', 'Model_Name', 'Forecast_Variable', 'Neighbor_Radius', 'Smoothing_Radius', 'Size_Threshold', 'ROC', 'Reliability'] all_scores = pd.DataFrame(columns=score_columns) for h, hour in enumerate(range(self.start_hour, self.end_hour + 1)): for neighbor_radius in self.neighbor_radii: n_filter = disk(neighbor_radius) for s, size_threshold in enumerate(self.size_thresholds): print('Eval hourly forecast {0:02d} {1} {2} {3} {4:d} {5:d}' .format(hour, self.model_name, self.forecast_variable, self.run_date, neighbor_radius, size_threshold)) hour_obs = fftconvolve(self.raw_obs[self.mrms_variable][h] >= self.obs_thresholds[s], n_filter, mode='same') hour_obs[hour_obs > 1] = 1 hour_obs[hour_obs < 1] = 0 if self.obs_mask: hour_obs = hour_obs[self.raw_obs[self.mask_variable][h] > 0 ] for smoothing_radius in self.smoothing_radii: hour_var = ('neighbor_prob_r_{0:d}_s_{1:d}_{2}_{3:0.2f}' .format(neighbor_radius, smoothing_radius, self. forecast_variable, size_threshold)) if self.obs_mask: hour_forecast = self.hourly_forecasts[hour_var][h][ self.raw_obs[self.mask_variable][h] > 0] else: hour_forecast = self.hourly_forecasts[hour_var][h] roc = DistributedROC(thresholds=self.probability_levels, obs_threshold=0.5) roc.update(hour_forecast, hour_obs) rel = DistributedReliability(thresholds=self. probability_levels, obs_threshold=0.5) rel.update(hour_forecast, hour_obs) row = [self.run_date, hour, self.ensemble_name, self. model_name, self.forecast_variable, neighbor_radius, smoothing_radius, size_threshold, roc, rel] all_scores.loc[hour_var + '_{0:d}'.format(hour)] = row return all_scores
evaluate_hourly_forecasts
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n score_columns = ['Run_Date', 'Forecast_Hour', 'Ensemble Name',\n 'Model_Name', 'Forecast_Variable', 'Neighbor_Radius',\n 'Smoothing_Radius', 'Size_Threshold', 'ROC', 'Reliability']\n all_scores = pd.DataFrame(columns=score_columns)\n for h, hour in enumerate(range(self.start_hour, self.end_hour + 1)):\n for neighbor_radius in self.neighbor_radii:\n n_filter = disk(neighbor_radius)\n for s, size_threshold in enumerate(self.size_thresholds):\n print('Eval hourly forecast {0:02d} {1} {2} {3} {4:d} {5:d}'\n .format(hour, self.model_name, self.forecast_variable,\n self.run_date, neighbor_radius, size_threshold))\n hour_obs = fftconvolve(self.raw_obs[self.mrms_variable][h] >=\n self.obs_thresholds[s], n_filter, mode='same')\n hour_obs[hour_obs > 1] = 1\n hour_obs[hour_obs < 1] = 0\n if self.obs_mask:\n hour_obs = hour_obs[self.raw_obs[self.mask_variable][h] > 0\n ]\n for smoothing_radius in self.smoothing_radii:\n hour_var = ('neighbor_prob_r_{0:d}_s_{1:d}_{2}_{3:0.2f}'\n .format(neighbor_radius, smoothing_radius, self.\n forecast_variable, size_threshold))\n if self.obs_mask:\n hour_forecast = self.hourly_forecasts[hour_var][h][\n self.raw_obs[self.mask_variable][h] > 0]\n else:\n hour_forecast = self.hourly_forecasts[hour_var][h]\n roc = DistributedROC(thresholds=self.probability_levels,\n obs_threshold=0.5)\n roc.update(hour_forecast, hour_obs)\n rel = DistributedReliability(thresholds=self.\n probability_levels, obs_threshold=0.5)\n rel.update(hour_forecast, hour_obs)\n row = [self.run_date, hour, self.ensemble_name, self.\n model_name, self.forecast_variable, neighbor_radius,\n smoothing_radius, size_threshold, roc, rel]\n all_scores.loc[hour_var + '_{0:d}'.format(hour)] = row\n return all_scores\n", "role": "user" }, { "content": "evaluate_hourly_forecasts", "role": "assistant" } ]
def x(self): score_columns = ['Run_Date', 'Ensemble Name', 'Model_Name', 'Forecast_Variable', 'Neighbor_Radius', 'Smoothing_Radius', 'Size_Threshold', 'ROC', 'Reliability'] all_scores = pd.DataFrame(columns=score_columns) if self.coordinate_file is not None: coord_mask = np.where((self.coordinates['lon'] >= self.lon_bounds[0 ]) & (self.coordinates['lon'] <= self.lon_bounds[1]) & (self. coordinates['lat'] >= self.lat_bounds[0]) & (self.coordinates[ 'lat'] <= self.lat_bounds[1]) & (self.period_obs[self. mask_variable] > 0)) else: coord_mask = None for neighbor_radius in self.neighbor_radii: n_filter = disk(neighbor_radius) for s, size_threshold in enumerate(self.size_thresholds): period_obs = fftconvolve(self.period_obs[self.mrms_variable] >= self.obs_thresholds[s], n_filter, mode='same') period_obs[period_obs > 1] = 1 if self.obs_mask and self.coordinate_file is None: period_obs = period_obs[self.period_obs[self.mask_variable] > 0 ] elif self.obs_mask and self.coordinate_file is not None: period_obs = period_obs[coord_mask[0], coord_mask[1]] else: period_obs = period_obs.ravel() for smoothing_radius in self.smoothing_radii: print('Eval period forecast {0} {1} {2} {3} {4} {5}'.format (self.model_name, self.forecast_variable, self.run_date, neighbor_radius, size_threshold, smoothing_radius)) period_var = ( 'neighbor_prob_{0:d}-hour_r_{1:d}_s_{2:d}_{3}_{4:0.2f}' .format(self.end_hour - self.start_hour + 1, neighbor_radius, smoothing_radius, self. forecast_variable, size_threshold)) if self.obs_mask and self.coordinate_file is None: period_forecast = self.period_forecasts[period_var][ self.period_obs[self.mask_variable] > 0] elif self.obs_mask and self.coordinate_file is not None: period_forecast = self.period_forecasts[period_var][ coord_mask[0], coord_mask[1]] else: period_forecast = self.period_forecasts[period_var].ravel() roc = DistributedROC(thresholds=self.probability_levels, obs_threshold=0.5) roc.update(period_forecast, period_obs) rel = DistributedReliability(thresholds=self. probability_levels, obs_threshold=0.5) rel.update(period_forecast, period_obs) row = [self.run_date, self.ensemble_name, self.model_name, self.forecast_variable, neighbor_radius, smoothing_radius, size_threshold, roc, rel] all_scores.loc[period_var] = row return all_scores
evaluate_period_forecasts
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n score_columns = ['Run_Date', 'Ensemble Name', 'Model_Name',\n 'Forecast_Variable', 'Neighbor_Radius', 'Smoothing_Radius',\n 'Size_Threshold', 'ROC', 'Reliability']\n all_scores = pd.DataFrame(columns=score_columns)\n if self.coordinate_file is not None:\n coord_mask = np.where((self.coordinates['lon'] >= self.lon_bounds[0\n ]) & (self.coordinates['lon'] <= self.lon_bounds[1]) & (self.\n coordinates['lat'] >= self.lat_bounds[0]) & (self.coordinates[\n 'lat'] <= self.lat_bounds[1]) & (self.period_obs[self.\n mask_variable] > 0))\n else:\n coord_mask = None\n for neighbor_radius in self.neighbor_radii:\n n_filter = disk(neighbor_radius)\n for s, size_threshold in enumerate(self.size_thresholds):\n period_obs = fftconvolve(self.period_obs[self.mrms_variable] >=\n self.obs_thresholds[s], n_filter, mode='same')\n period_obs[period_obs > 1] = 1\n if self.obs_mask and self.coordinate_file is None:\n period_obs = period_obs[self.period_obs[self.mask_variable] > 0\n ]\n elif self.obs_mask and self.coordinate_file is not None:\n period_obs = period_obs[coord_mask[0], coord_mask[1]]\n else:\n period_obs = period_obs.ravel()\n for smoothing_radius in self.smoothing_radii:\n print('Eval period forecast {0} {1} {2} {3} {4} {5}'.format\n (self.model_name, self.forecast_variable, self.run_date,\n neighbor_radius, size_threshold, smoothing_radius))\n period_var = (\n 'neighbor_prob_{0:d}-hour_r_{1:d}_s_{2:d}_{3}_{4:0.2f}'\n .format(self.end_hour - self.start_hour + 1,\n neighbor_radius, smoothing_radius, self.\n forecast_variable, size_threshold))\n if self.obs_mask and self.coordinate_file is None:\n period_forecast = self.period_forecasts[period_var][\n self.period_obs[self.mask_variable] > 0]\n elif self.obs_mask and self.coordinate_file is not None:\n period_forecast = self.period_forecasts[period_var][\n coord_mask[0], coord_mask[1]]\n else:\n period_forecast = self.period_forecasts[period_var].ravel()\n roc = DistributedROC(thresholds=self.probability_levels,\n obs_threshold=0.5)\n roc.update(period_forecast, period_obs)\n rel = DistributedReliability(thresholds=self.\n probability_levels, obs_threshold=0.5)\n rel.update(period_forecast, period_obs)\n row = [self.run_date, self.ensemble_name, self.model_name,\n self.forecast_variable, neighbor_radius,\n smoothing_radius, size_threshold, roc, rel]\n all_scores.loc[period_var] = row\n return all_scores\n", "role": "user" }, { "content": "evaluate_period_forecasts", "role": "assistant" } ]
def x(args): version_info = sys.version_info if version_info.major != 3 or version_info.minor < 6: return None, 'python36' main_fn = load_module_as_package('nionui_app.nionswift') if main_fn: return main_fn(['nionui_app.nionswift'] + args, {'pyqt': None}), None return None, 'main'
bootstrap_main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args):\n version_info = sys.version_info\n if version_info.major != 3 or version_info.minor < 6:\n return None, 'python36'\n main_fn = load_module_as_package('nionui_app.nionswift')\n if main_fn:\n return main_fn(['nionui_app.nionswift'] + args, {'pyqt': None}), None\n return None, 'main'\n", "role": "user" }, { "content": "bootstrap_main", "role": "assistant" } ]
def x(workspace_dir: pathlib.Path, do_logging: bool=True) ->pathlib.Path: library_path_11 = workspace_dir / 'Nion Swift Workspace.nslib' library_path_12 = workspace_dir / 'Nion Swift Library 12.nslib' library_path_13 = workspace_dir / 'Nion Swift Library 13.nslib' library_paths = library_path_11, library_path_12 library_path_latest = library_path_13 if not os.path.exists(library_path_latest): for library_path in reversed(library_paths): if os.path.exists(library_path): if do_logging: logging.info('Migrating library: %s -> %s', library_path, library_path_latest) shutil.copyfile(library_path, library_path_latest) break return library_path_latest
migrate_library
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(workspace_dir: pathlib.Path, do_logging: bool=True) ->pathlib.Path:\n library_path_11 = workspace_dir / 'Nion Swift Workspace.nslib'\n library_path_12 = workspace_dir / 'Nion Swift Library 12.nslib'\n library_path_13 = workspace_dir / 'Nion Swift Library 13.nslib'\n library_paths = library_path_11, library_path_12\n library_path_latest = library_path_13\n if not os.path.exists(library_path_latest):\n for library_path in reversed(library_paths):\n if os.path.exists(library_path):\n if do_logging:\n logging.info('Migrating library: %s -> %s',\n library_path, library_path_latest)\n shutil.copyfile(library_path, library_path_latest)\n break\n return library_path_latest\n", "role": "user" }, { "content": "migrate_library", "role": "assistant" } ]
def x(self): for valor, rotulo in ESTADOS_OPERACAO: if self.ESTADO_OPERACAO == valor: return rotulo return u'(desconhecido: {})'.format(self.ESTADO_OPERACAO)
status
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n for valor, rotulo in ESTADOS_OPERACAO:\n if self.ESTADO_OPERACAO == valor:\n return rotulo\n return u'(desconhecido: {})'.format(self.ESTADO_OPERACAO)\n", "role": "user" }, { "content": "status", "role": "assistant" } ]
def x(retorno): resposta = analisar_retorno(forcar_unicode(retorno), funcao= 'ConsultarStatusOperacional', classe_resposta= RespostaConsultarStatusOperacional, campos=RespostaSAT.CAMPOS + (( 'NSERIE', as_clean_unicode), ('TIPO_LAN', as_clean_unicode), ( 'LAN_IP', normalizar_ip), ('LAN_MAC', unicode), ('LAN_MASK', normalizar_ip), ('LAN_GW', normalizar_ip), ('LAN_DNS_1', normalizar_ip), ('LAN_DNS_2', normalizar_ip), ('STATUS_LAN', as_clean_unicode), ('NIVEL_BATERIA', as_clean_unicode), ('MT_TOTAL', as_clean_unicode), ('MT_USADA', as_clean_unicode), ('DH_ATUAL', as_datetime), ('VER_SB', as_clean_unicode), ('VER_LAYOUT', as_clean_unicode), ('ULTIMO_CF_E_SAT', as_clean_unicode), ( 'LISTA_INICIAL', as_clean_unicode), ('LISTA_FINAL', as_clean_unicode), ('DH_CFE', as_datetime_or_none), ('DH_ULTIMA', as_datetime), ('CERT_EMISSAO', as_date), ('CERT_VENCIMENTO', as_date), ('ESTADO_OPERACAO', int)), campos_alternativos=[ RespostaSAT.CAMPOS]) if resposta.EEEEE not in ('10000',): raise ExcecaoRespostaSAT(resposta) return resposta
analisar
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(retorno):\n resposta = analisar_retorno(forcar_unicode(retorno), funcao=\n 'ConsultarStatusOperacional', classe_resposta=\n RespostaConsultarStatusOperacional, campos=RespostaSAT.CAMPOS + ((\n 'NSERIE', as_clean_unicode), ('TIPO_LAN', as_clean_unicode), (\n 'LAN_IP', normalizar_ip), ('LAN_MAC', unicode), ('LAN_MASK',\n normalizar_ip), ('LAN_GW', normalizar_ip), ('LAN_DNS_1',\n normalizar_ip), ('LAN_DNS_2', normalizar_ip), ('STATUS_LAN',\n as_clean_unicode), ('NIVEL_BATERIA', as_clean_unicode), ('MT_TOTAL',\n as_clean_unicode), ('MT_USADA', as_clean_unicode), ('DH_ATUAL',\n as_datetime), ('VER_SB', as_clean_unicode), ('VER_LAYOUT',\n as_clean_unicode), ('ULTIMO_CF_E_SAT', as_clean_unicode), (\n 'LISTA_INICIAL', as_clean_unicode), ('LISTA_FINAL',\n as_clean_unicode), ('DH_CFE', as_datetime_or_none), ('DH_ULTIMA',\n as_datetime), ('CERT_EMISSAO', as_date), ('CERT_VENCIMENTO',\n as_date), ('ESTADO_OPERACAO', int)), campos_alternativos=[\n RespostaSAT.CAMPOS])\n if resposta.EEEEE not in ('10000',):\n raise ExcecaoRespostaSAT(resposta)\n return resposta\n", "role": "user" }, { "content": "analisar", "role": "assistant" } ]
def x(input_csv_file, forecast_json_path, condition_models, dist_models): try: run_date = input_csv_file[:-4].split('_')[-1] print(run_date) ens_member = '_'.join(input_csv_file.split('/')[-1][:-4].split('_') [3:-1]) ens_name = input_csv_file.split('/')[-1].split('_')[2] input_data = pd.read_csv(input_csv_file, index_col='Step_ID') full_json_path = forecast_json_path + '{0}/{1}/'.format(run_date, ens_member) track_ids = sorted(input_data['Track_ID'].unique()) model_pred_cols = [] condition_models_ns = [] dist_models_ns = [] gamma_params = ['Shape', 'Location', 'Scale'] for condition_model in condition_models: model_pred_cols.append(condition_model.replace(' ', '-') + '_Condition') condition_models_ns.append(condition_model.replace(' ', '-')) for dist_model in dist_models: dist_models_ns.append(dist_model.replace(' ', '-')) for param in gamma_params: model_pred_cols.append(dist_model.replace(' ', '-') + '_' + param) pred_data = pd.DataFrame(index=input_data.index, columns= model_pred_cols, dtype=float) for track_id in track_ids: track_id_num = track_id.split('_')[-1] json_filename = (full_json_path + '{0}_{1}_{2}_model_track_{3}.json'.format(ens_name, run_date, ens_member, track_id_num)) json_file = open(json_filename) json_data = json.load(json_file) json_file.close() for s, step in enumerate(json_data['features']): step_id = track_id + '_{0:02d}'.format(s) for cond_model in condition_models_ns: pred_data.loc[step_id, cond_model + '_Condition'] = step[ 'properties']['condition_' + cond_model] for dist_model in dist_models_ns: pred_data.loc[step_id, [(dist_model + '_' + p) for p in gamma_params]] = step['properties']['dist_' + dist_model] out_data = input_data.merge(pred_data, left_index=True, right_index =True) return out_data, ens_name, ens_member except Exception as e: print(traceback.format_exc()) raise e
merge_input_csv_forecast_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(input_csv_file, forecast_json_path, condition_models, dist_models):\n try:\n run_date = input_csv_file[:-4].split('_')[-1]\n print(run_date)\n ens_member = '_'.join(input_csv_file.split('/')[-1][:-4].split('_')\n [3:-1])\n ens_name = input_csv_file.split('/')[-1].split('_')[2]\n input_data = pd.read_csv(input_csv_file, index_col='Step_ID')\n full_json_path = forecast_json_path + '{0}/{1}/'.format(run_date,\n ens_member)\n track_ids = sorted(input_data['Track_ID'].unique())\n model_pred_cols = []\n condition_models_ns = []\n dist_models_ns = []\n gamma_params = ['Shape', 'Location', 'Scale']\n for condition_model in condition_models:\n model_pred_cols.append(condition_model.replace(' ', '-') +\n '_Condition')\n condition_models_ns.append(condition_model.replace(' ', '-'))\n for dist_model in dist_models:\n dist_models_ns.append(dist_model.replace(' ', '-'))\n for param in gamma_params:\n model_pred_cols.append(dist_model.replace(' ', '-') + '_' +\n param)\n pred_data = pd.DataFrame(index=input_data.index, columns=\n model_pred_cols, dtype=float)\n for track_id in track_ids:\n track_id_num = track_id.split('_')[-1]\n json_filename = (full_json_path +\n '{0}_{1}_{2}_model_track_{3}.json'.format(ens_name,\n run_date, ens_member, track_id_num))\n json_file = open(json_filename)\n json_data = json.load(json_file)\n json_file.close()\n for s, step in enumerate(json_data['features']):\n step_id = track_id + '_{0:02d}'.format(s)\n for cond_model in condition_models_ns:\n pred_data.loc[step_id, cond_model + '_Condition'] = step[\n 'properties']['condition_' + cond_model]\n for dist_model in dist_models_ns:\n pred_data.loc[step_id, [(dist_model + '_' + p) for p in\n gamma_params]] = step['properties']['dist_' +\n dist_model]\n out_data = input_data.merge(pred_data, left_index=True, right_index\n =True)\n return out_data, ens_name, ens_member\n except Exception as e:\n print(traceback.format_exc())\n raise e\n", "role": "user" }, { "content": "merge_input_csv_forecast_json", "role": "assistant" } ]
def x(self): self.__cache.set_cached_value_dirty(self.__display_item, self. __cache_property_name) self.__initialize_cache() self.__cached_value_dirty = True
mark_data_dirty
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self.__cache.set_cached_value_dirty(self.__display_item, self.\n __cache_property_name)\n self.__initialize_cache()\n self.__cached_value_dirty = True\n", "role": "user" }, { "content": "mark_data_dirty", "role": "assistant" } ]
def x(self): if self.__cached_value_dirty is None: self.__cached_value_dirty = self.__cache.is_cached_value_dirty(self .__display_item, self.__cache_property_name) self.__cached_value = self.__cache.get_cached_value(self. __display_item, self.__cache_property_name)
initialize_cache
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self.__cached_value_dirty is None:\n self.__cached_value_dirty = self.__cache.is_cached_value_dirty(self\n .__display_item, self.__cache_property_name)\n self.__cached_value = self.__cache.get_cached_value(self.\n __display_item, self.__cache_property_name)\n", "role": "user" }, { "content": "initialize_cache", "role": "assistant" } ]
def x(self, ui): self.__initialize_cache() if self.__cached_value_dirty: with self.__is_recomputing_lock: is_recomputing = self.__is_recomputing self.__is_recomputing = True if is_recomputing: pass else: def x(): try: if self.__recompute_thread_cancel.wait(0.01): return minimum_time = 0.5 current_time = time.time() if current_time < self.__cached_value_time + minimum_time: if self.__recompute_thread_cancel.wait(self. __cached_value_time + minimum_time - current_time): return self.recompute_data(ui) finally: self.__is_recomputing = False self.__recompute_thread = None with self.__is_recomputing_lock: self.__recompute_thread = threading.Thread(target=recompute) self.__recompute_thread.start()
recompute_if_necessary
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, ui):\n self.__initialize_cache()\n if self.__cached_value_dirty:\n with self.__is_recomputing_lock:\n is_recomputing = self.__is_recomputing\n self.__is_recomputing = True\n if is_recomputing:\n pass\n else:\n\n def x():\n try:\n if self.__recompute_thread_cancel.wait(0.01):\n return\n minimum_time = 0.5\n current_time = time.time()\n if current_time < self.__cached_value_time + minimum_time:\n if self.__recompute_thread_cancel.wait(self.\n __cached_value_time + minimum_time - current_time):\n return\n self.recompute_data(ui)\n finally:\n self.__is_recomputing = False\n self.__recompute_thread = None\n with self.__is_recomputing_lock:\n self.__recompute_thread = threading.Thread(target=recompute)\n self.__recompute_thread.start()\n", "role": "user" }, { "content": "recompute_if_necessary", "role": "assistant" } ]
def x(self, ui): self.__initialize_cache() with self.__recompute_lock: if self.__cached_value_dirty: try: calculated_data = self.get_calculated_data(ui) except Exception as e: import traceback traceback.print_exc() traceback.print_stack() raise self.__cache.set_cached_value(self.__display_item, self. __cache_property_name, calculated_data) self.__cached_value = calculated_data self.__cached_value_dirty = False self.__cached_value_time = time.time() else: calculated_data = None if calculated_data is None: calculated_data = self.get_default_data() if calculated_data is not None: self.__cache.set_cached_value(self.__display_item, self. __cache_property_name, calculated_data) self.__cached_value = calculated_data self.__cached_value_dirty = False self.__cached_value_time = time.time() else: self.__cache.remove_cached_value(self.__display_item, self. __cache_property_name) self.__cached_value = None self.__cached_value_dirty = None self.__cached_value_time = 0 self.__recompute_lock.release() if callable(self.on_thumbnail_updated): self.on_thumbnail_updated() self.__recompute_lock.acquire()
recompute_data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, ui):\n self.__initialize_cache()\n with self.__recompute_lock:\n if self.__cached_value_dirty:\n try:\n calculated_data = self.get_calculated_data(ui)\n except Exception as e:\n import traceback\n traceback.print_exc()\n traceback.print_stack()\n raise\n self.__cache.set_cached_value(self.__display_item, self.\n __cache_property_name, calculated_data)\n self.__cached_value = calculated_data\n self.__cached_value_dirty = False\n self.__cached_value_time = time.time()\n else:\n calculated_data = None\n if calculated_data is None:\n calculated_data = self.get_default_data()\n if calculated_data is not None:\n self.__cache.set_cached_value(self.__display_item, self.\n __cache_property_name, calculated_data)\n self.__cached_value = calculated_data\n self.__cached_value_dirty = False\n self.__cached_value_time = time.time()\n else:\n self.__cache.remove_cached_value(self.__display_item, self.\n __cache_property_name)\n self.__cached_value = None\n self.__cached_value_dirty = None\n self.__cached_value_time = 0\n self.__recompute_lock.release()\n if callable(self.on_thumbnail_updated):\n self.on_thumbnail_updated()\n self.__recompute_lock.acquire()\n", "role": "user" }, { "content": "recompute_data", "role": "assistant" } ]
def x(self, ui, display_item: DisplayItem.DisplayItem) ->ThumbnailSource: with self.__lock: thumbnail_source = self.__thumbnail_sources.get(display_item) if not thumbnail_source: thumbnail_source = ThumbnailSource(ui, display_item) self.__thumbnail_sources[display_item] = thumbnail_source def x(thumbnail_source): del self.__thumbnail_sources[thumbnail_source._display_item] thumbnail_source._on_will_delete = will_delete else: assert thumbnail_source._ui == ui return thumbnail_source.add_ref()
thumbnail_source_for_display_item
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, ui, display_item: DisplayItem.DisplayItem) ->ThumbnailSource:\n with self.__lock:\n thumbnail_source = self.__thumbnail_sources.get(display_item)\n if not thumbnail_source:\n thumbnail_source = ThumbnailSource(ui, display_item)\n self.__thumbnail_sources[display_item] = thumbnail_source\n\n def x(thumbnail_source):\n del self.__thumbnail_sources[thumbnail_source._display_item]\n thumbnail_source._on_will_delete = will_delete\n else:\n assert thumbnail_source._ui == ui\n return thumbnail_source.add_ref()\n", "role": "user" }, { "content": "thumbnail_source_for_display_item", "role": "assistant" } ]
def x(app, root_dir): global extensions ui = app.ui subdirectories = [] if root_dir: subdirectories.append(root_dir) data_location = ui.get_data_location() if data_location is not None: subdirectories.append(data_location) plugins_dir = os.path.abspath(os.path.join(data_location, 'PlugIns')) if not os.path.exists(plugins_dir): logging.info('Creating plug-ins directory %s', plugins_dir) os.makedirs(plugins_dir) document_location = ui.get_document_location() if document_location is not None: subdirectories.append(os.path.join(document_location, 'Nion', 'Swift')) PlugInDir = collections.namedtuple('PlugInDir', ['directory', 'relative_path']) plugin_dirs = list() seen_plugin_dirs = list() for subdirectory in subdirectories: plugins_dir = os.path.abspath(os.path.join(subdirectory, 'PlugIns')) if os.path.exists(plugins_dir) and not plugins_dir in seen_plugin_dirs: logging.info('Loading plug-ins from %s', plugins_dir) sys.path.append(plugins_dir) sorted_relative_paths = sorted([d for d in os.listdir( plugins_dir) if os.path.isdir(os.path.join(plugins_dir, d))]) plugin_dirs.extend([PlugInDir(plugins_dir, sorted_relative_path ) for sorted_relative_path in sorted_relative_paths]) seen_plugin_dirs.append(plugins_dir) else: logging.info('NOT Loading plug-ins from %s (missing)', plugins_dir) version_map = dict() module_exists_map = dict() plugin_adapters = list() import nionswift_plugin for module_info in pkgutil.iter_modules(nionswift_plugin.__path__): plugin_adapters.append(ModuleAdapter(nionswift_plugin.__name__, module_info)) for directory, relative_path in plugin_dirs: plugin_adapters.append(PlugInAdapter(directory, relative_path)) progress = True while progress: progress = False plugin_adapters_copy = copy.deepcopy(plugin_adapters) plugin_adapters = list() for plugin_adapter in plugin_adapters_copy: manifest_path = plugin_adapter.manifest_path manifest = plugin_adapter.manifest if manifest: manifest_valid = True if not 'name' in manifest: logging.info("Invalid manifest (missing 'name'): %s", manifest_path) manifest_valid = False if not 'identifier' in manifest: logging.info("Invalid manifest (missing 'identifier'): %s", manifest_path) manifest_valid = False if 'identifier' in manifest and not re.match( '[_\\-a-zA-Z][_\\-a-zA-Z0-9.]*$', manifest['identifier']): logging.info( "Invalid manifest (invalid 'identifier': '%s'): %s", manifest['identifier'], manifest_path) manifest_valid = False if not 'version' in manifest: logging.info("Invalid manifest (missing 'version'): %s", manifest_path) manifest_valid = False if 'requires' in manifest and not isinstance(manifest[ 'requires'], list): logging.info("Invalid manifest ('requires' not a list): %s" , manifest_path) manifest_valid = False if not manifest_valid: continue for module in manifest.get('modules', list()): if module in module_exists_map: module_exists = module_exists_map.get(module) else: module_exists = importlib.util.find_spec(module ) is not None module_exists_map[module] = module_exists if not module_exists: logging.info("Plug-in '" + plugin_adapter. 
module_name + "' NOT loaded (" + plugin_adapter .module_path + ').') logging.info('Cannot satisfy requirement (%s): %s', module, manifest_path) manifest_valid = False break for requirement in manifest.get('requires', list()): requirement_components = requirement.split() if len(requirement_components ) != 3 or requirement_components[1] != '~=': logging.info( "Invalid manifest (requirement '%s' invalid): %s", requirement, manifest_path) manifest_valid = False break identifier, operator, version_specifier = ( requirement_components[0], requirement_components[1 ], requirement_components[2]) if identifier in version_map: if Utility.compare_versions('~' + version_specifier, version_map[identifier]) != 0: logging.info("Plug-in '" + plugin_adapter. module_name + "' NOT loaded (" + plugin_adapter.module_path + ').') logging.info('Cannot satisfy requirement (%s): %s', requirement, manifest_path) manifest_valid = False break else: logging.info("Plug-in '" + plugin_adapter. module_name + "' delayed (%s) (" + plugin_adapter.module_path + ').', requirement) plugin_adapters.append(plugin_adapter) manifest_valid = False break if not manifest_valid: continue version_map[manifest['identifier']] = manifest['version'] module = plugin_adapter.load() if module: __modules.append(module) progress = True for plugin_adapter in plugin_adapters: logging.info("Plug-in '" + plugin_adapter.module_name + "' NOT loaded (requirements) (" + plugin_adapter.module_path + ').' ) notify_modules('run')
load_plug_ins
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(app, root_dir):\n global extensions\n ui = app.ui\n subdirectories = []\n if root_dir:\n subdirectories.append(root_dir)\n data_location = ui.get_data_location()\n if data_location is not None:\n subdirectories.append(data_location)\n plugins_dir = os.path.abspath(os.path.join(data_location, 'PlugIns'))\n if not os.path.exists(plugins_dir):\n logging.info('Creating plug-ins directory %s', plugins_dir)\n os.makedirs(plugins_dir)\n document_location = ui.get_document_location()\n if document_location is not None:\n subdirectories.append(os.path.join(document_location, 'Nion', 'Swift'))\n PlugInDir = collections.namedtuple('PlugInDir', ['directory',\n 'relative_path'])\n plugin_dirs = list()\n seen_plugin_dirs = list()\n for subdirectory in subdirectories:\n plugins_dir = os.path.abspath(os.path.join(subdirectory, 'PlugIns'))\n if os.path.exists(plugins_dir) and not plugins_dir in seen_plugin_dirs:\n logging.info('Loading plug-ins from %s', plugins_dir)\n sys.path.append(plugins_dir)\n sorted_relative_paths = sorted([d for d in os.listdir(\n plugins_dir) if os.path.isdir(os.path.join(plugins_dir, d))])\n plugin_dirs.extend([PlugInDir(plugins_dir, sorted_relative_path\n ) for sorted_relative_path in sorted_relative_paths])\n seen_plugin_dirs.append(plugins_dir)\n else:\n logging.info('NOT Loading plug-ins from %s (missing)', plugins_dir)\n version_map = dict()\n module_exists_map = dict()\n plugin_adapters = list()\n import nionswift_plugin\n for module_info in pkgutil.iter_modules(nionswift_plugin.__path__):\n plugin_adapters.append(ModuleAdapter(nionswift_plugin.__name__,\n module_info))\n for directory, relative_path in plugin_dirs:\n plugin_adapters.append(PlugInAdapter(directory, relative_path))\n progress = True\n while progress:\n progress = False\n plugin_adapters_copy = copy.deepcopy(plugin_adapters)\n plugin_adapters = list()\n for plugin_adapter in plugin_adapters_copy:\n manifest_path = plugin_adapter.manifest_path\n manifest = plugin_adapter.manifest\n if manifest:\n manifest_valid = True\n if not 'name' in manifest:\n logging.info(\"Invalid manifest (missing 'name'): %s\",\n manifest_path)\n manifest_valid = False\n if not 'identifier' in manifest:\n logging.info(\"Invalid manifest (missing 'identifier'): %s\",\n manifest_path)\n manifest_valid = False\n if 'identifier' in manifest and not re.match(\n '[_\\\\-a-zA-Z][_\\\\-a-zA-Z0-9.]*$', manifest['identifier']):\n logging.info(\n \"Invalid manifest (invalid 'identifier': '%s'): %s\",\n manifest['identifier'], manifest_path)\n manifest_valid = False\n if not 'version' in manifest:\n logging.info(\"Invalid manifest (missing 'version'): %s\",\n manifest_path)\n manifest_valid = False\n if 'requires' in manifest and not isinstance(manifest[\n 'requires'], list):\n logging.info(\"Invalid manifest ('requires' not a list): %s\"\n , manifest_path)\n manifest_valid = False\n if not manifest_valid:\n continue\n for module in manifest.get('modules', list()):\n if module in module_exists_map:\n module_exists = module_exists_map.get(module)\n else:\n module_exists = importlib.util.find_spec(module\n ) is not None\n module_exists_map[module] = module_exists\n if not module_exists:\n logging.info(\"Plug-in '\" + plugin_adapter.\n module_name + \"' NOT loaded (\" + plugin_adapter\n .module_path + ').')\n logging.info('Cannot satisfy requirement (%s): %s',\n module, manifest_path)\n manifest_valid = 
False\n break\n for requirement in manifest.get('requires', list()):\n requirement_components = requirement.split()\n if len(requirement_components\n ) != 3 or requirement_components[1] != '~=':\n logging.info(\n \"Invalid manifest (requirement '%s' invalid): %s\",\n requirement, manifest_path)\n manifest_valid = False\n break\n identifier, operator, version_specifier = (\n requirement_components[0], requirement_components[1\n ], requirement_components[2])\n if identifier in version_map:\n if Utility.compare_versions('~' + version_specifier,\n version_map[identifier]) != 0:\n logging.info(\"Plug-in '\" + plugin_adapter.\n module_name + \"' NOT loaded (\" +\n plugin_adapter.module_path + ').')\n logging.info('Cannot satisfy requirement (%s): %s',\n requirement, manifest_path)\n manifest_valid = False\n break\n else:\n logging.info(\"Plug-in '\" + plugin_adapter.\n module_name + \"' delayed (%s) (\" +\n plugin_adapter.module_path + ').', requirement)\n plugin_adapters.append(plugin_adapter)\n manifest_valid = False\n break\n if not manifest_valid:\n continue\n version_map[manifest['identifier']] = manifest['version']\n module = plugin_adapter.load()\n if module:\n __modules.append(module)\n progress = True\n for plugin_adapter in plugin_adapters:\n logging.info(\"Plug-in '\" + plugin_adapter.module_name +\n \"' NOT loaded (requirements) (\" + plugin_adapter.module_path + ').'\n )\n notify_modules('run')\n", "role": "user" }, { "content": "load_plug_ins", "role": "assistant" } ]
def x(self, gist_name): r = requests.get('%s' % BASE_URL + '/users/%s/gists' % self.user, headers=self.gist.header) if r.status_code == 200: r_text = json.loads(r.text) limit = len(r.json()) for g, no in zip(r_text, range(0, limit)): for ka, va in r.json()[no]['files'].iteritems(): if str(va['filename']) == str(gist_name): return r.json()[no]['id'] return 0 raise Exception('Username not found')
getMyID
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, gist_name):\n r = requests.get('%s' % BASE_URL + '/users/%s/gists' % self.user,\n headers=self.gist.header)\n if r.status_code == 200:\n r_text = json.loads(r.text)\n limit = len(r.json())\n for g, no in zip(r_text, range(0, limit)):\n for ka, va in r.json()[no]['files'].iteritems():\n if str(va['filename']) == str(gist_name):\n return r.json()[no]['id']\n return 0\n raise Exception('Username not found')\n", "role": "user" }, { "content": "getMyID", "role": "assistant" } ]
def x(self): assert self.__closed == False self.__closed = True self.finish_periodic() for weak_dialog in self.__dialogs: dialog = weak_dialog() if dialog: try: dialog.request_close() except Exception as e: pass self._file_menu = None self._edit_menu = None self._processing_menu = None self._view_menu = None self._window_menu = None self._help_menu = None self._library_menu = None self._processing_arithmetic_menu = None self._processing_reduce_menu = None self._processing_transform_menu = None self._processing_filter_menu = None self._processing_fourier_menu = None self._processing_graphics_menu = None self._processing_sequence_menu = None self._processing_redimension_menu = None self._display_type_menu = None if self.__workspace_controller: self.__workspace_controller.close() self.__workspace_controller = None self.__call_soon_event_listener.close() self.__call_soon_event_listener = None self.__filtered_display_items_model.close() self.__filtered_display_items_model = None self.filter_controller.close() self.filter_controller = None self.__display_items_model.close() self.__display_items_model = None self.document_model.remove_ref() self.document_model = None self.did_close_event.fire(self) self.did_close_event = None super().close()
close
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n assert self.__closed == False\n self.__closed = True\n self.finish_periodic()\n for weak_dialog in self.__dialogs:\n dialog = weak_dialog()\n if dialog:\n try:\n dialog.request_close()\n except Exception as e:\n pass\n self._file_menu = None\n self._edit_menu = None\n self._processing_menu = None\n self._view_menu = None\n self._window_menu = None\n self._help_menu = None\n self._library_menu = None\n self._processing_arithmetic_menu = None\n self._processing_reduce_menu = None\n self._processing_transform_menu = None\n self._processing_filter_menu = None\n self._processing_fourier_menu = None\n self._processing_graphics_menu = None\n self._processing_sequence_menu = None\n self._processing_redimension_menu = None\n self._display_type_menu = None\n if self.__workspace_controller:\n self.__workspace_controller.close()\n self.__workspace_controller = None\n self.__call_soon_event_listener.close()\n self.__call_soon_event_listener = None\n self.__filtered_display_items_model.close()\n self.__filtered_display_items_model = None\n self.filter_controller.close()\n self.filter_controller = None\n self.__display_items_model.close()\n self.__display_items_model = None\n self.document_model.remove_ref()\n self.document_model = None\n self.did_close_event.fire(self)\n self.did_close_event = None\n super().close()\n", "role": "user" }, { "content": "close", "role": "assistant" } ]
def x(self, interval: float, listener_fn): class PeriodicListener: def x(self, interval: float, listener_fn): self.interval = interval self.__listener_fn = listener_fn if callable(listener_fn): self.call = self.__listener_fn else: def x(*args, **kwargs): pass self.call = void self.next_scheduled_time = time.time() + interval def x(self): self.__listener_fn = None def x(*args, **kwargs): pass self.call = void listener = PeriodicListener(interval, listener_fn) def x(weak_listener): with self.__weak_periodic_listeners_mutex: self.__weak_periodic_listeners.remove(weak_listener) weak_listener = weakref.ref(listener, remove_listener) with self.__weak_periodic_listeners_mutex: self.__weak_periodic_listeners.append(weak_listener) return listener
add_periodic
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, interval: float, listener_fn):\n\n\n class PeriodicListener:\n\n def x(self, interval: float, listener_fn):\n self.interval = interval\n self.__listener_fn = listener_fn\n if callable(listener_fn):\n self.call = self.__listener_fn\n else:\n\n def x(*args, **kwargs):\n pass\n self.call = void\n self.next_scheduled_time = time.time() + interval\n\n def x(self):\n self.__listener_fn = None\n\n def x(*args, **kwargs):\n pass\n self.call = void\n listener = PeriodicListener(interval, listener_fn)\n\n def x(weak_listener):\n with self.__weak_periodic_listeners_mutex:\n self.__weak_periodic_listeners.remove(weak_listener)\n weak_listener = weakref.ref(listener, remove_listener)\n with self.__weak_periodic_listeners_mutex:\n self.__weak_periodic_listeners.append(weak_listener)\n return listener\n", "role": "user" }, { "content": "add_periodic", "role": "assistant" } ]
def x(self, display_items_model: ListModel.FilteredListModel, data_group: typing.Optional[DataGroup.DataGroup], filter_id: typing.Optional[str] ) ->None: with display_items_model.changes(): if data_group is not None: display_items_model.container = data_group display_items_model.filter = ListModel.Filter(True) display_items_model.sort_key = None display_items_model.filter_id = None elif filter_id == 'latest-session': display_items_model.container = self.document_model display_items_model.filter = ListModel.EqFilter('session_id', self.document_model.session_id) display_items_model.sort_key = DataItem.sort_by_date_key display_items_model.sort_reverse = True display_items_model.filter_id = filter_id elif filter_id == 'temporary': display_items_model.container = self.document_model display_items_model.filter = ListModel.NotEqFilter('category', 'persistent') display_items_model.sort_key = DataItem.sort_by_date_key display_items_model.sort_reverse = True display_items_model.filter_id = filter_id elif filter_id == 'none': display_items_model.container = self.document_model display_items_model.filter = ListModel.Filter(False) display_items_model.sort_key = DataItem.sort_by_date_key display_items_model.sort_reverse = True display_items_model.filter_id = filter_id else: display_items_model.container = self.document_model display_items_model.filter = ListModel.EqFilter('category', 'persistent') display_items_model.sort_key = DataItem.sort_by_date_key display_items_model.sort_reverse = True display_items_model.filter_id = None
update_display_items_model
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, display_items_model: ListModel.FilteredListModel, data_group:\n typing.Optional[DataGroup.DataGroup], filter_id: typing.Optional[str]\n ) ->None:\n with display_items_model.changes():\n if data_group is not None:\n display_items_model.container = data_group\n display_items_model.filter = ListModel.Filter(True)\n display_items_model.sort_key = None\n display_items_model.filter_id = None\n elif filter_id == 'latest-session':\n display_items_model.container = self.document_model\n display_items_model.filter = ListModel.EqFilter('session_id',\n self.document_model.session_id)\n display_items_model.sort_key = DataItem.sort_by_date_key\n display_items_model.sort_reverse = True\n display_items_model.filter_id = filter_id\n elif filter_id == 'temporary':\n display_items_model.container = self.document_model\n display_items_model.filter = ListModel.NotEqFilter('category',\n 'persistent')\n display_items_model.sort_key = DataItem.sort_by_date_key\n display_items_model.sort_reverse = True\n display_items_model.filter_id = filter_id\n elif filter_id == 'none':\n display_items_model.container = self.document_model\n display_items_model.filter = ListModel.Filter(False)\n display_items_model.sort_key = DataItem.sort_by_date_key\n display_items_model.sort_reverse = True\n display_items_model.filter_id = filter_id\n else:\n display_items_model.container = self.document_model\n display_items_model.filter = ListModel.EqFilter('category',\n 'persistent')\n display_items_model.sort_key = DataItem.sort_by_date_key\n display_items_model.sort_reverse = True\n display_items_model.filter_id = None\n", "role": "user" }, { "content": "update_display_items_model", "role": "assistant" } ]
def x(self) ->typing.Optional[DataItem.DataItem]: return (self.__focused_display_item.data_item if self. __focused_display_item else None)
focused_data_item
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->typing.Optional[DataItem.DataItem]:\n return (self.__focused_display_item.data_item if self.\n __focused_display_item else None)\n", "role": "user" }, { "content": "focused_data_item", "role": "assistant" } ]
def x(self) ->typing.Optional[DisplayItem.DisplayItem]: display_item = self.focused_display_item if not display_item: selected_display_panel = self.selected_display_panel display_item = (selected_display_panel.display_item if selected_display_panel else None) return display_item
selected_display_item
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->typing.Optional[DisplayItem.DisplayItem]:\n display_item = self.focused_display_item\n if not display_item:\n selected_display_panel = self.selected_display_panel\n display_item = (selected_display_panel.display_item if\n selected_display_panel else None)\n return display_item\n", "role": "user" }, { "content": "selected_display_item", "role": "assistant" } ]
def x(self): selected_display_items = self.selected_display_items if len(selected_display_items) < 2: selected_display_items = list() display_item = self.selected_display_item if display_item: selected_display_items.append(display_item) if len(selected_display_items) == 1: display_item = selected_display_items[0] data_item = display_item.data_item if display_item else None if display_item and len(display_item.graphic_selection.indexes) == 2: index1 = display_item.graphic_selection.anchor_index index2 = list(display_item.graphic_selection.indexes.difference ({index1}))[0] graphic1 = display_item.graphics[index1] graphic2 = display_item.graphics[index2] if data_item: if data_item.is_datum_1d and isinstance(graphic1, Graphics. IntervalGraphic) and isinstance(graphic2, Graphics. IntervalGraphic): crop_graphic1 = graphic1 crop_graphic2 = graphic2 elif data_item.is_datum_2d and isinstance(graphic1, Graphics.RectangleTypeGraphic) and isinstance(graphic2, Graphics.RectangleTypeGraphic): crop_graphic1 = graphic1 crop_graphic2 = graphic2 else: crop_graphic1 = self.__get_crop_graphic(display_item) crop_graphic2 = crop_graphic1 else: crop_graphic1 = self.__get_crop_graphic(display_item) crop_graphic2 = crop_graphic1 else: crop_graphic1 = self.__get_crop_graphic(display_item) crop_graphic2 = crop_graphic1 return (display_item, crop_graphic1), (display_item, crop_graphic2) if len(selected_display_items) == 2: display_item1 = selected_display_items[0] crop_graphic1 = self.__get_crop_graphic(display_item1) display_item2 = selected_display_items[1] crop_graphic2 = self.__get_crop_graphic(display_item2) return (display_item1, crop_graphic1), (display_item2, crop_graphic2) return None
get_two_data_sources
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n selected_display_items = self.selected_display_items\n if len(selected_display_items) < 2:\n selected_display_items = list()\n display_item = self.selected_display_item\n if display_item:\n selected_display_items.append(display_item)\n if len(selected_display_items) == 1:\n display_item = selected_display_items[0]\n data_item = display_item.data_item if display_item else None\n if display_item and len(display_item.graphic_selection.indexes) == 2:\n index1 = display_item.graphic_selection.anchor_index\n index2 = list(display_item.graphic_selection.indexes.difference\n ({index1}))[0]\n graphic1 = display_item.graphics[index1]\n graphic2 = display_item.graphics[index2]\n if data_item:\n if data_item.is_datum_1d and isinstance(graphic1, Graphics.\n IntervalGraphic) and isinstance(graphic2, Graphics.\n IntervalGraphic):\n crop_graphic1 = graphic1\n crop_graphic2 = graphic2\n elif data_item.is_datum_2d and isinstance(graphic1,\n Graphics.RectangleTypeGraphic) and isinstance(graphic2,\n Graphics.RectangleTypeGraphic):\n crop_graphic1 = graphic1\n crop_graphic2 = graphic2\n else:\n crop_graphic1 = self.__get_crop_graphic(display_item)\n crop_graphic2 = crop_graphic1\n else:\n crop_graphic1 = self.__get_crop_graphic(display_item)\n crop_graphic2 = crop_graphic1\n else:\n crop_graphic1 = self.__get_crop_graphic(display_item)\n crop_graphic2 = crop_graphic1\n return (display_item, crop_graphic1), (display_item, crop_graphic2)\n if len(selected_display_items) == 2:\n display_item1 = selected_display_items[0]\n crop_graphic1 = self.__get_crop_graphic(display_item1)\n display_item2 = selected_display_items[1]\n crop_graphic2 = self.__get_crop_graphic(display_item2)\n return (display_item1, crop_graphic1), (display_item2, crop_graphic2)\n return None\n", "role": "user" }, { "content": "get_two_data_sources", "role": "assistant" } ]
def x(canvas_size, data_shape, image_canvas_mode, image_zoom, image_position ) ->typing.Tuple[typing.Any, typing.Any]: if data_shape is None: return None, None if image_canvas_mode == 'fill': data_shape = data_shape scale_h = float(data_shape[1]) / canvas_size[1] scale_v = float(data_shape[0]) / canvas_size[0] if scale_v < scale_h: image_canvas_size = canvas_size[0], canvas_size[0] * data_shape[1 ] / data_shape[0] else: image_canvas_size = canvas_size[1] * data_shape[0] / data_shape[1 ], canvas_size[1] image_canvas_origin = canvas_size[0] * 0.5 - image_canvas_size[0 ] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5 elif image_canvas_mode == 'fit': image_canvas_size = canvas_size image_canvas_origin = 0, 0 elif image_canvas_mode == '1:1': image_canvas_size = data_shape image_canvas_origin = canvas_size[0] * 0.5 - image_canvas_size[0 ] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5 elif image_canvas_mode == '2:1': image_canvas_size = data_shape[0] * 0.5, data_shape[1] * 0.5 image_canvas_origin = canvas_size[0] * 0.5 - image_canvas_size[0 ] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5 else: image_canvas_size = canvas_size[0] * image_zoom, canvas_size[1 ] * image_zoom canvas_rect = Geometry.fit_to_size(((0, 0), image_canvas_size), data_shape) image_canvas_origin_y = canvas_size[0] * 0.5 - image_position[0 ] * canvas_rect[1][0] - canvas_rect[0][0] image_canvas_origin_x = canvas_size[1] * 0.5 - image_position[1 ] * canvas_rect[1][1] - canvas_rect[0][1] image_canvas_origin = image_canvas_origin_y, image_canvas_origin_x return image_canvas_origin, image_canvas_size
calculate_origin_and_size
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(canvas_size, data_shape, image_canvas_mode, image_zoom, image_position\n ) ->typing.Tuple[typing.Any, typing.Any]:\n if data_shape is None:\n return None, None\n if image_canvas_mode == 'fill':\n data_shape = data_shape\n scale_h = float(data_shape[1]) / canvas_size[1]\n scale_v = float(data_shape[0]) / canvas_size[0]\n if scale_v < scale_h:\n image_canvas_size = canvas_size[0], canvas_size[0] * data_shape[1\n ] / data_shape[0]\n else:\n image_canvas_size = canvas_size[1] * data_shape[0] / data_shape[1\n ], canvas_size[1]\n image_canvas_origin = canvas_size[0] * 0.5 - image_canvas_size[0\n ] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5\n elif image_canvas_mode == 'fit':\n image_canvas_size = canvas_size\n image_canvas_origin = 0, 0\n elif image_canvas_mode == '1:1':\n image_canvas_size = data_shape\n image_canvas_origin = canvas_size[0] * 0.5 - image_canvas_size[0\n ] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5\n elif image_canvas_mode == '2:1':\n image_canvas_size = data_shape[0] * 0.5, data_shape[1] * 0.5\n image_canvas_origin = canvas_size[0] * 0.5 - image_canvas_size[0\n ] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5\n else:\n image_canvas_size = canvas_size[0] * image_zoom, canvas_size[1\n ] * image_zoom\n canvas_rect = Geometry.fit_to_size(((0, 0), image_canvas_size),\n data_shape)\n image_canvas_origin_y = canvas_size[0] * 0.5 - image_position[0\n ] * canvas_rect[1][0] - canvas_rect[0][0]\n image_canvas_origin_x = canvas_size[1] * 0.5 - image_position[1\n ] * canvas_rect[1][1] - canvas_rect[0][1]\n image_canvas_origin = image_canvas_origin_y, image_canvas_origin_x\n return image_canvas_origin, image_canvas_size\n", "role": "user" }, { "content": "calculate_origin_and_size", "role": "assistant" } ]
def x(*, persistent_storage_system=None, new_persistent_storage_system=None, data_item_uuids=None, deletions: typing.List[uuid.UUID]=None, utilized_deletions: typing.Set[uuid.UUID]=None, ignore_older_files: bool=True): storage_handlers = persistent_storage_system.find_data_items() ReaderInfo = collections.namedtuple('ReaderInfo', ['properties', 'changed_ref', 'large_format', 'storage_handler', 'identifier']) reader_info_list = list() for storage_handler in storage_handlers: try: large_format = isinstance(storage_handler, HDF5Handler.HDF5Handler) properties = Migration.transform_to_latest(storage_handler. read_properties()) reader_info = ReaderInfo(properties, [False], large_format, storage_handler, storage_handler.reference) reader_info_list.append(reader_info) except Exception as e: logging.debug('Error reading %s', storage_handler.reference) import traceback traceback.print_exc() traceback.print_stack() library_storage_properties = (persistent_storage_system. library_storage_properties) for deletion in copy.deepcopy(library_storage_properties.get( 'data_item_deletions', list())): if not deletion in deletions: deletions.append(deletion) preliminary_library_updates = dict() library_updates = dict() if not ignore_older_files: Migration.migrate_to_latest(reader_info_list, preliminary_library_updates) good_reader_info_list = list() count = len(reader_info_list) for index, reader_info in enumerate(reader_info_list): storage_handler = reader_info.storage_handler properties = reader_info.properties try: version = properties.get('version', 0) if version == DataItem.DataItem.writer_version: data_item_uuid = uuid.UUID(properties['uuid']) if not data_item_uuid in data_item_uuids: if str(data_item_uuid) in deletions: utilized_deletions.add(data_item_uuid) else: auto_migrate_data_item(reader_info, persistent_storage_system, new_persistent_storage_system, index, count) good_reader_info_list.append(reader_info) data_item_uuids.add(data_item_uuid) library_update = preliminary_library_updates.get( data_item_uuid) if library_update: library_updates[data_item_uuid] = library_update except Exception as e: logging.debug('Error reading %s', storage_handler.reference) import traceback traceback.print_exc() traceback.print_stack() return good_reader_info_list, library_updates
auto_migrate_storage_system
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(*, persistent_storage_system=None, new_persistent_storage_system=None,\n data_item_uuids=None, deletions: typing.List[uuid.UUID]=None,\n utilized_deletions: typing.Set[uuid.UUID]=None, ignore_older_files:\n bool=True):\n storage_handlers = persistent_storage_system.find_data_items()\n ReaderInfo = collections.namedtuple('ReaderInfo', ['properties',\n 'changed_ref', 'large_format', 'storage_handler', 'identifier'])\n reader_info_list = list()\n for storage_handler in storage_handlers:\n try:\n large_format = isinstance(storage_handler, HDF5Handler.HDF5Handler)\n properties = Migration.transform_to_latest(storage_handler.\n read_properties())\n reader_info = ReaderInfo(properties, [False], large_format,\n storage_handler, storage_handler.reference)\n reader_info_list.append(reader_info)\n except Exception as e:\n logging.debug('Error reading %s', storage_handler.reference)\n import traceback\n traceback.print_exc()\n traceback.print_stack()\n library_storage_properties = (persistent_storage_system.\n library_storage_properties)\n for deletion in copy.deepcopy(library_storage_properties.get(\n 'data_item_deletions', list())):\n if not deletion in deletions:\n deletions.append(deletion)\n preliminary_library_updates = dict()\n library_updates = dict()\n if not ignore_older_files:\n Migration.migrate_to_latest(reader_info_list,\n preliminary_library_updates)\n good_reader_info_list = list()\n count = len(reader_info_list)\n for index, reader_info in enumerate(reader_info_list):\n storage_handler = reader_info.storage_handler\n properties = reader_info.properties\n try:\n version = properties.get('version', 0)\n if version == DataItem.DataItem.writer_version:\n data_item_uuid = uuid.UUID(properties['uuid'])\n if not data_item_uuid in data_item_uuids:\n if str(data_item_uuid) in deletions:\n utilized_deletions.add(data_item_uuid)\n else:\n auto_migrate_data_item(reader_info,\n persistent_storage_system,\n new_persistent_storage_system, index, count)\n good_reader_info_list.append(reader_info)\n data_item_uuids.add(data_item_uuid)\n library_update = preliminary_library_updates.get(\n data_item_uuid)\n if library_update:\n library_updates[data_item_uuid] = library_update\n except Exception as e:\n logging.debug('Error reading %s', storage_handler.reference)\n import traceback\n traceback.print_exc()\n traceback.print_stack()\n return good_reader_info_list, library_updates\n", "role": "user" }, { "content": "auto_migrate_storage_system", "role": "assistant" } ]
def x(self, properties): with self.__properties_lock: self.__properties = properties self.__write_properties(None)
rewrite_properties
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, properties):\n with self.__properties_lock:\n self.__properties = properties\n self.__write_properties(None)\n", "role": "user" }, { "content": "rewrite_properties", "role": "assistant" } ]