| source_code (string, 45–7.24k chars) | function_name (string, 1–111 chars) | messages (list) |
|---|---|---|
def x(self, secret, plugin=None, mount_path=None):
has_plugin_conf = False
if plugin is not None:
has_plugin_conf = self.dj.dock_json_has_plugin_conf(plugin[0],
plugin[1])
if 'secrets' in self.template['spec']['strategy']['customStrategy']:
if not plugin or has_plugin_conf:
custom = self.template['spec']['strategy']['customStrategy']
if mount_path:
secret_path = mount_path
else:
secret_path = os.path.join(SECRETS_PATH, secret)
logger.info('Configuring %s secret at %s', secret, secret_path)
existing = [secret_mount for secret_mount in custom['secrets'] if
secret_mount['secretSource']['name'] == secret]
if existing:
logger.debug('secret %s already set', secret)
else:
custom['secrets'].append({'secretSource': {'name': secret},
'mountPath': secret_path})
if plugin and plugin[2] is not None:
self.dj.dock_json_set_arg(*(plugin + (secret_path,)))
else:
logger.debug('not setting secret for unused plugin %s', plugin[1])
|
set_secret_for_plugin
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, secret, plugin=None, mount_path=None):\n has_plugin_conf = False\n if plugin is not None:\n has_plugin_conf = self.dj.dock_json_has_plugin_conf(plugin[0],\n plugin[1])\n if 'secrets' in self.template['spec']['strategy']['customStrategy']:\n if not plugin or has_plugin_conf:\n custom = self.template['spec']['strategy']['customStrategy']\n if mount_path:\n secret_path = mount_path\n else:\n secret_path = os.path.join(SECRETS_PATH, secret)\n logger.info('Configuring %s secret at %s', secret, secret_path)\n existing = [secret_mount for secret_mount in custom['secrets'] if\n secret_mount['secretSource']['name'] == secret]\n if existing:\n logger.debug('secret %s already set', secret)\n else:\n custom['secrets'].append({'secretSource': {'name': secret},\n 'mountPath': secret_path})\n if plugin and plugin[2] is not None:\n self.dj.dock_json_set_arg(*(plugin + (secret_path,)))\n else:\n logger.debug('not setting secret for unused plugin %s', plugin[1])\n",
"role": "user"
},
{
"content": "set_secret_for_plugin",
"role": "assistant"
}
] |
def x(self, secrets):
secret_set = False
for plugin, secret in secrets.items():
if not isinstance(plugin, tuple) or len(plugin) != 3:
raise ValueError('got "%s" as secrets key, need 3-tuple' % plugin)
if secret is not None:
if isinstance(secret, list):
for secret_item in secret:
self.set_secret_for_plugin(secret_item, plugin=plugin)
else:
self.set_secret_for_plugin(secret, plugin=plugin)
secret_set = True
if not secret_set:
if 'secrets' in self.template['spec']['strategy']['customStrategy']:
del self.template['spec']['strategy']['customStrategy']['secrets']
|
set_secrets
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, secrets):\n secret_set = False\n for plugin, secret in secrets.items():\n if not isinstance(plugin, tuple) or len(plugin) != 3:\n raise ValueError('got \"%s\" as secrets key, need 3-tuple' % plugin)\n if secret is not None:\n if isinstance(secret, list):\n for secret_item in secret:\n self.set_secret_for_plugin(secret_item, plugin=plugin)\n else:\n self.set_secret_for_plugin(secret, plugin=plugin)\n secret_set = True\n if not secret_set:\n if 'secrets' in self.template['spec']['strategy']['customStrategy']:\n del self.template['spec']['strategy']['customStrategy']['secrets']\n",
"role": "user"
},
{
"content": "set_secrets",
"role": "assistant"
}
] |
def x(tag_and_push_registries, version):
registries = [uri for uri, regdict in tag_and_push_registries.items() if
regdict['version'] == version]
for registry in registries:
logger.info('removing %s registry: %s', version, registry)
del tag_and_push_registries[registry]
|
remove_tag_and_push_registries
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(tag_and_push_registries, version):\n registries = [uri for uri, regdict in tag_and_push_registries.items() if\n regdict['version'] == version]\n for registry in registries:\n logger.info('removing %s registry: %s', version, registry)\n del tag_and_push_registries[registry]\n",
"role": "user"
},
{
"content": "remove_tag_and_push_registries",
"role": "assistant"
}
] |
def x(self):
versions = self.spec.registry_api_versions.value
if 'v2' not in versions:
raise OsbsValidationException(
'v1-only docker registry API is not supported')
try:
push_conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins',
'tag_and_push')
tag_and_push_registries = push_conf['args']['registries']
except (KeyError, IndexError):
tag_and_push_registries = {}
if 'v1' not in versions:
for phase, name in [('postbuild_plugins', 'pulp_push')]:
logger.info('removing v1-only plugin: %s', name)
self.dj.remove_plugin(phase, name)
self.remove_tag_and_push_registries(tag_and_push_registries, 'v1')
for regdict in tag_and_push_registries.values():
if 'version' in regdict:
del regdict['version']
|
adjust_for_registry_api_versions
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n versions = self.spec.registry_api_versions.value\n if 'v2' not in versions:\n raise OsbsValidationException(\n 'v1-only docker registry API is not supported')\n try:\n push_conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins',\n 'tag_and_push')\n tag_and_push_registries = push_conf['args']['registries']\n except (KeyError, IndexError):\n tag_and_push_registries = {}\n if 'v1' not in versions:\n for phase, name in [('postbuild_plugins', 'pulp_push')]:\n logger.info('removing v1-only plugin: %s', name)\n self.dj.remove_plugin(phase, name)\n self.remove_tag_and_push_registries(tag_and_push_registries, 'v1')\n for regdict in tag_and_push_registries.values():\n if 'version' in regdict:\n del regdict['version']\n",
"role": "user"
},
{
"content": "adjust_for_registry_api_versions",
"role": "assistant"
}
] |
def x(self):
triggers = self.template['spec'].get('triggers', [])
remove_plugins = [('prebuild_plugins', 'check_and_set_rebuild'), (
'prebuild_plugins', 'stop_autorebuild_if_disabled')]
should_remove = False
if triggers and (self.is_custom_base_image() or self.
is_from_scratch_image()):
if self.is_custom_base_image():
msg = 'removing %s from request because custom base image'
elif self.is_from_scratch_image():
msg = 'removing %s from request because FROM scratch image'
del self.template['spec']['triggers']
should_remove = True
elif not triggers:
msg = 'removing %s from request because there are no triggers'
should_remove = True
if should_remove:
for when, which in remove_plugins:
logger.info(msg, which)
self.dj.remove_plugin(when, which)
|
adjust_for_triggers
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n triggers = self.template['spec'].get('triggers', [])\n remove_plugins = [('prebuild_plugins', 'check_and_set_rebuild'), (\n 'prebuild_plugins', 'stop_autorebuild_if_disabled')]\n should_remove = False\n if triggers and (self.is_custom_base_image() or self.\n is_from_scratch_image()):\n if self.is_custom_base_image():\n msg = 'removing %s from request because custom base image'\n elif self.is_from_scratch_image():\n msg = 'removing %s from request because FROM scratch image'\n del self.template['spec']['triggers']\n should_remove = True\n elif not triggers:\n msg = 'removing %s from request because there are no triggers'\n should_remove = True\n if should_remove:\n for when, which in remove_plugins:\n logger.info(msg, which)\n self.dj.remove_plugin(when, which)\n",
"role": "user"
},
{
"content": "adjust_for_triggers",
"role": "assistant"
}
] |
def x(self):
if self.scratch:
self.template['spec'].pop('triggers', None)
remove_plugins = [('prebuild_plugins', 'koji_parent'), (
'postbuild_plugins', 'compress'), ('postbuild_plugins',
'pulp_pull'), ('postbuild_plugins', 'koji_upload'), (
'postbuild_plugins', 'fetch_worker_metadata'), (
'postbuild_plugins', 'compare_components'), (
'postbuild_plugins', 'import_image'), ('exit_plugins',
'koji_promote'), ('exit_plugins', 'koji_import'), (
'exit_plugins', 'koji_tag_build'), ('exit_plugins',
'remove_worker_metadata'), ('exit_plugins', 'import_image')]
if not self.has_tag_suffixes_placeholder():
remove_plugins.append(('postbuild_plugins', 'tag_from_config'))
for when, which in remove_plugins:
logger.info('removing %s from scratch build request', which)
self.dj.remove_plugin(when, which)
if self.dj.dock_json_has_plugin_conf('postbuild_plugins',
'tag_by_labels'):
self.dj.dock_json_set_arg('postbuild_plugins', 'tag_by_labels',
'unique_tag_only', True)
self.set_label('scratch', 'true')
|
adjust_for_scratch
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.scratch:\n self.template['spec'].pop('triggers', None)\n remove_plugins = [('prebuild_plugins', 'koji_parent'), (\n 'postbuild_plugins', 'compress'), ('postbuild_plugins',\n 'pulp_pull'), ('postbuild_plugins', 'koji_upload'), (\n 'postbuild_plugins', 'fetch_worker_metadata'), (\n 'postbuild_plugins', 'compare_components'), (\n 'postbuild_plugins', 'import_image'), ('exit_plugins',\n 'koji_promote'), ('exit_plugins', 'koji_import'), (\n 'exit_plugins', 'koji_tag_build'), ('exit_plugins',\n 'remove_worker_metadata'), ('exit_plugins', 'import_image')]\n if not self.has_tag_suffixes_placeholder():\n remove_plugins.append(('postbuild_plugins', 'tag_from_config'))\n for when, which in remove_plugins:\n logger.info('removing %s from scratch build request', which)\n self.dj.remove_plugin(when, which)\n if self.dj.dock_json_has_plugin_conf('postbuild_plugins',\n 'tag_by_labels'):\n self.dj.dock_json_set_arg('postbuild_plugins', 'tag_by_labels',\n 'unique_tag_only', True)\n self.set_label('scratch', 'true')\n",
"role": "user"
},
{
"content": "adjust_for_scratch",
"role": "assistant"
}
] |
def x(self):
plugins = []
if self.is_custom_base_image():
plugins.append(('prebuild_plugins', 'pull_base_image'))
plugins.append(('prebuild_plugins', 'koji_parent'))
plugins.append(('prebuild_plugins', 'inject_parent_image'))
msg = 'removing %s from custom image build request'
else:
plugins.append(('prebuild_plugins', 'add_filesystem'))
msg = 'removing %s from non custom image build request'
for when, which in plugins:
logger.info(msg, which)
self.dj.remove_plugin(when, which)
|
adjust_for_custom_base_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n plugins = []\n if self.is_custom_base_image():\n plugins.append(('prebuild_plugins', 'pull_base_image'))\n plugins.append(('prebuild_plugins', 'koji_parent'))\n plugins.append(('prebuild_plugins', 'inject_parent_image'))\n msg = 'removing %s from custom image build request'\n else:\n plugins.append(('prebuild_plugins', 'add_filesystem'))\n msg = 'removing %s from non custom image build request'\n for when, which in plugins:\n logger.info(msg, which)\n self.dj.remove_plugin(when, which)\n",
"role": "user"
},
{
"content": "adjust_for_custom_base_image",
"role": "assistant"
}
] |
def x(self):
phase = 'prebuild_plugins'
plugin = 'koji'
if not self.dj.dock_json_has_plugin_conf(phase, plugin):
return
if self.spec.yum_repourls.value:
logger.info(
'removing koji from request because there is yum repo specified')
self.dj.remove_plugin(phase, plugin)
elif not (self.spec.koji_target.value and self.spec.kojiroot.value and
self.spec.kojihub.value):
logger.info('removing koji from request as not specified')
self.dj.remove_plugin(phase, plugin)
else:
self.dj.dock_json_set_arg(phase, plugin, 'target', self.spec.
koji_target.value)
self.dj.dock_json_set_arg(phase, plugin, 'root', self.spec.kojiroot
.value)
self.dj.dock_json_set_arg(phase, plugin, 'hub', self.spec.kojihub.value
)
if self.spec.proxy.value:
self.dj.dock_json_set_arg(phase, plugin, 'proxy', self.spec.
proxy.value)
|
render_koji
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'prebuild_plugins'\n plugin = 'koji'\n if not self.dj.dock_json_has_plugin_conf(phase, plugin):\n return\n if self.spec.yum_repourls.value:\n logger.info(\n 'removing koji from request because there is yum repo specified')\n self.dj.remove_plugin(phase, plugin)\n elif not (self.spec.koji_target.value and self.spec.kojiroot.value and\n self.spec.kojihub.value):\n logger.info('removing koji from request as not specified')\n self.dj.remove_plugin(phase, plugin)\n else:\n self.dj.dock_json_set_arg(phase, plugin, 'target', self.spec.\n koji_target.value)\n self.dj.dock_json_set_arg(phase, plugin, 'root', self.spec.kojiroot\n .value)\n self.dj.dock_json_set_arg(phase, plugin, 'hub', self.spec.kojihub.value\n )\n if self.spec.proxy.value:\n self.dj.dock_json_set_arg(phase, plugin, 'proxy', self.spec.\n proxy.value)\n",
"role": "user"
},
{
"content": "render_koji",
"role": "assistant"
}
] |
def x(self):
phase = 'prebuild_plugins'
plugin = 'bump_release'
if not self.dj.dock_json_has_plugin_conf(phase, plugin):
return
if self.spec.release.value:
logger.info('removing %s from request as release already specified',
plugin)
self.dj.remove_plugin(phase, plugin)
return
hub = self.spec.kojihub.value
if not hub:
logger.info('removing %s from request as koji hub not specified',
plugin)
self.dj.remove_plugin(phase, plugin)
return
self.dj.dock_json_set_arg(phase, plugin, 'hub', hub)
if self.spec.flatpak.value:
self.dj.dock_json_set_arg(phase, plugin, 'append', True)
|
render_bump_release
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'prebuild_plugins'\n plugin = 'bump_release'\n if not self.dj.dock_json_has_plugin_conf(phase, plugin):\n return\n if self.spec.release.value:\n logger.info('removing %s from request as release already specified',\n plugin)\n self.dj.remove_plugin(phase, plugin)\n return\n hub = self.spec.kojihub.value\n if not hub:\n logger.info('removing %s from request as koji hub not specified',\n plugin)\n self.dj.remove_plugin(phase, plugin)\n return\n self.dj.dock_json_set_arg(phase, plugin, 'hub', hub)\n if self.spec.flatpak.value:\n self.dj.dock_json_set_arg(phase, plugin, 'append', True)\n",
"role": "user"
},
{
"content": "render_bump_release",
"role": "assistant"
}
] |
def x(self):
phase = 'exit_plugins'
plugin = 'sendmail'
if not self.dj.dock_json_has_plugin_conf(phase, plugin):
return
if self.spec.smtp_host.value and self.spec.smtp_from.value:
self.dj.dock_json_set_arg(phase, plugin, 'url', self.spec.
builder_openshift_url.value)
self.dj.dock_json_set_arg(phase, plugin, 'smtp_host', self.spec.
smtp_host.value)
self.dj.dock_json_set_arg(phase, plugin, 'from_address', self.spec.
smtp_from.value)
else:
logger.info(
'removing sendmail from request, requires smtp_host and smtp_from')
self.dj.remove_plugin(phase, plugin)
return
if self.spec.kojihub.value and self.spec.kojiroot.value:
self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', self.spec.
kojihub.value)
self.dj.dock_json_set_arg(phase, plugin, 'koji_root', self.spec.
kojiroot.value)
if self.spec.smtp_to_submitter.value:
self.dj.dock_json_set_arg(phase, plugin, 'to_koji_submitter',
self.spec.smtp_to_submitter.value)
if self.spec.smtp_to_pkgowner.value:
self.dj.dock_json_set_arg(phase, plugin, 'to_koji_pkgowner',
self.spec.smtp_to_pkgowner.value)
if self.spec.smtp_additional_addresses.value:
self.dj.dock_json_set_arg(phase, plugin, 'additional_addresses',
self.spec.smtp_additional_addresses.value)
if self.spec.smtp_error_addresses.value:
self.dj.dock_json_set_arg(phase, plugin, 'error_addresses', self.
spec.smtp_error_addresses.value)
if self.spec.smtp_email_domain.value:
self.dj.dock_json_set_arg(phase, plugin, 'email_domain', self.spec.
smtp_email_domain.value)
|
render_sendmail
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'exit_plugins'\n plugin = 'sendmail'\n if not self.dj.dock_json_has_plugin_conf(phase, plugin):\n return\n if self.spec.smtp_host.value and self.spec.smtp_from.value:\n self.dj.dock_json_set_arg(phase, plugin, 'url', self.spec.\n builder_openshift_url.value)\n self.dj.dock_json_set_arg(phase, plugin, 'smtp_host', self.spec.\n smtp_host.value)\n self.dj.dock_json_set_arg(phase, plugin, 'from_address', self.spec.\n smtp_from.value)\n else:\n logger.info(\n 'removing sendmail from request, requires smtp_host and smtp_from')\n self.dj.remove_plugin(phase, plugin)\n return\n if self.spec.kojihub.value and self.spec.kojiroot.value:\n self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', self.spec.\n kojihub.value)\n self.dj.dock_json_set_arg(phase, plugin, 'koji_root', self.spec.\n kojiroot.value)\n if self.spec.smtp_to_submitter.value:\n self.dj.dock_json_set_arg(phase, plugin, 'to_koji_submitter',\n self.spec.smtp_to_submitter.value)\n if self.spec.smtp_to_pkgowner.value:\n self.dj.dock_json_set_arg(phase, plugin, 'to_koji_pkgowner',\n self.spec.smtp_to_pkgowner.value)\n if self.spec.smtp_additional_addresses.value:\n self.dj.dock_json_set_arg(phase, plugin, 'additional_addresses',\n self.spec.smtp_additional_addresses.value)\n if self.spec.smtp_error_addresses.value:\n self.dj.dock_json_set_arg(phase, plugin, 'error_addresses', self.\n spec.smtp_error_addresses.value)\n if self.spec.smtp_email_domain.value:\n self.dj.dock_json_set_arg(phase, plugin, 'email_domain', self.spec.\n smtp_email_domain.value)\n",
"role": "user"
},
{
"content": "render_sendmail",
"role": "assistant"
}
] |
def x(self):
phase = 'prebuild_plugins'
plugin = 'fetch_maven_artifacts'
if not self.dj.dock_json_has_plugin_conf(phase, plugin):
return
koji_hub = self.spec.kojihub.value
koji_root = self.spec.kojiroot.value
if not koji_hub and not koji_root:
logger.info(
'Removing %s because kojihub and kojiroot were not specified',
plugin)
self.dj.remove_plugin(phase, plugin)
return
self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', koji_hub)
self.dj.dock_json_set_arg(phase, plugin, 'koji_root', koji_root)
if self.spec.artifacts_allowed_domains.value:
self.dj.dock_json_set_arg(phase, plugin, 'allowed_domains', self.
spec.artifacts_allowed_domains.value)
|
render_fetch_maven_artifacts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'prebuild_plugins'\n plugin = 'fetch_maven_artifacts'\n if not self.dj.dock_json_has_plugin_conf(phase, plugin):\n return\n koji_hub = self.spec.kojihub.value\n koji_root = self.spec.kojiroot.value\n if not koji_hub and not koji_root:\n logger.info(\n 'Removing %s because kojihub and kojiroot were not specified',\n plugin)\n self.dj.remove_plugin(phase, plugin)\n return\n self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', koji_hub)\n self.dj.dock_json_set_arg(phase, plugin, 'koji_root', koji_root)\n if self.spec.artifacts_allowed_domains.value:\n self.dj.dock_json_set_arg(phase, plugin, 'allowed_domains', self.\n spec.artifacts_allowed_domains.value)\n",
"role": "user"
},
{
"content": "render_fetch_maven_artifacts",
"role": "assistant"
}
] |
def x(self):
phase = 'postbuild_plugins'
plugin = 'tag_from_config'
if not self.has_tag_suffixes_placeholder():
return
unique_tag = self.spec.image_tag.value.split(':')[-1]
tag_suffixes = {'unique': [unique_tag], 'primary': []}
if self.spec.build_type.value == BUILD_TYPE_ORCHESTRATOR:
if self.scratch:
pass
elif self.isolated:
tag_suffixes['primary'].extend(['{version}-{release}'])
elif self._repo_info.additional_tags.from_container_yaml:
tag_suffixes['primary'].extend(['{version}-{release}'])
tag_suffixes['primary'].extend(self._repo_info.additional_tags.tags
)
else:
tag_suffixes['primary'].extend(['latest', '{version}',
'{version}-{release}'])
tag_suffixes['primary'].extend(self._repo_info.additional_tags.tags
)
self.dj.dock_json_set_arg(phase, plugin, 'tag_suffixes', tag_suffixes)
|
render_tag_from_config
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'postbuild_plugins'\n plugin = 'tag_from_config'\n if not self.has_tag_suffixes_placeholder():\n return\n unique_tag = self.spec.image_tag.value.split(':')[-1]\n tag_suffixes = {'unique': [unique_tag], 'primary': []}\n if self.spec.build_type.value == BUILD_TYPE_ORCHESTRATOR:\n if self.scratch:\n pass\n elif self.isolated:\n tag_suffixes['primary'].extend(['{version}-{release}'])\n elif self._repo_info.additional_tags.from_container_yaml:\n tag_suffixes['primary'].extend(['{version}-{release}'])\n tag_suffixes['primary'].extend(self._repo_info.additional_tags.tags\n )\n else:\n tag_suffixes['primary'].extend(['latest', '{version}',\n '{version}-{release}'])\n tag_suffixes['primary'].extend(self._repo_info.additional_tags.tags\n )\n self.dj.dock_json_set_arg(phase, plugin, 'tag_suffixes', tag_suffixes)\n",
"role": "user"
},
{
"content": "render_tag_from_config",
"role": "assistant"
}
] |
def x(self):
phases = 'postbuild_plugins', 'exit_plugins'
plugin = 'pulp_pull'
for phase in phases:
if not self.dj.dock_json_has_plugin_conf(phase, plugin):
continue
pulp_registry = self.spec.pulp_registry.value
if not pulp_registry:
logger.info('removing %s from request, requires pulp_registry',
pulp_registry)
self.dj.remove_plugin(phase, plugin)
continue
if not self.spec.kojihub.value:
logger.info('Removing %s because no kojihub was specified', plugin)
self.dj.remove_plugin(phase, plugin)
continue
if self.spec.prefer_schema1_digest.value is not None:
self.dj.dock_json_set_arg(phase, 'pulp_pull',
'expect_v2schema2', not self.spec.prefer_schema1_digest.value)
|
render_pulp_pull
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phases = 'postbuild_plugins', 'exit_plugins'\n plugin = 'pulp_pull'\n for phase in phases:\n if not self.dj.dock_json_has_plugin_conf(phase, plugin):\n continue\n pulp_registry = self.spec.pulp_registry.value\n if not pulp_registry:\n logger.info('removing %s from request, requires pulp_registry',\n pulp_registry)\n self.dj.remove_plugin(phase, plugin)\n continue\n if not self.spec.kojihub.value:\n logger.info('Removing %s because no kojihub was specified', plugin)\n self.dj.remove_plugin(phase, plugin)\n continue\n if self.spec.prefer_schema1_digest.value is not None:\n self.dj.dock_json_set_arg(phase, 'pulp_pull',\n 'expect_v2schema2', not self.spec.prefer_schema1_digest.value)\n",
"role": "user"
},
{
"content": "render_pulp_pull",
"role": "assistant"
}
] |
def x(self):
if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_sync'):
return
pulp_registry = self.spec.pulp_registry.value
docker_registry = None
registry_secret = None
registries = zip_longest(self.spec.registry_uris.value, self.spec.
registry_secrets.value)
for registry, secret in registries:
if registry.version == 'v2':
docker_registry = registry.uri
registry_secret = secret
logger.info('using docker v2 registry %s for pulp_sync',
docker_registry)
break
if pulp_registry and docker_registry:
self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
'pulp_registry_name', pulp_registry)
self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
'docker_registry', docker_registry)
if registry_secret:
self.set_secret_for_plugin(registry_secret, plugin=(
'postbuild_plugins', 'pulp_sync', 'registry_secret_path'))
if self.spec.pulp_secret.value is None:
raise OsbsValidationException(
'Pulp registry specified but no auth config')
source_registry = self.spec.source_registry_uri.value
perform_delete = (source_registry is None or source_registry.
docker_uri != registry.docker_uri)
if perform_delete:
push_conf = self.dj.dock_json_get_plugin_conf('exit_plugins',
'delete_from_registry')
args = push_conf.setdefault('args', {})
delete_registries = args.setdefault('registries', {})
placeholder = '{{REGISTRY_URI}}'
if placeholder in delete_registries:
regdict = delete_registries[placeholder].copy()
del delete_registries[placeholder]
else:
regdict = {}
if registry_secret:
regdict['secret'] = os.path.join(SECRETS_PATH, registry_secret)
delete_registries[docker_registry] = regdict
self.dj.dock_json_set_arg('exit_plugins',
'delete_from_registry', 'registries', delete_registries)
else:
logger.info(
'removing delete_from_registry from request, source and target registry are identical'
)
self.dj.remove_plugin('exit_plugins', 'delete_from_registry')
else:
logger.info(
'removing pulp_sync+delete_from_registry from request, requires pulp_registry and a v2 registry'
)
self.dj.remove_plugin('postbuild_plugins', 'pulp_sync')
self.dj.remove_plugin('exit_plugins', 'delete_from_registry')
|
render_pulp_sync
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_sync'):\n return\n pulp_registry = self.spec.pulp_registry.value\n docker_registry = None\n registry_secret = None\n registries = zip_longest(self.spec.registry_uris.value, self.spec.\n registry_secrets.value)\n for registry, secret in registries:\n if registry.version == 'v2':\n docker_registry = registry.uri\n registry_secret = secret\n logger.info('using docker v2 registry %s for pulp_sync',\n docker_registry)\n break\n if pulp_registry and docker_registry:\n self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',\n 'pulp_registry_name', pulp_registry)\n self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',\n 'docker_registry', docker_registry)\n if registry_secret:\n self.set_secret_for_plugin(registry_secret, plugin=(\n 'postbuild_plugins', 'pulp_sync', 'registry_secret_path'))\n if self.spec.pulp_secret.value is None:\n raise OsbsValidationException(\n 'Pulp registry specified but no auth config')\n source_registry = self.spec.source_registry_uri.value\n perform_delete = (source_registry is None or source_registry.\n docker_uri != registry.docker_uri)\n if perform_delete:\n push_conf = self.dj.dock_json_get_plugin_conf('exit_plugins',\n 'delete_from_registry')\n args = push_conf.setdefault('args', {})\n delete_registries = args.setdefault('registries', {})\n placeholder = '{{REGISTRY_URI}}'\n if placeholder in delete_registries:\n regdict = delete_registries[placeholder].copy()\n del delete_registries[placeholder]\n else:\n regdict = {}\n if registry_secret:\n regdict['secret'] = os.path.join(SECRETS_PATH, registry_secret)\n delete_registries[docker_registry] = regdict\n self.dj.dock_json_set_arg('exit_plugins',\n 'delete_from_registry', 'registries', delete_registries)\n else:\n logger.info(\n 'removing delete_from_registry from request, source and target registry are identical'\n )\n self.dj.remove_plugin('exit_plugins', 'delete_from_registry')\n else:\n logger.info(\n 'removing pulp_sync+delete_from_registry from request, requires pulp_registry and a v2 registry'\n )\n self.dj.remove_plugin('postbuild_plugins', 'pulp_sync')\n self.dj.remove_plugin('exit_plugins', 'delete_from_registry')\n",
"role": "user"
},
{
"content": "render_pulp_sync",
"role": "assistant"
}
] |
def x(self):
if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_tag'):
return
pulp_registry = self.spec.pulp_registry.value
if pulp_registry:
self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_tag',
'pulp_registry_name', pulp_registry)
if self.spec.pulp_secret.value is None:
conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins',
'pulp_tag')
args = conf.get('args', {})
if 'username' not in args:
raise OsbsValidationException(
'Pulp registry specified but no auth config')
else:
logger.info('removing pulp_tag from request, requires pulp_registry')
self.dj.remove_plugin('postbuild_plugins', 'pulp_tag')
|
render_pulp_tag
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_tag'):\n return\n pulp_registry = self.spec.pulp_registry.value\n if pulp_registry:\n self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_tag',\n 'pulp_registry_name', pulp_registry)\n if self.spec.pulp_secret.value is None:\n conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins',\n 'pulp_tag')\n args = conf.get('args', {})\n if 'username' not in args:\n raise OsbsValidationException(\n 'Pulp registry specified but no auth config')\n else:\n logger.info('removing pulp_tag from request, requires pulp_registry')\n self.dj.remove_plugin('postbuild_plugins', 'pulp_tag')\n",
"role": "user"
},
{
"content": "render_pulp_tag",
"role": "assistant"
}
] |
def x(self):
if not self.dj.dock_json_has_plugin_conf('postbuild_plugins',
'group_manifests'):
return
push_conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins',
'group_manifests')
args = push_conf.setdefault('args', {})
registries = args.setdefault('registries', {})
placeholder = '{{REGISTRY_URI}}'
if placeholder in registries:
for registry, secret in zip_longest(self.spec.registry_uris.value,
self.spec.registry_secrets.value):
if not registry.uri:
continue
regdict = registries[placeholder].copy()
regdict['version'] = registry.version
if secret:
regdict['secret'] = os.path.join(SECRETS_PATH, secret)
registries[registry.docker_uri] = regdict
del registries[placeholder]
self.dj.dock_json_set_arg('postbuild_plugins', 'group_manifests',
'group', self.spec.group_manifests.value)
goarch = {}
for platform, architecture in self.platform_descriptors.items():
goarch[platform] = architecture['architecture']
self.dj.dock_json_set_arg('postbuild_plugins', 'group_manifests',
'goarch', goarch)
|
render_group_manifests
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if not self.dj.dock_json_has_plugin_conf('postbuild_plugins',\n 'group_manifests'):\n return\n push_conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins',\n 'group_manifests')\n args = push_conf.setdefault('args', {})\n registries = args.setdefault('registries', {})\n placeholder = '{{REGISTRY_URI}}'\n if placeholder in registries:\n for registry, secret in zip_longest(self.spec.registry_uris.value,\n self.spec.registry_secrets.value):\n if not registry.uri:\n continue\n regdict = registries[placeholder].copy()\n regdict['version'] = registry.version\n if secret:\n regdict['secret'] = os.path.join(SECRETS_PATH, secret)\n registries[registry.docker_uri] = regdict\n del registries[placeholder]\n self.dj.dock_json_set_arg('postbuild_plugins', 'group_manifests',\n 'group', self.spec.group_manifests.value)\n goarch = {}\n for platform, architecture in self.platform_descriptors.items():\n goarch[platform] = architecture['architecture']\n self.dj.dock_json_set_arg('postbuild_plugins', 'group_manifests',\n 'goarch', goarch)\n",
"role": "user"
},
{
"content": "render_group_manifests",
"role": "assistant"
}
] |
def x(self, use_auth=None):
phases = 'postbuild_plugins', 'exit_plugins'
plugin = 'import_image'
for phase in phases:
if (self.spec.imagestream_name.value is None or self.spec.
imagestream_url.value is None):
logger.info(
'removing %s from request, registry or repo url is not defined'
, plugin)
self.dj.remove_plugin(phase, plugin)
continue
if self.dj.dock_json_has_plugin_conf(phase, plugin):
self.dj.dock_json_set_arg(phase, plugin, 'imagestream', self.
spec.imagestream_name.value)
self.dj.dock_json_set_arg(phase, plugin, 'docker_image_repo',
self.spec.imagestream_url.value)
self.dj.dock_json_set_arg(phase, plugin, 'url', self.spec.
builder_openshift_url.value)
self.dj.dock_json_set_arg(phase, plugin, 'build_json_dir', self
.spec.builder_build_json_dir.value)
use_auth = self.spec.use_auth.value
if use_auth is not None:
self.dj.dock_json_set_arg(phase, plugin, 'use_auth', use_auth)
if self.spec.imagestream_insecure_registry.value:
self.dj.dock_json_set_arg(phase, plugin,
'insecure_registry', True)
|
render_import_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, use_auth=None):\n phases = 'postbuild_plugins', 'exit_plugins'\n plugin = 'import_image'\n for phase in phases:\n if (self.spec.imagestream_name.value is None or self.spec.\n imagestream_url.value is None):\n logger.info(\n 'removing %s from request, registry or repo url is not defined'\n , plugin)\n self.dj.remove_plugin(phase, plugin)\n continue\n if self.dj.dock_json_has_plugin_conf(phase, plugin):\n self.dj.dock_json_set_arg(phase, plugin, 'imagestream', self.\n spec.imagestream_name.value)\n self.dj.dock_json_set_arg(phase, plugin, 'docker_image_repo',\n self.spec.imagestream_url.value)\n self.dj.dock_json_set_arg(phase, plugin, 'url', self.spec.\n builder_openshift_url.value)\n self.dj.dock_json_set_arg(phase, plugin, 'build_json_dir', self\n .spec.builder_build_json_dir.value)\n use_auth = self.spec.use_auth.value\n if use_auth is not None:\n self.dj.dock_json_set_arg(phase, plugin, 'use_auth', use_auth)\n if self.spec.imagestream_insecure_registry.value:\n self.dj.dock_json_set_arg(phase, plugin,\n 'insecure_registry', True)\n",
"role": "user"
},
{
"content": "render_import_image",
"role": "assistant"
}
] |
def x(self):
disable_plugins = self.customize_conf.get('disable_plugins', [])
if not disable_plugins:
logger.debug('No site-specific plugins to disable')
else:
for plugin_dict in disable_plugins:
try:
self.dj.remove_plugin(plugin_dict['plugin_type'],
plugin_dict['plugin_name'])
logger.debug('site-specific plugin disabled -> Type:{} Name:{}'
.format(plugin_dict['plugin_type'], plugin_dict[
'plugin_name']))
except KeyError:
logger.debug(
'Invalid custom configuration found for disable_plugins')
enable_plugins = self.customize_conf.get('enable_plugins', [])
if not enable_plugins:
logger.debug('No site-specific plugins to enable')
else:
for plugin_dict in enable_plugins:
try:
self.dj.add_plugin(plugin_dict['plugin_type'], plugin_dict[
'plugin_name'], plugin_dict['plugin_args'])
logger.debug(
'site-specific plugin enabled -> Type:{} Name:{} Args: {}'
.format(plugin_dict['plugin_type'], plugin_dict[
'plugin_name'], plugin_dict['plugin_args']))
except KeyError:
logger.debug(
'Invalid custom configuration found for enable_plugins')
|
render_customizations
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n disable_plugins = self.customize_conf.get('disable_plugins', [])\n if not disable_plugins:\n logger.debug('No site-specific plugins to disable')\n else:\n for plugin_dict in disable_plugins:\n try:\n self.dj.remove_plugin(plugin_dict['plugin_type'],\n plugin_dict['plugin_name'])\n logger.debug('site-specific plugin disabled -> Type:{} Name:{}'\n .format(plugin_dict['plugin_type'], plugin_dict[\n 'plugin_name']))\n except KeyError:\n logger.debug(\n 'Invalid custom configuration found for disable_plugins')\n enable_plugins = self.customize_conf.get('enable_plugins', [])\n if not enable_plugins:\n logger.debug('No site-specific plugins to enable')\n else:\n for plugin_dict in enable_plugins:\n try:\n self.dj.add_plugin(plugin_dict['plugin_type'], plugin_dict[\n 'plugin_name'], plugin_dict['plugin_args'])\n logger.debug(\n 'site-specific plugin enabled -> Type:{} Name:{} Args: {}'\n .format(plugin_dict['plugin_type'], plugin_dict[\n 'plugin_name'], plugin_dict['plugin_args']))\n except KeyError:\n logger.debug(\n 'Invalid custom configuration found for enable_plugins')\n",
"role": "user"
},
{
"content": "render_customizations",
"role": "assistant"
}
] |
def x(self, name, image_tag, platform):
if self.scratch or self.isolated:
name = image_tag
if platform:
platform_suffix = '-{}'.format(platform)
if name.endswith(platform_suffix):
name = name[:-len(platform_suffix)]
_, salt, timestamp = name.rsplit('-', 2)
if self.scratch:
name = 'scratch-{}-{}'.format(salt, timestamp)
elif self.isolated:
name = 'isolated-{}-{}'.format(salt, timestamp)
self.template['metadata']['name'] = name
|
render_name
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, image_tag, platform):\n if self.scratch or self.isolated:\n name = image_tag\n if platform:\n platform_suffix = '-{}'.format(platform)\n if name.endswith(platform_suffix):\n name = name[:-len(platform_suffix)]\n _, salt, timestamp = name.rsplit('-', 2)\n if self.scratch:\n name = 'scratch-{}-{}'.format(salt, timestamp)\n elif self.isolated:\n name = 'isolated-{}-{}'.format(salt, timestamp)\n self.template['metadata']['name'] = name\n",
"role": "user"
},
{
"content": "render_name",
"role": "assistant"
}
] |
def x(osbs, os_conf, capture_dir):
try:
os.mkdir(capture_dir)
except OSError:
pass
finally:
osbs.os._con.request = ResponseSaver(capture_dir, os_conf.
get_openshift_api_uri(), os_conf.get_k8s_api_uri(), osbs.os.
_con.request).request
|
setup_json_capture
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(osbs, os_conf, capture_dir):\n try:\n os.mkdir(capture_dir)\n except OSError:\n pass\n finally:\n osbs.os._con.request = ResponseSaver(capture_dir, os_conf.\n get_openshift_api_uri(), os_conf.get_k8s_api_uri(), osbs.os.\n _con.request).request\n",
"role": "user"
},
{
"content": "setup_json_capture",
"role": "assistant"
}
] |
def x():
try:
rows, columns = subprocess.check_output(['stty', 'size']).split()
except subprocess.CalledProcessError:
logger.info('not attached to terminal')
return 0, 0
logger.debug('console size is %s %s', rows, columns)
return int(rows), int(columns)
|
get_terminal_size
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n try:\n rows, columns = subprocess.check_output(['stty', 'size']).split()\n except subprocess.CalledProcessError:\n logger.info('not attached to terminal')\n return 0, 0\n logger.debug('console size is %s %s', rows, columns)\n return int(rows), int(columns)\n",
"role": "user"
},
{
"content": "get_terminal_size",
"role": "assistant"
}
] |
def x(self, col):
try:
return max([len(x[col]) for x in self.table if x[col]]) + 2
except KeyError:
logger.error('there is no column %r', col)
raise
|
longest_val_in_column
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, col):\n try:\n return max([len(x[col]) for x in self.table if x[col]]) + 2\n except KeyError:\n logger.error('there is no column %r', col)\n raise\n",
"role": "user"
},
{
"content": "longest_val_in_column",
"role": "assistant"
}
] |
def x(self):
self.col_count = len(self.col_list)
self.col_longest = self.get_all_longest_col_lengths()
self.data_length = sum(self.col_longest.values())
if self.terminal_width > 0:
self.total_free_space = (self.terminal_width - self.data_length -
self.col_count + 1)
if self.total_free_space <= 0:
self.total_free_space = None
else:
self.default_column_space = self.total_free_space // self.col_count
self.default_column_space_remainder = (self.total_free_space %
self.col_count)
logger.debug(
'total free space: %d, column space: %d, remainder: %d, columns: %d'
, self.total_free_space, self.default_column_space, self.
default_column_space_remainder, self.col_count)
else:
self.total_free_space = None
|
init
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.col_count = len(self.col_list)\n self.col_longest = self.get_all_longest_col_lengths()\n self.data_length = sum(self.col_longest.values())\n if self.terminal_width > 0:\n self.total_free_space = (self.terminal_width - self.data_length -\n self.col_count + 1)\n if self.total_free_space <= 0:\n self.total_free_space = None\n else:\n self.default_column_space = self.total_free_space // self.col_count\n self.default_column_space_remainder = (self.total_free_space %\n self.col_count)\n logger.debug(\n 'total free space: %d, column space: %d, remainder: %d, columns: %d'\n , self.total_free_space, self.default_column_space, self.\n default_column_space_remainder, self.col_count)\n else:\n self.total_free_space = None\n",
"role": "user"
},
{
"content": "init",
"role": "assistant"
}
] |
def x(self):
format_list = []
header_sepa_format_list = []
self.col_widths = {}
for col in self.col_list:
col_length = self.col_longest[col]
col_width = col_length + self._separate()
format_list.append(' {%s:%d} ' % (col, col_width - 2))
header_sepa_format_list.append('{%s:%d}' % (col, col_width))
self.col_widths[col] = col_width
logger.debug('column widths %s', self.col_widths)
self.format_str = '|'.join(format_list)
self.header_format_str = '+'.join(header_sepa_format_list)
self.header_data = {}
for k in self.col_widths:
self.header_data[k] = '-' * self.col_widths[k]
|
count_sizes
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n format_list = []\n header_sepa_format_list = []\n self.col_widths = {}\n for col in self.col_list:\n col_length = self.col_longest[col]\n col_width = col_length + self._separate()\n format_list.append(' {%s:%d} ' % (col, col_width - 2))\n header_sepa_format_list.append('{%s:%d}' % (col, col_width))\n self.col_widths[col] = col_width\n logger.debug('column widths %s', self.col_widths)\n self.format_str = '|'.join(format_list)\n self.header_format_str = '+'.join(header_sepa_format_list)\n self.header_data = {}\n for k in self.col_widths:\n self.header_data[k] = '-' * self.col_widths[k]\n",
"role": "user"
},
{
"content": "count_sizes",
"role": "assistant"
}
] |
def x(self):
response = {}
for col in self.col_list:
response[col] = self._longest_val_in_column(col)
return response
|
get_all_longest_col_lengths
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n response = {}\n for col in self.col_list:\n response[col] = self._longest_val_in_column(col)\n return response\n",
"role": "user"
},
{
"content": "get_all_longest_col_lengths",
"role": "assistant"
}
] |
def x(self):
if self.total_free_space is None:
return 0
else:
sepa = self.default_column_space
if self.default_column_space_remainder > 0:
sepa += 1
self.default_column_space_remainder -= 1
logger.debug('remainder: %d, separator: %d', self.
default_column_space_remainder, sepa)
return sepa
|
separate
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.total_free_space is None:\n return 0\n else:\n sepa = self.default_column_space\n if self.default_column_space_remainder > 0:\n sepa += 1\n self.default_column_space_remainder -= 1\n logger.debug('remainder: %d, separator: %d', self.\n default_column_space_remainder, sepa)\n return sepa\n",
"role": "user"
},
{
"content": "separate",
"role": "assistant"
}
] |
def x(self):
print(self.format_str.format(**self.header), file=sys.stderr)
print(self.header_format_str.format(**self.header_data), file=sys.stderr)
for row in self.data:
print(self.format_str.format(**row))
|
render
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n print(self.format_str.format(**self.header), file=sys.stderr)\n print(self.header_format_str.format(**self.header_data), file=sys.stderr)\n for row in self.data:\n print(self.format_str.format(**row))\n",
"role": "user"
},
{
"content": "render",
"role": "assistant"
}
] |
def x(self):
try:
jsonschema.validate(self._buffer, self._schema)
except jsonschema.ValidationError as e:
raise ValidationError(e)
|
validate_source_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n try:\n jsonschema.validate(self._buffer, self._schema)\n except jsonschema.ValidationError as e:\n raise ValidationError(e)\n",
"role": "user"
},
{
"content": "validate_source_data",
"role": "assistant"
}
] |
def x(self):
self._validate_source_data()
self._loader.inc_table_count()
yield TableData(self._make_table_name(), ['key', 'value'], [record for
record in self._buffer.items()], dp_extractor=self._loader.
dp_extractor, type_hints=self._extract_type_hints())
|
to_table_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._validate_source_data()\n self._loader.inc_table_count()\n yield TableData(self._make_table_name(), ['key', 'value'], [record for\n record in self._buffer.items()], dp_extractor=self._loader.\n dp_extractor, type_hints=self._extract_type_hints())\n",
"role": "user"
},
{
"content": "to_table_data",
"role": "assistant"
}
] |
def x(self):
self._validate_source_data()
for table_key, json_records in six.iteritems(self._buffer):
attr_name_set = set()
for json_record in json_records:
attr_name_set = attr_name_set.union(six.viewkeys(json_record))
headers = sorted(attr_name_set)
self._loader.inc_table_count()
self._table_key = table_key
yield TableData(self._make_table_name(), headers, json_records,
dp_extractor=self._loader.dp_extractor, type_hints=self.
_extract_type_hints(headers))
|
to_table_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._validate_source_data()\n for table_key, json_records in six.iteritems(self._buffer):\n attr_name_set = set()\n for json_record in json_records:\n attr_name_set = attr_name_set.union(six.viewkeys(json_record))\n headers = sorted(attr_name_set)\n self._loader.inc_table_count()\n self._table_key = table_key\n yield TableData(self._make_table_name(), headers, json_records,\n dp_extractor=self._loader.dp_extractor, type_hints=self.\n _extract_type_hints(headers))\n",
"role": "user"
},
{
"content": "to_table_data",
"role": "assistant"
}
] |
def x(self):
self._validate_source_data()
for table_key, json_records in six.iteritems(self._buffer):
headers = sorted(six.viewkeys(json_records))
self._loader.inc_table_count()
self._table_key = table_key
yield TableData(self._make_table_name(), headers, zip(*[
json_records.get(header) for header in headers]), dp_extractor=
self._loader.dp_extractor, type_hints=self._extract_type_hints(
headers))
|
to_table_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._validate_source_data()\n for table_key, json_records in six.iteritems(self._buffer):\n headers = sorted(six.viewkeys(json_records))\n self._loader.inc_table_count()\n self._table_key = table_key\n yield TableData(self._make_table_name(), headers, zip(*[\n json_records.get(header) for header in headers]), dp_extractor=\n self._loader.dp_extractor, type_hints=self._extract_type_hints(\n headers))\n",
"role": "user"
},
{
"content": "to_table_data",
"role": "assistant"
}
] |
def x(self):
self._validate_source_data()
for table_key, json_records in six.iteritems(self._buffer):
self._loader.inc_table_count()
self._table_key = table_key
yield TableData(self._make_table_name(), ['key', 'value'], [record for
record in json_records.items()], dp_extractor=self._loader.
dp_extractor, type_hints=self._extract_type_hints())
|
to_table_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._validate_source_data()\n for table_key, json_records in six.iteritems(self._buffer):\n self._loader.inc_table_count()\n self._table_key = table_key\n yield TableData(self._make_table_name(), ['key', 'value'], [record for\n record in json_records.items()], dp_extractor=self._loader.\n dp_extractor, type_hints=self._extract_type_hints())\n",
"role": "user"
},
{
"content": "to_table_data",
"role": "assistant"
}
] |
def x(self, **kwargs):
self.scratch = kwargs.get('scratch')
self.is_auto = kwargs.pop('is_auto', False)
self.isolated = kwargs.get('isolated')
self.osbs_api = kwargs.pop('osbs_api', None)
self.validate_build_variation()
self.base_image = kwargs.get('base_image')
self.platform_node_selector = kwargs.get('platform_node_selector', {})
self.scratch_build_node_selector = kwargs.get('scratch_build_node_selector'
, {})
self.explicit_build_node_selector = kwargs.get(
'explicit_build_node_selector', {})
self.auto_build_node_selector = kwargs.get('auto_build_node_selector', {})
self.isolated_build_node_selector = kwargs.get(
'isolated_build_node_selector', {})
logger.debug("now setting params '%s' for user_params", kwargs)
self.user_params.set_params(**kwargs)
self.source_registry = None
self.organization = None
|
set_params
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, **kwargs):\n self.scratch = kwargs.get('scratch')\n self.is_auto = kwargs.pop('is_auto', False)\n self.isolated = kwargs.get('isolated')\n self.osbs_api = kwargs.pop('osbs_api', None)\n self.validate_build_variation()\n self.base_image = kwargs.get('base_image')\n self.platform_node_selector = kwargs.get('platform_node_selector', {})\n self.scratch_build_node_selector = kwargs.get('scratch_build_node_selector'\n , {})\n self.explicit_build_node_selector = kwargs.get(\n 'explicit_build_node_selector', {})\n self.auto_build_node_selector = kwargs.get('auto_build_node_selector', {})\n self.isolated_build_node_selector = kwargs.get(\n 'isolated_build_node_selector', {})\n logger.debug(\"now setting params '%s' for user_params\", kwargs)\n self.user_params.set_params(**kwargs)\n self.source_registry = None\n self.organization = None\n",
"role": "user"
},
{
"content": "set_params",
"role": "assistant"
}
] |
def x(self):
reactor_config_override = self.user_params.reactor_config_override.value
reactor_config_map = self.user_params.reactor_config_map.value
data = None
if reactor_config_override:
data = reactor_config_override
elif reactor_config_map:
config_map = self.osbs_api.get_config_map(reactor_config_map)
data = config_map.get_data_by_key('config.yaml')
if not data:
if self.user_params.flatpak.value:
raise OsbsValidationException('flatpak_base_image must be provided'
)
else:
return
source_registry_key = 'source_registry'
registry_organization_key = 'registries_organization'
req_secrets_key = 'required_secrets'
token_secrets_key = 'worker_token_secrets'
flatpak_key = 'flatpak'
flatpak_base_image_key = 'base_image'
if source_registry_key in data:
self.source_registry = data[source_registry_key]
if registry_organization_key in data:
self.organization = data[registry_organization_key]
if self.user_params.flatpak.value:
flatpack_base_image = data.get(flatpak_key, {}).get(
flatpak_base_image_key, None)
if flatpack_base_image:
self.base_image = flatpack_base_image
self.user_params.base_image.value = flatpack_base_image
else:
raise OsbsValidationException('flatpak_base_image must be provided'
)
required_secrets = data.get(req_secrets_key, [])
token_secrets = data.get(token_secrets_key, [])
self._set_required_secrets(required_secrets, token_secrets)
|
set_data_from_reactor_config
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n reactor_config_override = self.user_params.reactor_config_override.value\n reactor_config_map = self.user_params.reactor_config_map.value\n data = None\n if reactor_config_override:\n data = reactor_config_override\n elif reactor_config_map:\n config_map = self.osbs_api.get_config_map(reactor_config_map)\n data = config_map.get_data_by_key('config.yaml')\n if not data:\n if self.user_params.flatpak.value:\n raise OsbsValidationException('flatpak_base_image must be provided'\n )\n else:\n return\n source_registry_key = 'source_registry'\n registry_organization_key = 'registries_organization'\n req_secrets_key = 'required_secrets'\n token_secrets_key = 'worker_token_secrets'\n flatpak_key = 'flatpak'\n flatpak_base_image_key = 'base_image'\n if source_registry_key in data:\n self.source_registry = data[source_registry_key]\n if registry_organization_key in data:\n self.organization = data[registry_organization_key]\n if self.user_params.flatpak.value:\n flatpack_base_image = data.get(flatpak_key, {}).get(\n flatpak_base_image_key, None)\n if flatpack_base_image:\n self.base_image = flatpack_base_image\n self.user_params.base_image.value = flatpack_base_image\n else:\n raise OsbsValidationException('flatpak_base_image must be provided'\n )\n required_secrets = data.get(req_secrets_key, [])\n token_secrets = data.get(token_secrets_key, [])\n self._set_required_secrets(required_secrets, token_secrets)\n",
"role": "user"
},
{
"content": "set_data_from_reactor_config",
"role": "assistant"
}
] |
def x(self, required_secrets, token_secrets):
if self.user_params.build_type.value == BUILD_TYPE_ORCHESTRATOR:
required_secrets += token_secrets
if not required_secrets:
return
secrets = self.template['spec']['strategy']['customStrategy'].setdefault(
'secrets', [])
existing = set(secret_mount['secretSource']['name'] for secret_mount in
secrets)
required_secrets = set(required_secrets)
already_set = required_secrets.intersection(existing)
if already_set:
logger.debug('secrets %s are already set', already_set)
for secret in (required_secrets - existing):
secret_path = os.path.join(SECRETS_PATH, secret)
logger.info('Configuring %s secret at %s', secret, secret_path)
secrets.append({'secretSource': {'name': secret}, 'mountPath':
secret_path})
|
set_required_secrets
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, required_secrets, token_secrets):\n if self.user_params.build_type.value == BUILD_TYPE_ORCHESTRATOR:\n required_secrets += token_secrets\n if not required_secrets:\n return\n secrets = self.template['spec']['strategy']['customStrategy'].setdefault(\n 'secrets', [])\n existing = set(secret_mount['secretSource']['name'] for secret_mount in\n secrets)\n required_secrets = set(required_secrets)\n already_set = required_secrets.intersection(existing)\n if already_set:\n logger.debug('secrets %s are already set', already_set)\n for secret in (required_secrets - existing):\n secret_path = os.path.join(SECRETS_PATH, secret)\n logger.info('Configuring %s secret at %s', secret, secret_path)\n secrets.append({'secretSource': {'name': secret}, 'mountPath':\n secret_path})\n",
"role": "user"
},
{
"content": "set_required_secrets",
"role": "assistant"
}
] |
def x(self, phase, name, reason=None):
for p in self.template[phase]:
if p.get('name') == name:
self.template[phase].remove(p)
if reason:
logger.info('Removing {}:{}, {}'.format(phase, name, reason))
break
|
remove_plugin
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, phase, name, reason=None):\n for p in self.template[phase]:\n if p.get('name') == name:\n self.template[phase].remove(p)\n if reason:\n logger.info('Removing {}:{}, {}'.format(phase, name, reason))\n break\n",
"role": "user"
},
{
"content": "remove_plugin",
"role": "assistant"
}
] |
def x(self, phase, name, args, reason=None):
plugin_modified = False
for plugin in self.template[phase]:
if plugin['name'] == name:
plugin['args'] = args
plugin_modified = True
if not plugin_modified:
self.template[phase].append({'name': name, 'args': args})
if reason:
logger.info('{}:{} with args {}, {}'.format(phase, name, args,
reason))
|
add_plugin
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, phase, name, args, reason=None):\n plugin_modified = False\n for plugin in self.template[phase]:\n if plugin['name'] == name:\n plugin['args'] = args\n plugin_modified = True\n if not plugin_modified:\n self.template[phase].append({'name': name, 'args': args})\n if reason:\n logger.info('{}:{} with args {}, {}'.format(phase, name, args,\n reason))\n",
"role": "user"
},
{
"content": "add_plugin",
"role": "assistant"
}
] |
def x(self, phase, name):
match = [x for x in self.template[phase] if x.get('name') == name]
return match[0]
|
get_plugin_conf
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, phase, name):\n match = [x for x in self.template[phase] if x.get('name') == name]\n return match[0]\n",
"role": "user"
},
{
"content": "get_plugin_conf",
"role": "assistant"
}
] |
def x(self, phase, name):
try:
self.get_plugin_conf(phase, name)
return True
except (KeyError, IndexError):
return False
|
has_plugin_conf
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, phase, name):\n try:\n self.get_plugin_conf(phase, name)\n return True\n except (KeyError, IndexError):\n return False\n",
"role": "user"
},
{
"content": "has_plugin_conf",
"role": "assistant"
}
] |
def x(self):
if self.user_params.scratch.value:
remove_plugins = [('prebuild_plugins', 'koji_parent'), (
'postbuild_plugins', 'compress'), ('postbuild_plugins',
'pulp_pull'), ('postbuild_plugins', 'compare_components'), (
'postbuild_plugins', 'import_image'), ('exit_plugins',
'koji_promote'), ('exit_plugins', 'koji_tag_build'), (
'exit_plugins', 'import_image'), ('prebuild_plugins',
'check_and_set_rebuild'), ('prebuild_plugins',
'stop_autorebuild_if_disabled')]
if not self.has_tag_suffixes_placeholder():
remove_plugins.append(('postbuild_plugins', 'tag_from_config'))
for when, which in remove_plugins:
self.pt.remove_plugin(when, which,
'removed from scratch build request')
|
adjust_for_scratch
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.user_params.scratch.value:\n remove_plugins = [('prebuild_plugins', 'koji_parent'), (\n 'postbuild_plugins', 'compress'), ('postbuild_plugins',\n 'pulp_pull'), ('postbuild_plugins', 'compare_components'), (\n 'postbuild_plugins', 'import_image'), ('exit_plugins',\n 'koji_promote'), ('exit_plugins', 'koji_tag_build'), (\n 'exit_plugins', 'import_image'), ('prebuild_plugins',\n 'check_and_set_rebuild'), ('prebuild_plugins',\n 'stop_autorebuild_if_disabled')]\n if not self.has_tag_suffixes_placeholder():\n remove_plugins.append(('postbuild_plugins', 'tag_from_config'))\n for when, which in remove_plugins:\n self.pt.remove_plugin(when, which,\n 'removed from scratch build request')\n",
"role": "user"
},
{
"content": "adjust_for_scratch",
"role": "assistant"
}
] |
def x(self):
if self.user_params.isolated.value:
remove_plugins = [('prebuild_plugins', 'check_and_set_rebuild'), (
'prebuild_plugins', 'stop_autorebuild_if_disabled')]
for when, which in remove_plugins:
self.pt.remove_plugin(when, which,
'removed from isolated build request')
|
adjust_for_isolated
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.user_params.isolated.value:\n remove_plugins = [('prebuild_plugins', 'check_and_set_rebuild'), (\n 'prebuild_plugins', 'stop_autorebuild_if_disabled')]\n for when, which in remove_plugins:\n self.pt.remove_plugin(when, which,\n 'removed from isolated build request')\n",
"role": "user"
},
{
"content": "adjust_for_isolated",
"role": "assistant"
}
] |
def x(self):
if self.user_params.flatpak.value:
remove_plugins = [('prebuild_plugins', 'resolve_composes'), (
'prepublish_plugins', 'squash'), ('postbuild_plugins',
'pulp_push'), ('postbuild_plugins', 'pulp_tag'), (
'postbuild_plugins', 'pulp_sync'), ('exit_plugins',
'pulp_publish'), ('exit_plugins', 'pulp_pull'), ('exit_plugins',
'delete_from_registry')]
for when, which in remove_plugins:
self.pt.remove_plugin(when, which, 'not needed for flatpak build')
|
adjust_for_flatpak
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.user_params.flatpak.value:\n remove_plugins = [('prebuild_plugins', 'resolve_composes'), (\n 'prepublish_plugins', 'squash'), ('postbuild_plugins',\n 'pulp_push'), ('postbuild_plugins', 'pulp_tag'), (\n 'postbuild_plugins', 'pulp_sync'), ('exit_plugins',\n 'pulp_publish'), ('exit_plugins', 'pulp_pull'), ('exit_plugins',\n 'delete_from_registry')]\n for when, which in remove_plugins:\n self.pt.remove_plugin(when, which, 'not needed for flatpak build')\n",
"role": "user"
},
{
"content": "adjust_for_flatpak",
"role": "assistant"
}
] |
def x(self):
disable_plugins = self.pt.customize_conf.get('disable_plugins', [])
if not disable_plugins:
logger.debug('No site-user specified plugins to disable')
else:
for plugin in disable_plugins:
try:
self.pt.remove_plugin(plugin['plugin_type'], plugin[
'plugin_name'], 'disabled at user request')
except KeyError:
logger.info(
'Invalid custom configuration found for disable_plugins')
enable_plugins = self.pt.customize_conf.get('enable_plugins', [])
if not enable_plugins:
logger.debug('No site-user specified plugins to enable"')
else:
for plugin in enable_plugins:
try:
msg = 'enabled at user request'
self.pt.add_plugin(plugin['plugin_type'], plugin[
'plugin_name'], plugin['plugin_args'], msg)
except KeyError:
logger.info(
'Invalid custom configuration found for enable_plugins')
|
render_customizations
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n disable_plugins = self.pt.customize_conf.get('disable_plugins', [])\n if not disable_plugins:\n logger.debug('No site-user specified plugins to disable')\n else:\n for plugin in disable_plugins:\n try:\n self.pt.remove_plugin(plugin['plugin_type'], plugin[\n 'plugin_name'], 'disabled at user request')\n except KeyError:\n logger.info(\n 'Invalid custom configuration found for disable_plugins')\n enable_plugins = self.pt.customize_conf.get('enable_plugins', [])\n if not enable_plugins:\n logger.debug('No site-user specified plugins to enable\"')\n else:\n for plugin in enable_plugins:\n try:\n msg = 'enabled at user request'\n self.pt.add_plugin(plugin['plugin_type'], plugin[\n 'plugin_name'], plugin['plugin_args'], msg)\n except KeyError:\n logger.info(\n 'Invalid custom configuration found for enable_plugins')\n",
"role": "user"
},
{
"content": "render_customizations",
"role": "assistant"
}
] |
def x(self):
phase = 'prebuild_plugins'
plugin = 'koji'
if not self.pt.has_plugin_conf(phase, plugin):
return
if self.user_params.yum_repourls.value:
self.pt.remove_plugin(phase, plugin,
'there is a yum repo user parameter')
elif not self.pt.set_plugin_arg_valid(phase, plugin, 'target', self.
user_params.koji_target.value):
self.pt.remove_plugin(phase, plugin,
'no koji target supplied in user parameters')
|
render_koji
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'prebuild_plugins'\n plugin = 'koji'\n if not self.pt.has_plugin_conf(phase, plugin):\n return\n if self.user_params.yum_repourls.value:\n self.pt.remove_plugin(phase, plugin,\n 'there is a yum repo user parameter')\n elif not self.pt.set_plugin_arg_valid(phase, plugin, 'target', self.\n user_params.koji_target.value):\n self.pt.remove_plugin(phase, plugin,\n 'no koji target supplied in user parameters')\n",
"role": "user"
},
{
"content": "render_koji",
"role": "assistant"
}
] |
def x(self):
phase = 'prebuild_plugins'
plugin = 'bump_release'
if not self.pt.has_plugin_conf(phase, plugin):
return
if self.user_params.release.value:
self.pt.remove_plugin(phase, plugin,
'release value supplied as user parameter')
return
if self.user_params.flatpak.value:
self.pt.set_plugin_arg(phase, plugin, 'append', True)
|
render_bump_release
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'prebuild_plugins'\n plugin = 'bump_release'\n if not self.pt.has_plugin_conf(phase, plugin):\n return\n if self.user_params.release.value:\n self.pt.remove_plugin(phase, plugin,\n 'release value supplied as user parameter')\n return\n if self.user_params.flatpak.value:\n self.pt.set_plugin_arg(phase, plugin, 'append', True)\n",
"role": "user"
},
{
"content": "render_bump_release",
"role": "assistant"
}
] |
def x(self):
phase = 'prebuild_plugins'
plugin = 'check_and_set_platforms'
if not self.pt.has_plugin_conf(phase, plugin):
return
if self.user_params.koji_target.value:
self.pt.set_plugin_arg(phase, plugin, 'koji_target', self.
user_params.koji_target.value)
|
render_check_and_set_platforms
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'prebuild_plugins'\n plugin = 'check_and_set_platforms'\n if not self.pt.has_plugin_conf(phase, plugin):\n return\n if self.user_params.koji_target.value:\n self.pt.set_plugin_arg(phase, plugin, 'koji_target', self.\n user_params.koji_target.value)\n",
"role": "user"
},
{
"content": "render_check_and_set_platforms",
"role": "assistant"
}
] |
def x(self, use_auth=None):
if self.user_params.imagestream_name.value is None:
self.pt.remove_plugin('exit_plugins', 'import_image',
'imagestream not in user parameters')
elif self.pt.has_plugin_conf('exit_plugins', 'import_image'):
self.pt.set_plugin_arg('exit_plugins', 'import_image',
'imagestream', self.user_params.imagestream_name.value)
|
render_import_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, use_auth=None):\n if self.user_params.imagestream_name.value is None:\n self.pt.remove_plugin('exit_plugins', 'import_image',\n 'imagestream not in user parameters')\n elif self.pt.has_plugin_conf('exit_plugins', 'import_image'):\n self.pt.set_plugin_arg('exit_plugins', 'import_image',\n 'imagestream', self.user_params.imagestream_name.value)\n",
"role": "user"
},
{
"content": "render_import_image",
"role": "assistant"
}
] |
def x(self):
phase = 'postbuild_plugins'
plugin = 'tag_from_config'
if not self.has_tag_suffixes_placeholder():
return
unique_tag = self.user_params.image_tag.value.split(':')[-1]
tag_suffixes = {'unique': [unique_tag], 'primary': []}
if self.user_params.build_type.value == BUILD_TYPE_ORCHESTRATOR:
additional_tags = self.user_params.additional_tags.value or set()
if self.user_params.scratch.value:
pass
elif self.user_params.isolated.value:
tag_suffixes['primary'].extend(['{version}-{release}'])
elif self.user_params.tags_from_yaml.value:
tag_suffixes['primary'].extend(['{version}-{release}'])
tag_suffixes['primary'].extend(additional_tags)
else:
tag_suffixes['primary'].extend(['latest', '{version}',
'{version}-{release}'])
tag_suffixes['primary'].extend(additional_tags)
self.pt.set_plugin_arg(phase, plugin, 'tag_suffixes', tag_suffixes)
|
render_tag_from_config
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'postbuild_plugins'\n plugin = 'tag_from_config'\n if not self.has_tag_suffixes_placeholder():\n return\n unique_tag = self.user_params.image_tag.value.split(':')[-1]\n tag_suffixes = {'unique': [unique_tag], 'primary': []}\n if self.user_params.build_type.value == BUILD_TYPE_ORCHESTRATOR:\n additional_tags = self.user_params.additional_tags.value or set()\n if self.user_params.scratch.value:\n pass\n elif self.user_params.isolated.value:\n tag_suffixes['primary'].extend(['{version}-{release}'])\n elif self.user_params.tags_from_yaml.value:\n tag_suffixes['primary'].extend(['{version}-{release}'])\n tag_suffixes['primary'].extend(additional_tags)\n else:\n tag_suffixes['primary'].extend(['latest', '{version}',\n '{version}-{release}'])\n tag_suffixes['primary'].extend(additional_tags)\n self.pt.set_plugin_arg(phase, plugin, 'tag_suffixes', tag_suffixes)\n",
"role": "user"
},
{
"content": "render_tag_from_config",
"role": "assistant"
}
] |
def x(self):
phase = 'prebuild_plugins'
plugin = 'pull_base_image'
if self.user_params.parent_images_digests.value:
self.pt.set_plugin_arg(phase, plugin, 'parent_images_digests', self
.user_params.parent_images_digests.value)
|
render_pull_base_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n phase = 'prebuild_plugins'\n plugin = 'pull_base_image'\n if self.user_params.parent_images_digests.value:\n self.pt.set_plugin_arg(phase, plugin, 'parent_images_digests', self\n .user_params.parent_images_digests.value)\n",
"role": "user"
},
{
"content": "render_pull_base_image",
"role": "assistant"
}
] |
def x(self, username='~'):
url = self._build_url('users/%s/' % username, _prepend_namespace=False)
response = self._get(url)
check_response(response)
return response
|
get_user
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, username='~'):\n url = self._build_url('users/%s/' % username, _prepend_namespace=False)\n response = self._get(url)\n check_response(response)\n return response\n",
"role": "user"
},
{
"content": "get_user",
"role": "assistant"
}
] |
def x(self, build_json):
url = self._build_url('builds/')
logger.debug(build_json)
return self._post(url, data=json.dumps(build_json), headers={
'Content-Type': 'application/json'})
|
create_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_json):\n url = self._build_url('builds/')\n logger.debug(build_json)\n return self._post(url, data=json.dumps(build_json), headers={\n 'Content-Type': 'application/json'})\n",
"role": "user"
},
{
"content": "create_build",
"role": "assistant"
}
] |
def x(self, label_selectors):
labels = [('%s=%s' % (field, value)) for field, value in label_selectors]
labels = ','.join(labels)
url = self._build_url('buildconfigs/', labelSelector=labels)
return self._get(url).json()['items']
|
get_all_build_configs_by_labels
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, label_selectors):\n labels = [('%s=%s' % (field, value)) for field, value in label_selectors]\n labels = ','.join(labels)\n url = self._build_url('buildconfigs/', labelSelector=labels)\n return self._get(url).json()['items']\n",
"role": "user"
},
{
"content": "get_all_build_configs_by_labels",
"role": "assistant"
}
] |
def x(self, label_selectors):
items = self.get_all_build_configs_by_labels(label_selectors)
if not items:
raise OsbsException('Build config not found for labels: %r' % (
label_selectors,))
if len(items) > 1:
raise OsbsException(
'More than one build config found for labels: %r' % (
label_selectors,))
return items[0]
|
get_build_config_by_labels
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, label_selectors):\n items = self.get_all_build_configs_by_labels(label_selectors)\n if not items:\n raise OsbsException('Build config not found for labels: %r' % (\n label_selectors,))\n if len(items) > 1:\n raise OsbsException(\n 'More than one build config found for labels: %r' % (\n label_selectors,))\n return items[0]\n",
"role": "user"
},
{
"content": "get_build_config_by_labels",
"role": "assistant"
}
] |
def x(self, label_selectors, filter_key, filter_value):
items = self.get_all_build_configs_by_labels(label_selectors)
if filter_value is not None:
build_configs = []
for build_config in items:
match_value = graceful_chain_get(build_config, *filter_key.
split('.'))
if filter_value == match_value:
build_configs.append(build_config)
items = build_configs
if not items:
raise OsbsException('Build config not found for labels: %r' % (
label_selectors,))
if len(items) > 1:
raise OsbsException(
'More than one build config found for labels: %r' % (
label_selectors,))
return items[0]
|
get_build_config_by_labels_filtered
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, label_selectors, filter_key, filter_value):\n items = self.get_all_build_configs_by_labels(label_selectors)\n if filter_value is not None:\n build_configs = []\n for build_config in items:\n match_value = graceful_chain_get(build_config, *filter_key.\n split('.'))\n if filter_value == match_value:\n build_configs.append(build_config)\n items = build_configs\n if not items:\n raise OsbsException('Build config not found for labels: %r' % (\n label_selectors,))\n if len(items) > 1:\n raise OsbsException(\n 'More than one build config found for labels: %r' % (\n label_selectors,))\n return items[0]\n",
"role": "user"
},
{
"content": "get_build_config_by_labels_filtered",
"role": "assistant"
}
] |
def x(self, build_config_json):
url = self._build_url('buildconfigs/')
return self._post(url, data=build_config_json, headers={'Content-Type':
'application/json'})
|
create_build_config
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_config_json):\n url = self._build_url('buildconfigs/')\n return self._post(url, data=build_config_json, headers={'Content-Type':\n 'application/json'})\n",
"role": "user"
},
{
"content": "create_build_config",
"role": "assistant"
}
] |
def x(self, build_id):
kwargs = {'follow': 1}
min_idle_timeout = 60
last_activity = time.time()
while True:
buildlogs_url = self._build_url('builds/%s/log/' % build_id, **kwargs)
try:
response = self._get(buildlogs_url, stream=1, headers={
'Connection': 'close'})
check_response(response)
for line in response.iter_lines():
last_activity = time.time()
yield line
except OsbsException as exc:
if not isinstance(exc.cause, ConnectionError):
raise
idle = time.time() - last_activity
logger.debug('connection closed after %ds', idle)
if idle < min_idle_timeout:
return
since = int(idle - 1)
logger.debug('fetching logs starting from %ds ago', since)
kwargs['sinceSeconds'] = since
|
stream_logs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_id):\n kwargs = {'follow': 1}\n min_idle_timeout = 60\n last_activity = time.time()\n while True:\n buildlogs_url = self._build_url('builds/%s/log/' % build_id, **kwargs)\n try:\n response = self._get(buildlogs_url, stream=1, headers={\n 'Connection': 'close'})\n check_response(response)\n for line in response.iter_lines():\n last_activity = time.time()\n yield line\n except OsbsException as exc:\n if not isinstance(exc.cause, ConnectionError):\n raise\n idle = time.time() - last_activity\n logger.debug('connection closed after %ds', idle)\n if idle < min_idle_timeout:\n return\n since = int(idle - 1)\n logger.debug('fetching logs starting from %ds ago', since)\n kwargs['sinceSeconds'] = since\n",
"role": "user"
},
{
"content": "stream_logs",
"role": "assistant"
}
] |
def x(self, build_id, follow=False, build_json=None, wait_if_missing=False):
try:
build_json = build_json or self.get_build(build_id).json()
except OsbsResponseException as ex:
if ex.status_code == 404:
if not wait_if_missing:
raise OsbsException("Build '%s' doesn't exist." % build_id)
else:
raise
if follow or wait_if_missing:
build_json = self.wait_for_build_to_get_scheduled(build_id)
br = BuildResponse(build_json)
if br.is_pending():
return
if follow:
return self.stream_logs(build_id)
buildlogs_url = self._build_url('builds/%s/log/' % build_id)
response = self._get(buildlogs_url, headers={'Connection': 'close'})
check_response(response)
return response.content
|
logs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_id, follow=False, build_json=None, wait_if_missing=False):\n try:\n build_json = build_json or self.get_build(build_id).json()\n except OsbsResponseException as ex:\n if ex.status_code == 404:\n if not wait_if_missing:\n raise OsbsException(\"Build '%s' doesn't exist.\" % build_id)\n else:\n raise\n if follow or wait_if_missing:\n build_json = self.wait_for_build_to_get_scheduled(build_id)\n br = BuildResponse(build_json)\n if br.is_pending():\n return\n if follow:\n return self.stream_logs(build_id)\n buildlogs_url = self._build_url('builds/%s/log/' % build_id)\n response = self._get(buildlogs_url, headers={'Connection': 'close'})\n check_response(response)\n return response.content\n",
"role": "user"
},
{
"content": "logs",
"role": "assistant"
}
] |
def x(self, build_config_id=None, koji_task_id=None, field_selector=None,
labels=None):
query = {}
selector = '{key}={value}'
label = {}
if labels is not None:
label.update(labels)
if build_config_id is not None:
label['buildconfig'] = build_config_id
if koji_task_id is not None:
label['koji-task-id'] = str(koji_task_id)
if label:
query['labelSelector'] = ','.join([selector.format(key=key, value=
value) for key, value in label.items()])
if field_selector is not None:
query['fieldSelector'] = field_selector
url = self._build_url('builds/', **query)
return self._get(url)
|
list_builds
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_config_id=None, koji_task_id=None, field_selector=None,\n labels=None):\n query = {}\n selector = '{key}={value}'\n label = {}\n if labels is not None:\n label.update(labels)\n if build_config_id is not None:\n label['buildconfig'] = build_config_id\n if koji_task_id is not None:\n label['koji-task-id'] = str(koji_task_id)\n if label:\n query['labelSelector'] = ','.join([selector.format(key=key, value=\n value) for key, value in label.items()])\n if field_selector is not None:\n query['fieldSelector'] = field_selector\n url = self._build_url('builds/', **query)\n return self._get(url)\n",
"role": "user"
},
{
"content": "list_builds",
"role": "assistant"
}
] |
def x(self, name, quota_json):
url = self._build_k8s_url('resourcequotas/')
response = self._post(url, data=json.dumps(quota_json), headers={
'Content-Type': 'application/json'})
if response.status_code == http_client.CONFLICT:
url = self._build_k8s_url('resourcequotas/%s' % name)
response = self._put(url, data=json.dumps(quota_json), headers={
'Content-Type': 'application/json'})
check_response(response)
return response
|
create_resource_quota
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, quota_json):\n url = self._build_k8s_url('resourcequotas/')\n response = self._post(url, data=json.dumps(quota_json), headers={\n 'Content-Type': 'application/json'})\n if response.status_code == http_client.CONFLICT:\n url = self._build_k8s_url('resourcequotas/%s' % name)\n response = self._put(url, data=json.dumps(quota_json), headers={\n 'Content-Type': 'application/json'})\n check_response(response)\n return response\n",
"role": "user"
},
{
"content": "create_resource_quota",
"role": "assistant"
}
] |
def x(self, build_id, states):
logger.info("watching build '%s'", build_id)
for changetype, obj in self.watch_resource('builds', build_id):
try:
obj_name = obj['metadata']['name']
except KeyError:
logger.error("'object' doesn't have any name")
continue
try:
obj_status = obj['status']['phase']
except KeyError:
logger.error("'object' doesn't have any status")
continue
else:
obj_status_lower = obj_status.lower()
logger.info("object has changed: '%s', status: '%s', name: '%s'",
changetype, obj_status, obj_name)
if obj_name == build_id:
logger.info('matching build found')
logger.debug('is %s in %s?', repr(obj_status_lower), states)
if obj_status_lower in states:
logger.debug("Yes, build is in the state I'm waiting for.")
return obj
else:
logger.debug("No, build is not in the state I'm waiting for.")
else:
logger.info("The build %r isn't me %r", obj_name, build_id)
logger.warning("build '%s' was not found during wait", build_id)
raise OsbsWatchBuildNotFound(
"build '%s' was not found and response stream ended" % build_id)
|
wait
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_id, states):\n logger.info(\"watching build '%s'\", build_id)\n for changetype, obj in self.watch_resource('builds', build_id):\n try:\n obj_name = obj['metadata']['name']\n except KeyError:\n logger.error(\"'object' doesn't have any name\")\n continue\n try:\n obj_status = obj['status']['phase']\n except KeyError:\n logger.error(\"'object' doesn't have any status\")\n continue\n else:\n obj_status_lower = obj_status.lower()\n logger.info(\"object has changed: '%s', status: '%s', name: '%s'\",\n changetype, obj_status, obj_name)\n if obj_name == build_id:\n logger.info('matching build found')\n logger.debug('is %s in %s?', repr(obj_status_lower), states)\n if obj_status_lower in states:\n logger.debug(\"Yes, build is in the state I'm waiting for.\")\n return obj\n else:\n logger.debug(\"No, build is not in the state I'm waiting for.\")\n else:\n logger.info(\"The build %r isn't me %r\", obj_name, build_id)\n logger.warning(\"build '%s' was not found during wait\", build_id)\n raise OsbsWatchBuildNotFound(\n \"build '%s' was not found and response stream ended\" % build_id)\n",
"role": "user"
},
{
"content": "wait",
"role": "assistant"
}
] |
def x(self, collection, name, things, values, how):
url = self._build_url('%s/%s' % (collection, name))
response = self._get(url)
logger.debug('before modification: %s', response.content)
build_json = response.json()
how(build_json['metadata'], things, values)
response = self._put(url, data=json.dumps(build_json), use_json=True)
check_response(response)
return response
|
adjust_attributes_on_object
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, collection, name, things, values, how):\n url = self._build_url('%s/%s' % (collection, name))\n response = self._get(url)\n logger.debug('before modification: %s', response.content)\n build_json = response.json()\n how(build_json['metadata'], things, values)\n response = self._put(url, data=json.dumps(build_json), use_json=True)\n check_response(response)\n return response\n",
"role": "user"
},
{
"content": "adjust_attributes_on_object",
"role": "assistant"
}
] |
def x(self, build_id, annotations):
return self.adjust_attributes_on_object('builds', build_id,
'annotations', annotations, self._update_metadata_things)
|
update_annotations_on_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_id, annotations):\n return self.adjust_attributes_on_object('builds', build_id,\n 'annotations', annotations, self._update_metadata_things)\n",
"role": "user"
},
{
"content": "update_annotations_on_build",
"role": "assistant"
}
] |
def x(self, name, stream_import, tags=None):
imagestream_json = self.get_image_stream(name).json()
logger.debug('imagestream: %r', imagestream_json)
if 'dockerImageRepository' in imagestream_json.get('spec', {}):
logger.debug("Removing 'dockerImageRepository' from ImageStream %s",
name)
source_repo = imagestream_json['spec'].pop('dockerImageRepository')
imagestream_json['metadata']['annotations'][ANNOTATION_SOURCE_REPO
] = source_repo
imagestream_json = self.update_image_stream(name, imagestream_json
).json()
oldtags = imagestream_json.get('status', {}).get('tags', [])
logger.debug('tags before import: %r', oldtags)
stream_import['metadata']['name'] = name
stream_import['spec']['images'] = []
tags_set = set(tags) if tags else set()
for tag in imagestream_json.get('spec', {}).get('tags', []):
if tags_set and tag['name'] not in tags_set:
continue
image_import = {'from': tag['from'], 'to': {'name': tag['name']},
'importPolicy': tag.get('importPolicy'), 'referencePolicy': tag
.get('referencePolicy')}
stream_import['spec']['images'].append(image_import)
if not stream_import['spec']['images']:
logger.debug('No tags to import')
return False
import_url = self._build_url('imagestreamimports/')
import_response = self._post(import_url, data=json.dumps(stream_import),
use_json=True)
self._check_import_image_response(import_response)
new_tags = [image['tag'] for image in import_response.json().get(
'status', {}).get('images', [])]
logger.debug('tags after import: %r', new_tags)
return True
|
import_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, stream_import, tags=None):\n imagestream_json = self.get_image_stream(name).json()\n logger.debug('imagestream: %r', imagestream_json)\n if 'dockerImageRepository' in imagestream_json.get('spec', {}):\n logger.debug(\"Removing 'dockerImageRepository' from ImageStream %s\",\n name)\n source_repo = imagestream_json['spec'].pop('dockerImageRepository')\n imagestream_json['metadata']['annotations'][ANNOTATION_SOURCE_REPO\n ] = source_repo\n imagestream_json = self.update_image_stream(name, imagestream_json\n ).json()\n oldtags = imagestream_json.get('status', {}).get('tags', [])\n logger.debug('tags before import: %r', oldtags)\n stream_import['metadata']['name'] = name\n stream_import['spec']['images'] = []\n tags_set = set(tags) if tags else set()\n for tag in imagestream_json.get('spec', {}).get('tags', []):\n if tags_set and tag['name'] not in tags_set:\n continue\n image_import = {'from': tag['from'], 'to': {'name': tag['name']},\n 'importPolicy': tag.get('importPolicy'), 'referencePolicy': tag\n .get('referencePolicy')}\n stream_import['spec']['images'].append(image_import)\n if not stream_import['spec']['images']:\n logger.debug('No tags to import')\n return False\n import_url = self._build_url('imagestreamimports/')\n import_response = self._post(import_url, data=json.dumps(stream_import),\n use_json=True)\n self._check_import_image_response(import_response)\n new_tags = [image['tag'] for image in import_response.json().get(\n 'status', {}).get('images', [])]\n logger.debug('tags after import: %r', new_tags)\n return True\n",
"role": "user"
},
{
"content": "import_image",
"role": "assistant"
}
] |
def x(self, name, stream_import, tags, repository, insecure):
imagestream_json = self.get_image_stream(name).json()
logger.debug('imagestream: %r', imagestream_json)
changed = False
if 'dockerImageRepository' in imagestream_json.get('spec', {}):
logger.debug("Removing 'dockerImageRepository' from ImageStream %s",
name)
imagestream_json['spec'].pop('dockerImageRepository')
changed = True
all_annotations = imagestream_json.get('metadata', {}).get('annotations',
{})
for annotation in (ANNOTATION_SOURCE_REPO, ANNOTATION_INSECURE_REPO):
if annotation in all_annotations:
imagestream_json['metadata']['annotations'].pop(annotation)
changed = True
if changed:
imagestream_json = self.update_image_stream(name, imagestream_json
).json()
oldtags = imagestream_json.get('status', {}).get('tags', [])
logger.debug('tags before import: %r', oldtags)
stream_import['metadata']['name'] = name
stream_import['spec']['images'] = []
tags_set = set(tags) if tags else set()
if not tags_set:
logger.debug('No tags to import')
return False
for tag in tags_set:
image_import = {'from': {'kind': 'DockerImage', 'name': '{}:{}'.
format(repository, tag)}, 'to': {'name': tag}, 'importPolicy':
{'insecure': insecure}}
stream_import['spec']['images'].append(image_import)
import_url = self._build_url('imagestreamimports/')
import_response = self._post(import_url, data=json.dumps(stream_import),
use_json=True)
self._check_import_image_response(import_response)
new_tags = [image['tag'] for image in import_response.json().get(
'status', {}).get('images', [])]
logger.debug('tags after import: %r', new_tags)
return True
|
import_image_tags
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, stream_import, tags, repository, insecure):\n imagestream_json = self.get_image_stream(name).json()\n logger.debug('imagestream: %r', imagestream_json)\n changed = False\n if 'dockerImageRepository' in imagestream_json.get('spec', {}):\n logger.debug(\"Removing 'dockerImageRepository' from ImageStream %s\",\n name)\n imagestream_json['spec'].pop('dockerImageRepository')\n changed = True\n all_annotations = imagestream_json.get('metadata', {}).get('annotations',\n {})\n for annotation in (ANNOTATION_SOURCE_REPO, ANNOTATION_INSECURE_REPO):\n if annotation in all_annotations:\n imagestream_json['metadata']['annotations'].pop(annotation)\n changed = True\n if changed:\n imagestream_json = self.update_image_stream(name, imagestream_json\n ).json()\n oldtags = imagestream_json.get('status', {}).get('tags', [])\n logger.debug('tags before import: %r', oldtags)\n stream_import['metadata']['name'] = name\n stream_import['spec']['images'] = []\n tags_set = set(tags) if tags else set()\n if not tags_set:\n logger.debug('No tags to import')\n return False\n for tag in tags_set:\n image_import = {'from': {'kind': 'DockerImage', 'name': '{}:{}'.\n format(repository, tag)}, 'to': {'name': tag}, 'importPolicy':\n {'insecure': insecure}}\n stream_import['spec']['images'].append(image_import)\n import_url = self._build_url('imagestreamimports/')\n import_response = self._post(import_url, data=json.dumps(stream_import),\n use_json=True)\n self._check_import_image_response(import_response)\n new_tags = [image['tag'] for image in import_response.json().get(\n 'status', {}).get('images', [])]\n logger.debug('tags after import: %r', new_tags)\n return True\n",
"role": "user"
},
{
"content": "import_image_tags",
"role": "assistant"
}
] |
def x(s):
if not PY3:
return str(s)
else:
if not isinstance(s, str):
return str(s, encoding='utf-8')
return s
|
str_on_2_unicode_on_3
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(s):\n if not PY3:\n return str(s)\n else:\n if not isinstance(s, str):\n return str(s, encoding='utf-8')\n return s\n",
"role": "user"
},
{
"content": "str_on_2_unicode_on_3",
"role": "assistant"
}
] |
def x(self):
formatter = JsonLinesTableFormatter(self.load_dict())
formatter.accept(self)
return formatter.to_table_data()
|
load
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n formatter = JsonLinesTableFormatter(self.load_dict())\n formatter.accept(self)\n return formatter.to_table_data()\n",
"role": "user"
},
{
"content": "load",
"role": "assistant"
}
] |
def x(self):
import gspread
from oauth2client.service_account import ServiceAccountCredentials
self._validate_table_name()
self._validate_title()
scope = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
credentials = ServiceAccountCredentials.from_json_keyfile_name(self.
source, scope)
gc = gspread.authorize(credentials)
try:
for worksheet in gc.open(self.title).worksheets():
self._worksheet = worksheet
self.__all_values = [row for row in worksheet.get_all_values()]
if self._is_empty_sheet():
continue
try:
self.__strip_empty_col()
except ValueError:
continue
value_matrix = self.__all_values[self._get_start_row_idx():]
try:
headers = value_matrix[0]
rows = value_matrix[1:]
except IndexError:
continue
self.inc_table_count()
yield TableData(self.make_table_name(), headers, rows,
dp_extractor=self.dp_extractor, type_hints=self.
_extract_type_hints(headers))
except gspread.exceptions.SpreadsheetNotFound:
raise OpenError("spreadsheet '{}' not found".format(self.title))
except gspread.exceptions.APIError as e:
raise APIError(e)
|
load
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n import gspread\n from oauth2client.service_account import ServiceAccountCredentials\n self._validate_table_name()\n self._validate_title()\n scope = ['https://spreadsheets.google.com/feeds',\n 'https://www.googleapis.com/auth/drive']\n credentials = ServiceAccountCredentials.from_json_keyfile_name(self.\n source, scope)\n gc = gspread.authorize(credentials)\n try:\n for worksheet in gc.open(self.title).worksheets():\n self._worksheet = worksheet\n self.__all_values = [row for row in worksheet.get_all_values()]\n if self._is_empty_sheet():\n continue\n try:\n self.__strip_empty_col()\n except ValueError:\n continue\n value_matrix = self.__all_values[self._get_start_row_idx():]\n try:\n headers = value_matrix[0]\n rows = value_matrix[1:]\n except IndexError:\n continue\n self.inc_table_count()\n yield TableData(self.make_table_name(), headers, rows,\n dp_extractor=self.dp_extractor, type_hints=self.\n _extract_type_hints(headers))\n except gspread.exceptions.SpreadsheetNotFound:\n raise OpenError(\"spreadsheet '{}' not found\".format(self.title))\n except gspread.exceptions.APIError as e:\n raise APIError(e)\n",
"role": "user"
},
{
"content": "load",
"role": "assistant"
}
] |
def x(log_level):
if not LOGBOOK_INSTALLED:
return
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
dataproperty.set_log_level(log_level)
try:
import simplesqlite
simplesqlite.set_log_level(log_level)
except ImportError:
pass
|
set_log_level
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(log_level):\n if not LOGBOOK_INSTALLED:\n return\n logbook.get_level_name(log_level)\n if log_level == logger.level:\n return\n if log_level == logbook.NOTSET:\n set_logger(is_enable=False)\n else:\n set_logger(is_enable=True)\n logger.level = log_level\n dataproperty.set_log_level(log_level)\n try:\n import simplesqlite\n simplesqlite.set_log_level(log_level)\n except ImportError:\n pass\n",
"role": "user"
},
{
"content": "set_log_level",
"role": "assistant"
}
] |
def x(orig, new, remove_nonexistent_keys=False):
if isinstance(orig, dict) and isinstance(new, dict):
clean_triggers(orig, new)
if remove_nonexistent_keys:
missing = set(orig.keys()) - set(new.keys())
for k in missing:
orig.pop(k)
for k, v in new.items():
if k == 'strategy':
remove_nonexistent_keys = True
if isinstance(orig.get(k), dict) and isinstance(v, dict):
buildconfig_update(orig[k], v, remove_nonexistent_keys)
else:
orig[k] = v
|
buildconfig_update
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(orig, new, remove_nonexistent_keys=False):\n if isinstance(orig, dict) and isinstance(new, dict):\n clean_triggers(orig, new)\n if remove_nonexistent_keys:\n missing = set(orig.keys()) - set(new.keys())\n for k in missing:\n orig.pop(k)\n for k, v in new.items():\n if k == 'strategy':\n remove_nonexistent_keys = True\n if isinstance(orig.get(k), dict) and isinstance(v, dict):\n buildconfig_update(orig[k], v, remove_nonexistent_keys)\n else:\n orig[k] = v\n",
"role": "user"
},
{
"content": "buildconfig_update",
"role": "assistant"
}
] |
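The `buildconfig_update` entry above merges one nested build-config dict into another in place, and switches to pruning stale keys once it descends under the 'strategy' key. A minimal sketch of the plain recursive in-place merge it builds on, with illustrative names and data that are not part of the dataset:

def recursive_update(orig, new, remove_missing=False):
    # Merge 'new' into 'orig' in place; optionally drop keys absent from 'new'.
    if remove_missing:
        for key in set(orig) - set(new):
            orig.pop(key)
    for key, value in new.items():
        if isinstance(orig.get(key), dict) and isinstance(value, dict):
            recursive_update(orig[key], value, remove_missing)
        else:
            orig[key] = value

cfg = {'spec': {'strategy': {'type': 'Custom'}, 'output': {'to': 'registry'}}}
recursive_update(cfg, {'spec': {'strategy': {'type': 'Docker'}}})
print(cfg['spec']['strategy'])   # {'type': 'Docker'}
print('output' in cfg['spec'])   # True -- untouched keys are kept by default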
def x(git_url, target_dir=None, commit=None, retry_times=GIT_MAX_RETRIES,
branch=None, depth=None):
tmpdir = tempfile.mkdtemp()
target_dir = target_dir or os.path.join(tmpdir, 'repo')
try:
yield clone_git_repo(git_url, target_dir, commit, retry_times,
branch, depth)
finally:
shutil.rmtree(tmpdir)
|
checkout_git_repo
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(git_url, target_dir=None, commit=None, retry_times=GIT_MAX_RETRIES,\n branch=None, depth=None):\n tmpdir = tempfile.mkdtemp()\n target_dir = target_dir or os.path.join(tmpdir, 'repo')\n try:\n yield clone_git_repo(git_url, target_dir, commit, retry_times,\n branch, depth)\n finally:\n shutil.rmtree(tmpdir)\n",
"role": "user"
},
{
"content": "checkout_git_repo",
"role": "assistant"
}
] |
def x(git_url, target_dir=None, commit=None, retry_times=GIT_MAX_RETRIES,
branch=None, depth=None):
retry_delay = GIT_BACKOFF_FACTOR
target_dir = target_dir or os.path.join(tempfile.mkdtemp(), 'repo')
commit = commit or 'master'
logger.info("cloning git repo '%s'", git_url)
logger.debug("url = '%s', dir = '%s', commit = '%s'", git_url,
target_dir, commit)
cmd = ['git', 'clone']
if branch:
cmd += ['-b', branch, '--single-branch']
if depth:
cmd += ['--depth', str(depth)]
elif depth:
logger.warning('branch not provided for %s, depth setting ignored',
git_url)
depth = None
cmd += [git_url, target_dir]
logger.debug("cloning '%s'", cmd)
repo_commit = ''
repo_depth = None
for counter in range(retry_times + 1):
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
repo_commit, repo_depth = reset_git_repo(target_dir, commit, depth)
break
except subprocess.CalledProcessError as exc:
if counter != retry_times:
logger.info("retrying command '%s':\n '%s'", cmd, exc.output)
time.sleep(retry_delay * 2 ** counter)
else:
raise OsbsException(
"Unable to clone git repo '%s' branch '%s'" % (git_url,
branch), cause=exc, traceback=sys.exc_info()[2])
return ClonedRepoData(target_dir, repo_commit, repo_depth)
|
clone_git_repo
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(git_url, target_dir=None, commit=None, retry_times=GIT_MAX_RETRIES,\n branch=None, depth=None):\n retry_delay = GIT_BACKOFF_FACTOR\n target_dir = target_dir or os.path.join(tempfile.mkdtemp(), 'repo')\n commit = commit or 'master'\n logger.info(\"cloning git repo '%s'\", git_url)\n logger.debug(\"url = '%s', dir = '%s', commit = '%s'\", git_url,\n target_dir, commit)\n cmd = ['git', 'clone']\n if branch:\n cmd += ['-b', branch, '--single-branch']\n if depth:\n cmd += ['--depth', str(depth)]\n elif depth:\n logger.warning('branch not provided for %s, depth setting ignored',\n git_url)\n depth = None\n cmd += [git_url, target_dir]\n logger.debug(\"cloning '%s'\", cmd)\n repo_commit = ''\n repo_depth = None\n for counter in range(retry_times + 1):\n try:\n subprocess.check_output(cmd, stderr=subprocess.STDOUT)\n repo_commit, repo_depth = reset_git_repo(target_dir, commit, depth)\n break\n except subprocess.CalledProcessError as exc:\n if counter != retry_times:\n logger.info(\"retrying command '%s':\\n '%s'\", cmd, exc.output)\n time.sleep(retry_delay * 2 ** counter)\n else:\n raise OsbsException(\n \"Unable to clone git repo '%s' branch '%s'\" % (git_url,\n branch), cause=exc, traceback=sys.exc_info()[2])\n return ClonedRepoData(target_dir, repo_commit, repo_depth)\n",
"role": "user"
},
{
"content": "clone_git_repo",
"role": "assistant"
}
] |
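The `clone_git_repo` entry above retries the clone with exponentially growing delays (`retry_delay * 2 ** counter`) before raising. A generic sketch of that retry-with-backoff pattern, assuming a caller-supplied `action` callable (hypothetical, not taken from the dataset):

import time


def retry_with_backoff(action, retries=3, base_delay=1.0):
    # Call 'action' up to retries + 1 times, sleeping base_delay * 2**attempt between failures.
    for attempt in range(retries + 1):
        try:
            return action()
        except Exception:
            if attempt == retries:
                raise
            time.sleep(base_delay * 2 ** attempt)

# e.g. retry_with_backoff(lambda: subprocess.check_output(cmd), retries=GIT_MAX_RETRIES)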
def x(target_dir, git_reference, retry_depth=None):
deepen = retry_depth or 0
base_commit_depth = 0
for _ in range(GIT_FETCH_RETRY):
try:
if not deepen:
cmd = ['git', 'rev-list', '--count', git_reference]
base_commit_depth = int(subprocess.check_output(cmd, cwd=
target_dir)) - 1
cmd = ['git', 'reset', '--hard', git_reference]
logger.debug("Resetting current HEAD: '%s'", cmd)
subprocess.check_call(cmd, cwd=target_dir)
break
except subprocess.CalledProcessError:
if not deepen:
raise OsbsException('cannot find commit %s in repo %s' % (
git_reference, target_dir))
deepen *= 2
cmd = ['git', 'fetch', '--depth', str(deepen)]
subprocess.check_call(cmd, cwd=target_dir)
logger.debug("Couldn't find commit %s, increasing depth with '%s'",
git_reference, cmd)
else:
raise OsbsException('cannot find commit %s in repo %s' % (
git_reference, target_dir))
cmd = ['git', 'rev-parse', 'HEAD']
logger.debug("getting SHA-1 of provided ref '%s'", git_reference)
commit_id = subprocess.check_output(cmd, cwd=target_dir,
universal_newlines=True)
commit_id = commit_id.strip()
logger.info('commit ID = %s', commit_id)
final_commit_depth = None
if not deepen:
cmd = ['git', 'rev-list', '--count', 'HEAD']
final_commit_depth = int(subprocess.check_output(cmd, cwd=target_dir)
) - base_commit_depth
return commit_id, final_commit_depth
|
reset_git_repo
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(target_dir, git_reference, retry_depth=None):\n deepen = retry_depth or 0\n base_commit_depth = 0\n for _ in range(GIT_FETCH_RETRY):\n try:\n if not deepen:\n cmd = ['git', 'rev-list', '--count', git_reference]\n base_commit_depth = int(subprocess.check_output(cmd, cwd=\n target_dir)) - 1\n cmd = ['git', 'reset', '--hard', git_reference]\n logger.debug(\"Resetting current HEAD: '%s'\", cmd)\n subprocess.check_call(cmd, cwd=target_dir)\n break\n except subprocess.CalledProcessError:\n if not deepen:\n raise OsbsException('cannot find commit %s in repo %s' % (\n git_reference, target_dir))\n deepen *= 2\n cmd = ['git', 'fetch', '--depth', str(deepen)]\n subprocess.check_call(cmd, cwd=target_dir)\n logger.debug(\"Couldn't find commit %s, increasing depth with '%s'\",\n git_reference, cmd)\n else:\n raise OsbsException('cannot find commit %s in repo %s' % (\n git_reference, target_dir))\n cmd = ['git', 'rev-parse', 'HEAD']\n logger.debug(\"getting SHA-1 of provided ref '%s'\", git_reference)\n commit_id = subprocess.check_output(cmd, cwd=target_dir,\n universal_newlines=True)\n commit_id = commit_id.strip()\n logger.info('commit ID = %s', commit_id)\n final_commit_depth = None\n if not deepen:\n cmd = ['git', 'rev-list', '--count', 'HEAD']\n final_commit_depth = int(subprocess.check_output(cmd, cwd=target_dir)\n ) - base_commit_depth\n return commit_id, final_commit_depth\n",
"role": "user"
},
{
"content": "reset_git_repo",
"role": "assistant"
}
] |
def x(image):
ret = image
ret = strip_registry_from_image(image)
ret = ret.replace('/', '-')
if ret.find(':') == -1:
ret += ':latest'
return ret
|
get_imagestreamtag_from_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(image):\n ret = image\n ret = strip_registry_from_image(image)\n ret = ret.replace('/', '-')\n if ret.find(':') == -1:\n ret += ':latest'\n return ret\n",
"role": "user"
},
{
"content": "get_imagestreamtag_from_image",
"role": "assistant"
}
] |
def x(rfc3339):
try:
dt = dateutil.parser.parse(rfc3339, ignoretz=False)
return dt.timestamp()
except NameError:
time_tuple = strptime(rfc3339, '%Y-%m-%dT%H:%M:%SZ')
return timegm(time_tuple)
|
get_time_from_rfc3339
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(rfc3339):\n try:\n dt = dateutil.parser.parse(rfc3339, ignoretz=False)\n return dt.timestamp()\n except NameError:\n time_tuple = strptime(rfc3339, '%Y-%m-%dT%H:%M:%SZ')\n return timegm(time_tuple)\n",
"role": "user"
},
{
"content": "get_time_from_rfc3339",
"role": "assistant"
}
] |
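The `get_time_from_rfc3339` entry above prefers `dateutil` and falls back to `time.strptime` plus `calendar.timegm` when the import is missing. A quick sketch showing that both branches yield the same epoch value for a sample timestamp (the sample string is illustrative):

from calendar import timegm
from time import strptime

import dateutil.parser

stamp = '2019-01-01T12:00:00Z'
print(dateutil.parser.parse(stamp, ignoretz=False).timestamp())   # 1546344000.0
print(timegm(strptime(stamp, '%Y-%m-%dT%H:%M:%SZ')))              # 1546344000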
def x(str1, str2='', limit=LABEL_MAX_CHARS, separator='-', label=True):
filter_chars = (VALID_LABEL_CHARS if label else
VALID_BUILD_CONFIG_NAME_CHARS)
str1_san = ''.join(filter(filter_chars.match, list(str1)))
str2_san = ''.join(filter(filter_chars.match, list(str2)))
str1_chars = []
str2_chars = []
groups = (str1_san, str1_chars), (str2_san, str2_chars)
size = len(separator)
limit = min(limit, LABEL_MAX_CHARS)
for i in range(max(len(str1_san), len(str2_san))):
for group, group_chars in groups:
if i < len(group):
group_chars.append(group[i])
size += 1
if size >= limit:
break
else:
continue
break
final_str1 = ''.join(str1_chars).strip(separator)
final_str2 = ''.join(str2_chars).strip(separator)
return separator.join(filter(None, (final_str1, final_str2)))
|
sanitize_strings_for_openshift
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(str1, str2='', limit=LABEL_MAX_CHARS, separator='-', label=True):\n filter_chars = (VALID_LABEL_CHARS if label else\n VALID_BUILD_CONFIG_NAME_CHARS)\n str1_san = ''.join(filter(filter_chars.match, list(str1)))\n str2_san = ''.join(filter(filter_chars.match, list(str2)))\n str1_chars = []\n str2_chars = []\n groups = (str1_san, str1_chars), (str2_san, str2_chars)\n size = len(separator)\n limit = min(limit, LABEL_MAX_CHARS)\n for i in range(max(len(str1_san), len(str2_san))):\n for group, group_chars in groups:\n if i < len(group):\n group_chars.append(group[i])\n size += 1\n if size >= limit:\n break\n else:\n continue\n break\n final_str1 = ''.join(str1_chars).strip(separator)\n final_str2 = ''.join(str2_chars).strip(separator)\n return separator.join(filter(None, (final_str1, final_str2)))\n",
"role": "user"
},
{
"content": "sanitize_strings_for_openshift",
"role": "assistant"
}
] |
def x(repo, branch, limit=53, separator='-', hash_size=5):
branch = branch or 'unknown'
full = urlparse(repo).path.lstrip('/') + branch
repo = git_repo_humanish_part_from_uri(repo)
shaval = sha256(full.encode('utf-8')).hexdigest()
hash_str = shaval[:hash_size]
limit = limit - len(hash_str) - 1
sanitized = sanitize_strings_for_openshift(repo, branch, limit,
separator, False)
return separator.join(filter(None, (sanitized, hash_str)))
|
make_name_from_git
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(repo, branch, limit=53, separator='-', hash_size=5):\n branch = branch or 'unknown'\n full = urlparse(repo).path.lstrip('/') + branch\n repo = git_repo_humanish_part_from_uri(repo)\n shaval = sha256(full.encode('utf-8')).hexdigest()\n hash_str = shaval[:hash_size]\n limit = limit - len(hash_str) - 1\n sanitized = sanitize_strings_for_openshift(repo, branch, limit,\n separator, False)\n return separator.join(filter(None, (sanitized, hash_str)))\n",
"role": "user"
},
{
"content": "make_name_from_git",
"role": "assistant"
}
] |
def x(prefix, suffix, *args, **kwargs):
prefix = ''.join(filter(VALID_BUILD_CONFIG_NAME_CHARS.match, list(prefix)))
suffix = ''.join(filter(VALID_BUILD_CONFIG_NAME_CHARS.match, list(suffix)))
kwargs['limit'] = kwargs.get('limit', 64) - len(prefix) - len(suffix) - 2
name_from_git = make_name_from_git(*args, **kwargs)
return '-'.join([prefix, name_from_git, suffix])
|
wrap_name_from_git
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(prefix, suffix, *args, **kwargs):\n prefix = ''.join(filter(VALID_BUILD_CONFIG_NAME_CHARS.match, list(prefix)))\n suffix = ''.join(filter(VALID_BUILD_CONFIG_NAME_CHARS.match, list(suffix)))\n kwargs['limit'] = kwargs.get('limit', 64) - len(prefix) - len(suffix) - 2\n name_from_git = make_name_from_git(*args, **kwargs)\n return '-'.join([prefix, name_from_git, suffix])\n",
"role": "user"
},
{
"content": "wrap_name_from_git",
"role": "assistant"
}
] |
def x(version):
if hasattr(version, 'base_version'):
if version.base_version:
parts = version.base_version.split('.')
else:
parts = []
else:
parts = []
for part in version:
if part.startswith('*'):
break
parts.append(part)
parts = [int(p) for p in parts]
if len(parts) < 3:
parts += [0] * (3 - len(parts))
major, minor, micro = parts[:3]
cleaned_version = '{}.{}.{}'.format(major, minor, micro)
return cleaned_version
|
sanitize_version
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(version):\n if hasattr(version, 'base_version'):\n if version.base_version:\n parts = version.base_version.split('.')\n else:\n parts = []\n else:\n parts = []\n for part in version:\n if part.startswith('*'):\n break\n parts.append(part)\n parts = [int(p) for p in parts]\n if len(parts) < 3:\n parts += [0] * (3 - len(parts))\n major, minor, micro = parts[:3]\n cleaned_version = '{}.{}.{}'.format(major, minor, micro)\n return cleaned_version\n",
"role": "user"
},
{
"content": "sanitize_version",
"role": "assistant"
}
] |
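The `sanitize_version` entry above reduces a parsed version object to a clean `major.minor.micro` string, zero-padding any missing components. The padding step in isolation (an illustrative helper, not part of the dataset):

def pad_version(parts, width=3):
    # Keep up to 'width' numeric components and zero-pad the rest.
    nums = [int(p) for p in parts][:width]
    nums += [0] * (width - len(nums))
    return '.'.join(str(n) for n in nums)

print(pad_version(['3', '11']))      # 3.11.0
print(pad_version(['1', '2', '3']))  # 1.2.3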
def x(self, label_type):
if label_type in self._label_values:
return self._label_values[label_type][0]
else:
return Labels.LABEL_NAMES[label_type][0]
|
get_name
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, label_type):\n if label_type in self._label_values:\n return self._label_values[label_type][0]\n else:\n return Labels.LABEL_NAMES[label_type][0]\n",
"role": "user"
},
{
"content": "get_name",
"role": "assistant"
}
] |
def x():
newdict = {}
for label_type, label_names in Labels.LABEL_NAMES.items():
for oldname in label_names[1:]:
newdict[oldname] = Labels.LABEL_NAMES[label_type][0]
return newdict
|
get_new_names_by_old
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n newdict = {}\n for label_type, label_names in Labels.LABEL_NAMES.items():\n for oldname in label_names[1:]:\n newdict[oldname] = Labels.LABEL_NAMES[label_type][0]\n return newdict\n",
"role": "user"
},
{
"content": "get_new_names_by_old",
"role": "assistant"
}
] |
def x(self, label_type):
if label_type in self._label_values:
return self._label_values[label_type]
else:
return label_type, self._df_labels[label_type]
|
get_name_and_value
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, label_type):\n if label_type in self._label_values:\n return self._label_values[label_type]\n else:\n return label_type, self._df_labels[label_type]\n",
"role": "user"
},
{
"content": "get_name_and_value",
"role": "assistant"
}
] |
def x(principal, keytab_file, ccache_file=None):
tgt_valid = False
env = {'LC_ALL': 'C'}
if ccache_file:
env['KRB5CCNAME'] = ccache_file
rc, klist, _ = run(['klist'], extraenv=env)
if rc == 0:
for line in klist.splitlines():
m = re.match(KLIST_TGT_RE, line)
if m:
year = m.group('year')
if len(year) == 2:
year = '20' + year
expires = datetime.datetime(int(year), int(m.group('month')
), int(m.group('day')), int(m.group('hour')), int(m.
group('minute')), int(m.group('second')))
if expires - datetime.datetime.now() > datetime.timedelta(hours
=1):
logger.debug('Valid TGT found, not renewing')
tgt_valid = True
break
if not tgt_valid:
logger.debug('Retrieving kerberos TGT')
rc, out, err = run(['kinit', '-k', '-t', keytab_file, principal],
extraenv=env)
if rc != 0:
raise OsbsException(
'kinit returned %s:\nstdout: %s\nstderr: %s' % (rc, out, err))
if ccache_file:
os.environ['KRB5CCNAME'] = ccache_file
|
kerberos_ccache_init
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(principal, keytab_file, ccache_file=None):\n tgt_valid = False\n env = {'LC_ALL': 'C'}\n if ccache_file:\n env['KRB5CCNAME'] = ccache_file\n rc, klist, _ = run(['klist'], extraenv=env)\n if rc == 0:\n for line in klist.splitlines():\n m = re.match(KLIST_TGT_RE, line)\n if m:\n year = m.group('year')\n if len(year) == 2:\n year = '20' + year\n expires = datetime.datetime(int(year), int(m.group('month')\n ), int(m.group('day')), int(m.group('hour')), int(m.\n group('minute')), int(m.group('second')))\n if expires - datetime.datetime.now() > datetime.timedelta(hours\n =1):\n logger.debug('Valid TGT found, not renewing')\n tgt_valid = True\n break\n if not tgt_valid:\n logger.debug('Retrieving kerberos TGT')\n rc, out, err = run(['kinit', '-k', '-t', keytab_file, principal],\n extraenv=env)\n if rc != 0:\n raise OsbsException(\n 'kinit returned %s:\\nstdout: %s\\nstderr: %s' % (rc, out, err))\n if ccache_file:\n os.environ['KRB5CCNAME'] = ccache_file\n",
"role": "user"
},
{
"content": "kerberos_ccache_init",
"role": "assistant"
}
] |
def x(self):
self._validate()
formatter = SqliteTableFormatter(self.source)
formatter.accept(self)
return formatter.to_table_data()
|
load
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._validate()\n formatter = SqliteTableFormatter(self.source)\n formatter.accept(self)\n return formatter.to_table_data()\n",
"role": "user"
},
{
"content": "load",
"role": "assistant"
}
] |
def x(self):
data = graceful_chain_get(self.json, 'data')
if data is None:
return {}
data_dict = {}
for key in data:
if self.is_yaml(key):
data_dict[key] = yaml.load(data[key])
else:
data_dict[key] = json.loads(data[key])
return data_dict
|
get_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n data = graceful_chain_get(self.json, 'data')\n if data is None:\n return {}\n data_dict = {}\n for key in data:\n if self.is_yaml(key):\n data_dict[key] = yaml.load(data[key])\n else:\n data_dict[key] = json.loads(data[key])\n return data_dict\n",
"role": "user"
},
{
"content": "get_data",
"role": "assistant"
}
] |
def x(self, name):
data = graceful_chain_get(self.json, 'data')
if data is None or name not in data:
return {}
if self.is_yaml(name):
return yaml.load(data[name]) or {}
return json.loads(data[name])
|
get_data_by_key
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name):\n data = graceful_chain_get(self.json, 'data')\n if data is None or name not in data:\n return {}\n if self.is_yaml(name):\n return yaml.load(data[name]) or {}\n return json.loads(data[name])\n",
"role": "user"
},
{
"content": "get_data_by_key",
"role": "assistant"
}
] |
def x(self):
self._validate_source_data()
header_list = []
for json_record in self._buffer:
for key in json_record:
if key not in header_list:
header_list.append(key)
self._loader.inc_table_count()
yield TableData(self._make_table_name(), header_list, self._buffer,
dp_extractor=self._loader.dp_extractor, type_hints=self.
_extract_type_hints(header_list))
|
to_table_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._validate_source_data()\n header_list = []\n for json_record in self._buffer:\n for key in json_record:\n if key not in header_list:\n header_list.append(key)\n self._loader.inc_table_count()\n yield TableData(self._make_table_name(), header_list, self._buffer,\n dp_extractor=self._loader.dp_extractor, type_hints=self.\n _extract_type_hints(header_list))\n",
"role": "user"
},
{
"content": "to_table_data",
"role": "assistant"
}
] |
def x(self, field_selector=None, koji_task_id=None, running=None, labels=None):
if running:
running_fs = ','.join(['status!={status}'.format(status=status.
capitalize()) for status in BUILD_FINISHED_STATES])
if not field_selector:
field_selector = running_fs
else:
field_selector = ','.join([field_selector, running_fs])
response = self.os.list_builds(field_selector=field_selector,
koji_task_id=koji_task_id, labels=labels)
serialized_response = response.json()
build_list = []
for build in serialized_response['items']:
build_list.append(BuildResponse(build, self))
return build_list
|
list_builds
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, field_selector=None, koji_task_id=None, running=None, labels=None):\n if running:\n running_fs = ','.join(['status!={status}'.format(status=status.\n capitalize()) for status in BUILD_FINISHED_STATES])\n if not field_selector:\n field_selector = running_fs\n else:\n field_selector = ','.join([field_selector, running_fs])\n response = self.os.list_builds(field_selector=field_selector,\n koji_task_id=koji_task_id, labels=labels)\n serialized_response = response.json()\n build_list = []\n for build in serialized_response['items']:\n build_list.append(BuildResponse(build, self))\n return build_list\n",
"role": "user"
},
{
"content": "list_builds",
"role": "assistant"
}
] |
def x(self, build_id):
pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)
serialized_response = pods.json()
pod_list = [PodResponse(pod) for pod in serialized_response['items']]
if not pod_list:
raise OsbsException('No pod for build')
elif len(pod_list) != 1:
raise OsbsException('Only one pod expected but %d returned', len(
pod_list))
return pod_list[0]
|
get_pod_for_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_id):\n pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)\n serialized_response = pods.json()\n pod_list = [PodResponse(pod) for pod in serialized_response['items']]\n if not pod_list:\n raise OsbsException('No pod for build')\n elif len(pod_list) != 1:\n raise OsbsException('Only one pod expected but %d returned', len(\n pod_list))\n return pod_list[0]\n",
"role": "user"
},
{
"content": "get_pod_for_build",
"role": "assistant"
}
] |
def x(self, build_type=None, inner_template=None, outer_template=None,
customize_conf=None, arrangement_version=DEFAULT_ARRANGEMENT_VERSION):
if build_type is not None:
warnings.warn(
'build types are deprecated, do not use the build_type argument')
validate_arrangement_version(arrangement_version)
if (not arrangement_version or arrangement_version <
REACTOR_CONFIG_ARRANGEMENT_VERSION):
build_request = BuildRequest(build_json_store=self.os_conf.
get_build_json_store(), inner_template=inner_template,
outer_template=outer_template, customize_conf=customize_conf)
else:
build_request = BuildRequestV2(build_json_store=self.os_conf.
get_build_json_store(), outer_template=outer_template,
customize_conf=customize_conf)
cpu_limit = self.build_conf.get_cpu_limit()
memory_limit = self.build_conf.get_memory_limit()
storage_limit = self.build_conf.get_storage_limit()
if (cpu_limit is not None or memory_limit is not None or storage_limit
is not None):
build_request.set_resource_limits(cpu=cpu_limit, memory=
memory_limit, storage=storage_limit)
return build_request
|
get_build_request
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_type=None, inner_template=None, outer_template=None,\n customize_conf=None, arrangement_version=DEFAULT_ARRANGEMENT_VERSION):\n if build_type is not None:\n warnings.warn(\n 'build types are deprecated, do not use the build_type argument')\n validate_arrangement_version(arrangement_version)\n if (not arrangement_version or arrangement_version <\n REACTOR_CONFIG_ARRANGEMENT_VERSION):\n build_request = BuildRequest(build_json_store=self.os_conf.\n get_build_json_store(), inner_template=inner_template,\n outer_template=outer_template, customize_conf=customize_conf)\n else:\n build_request = BuildRequestV2(build_json_store=self.os_conf.\n get_build_json_store(), outer_template=outer_template,\n customize_conf=customize_conf)\n cpu_limit = self.build_conf.get_cpu_limit()\n memory_limit = self.build_conf.get_memory_limit()\n storage_limit = self.build_conf.get_storage_limit()\n if (cpu_limit is not None or memory_limit is not None or storage_limit\n is not None):\n build_request.set_resource_limits(cpu=cpu_limit, memory=\n memory_limit, storage=storage_limit)\n return build_request\n",
"role": "user"
},
{
"content": "get_build_request",
"role": "assistant"
}
] |
def x(self, build_request):
build_request.set_openshift_required_version(self.os_conf.
get_openshift_required_version())
build = build_request.render()
response = self.os.create_build(json.dumps(build))
build_response = BuildResponse(response.json(), self)
return build_response
|
create_build_from_buildrequest
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_request):\n build_request.set_openshift_required_version(self.os_conf.\n get_openshift_required_version())\n build = build_request.render()\n response = self.os.create_build(json.dumps(build))\n build_response = BuildResponse(response.json(), self)\n return build_response\n",
"role": "user"
},
{
"content": "create_build_from_buildrequest",
"role": "assistant"
}
] |
def x(self, build_config):
bc_labels = build_config['metadata']['labels']
git_labels = {'label_selectors': [(key, bc_labels[key]) for key in self
._GIT_LABEL_KEYS]}
old_labels_kwargs = {'label_selectors': [(key, bc_labels[key]) for key in
self._OLD_LABEL_KEYS], 'filter_key': FILTER_KEY, 'filter_value':
graceful_chain_get(build_config, *FILTER_KEY.split('.'))}
name = {'build_config_id': build_config['metadata']['name']}
queries = (self.os.get_build_config_by_labels, git_labels), (self.os.
get_build_config_by_labels_filtered, old_labels_kwargs), (self.os.
get_build_config, name)
existing_bc = None
for func, kwargs in queries:
try:
existing_bc = func(**kwargs)
break
except OsbsException as exc:
logger.info('Build config NOT found via %s: %s', func.__name__,
str(exc))
continue
return existing_bc
|
get_existing_build_config
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_config):\n bc_labels = build_config['metadata']['labels']\n git_labels = {'label_selectors': [(key, bc_labels[key]) for key in self\n ._GIT_LABEL_KEYS]}\n old_labels_kwargs = {'label_selectors': [(key, bc_labels[key]) for key in\n self._OLD_LABEL_KEYS], 'filter_key': FILTER_KEY, 'filter_value':\n graceful_chain_get(build_config, *FILTER_KEY.split('.'))}\n name = {'build_config_id': build_config['metadata']['name']}\n queries = (self.os.get_build_config_by_labels, git_labels), (self.os.\n get_build_config_by_labels_filtered, old_labels_kwargs), (self.os.\n get_build_config, name)\n existing_bc = None\n for func, kwargs in queries:\n try:\n existing_bc = func(**kwargs)\n break\n except OsbsException as exc:\n logger.info('Build config NOT found via %s: %s', func.__name__,\n str(exc))\n continue\n return existing_bc\n",
"role": "user"
},
{
"content": "get_existing_build_config",
"role": "assistant"
}
] |
def x(self, build_request):
image_stream = None
image_stream_tag_name = None
if build_request.has_ist_trigger():
image_stream_tag_id = build_request.trigger_imagestreamtag
image_stream_id, image_stream_tag_name = image_stream_tag_id.split(':')
try:
image_stream = self.get_image_stream(image_stream_id).json()
except OsbsResponseException as x:
if x.status_code != 404:
raise
if image_stream:
try:
self.get_image_stream_tag(image_stream_tag_id).json()
except OsbsResponseException as x:
if x.status_code != 404:
raise
return image_stream, image_stream_tag_name
|
get_image_stream_info_for_build_request
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_request):\n image_stream = None\n image_stream_tag_name = None\n if build_request.has_ist_trigger():\n image_stream_tag_id = build_request.trigger_imagestreamtag\n image_stream_id, image_stream_tag_name = image_stream_tag_id.split(':')\n try:\n image_stream = self.get_image_stream(image_stream_id).json()\n except OsbsResponseException as x:\n if x.status_code != 404:\n raise\n if image_stream:\n try:\n self.get_image_stream_tag(image_stream_tag_id).json()\n except OsbsResponseException as x:\n if x.status_code != 404:\n raise\n return image_stream, image_stream_tag_name\n",
"role": "user"
},
{
"content": "get_image_stream_info_for_build_request",
"role": "assistant"
}
] |
def x(self, *args, **kwargs):
logger.warning(
'prod (all-in-one) builds are deprecated, please use create_orchestrator_build (support will be removed in version 0.54)'
)
return self._do_create_prod_build(*args, **kwargs)
|
create_prod_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, *args, **kwargs):\n logger.warning(\n 'prod (all-in-one) builds are deprecated, please use create_orchestrator_build (support will be removed in version 0.54)'\n )\n return self._do_create_prod_build(*args, **kwargs)\n",
"role": "user"
},
{
"content": "create_prod_build",
"role": "assistant"
}
] |
def x(self, **kwargs):
missing = set()
for required in ('platform', 'release', 'arrangement_version'):
if not kwargs.get(required):
missing.add(required)
if missing:
raise ValueError('Worker build missing required parameters: %s' %
missing)
if kwargs.get('platforms'):
raise ValueError('Worker build called with unwanted platforms param')
arrangement_version = kwargs['arrangement_version']
kwargs.setdefault('inner_template', WORKER_INNER_TEMPLATE.format(
arrangement_version=arrangement_version))
kwargs.setdefault('outer_template', WORKER_OUTER_TEMPLATE)
kwargs.setdefault('customize_conf', WORKER_CUSTOMIZE_CONF)
kwargs['build_type'] = BUILD_TYPE_WORKER
try:
return self._do_create_prod_build(**kwargs)
except IOError as ex:
if os.path.basename(ex.filename) == kwargs['inner_template']:
raise OsbsValidationException(
'worker invalid arrangement_version %s' % arrangement_version)
raise
|
create_worker_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, **kwargs):\n missing = set()\n for required in ('platform', 'release', 'arrangement_version'):\n if not kwargs.get(required):\n missing.add(required)\n if missing:\n raise ValueError('Worker build missing required parameters: %s' %\n missing)\n if kwargs.get('platforms'):\n raise ValueError('Worker build called with unwanted platforms param')\n arrangement_version = kwargs['arrangement_version']\n kwargs.setdefault('inner_template', WORKER_INNER_TEMPLATE.format(\n arrangement_version=arrangement_version))\n kwargs.setdefault('outer_template', WORKER_OUTER_TEMPLATE)\n kwargs.setdefault('customize_conf', WORKER_CUSTOMIZE_CONF)\n kwargs['build_type'] = BUILD_TYPE_WORKER\n try:\n return self._do_create_prod_build(**kwargs)\n except IOError as ex:\n if os.path.basename(ex.filename) == kwargs['inner_template']:\n raise OsbsValidationException(\n 'worker invalid arrangement_version %s' % arrangement_version)\n raise\n",
"role": "user"
},
{
"content": "create_worker_build",
"role": "assistant"
}
] |
def x(self, **kwargs):
if not self.can_orchestrate():
raise OsbsOrchestratorNotEnabled(
"can't create orchestrate build when can_orchestrate isn't enabled"
)
extra = [x for x in ('platform',) if kwargs.get(x)]
if extra:
raise ValueError(
'Orchestrator build called with unwanted parameters: %s' % extra)
arrangement_version = kwargs.setdefault('arrangement_version', self.
build_conf.get_arrangement_version())
if (arrangement_version < REACTOR_CONFIG_ARRANGEMENT_VERSION and not
kwargs.get('platforms')):
raise ValueError('Orchestrator build requires platforms param')
kwargs.setdefault('inner_template', ORCHESTRATOR_INNER_TEMPLATE.format(
arrangement_version=arrangement_version))
kwargs.setdefault('outer_template', ORCHESTRATOR_OUTER_TEMPLATE)
kwargs.setdefault('customize_conf', ORCHESTRATOR_CUSTOMIZE_CONF)
kwargs['build_type'] = BUILD_TYPE_ORCHESTRATOR
try:
return self._do_create_prod_build(**kwargs)
except IOError as ex:
if os.path.basename(ex.filename) == kwargs['inner_template']:
raise OsbsValidationException(
'orchestrator invalid arrangement_version %s' %
arrangement_version)
raise
|
create_orchestrator_build
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, **kwargs):\n if not self.can_orchestrate():\n raise OsbsOrchestratorNotEnabled(\n \"can't create orchestrate build when can_orchestrate isn't enabled\"\n )\n extra = [x for x in ('platform',) if kwargs.get(x)]\n if extra:\n raise ValueError(\n 'Orchestrator build called with unwanted parameters: %s' % extra)\n arrangement_version = kwargs.setdefault('arrangement_version', self.\n build_conf.get_arrangement_version())\n if (arrangement_version < REACTOR_CONFIG_ARRANGEMENT_VERSION and not\n kwargs.get('platforms')):\n raise ValueError('Orchestrator build requires platforms param')\n kwargs.setdefault('inner_template', ORCHESTRATOR_INNER_TEMPLATE.format(\n arrangement_version=arrangement_version))\n kwargs.setdefault('outer_template', ORCHESTRATOR_OUTER_TEMPLATE)\n kwargs.setdefault('customize_conf', ORCHESTRATOR_CUSTOMIZE_CONF)\n kwargs['build_type'] = BUILD_TYPE_ORCHESTRATOR\n try:\n return self._do_create_prod_build(**kwargs)\n except IOError as ex:\n if os.path.basename(ex.filename) == kwargs['inner_template']:\n raise OsbsValidationException(\n 'orchestrator invalid arrangement_version %s' %\n arrangement_version)\n raise\n",
"role": "user"
},
{
"content": "create_orchestrator_build",
"role": "assistant"
}
] |
def x(self, build_id, follow=False, build_json=None, wait_if_missing=False,
decode=False):
logs = self.os.logs(build_id, follow=follow, build_json=build_json,
wait_if_missing=wait_if_missing)
if decode and isinstance(logs, GeneratorType):
return self._decode_build_logs_generator(logs)
if decode and logs is not None:
logs = logs.decode('utf-8').rstrip()
return logs
|
get_build_logs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_id, follow=False, build_json=None, wait_if_missing=False,\n decode=False):\n logs = self.os.logs(build_id, follow=follow, build_json=build_json,\n wait_if_missing=wait_if_missing)\n if decode and isinstance(logs, GeneratorType):\n return self._decode_build_logs_generator(logs)\n if decode and logs is not None:\n logs = logs.decode('utf-8').rstrip()\n return logs\n",
"role": "user"
},
{
"content": "get_build_logs",
"role": "assistant"
}
] |
def x(self, build_id, follow=False, wait_if_missing=False):
logs = self.get_build_logs(build_id=build_id, follow=follow,
wait_if_missing=wait_if_missing, decode=True)
if logs is None:
return
if isinstance(logs, GeneratorType):
for entries in logs:
for entry in entries.splitlines():
yield LogEntry(*self._parse_build_log_entry(entry))
else:
for entry in logs.splitlines():
yield LogEntry(*self._parse_build_log_entry(entry))
|
get_orchestrator_build_logs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, build_id, follow=False, wait_if_missing=False):\n logs = self.get_build_logs(build_id=build_id, follow=follow,\n wait_if_missing=wait_if_missing, decode=True)\n if logs is None:\n return\n if isinstance(logs, GeneratorType):\n for entries in logs:\n for entry in entries.splitlines():\n yield LogEntry(*self._parse_build_log_entry(entry))\n else:\n for entry in logs.splitlines():\n yield LogEntry(*self._parse_build_log_entry(entry))\n",
"role": "user"
},
{
"content": "get_orchestrator_build_logs",
"role": "assistant"
}
] |
def x(self, name, tags=None):
stream_import_file = os.path.join(self.os_conf.get_build_json_store(),
'image_stream_import.json')
with open(stream_import_file) as f:
stream_import = json.load(f)
return self.os.import_image(name, stream_import, tags=tags)
|
import_image
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, tags=None):\n stream_import_file = os.path.join(self.os_conf.get_build_json_store(),\n 'image_stream_import.json')\n with open(stream_import_file) as f:\n stream_import = json.load(f)\n return self.os.import_image(name, stream_import, tags=tags)\n",
"role": "user"
},
{
"content": "import_image",
"role": "assistant"
}
] |
def x(self, name, tags, repository, insecure=False):
stream_import_file = os.path.join(self.os_conf.get_build_json_store(),
'image_stream_import.json')
with open(stream_import_file) as f:
stream_import = json.load(f)
return self.os.import_image_tags(name, stream_import, tags, repository,
insecure)
|
import_image_tags
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, tags, repository, insecure=False):\n stream_import_file = os.path.join(self.os_conf.get_build_json_store(),\n 'image_stream_import.json')\n with open(stream_import_file) as f:\n stream_import = json.load(f)\n return self.os.import_image_tags(name, stream_import, tags, repository,\n insecure)\n",
"role": "user"
},
{
"content": "import_image_tags",
"role": "assistant"
}
] |
def x(self, stream, tag_name, scheduled=False, source_registry=None,
organization=None, base_image=None):
img_stream_tag_file = os.path.join(self.os_conf.get_build_json_store(),
'image_stream_tag.json')
with open(img_stream_tag_file) as f:
tag_template = json.load(f)
repository = None
registry = None
insecure = False
if source_registry:
registry = RegistryURI(source_registry['url']).docker_uri
insecure = source_registry.get('insecure', False)
if base_image and registry:
repository = self._get_enclosed_repo_with_source_registry(base_image,
registry, organization)
return self.os.ensure_image_stream_tag(stream, tag_name, tag_template,
scheduled, repository=repository, insecure=insecure)
|
ensure_image_stream_tag
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, stream, tag_name, scheduled=False, source_registry=None,\n organization=None, base_image=None):\n img_stream_tag_file = os.path.join(self.os_conf.get_build_json_store(),\n 'image_stream_tag.json')\n with open(img_stream_tag_file) as f:\n tag_template = json.load(f)\n repository = None\n registry = None\n insecure = False\n if source_registry:\n registry = RegistryURI(source_registry['url']).docker_uri\n insecure = source_registry.get('insecure', False)\n if base_image and registry:\n repository = self._get_enclosed_repo_with_source_registry(base_image,\n registry, organization)\n return self.os.ensure_image_stream_tag(stream, tag_name, tag_template,\n scheduled, repository=repository, insecure=insecure)\n",
"role": "user"
},
{
"content": "ensure_image_stream_tag",
"role": "assistant"
}
] |