_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
def add_package(
    self,
    package,
    node_paths=None,
    type_option=PackageInstallationTypeOption.PROD,
    version_option=None):
  """Return a command that, when executed, adds a node package to the current node module.

  :param package: string. A valid npm/yarn package description. The accepted forms are
    package-name, package-name@version, package-name@tag, file:/folder, file:/path/to.tgz,
    https://url/to.tgz
  :param node_paths: A list of paths that should be included in $PATH when running the script.
  :param type_option: A value from PackageInstallationTypeOption indicating the type of package
    to be installed. Defaults to 'prod', a production dependency.
  :param version_option: A value from PackageInstallationVersionOption indicating how to match
    versions. Defaults to None, which uses the package manager default.
  """
  add_args = self._get_add_package_args(
    package,
    type_option=type_option,
    version_option=version_option)
  return self.run_command(args=add_args, node_paths=node_paths)
def run_cli(self, cli, args=None, node_paths=None):
  """Return a command that, when executed, runs an installed cli via the package manager."""
  # '--' separates the cli's own arguments from package-manager arguments; it is only
  # emitted when extra arguments are actually supplied.
  extra_args = ['--'] + list(args) if args else []
  return self.run_command(args=[cli] + extra_args, node_paths=node_paths)
def get_build_files_family(project_tree, dir_relpath, build_ignore_patterns=None):
  """Return all the BUILD files in a single directory of the project tree."""
  pattern = '{prefix}*'.format(prefix=BuildFile._BUILD_FILE_PREFIX)
  build_files = set()
  for candidate in sorted(project_tree.glob1(dir_relpath, pattern)):
    candidate_path = os.path.join(dir_relpath, candidate)
    # Keep only names that look like BUILD files and are actually regular files.
    if BuildFile._is_buildfile_name(candidate) and project_tree.isfile(candidate_path):
      build_files.add(candidate_path)
  return BuildFile._build_files_from_paths(project_tree, build_files, build_ignore_patterns)
def code(self):
  """Compile this BUILD file's source and return the resulting code object."""
  source_text = self.source()
  # dont_inherit=True: do not pick up compiler flags from the calling context.
  return compile(source_text, self.full_path, 'exec', flags=0, dont_inherit=True)
def walk(self, callback):
  """Invoke callback on this parser and its descendants, in depth-first order."""
  # Visit self first, then recurse into each child (children may override walk).
  callback(self)
  for child_parser in self._child_parsers:
    child_parser.walk(callback)
q27405 | Parser._create_flag_value_map | train | def _create_flag_value_map(self, flags):
"""Returns a map of flag -> list of values, based on the given flag strings.
None signals no value given (e.g., -x, --foo).
The value is a list because the user may specify the same flag multiple times, and that's
sometimes OK (e.g., when appending to list-valued options).
"""
flag_value_map = defaultdict(list)
for flag in flags:
key, has_equals_sign, flag_val = flag.partition('=')
if not has_equals_sign:
if not flag.startswith('--'): # '-xfoo' style.
key = flag[0:2]
flag_val = flag[2:]
if not flag_val:
# Either a short option with no value or a long option with no equals sign.
# Important so we can distinguish between no value ('--foo') and setting to an empty
# string ('--foo='), for options with an implicit_value.
flag_val = None
flag_value_map[key].append(flag_val)
return flag_value_map | python | {
"resource": ""
} |
def parse_args(self, flags, namespace):
  """Set values for this parser's options on the namespace object.

  :param flags: Command-line flag strings for this parser's scope.
  :param namespace: Object on which computed option values are set via setattr.
  :returns: The mutated namespace.
  :raises ParseError: On a missing value, an unrecognized flag, or a value-computation failure.
  """
  flag_value_map = self._create_flag_value_map(flags)
  # Maps a mutual-exclusion group name to the dests seen so far in that group.
  mutex_map = defaultdict(list)
  for args, kwargs in self._unnormalized_option_registrations_iter():
    self._validate(args, kwargs)
    dest = self.parse_dest(*args, **kwargs)
    # Compute the values provided on the command line for this option. Note that there may be
    # multiple values, for any combination of the following reasons:
    # - The user used the same flag multiple times.
    # - The user specified a boolean flag (--foo) and its inverse (--no-foo).
    # - The option has multiple names, and the user used more than one of them.
    #
    # We also check if the option is deprecated, but we only do so if the option is explicitly
    # specified as a command-line flag, so we don't spam users with deprecated option values
    # specified in config, which isn't something they control.
    implicit_value = kwargs.get('implicit_value')
    if implicit_value is None and kwargs.get('type') == bool:
      implicit_value = True  # Allows --foo to mean --foo=true.
    flag_vals = []

    def add_flag_val(v):
      # v is None when the flag was given with no argument; fall back to the implicit
      # value, or fail if the option declares none.
      if v is None:
        if implicit_value is None:
          raise ParseError('Missing value for command line flag {} in {}'.format(
            arg, self._scope_str()))
        else:
          flag_vals.append(implicit_value)
      else:
        flag_vals.append(v)

    for arg in args:
      # If the user specified --no-foo on the cmd line, treat it as if the user specified
      # --foo, but with the inverse value.
      if kwargs.get('type') == bool:
        inverse_arg = self._inverse_arg(arg)
        if inverse_arg in flag_value_map:
          flag_value_map[arg] = [self._invert(v) for v in flag_value_map[inverse_arg]]
          implicit_value = self._invert(implicit_value)
          del flag_value_map[inverse_arg]
      if arg in flag_value_map:
        for v in flag_value_map[arg]:
          add_flag_val(v)
        del flag_value_map[arg]

    # Get the value for this option, falling back to defaults as needed.
    try:
      val = self._compute_value(dest, kwargs, flag_vals)
    except ParseError as e:
      # Reraise a new exception with context on the option being processed at the time of error.
      # Note that other exception types can be raised here that are caught by ParseError (e.g.
      # BooleanConversionError), hence we reference the original exception type as type(e).
      raise type(e)(
        'Error computing value for {} in {} (may also be from PANTS_* environment variables).'
        '\nCaused by:\n{}'.format(', '.join(args), self._scope_str(), traceback.format_exc())
      )

    # If the option is explicitly given, check deprecation and mutual exclusion.
    if val.rank > RankedValue.HARDCODED:
      self._check_deprecated(dest, kwargs)
      mutex_dest = kwargs.get('mutually_exclusive_group')
      if mutex_dest:
        mutex_map[mutex_dest].append(dest)
        dest = mutex_dest
      else:
        mutex_map[dest].append(dest)
      if len(mutex_map[dest]) > 1:
        raise self.MutuallyExclusiveOptionError(
          "Can only provide one of the mutually exclusive options {}".format(mutex_map[dest]))

    setattr(namespace, dest, val)

  # See if there are any unconsumed flags remaining.
  if flag_value_map:
    raise ParseError('Unrecognized command line flags on {}: {}'.format(
      self._scope_str(), ', '.join(flag_value_map.keys())))

  return namespace
def option_registrations_iter(self):
  """Returns an iterator over the normalized registration arguments of each option in this parser.

  Useful for generating help and other documentation.

  Each yielded item is an (args, kwargs) pair, as passed to register(), except that kwargs
  will be normalized in the following ways:
    - It will always have 'dest' explicitly set.
    - It will always have 'default' explicitly set, and the value will be a RankedValue.
    - For recursive options, the original registrar will also have 'recursive_root' set.

  Note that recursive options we inherit from a parent will also be yielded here, with
  the correctly-scoped default value.
  """
  def normalize_kwargs(args, orig_kwargs):
    # Work on a copy so the stored registration kwargs are not mutated.
    nkwargs = copy.copy(orig_kwargs)
    dest = self.parse_dest(*args, **nkwargs)
    nkwargs['dest'] = dest
    if not ('default' in nkwargs and isinstance(nkwargs['default'], RankedValue)):
      nkwargs['default'] = self._compute_value(dest, nkwargs, [])
    return nkwargs

  # First yield any recursive options we inherit from our parent.
  if self._parent_parser:
    for args, kwargs in self._parent_parser._recursive_option_registration_args():
      yield args, normalize_kwargs(args, kwargs)

  # Then yield our directly-registered options.
  # This must come after yielding inherited recursive options, so we can detect shadowing.
  for args, kwargs in self._option_registrations:
    normalized_kwargs = normalize_kwargs(args, kwargs)
    if 'recursive' in normalized_kwargs:
      # If we're the original registrar, make sure we can distinguish that.
      normalized_kwargs['recursive_root'] = True
    yield args, normalized_kwargs
q27408 | Parser._unnormalized_option_registrations_iter | train | def _unnormalized_option_registrations_iter(self):
"""Returns an iterator over the raw registration arguments of each option in this parser.
Each yielded item is an (args, kwargs) pair, exactly as passed to register(), except for
substituting list and dict types with list_option/dict_option.
Note that recursive options we inherit from a parent will also be yielded here.
"""
# First yield any recursive options we inherit from our parent.
if self._parent_parser:
for args, kwargs in self._parent_parser._recursive_option_registration_args():
yield args, kwargs
# Then yield our directly-registered options.
for args, kwargs in self._option_registrations:
if 'recursive' in kwargs and self._scope_info.category == ScopeInfo.SUBSYSTEM:
raise RecursiveSubsystemOption(self.scope, args[0])
yield args, kwargs | python | {
"resource": ""
} |
q27409 | Parser._recursive_option_registration_args | train | def _recursive_option_registration_args(self):
"""Yield args, kwargs pairs for just our recursive options.
Includes all the options we inherit recursively from our ancestors.
"""
if self._parent_parser:
for args, kwargs in self._parent_parser._recursive_option_registration_args():
yield args, kwargs
for args, kwargs in self._option_registrations:
# Note that all subsystem options are implicitly recursive: a subscope of a subsystem
# scope is another (optionable-specific) instance of the same subsystem, so it needs
# all the same options.
if self._scope_info.category == ScopeInfo.SUBSYSTEM or 'recursive' in kwargs:
yield args, kwargs | python | {
"resource": ""
} |
def register(self, *args, **kwargs):
  """Register an option.

  :param args: The option flag names (e.g. '--foo-bar', '-x').
  :param kwargs: Registration keyword arguments (type, default, implicit_value, ...).
  :raises FrozenRegistration: If this parser has already been frozen.
  :raises Shadowing: If a flag shadows one registered in an enclosing scope.
  :raises OptionAlreadyRegistered: If a flag was already registered in this scope.
  """
  if self._frozen:
    raise FrozenRegistration(self.scope, args[0])

  # Prevent further registration in enclosing scopes.
  ancestor = self._parent_parser
  while ancestor:
    ancestor._freeze()
    ancestor = ancestor._parent_parser

  if kwargs.get('type') == bool:
    default = kwargs.get('default')
    if default is None:
      # Unless a tri-state bool is explicitly opted into with the `UnsetBool` default value,
      # boolean options always have an implicit boolean-typed default. We make that default
      # explicit here.
      kwargs['default'] = not self._ensure_bool(kwargs.get('implicit_value', True))
    elif default is UnsetBool:
      kwargs['default'] = None

  # Record the args. We'll do the underlying parsing on-demand.
  self._option_registrations.append((args, kwargs))

  if self._parent_parser:
    for arg in args:
      existing_scope = self._parent_parser._existing_scope(arg)
      if existing_scope is not None:
        raise Shadowing(self.scope, arg, outer_scope=self._scope_str(existing_scope))
  for arg in args:
    if arg in self._known_args:
      raise OptionAlreadyRegistered(self.scope, arg)
  self._known_args.update(args)
def _validate(self, args, kwargs):
  """Validate option registration arguments.

  :param args: The option flag names being registered.
  :param kwargs: The registration keyword arguments.
  :raises RegistrationError: (via the appropriate subclass) on any invalid registration.
  """
  def error(exception_type, arg_name=None, **msg_kwargs):
    if arg_name is None:
      arg_name = args[0] if args else '<unknown>'
    raise exception_type(self.scope, arg_name, **msg_kwargs)

  if not args:
    error(NoOptionNames)
  # validate args.
  for arg in args:
    if not arg.startswith('-'):
      error(OptionNameDash, arg_name=arg)
    if not arg.startswith('--') and len(arg) > 2:
      error(OptionNameDoubleDash, arg_name=arg)

  # Validate kwargs.
  if 'implicit_value' in kwargs and kwargs['implicit_value'] is None:
    error(ImplicitValIsNone)

  # Note: we check for list here, not list_option, because we validate the provided kwargs,
  # not the ones we modified. However we temporarily also allow list_option, until the
  # deprecation is complete.
  if 'member_type' in kwargs and kwargs.get('type', str) not in [list, list_option]:
    error(MemberTypeNotAllowed, type_=kwargs.get('type', str).__name__)
  if kwargs.get('member_type', str) not in self._allowed_member_types:
    error(InvalidMemberType, member_type=kwargs.get('member_type', str).__name__)

  for kwarg in kwargs:
    if kwarg not in self._allowed_registration_kwargs:
      error(InvalidKwarg, kwarg=kwarg)
    # Ensure `daemon=True` can't be passed on non-global scopes (except for `recursive=True`).
    if (kwarg == 'daemon' and self._scope != GLOBAL_SCOPE and kwargs.get('recursive') is False):
      error(InvalidKwargNonGlobalScope, kwarg=kwarg)

  removal_version = kwargs.get('removal_version')
  if removal_version is not None:
    validate_deprecation_semver(removal_version, 'removal version')
def parse_dest(*args, **kwargs):
  """Select the dest name for an option registration.

  If an explicit `dest` is specified, returns that; otherwise derives a default from the
  option flags, where '--foo-bar' -> 'foo_bar' and '-x' -> 'x'. The first long-form
  ('--') flag is preferred, falling back to the first flag given.
  """
  explicit_dest = kwargs.get('dest')
  if explicit_dest:
    return explicit_dest
  preferred_flag = next((candidate for candidate in args if candidate.startswith('--')), args[0])
  return preferred_flag.lstrip('-').replace('-', '_')
def _create_dist(self,
                 dist_tgt,
                 dist_target_dir,
                 setup_requires_pex,
                 snapshot_fingerprint,
                 is_platform_specific):
  """Create a .whl file for the specified python_distribution target.

  Copies the target's sources into dist_target_dir and runs `setup.py bdist_wheel`
  there via the given pex, raising on a non-zero exit code.
  """
  self._copy_sources(dist_tgt, dist_target_dir)

  setup_argv = self._generate_snapshot_bdist_wheel_argv(
    snapshot_fingerprint, is_platform_specific)
  cmd = safe_shlex_join(setup_requires_pex.cmdline(setup_argv))
  with self.context.new_workunit('setup.py', cmd=cmd, labels=[WorkUnitLabel.TOOL]) as workunit:
    with pushd(dist_target_dir):
      exit_code = setup_requires_pex.run(args=setup_argv,
                                         stdout=workunit.output('stdout'),
                                         stderr=workunit.output('stderr'))
      if exit_code != 0:
        raise self.BuildLocalPythonDistributionsError(
          "Installation of python distribution from target {target} into directory {into_dir} "
          "failed (return value of run() was: {rc!r}).\n"
          "The pex with any requirements is located at: {interpreter}.\n"
          "The host system's compiler and linker were used.\n"
          "The setup command was: {command}."
          .format(target=dist_tgt,
                  into_dir=dist_target_dir,
                  rc=exit_code,
                  interpreter=setup_requires_pex.path(),
                  command=setup_argv))
def _inject_synthetic_dist_requirements(self, dist, req_lib_addr):
  """Inject a synthetic requirements library that references a local wheel.

  :param dist: Path of the locally built wheel to reference.
  :param req_lib_addr: :class:`Address` to give to the synthetic target.

  The synthetic :class:`PythonRequirementLibrary` wrapping the wheel is registered
  in the build graph as a side effect; nothing is returned.
  """
  whl_dir, whl_basename = split_basename_and_dirname(dist)
  # Wheel file names are '-'-separated: <name>-<version>-...; pin name==version.
  name, version = whl_basename.split('-')[0], whl_basename.split('-')[1]
  pinned_req_str = '=='.join([name, version])
  req = PythonRequirement(pinned_req_str, repository=whl_dir)
  self.context.build_graph.inject_synthetic_target(req_lib_addr, PythonRequirementLibrary,
                                                   requirements=[req])
def _get_whl_from_dir(cls, install_dir):
  """Return the absolute path of the single whl in a setup.py install directory."""
  dist_dir = cls._get_dist_dir(install_dir)
  found_whls = glob.glob(os.path.join(dist_dir, '*.whl'))
  if not found_whls:
    raise cls.BuildLocalPythonDistributionsError(
      'No distributions were produced by python_create_distribution task.\n'
      'dist_dir: {}, install_dir: {}'
      .format(dist_dir, install_dir))
  if len(found_whls) > 1:
    # TODO: is this ever going to happen?
    raise cls.BuildLocalPythonDistributionsError('Ambiguous local python distributions found: {}'
                                                 .format(found_whls))
  return found_whls[0]
q27416 | JUnitRun._spawn | train | def _spawn(self, distribution, executor=None, *args, **kwargs):
"""Returns a processhandler to a process executing java.
:param Executor executor: the java subprocess executor to use. If not specified, construct
using the distribution.
:param Distribution distribution: The JDK or JRE installed.
:rtype: ProcessHandler
"""
actual_executor = executor or SubprocessExecutor(distribution)
return distribution.execute_java_async(*args,
executor=actual_executor,
**kwargs) | python | {
"resource": ""
} |
def execute_java_for_coverage(self, targets, *args, **kwargs):
  """Execute java for targets directly, bypassing the test mixin.

  This execution is not wrapped with timeouts and other test mixin code common
  across test targets. Used for coverage instrumentation.
  """
  dist = self.preferred_jvm_distribution_for_targets(targets)
  return dist.execute_java(*args, executor=SubprocessExecutor(dist), **kwargs)
def _parse(self, test_spec_str):
  """Parses a test specification string into an object that can yield corresponding tests.

  Tests can be specified in one of four forms:
  * [classname]
  * [classname]#[methodname]
  * [fully qualified classname]
  * [fully qualified classname]#[methodname]

  :param string test_spec_str: A test specification.
  :returns: A Test object.
  :rtype: :class:`Test`
  """
  components = test_spec_str.split('#', 2)
  classname = components[0]
  # NOTE(review): with maxsplit=2, a spec containing two '#'s yields three components
  # and the methodname is silently dropped; presumably specs contain at most one '#'.
  methodname = components[1] if len(components) == 2 else None
  return Test(classname=classname, methodname=methodname)
def register_jvm_tool(cls,
                      register,
                      key,
                      classpath_spec=None,
                      main=None,
                      custom_rules=None,
                      fingerprint=True,
                      classpath=None,
                      help=None,
                      removal_version=None,
                      removal_hint=None):
  """Registers a jvm tool under `key` for lazy classpath resolution.

  Classpaths can be retrieved in `execute` scope via `tool_classpath_from_products`.

  NB: If the tool's `main` class name is supplied the tool classpath will be shaded.

  :param register: A function that can register options with the option system.
  :param unicode key: The key the tool configuration should be registered under.
  :param unicode classpath_spec: The tool classpath target address spec that can be used to
                                 override this tool's classpath; by default, `//:[key]`.
  :param unicode main: The fully qualified class name of the tool's main class if shading of the
                       tool classpath is desired.
  :param list custom_rules: An optional list of `Shader.Rule`s to apply before the automatically
                            generated binary jar shading rules. This is useful for excluding
                            classes shared between the tool and the code it runs over. The
                            canonical example is the `org.junit.Test` annotation read by junit
                            runner tools from user code. In this sort of case the shared code must
                            have a uniform name between the tool and the user code and so the
                            shared code must be excluded from shading.
  :param bool fingerprint: Indicates whether to include the jvm tool in the task's fingerprint.
                           Note that unlike for other options, fingerprinting is enabled for tools
                           by default.
  :param list classpath: A list of one or more `JarDependency` objects that form this tool's
                         default classpath. If the classpath is optional, supply an empty list;
                         otherwise the default classpath of `None` indicates the `classpath_spec`
                         must point to a target defined in a BUILD file that provides the tool
                         classpath.
  :param unicode help: An optional custom help string; otherwise a reasonable one is generated.
  :param string removal_version: A semver at which this tool will be removed.
  :param unicode removal_hint: A hint on how to migrate away from this tool.
  """
  def formulate_help():
    if classpath:
      return ('Target address spec for overriding the classpath of the {} jvm tool which is, '
              'by default: {}'.format(key, classpath))
    else:
      return 'Target address spec for specifying the classpath of the {} jvm tool.'.format(key)
  help = help or formulate_help()

  register('--{}'.format(key),
           advanced=True,
           type=target_option,
           default='//:{}'.format(key) if classpath_spec is None else classpath_spec,
           help=help,
           fingerprint=fingerprint,
           removal_version=removal_version,
           removal_hint=removal_hint)

  # TODO(John Sirois): Move towards requiring tool specs point to jvm_binary targets.
  # These already have a main and are a natural place to house any custom shading rules. That
  # would eliminate the need to pass main and custom_rules here.
  # It is awkward that jars can no longer be inlined as dependencies - this will require 2 targets
  # for every tool - the jvm_binary, and a jar_library for its dependencies to point to. It may
  # be worth creating a JarLibrary subclass - say JarBinary, or else mixing in a Binary interface
  # to JarLibrary to endow it with main and shade_rules attributes to allow for single-target
  # definition of resolvable jvm binaries.
  jvm_tool = cls.JvmTool(register.scope, key, classpath, main, custom_rules)
  JvmToolMixin._jvm_tools.append(jvm_tool)
def target_types_from_build_file_aliases(aliases):
  """Given BuildFileAliases, return the concrete target types constructed for each alias."""
  alias_to_type = dict(aliases.target_types)
  for alias, factory in aliases.target_macro_factories.items():
    # Each macro factory is expected to produce exactly one target type.
    target_type, = factory.target_types
    alias_to_type[alias] = target_type
  return alias_to_type
def transitive_hydrated_targets(build_file_addresses):
  """Given BuildFileAddresses, kicks off recursion on expansion of TransitiveHydratedTargets.

  The TransitiveHydratedTarget struct represents a structure-shared graph, which we walk
  and flatten here. The engine memoizes the computation of TransitiveHydratedTarget, so
  when multiple TransitiveHydratedTargets objects are being constructed for multiple
  roots, their structure will be shared.
  """
  roots = yield [Get(TransitiveHydratedTarget, Address, a)
                 for a in build_file_addresses.addresses]

  # Flatten the shared graph breadth-first, de-duping on the root HydratedTarget.
  closure = OrderedSet()
  queue = deque(roots)
  while queue:
    tht = queue.popleft()
    if tht.root in closure:
      continue
    closure.add(tht.root)
    queue.extend(tht.dependencies)

  yield TransitiveHydratedTargets(tuple(t.root for t in roots), closure)
def hydrated_targets(build_file_addresses):
  """Requests HydratedTarget instances for BuildFileAddresses."""
  hydrated = yield [Get(HydratedTarget, Address, a) for a in build_file_addresses.addresses]
  yield HydratedTargets(hydrated)
def hydrate_target(hydrated_struct):
  """Construct a HydratedTarget from a TargetAdaptor and hydrated versions of its adapted fields."""
  # NB: This docstring was previously placed after the first statement, where it was an
  # inert string expression rather than the function's docstring; it is now in the
  # conventional position so help()/introspection see it.
  target_adaptor = hydrated_struct.value
  # Hydrate the fields of the adaptor and re-construct it.
  hydrated_fields = yield [Get(HydratedField, HydrateableField, fa)
                           for fa in target_adaptor.field_adaptors]
  kwargs = target_adaptor.kwargs()
  for field in hydrated_fields:
    kwargs[field.name] = field.value
  yield HydratedTarget(target_adaptor.address,
                       TargetAdaptor(**kwargs),
                       tuple(target_adaptor.dependencies))
def hydrate_sources(sources_field, glob_match_error_behavior):
  """Given a SourcesField, request a Snapshot for its path_globs and create an EagerFilesetWithSpec."""
  # TODO(#5864): merge the target's selection of --glob-expansion-failure (which doesn't exist yet)
  # with the global default!
  globs_with_behavior = sources_field.path_globs.copy(
    glob_match_error_behavior=glob_match_error_behavior)
  snapshot = yield Get(Snapshot, PathGlobs, globs_with_behavior)
  fileset = _eager_fileset_with_spec(
    sources_field.address.spec_path,
    sources_field.filespecs,
    snapshot)
  sources_field.validate_fn(fileset)
  yield HydratedField(sources_field.arg, fileset)
def hydrate_bundles(bundles_field, glob_match_error_behavior):
  """Given a BundlesField, request Snapshots for each of its filesets and create BundleAdaptors."""
  globs_list = [pg.copy(glob_match_error_behavior=glob_match_error_behavior)
                for pg in bundles_field.path_globs_list]
  snapshots = yield [Get(Snapshot, PathGlobs, pg) for pg in globs_list]

  spec_path = bundles_field.address.spec_path
  bundles = []
  for bundle, filespecs, snapshot in zip(bundles_field.bundles,
                                         bundles_field.filespecs_list,
                                         snapshots):
    rel_spec_path = getattr(bundle, 'rel_path', spec_path)
    kwargs = bundle.kwargs()
    # NB: We `include_dirs=True` because bundle filesets frequently specify directories in order
    # to trigger a (deprecated) default inclusion of their recursive contents. See the related
    # deprecation in `pants.backend.jvm.tasks.bundle_create`.
    kwargs['fileset'] = _eager_fileset_with_spec(rel_spec_path,
                                                 filespecs,
                                                 snapshot,
                                                 include_dirs=True)
    bundles.append(BundleAdaptor(**kwargs))
  yield HydratedField('bundles', bundles)
def create_legacy_graph_tasks():
  """Create tasks to recursively parse the legacy graph.

  :returns: The list of rules that hydrate v1 Targets (and their sources/bundles)
    via the v2 engine.
  """
  return [
    transitive_hydrated_targets,
    transitive_hydrated_target,
    hydrated_targets,
    hydrate_target,
    find_owners,
    hydrate_sources,
    hydrate_bundles,
    RootRule(OwnersRequest),
  ]
def _index(self, hydrated_targets):
  """Index from the given roots into the storage provided by the base class.

  This is an additive operation: any existing connections involving these nodes are preserved.

  :param hydrated_targets: An iterable of HydratedTarget instances to index.
  :returns: The set of all addresses seen (both pre-existing and newly indexed).
  """
  all_addresses = set()
  new_targets = list()

  # Index the ProductGraph.
  for hydrated_target in hydrated_targets:
    target_adaptor = hydrated_target.adaptor
    address = target_adaptor.address
    all_addresses.add(address)
    if address not in self._target_by_address:
      new_targets.append(self._index_target(target_adaptor))

  # Once the declared dependencies of all targets are indexed, inject their
  # additional "traversable_(dependency_)?specs".
  deps_to_inject = OrderedSet()
  addresses_to_inject = set()

  def inject(target, dep_spec, is_dependency):
    address = Address.parse(dep_spec, relative_to=target.address.spec_path)
    # Only inject if not already a declared dependency of the target.
    if not any(address == t.address for t in target.dependencies):
      addresses_to_inject.add(address)
      if is_dependency:
        deps_to_inject.add((target.address, address))

  self.apply_injectables(new_targets)

  for target in new_targets:
    for spec in target.compute_dependency_specs(payload=target.payload):
      inject(target, spec, is_dependency=True)
    for spec in target.compute_injectable_specs(payload=target.payload):
      inject(target, spec, is_dependency=False)

  # Inject all addresses, then declare injected dependencies.
  self.inject_addresses_closure(addresses_to_inject)
  for target_address, dep_address in deps_to_inject:
    self.inject_dependency(dependent=target_address, dependency=dep_address)

  return all_addresses
def _index_target(self, target_adaptor):
  """Instantiate the given TargetAdaptor, index it in the graph, and return a Target."""
  address = target_adaptor.address
  target = self._instantiate_target(target_adaptor)
  self._target_by_address[address] = target

  for dep_address in target_adaptor.dependencies:
    if dep_address in self._target_dependencies_by_address[address]:
      raise self.DuplicateAddressError(
        'Addresses in dependencies must be unique. '
        "'{spec}' is referenced more than once by target '{target}'."
        .format(spec=dep_address.spec, target=address.spec)
      )
    # Link the declared dependency, which will be indexed independently.
    self._target_dependencies_by_address[address].add(dep_address)
    self._target_dependees_by_address[dep_address].add(address)
  return target
def _instantiate_target(self, target_adaptor):
  """Given a TargetAdaptor struct previously parsed from a BUILD file, instantiate a Target."""
  target_cls = self._target_types[target_adaptor.type_alias]
  try:
    # Dependencies were already consumed during construction, so drop them here.
    kwargs = target_adaptor.kwargs()
    kwargs.pop('dependencies')

    # App and RemoteSources targets need special-case construction.
    if issubclass(target_cls, AppBase):
      return self._instantiate_app(target_cls, kwargs)
    if target_cls is RemoteSources:
      return self._instantiate_remote_sources(kwargs)
    return target_cls(build_graph=self, **kwargs)
  except TargetDefinitionException:
    raise
  except Exception as e:
    raise TargetDefinitionException(
      target_adaptor.address,
      'Failed to instantiate Target with type {}: {}'.format(target_cls, e))
def _instantiate_app(self, target_cls, kwargs):
  """For App targets, convert each BundleAdaptor to BundleProps before construction."""
  parse_context = ParseContext(kwargs['address'].spec_path, dict())
  bundle_factory = Bundle(parse_context)
  kwargs['bundles'] = [bundle_factory.create_bundle_props(bundle)
                       for bundle in kwargs['bundles']]
  return target_cls(build_graph=self, **kwargs)
def _instantiate_remote_sources(self, kwargs):
  """For a RemoteSources target, convert the "dest" field to its real target type."""
  dest_cls = self._target_types[kwargs['dest']]
  kwargs['dest'] = _DestWrapper((dest_cls,))
  return RemoteSources(build_graph=self, **kwargs)
def _inject_addresses(self, subjects):
  """Injects targets into the graph for each of the given `Address` objects, and then yields them.

  TODO: See #5606 about undoing the split between `_inject_addresses` and `_inject_specs`.
  """
  logger.debug('Injecting addresses to %s: %s', self, subjects)
  with self._resolve_context():
    addresses = tuple(subjects)
    thts, = self._scheduler.product_request(TransitiveHydratedTargets,
                                            [BuildFileAddresses(addresses)])

  self._index(thts.closure)

  # Yield each subject address once, preserving first-seen order.
  yielded = set()
  for address in subjects:
    if address not in yielded:
      yielded.add(address)
      yield address
def _inject_specs(self, specs):
  """Injects targets into the graph for the given `Specs` object, yielding the resulting addresses."""
  if not specs:
    return
  logger.debug('Injecting specs to %s: %s', self, specs)
  with self._resolve_context():
    thts, = self._scheduler.product_request(TransitiveHydratedTargets,
                                            [specs])

  self._index(thts.closure)

  for hydrated_target in thts.roots:
    yield hydrated_target.address
def from_iterable(cls, target_types, address_mapper, adaptor_iter):
  """Create a new DependentGraph from an iterable of TargetAdaptor subclasses."""
  graph = cls(target_types, address_mapper)
  valid_addresses = set()
  for adaptor in adaptor_iter:
    graph._inject_target(adaptor)
    valid_addresses.add(adaptor.address)
  # Every dependency edge must point at one of the injected addresses.
  graph._validate(valid_addresses)
  return graph
def _validate(self, all_valid_addresses):
  """Validate that every dependency in the graph exists in the given address set."""
  for dependency, dependents in iteritems(self._dependent_address_map):
    if dependency in all_valid_addresses:
      continue
    raise AddressLookupError(
      'Dependent graph construction failed: {} did not exist. Was depended on by:\n {}'.format(
        dependency.spec,
        '\n '.join(d.spec for d in dependents)
      )
    )
def _inject_target(self, target_adaptor):
  """Inject a target, respecting all sources of dependencies."""
  address = target_adaptor.address
  target_cls = self._target_types[target_adaptor.type_alias]
  # Specs implied by the target type (lazily produced).
  implicit_spec_strs = target_cls.compute_dependency_specs(kwargs=target_adaptor.kwargs())

  for declared_dep in target_adaptor.dependencies:
    self._dependent_address_map[declared_dep].add(address)
  for spec in implicit_spec_strs:
    implicit_dep = Address.parse(spec,
                                 relative_to=address.spec_path,
                                 subproject_roots=self._address_mapper.subproject_roots)
    self._implicit_dependent_address_map[implicit_dep].add(address)
def dependents_of_addresses(self, addresses):
  """Given an iterable of addresses, return those addresses plus all of their direct dependents.

  :param addresses: An iterable of addresses; may be a one-shot iterator.
  :returns: An OrderedSet containing the input addresses followed by their direct (declared and
    implicit) dependents.
  """
  # Materialize once: the input may be a generator, and we traverse it twice below. The original
  # code iterated `addresses` both in the OrderedSet constructor and in the loop, which silently
  # skipped the loop for one-shot iterators.
  addresses = list(addresses)
  seen = OrderedSet(addresses)
  for address in addresses:
    seen.update(self._dependent_address_map[address])
    seen.update(self._implicit_dependent_address_map[address])
  return seen
"resource": ""
} |
def transitive_dependents_of_addresses(self, addresses):
  """Given an iterable of addresses, return them and all of their dependents, transitively.

  The result is a list in BFS discovery order, starting from the given addresses.
  """
  visited = set()
  ordered = []
  pending = deque(addresses)
  while pending:
    current = pending.popleft()
    if current in visited:
      continue
    visited.add(current)
    ordered.append(current)
    # Enqueue both declared and implicit dependents for further expansion.
    pending.extend(self._dependent_address_map[current])
    pending.extend(self._implicit_dependent_address_map[current])
  return ordered
"resource": ""
} |
def _legacy_symbol_table(build_file_aliases):
  """Construct a SymbolTable for the given BuildFileAliases.

  :param build_file_aliases: BuildFileAliases to register.
  :type build_file_aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
  :returns: A SymbolTable.
  """
  table = {}
  for alias, target_type in build_file_aliases.target_types.items():
    table[alias] = _make_target_adaptor(TargetAdaptor, target_type)

  for alias, factory in build_file_aliases.target_macro_factories.items():
    # TargetMacro.Factory with more than one target type is deprecated. Only macro factories with
    # a single target_type parse sources through the engine; anything else falls back to the
    # legacy python sources parsing. Conveniently, multi-target_type TargetMacro.Factory and
    # legacy python source parsing are targeted to be removed in the same version of pants.
    if len(factory.target_types) == 1:
      table[alias] = _make_target_adaptor(TargetAdaptor, tuple(factory.target_types)[0])

  # TODO: The alias replacement here is to avoid elevating "TargetAdaptors" into the public
  # API until after https://github.com/pantsbuild/pants/issues/3560 has been completed.
  # These should likely move onto Target subclasses as the engine gets deeper into beta
  # territory.
  table['python_library'] = _make_target_adaptor(PythonTargetAdaptor, PythonLibrary)
  table['jvm_app'] = _make_target_adaptor(AppAdaptor, JvmApp)
  table['jvm_binary'] = _make_target_adaptor(JvmBinaryAdaptor, JvmBinary)
  table['python_app'] = _make_target_adaptor(AppAdaptor, PythonApp)
  table['python_tests'] = _make_target_adaptor(PythonTestsAdaptor, PythonTests)
  table['python_binary'] = _make_target_adaptor(PythonBinaryAdaptor, PythonBinary)
  table['remote_sources'] = _make_target_adaptor(RemoteSourcesAdaptor, RemoteSources)
  table['page'] = _make_target_adaptor(PageAdaptor, Page)

  # Note that these don't call _make_target_adaptor because we don't have a handy reference to the
  # types being constructed. They don't have any default_sources behavior, so this should be ok,
  # but if we end up doing more things in _make_target_adaptor, we should make sure they're
  # applied here too.
  table['pants_plugin'] = PantsPluginAdaptor
  table['contrib_plugin'] = PantsPluginAdaptor

  return SymbolTable(table)
"resource": ""
} |
def _make_target_adaptor(base_class, target_type):
  """Look up the default source globs for the type, and apply them to parsing through the engine.

  :param base_class: The TargetAdaptor (sub)class to specialize.
  :param target_type: The legacy Target subclass whose default sources configuration is consulted.
  :returns: `base_class` unchanged when `target_type` has no default sources; otherwise a new
    subclass overriding the default-sources glob properties.
  """
  if not target_type.supports_default_sources() or target_type.default_sources_globs is None:
    return base_class

  # Captured by the property closures on the subclass below.
  globs = _tuplify(target_type.default_sources_globs)
  excludes = _tuplify(target_type.default_sources_exclude_globs)

  class GlobsHandlingTargetAdaptor(base_class):
    @property
    def default_sources_globs(self):
      # Fall back to the base class when the target type supplied no globs.
      if globs is None:
        return super(GlobsHandlingTargetAdaptor, self).default_sources_globs
      else:
        return globs

    @property
    def default_sources_exclude_globs(self):
      if excludes is None:
        return super(GlobsHandlingTargetAdaptor, self).default_sources_exclude_globs
      else:
        return excludes

  return GlobsHandlingTargetAdaptor
"resource": ""
} |
def warm_product_graph(self, target_roots):
  """Warm the scheduler's `ProductGraph` with `TransitiveHydratedTargets` products.

  This method raises only fatal errors, and does not consider failed roots in the execution
  graph: in the v1 codepath, failed roots are accounted for post-fork.

  :param TargetRoots target_roots: The targets root of the request.
  """
  logger.debug('warming target_roots for: %r', target_roots)
  subjects = [target_roots.specs]
  execution_request = self.scheduler_session.execution_request(
    [TransitiveHydratedTargets], subjects)
  self.scheduler_session.execute(execution_request)
"resource": ""
} |
def create_build_graph(self, target_roots, build_root=None):
  """Construct and return a `BuildGraph` given a set of input specs.

  :param TargetRoots target_roots: The targets root of the request.
  :param string build_root: The build root.
  :returns: A tuple of (BuildGraph, AddressMapper).
  """
  logger.debug('target_roots are: %r', target_roots)
  build_graph = LegacyBuildGraph.create(self.scheduler_session, self.build_file_aliases)
  logger.debug('build_graph is: %s', build_graph)
  # Unroll the entire generator so every root's closure is injected before we return.
  for _ in build_graph.inject_roots_closure(target_roots):
    pass
  address_mapper = LegacyAddressMapper(self.scheduler_session, build_root or get_buildroot())
  logger.debug('address_mapper is: %s', address_mapper)
  return build_graph, address_mapper
"resource": ""
} |
def register_all_files_handler(self, callback, name='all_files'):
  """Registers a subscription for all files under a given watch path.

  :param func callback: the callback to execute on each filesystem event
  :param str name: the subscription name as used by watchman
  """
  # Request events for all file types.
  # NB: Touching a file invalidates its parent directory due to:
  #   https://github.com/facebook/watchman/issues/305
  # ...but if we were to skip watching directories, we'd still have to invalidate
  # the parents of any changed files, and we wouldn't see creation/deletion of
  # empty directories.
  match_expression = [
    'allof',  # All of the below rules must be true to match.
    ['not', ['dirname', 'dist', self.ZERO_DEPTH]],  # Exclude the ./dist dir.
    # N.B. 'wholename' ensures we match against the absolute ('x/y/z') vs base path ('z').
    ['not', ['pcre', r'^\..*', 'wholename']],  # Exclude files in hidden dirs (.pants.d etc).
    ['not', ['match', '*.pyc']]  # Exclude .pyc files.
    # TODO(kwlzn): Make exclusions here optionable.
    # Related: https://github.com/pantsbuild/pants/issues/2956
  ]
  self.register_handler(name, dict(fields=['name'], expression=match_expression), callback)
"resource": ""
} |
def register_handler(self, name, metadata, callback):
  """Register subscriptions and their event handlers.

  :param str name: the subscription name as used by watchman
  :param dict metadata: a dictionary of metadata to be serialized and passed to the watchman
                        subscribe command. this should include the match expression as well
                        as any required callback fields.
  :param func callback: the callback to execute on each matching filesystem event
  """
  assert name not in self._handlers, 'duplicate handler name: {}'.format(name)
  metadata_is_valid = (
    isinstance(metadata, dict) and 'fields' in metadata and 'expression' in metadata
  )
  assert metadata_is_valid, 'invalid handler metadata!'
  self._handlers[name] = Watchman.EventHandler(name=name, metadata=metadata, callback=callback)
"resource": ""
} |
def default_subsystem_for_plugin(plugin_type):
  """Create a singleton PluginSubsystemBase subclass for the given plugin type.

  The singleton enforcement is useful in cases where dependent Tasks are installed multiple times,
  to avoid creating duplicate types which would have option scope collisions.

  :param plugin_type: A CheckstylePlugin subclass.
  :type: :class:`pants.contrib.python.checks.checker.common.CheckstylePlugin`
  :rtype: :class:`pants.contrib.python.checks.tasks.checkstyle.plugin_subsystem_base.PluginSubsystemBase`
  """
  if not issubclass(plugin_type, CheckstylePlugin):
    raise ValueError('Can only create a default plugin subsystem for subclasses of {}, given: {}'
                     .format(CheckstylePlugin, plugin_type))
  # NB: the str(...) wrapping keeps the class name and attribute keys native strings on both
  # Python 2 and Python 3.
  return type(str('{}Subsystem'.format(plugin_type.__name__)),
              (PluginSubsystemBase,),
              {
                str('options_scope'): 'pycheck-{}'.format(plugin_type.name()),
                str('plugin_type'): classmethod(lambda cls: plugin_type),
                str('register_plugin_options'): classmethod(lambda cls, register: None),
              })
"resource": ""
} |
def get_read_cache(self):
  """Returns the read cache for this setup, creating it if necessary.

  Returns None if no read cache is configured.
  """
  if self._options.read_from and not self._read_cache:
    cache_spec = self._resolve(self._sanitize_cache_spec(self._options.read_from))
    if cache_spec:
      with self._cache_setup_lock:
        # Re-check under the lock: another thread may have created the cache while we were
        # resolving the spec (the outer check alone is a racy double-checked-locking pattern).
        if not self._read_cache:
          self._read_cache = self._do_create_artifact_cache(cache_spec, 'will read from')
  return self._read_cache
"resource": ""
} |
def get_write_cache(self):
  """Returns the write cache for this setup, creating it if necessary.

  Returns None if no write cache is configured.
  """
  if self._options.write_to and not self._write_cache:
    cache_spec = self._resolve(self._sanitize_cache_spec(self._options.write_to))
    if cache_spec:
      with self._cache_setup_lock:
        # Re-check under the lock: another thread may have created the cache while we were
        # resolving the spec (the outer check alone is a racy double-checked-locking pattern).
        if not self._write_cache:
          self._write_cache = self._do_create_artifact_cache(cache_spec, 'will write to')
  return self._write_cache
"resource": ""
} |
q27448 | CacheFactory._resolve | train | def _resolve(self, spec):
"""Attempt resolving cache URIs when a remote spec is provided. """
if not spec.remote:
return spec
try:
resolved_urls = self._resolver.resolve(spec.remote)
if resolved_urls:
# keep the bar separated list of URLs convention
return CacheSpec(local=spec.local, remote='|'.join(resolved_urls))
# no-op
return spec
except Resolver.ResolverError as e:
self._log.warn('Error while resolving from {0}: {1}'.format(spec.remote, str(e)))
# If for some reason resolver fails we continue to use local cache
if spec.local:
return CacheSpec(local=spec.local, remote=None)
# resolver fails but there is no local cache
return None | python | {
"resource": ""
} |
def get_available_urls(self, urls):
  """Return reachable urls sorted by their ping times."""
  baseurl_to_urls = {self._baseurl(url): url for url in urls}
  # pings() returns a list of (host, ping time in secs) pairs.
  pingtimes = self._pinger.pings(list(baseurl_to_urls.keys()))
  self._log.debug('Artifact cache server ping times: {}'
                  .format(', '.join(['{}: {:.6f} secs'.format(*p) for p in pingtimes])))
  # Fastest first; drop anything the pinger reported as unreachable.
  available_urls = []
  for baseurl, pingtime in sorted(pingtimes, key=lambda entry: entry[1]):
    if pingtime < Pinger.UNREACHABLE:
      available_urls.append(baseurl_to_urls[baseurl])
  self._log.debug('Available cache servers: {0}'.format(available_urls))
  return available_urls
"resource": ""
} |
def _do_create_artifact_cache(self, spec, action):
  """Returns an artifact cache for the specified spec.

  spec can be:
    - a path to a file-based cache root.
    - a URL of a RESTful cache root.
    - a bar-separated list of URLs, where we'll pick the one with the best ping times.
    - A list or tuple of two specs, local, then remote, each as described above

  :param spec: The resolved CacheSpec to build a cache for.
  :param string action: A human-readable phrase ('will read from' / 'will write to') used only
    in log messages.
  """
  compression = self._options.compression_level
  if compression not in range(1, 10):
    raise ValueError('compression_level must be an integer 1-9: {}'.format(compression))
  artifact_root = self._options.pants_workdir

  def create_local_cache(parent_path):
    # Directory-backed cache rooted under `parent_path`.
    path = os.path.join(parent_path, self._cache_dirname)
    self._log.debug('{0} {1} local artifact cache at {2}'
                    .format(self._task.stable_name(), action, path))
    return LocalArtifactCache(artifact_root, path, compression,
                              self._options.max_entries_per_target,
                              permissions=self._options.write_permissions,
                              dereference=self._options.dereference_symlinks)

  def create_remote_cache(remote_spec, local_cache):
    # RESTful cache over the reachable URLs; stages artifacts through `local_cache`, falling back
    # to a temporary local cache when no persistent local cache is configured.
    # NB: returns None (implicitly) when no URLs are reachable.
    urls = self.get_available_urls(remote_spec.split('|'))

    if len(urls) > 0:
      best_url_selector = BestUrlSelector(
        ['{}/{}'.format(url.rstrip('/'), self._cache_dirname) for url in urls]
      )
      local_cache = local_cache or TempLocalArtifactCache(artifact_root, compression)
      return RESTfulArtifactCache(
        artifact_root,
        best_url_selector,
        local_cache,
        read_timeout=self._options.read_timeout,
        write_timeout=self._options.write_timeout,
      )

  local_cache = create_local_cache(spec.local) if spec.local else None
  remote_cache = create_remote_cache(spec.remote, local_cache) if spec.remote else None
  # Prefer the remote cache (which wraps the local one) when it could be constructed.
  if remote_cache:
    return remote_cache
  return local_cache
"resource": ""
} |
def resolve(self, working_set=None):
  """Resolves any configured plugins and adds them to the global working set.

  :param working_set: The working set to add the resolved plugins to instead of the global
                      working set (for testing).
  :type: :class:`pkg_resources.WorkingSet`
  """
  target_working_set = working_set or global_working_set
  if self._plugin_requirements:
    for plugin_location in self._resolve_plugin_locations():
      # Wheels must be activated (unzipped/prepared) before they can be added as entries.
      if self._is_wheel(plugin_location):
        plugin_location = self._activate_wheel(plugin_location)
      target_working_set.add_entry(plugin_location)
  return target_working_set
"resource": ""
} |
def validate_deprecation_semver(version_string, version_description):
  """Validates that version_string is a valid semver.

  If so, returns that semver. Raises an error otherwise.

  :param str version_string: A pantsbuild.pants version which affects some deprecated entity.
  :param str version_description: A string used in exception messages to describe what the
    `version_string` represents.
  :rtype: `packaging.version.Version`
  :raises DeprecationApplicationError: if the version_string parameter is invalid.
  """
  if version_string is None:
    raise MissingSemanticVersionError('The {} must be provided.'.format(version_description))
  if not isinstance(version_string, six.string_types):
    raise BadSemanticVersionError('The {} must be a version string.'.format(version_description))

  try:
    parsed = Version(version_string)
  except InvalidVersion as e:
    raise BadSemanticVersionError('The given {} {} is not a valid version: '
                                  '{}'.format(version_description, version_string, e))

  # NB: packaging will see versions like 1.a.0 as 1a0, and are "valid"
  # We explicitly want our versions to be of the form x.y.z.
  if len(parsed.base_version.split('.')) != 3:
    raise BadSemanticVersionError('The given {} is not a valid version: '
                                  '{}'.format(version_description, version_string))
  if not parsed.is_prerelease:
    raise NonDevSemanticVersionError('The given {} is not a dev version: {}\n'
                                     'Features should generally be removed in the first `dev` release '
                                     'of a release cycle.'.format(version_description, version_string))
  return parsed
"resource": ""
} |
q27453 | _get_frame_info | train | def _get_frame_info(stacklevel, context=1):
"""Get a Traceback for the given `stacklevel`.
For example:
`stacklevel=0` means this function's frame (_get_frame_info()).
`stacklevel=1` means the calling function's frame.
See https://docs.python.org/2/library/inspect.html#inspect.getouterframes for more info.
NB: If `stacklevel` is greater than the number of actual frames, the outermost frame is used
instead.
"""
frame_list = inspect.getouterframes(inspect.currentframe(), context=context)
frame_stack_index = stacklevel if stacklevel < len(frame_list) else len(frame_list) - 1
return frame_list[frame_stack_index] | python | {
"resource": ""
} |
def warn_or_error(removal_version, deprecated_entity_description, hint=None,
                  deprecation_start_version=None,
                  stacklevel=3, frame_info=None, context=1, ensure_stderr=False):
  """Check the removal_version against the current pants version.

  Issues a warning if the removal version is > current pants version, or an error otherwise.

  :param string removal_version: The pantsbuild.pants version at which the deprecated entity
                                 will be/was removed.
  :param string deprecated_entity_description: A short description of the deprecated entity, that
                                               we can embed in warning/error messages.
  :param string hint: A message describing how to migrate from the removed entity.
  :param string deprecation_start_version: The pantsbuild.pants version at which the entity will
                                           begin to display a deprecation warning. This must be less
                                           than the `removal_version`. If not provided, the
                                           deprecation warning is always displayed.
  :param int stacklevel: The stacklevel to pass to warnings.warn.
  :param FrameInfo frame_info: If provided, use this frame info instead of getting one from
                               `stacklevel`.
  :param int context: The number of lines of source code surrounding the selected frame to display
                      in a warning message.
  :param bool ensure_stderr: Whether use warnings.warn, or use warnings.showwarning to print
                             directly to stderr.
  :raises DeprecationApplicationError: if the removal_version parameter is invalid.
  :raises CodeRemovedError: if the current version is later than the version marked for removal.
  """
  removal_semver = validate_deprecation_semver(removal_version, 'removal version')
  if deprecation_start_version:
    deprecation_start_semver = validate_deprecation_semver(
      deprecation_start_version, 'deprecation start version')
    if deprecation_start_semver >= removal_semver:
      raise InvalidSemanticVersionOrderingError(
        'The deprecation start version {} must be less than the end version {}.'
        .format(deprecation_start_version, removal_version))
    elif PANTS_SEMVER < deprecation_start_semver:
      # Not yet within the deprecation window: emit nothing at all.
      return

  msg = 'DEPRECATED: {} {} removed in version {}.'.format(deprecated_entity_description,
                                                          get_deprecated_tense(removal_version), removal_version)
  if hint:
    msg += '\n {}'.format(hint)

  # We need to have filename and line_number for warnings.formatwarning, which appears to be the only
  # way to get a warning message to display to stderr. We get that from frame_info -- it's too bad
  # we have to reconstruct the `stacklevel` logic ourselves, but we do also gain the ability to have
  # multiple lines of context, which is neat.
  if frame_info is None:
    frame_info = _get_frame_info(stacklevel, context=context)
  _, filename, line_number, _, code_context, _ = frame_info
  if code_context:
    context_lines = ''.join(code_context)
  else:
    context_lines = '<no code context available>'

  if removal_semver > PANTS_SEMVER:
    if ensure_stderr:
      # No warning filters can stop us from printing this message directly to stderr.
      warning_msg = warnings.formatwarning(
        msg, DeprecationWarning, filename, line_number, line=context_lines)
      print(warning_msg, file=sys.stderr)
    else:
      # This output is filtered by warning filters.
      with _greater_warnings_context(context_lines):
        warnings.warn_explicit(
          message=DeprecationWarning(msg) if PY2 else msg,
          category=DeprecationWarning,
          filename=filename,
          lineno=line_number)
    return msg
  else:
    raise CodeRemovedError(msg)
"resource": ""
} |
def deprecated_conditional(predicate,
                           removal_version,
                           entity_description,
                           hint_message=None,
                           stacklevel=4):
  """Marks a certain configuration as deprecated.

  The predicate is used to determine if that configuration is deprecated. It is a function that
  will be called, if true, then the deprecation warning will issue.

  :param () -> bool predicate: A function that returns True if the deprecation warning should be on.
  :param string removal_version: The pants version which will remove the deprecated functionality.
  :param string entity_description: A description of the deprecated entity.
  :param string hint_message: An optional hint pointing to alternatives to the deprecation.
  :param int stacklevel: How far up in the stack do we go to find the calling fn to report
  :raises DeprecationApplicationError if the deprecation is applied improperly.
  """
  # Always validate the removal version, even when the predicate is off.
  validate_deprecation_semver(removal_version, 'removal version')
  if not predicate():
    return
  warn_or_error(removal_version, entity_description, hint_message, stacklevel=stacklevel)
"resource": ""
} |
def deprecated(removal_version, hint_message=None, subject=None, ensure_stderr=False):
  """Marks a function or method as deprecated.

  A removal version must be supplied and it must be greater than the current 'pantsbuild.pants'
  version.

  When choosing a removal version there is a natural tension between the code-base, which benefits
  from short deprecation cycles, and the user-base which may prefer to deal with deprecations less
  frequently. As a rule of thumb, if the hint message can fully convey corrective action
  succinctly and you judge the impact to be on the small side (effects custom tasks as opposed to
  effecting BUILD files), lean towards the next release version as the removal version; otherwise,
  consider initiating a discussion to win consensus on a reasonable removal version.

  :param str removal_version: The pantsbuild.pants version which will remove the deprecated
                              function.
  :param str hint_message: An optional hint pointing to alternatives to the deprecation.
  :param str subject: The name of the subject that has been deprecated for logging clarity. Defaults
                      to the name of the decorated function/method.
  :param bool ensure_stderr: Forwarded to `ensure_stderr` in warn_or_error().
  :raises DeprecationApplicationError if the @deprecation is applied improperly.
  """
  validate_deprecation_semver(removal_version, 'removal version')

  def decorator(func):
    if not inspect.isfunction(func):
      raise BadDecoratorNestingError('The @deprecated decorator must be applied innermost of all '
                                     'decorators.')

    warning_subject = subject or '{}.{}'.format(func.__module__, func.__name__)

    @wraps(func)
    def wrapper(*args, **kwargs):
      warn_or_error(removal_version, warning_subject, hint_message,
                    ensure_stderr=ensure_stderr)
      return func(*args, **kwargs)
    return wrapper
  return decorator
"resource": ""
} |
def compute_default(kwargs):
  """Compute the default value to display in help for an option registered with these kwargs."""
  ranked_default = kwargs.get('default')
  typ = kwargs.get('type', str)

  default = ranked_default.value if ranked_default else None
  if default is None:
    return 'None'

  if typ == list:
    quoted_members = ','.join("'{}'".format(member) for member in default)
    return '[{}]'.format(quoted_members)
  if typ == dict:
    if not default:
      return '{}'
    pairs = ','.join("'{}':'{}'".format(k, v) for k, v in default.items())
    return '{{ {} }}'.format(pairs)
  if typ == str:
    # Newlines would wreck the help layout; flatten them.
    return "'{}'".format(default).replace('\n', ' ')
  return str(default)
"resource": ""
} |
def compute_metavar(kwargs):
  """Compute the metavar to display in help for an option registered with these kwargs."""
  explicit_metavar = kwargs.get('metavar')
  if explicit_metavar:
    return explicit_metavar

  typ = kwargs.get('type', str)
  if typ == list:
    # For lists, the metavar describes a single member.
    typ = kwargs.get('member_type', str)
  if typ == dict:
    return '"{\'key1\':val1,\'key2\':val2,...}"'
  type_name = typ.__name__ if typ != newstr else 'str'  # TODO(#6071): drop special case once Py2 removed
  return '<{}>'.format(type_name)
"resource": ""
} |
def register_options(cls, register):
  """Register options not tied to any particular task or subsystem.

  :param register: The registration callable supplied by the options system.
  """
  # The bootstrap options need to be registered on the post-bootstrap Options instance, so it
  # won't choke on them on the command line, and also so we can access their values as regular
  # global-scope options, for convenience.
  cls.register_bootstrap_options(register)

  register('-x', '--time', type=bool,
           help='Output a timing report at the end of the run.')
  register('-e', '--explain', type=bool,
           help='Explain the execution of goals.')
  register('--tag', type=list, metavar='[+-]tag1,tag2,...',
           help="Include only targets with these tags (optional '+' prefix) or without these "
                "tags ('-' prefix). Useful with ::, to find subsets of targets "
                "(e.g., integration tests.)")

  # Toggles v1/v2 `Task` vs `@rule` pipelines on/off.
  register('--v1', advanced=True, type=bool, default=True,
           help='Enables execution of v1 Tasks.')
  register('--v2', advanced=True, type=bool, default=False,
           help='Enables execution of v2 @console_rules.')
  register('--v2-ui', default=False, type=bool, daemon=False,
           help='Whether to show v2 engine execution progress. '
                'This requires the --v2 flag to take effect.')

  # NB: the flag name is captured in a local so --loop-max's help can reference it.
  loop_flag = '--loop'
  register(loop_flag, type=bool,
           help='Run v2 @console_rules continuously as file changes are detected. Requires '
                '`--v2`, and is best utilized with `--v2 --no-v1`.')
  register('--loop-max', type=int, default=2**32, advanced=True,
           help='The maximum number of times to loop when `{}` is specified.'.format(loop_flag))

  register('-t', '--timeout', advanced=True, type=int, metavar='<seconds>',
           help='Number of seconds to wait for http connections.')
  # TODO: After moving to the new options system these abstraction leaks can go away.
  register('-k', '--kill-nailguns', advanced=True, type=bool,
           help='Kill nailguns before exiting')
  register('--fail-fast', advanced=True, type=bool, recursive=True,
           help='Exit as quickly as possible on error, rather than attempting to continue '
                'to process the non-erroneous subset of the input.')
  register('--cache-key-gen-version', advanced=True, default='200', recursive=True,
           help='The cache key generation. Bump this to invalidate every artifact for a scope.')
  register('--workdir-max-build-entries', advanced=True, type=int, default=8,
           help='Maximum number of previous builds to keep per task target pair in workdir. '
                'If set, minimum 2 will always be kept to support incremental compilation.')
  register('--max-subprocess-args', advanced=True, type=int, default=100, recursive=True,
           help='Used to limit the number of arguments passed to some subprocesses by breaking '
                'the command up into multiple invocations.')
  register('--lock', advanced=True, type=bool, default=True,
           help='Use a global lock to exclude other versions of pants from running during '
                'critical operations.')
"resource": ""
} |
def validate_instance(cls, opts):
  """Validates an instance of global options for cases that are not prohibited via registration.

  For example: mutually exclusive options may be registered by passing a `mutually_exclusive_group`,
  but when multiple flags must be specified together, it can be necessary to specify post-parse
  checks.

  Raises pants.option.errors.OptionsError on validation failure.
  """
  loop_misconfigured = opts.loop and (not opts.v2 or opts.v1)
  if loop_misconfigured:
    raise OptionsError('The --loop option only works with @console_rules, and thus requires '
                       '`--v2 --no-v1` to function as expected.')
  if opts.loop and not opts.enable_pantsd:
    raise OptionsError('The --loop option requires `--enable-pantsd`, in order to watch files.')
  if opts.v2_ui and not opts.v2:
    raise OptionsError('The --v2-ui option requires --v2 to be enabled together.')
"resource": ""
} |
def assert_single_element(iterable):
  """Get the single element of `iterable`, or raise an error.

  :raise: :class:`StopIteration` if there is no element.
  :raise: :class:`ValueError` if there is more than one element.
  """
  iterator = iter(iterable)
  only_item = next(iterator)
  # A sentinel default lets us probe for a second element without a try/except.
  sentinel = object()
  if next(iterator, sentinel) is not sentinel:
    raise ValueError("iterable {!r} has more than one element.".format(iterable))
  return only_item
"resource": ""
} |
q27462 | Checkstyle._constraints_are_whitelisted | train | def _constraints_are_whitelisted(self, constraint_tuple):
"""
Detect whether a tuple of compatibility constraints
matches constraints imposed by the merged list of the global
constraints from PythonSetup and a user-supplied whitelist.
"""
if self._acceptable_interpreter_constraints == []:
# The user wants to lint everything.
return True
return all(version.parse(constraint) in self._acceptable_interpreter_constraints
for constraint in constraint_tuple) | python | {
"resource": ""
} |
def execute(self):
  """Run Checkstyle on all found non-synthetic source files.

  :returns: The number of style failures found.
  :raises TaskError: if no valid interpreters exist for a partition, or (with --fail) if any
    style issues were found.
  """
  python_tgts = self.context.targets(
    lambda tgt: isinstance(tgt, (PythonTarget))
  )
  if not python_tgts:
    return 0
  interpreter_cache = PythonInterpreterCache.global_instance()
  with self.invalidated(self.get_targets(self._is_checked)) as invalidation_check:
    failure_count = 0
    # Partition the invalid targets by their interpreter compatibility constraints so each
    # partition is checked with an interpreter that satisfies all of its targets.
    tgts_by_compatibility, _ = interpreter_cache.partition_targets_by_compatibility(
      [vt.target for vt in invalidation_check.invalid_vts]
    )
    for filters, targets in tgts_by_compatibility.items():
      sources = self.calculate_sources([tgt for tgt in targets])
      if sources:
        allowed_interpreters = set(interpreter_cache.setup(filters=filters))
        if not allowed_interpreters:
          raise TaskError('No valid interpreters found for targets: {}\n(filters: {})'
                          .format(targets, filters))
        # Use the lowest allowed interpreter for the partition.
        interpreter = min(allowed_interpreters)
        failure_count += self.checkstyle(interpreter, sources)
    if failure_count > 0 and self.get_options().fail:
      raise TaskError('{} Python Style issues found. You may try `./pants fmt <targets>`'
                      .format(failure_count))
    return failure_count
"resource": ""
} |
def parse(cls, filepath, filecontent, parser):
  """Parses a source for addressable Serializable objects.

  No matter the parser used, the parsed and mapped addressable objects are all 'thin'; ie: any
  objects they point to in other namespaces or even in the same namespace but from a seperate
  source are left as unresolved pointers.

  :param string filepath: The path to the byte source containing serialized objects.
  :param string filecontent: The content of byte source containing serialized objects to be parsed.
  :param parser: The parser cls to use.
  :type parser: A :class:`pants.engine.parser.Parser`.
  :returns: A new AddressMap with objects keyed by name in sorted order.
  :raises MappingError: if parsing fails, or if any parsed object is non-serializable, unnamed,
    or duplicates another object's name.
  """
  try:
    objects = parser.parse(filepath, filecontent)
  except Exception as e:
    raise MappingError('Failed to parse {}:\n{}'.format(filepath, e))
  objects_by_name = {}
  for obj in objects:
    if not Serializable.is_serializable(obj):
      raise UnaddressableObjectError('Parsed a non-serializable object: {!r}'.format(obj))
    attributes = obj._asdict()

    name = attributes.get('name')
    if not name:
      raise UnaddressableObjectError('Parsed a non-addressable object: {!r}'.format(obj))

    if name in objects_by_name:
      raise DuplicateNameError('An object already exists at {!r} with name {!r}: {!r}. Cannot '
                               'map {!r}'.format(filepath, name, objects_by_name[name], obj))
    objects_by_name[name] = obj
  # Sort by name so the mapping iterates deterministically.
  return cls(filepath, OrderedDict(sorted(objects_by_name.items())))
"resource": ""
} |
def create(cls, spec_path, address_maps):
  """Creates an address family from the given set of address maps.

  :param spec_path: The directory prefix shared by all address_maps.
  :param address_maps: The family of maps that form this namespace.
  :type address_maps: :class:`collections.Iterable` of :class:`AddressMap`
  :returns: a new address family.
  :rtype: :class:`AddressFamily`
  :raises: :class:`MappingError` if the given address maps do not form a family.
  """
  if spec_path == '.':
    spec_path = ''
  # Materialize once: `address_maps` is documented as an Iterable and is traversed twice below;
  # a one-shot iterator would otherwise be exhausted by the validation pass.
  address_maps = list(address_maps)
  for address_map in address_maps:
    if not address_map.path.startswith(spec_path):
      raise DifferingFamiliesError('Expected AddressMaps to share the same parent directory {}, '
                                   'but received: {}'
                                   .format(spec_path, address_map.path))

  objects_by_name = {}
  for address_map in address_maps:
    current_path = address_map.path
    for name, obj in address_map.objects_by_name.items():
      previous = objects_by_name.get(name)
      if previous:
        previous_path, _ = previous
        raise DuplicateNameError('An object with name {name!r} is already defined in '
                                 '{previous_path!r}, will not overwrite with {obj!r} from '
                                 '{current_path!r}.'
                                 .format(name=name,
                                         previous_path=previous_path,
                                         obj=obj,
                                         current_path=current_path))
      objects_by_name[name] = (current_path, obj)
  # Sort by name so the resulting mapping iterates deterministically.
  return AddressFamily(namespace=spec_path,
                       objects_by_name=OrderedDict((name, (path, obj)) for name, (path, obj)
                                                   in sorted(objects_by_name.items())))
"resource": ""
} |
def addressables(self):
    """Return a mapping from BuildFileAddress to thin addressable objects in this namespace.

    :rtype: dict from :class:`pants.build_graph.address.BuildFileAddress` to thin addressable
            objects.
    """
    mapping = {}
    for name, (path, obj) in self.objects_by_name.items():
        mapping[BuildFileAddress(rel_path=path, target_name=name)] = obj
    return mapping
"resource": ""
} |
def compute_fingerprint(self, target):
    """UnpackedJars targets need to be re-unpacked if any of its configuration changes or any of
    the jars they import have changed.

    Returns None for any other target type.
    """
    if not isinstance(target, UnpackedJars):
        return None
    hasher = sha1()
    # Hash the imported jars in a stable order so the fingerprint is deterministic.
    for cache_key in sorted(jar.cache_key() for jar in target.all_imported_jar_deps):
        hasher.update(cache_key.encode('utf-8'))
    hasher.update(target.payload.fingerprint().encode('utf-8'))
    digest = hasher.hexdigest()
    # Under Python 2, hexdigest() is a byte string; normalize to text.
    return digest if PY3 else digest.decode('utf-8')
"resource": ""
} |
def process_remote_sources(self):
    """Create synthetic targets with populated sources from remote_sources targets."""
    # Product mapping populated by the earlier unpack task: sources_target -> UnpackedArchives.
    unpacked_sources = self.context.products.get_data(UnpackedArchives)
    remote_sources_targets = self.context.targets(predicate=lambda t: isinstance(t, RemoteSources))
    if not remote_sources_targets:
        return

    # Accumulate one entry per remote_sources target; the three lists stay index-aligned so they
    # can be zipped back together after the single batched snapshot call below.
    snapshot_specs = []
    filespecs = []
    unpack_dirs = []
    for target in remote_sources_targets:
        unpacked_archive = unpacked_sources[target.sources_target]
        sources = unpacked_archive.found_files
        rel_unpack_dir = unpacked_archive.rel_unpack_dir
        self.context.log.debug('target: {}, rel_unpack_dir: {}, sources: {}'
                               .format(target, rel_unpack_dir, sources))
        sources_in_dir = tuple(os.path.join(rel_unpack_dir, source) for source in sources)
        snapshot_specs.append(PathGlobsAndRoot(
            PathGlobs(sources_in_dir),
            get_buildroot(),
        ))
        filespecs.append({'globs': sources_in_dir})
        unpack_dirs.append(rel_unpack_dir)

    # Capture all snapshots in one engine call, then zip the results back to their targets.
    snapshots = self.context._scheduler.capture_snapshots(tuple(snapshot_specs))
    for target, snapshot, filespec, rel_unpack_dir in \
        zip(remote_sources_targets, snapshots, filespecs, unpack_dirs):
        # Synthesize a target of the destination type whose sources are the unpacked files.
        synthetic_target = self.context.add_new_target(
            address=Address(os.path.relpath(self.workdir, get_buildroot()), target.id),
            target_type=target.destination_target_type,
            dependencies=target.dependencies,
            sources=EagerFilesetWithSpec(rel_unpack_dir, filespec, snapshot),
            derived_from=target,
            **target.destination_target_args
        )
        self.context.log.debug('synthetic_target: {}'.format(synthetic_target))
        # Re-point existing dependents of the remote_sources target at the synthetic target.
        for dependent in self.context.build_graph.dependents_of(target.address):
            self.context.build_graph.inject_dependency(dependent, synthetic_target.address)
"resource": ""
} |
def per_instance(*args, **kwargs):
    """A memoized key factory that works like `equal_args` except that the first parameter's identity
    is used when forming the key.

    This is a useful key factory when you want to enforce memoization happens per-instance for an
    instance method in a class hierarchy that defines a custom `__hash__`/`__eq__`.
    """
    # Wrap the receiver (args[0]) so it is keyed by identity rather than equality.
    identity_keyed_self = InstanceKey(args[0])
    return equal_args(identity_keyed_self, *args[1:], **kwargs)
"resource": ""
} |
def memoized(func=None, key_factory=equal_args, cache_factory=dict):
    """Memoizes the results of a function call.

    By default, exactly one result is memoized for each unique combination of function arguments.

    Note that memoization is not thread-safe and the default result cache will grow without bound;
    so care must be taken to only apply this decorator to functions with single threaded access and
    an expected reasonably small set of unique call parameters.

    The wrapped function comes equipped with 3 helper function attributes:

    + `put(*args, **kwargs)`: A context manager that takes the same arguments as the memoized
                              function and yields a setter function to set the value in the
                              memoization cache.
    + `forget(*args, **kwargs)`: Takes the same arguments as the memoized function and causes the
                                 memoization cache to forget the computed value, if any, for those
                                 arguments.
    + `clear()`: Causes the memoization cache to be fully cleared.

    :API: public

    :param func: The function to wrap. Only generally passed by the python runtime and should be
                 omitted when passing a custom `key_factory` or `cache_factory`.
    :param key_factory: A function that can form a cache key from the arguments passed to the
                        wrapped, memoized function; by default uses simple parameter-set equality;
                        ie `equal_args`.
    :param cache_factory: A no-arg callable that produces a mapping object to use for the memoized
                          method's value cache. By default the `dict` constructor, but could be a
                          factory for an LRU cache for example.
    :raises: `ValueError` if the wrapper is applied to anything other than a function.
    :returns: A wrapped function that memoizes its results or else a function wrapper that does this.
    """
    if func is None:
        # Decorator-factory form (`@memoized(...)`): curry the configuration and let the python
        # decorator machinery supply the function on the next call. This trick allows both bare
        # `@memoized` and parameterized `@memoized(...)` application forms.
        return functools.partial(memoized, key_factory=key_factory, cache_factory=cache_factory)
    if not inspect.isfunction(func):
        raise ValueError('The @memoized decorator must be applied innermost of all decorators.')

    make_key = key_factory or equal_args
    cache = cache_factory() if cache_factory else {}

    @functools.wraps(func)
    def memoize(*args, **kwargs):
        key = make_key(*args, **kwargs)
        if key in cache:
            return cache[key]
        value = func(*args, **kwargs)
        cache[key] = value
        return value

    @contextmanager
    def put(*args, **kwargs):
        # Yield a setter bound to the computed key so callers can inject a value directly.
        yield functools.partial(cache.__setitem__, make_key(*args, **kwargs))

    def forget(*args, **kwargs):
        key = make_key(*args, **kwargs)
        if key in cache:
            del cache[key]

    def clear():
        cache.clear()

    memoize.put = put
    memoize.forget = forget
    memoize.clear = clear
    return memoize
"resource": ""
} |
def memoized_method(func=None, key_factory=per_instance, **kwargs):
    """A convenience wrapper for memoizing instance methods.

    Typically you'd expect a memoized instance method to hold a cached value per class instance;
    however, for classes that implement a custom `__hash__`/`__eq__` that can hash separate
    instances the same, `@memoized` will share cached values across `==` class instances. This
    wrapper is simply `memoized` with the cache keyed `per_instance`, restoring the expected
    per-instance caching behavior.

    :API: public

    :param func: The function to wrap. Only generally passed by the python runtime and should be
                 omitted when passing a custom `key_factory` or `cache_factory`.
    :param key_factory: A function that can form a cache key from the arguments passed to the
                        wrapped, memoized function; by default `per_instance`.
    :param kwargs: Any extra keyword args accepted by `memoized`.
    :raises: `ValueError` if the wrapper is applied to anything other than a function.
    :returns: A wrapped function that memoizes its results or else a function wrapper that does this.
    """
    return memoized(func=func, key_factory=key_factory, **kwargs)
"resource": ""
} |
def memoized_property(func=None, key_factory=per_instance, **kwargs):
    """A convenience wrapper for memoizing properties.

    Equivalent to stacking `@property` on top of `@memoized_method`. In addition, a deleter is
    installed that un-caches the computed value, so `del obj.prop` forces the next access to
    recompute:

    >>> import time
    >>> class Bar(object):
    ...   @memoized_property
    ...   def now(self):
    ...     return time.time()
    >>> bar = Bar()
    >>> bar.now        # computed once and cached
    >>> del bar.now    # un-caches; next access recomputes

    :API: public

    :param func: The property getter method to wrap. Only generally passed by the python runtime
                 and should be omitted when passing a custom `key_factory` or `cache_factory`.
    :param key_factory: A function that can form a cache key from the arguments passed to the
                        wrapped, memoized function; by default `per_instance`.
    :param kwargs: Any extra keyword args accepted by `memoized`.
    :raises: `ValueError` if the wrapper is applied to anything other than a function.
    :returns: A read-only property that memoizes its calculated value and un-caches its value when
              `del`ed.
    """
    memoized_getter = memoized_method(func=func, key_factory=key_factory, **kwargs)

    def _uncache(self):
        # Forget the cached value for this instance so the next access recomputes it.
        memoized_getter.forget(self)

    return property(fget=memoized_getter, fdel=_uncache)
"resource": ""
} |
def combined_options_fingerprint_for_scope(cls, scope, options,
                                           build_graph=None, **kwargs):
    """Given options and a scope, compute a combined fingerprint for the scope.

    :param string scope: The scope to fingerprint.
    :param Options options: The `Options` object to fingerprint.
    :param BuildGraph build_graph: A `BuildGraph` instance, only needed if fingerprinting
                                   target options.
    :param dict **kwargs: Keyword parameters passed on to
                          `Options#get_fingerprintable_for_scope`.
    :return: Hexadecimal string representing the fingerprint for all `options`
             values in `scope`.
    """
    fingerprinter = cls(build_graph)
    digest = sha1()
    for option_type, option_value in options.get_fingerprintable_for_scope(scope, **kwargs):
        fingerprint = fingerprinter.fingerprint(option_type, option_value)
        if fingerprint is None:
            # Historical placeholder for un-fingerprintable values; preserved for cache-key
            # stability with prior releases.
            fingerprint = 'None'
        digest.update(fingerprint.encode('utf-8'))
    return digest.hexdigest()
"resource": ""
} |
def fingerprint(self, option_type, option_val):
    """Returns a hash of the given option_val based on the option_type.

    :API: public

    Returns None if option_val is None.
    """
    if option_val is None:
        return None
    # Normalize scalars to singleton lists so single- and list-valued options share one code
    # path. Dicts are passed through unchanged: there is currently no "list of dict" option
    # type, so this exception introduces no ambiguity.
    if not isinstance(option_val, (list, tuple, dict)):
        option_val = [option_val]
    if option_type == target_option:
        return self._fingerprint_target_specs(option_val)
    if option_type == dir_option:
        return self._fingerprint_dirs(option_val)
    if option_type == file_option:
        return self._fingerprint_files(option_val)
    if option_type == dict_with_files_option:
        return self._fingerprint_dict_with_files(option_val)
    return self._fingerprint_primitives(option_val)
"resource": ""
} |
def _fingerprint_target_specs(self, specs):
    """Returns a fingerprint of the targets resolved from given target specs."""
    assert self._build_graph is not None, (
        'cannot fingerprint specs `{}` without a `BuildGraph`'.format(specs)
    )
    hasher = sha1()
    # Sort specs and resolved targets so the fingerprint is order-independent.
    for spec in sorted(specs):
        for target in sorted(self._build_graph.resolve(spec)):
            invalidation_hash = target.compute_invalidation_hash()
            # Not all targets have hashes; in particular, `Dependencies` targets don't.
            if invalidation_hash:
                hasher.update(invalidation_hash.encode('utf-8'))
    return hasher.hexdigest()
"resource": ""
} |
def _assert_in_buildroot(self, filepath):
    """Raises an error if the given filepath isn't in the buildroot.

    Returns the normalized, absolute form of the path.
    """
    filepath = os.path.normpath(filepath)
    root = get_buildroot()
    if os.path.abspath(filepath) != filepath:
        # A relative path is interpreted relative to the build root.
        return os.path.join(root, filepath)
    if '..' in os.path.relpath(filepath, root).split(os.path.sep):
        # The path escapes the buildroot. This is an error because it violates pants being
        # hermetic.
        raise ValueError('Received a file_option that was not inside the build root:\n'
                         ' file_option: {filepath}\n'
                         ' build_root: {buildroot}\n'
                         .format(filepath=filepath, buildroot=root))
    return filepath
"resource": ""
} |
def _fingerprint_dirs(self, dirpaths, topdown=True, onerror=None, followlinks=False):
    """Returns a fingerprint of the given file directories and all their sub contents.

    This assumes that the file directories are of reasonable size
    to cause memory or performance issues.
    """
    # Note that we don't sort the dirpaths, as their order may have meaning.
    collected = []
    for root_dir in dirpaths:
        # Sort walk entries by directory path, and filenames within each directory, for a
        # deterministic traversal order.
        walked = sorted(
            os.walk(root_dir, topdown=topdown, onerror=onerror, followlinks=followlinks),
            key=lambda entry: entry[0])
        for walk_root, _, filenames in walked:
            collected.extend(os.path.join(walk_root, name) for name in sorted(filenames))
    return self._fingerprint_files(collected)
"resource": ""
} |
def _fingerprint_files(self, filepaths):
    """Returns a fingerprint of the given filepaths and their contents.

    This assumes the files are small enough to be read into memory.
    """
    hasher = sha1()
    # Note that we don't sort the filepaths, as their order may have meaning.
    for path in filepaths:
        checked_path = self._assert_in_buildroot(path)
        # Hash the buildroot-relative path so the fingerprint is machine-independent.
        hasher.update(os.path.relpath(checked_path, get_buildroot()).encode('utf-8'))
        with open(checked_path, 'rb') as f:
            hasher.update(f.read())
    return hasher.hexdigest()
"resource": ""
} |
def _fingerprint_dict_with_files(self, option_val):
    """Returns a fingerprint of the given dictionary containing file paths.

    Any value which is a file path which exists on disk will be fingerprinted by that file's
    contents rather than by its path.

    This assumes the files are small enough to be read into memory.

    NB: The keys of the dict are assumed to be strings -- if they are not, the dict should be
    converted to encode its keys with `stable_option_fingerprint()`, as is done in the
    `fingerprint()` method.
    """
    expanded = {}
    for key, value in option_val.items():
        expanded[key] = self._expand_possible_file_value(value)
    return stable_option_fingerprint(expanded)
"resource": ""
} |
def _expand_possible_file_value(self, value):
    """If the value is a file, returns its contents. Otherwise return the original value."""
    if not (value and os.path.isfile(str(value))):
        return value
    with open(value, 'r') as f:
        return f.read()
"resource": ""
} |
def bootstrap_c_source(scheduler_bindings_path, output_dir, module_name=NATIVE_ENGINE_MODULE):
    """Bootstrap an external CFFI C source file.

    Writes `<module_name>.c` (the generated CFFI extension source, post-patching) and
    `<module_name>.cflags` (a shell snippet of inherited CFLAGS) into `output_dir`, only
    rewriting each file when its content actually changes.
    """
    safe_mkdir(output_dir)
    with temporary_dir() as tempdir:
        temp_output_prefix = os.path.join(tempdir, module_name)
        real_output_prefix = os.path.join(output_dir, module_name)
        temp_c_file = '{}.c'.format(temp_output_prefix)
        if PY2:
            temp_c_file = temp_c_file.encode('utf-8')
        c_file = '{}.c'.format(real_output_prefix)
        env_script = '{}.cflags'.format(real_output_prefix)

        # Preprocessor directives won't parse in the .cdef calls, so we have to hide them for now.
        scheduler_bindings_content = read_file(scheduler_bindings_path)
        scheduler_bindings = _hackily_rewrite_scheduler_bindings(scheduler_bindings_content)

        # Emit the CFFI-generated C source to a temp location first; it is patched below before
        # being published to output_dir.
        ffibuilder = cffi.FFI()
        ffibuilder.cdef(scheduler_bindings)
        ffibuilder.cdef(_FFISpecification.format_cffi_externs())
        ffibuilder.set_source(module_name, scheduler_bindings)
        ffibuilder.emit_c_code(temp_c_file)

        # Work around https://github.com/rust-lang/rust/issues/36342 by renaming initnative_engine to
        # wrapped_initnative_engine so that the rust code can define the symbol initnative_engine.
        #
        # If we don't do this, we end up at the mercy of the implementation details of rust's stripping
        # and LTO. In the past we have found ways to trick it into not stripping symbols which was handy
        # (it kept the binary working) but inconvenient (it was relying on unspecified behavior, it meant
        # our binaries couldn't be stripped which inflated them by 2~3x, and it reduced the amount of LTO
        # we could use, which led to unmeasured performance hits).
        #
        # We additionally remove the ifdefs that apply conditional `init` logic for Py2 vs Py3, in order
        # to define a module that is loadable by either 2 or 3.
        # TODO: Because PyPy uses the same `init` function name regardless of the python version, this
        # trick does not work there: we leave its conditional in place.
        file_content = read_file(temp_c_file)
        if CFFI_C_PATCH_BEFORE not in file_content:
            raise Exception('The patch for the CFFI generated code will not apply cleanly.')
        file_content = file_content.replace(CFFI_C_PATCH_BEFORE, CFFI_C_PATCH_AFTER)

        # Extract the preprocessor directives we had to hide to get the .cdef call to parse.
        file_content = _hackily_recreate_includes_for_bindings(file_content)

        _replace_file(c_file, file_content)
        # Write a shell script to be sourced at build time that contains inherited CFLAGS.
        _replace_file(env_script, get_build_cflags())
"resource": ""
} |
def _replace_file(path, content):
    """Writes a file if it doesn't already exist with the same content.

    This is useful because cargo uses timestamps to decide whether to compile things."""
    if os.path.exists(path):
        with open(path, 'r') as existing:
            if existing.read() == content:
                # Leave the mtime untouched so downstream timestamp checks see no change.
                print("Not overwriting {} because it is unchanged".format(path), file=sys.stderr)
                return
    with open(path, 'w') as out:
        out.write(content)
"resource": ""
} |
def _extern_decl(return_type, arg_types):
    """A decorator for methods corresponding to extern functions. All types should be strings.

    The _FFISpecification class is able to automatically convert these into method declarations
    for cffi.
    """
    def attach_signature(func):
        # Record the C-level signature on the function itself for later collection by
        # `_FFISpecification.format_cffi_externs()`.
        func.extern_signature = _ExternSignature(
            return_type=str(return_type),
            method_name=str(func.__name__),
            arg_types=tuple(arg_types))
        return func
    return attach_signature
"resource": ""
} |
def format_cffi_externs(cls):
    """Generate stubs for the cffi bindings from @_extern_decl methods."""
    extern_decls = [
        field.extern_signature.pretty_print()
        for field in cls._extern_fields.values()
    ]
    return 'extern "Python" {\n' + '\n'.join(extern_decls) + '\n}\n'
"resource": ""
} |
def extern_get_type_for(self, context_handle, val):
    """Return a representation of the object's type."""
    context = self._ffi.from_handle(context_handle)
    obj = self._ffi.from_handle(val[0])
    return TypeId(context.to_id(type(obj)))
"resource": ""
} |
def extern_identify(self, context_handle, val):
    """Return a representation of the object's identity, including a hash and TypeId.

    `extern_get_type_for()` also returns a TypeId, but doesn't hash the object -- that keeps it
    usable on unhashable objects. `extern_identify()` returns a TypeId as well so that interning
    a Python object in interning.rs, which needs both the hash and the type, costs a single
    Python call instead of two.
    """
    context = self._ffi.from_handle(context_handle)
    return context.identify(self._ffi.from_handle(val[0]))
"resource": ""
} |
def extern_clone_val(self, context_handle, val):
    """Clone the given Handle."""
    context = self._ffi.from_handle(context_handle)
    unwrapped = self._ffi.from_handle(val[0])
    return context.to_value(unwrapped)
"resource": ""
} |
def extern_drop_handles(self, context_handle, handles_ptr, handles_len):
    """Drop the given Handles."""
    context = self._ffi.from_handle(context_handle)
    context.drop_handles(self._ffi.unpack(handles_ptr, handles_len))
"resource": ""
} |
def extern_store_tuple(self, context_handle, vals_ptr, vals_len):
    """Given storage and an array of Handles, return a new Handle to represent the list."""
    context = self._ffi.from_handle(context_handle)
    unwrapped = tuple(context.from_value(val[0]) for val in self._ffi.unpack(vals_ptr, vals_len))
    return context.to_value(unwrapped)
"resource": ""
} |
def extern_store_set(self, context_handle, vals_ptr, vals_len):
    """Given storage and an array of Handles, return a new Handle to represent the set."""
    context = self._ffi.from_handle(context_handle)
    members = (context.from_value(val[0]) for val in self._ffi.unpack(vals_ptr, vals_len))
    # OrderedSet preserves insertion order, keeping the result deterministic.
    return context.to_value(OrderedSet(members))
"resource": ""
} |
def extern_store_dict(self, context_handle, vals_ptr, vals_len):
    """Given storage and an array of Handles, return a new Handle to represent the dict.

    Array of handles alternates keys and values (i.e. key0, value0, key1, value1, ...).

    It is assumed that an even number of values were passed.
    """
    context = self._ffi.from_handle(context_handle)
    flat = [context.from_value(val[0]) for val in self._ffi.unpack(vals_ptr, vals_len)]
    # Pair up alternating keys (even indices) and values (odd indices).
    return context.to_value(dict(zip(flat[0::2], flat[1::2])))
"resource": ""
} |
def extern_store_bytes(self, context_handle, bytes_ptr, bytes_len):
    """Given a context and raw bytes, return a new Handle to represent the content."""
    context = self._ffi.from_handle(context_handle)
    raw = binary_type(self._ffi.buffer(bytes_ptr, bytes_len))
    return context.to_value(raw)
"resource": ""
} |
def extern_store_utf8(self, context_handle, utf8_ptr, utf8_len):
    """Given a context and UTF8 bytes, return a new Handle to represent the content."""
    context = self._ffi.from_handle(context_handle)
    decoded = self._ffi.string(utf8_ptr, utf8_len).decode('utf-8')
    return context.to_value(decoded)
"resource": ""
} |
def extern_store_i64(self, context_handle, i64):
    """Given a context and an int64_t, return a new Handle to represent the integer."""
    context = self._ffi.from_handle(context_handle)
    return context.to_value(i64)
"resource": ""
} |
def extern_store_f64(self, context_handle, f64):
    """Given a context and double, return a new Handle to represent the double."""
    context = self._ffi.from_handle(context_handle)
    return context.to_value(f64)
"resource": ""
} |
def extern_store_bool(self, context_handle, b):
    """Given a context and _Bool, return a new Handle to represent the _Bool."""
    context = self._ffi.from_handle(context_handle)
    return context.to_value(b)
"resource": ""
} |
def extern_project_ignoring_type(self, context_handle, val, field_str_ptr, field_str_len):
    """Given a Handle for `obj`, and a field name, project the field as a new Handle."""
    context = self._ffi.from_handle(context_handle)
    source = context.from_value(val[0])
    attr_name = self.to_py_str(field_str_ptr, field_str_len)
    return context.to_value(getattr(source, attr_name))
"resource": ""
} |
def extern_project_multi(self, context_handle, val, field_str_ptr, field_str_len):
    """Given a Key for `obj`, and a field name, project the field as a list of Keys."""
    context = self._ffi.from_handle(context_handle)
    source = context.from_value(val[0])
    attr_name = self.to_py_str(field_str_ptr, field_str_len)
    return context.vals_buf(tuple(context.to_value(item) for item in getattr(source, attr_name)))
"resource": ""
} |
def extern_create_exception(self, context_handle, msg_ptr, msg_len):
    """Given a utf8 message string, create an Exception object."""
    context = self._ffi.from_handle(context_handle)
    message = self.to_py_str(msg_ptr, msg_len)
    return context.to_value(Exception(message))
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.