sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def unreserve_resources(role):
    """ Unreserves all the resources for all the slaves for the role.

    :param role: the role whose reservations should be released on every agent
    :type role: str
    :return: True when every agent was unreserved successfully, False otherwise
    :rtype: bool
    """
    state = dcos_agents_state()
    if not state or 'slaves' not in state:
        return False
    # Attempt every agent (no short-circuit) so one failure doesn't skip the rest.
    results = [unreserve_resource(agent, role) for agent in state['slaves']]
    return all(results)
def wait_for_service_endpoint(service_name, timeout_sec=120):
    """Checks the service url if available it returns true, on expiration
    it returns false

    :param service_name: name of the service whose endpoint is polled
    :type service_name: str
    :param timeout_sec: seconds to keep polling before giving up
    :type timeout_sec: int
    """
    # Require one consecutive success per master so the endpoint is
    # confirmed from every master before declaring it available.
    consecutive_successes = len(get_all_masters())
    return time_wait(
        lambda: service_available_predicate(service_name),
        timeout_seconds=timeout_sec,
        required_consecutive_success_count=consecutive_successes)
def task_states_predicate(service_name, expected_task_count, expected_task_states):
    """ Returns whether the provided service_names's tasks have expected_task_count tasks
    in any of expected_task_states. For example, if service 'foo' has 5 tasks which are
    TASK_STAGING or TASK_RUNNING.
    :param service_name: the service name
    :type service_name: str
    :param expected_task_count: the number of tasks which should have an expected state
    :type expected_task_count: int
    :param expected_task_states: the list states to search for among the service's tasks
    :type expected_task_states: [str]
    :return: True if expected_task_count tasks have any of expected_task_states, False otherwise
    :rtype: bool
    """
    # Connectivity errors are treated as "no tasks found" rather than raised,
    # so this predicate can be safely polled in a wait loop.
    try:
        tasks = get_service_tasks(service_name)
    except (DCOSConnectionError, DCOSHTTPException):
        tasks = []
    matching_tasks = []
    other_tasks = []
    for task in tasks:
        task_name = task.get('name', 'UNKNOWN_NAME')
        task_state = task.get('state', None)
        if task_state and task_state in expected_task_states:
            matching_tasks.append(task_name)
        else:
            other_tasks.append('{}={}'.format(task_name, task_state))
    print('expected {} tasks in {}:\n- {} in expected {}: {}\n- {} in other states: {}'.format(
        expected_task_count, ', '.join(expected_task_states),
        len(matching_tasks), ', '.join(expected_task_states), ', '.join(matching_tasks),
        len(other_tasks), ', '.join(other_tasks)))
    return len(matching_tasks) >= expected_task_count
def wait_for_service_tasks_state(
        service_name,
        expected_task_count,
        expected_task_states,
        timeout_sec=120
):
    """ Returns once the service has at least N tasks in one of the specified state(s)
    :param service_name: the service name
    :type service_name: str
    :param expected_task_count: the expected number of tasks in the specified state(s)
    :type expected_task_count: int
    :param expected_task_states: the expected state(s) for tasks to be in, e.g. 'TASK_RUNNING'
    :type expected_task_states: [str]
    :param timeout_sec: duration to wait
    :type timeout_sec: int
    :return: the duration waited in seconds
    :rtype: int
    """
    def _enough_tasks_in_state():
        # Re-evaluated on each poll by time_wait.
        return task_states_predicate(
            service_name, expected_task_count, expected_task_states)
    return time_wait(_enough_tasks_in_state, timeout_seconds=timeout_sec)
def tasks_all_replaced_predicate(
        service_name,
        old_task_ids,
        task_predicate=None
):
    """ Returns whether ALL of old_task_ids have been replaced with new tasks
    :param service_name: the service name
    :type service_name: str
    :param old_task_ids: list of original task ids as returned by get_service_task_ids
    :type old_task_ids: [str]
    :param task_predicate: filter to use when searching for tasks
    :type task_predicate: func
    :return: True if none of old_task_ids are still present in the service
    :rtype: bool
    """
    # HTTP failures are treated as "no tasks visible" so the predicate can be
    # polled in a wait loop without raising.
    try:
        task_ids = get_service_task_ids(service_name, task_predicate)
    except DCOSHTTPException:
        print('failed to get task ids for service {}'.format(service_name))
        task_ids = []
    print('waiting for all task ids in "{}" to change:\n- old tasks: {}\n- current tasks: {}'.format(
        service_name, old_task_ids, task_ids))
    # Fixed: loop variable no longer shadows the builtin `id`.
    # Any surviving old task means replacement isn't complete.
    if any(task_id in old_task_ids for task_id in task_ids):
        return False
    # New tasks haven't fully replaced old tasks yet.
    return len(task_ids) >= len(old_task_ids)
def tasks_missing_predicate(
        service_name,
        old_task_ids,
        task_predicate=None
):
    """ Returns whether any of old_task_ids are no longer present
    :param service_name: the service name
    :type service_name: str
    :param old_task_ids: list of original task ids as returned by get_service_task_ids
    :type old_task_ids: [str]
    :param task_predicate: filter to use when searching for tasks
    :type task_predicate: func
    :return: True if any of old_task_ids are no longer present in the service
    :rtype: bool
    """
    # HTTP failures are treated as "no tasks visible" so the predicate can be
    # polled in a wait loop without raising.
    try:
        task_ids = get_service_task_ids(service_name, task_predicate)
    except DCOSHTTPException:
        print('failed to get task ids for service {}'.format(service_name))
        task_ids = []
    print('checking whether old tasks in "{}" are missing:\n- old tasks: {}\n- current tasks: {}'.format(
        service_name, old_task_ids, task_ids))
    # Fixed: loop variable no longer shadows the builtin `id`.
    # True as soon as any old task has disappeared from the current set.
    return any(old_task_id not in task_ids for old_task_id in old_task_ids)
def wait_for_service_tasks_all_changed(
        service_name,
        old_task_ids,
        task_predicate=None,
        timeout_sec=120
):
    """ Returns once ALL of old_task_ids have been replaced with new tasks
    :param service_name: the service name
    :type service_name: str
    :param old_task_ids: list of original task ids as returned by get_service_task_ids
    :type old_task_ids: [str]
    :param task_predicate: filter to use when searching for tasks
    :type task_predicate: func
    :param timeout_sec: duration to wait
    :type timeout_sec: int
    :return: the duration waited in seconds
    :rtype: int
    """
    def _all_replaced():
        # Re-evaluated on each poll by time_wait.
        return tasks_all_replaced_predicate(service_name, old_task_ids, task_predicate)
    return time_wait(_all_replaced, timeout_seconds=timeout_sec)
def wait_for_service_tasks_all_unchanged(
        service_name,
        old_task_ids,
        task_predicate=None,
        timeout_sec=30
):
    """ Returns after verifying that NONE of old_task_ids have been removed or replaced from the service
    :param service_name: the service name
    :type service_name: str
    :param old_task_ids: list of original task ids as returned by get_service_task_ids
    :type old_task_ids: [str]
    :param task_predicate: filter to use when searching for tasks
    :type task_predicate: func
    :param timeout_sec: duration to wait until assuming tasks are unchanged
    :type timeout_sec: int
    :return: the duration waited in seconds (the timeout value)
    :rtype: int
    """
    # Success here is *inverted*: we WANT time_wait to expire. If the
    # missing-tasks predicate ever becomes true within the window, a task
    # changed, which is a failure for this helper.
    task_went_missing = False
    try:
        time_wait(
            lambda: tasks_missing_predicate(service_name, old_task_ids, task_predicate),
            timeout_seconds=timeout_sec)
        task_went_missing = True
    except TimeoutExpired:
        pass  # no changes occurred within timeout, as expected
    if task_went_missing:
        raise DCOSException("One or more of the following tasks were no longer found: {}".format(old_task_ids))
    return timeout_sec
def docker_version(host=None, component='server'):
    """ Return the version of Docker [Server]
    :param host: host or IP of the machine Docker is running on
    :type host: str
    :param component: Docker component
    :type component: str
    :return: Docker version
    :rtype: str
    """
    # Normalize to the exact capitalization the Docker template expects;
    # anything other than 'client' falls back to 'Server'.
    component = 'Client' if component.lower() == 'client' else 'Server'
    # sudo is required for non-coreOS installs
    command = 'sudo docker version -f {{.{}.Version}}'.format(component)
    if host is None:
        success, output = shakedown.run_command_on_master(command, None, None, False)
    else:
        success, output = shakedown.run_command_on_host(host, command, None, None, False)
    return output if success else 'unknown'
def create_docker_credentials_file(
        username,
        password,
        file_name='docker.tar.gz'):
    """ Create a docker credentials file.
    Docker username and password are used to create a `{file_name}`
    with `.docker/config.json` containing the credentials.
    :param username: docker username
    :type username: str
    :param password: docker password
    :type password: str
    :param file_name: credentials file name `docker.tar.gz` by default
    :type file_name: str
    """
    import base64
    # Docker's config.json stores credentials as base64("username:password").
    auth_hash = base64.b64encode(
        '{}:{}'.format(username, password).encode()).decode()
    config_json = {
        "auths": {
            "https://index.docker.io/v1/": {"auth": auth_hash}
        }
    }
    config_json_filename = 'config.json'
    # Write config.json to file
    with open(config_json_filename, 'w') as f:
        json.dump(config_json, f, indent=4)
    try:
        # Create a docker.tar.gz; the `with` block closes the archive, so the
        # previous redundant tar.close() call was removed.
        import tarfile
        with tarfile.open(file_name, 'w:gz') as tar:
            tar.add(config_json_filename, arcname='.docker/config.json')
    except Exception:
        # Fixed typo in the error message ("credentils"); bare `raise`
        # preserves the original traceback instead of re-raising `e`.
        print('Failed to create a docker credentials file')
        raise
    finally:
        # Always remove the temporary plaintext config.json, success or failure.
        os.remove(config_json_filename)
def distribute_docker_credentials_to_private_agents(
        username,
        password,
        file_name='docker.tar.gz'):
    """ Create and distributes a docker credentials file to all private agents
    :param username: docker username
    :type username: str
    :param password: docker password
    :type password: str
    :param file_name: credentials file name `docker.tar.gz` by default
    :type file_name: str
    """
    # Build the credentials archive locally first...
    create_docker_credentials_file(username, password, file_name)
    try:
        # ...then push it out to the private agents (helper defined elsewhere
        # in this module; note file_name is not passed to it — presumably it
        # uses the default archive name; TODO confirm).
        __distribute_docker_credentials_file()
    finally:
        # Always clean up the local archive, even if distribution fails.
        os.remove(file_name)
def prefetch_docker_image_on_private_agents(
        image,
        timeout=timedelta(minutes=5).total_seconds()):
    """ Given a docker image, an app using the image is scaled across the
    private agents to ensure that the image is prefetched to all nodes.
    :param image: docker image name
    :type image: str
    :param timeout: timeout for deployment wait in secs (default: 5m)
    :type timeout: int
    """
    # One instance per private agent forces the image pull on every node.
    agent_count = len(shakedown.get_private_agents())
    app_def = {
        "id": "/prefetch",
        "instances": agent_count,
        "container": {
            "type": "DOCKER",
            "docker": {"image": image}
        },
        "cpus": 0.1,
        "mem": 128
    }
    client = marathon.create_client()
    client.add_app(app_def)
    shakedown.deployment_wait(timeout)
    # The app exists only to pull the image; tear it down again.
    shakedown.delete_all_apps()
    shakedown.deployment_wait(timeout)
def _get_options(options_file=None):
""" Read in options_file as JSON.
:param options_file: filename to return
:type options_file: str
:return: options as dictionary
:rtype: dict
"""
if options_file is not None:
with open(options_file, 'r') as opt_file:
options = json.loads(opt_file.read())
else:
options = {}
return options | Read in options_file as JSON.
:param options_file: filename to return
:type options_file: str
:return: options as dictionary
:rtype: dict | entailment |
def install_package(
        package_name,
        package_version=None,
        service_name=None,
        options_file=None,
        options_json=None,
        wait_for_completion=False,
        timeout_sec=600,
        expected_running_tasks=0
):
    """ Install a package via the DC/OS library
    :param package_name: name of the package
    :type package_name: str
    :param package_version: version of the package (defaults to latest)
    :type package_version: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param options_file: filename that has options to use and is JSON format
    :type options_file: str
    :param options_json: dict that has options to use and is JSON format
    :type options_json: dict
    :param wait_for_completion: whether or not to wait for the app's deployment to complete
    :type wait_for_completion: bool
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    :param expected_running_tasks: number of service tasks to check for, or zero to disable
    :type expected_running_tasks: int
    :return: True if installation was successful, False otherwise
    :rtype: bool
    """
    start = time.time()
    # options_file takes precedence over options_json when both are supplied
    if options_file:
        options = _get_options(options_file)
    elif options_json:
        options = options_json
    else:
        options = {}
    package_manager = _get_package_manager()
    pkg = package_manager.get_package_version(package_name, package_version)
    if package_version is None:
        # Get the resolved version for logging below
        package_version = 'auto:{}'.format(pkg.version())
    if service_name is None:
        # Get the service name from the marathon template
        try:
            # BUGFIX: .get('labels') may return None; `or {}` guards against a
            # TypeError on the membership test, which the DCOSException
            # handler below would not have caught.
            labels = pkg.marathon_json(options).get('labels') or {}
            if 'DCOS_SERVICE_NAME' in labels:
                service_name = labels['DCOS_SERVICE_NAME']
        except errors.DCOSException:
            # Best-effort lookup: fall through with service_name unset.
            pass
    print('\n{}installing {} with service={} version={} options={}'.format(
        shakedown.cli.helpers.fchr('>>'), package_name, service_name, package_version, options))
    try:
        # Print pre-install notes to console log
        pre_install_notes = pkg.package_json().get('preInstallNotes')
        if pre_install_notes:
            print(pre_install_notes)
        package_manager.install_app(pkg, options, service_name)
        # Print post-install notes to console log
        post_install_notes = pkg.package_json().get('postInstallNotes')
        if post_install_notes:
            print(post_install_notes)
        # Optionally wait for the app's deployment to finish
        if wait_for_completion:
            print("\n{}waiting for {} deployment to complete...".format(
                shakedown.cli.helpers.fchr('>>'), service_name))
            if expected_running_tasks > 0 and service_name is not None:
                wait_for_service_tasks_running(service_name, expected_running_tasks, timeout_sec)
            app_id = pkg.marathon_json(options).get('id')
            shakedown.deployment_wait(timeout_sec, app_id)
            print('\n{}install completed after {}\n'.format(
                shakedown.cli.helpers.fchr('>>'), pretty_duration(time.time() - start)))
        else:
            print('\n{}install started after {}\n'.format(
                shakedown.cli.helpers.fchr('>>'), pretty_duration(time.time() - start)))
    except errors.DCOSException as e:
        # Log the failure but continue; CLI subcommands may still install below.
        print('\n{}{}'.format(
            shakedown.cli.helpers.fchr('>>'), e))
    # Install subcommands (if defined)
    if pkg.cli_definition():
        print("{}installing CLI commands for package '{}'".format(
            shakedown.cli.helpers.fchr('>>'), package_name))
        subcommand.install(pkg)
    return True
def install_package_and_wait(
        package_name,
        package_version=None,
        service_name=None,
        options_file=None,
        options_json=None,
        wait_for_completion=True,
        timeout_sec=600,
        expected_running_tasks=0
):
    """ Install a package via the DC/OS library and wait for completion.
    Thin wrapper around install_package with wait_for_completion defaulting
    to True; see install_package for parameter details.
    """
    return install_package(
        package_name,
        package_version=package_version,
        service_name=service_name,
        options_file=options_file,
        options_json=options_json,
        wait_for_completion=wait_for_completion,
        timeout_sec=timeout_sec,
        expected_running_tasks=expected_running_tasks
    )
def package_installed(package_name, service_name=None):
    """ Check whether the package package_name is currently installed.
    :param package_name: package name
    :type package_name: str
    :param service_name: service_name
    :type service_name: str
    :return: True if installed, False otherwise
    :rtype: bool
    """
    package_manager = _get_package_manager()
    app_installed = len(package_manager.installed_apps(package_name, service_name)) > 0
    # any() short-circuits on the first matching subcommand; the original
    # loop kept scanning (and parsing package_json) for every remaining entry.
    subcommand_installed = any(
        subcmd.package_json()['name'] == package_name
        for subcmd in package.installed_subcommands())
    return app_installed or subcommand_installed
def uninstall_package(
        package_name,
        service_name=None,
        all_instances=False,
        wait_for_completion=False,
        timeout_sec=600
):
    """ Uninstall a package using the DC/OS library.
    :param package_name: name of the package
    :type package_name: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param all_instances: uninstall all instances of package
    :type all_instances: bool
    :param wait_for_completion: whether or not to wait for task completion before returning
    :type wait_for_completion: bool
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    :return: True if uninstall was successful, False otherwise
    :rtype: bool
    """
    package_manager = _get_package_manager()
    pkg = package_manager.get_package_version(package_name, None)
    try:
        if service_name is None:
            # Resolve the service name from the package metadata.
            service_name = _get_service_name(package_name, pkg)
        print("{}uninstalling package '{}' with service name '{}'\n".format(
            shakedown.cli.helpers.fchr('>>'), package_name, service_name))
        package_manager.uninstall_app(package_name, all_instances, service_name)
        if wait_for_completion:
            # Optionally wait for the service to unregister as a framework
            wait_for_mesos_task_removal(service_name, timeout_sec=timeout_sec)
    except errors.DCOSException as e:
        # Log and continue: CLI subcommands are still removed below.
        print('\n{}{}'.format(
            shakedown.cli.helpers.fchr('>>'), e))
    if pkg.cli_definition():
        # Uninstall subcommands (if defined)
        print("{}uninstalling CLI commands for package '{}'".format(
            shakedown.cli.helpers.fchr('>>'), package_name))
        subcommand.uninstall(package_name)
    return True
def uninstall_package_and_wait(
        package_name,
        service_name=None,
        all_instances=False,
        wait_for_completion=True,
        timeout_sec=600
):
    """ Uninstall a package via the DC/OS library and wait for completion.
    Thin wrapper around uninstall_package with wait_for_completion defaulting
    to True.
    :param package_name: name of the package
    :type package_name: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param all_instances: uninstall all instances of package
    :type all_instances: bool
    :param wait_for_completion: whether or not to wait for task completion before returning
    :type wait_for_completion: bool
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    :return: True if uninstall was successful, False otherwise
    :rtype: bool
    """
    return uninstall_package(
        package_name,
        service_name=service_name,
        all_instances=all_instances,
        wait_for_completion=wait_for_completion,
        timeout_sec=timeout_sec
    )
def uninstall_package_and_data(
        package_name,
        service_name=None,
        role=None,
        principal=None,
        zk_node=None,
        timeout_sec=600):
    """ Uninstall a package via the DC/OS library, wait for completion, and delete any persistent data
    :param package_name: name of the package
    :type package_name: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param role: role to use when deleting data, or <service_name>-role if unset
    :type role: str, or None
    :param principal: principal credential; NOTE(review): only checked for
        presence below, never actually used or defaulted — confirm whether
        delete_persistent_data should receive it
    :type principal: str, or None
    :param zk_node: zk node to delete, or dcos-service-<service_name> if unset
    :type zk_node: str, or None
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    """
    start = time.time()
    if service_name is None:
        # Resolve the service name from the package metadata.
        pkg = _get_package_manager().get_package_version(package_name, None)
        service_name = _get_service_name(package_name, pkg)
    print('\n{}uninstalling/deleting {}'.format(shakedown.cli.helpers.fchr('>>'), service_name))
    try:
        uninstall_package_and_wait(package_name, service_name=service_name, timeout_sec=timeout_sec)
    except (errors.DCOSException, ValueError) as e:
        # Uninstall failures are tolerated so data cleanup below still runs.
        print('Got exception when uninstalling package, ' +
              'continuing with janitor anyway: {}'.format(e))
    data_start = time.time()
    # NOTE(review): service_name is always set by this point (resolved above
    # when None), so this guard appears unreachable — confirm before removing.
    if (not role or not principal or not zk_node) and service_name is None:
        raise DCOSException('service_name must be provided when data params are missing AND the package isn\'t installed')
    if not role:
        role = '{}-role'.format(service_name)
    if not zk_node:
        zk_node = 'dcos-service-{}'.format(service_name)
    delete_persistent_data(role, zk_node)
    finish = time.time()
    print('\n{}uninstall/delete done after pkg({}) + data({}) = total({})\n'.format(
        shakedown.cli.helpers.fchr('>>'),
        pretty_duration(data_start - start),
        pretty_duration(finish - data_start),
        pretty_duration(finish - start)))
def add_package_repo(
        repo_name,
        repo_url,
        index=None,
        wait_for_package=None,
        expect_prev_version=None):
    """ Add a repository to the list of package sources
    :param repo_name: name of the repository to add
    :type repo_name: str
    :param repo_url: location of the repository to add
    :type repo_url: str
    :param index: index (precedence) for this repository
    :type index: int
    :param wait_for_package: the package whose version should change after the repo is added
    :type wait_for_package: str, or None
    :param expect_prev_version: accepted but currently unused
    :return: True if successful, False otherwise
    :rtype: bool
    """
    package_manager = _get_package_manager()
    prev_version = None
    if wait_for_package:
        # Snapshot the current version so we can detect the change later.
        prev_version = package_manager.get_package_version(wait_for_package, None)
    if not package_manager.add_repo(repo_name, repo_url, index):
        return False
    if not wait_for_package:
        return True
    try:
        spinner.time_wait(
            lambda: package_version_changed_predicate(
                package_manager, wait_for_package, prev_version))
    except TimeoutExpired:
        return False
    return True
def remove_package_repo(repo_name, wait_for_package=None):
    """ Remove a repository from the list of package sources
    :param repo_name: name of the repository to remove
    :type repo_name: str
    :param wait_for_package: the package whose version should change after the repo is removed
    :type wait_for_package: str, or None
    :returns: True if successful, False otherwise
    :rtype: bool
    """
    package_manager = _get_package_manager()
    prev_version = None
    if wait_for_package:
        # Snapshot the current version so we can detect the change later.
        prev_version = package_manager.get_package_version(wait_for_package, None)
    if not package_manager.remove_repo(repo_name):
        return False
    if not wait_for_package:
        return True
    try:
        spinner.time_wait(
            lambda: package_version_changed_predicate(
                package_manager, wait_for_package, prev_version))
    except TimeoutExpired:
        return False
    return True
def get_package_versions(package_name):
    """ Returns the list of versions of a given package
    :param package_name: name of the package
    :type package_name: str
    :return: the available versions for the package
    """
    manager = _get_package_manager()
    pkg = manager.get_package_version(package_name, None)
    return pkg.package_versions()
def cli(**args):
""" Shakedown is a DC/OS test-harness wrapper for the pytest tool.
"""
import shakedown
# Read configuration options from ~/.shakedown (if exists)
args = read_config(args)
# Set configuration defaults
args = set_config_defaults(args)
if args['quiet']:
shakedown.cli.quiet = True
if not args['dcos_url']:
try:
args['dcos_url'] = dcos_url()
except:
click.secho('error: cluster URL not set, use --dcos-url or see --help for more information.', fg='red', bold=True)
sys.exit(1)
if not args['dcos_url']:
click.secho('error: --dcos-url is a required option; see --help for more information.', fg='red', bold=True)
sys.exit(1)
if args['ssh_key_file']:
shakedown.cli.ssh_key_file = args['ssh_key_file']
if args['ssh_user']:
shakedown.cli.ssh_user = args['ssh_user']
if not args['no_banner']:
echo(banner(), n=False)
echo('Running pre-flight checks...', d='step-maj')
# required modules and their 'version' method
imported = {}
requirements = {
'pytest': '__version__',
'dcos': 'version'
}
for req in requirements:
ver = requirements[req]
echo("Checking for {} library...".format(req), d='step-min', n=False)
try:
imported[req] = importlib.import_module(req, package=None)
except ImportError:
click.secho("error: {p} is not installed; run 'pip install {p}'.".format(p=req), fg='red', bold=True)
sys.exit(1)
echo(getattr(imported[req], requirements[req]))
if shakedown.attach_cluster(args['dcos_url']):
echo('Checking DC/OS cluster version...', d='step-min', n=False)
echo(shakedown.dcos_version())
else:
with imported['dcos'].cluster.setup_directory() as temp_path:
imported['dcos'].cluster.set_attached(temp_path)
imported['dcos'].config.set_val('core.dcos_url', args['dcos_url'])
if args['ssl_no_verify']:
imported['dcos'].config.set_val('core.ssl_verify', 'False')
try:
imported['dcos'].cluster.setup_cluster_config(args['dcos_url'], temp_path, False)
except:
echo('Authenticating with DC/OS cluster...', d='step-min')
authenticated = False
token = imported['dcos'].config.get_config_val("core.dcos_acs_token")
if token is not None:
echo('trying existing ACS token...', d='step-min', n=False)
try:
shakedown.dcos_leader()
authenticated = True
echo(fchr('PP'), d='pass')
except imported['dcos'].errors.DCOSException:
echo(fchr('FF'), d='fail')
if not authenticated and args['oauth_token']:
try:
echo('trying OAuth token...', d='item-maj', n=False)
token = shakedown.authenticate_oauth(args['oauth_token'])
with stdchannel_redirected(sys.stderr, os.devnull):
imported['dcos'].config.set_val('core.dcos_acs_token', token)
authenticated = True
echo(fchr('PP'), d='pass')
except:
echo(fchr('FF'), d='fail')
if not authenticated and args['username'] and args['password']:
try:
echo('trying username and password...', d='item-maj', n=False)
token = shakedown.authenticate(args['username'], args['password'])
with stdchannel_redirected(sys.stderr, os.devnull):
imported['dcos'].config.set_val('core.dcos_acs_token', token)
authenticated = True
echo(fchr('PP'), d='pass')
except:
echo(fchr('FF'), d='fail')
if authenticated:
imported['dcos'].cluster.setup_cluster_config(args['dcos_url'], temp_path, False)
echo('Checking DC/OS cluster version...', d='step-min', n=False)
echo(shakedown.dcos_version())
else:
click.secho("error: no authentication credentials or token found.", fg='red', bold=True)
sys.exit(1)
class shakedown:
""" This encapsulates a PyTest wrapper plugin
"""
state = {}
stdout = []
tests = {
'file': {},
'test': {}
}
report_stats = {
'passed':[],
'skipped':[],
'failed':[],
'total_passed':0,
'total_skipped':0,
'total_failed':0,
}
def output(title, state, text, status=True):
""" Capture and display stdout/stderr output
:param title: the title of the output box (eg. test name)
:type title: str
:param state: state of the result (pass, fail)
:type state: str
:param text: the stdout/stderr output
:type text: str
:param status: whether to output a status marker
:type status: bool
"""
if state == 'fail':
schr = fchr('FF')
elif state == 'pass':
schr = fchr('PP')
elif state == 'skip':
schr = fchr('SK')
else:
schr = ''
if status:
if not args['stdout_inline']:
if state == 'fail':
echo(schr, d='fail')
elif state == 'pass':
echo(schr, d='pass')
else:
if not text:
if state == 'fail':
echo(schr, d='fail')
elif state == 'pass':
if '::' in title:
echo(title.split('::')[-1], d='item-min', n=False)
echo(schr, d='pass')
if text and args['stdout'] in [state, 'all']:
o = decorate(schr + ': ', 'quote-head-' + state)
o += click.style(decorate(title, style=state), bold=True) + "\n"
o += decorate(str(text).strip(), style='quote-' + state)
if args['stdout_inline']:
echo(o)
else:
shakedown.stdout.append(o)
def pytest_collectreport(self, report):
""" Collect and validate individual test files
"""
if not 'collect' in shakedown.state:
shakedown.state['collect'] = 1
echo('Collecting and validating test files...', d='step-min')
if report.nodeid:
echo(report.nodeid, d='item-maj', n=False)
state = None
if report.failed:
state = 'fail'
if report.passed:
state = 'pass'
if report.skipped:
state = 'skip'
if state:
if report.longrepr:
shakedown.output(report.nodeid, state, report.longrepr)
else:
shakedown.output(report.nodeid, state, None)
def pytest_sessionstart(self):
""" Tests have been collected, begin running them...
"""
echo('Initiating testing phase...', d='step-maj')
def pytest_report_teststatus(self, report):
""" Print report results to the console as they are run
"""
try:
report_file, report_test = report.nodeid.split('::', 1)
except ValueError:
return
if not 'test' in shakedown.state:
shakedown.state['test'] = 1
echo('Running individual tests...', d='step-min')
if not report_file in shakedown.tests['file']:
shakedown.tests['file'][report_file] = 1
echo(report_file, d='item-maj')
if not report.nodeid in shakedown.tests['test']:
shakedown.tests['test'][report.nodeid] = {}
if args['stdout_inline']:
echo('')
echo(report_test + ':', d='item-min')
else:
echo(report_test, d='item-min', n=False)
if report.failed:
shakedown.tests['test'][report.nodeid]['fail'] = True
if report.when == 'teardown' and not 'tested' in shakedown.tests['test'][report.nodeid]:
shakedown.output(report.nodeid, 'pass', None)
# Suppress excess terminal output
return report.outcome, None, None
def pytest_runtest_logreport(self, report):
""" Log the [stdout, stderr] results of tests if desired
"""
state = None
for secname, content in report.sections:
if report.failed:
state = 'fail'
if report.passed:
state = 'pass'
if report.skipped:
state = 'skip'
if state and secname != 'Captured stdout call':
module = report.nodeid.split('::', 1)[0]
cap_type = secname.split(' ')[-1]
if not 'setup' in shakedown.tests['test'][report.nodeid]:
shakedown.tests['test'][report.nodeid]['setup'] = True
shakedown.output(module + ' ' + cap_type, state, content, False)
elif cap_type == 'teardown':
shakedown.output(module + ' ' + cap_type, state, content, False)
elif state and report.when == 'call':
if 'tested' in shakedown.tests['test'][report.nodeid]:
shakedown.output(report.nodeid, state, content, False)
else:
shakedown.tests['test'][report.nodeid]['tested'] = True
shakedown.output(report.nodeid, state, content)
# Capture execution crashes
if hasattr(report.longrepr, 'reprcrash'):
longreport = report.longrepr
if 'tested' in shakedown.tests['test'][report.nodeid]:
shakedown.output(report.nodeid, 'fail', 'error: ' + str(longreport.reprcrash), False)
else:
shakedown.tests['test'][report.nodeid]['tested'] = True
shakedown.output(report.nodeid, 'fail', 'error: ' + str(longreport.reprcrash))
def pytest_sessionfinish(self, session, exitstatus):
""" Testing phase is complete; print extra reports (stdout/stderr, JSON) as requested
"""
echo('Test phase completed.', d='step-maj')
if ('stdout' in args and args['stdout']) and shakedown.stdout:
for output in shakedown.stdout:
echo(output)
opts = ['-q', '--tb=no', "--timeout={}".format(args['timeout'])]
if args['fail'] == 'fast':
opts.append('-x')
if args['pytest_option']:
for opt in args['pytest_option']:
opts.append(opt)
if args['stdout_inline']:
opts.append('-s')
if args['tests']:
tests_to_run = []
for test in args['tests']:
tests_to_run.extend(test.split())
for test in tests_to_run:
opts.append(test)
exitstatus = imported['pytest'].main(opts, plugins=[shakedown()])
sys.exit(exitstatus) | Shakedown is a DC/OS test-harness wrapper for the pytest tool. | entailment |
def find_coverage(self, zoom):
"""
Returns the bounding box (minx, miny, maxx, maxy) of an adjacent
group of tiles at this zoom level.
"""
# Find a group of adjacent available tiles at this zoom level
rows = self._query('''SELECT tile_column, tile_row FROM tiles
WHERE zoom_level=?
ORDER BY tile_column, tile_row;''', (zoom,))
t = rows.fetchone()
xmin, ymin = t
previous = t
while t and t[0] - previous[0] <= 1:
# adjacent, go on
previous = t
t = rows.fetchone()
xmax, ymax = previous
# Transform (xmin, ymin) (xmax, ymax) to pixels
S = self.tilesize
bottomleft = (xmin * S, (ymax + 1) * S)
topright = ((xmax + 1) * S, ymin * S)
# Convert center to (lon, lat)
proj = GoogleProjection(S, [zoom]) # WGS84
return proj.unproject_pixels(bottomleft, zoom) + proj.unproject_pixels(topright, zoom) | Returns the bounding box (minx, miny, maxx, maxy) of an adjacent
group of tiles at this zoom level. | entailment |
def tile(self, z, x, y):
"""
Download the specified tile from `tiles_url`
"""
logger.debug(_("Download tile %s") % ((z, x, y),))
# Render each keyword in URL ({s}, {x}, {y}, {z}, {size} ... )
size = self.tilesize
s = self.tiles_subdomains[(x + y) % len(self.tiles_subdomains)];
try:
url = self.tiles_url.format(**locals())
except KeyError as e:
raise DownloadError(_("Unknown keyword %s in URL") % e)
logger.debug(_("Retrieve tile at %s") % url)
r = DOWNLOAD_RETRIES
sleeptime = 1
while r > 0:
try:
request = requests.get(url, headers=self.headers)
if request.status_code == 200:
return request.content
raise DownloadError(_("Status code : %s, url : %s") % (request.status_code, url))
except requests.exceptions.ConnectionError as e:
logger.debug(_("Download error, retry (%s left). (%s)") % (r, e))
r -= 1
time.sleep(sleeptime)
            # progressively sleep longer to wait for this tile
if (sleeptime <= 10) and (r % 2 == 0):
sleeptime += 1 # increase wait
raise DownloadError(_("Cannot download URL %s") % url) | Download the specified tile from `tiles_url` | entailment |
def tile(self, z, x, y):
"""
Render the specified tile with Mapnik
"""
logger.debug(_("Render tile %s") % ((z, x, y),))
proj = GoogleProjection(self.tilesize, [z])
return self.render(proj.tile_bbox((z, x, y))) | Render the specified tile with Mapnik | entailment |
def render(self, bbox, width=None, height=None):
"""
Render the specified tile with Mapnik
"""
width = width or self.tilesize
height = height or self.tilesize
self._prepare_rendering(bbox, width=width, height=height)
# Render image with default Agg renderer
tmpfile = NamedTemporaryFile(delete=False)
im = mapnik.Image(width, height)
mapnik.render(self._mapnik, im)
im.save(tmpfile.name, 'png256') # TODO: mapnik output only to file?
tmpfile.close()
content = open(tmpfile.name, 'rb').read()
os.unlink(tmpfile.name)
return content | Render the specified tile with Mapnik | entailment |
def grid(self, z, x, y, fields, layer):
"""
Render the specified grid with Mapnik
"""
logger.debug(_("Render grid %s") % ((z, x, y),))
proj = GoogleProjection(self.tilesize, [z])
return self.render_grid(proj.tile_bbox((z, x, y)), fields, layer) | Render the specified grid with Mapnik | entailment |
def render_grid(self, bbox, grid_fields, layer, width=None, height=None):
"""
Render the specified grid with Mapnik
"""
width = width or self.tilesize
height = height or self.tilesize
self._prepare_rendering(bbox, width=width, height=height)
grid = mapnik.Grid(width, height)
mapnik.render_layer(self._mapnik, grid, layer=layer, fields=grid_fields)
grid = grid.encode()
return json.dumps(grid) | Render the specified grid with Mapnik | entailment |
def tile_at(self, zoom, position):
"""
Returns a tuple of (z, x, y)
"""
x, y = self.project_pixels(position, zoom)
return (zoom, int(x/self.tilesize), int(y/self.tilesize)) | Returns a tuple of (z, x, y) | entailment |
def project(self, lng_lat):
"""
Returns the coordinates in meters from WGS84
"""
(lng, lat) = lng_lat
x = lng * DEG_TO_RAD
lat = max(min(MAX_LATITUDE, lat), -MAX_LATITUDE)
y = lat * DEG_TO_RAD
y = log(tan((pi / 4) + (y / 2)))
return (x*EARTH_RADIUS, y*EARTH_RADIUS) | Returns the coordinates in meters from WGS84 | entailment |
def string2rgba(cls, colorstring):
""" Convert #RRGGBBAA to an (R, G, B, A) tuple """
colorstring = colorstring.strip()
if colorstring[0] == '#':
colorstring = colorstring[1:]
if len(colorstring) < 6:
raise ValueError("input #%s is not in #RRGGBB format" % colorstring)
r, g, b = colorstring[:2], colorstring[2:4], colorstring[4:6]
a = 'ff'
if len(colorstring) > 6:
a = colorstring[6:8]
r, g, b, a = [int(n, 16) for n in (r, g, b, a)]
return (r, g, b, a) | Convert #RRGGBBAA to an (R, G, B, A) tuple | entailment |
def tileslist(self, bbox, zoomlevels):
"""
Build the tiles list within the bottom-left/top-right bounding
box (minx, miny, maxx, maxy) at the specified zoom levels.
Return a list of tuples (z,x,y)
"""
proj = GoogleProjection(self.tile_size, zoomlevels, self.tile_scheme)
return proj.tileslist(bbox) | Build the tiles list within the bottom-left/top-right bounding
box (minx, miny, maxx, maxy) at the specified zoom levels.
Return a list of tuples (z,x,y) | entailment |
def add_layer(self, tilemanager, opacity=1.0):
"""
Add a layer to be blended (alpha-composite) on top of the tile.
tilemanager -- a `TileManager` instance
opacity -- transparency factor for compositing
"""
assert has_pil, _("Cannot blend layers without python PIL")
assert self.tile_size == tilemanager.tile_size, _("Cannot blend layers whose tile size differs")
assert 0 <= opacity <= 1, _("Opacity should be between 0.0 (transparent) and 1.0 (opaque)")
self.cache.basename += '%s%.1f' % (tilemanager.cache.basename, opacity)
self._layers.append((tilemanager, opacity)) | Add a layer to be blended (alpha-composite) on top of the tile.
tilemanager -- a `TileManager` instance
opacity -- transparency factor for compositing | entailment |
def add_filter(self, filter_):
""" Add an image filter for post-processing """
assert has_pil, _("Cannot add filters without python PIL")
self.cache.basename += filter_.basename
self._filters.append(filter_) | Add an image filter for post-processing | entailment |
def tile(self, z_x_y):
"""
Return the tile (binary) content of the tile and seed the cache.
"""
(z, x, y) = z_x_y
logger.debug(_("tile method called with %s") % ([z, x, y]))
output = self.cache.read((z, x, y))
if output is None:
output = self.reader.tile(z, x, y)
# Blend layers
if len(self._layers) > 0:
logger.debug(_("Will blend %s layer(s)") % len(self._layers))
output = self._blend_layers(output, (z, x, y))
# Apply filters
for f in self._filters:
image = f.process(self._tile_image(output))
output = self._image_tile(image)
# Save result to cache
self.cache.save(output, (z, x, y))
self.rendered += 1
return output | Return the tile (binary) content of the tile and seed the cache. | entailment |
def grid(self, z_x_y):
""" Return the UTFGrid content """
# sources.py -> MapnikRenderer -> grid
(z, x, y) = z_x_y
content = self.reader.grid(z, x, y, self.grid_fields, self.grid_layer)
return content | Return the UTFGrid content | entailment |
def _blend_layers(self, imagecontent, z_x_y):
"""
Merge tiles of all layers into the specified tile path
"""
(z, x, y) = z_x_y
result = self._tile_image(imagecontent)
# Paste each layer
for (layer, opacity) in self._layers:
try:
# Prepare tile of overlay, if available
overlay = self._tile_image(layer.tile((z, x, y)))
except (IOError, DownloadError, ExtractionError)as e:
logger.warn(e)
continue
# Extract alpha mask
overlay = overlay.convert("RGBA")
r, g, b, a = overlay.split()
overlay = Image.merge("RGB", (r, g, b))
a = ImageEnhance.Brightness(a).enhance(opacity)
overlay.putalpha(a)
mask = Image.merge("L", (a,))
result.paste(overlay, (0, 0), mask)
# Read result
return self._image_tile(result) | Merge tiles of all layers into the specified tile path | entailment |
def _tile_image(self, data):
"""
Tile binary content as PIL Image.
"""
image = Image.open(BytesIO(data))
return image.convert('RGBA') | Tile binary content as PIL Image. | entailment |
def zoomlevels(self):
"""
Return the list of covered zoom levels, in ascending order
"""
zooms = set()
for coverage in self._bboxes:
for zoom in coverage[1]:
zooms.add(zoom)
return sorted(zooms) | Return the list of covered zoom levels, in ascending order | entailment |
def run(self, force=False):
"""
Build a MBTile file.
force -- overwrite if MBTiles file already exists.
"""
if os.path.exists(self.filepath):
if force:
logger.warn(_("%s already exists. Overwrite.") % self.filepath)
os.remove(self.filepath)
else:
# Already built, do not do anything.
logger.info(_("%s already exists. Nothing to do.") % self.filepath)
return
# Clean previous runs
self._clean_gather()
# If no coverage added, use bottom layer metadata
if len(self._bboxes) == 0 and len(self._layers) > 0:
bottomlayer = self._layers[0]
metadata = bottomlayer.reader.metadata()
if 'bounds' in metadata:
logger.debug(_("Use bounds of bottom layer %s") % bottomlayer)
bbox = map(float, metadata.get('bounds', '').split(','))
zoomlevels = range(int(metadata.get('minzoom', 0)), int(metadata.get('maxzoom', 0)))
self.add_coverage(bbox=bbox, zoomlevels=zoomlevels)
# Compute list of tiles
tileslist = set()
for bbox, levels in self._bboxes:
logger.debug(_("Compute list of tiles for bbox %s on zooms %s.") % (bbox, levels))
bboxlist = self.tileslist(bbox, levels)
logger.debug(_("Add %s tiles.") % len(bboxlist))
tileslist = tileslist.union(bboxlist)
logger.debug(_("%s tiles in total.") % len(tileslist))
self.nbtiles = len(tileslist)
if not self.nbtiles:
raise EmptyCoverageError(_("No tiles are covered by bounding boxes : %s") % self._bboxes)
logger.debug(_("%s tiles to be packaged.") % self.nbtiles)
# Go through whole list of tiles and gather them in tmp_dir
self.rendered = 0
for (z, x, y) in tileslist:
try:
self._gather((z, x, y))
except Exception as e:
logger.warn(e)
if not self.ignore_errors:
raise
logger.debug(_("%s tiles were missing.") % self.rendered)
# Some metadata
middlezoom = self.zoomlevels[len(self.zoomlevels) // 2]
lat = self.bounds[1] + (self.bounds[3] - self.bounds[1])/2
lon = self.bounds[0] + (self.bounds[2] - self.bounds[0])/2
metadata = {}
metadata['name'] = str(uuid.uuid4())
metadata['format'] = self._tile_extension[1:]
metadata['minzoom'] = self.zoomlevels[0]
metadata['maxzoom'] = self.zoomlevels[-1]
metadata['bounds'] = '%s,%s,%s,%s' % tuple(self.bounds)
metadata['center'] = '%s,%s,%s' % (lon, lat, middlezoom)
#display informations from the grids on hover
content_to_display = ''
for field_name in self.grid_fields:
content_to_display += "{{{ %s }}}<br>" % field_name
metadata['template'] = '{{#__location__}}{{/__location__}} {{#__teaser__}} \
%s {{/__teaser__}}{{#__full__}}{{/__full__}}' % content_to_display
metadatafile = os.path.join(self.tmp_dir, 'metadata.json')
with open(metadatafile, 'w') as output:
json.dump(metadata, output)
# TODO: add UTF-Grid of last layer, if any
# Package it!
logger.info(_("Build MBTiles file '%s'.") % self.filepath)
extension = self.tile_format.split("image/")[-1]
disk_to_mbtiles(
self.tmp_dir,
self.filepath,
format=extension,
scheme=self.cache.scheme
)
try:
os.remove("%s-journal" % self.filepath) # created by mbutil
except OSError as e:
pass
self._clean_gather() | Build a MBTile file.
force -- overwrite if MBTiles file already exists. | entailment |
def grid_tiles(self, bbox, zoomlevel):
"""
Return a grid of (x, y) tuples representing the juxtaposition
of tiles on the specified ``bbox`` at the specified ``zoomlevel``.
"""
tiles = self.tileslist(bbox, [zoomlevel])
grid = {}
for (z, x, y) in tiles:
if not grid.get(y):
grid[y] = []
grid[y].append(x)
sortedgrid = []
for y in sorted(grid.keys(), reverse=self.tile_scheme == 'tms'):
sortedgrid.append([(x, y) for x in sorted(grid[y])])
return sortedgrid | Return a grid of (x, y) tuples representing the juxtaposition
of tiles on the specified ``bbox`` at the specified ``zoomlevel``. | entailment |
def export_image(self, bbox, zoomlevel, imagepath):
"""
Writes to ``imagepath`` the tiles for the specified bounding box and zoomlevel.
"""
assert has_pil, _("Cannot export image without python PIL")
grid = self.grid_tiles(bbox, zoomlevel)
width = len(grid[0])
height = len(grid)
widthpix = width * self.tile_size
heightpix = height * self.tile_size
result = Image.new("RGBA", (widthpix, heightpix))
offset = (0, 0)
for i, row in enumerate(grid):
for j, (x, y) in enumerate(row):
offset = (j * self.tile_size, i * self.tile_size)
img = self._tile_image(self.tile((zoomlevel, x, y)))
result.paste(img, offset)
logger.info(_("Save resulting image to '%s'") % imagepath)
result.save(imagepath) | Writes to ``imagepath`` the tiles for the specified bounding box and zoomlevel. | entailment |
def _makeScriptOrder(gpos):
"""
    Run through GPOS and make an alphabetically
ordered list of scripts. If DFLT is in the list,
move it to the front.
"""
scripts = []
for scriptRecord in gpos.ScriptList.ScriptRecord:
scripts.append(scriptRecord.ScriptTag)
if "DFLT" in scripts:
scripts.remove("DFLT")
scripts.insert(0, "DFLT")
    return sorted(scripts) | Run through GPOS and make an alphabetically
ordered list of scripts. If DFLT is in the list,
move it to the front. | entailment |
def _gatherDataFromLookups(gpos, scriptOrder):
"""
Gather kerning and classes from the applicable lookups
and return them in script order.
"""
lookupIndexes = _gatherLookupIndexes(gpos)
seenLookups = set()
kerningDictionaries = []
leftClassDictionaries = []
rightClassDictionaries = []
for script in scriptOrder:
kerning = []
leftClasses = []
rightClasses = []
for lookupIndex in lookupIndexes[script]:
if lookupIndex in seenLookups:
continue
seenLookups.add(lookupIndex)
result = _gatherKerningForLookup(gpos, lookupIndex)
if result is None:
continue
k, lG, rG = result
kerning.append(k)
leftClasses.append(lG)
rightClasses.append(rG)
if kerning:
kerningDictionaries.append(kerning)
leftClassDictionaries.append(leftClasses)
rightClassDictionaries.append(rightClasses)
return kerningDictionaries, leftClassDictionaries, rightClassDictionaries | Gather kerning and classes from the applicable lookups
and return them in script order. | entailment |
def _gatherLookupIndexes(gpos):
"""
Gather a mapping of script to lookup indexes
referenced by the kern feature for each script.
Returns a dictionary of this structure:
{
"latn" : [0],
"DFLT" : [0]
}
"""
# gather the indexes of the kern features
kernFeatureIndexes = [index for index, featureRecord in enumerate(gpos.FeatureList.FeatureRecord) if featureRecord.FeatureTag == "kern"]
# find scripts and languages that have kern features
scriptKernFeatureIndexes = {}
for scriptRecord in gpos.ScriptList.ScriptRecord:
script = scriptRecord.ScriptTag
thisScriptKernFeatureIndexes = []
defaultLangSysRecord = scriptRecord.Script.DefaultLangSys
if defaultLangSysRecord is not None:
f = []
for featureIndex in defaultLangSysRecord.FeatureIndex:
if featureIndex not in kernFeatureIndexes:
continue
f.append(featureIndex)
if f:
thisScriptKernFeatureIndexes.append((None, f))
if scriptRecord.Script.LangSysRecord is not None:
for langSysRecord in scriptRecord.Script.LangSysRecord:
langSys = langSysRecord.LangSysTag
f = []
for featureIndex in langSysRecord.LangSys.FeatureIndex:
if featureIndex not in kernFeatureIndexes:
continue
f.append(featureIndex)
if f:
thisScriptKernFeatureIndexes.append((langSys, f))
scriptKernFeatureIndexes[script] = thisScriptKernFeatureIndexes
# convert the feature indexes to lookup indexes
scriptLookupIndexes = {}
for script, featureDefinitions in scriptKernFeatureIndexes.items():
lookupIndexes = scriptLookupIndexes[script] = []
for language, featureIndexes in featureDefinitions:
for featureIndex in featureIndexes:
featureRecord = gpos.FeatureList.FeatureRecord[featureIndex]
for lookupIndex in featureRecord.Feature.LookupListIndex:
if lookupIndex not in lookupIndexes:
lookupIndexes.append(lookupIndex)
# done
return scriptLookupIndexes | Gather a mapping of script to lookup indexes
referenced by the kern feature for each script.
Returns a dictionary of this structure:
{
"latn" : [0],
"DFLT" : [0]
} | entailment |
def _gatherKerningForLookup(gpos, lookupIndex):
"""
Gather the kerning and class data for a particular lookup.
    Returns kerning, left classes, right classes.
The kerning dictionary is of this structure:
{
("a", "a") : 10,
((1, 1, 3), "a") : -20
}
The class dictionaries have this structure:
{
(1, 1, 3) : ["x", "y", "z"]
}
Where the tuple means this:
(lookup index, subtable index, class index)
"""
allKerning = {}
allLeftClasses = {}
allRightClasses = {}
lookup = gpos.LookupList.Lookup[lookupIndex]
# only handle pair positioning and extension
if lookup.LookupType not in (2, 9):
return
for subtableIndex, subtable in enumerate(lookup.SubTable):
if lookup.LookupType == 2:
format = subtable.Format
lookupType = subtable.LookupType
if (lookupType, format) == (2, 1):
kerning = _handleLookupType2Format1(subtable)
allKerning.update(kerning)
elif (lookupType, format) == (2, 2):
kerning, leftClasses, rightClasses = _handleLookupType2Format2(subtable, lookupIndex, subtableIndex)
allKerning.update(kerning)
allLeftClasses.update(leftClasses)
allRightClasses.update(rightClasses)
elif lookup.LookupType == 9:
extSubtable = subtable.ExtSubTable
format = extSubtable.Format
lookupType = extSubtable.LookupType
if (lookupType, format) == (2, 1):
kerning = _handleLookupType2Format1(extSubtable)
allKerning.update(kerning)
elif (lookupType, format) == (2, 2):
kerning, leftClasses, rightClasses = _handleLookupType2Format2(extSubtable, lookupIndex, subtableIndex)
allKerning.update(kerning)
allLeftClasses.update(leftClasses)
allRightClasses.update(rightClasses)
# done
return allKerning, allLeftClasses, allRightClasses | Gather the kerning and class data for a particular lookup.
    Returns kerning, left classes, right classes.
The kerning dictionary is of this structure:
{
("a", "a") : 10,
((1, 1, 3), "a") : -20
}
The class dictionaries have this structure:
{
(1, 1, 3) : ["x", "y", "z"]
}
Where the tuple means this:
(lookup index, subtable index, class index) | entailment |
def _handleLookupType2Format1(subtable):
"""
Extract a kerning dictionary from a Lookup Type 2 Format 1.
"""
kerning = {}
coverage = subtable.Coverage.glyphs
valueFormat1 = subtable.ValueFormat1
pairSets = subtable.PairSet
for index, leftGlyphName in enumerate(coverage):
pairSet = pairSets[index]
for pairValueRecord in pairSet.PairValueRecord:
rightGlyphName = pairValueRecord.SecondGlyph
if valueFormat1:
value = pairValueRecord.Value1
else:
value = pairValueRecord.Value2
if hasattr(value, "XAdvance"):
value = value.XAdvance
kerning[leftGlyphName, rightGlyphName] = value
return kerning | Extract a kerning dictionary from a Lookup Type 2 Format 1. | entailment |
def _handleLookupType2Format2(subtable, lookupIndex, subtableIndex):
"""
Extract kerning, left class and right class dictionaries from a Lookup Type 2 Format 2.
"""
# extract the classes
leftClasses = _extractFeatureClasses(lookupIndex=lookupIndex, subtableIndex=subtableIndex, classDefs=subtable.ClassDef1.classDefs, coverage=subtable.Coverage.glyphs)
rightClasses = _extractFeatureClasses(lookupIndex=lookupIndex, subtableIndex=subtableIndex, classDefs=subtable.ClassDef2.classDefs)
# extract the pairs
kerning = {}
for class1RecordIndex, class1Record in enumerate(subtable.Class1Record):
for class2RecordIndex, class2Record in enumerate(class1Record.Class2Record):
leftClass = (lookupIndex, subtableIndex, class1RecordIndex)
rightClass = (lookupIndex, subtableIndex, class2RecordIndex)
valueFormat1 = subtable.ValueFormat1
if valueFormat1:
value = class2Record.Value1
else:
value = class2Record.Value2
if hasattr(value, "XAdvance") and value.XAdvance != 0:
value = value.XAdvance
kerning[leftClass, rightClass] = value
return kerning, leftClasses, rightClasses | Extract kerning, left class and right class dictionaries from a Lookup Type 2 Format 2. | entailment |
def _mergeKerningDictionaries(kerningDictionaries):
"""
Merge all of the kerning dictionaries found into
one flat dictionary.
"""
# work through the dictionaries backwards since
# this uses an update to load the kerning. this
# will ensure that the script order is honored.
kerning = {}
for dictionaryGroup in reversed(kerningDictionaries):
for dictionary in dictionaryGroup:
kerning.update(dictionary)
# done.
return kerning | Merge all of the kerning dictionaries found into
one flat dictionary. | entailment |
def _findSingleMemberGroups(classDictionaries):
"""
Find all classes that have only one member.
"""
toRemove = {}
for classDictionaryGroup in classDictionaries:
for classDictionary in classDictionaryGroup:
for name, members in list(classDictionary.items()):
if len(members) == 1:
toRemove[name] = list(members)[0]
del classDictionary[name]
return toRemove | Find all classes that have only one member. | entailment |
def _removeSingleMemberGroupReferences(kerning, leftGroups, rightGroups):
"""
Translate group names into glyph names in pairs
if the group only contains one glyph.
"""
new = {}
for (left, right), value in kerning.items():
left = leftGroups.get(left, left)
right = rightGroups.get(right, right)
new[left, right] = value
return new | Translate group names into glyph names in pairs
if the group only contains one glyph. | entailment |
def _mergeClasses(classDictionaries):
"""
Look for classes that have the exact same list
of members and flag them for removal.
This returns left classes, left rename map,
right classes and right rename map.
The classes have the standard class structure.
The rename maps have this structure:
{
(1, 1, 3) : (2, 3, 4),
old name : new name
}
Where the key is the class that should be
preserved and the value is a list of classes
that should be removed.
"""
# build a mapping of members to names
memberTree = {}
for classDictionaryGroup in classDictionaries:
for classDictionary in classDictionaryGroup:
for name, members in classDictionary.items():
if members not in memberTree:
memberTree[members] = set()
memberTree[members].add(name)
# find members that have more than one name
classes = {}
rename = {}
for members, names in memberTree.items():
name = names.pop()
if len(names) > 0:
for otherName in names:
rename[otherName] = name
classes[name] = members
return classes, rename | Look for classes that have the exact same list
of members and flag them for removal.
This returns left classes, left rename map,
right classes and right rename map.
The classes have the standard class structure.
The rename maps have this structure:
{
(1, 1, 3) : (2, 3, 4),
old name : new name
}
Where the key is the class that should be
preserved and the value is a list of classes
that should be removed. | entailment |
def _setGroupNames(classes, classRename):
"""
Set the final names into the groups.
"""
groups = {}
for groupName, glyphList in classes.items():
groupName = classRename.get(groupName, groupName)
# if the glyph list has only one member,
# the glyph name will be used in the pairs.
# no group is needed.
if len(glyphList) == 1:
continue
groups[groupName] = glyphList
return groups | Set the final names into the groups. | entailment |
def _validateClasses(classes):
"""
Check to make sure that a glyph is not part of more than
one class. If this is found, an ExtractorError is raised.
"""
glyphToClass = {}
for className, glyphList in classes.items():
for glyphName in glyphList:
if glyphName not in glyphToClass:
glyphToClass[glyphName] = set()
glyphToClass[glyphName].add(className)
for glyphName, groupList in glyphToClass.items():
if len(groupList) > 1:
raise ExtractorError("Kerning classes are in an conflicting state.") | Check to make sure that a glyph is not part of more than
one class. If this is found, an ExtractorError is raised. | entailment |
def _replaceRenamedPairMembers(kerning, leftRename, rightRename):
"""
Populate the renamed pair members into the kerning.
"""
renamedKerning = {}
for (left, right), value in kerning.items():
left = leftRename.get(left, left)
right = rightRename.get(right, right)
renamedKerning[left, right] = value
return renamedKerning | Populate the renamed pair members into the kerning. | entailment |
def _renameClasses(classes, prefix):
"""
Replace class IDs with nice strings.
"""
renameMap = {}
for classID, glyphList in classes.items():
if len(glyphList) == 0:
groupName = "%s_empty_lu.%d_st.%d_cl.%d" % (prefix, classID[0], classID[1], classID[2])
elif len(glyphList) == 1:
groupName = list(glyphList)[0]
else:
glyphList = list(sorted(glyphList))
groupName = prefix + glyphList[0]
renameMap[classID] = groupName
return renameMap | Replace class IDs with nice strings. | entailment |
def _extractFeatureClasses(lookupIndex, subtableIndex, classDefs, coverage=None):
"""
Extract classes for a specific lookup in a specific subtable.
This is relatively straightforward, except for class 0 interpretation.
Some fonts don't have class 0. Some fonts have a list of class
members that are clearly not all to be used in kerning pairs.
In the case of a missing class 0, the coverage is used as a basis
    for the class and glyph names used in classes 1+ are filtered out.
In the case of class 0 having glyph names that are not part of the
kerning pairs, the coverage is used to filter out the unnecessary
glyph names.
"""
# gather the class members
classDict = {}
for glyphName, classIndex in classDefs.items():
if classIndex not in classDict:
classDict[classIndex] = set()
classDict[classIndex].add(glyphName)
# specially handle class index 0
revisedClass0 = set()
if coverage is not None and 0 in classDict:
for glyphName in classDict[0]:
if glyphName in coverage:
revisedClass0.add(glyphName)
elif coverage is not None and 0 not in classDict:
revisedClass0 = set(coverage)
for glyphList in classDict.values():
revisedClass0 = revisedClass0 - glyphList
classDict[0] = revisedClass0
# flip the class map around
classes = {}
for classIndex, glyphList in classDict.items():
classes[lookupIndex, subtableIndex, classIndex] = frozenset(glyphList)
return classes | Extract classes for a specific lookup in a specific subtable.
This is relatively straightforward, except for class 0 interpretation.
Some fonts don't have class 0. Some fonts have a list of class
members that are clearly not all to be used in kerning pairs.
In the case of a missing class 0, the coverage is used as a basis
    for the class and glyph names used in classes 1+ are filtered out.
In the case of class 0 having glyph names that are not part of the
kerning pairs, the coverage is used to filter out the unnecessary
glyph names. | entailment |
def _get_voters(cls, session, owner_id, poll_id, answer_id):
"""
https://vk.com/dev/polls.getVoters
"""
from .users import User
return session.fetch_items("polls.getVoters", User._get_users, count=100, owner_id=owner_id, poll_id=poll_id, answer_ids=answer_id) | https://vk.com/dev/polls.getVoters | entailment |
def add_include(self, name, module_spec):
"""Adds a module as an included module.
:param name:
Name under which the included module should be exposed in the
current module.
:param module_spec:
ModuleSpec of the included module.
"""
assert name, 'name is required'
assert self.can_include
if name in self.includes:
raise ThriftCompilerError(
'Cannot include module "%s" as "%s" in "%s". '
'The name is already taken.'
% (module_spec.name, name, self.path)
)
self.includes[name] = module_spec
self.scope.add_include(name, module_spec.scope, module_spec.surface) | Adds a module as an included module.
:param name:
Name under which the included module should be exposed in the
current module.
:param module_spec:
ModuleSpec of the included module. | entailment |
def link(self):
"""Link all the types in this module and all included modules."""
if self.linked:
return self
self.linked = True
included_modules = []
# Link includes
for include in self.includes.values():
included_modules.append(include.link().surface)
self.scope.add_surface('__includes__', tuple(included_modules))
self.scope.add_surface('__thrift_source__', self.thrift_source)
# Link self
for linker in LINKERS:
linker(self.scope).link()
self.scope.add_surface('loads', Deserializer(self.protocol))
self.scope.add_surface('dumps', Serializer(self.protocol))
return self | Link all the types in this module and all included modules. | entailment |
def compile(self, name, contents, path=None):
"""Compile the given Thrift document into a Python module.
The generated module contains,
.. py:attribute:: __services__
A collection of generated classes for all services defined in the
thrift file.
.. versionchanged:: 1.0
Renamed from ``services`` to ``__services__``.
.. py:attribute:: __types__
A collection of generated types for all types defined in the
thrift file.
.. versionchanged:: 1.0
Renamed from ``types`` to ``__types__``.
.. py:attribute:: __includes__
A collection of modules included by this module.
.. versionadded:: 1.0
.. py:attribute:: __constants__
A mapping of constant name to value for all constants defined in
the thrift file.
.. versionchanged:: 1.0
Renamed from ``constants`` to ``__constants__``.
.. py:attribute:: __thrift_source__
Contents of the .thrift file from which this module was compiled.
.. versionadded:: 1.1
.. py:function:: dumps(obj)
Serializes the given object using the protocol the compiler was
instantiated with.
.. py:function:: loads(cls, payload)
Deserializes an object of type ``cls`` from ``payload`` using the
protocol the compiler was instantiated with.
.. py:function:: dumps.message(obj, seqid=0)
Serializes the given request or response into a
:py:class:`~thriftrw.wire.Message` using the protocol that the
compiler was instantiated with.
See :ref:`calling-apache-thrift`.
.. versionadded:: 1.0
.. py:function:: loads.message(service, payload)
Deserializes a :py:class:`~thriftrw.wire.Message` from
``payload`` using the protocol the compiler was instantiated with.
A request or response of a method defined in the given service is
parsed in the message body.
See :ref:`calling-apache-thrift`.
.. versionadded:: 1.0
And one class each for every struct, union, exception, enum, and
service defined in the IDL.
Service classes have references to
:py:class:`thriftrw.spec.ServiceFunction` objects for each method
defined in the service.
:param str name:
Name of the Thrift document. This will be the name of the
generated module.
:param str contents:
Thrift document to compile
:param str path:
Path to the Thrift file being compiled. If not specified, imports
from within the Thrift file will be disallowed.
:returns:
ModuleSpec of the generated module.
"""
assert name
if path:
path = os.path.abspath(path)
if path in self._module_specs:
return self._module_specs[path]
module_spec = ModuleSpec(name, self.protocol, path, contents)
if path:
self._module_specs[path] = module_spec
program = self.parser.parse(contents)
header_processor = HeaderProcessor(self, module_spec, self.include_as)
for header in program.headers:
header.apply(header_processor)
generator = Generator(module_spec.scope, strict=self.strict)
for definition in program.definitions:
generator.process(definition)
return module_spec | Compile the given Thrift document into a Python module.
The generated module contains,
.. py:attribute:: __services__
A collection of generated classes for all services defined in the
thrift file.
.. versionchanged:: 1.0
Renamed from ``services`` to ``__services__``.
.. py:attribute:: __types__
A collection of generated types for all types defined in the
thrift file.
.. versionchanged:: 1.0
Renamed from ``types`` to ``__types__``.
.. py:attribute:: __includes__
A collection of modules included by this module.
.. versionadded:: 1.0
.. py:attribute:: __constants__
A mapping of constant name to value for all constants defined in
the thrift file.
.. versionchanged:: 1.0
Renamed from ``constants`` to ``__constants__``.
.. py:attribute:: __thrift_source__
Contents of the .thrift file from which this module was compiled.
.. versionadded:: 1.1
.. py:function:: dumps(obj)
Serializes the given object using the protocol the compiler was
instantiated with.
.. py:function:: loads(cls, payload)
Deserializes an object of type ``cls`` from ``payload`` using the
protocol the compiler was instantiated with.
.. py:function:: dumps.message(obj, seqid=0)
Serializes the given request or response into a
:py:class:`~thriftrw.wire.Message` using the protocol that the
compiler was instantiated with.
See :ref:`calling-apache-thrift`.
.. versionadded:: 1.0
.. py:function:: loads.message(service, payload)
Deserializes a :py:class:`~thriftrw.wire.Message` from
``payload`` using the protocol the compiler was instantiated with.
A request or response of a method defined in the given service is
parsed in the message body.
See :ref:`calling-apache-thrift`.
.. versionadded:: 1.0
And one class each for every struct, union, exception, enum, and
service defined in the IDL.
Service classes have references to
:py:class:`thriftrw.spec.ServiceFunction` objects for each method
defined in the service.
:param str name:
Name of the Thrift document. This will be the name of the
generated module.
:param str contents:
Thrift document to compile
:param str path:
Path to the Thrift file being compiled. If not specified, imports
from within the Thrift file will be disallowed.
:returns:
ModuleSpec of the generated module. | entailment |
def t_HEXCONSTANT(self, t):
r'0x[0-9A-Fa-f]+'
t.value = int(t.value, 16)
t.type = 'INTCONSTANT'
return t | r'0x[0-9A-Fa-f]+ | entailment |
def t_LITERAL(self, t):
r'(\"([^\\\n]|(\\.))*?\")|\'([^\\\n]|(\\.))*?\''
s = t.value[1:-1]
maps = {
't': '\t',
'r': '\r',
'n': '\n',
'\\': '\\',
'\'': '\'',
'"': '\"'
}
i = 0
length = len(s)
val = ''
while i < length:
if s[i] == '\\':
i += 1
if s[i] in maps:
val += maps[s[i]]
else:
msg = 'Cannot escape character: %s' % s[i]
raise ThriftParserError(msg)
else:
val += s[i]
i += 1
t.value = val
return t | r'(\"([^\\\n]|(\\.))*?\")|\'([^\\\n]|(\\.))*?\ | entailment |
def t_IDENTIFIER(self, t):
r'[a-zA-Z_](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*'
if t.value in THRIFT_KEYWORDS:
# Not an identifier after all.
t.type = t.value.upper()
return t | r'[a-zA-Z_](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])* | entailment |
def input(self, data):
"""Reset the lexer and feed in new input.
:param data:
String of input data.
"""
# input(..) doesn't reset the lineno. We have to do that manually.
self._lexer.lineno = 1
return self._lexer.input(data) | Reset the lexer and feed in new input.
:param data:
String of input data. | entailment |
def from_json(cls, session, photo_json):
"""
https://vk.com/dev/objects/photo
"""
photo = cls()
photo.id = photo_json.get('id')
photo.album_id = photo_json.get('album_id')
photo.owner_id = photo_json.get('owner_id')
photo.user_id = photo_json.get('user_id')
photo.text = photo_json.get('text')
photo.type = "photo"
photo.date = photo_json.get('date')
photo.photo_75 = photo_json.get('photo_75')
photo.photo_130 = photo_json.get('photo_130')
photo.photo_604 = photo_json.get('photo_604')
photo.photo_807 = photo_json.get('photo_807')
photo.photo_1280 = photo_json.get('photo_1280')
photo.photo_2560 = photo_json.get('photo_2560')
photo._session = session
return photo | https://vk.com/dev/objects/photo | entailment |
def _get_photos(session, user_or_group_id):
"""
https://vk.com/dev/photos.getAll
"""
response = session.fetch_items("photos.getAll", Photo.from_json, count=200, owner_id=user_or_group_id)
return response | https://vk.com/dev/photos.getAll | entailment |
def _get_owner_cover_photo_upload_server(session, group_id, crop_x=0, crop_y=0, crop_x2=795, crop_y2=200):
"""
https://vk.com/dev/photos.getOwnerCoverPhotoUploadServer
"""
group_id = abs(group_id)
response = session.fetch("photos.getOwnerCoverPhotoUploadServer", group_id=group_id, crop_x=crop_x, crop_y=crop_y, crop_x2=crop_x2, crop_y2=crop_y2)
return response['upload_url'] | https://vk.com/dev/photos.getOwnerCoverPhotoUploadServer | entailment |
def _save_owner_cover_photo(session, hash, photo):
"""
https://vk.com/dev/photos.saveOwnerCoverPhoto
"""
response = session.fetch('photos.saveOwnerCoverPhoto', hash=hash, photo=photo)
return response | https://vk.com/dev/photos.saveOwnerCoverPhoto | entailment |
def _get_save_wall_photo(session, photo, server, hash, user_id=None, group_id=None):
"""
https://vk.com/dev/photos.saveWallPhoto
"""
if group_id < 0:
group_id = abs(group_id)
response = session.fetch("photos.saveWallPhoto", photo=photo, server=server, hash=hash, user_id=user_id, group_id=group_id)[0]
return response['id'], response['owner_id'] | https://vk.com/dev/photos.saveWallPhoto | entailment |
def _get_save_messages_photo(session, photo, server, hash):
"""
https://vk.com/dev/photos.saveMessagesPhoto
"""
response = session.fetch("photos.saveMessagesPhoto", photo=photo, server=server, hash=hash)[0]
return response['id'], response['owner_id'] | https://vk.com/dev/photos.saveMessagesPhoto | entailment |
def get_city(self):
"""
:return: City or None
"""
response = self._session.fetch("users.get", user_ids=self.id, fields="city")[0]
if response.get('city'):
return City.from_json(self._session, response.get('city')) | :return: City or None | entailment |
def get_country(self):
"""
:return: Country or None
"""
response = self._session.fetch("users.get", user_ids=self.id, fields="country")[0]
if response.get('country'):
return Country.from_json(self._session, response.get('country')) | :return: Country or None | entailment |
def get_followers(self):
"""
https://vk.com/dev/users.getFollowers
"""
response = self._session.fetch_items("users.getFollowers", self.from_json, self._session, count=1000, user_id=self.id, fields=self.USER_FIELDS)
return response | https://vk.com/dev/users.getFollowers | entailment |
def _get_user(session, slug_or_user_id):
"""
:param slug_or_user_id: str or int
:return: User
"""
user_json_items = session.fetch('users.get', user_ids=slug_or_user_id, fields=User.USER_FIELDS)
return User.from_json(session, user_json_items[0]) | :param slug_or_user_id: str or int
:return: User | entailment |
def get_data():
"""
Currently pretends to talk to an instrument and get back the magnitud
and phase of the measurement.
"""
# pretend we're measuring a noisy resonance at zero
y = 1.0 / (1.0 + 1j*(n_x.get_value()-0.002)*1000) + _n.random.rand()*0.1
# and that it takes time to do so
_t.sleep(0.1)
# return mag phase
return abs(y), _n.angle(y, True) | Currently pretends to talk to an instrument and get back the magnitud
and phase of the measurement. | entailment |
def List(self):
"""
Lists the keys and values.
"""
print()
for key in list(self.keys()):
print(key,'=',self[key])
print() | Lists the keys and values. | entailment |
def Set(self, key, value):
"""
Sets the key-value pair and dumps to the preferences file.
"""
if not value == None: self.prefs[key] = value
else: self.prefs.pop(key)
self.Dump() | Sets the key-value pair and dumps to the preferences file. | entailment |
def MakeDir(self, path="temp"):
"""
Creates a directory of the specified path in the .spinmob directory.
"""
full_path = _os.path.join(self.path_home, path)
# only make it if it doesn't exist!
if not _os.path.exists(full_path): _os.makedirs(full_path) | Creates a directory of the specified path in the .spinmob directory. | entailment |
def ListDir(self, path="temp"):
"""
Returns a list of files in the specified path (directory), or an
empty list if the directory doesn't exist.
"""
full_path = _os.path.join(self.path_home, path)
# only if the path exists!
if _os.path.exists(full_path) and _os.path.isdir(full_path):
return _os.listdir(full_path)
else:
return [] | Returns a list of files in the specified path (directory), or an
empty list if the directory doesn't exist. | entailment |
def _convert_list2str(self, fields):
"""
:param fields: ('bdate', 'domain')
:return: 'bdate,domain'
"""
if isinstance(fields, tuple) or isinstance(fields, list):
return ','.join(fields)
return fields | :param fields: ('bdate', 'domain')
:return: 'bdate,domain' | entailment |
def p_start(self, p):
'''start : header definition'''
p[0] = ast.Program(headers=p[1], definitions=p[2]) | start : header definition | entailment |
def p_include(self, p):
'''include : INCLUDE IDENTIFIER LITERAL
| INCLUDE LITERAL'''
if len(p) == 4:
p[0] = ast.Include(name=p[2], path=p[3], lineno=p.lineno(1))
else:
p[0] = ast.Include(name=None, path=p[2], lineno=p.lineno(1)) | include : INCLUDE IDENTIFIER LITERAL
| INCLUDE LITERAL | entailment |
def p_namespace(self, p):
'''namespace : NAMESPACE namespace_scope IDENTIFIER'''
p[0] = ast.Namespace(scope=p[2], name=p[3], lineno=p.lineno(1)) | namespace : NAMESPACE namespace_scope IDENTIFIER | entailment |
def p_const(self, p):
'''const : CONST field_type IDENTIFIER '=' const_value
| CONST field_type IDENTIFIER '=' const_value sep'''
p[0] = ast.Const(
name=p[3],
value_type=p[2],
value=p[5],
lineno=p.lineno(3),
) | const : CONST field_type IDENTIFIER '=' const_value
| CONST field_type IDENTIFIER '=' const_value sep | entailment |
def p_const_value_primitive(self, p):
'''const_value_primitive : INTCONSTANT
| DUBCONSTANT
| LITERAL
| const_bool'''
p[0] = ast.ConstPrimitiveValue(p[1], lineno=p.lineno(1)) | const_value_primitive : INTCONSTANT
| DUBCONSTANT
| LITERAL
| const_bool | entailment |
def p_const_list(self, p):
'''const_list : '[' const_list_seq ']' '''
p[0] = ast.ConstList(list(p[2]), p.lineno(1)) | const_list : '[' const_list_seq ']' | entailment |
def p_const_map(self, p):
'''const_map : '{' const_map_seq '}' '''
p[0] = ast.ConstMap(dict(p[2]), p.lineno(1)) | const_map : '{' const_map_seq '}' | entailment |
def p_const_ref(self, p):
'''const_ref : IDENTIFIER'''
p[0] = ast.ConstReference(p[1], lineno=p.lineno(1)) | const_ref : IDENTIFIER | entailment |
def p_typedef(self, p):
'''typedef : TYPEDEF field_type IDENTIFIER annotations'''
p[0] = ast.Typedef(
name=p[3], target_type=p[2], annotations=p[4], lineno=p.lineno(3)
) | typedef : TYPEDEF field_type IDENTIFIER annotations | entailment |
def p_enum(self, p): # noqa
'''enum : ENUM IDENTIFIER '{' enum_seq '}' annotations'''
p[0] = ast.Enum(
name=p[2], items=p[4], annotations=p[6], lineno=p.lineno(2)
) | enum : ENUM IDENTIFIER '{' enum_seq '}' annotations | entailment |
def p_enum_item(self, p):
'''enum_item : IDENTIFIER '=' INTCONSTANT annotations
| IDENTIFIER annotations'''
if len(p) == 5:
p[0] = ast.EnumItem(
name=p[1], value=p[3], annotations=p[4], lineno=p.lineno(1)
)
else:
p[0] = ast.EnumItem(
name=p[1], value=None, annotations=p[2], lineno=p.lineno(1)
) | enum_item : IDENTIFIER '=' INTCONSTANT annotations
| IDENTIFIER annotations | entailment |
def p_struct(self, p):
'''struct : STRUCT IDENTIFIER '{' field_seq '}' annotations'''
p[0] = ast.Struct(
name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2)
) | struct : STRUCT IDENTIFIER '{' field_seq '}' annotations | entailment |
def p_union(self, p):
'''union : UNION IDENTIFIER '{' field_seq '}' annotations'''
p[0] = ast.Union(
name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2)
) | union : UNION IDENTIFIER '{' field_seq '}' annotations | entailment |
def p_exception(self, p):
'''exception : EXCEPTION IDENTIFIER '{' field_seq '}' annotations'''
p[0] = ast.Exc(
name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2)
) | exception : EXCEPTION IDENTIFIER '{' field_seq '}' annotations | entailment |
def p_service(self, p):
'''service : SERVICE IDENTIFIER '{' function_seq '}' annotations
| SERVICE IDENTIFIER EXTENDS IDENTIFIER \
'{' function_seq '}' annotations
'''
if len(p) == 7:
p[0] = ast.Service(
name=p[2],
functions=p[4],
parent=None,
annotations=p[6],
lineno=p.lineno(2),
)
else:
p[0] = ast.Service(
name=p[2],
functions=p[6],
parent=ast.ServiceReference(p[4], p.lineno(4)),
annotations=p[8],
lineno=p.lineno(2),
) | service : SERVICE IDENTIFIER '{' function_seq '}' annotations
| SERVICE IDENTIFIER EXTENDS IDENTIFIER \
'{' function_seq '}' annotations | entailment |
def p_function(self, p):
'''function : oneway function_type IDENTIFIER '(' field_seq ')' \
throws annotations '''
p[0] = ast.Function(
name=p[3],
parameters=p[5],
return_type=p[2],
exceptions=p[7],
oneway=p[1],
annotations=p[8],
lineno=p.lineno(3),
) | function : oneway function_type IDENTIFIER '(' field_seq ')' \
throws annotations | entailment |
def p_field(self, p):
'''field : field_id field_req field_type IDENTIFIER annotations
| field_id field_req field_type IDENTIFIER '=' const_value \
annotations'''
if len(p) == 8:
default = p[6]
annotations = p[7]
else:
default = None
annotations = p[5]
p[0] = ast.Field(
id=p[1],
name=p[4],
field_type=p[3],
requiredness=p[2],
default=default,
annotations=annotations,
lineno=p.lineno(4),
) | field : field_id field_req field_type IDENTIFIER annotations
| field_id field_req field_type IDENTIFIER '=' const_value \
annotations | entailment |
def p_field_id(self, p):
'''field_id : INTCONSTANT ':'
| '''
if len(p) == 3:
if p[1] == 0:
# Prevent users from ever using field ID 0. It's reserved for
# internal use only.
raise ThriftParserError(
'Line %d: Field ID 0 is reserved for internal use.'
% p.lineno(1)
)
p[0] = p[1]
else:
p[0] = None | field_id : INTCONSTANT ':'
| | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.