Dataset schema:
  _id               string (length 2-7)
  title             string (length 1-88)
  partition         string (3 classes)
  text              string (length 75-19.8k)
  language          string (1 class)
  meta_information  dict
q8700
render_cvmfs_pvc
train
def render_cvmfs_pvc(cvmfs_volume):
    """Render REANA_CVMFS_PVC_TEMPLATE."""
    name = CVMFS_REPOSITORIES[cvmfs_volume]
    rendered_template = dict(REANA_CVMFS_PVC_TEMPLATE)
    rendered_template['metadata']['name'] = 'csi-cvmfs-{}-pvc'.format(name)
    rendered_template['spec']['storageClassName'] = "csi-cvmfs-{}".format(name)
    return rendered_template
python
{ "resource": "" }
q8701
render_cvmfs_sc
train
def render_cvmfs_sc(cvmfs_volume):
    """Render REANA_CVMFS_SC_TEMPLATE."""
    name = CVMFS_REPOSITORIES[cvmfs_volume]
    rendered_template = dict(REANA_CVMFS_SC_TEMPLATE)
    rendered_template['metadata']['name'] = "csi-cvmfs-{}".format(name)
    rendered_template['parameters']['repository'] = cvmfs_volume
    return rendered_template
python
{ "resource": "" }
q8702
create_cvmfs_storage_class
train
def create_cvmfs_storage_class(cvmfs_volume):
    """Create CVMFS storage class."""
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_storagev1_api_client
    try:
        current_k8s_storagev1_api_client.\
            create_storage_class(
                render_cvmfs_sc(cvmfs_volume)
            )
    except ApiException as e:
        if e.status != 409:
            raise e
python
{ "resource": "" }
q8703
create_cvmfs_persistent_volume_claim
train
def create_cvmfs_persistent_volume_claim(cvmfs_volume):
    """Create CVMFS persistent volume claim."""
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_corev1_api_client
    try:
        current_k8s_corev1_api_client.\
            create_namespaced_persistent_volume_claim(
                "default",
                render_cvmfs_pvc(cvmfs_volume)
            )
    except ApiException as e:
        if e.status != 409:
            raise e
python
{ "resource": "" }
q8704
create_api_client
train
def create_api_client(api='BatchV1'):
    """Create Kubernetes API client using config.

    :param api: String which represents which Kubernetes API to spawn. By
        default BatchV1.
    :returns: Kubernetes python client object for a specific API i.e. BatchV1.
    """
    k8s_config.load_incluster_config()
    api_configuration = client.Configuration()
    api_configuration.verify_ssl = False
    if api == 'extensions/v1beta1':
        api_client = client.ExtensionsV1beta1Api()
    elif api == 'CoreV1':
        api_client = client.CoreV1Api()
    elif api == 'StorageV1':
        api_client = client.StorageV1Api()
    else:
        api_client = client.BatchV1Api()
    return api_client
python
{ "resource": "" }
q8705
BasePublisher.__error_callback
train
def __error_callback(self, exception, interval):
    """Execute when there is an error while sending a message.

    :param exception: Exception which has been thrown while trying to send
        the message.
    :param interval: Interval in which the message delivery will be retried.
    """
    logging.error('Error while publishing {}'.format(exception))
    logging.info('Retry in %s seconds.', interval)
python
{ "resource": "" }
q8706
BasePublisher._publish
train
def _publish(self, msg):
    """Publish a message to the queue, handling retries.

    :param msg: Object which represents the message to be sent in the
        queue. Note that this object should be serializable in the
        configured format (by default JSON).
    """
    connection = self._connection.clone()
    publish = connection.ensure(self.producer, self.producer.publish,
                                errback=self.__error_callback,
                                max_retries=MQ_PRODUCER_MAX_RETRIES)
    publish(json.dumps(msg), exchange=self._exchange,
            routing_key=self._routing_key, declare=[self._queue])
    logging.debug('Publisher: message sent: %s', msg)
python
{ "resource": "" }
q8707
WorkflowStatusPublisher.publish_workflow_status
train
def publish_workflow_status(self, workflow_uuid, status, logs='', message=None):
    """Publish workflow status using the configured queue.

    :param workflow_uuid: String which represents the workflow UUID.
    :param status: Integer which represents the status of the workflow;
        this is defined in the `reana-db` `Workflow` models.
    :param logs: String which represents the logs which the workflow
        has produced as output.
    :param message: Dictionary which includes additional information that
        can be attached, such as the overall progress of the workflow.
    """
    msg = {
        "workflow_uuid": workflow_uuid,
        "logs": logs,
        "status": status,
        "message": message
    }
    self._publish(msg)
python
{ "resource": "" }
q8708
WorkflowSubmissionPublisher.publish_workflow_submission
train
def publish_workflow_submission(self, user_id, workflow_id_or_name, parameters):
    """Publish workflow submission parameters."""
    msg = {
        "user": user_id,
        "workflow_id_or_name": workflow_id_or_name,
        "parameters": parameters
    }
    self._publish(msg)
python
{ "resource": "" }
q8709
serial_load
train
def serial_load(workflow_file, specification, parameters=None, original=None):
    """Validate and return an expanded REANA Serial workflow specification.

    :param workflow_file: A specification file compliant with REANA
        Serial workflow specification.
    :returns: A dictionary which represents the valid Serial workflow with
        all parameters expanded.
    """
    parameters = parameters or {}
    if not specification:
        with open(workflow_file, 'r') as f:
            specification = json.loads(f.read())
    expanded_specification = _expand_parameters(specification, parameters,
                                                original)
    validate(specification, serial_workflow_schema)
    return expanded_specification
python
{ "resource": "" }
q8710
_expand_parameters
train
def _expand_parameters(specification, parameters, original=None):
    """Expand parameters inside commands for Serial workflow specifications.

    :param specification: Full valid Serial workflow specification.
    :param parameters: Parameters to be extended on a Serial specification.
    :param original: Flag which determines the type of specification to
        return.
    :returns: If `original` is set, a copy of the specification without
        expanded parameters is returned. If `original` is not set, a copy
        of the specification with expanded parameters (all $varname and
        ${varname} will be expanded with their value). Otherwise an error
        is raised if the parameters cannot be expanded.
    :raises: jsonschema.ValidationError
    """
    expanded_specification = deepcopy(specification)
    try:
        for step_num, step in enumerate(expanded_specification['steps']):
            current_step = expanded_specification['steps'][step_num]
            for command_num, command in enumerate(step['commands']):
                current_step['commands'][command_num] = \
                    Template(command).substitute(parameters)
        # if the call is done from the client, original == True and the
        # original specification without applied parameters is returned.
        if original:
            return specification
        else:
            return expanded_specification
    except KeyError as e:
        raise ValidationError('Workflow parameter(s) could not '
                              'be expanded. Please take a look '
                              'at {params}'.format(params=str(e)))
python
{ "resource": "" }
q8711
reana_ready
train
def reana_ready():
    """Check if reana can start new workflows."""
    from reana_commons.config import REANA_READY_CONDITIONS
    for module_name, condition_list in REANA_READY_CONDITIONS.items():
        for condition_name in condition_list:
            module = importlib.import_module(module_name)
            condition_func = getattr(module, condition_name)
            if not condition_func():
                return False
    return True
python
{ "resource": "" }
q8712
check_predefined_conditions
train
def check_predefined_conditions():
    """Check k8s predefined conditions for the nodes."""
    try:
        node_info = current_k8s_corev1_api_client.list_node()
        for node in node_info.items:
            # check based on the predefined conditions about the
            # node status: MemoryPressure, OutOfDisk, KubeletReady,
            # DiskPressure, PIDPressure
            for condition in node.status.conditions:
                if not condition.status:
                    return False
    except ApiException as e:
        log.error('Something went wrong while getting node information.')
        log.error(e)
        return False
    return True
python
{ "resource": "" }
q8713
check_running_job_count
train
def check_running_job_count():
    """Check upper limit on running jobs."""
    try:
        job_list = current_k8s_batchv1_api_client.\
            list_job_for_all_namespaces()
        if len(job_list.items) > K8S_MAXIMUM_CONCURRENT_JOBS:
            return False
    except ApiException as e:
        log.error('Something went wrong while getting running job list.')
        log.error(e)
        return False
    return True
python
{ "resource": "" }
q8714
BaseAPIClient._get_spec
train
def _get_spec(self, spec_file):
    """Get json specification from package data."""
    spec_file_path = os.path.join(
        pkg_resources.resource_filename(
            'reana_commons',
            'openapi_specifications'),
        spec_file)
    with open(spec_file_path) as f:
        json_spec = json.load(f)
    return json_spec
python
{ "resource": "" }
q8715
JobControllerAPIClient.submit
train
def submit(self, workflow_uuid='', experiment='', image='', cmd='',
           prettified_cmd='', workflow_workspace='', job_name='',
           cvmfs_mounts='false'):
    """Submit a job to the RJC API.

    :param job_name: Name of the job.
    :param experiment: Experiment the job belongs to.
    :param image: Identifier of the Docker image which will run the job.
    :param cmd: String which represents the command to execute. It can be
        modified by the workflow engine, i.e. prepending ``cd /some/dir/``.
    :param prettified_cmd: Original command submitted by the user.
    :param workflow_workspace: Path to the workspace of the workflow.
    :param cvmfs_mounts: String with CVMFS volumes to mount in job pods.
    :return: Returns a dict with the ``job_id``.
    """
    job_spec = {
        'experiment': experiment,
        'docker_img': image,
        'cmd': cmd,
        'prettified_cmd': prettified_cmd,
        'env_vars': {},
        'workflow_workspace': workflow_workspace,
        'job_name': job_name,
        'cvmfs_mounts': cvmfs_mounts,
        'workflow_uuid': workflow_uuid
    }
    response, http_response = self._client.jobs.create_job(job=job_spec).\
        result()
    if http_response.status_code == 400:
        raise HTTPBadRequest('Bad request to create a job. Error: {}'.
                             format(http_response.data))
    elif http_response.status_code == 500:
        raise HTTPInternalServerError('Internal Server Error. Error: {}'.
                                      format(http_response.data))
    return response
python
{ "resource": "" }
q8716
JobControllerAPIClient.check_status
train
def check_status(self, job_id):
    """Check status of a job."""
    response, http_response = self._client.jobs.get_job(job_id=job_id).\
        result()
    if http_response.status_code == 404:
        raise HTTPNotFound('The given job ID was not found. Error: {}'.
                           format(http_response.data))
    return response
python
{ "resource": "" }
q8717
JobControllerAPIClient.get_logs
train
def get_logs(self, job_id):
    """Get logs of a job."""
    response, http_response = self._client.jobs.get_logs(job_id=job_id).\
        result()
    if http_response.status_code == 404:
        raise HTTPNotFound('The given job ID was not found. Error: {}'.
                           format(http_response.data))
    return http_response.text
python
{ "resource": "" }
q8718
JobControllerAPIClient.check_if_cached
train
def check_if_cached(self, job_spec, step, workflow_workspace):
    """Check if job result is in cache."""
    response, http_response = self._client.job_cache.check_if_cached(
        job_spec=json.dumps(job_spec),
        workflow_json=json.dumps(step),
        workflow_workspace=workflow_workspace).result()
    if http_response.status_code == 400:
        raise HTTPBadRequest('Bad request to check cache. Error: {}'.
                             format(http_response.data))
    elif http_response.status_code == 500:
        raise HTTPInternalServerError('Internal Server Error. Error: {}'.
                                      format(http_response.data))
    return http_response
python
{ "resource": "" }
q8719
_logging_callback
train
def _logging_callback(level, domain, message, data):
    """
    Callback that outputs libgphoto2's logging message via Python's
    standard logging facilities.

    :param level: libgphoto2 logging level
    :param domain: component the message originates from
    :param message: logging message
    :param data: Other data in the logging record (unused)
    """
    domain = ffi.string(domain).decode()
    message = ffi.string(message).decode()
    logger = LOGGER.getChild(domain)
    if level not in LOG_LEVELS:
        return
    logger.log(LOG_LEVELS[level], message)
python
{ "resource": "" }
q8720
Transfer.run
train
def run(self, name, cache_key,
        local_path, remote_path,
        local_options, remote_options, **kwargs):
    """
    The main work horse of the transfer task. Calls the transfer
    method with the local and remote storage backends as given
    with the parameters.

    :param name: name of the file to transfer
    :type name: str
    :param local_path: local storage class to transfer from
    :type local_path: str
    :param local_options: options of the local storage class
    :type local_options: dict
    :param remote_path: remote storage class to transfer to
    :type remote_path: str
    :param remote_options: options of the remote storage class
    :type remote_options: dict
    :param cache_key: cache key to set after a successful transfer
    :type cache_key: str
    :rtype: task result
    """
    local = import_attribute(local_path)(**local_options)
    remote = import_attribute(remote_path)(**remote_options)
    result = self.transfer(name, local, remote, **kwargs)

    if result is True:
        cache.set(cache_key, True)
        file_transferred.send(sender=self.__class__,
                              name=name, local=local, remote=remote)
    elif result is False:
        args = [name, cache_key,
                local_path, remote_path,
                local_options, remote_options]
        self.retry(args=args, kwargs=kwargs)
    else:
        raise ValueError("Task '%s' did not return True/False but %s" %
                         (self.__class__, result))
    return result
python
{ "resource": "" }
q8721
Transfer.transfer
train
def transfer(self, name, local, remote, **kwargs):
    """
    Transfers the file with the given name from the local to the remote
    storage backend.

    :param name: The name of the file to transfer
    :param local: The local storage backend instance
    :param remote: The remote storage backend instance
    :returns: `True` when the transfer succeeded, `False` if not. Retries
        the task when returning `False`
    :rtype: bool
    """
    try:
        remote.save(name, local.open(name))
        return True
    except Exception as e:
        logger.error("Unable to save '%s' to remote storage. "
                     "About to retry." % name)
        logger.exception(e)
        return False
python
{ "resource": "" }
q8722
get_string
train
def get_string(cfunc, *args):
    """
    Call a C function and return its return value as a Python string.

    :param cfunc: C function to call
    :param args: Arguments to call function with
    :rtype: str
    """
    cstr = get_ctype("const char**", cfunc, *args)
    return backend.ffi.string(cstr).decode() if cstr else None
python
{ "resource": "" }
q8723
get_ctype
train
def get_ctype(rtype, cfunc, *args):
    """
    Call a C function that takes a pointer as its last argument and
    return the C object that it contains after the function has finished.

    :param rtype: C data type that is filled by the function
    :param cfunc: C function to call
    :param args: Arguments to call function with
    :return: A pointer to the specified data type
    """
    val_p = backend.ffi.new(rtype)
    args = args + (val_p,)
    cfunc(*args)
    return val_p[0]
python
{ "resource": "" }
q8724
new_gp_object
train
def new_gp_object(typename):
    """
    Create an indirect pointer to a GPhoto2 type, call its matching
    constructor function and return the pointer to it.

    :param typename: Name of the type to create.
    :return: A pointer to the specified data type.
    """
    obj_p = backend.ffi.new("{0}**".format(typename))
    backend.CONSTRUCTORS[typename](obj_p)
    return obj_p[0]
python
{ "resource": "" }
q8725
get_library_version
train
def get_library_version():
    """
    Get the version number of the underlying gphoto2 library.

    :return: The version
    :rtype: tuple of (major, minor, patch) version numbers
    """
    version_str = ffi.string(lib.gp_library_version(True)[0]).decode()
    return tuple(int(x) for x in version_str.split('.'))
python
{ "resource": "" }
q8726
list_cameras
train
def list_cameras():
    """
    List all attached USB cameras that are supported by libgphoto2.

    :return: All recognized cameras
    :rtype: list of :py:class:`Camera`
    """
    ctx = lib.gp_context_new()
    camlist_p = new_gp_object("CameraList")
    port_list_p = new_gp_object("GPPortInfoList")
    lib.gp_port_info_list_load(port_list_p)
    abilities_list_p = new_gp_object("CameraAbilitiesList")
    lib.gp_abilities_list_load(abilities_list_p, ctx)
    lib.gp_abilities_list_detect(abilities_list_p, port_list_p,
                                 camlist_p, ctx)
    out = []
    for idx in range(lib.gp_list_count(camlist_p)):
        name = get_string(lib.gp_list_get_name, camlist_p, idx)
        value = get_string(lib.gp_list_get_value, camlist_p, idx)
        # Skip iteration if no matches
        matches = re.match(r"usb:(\d+),(\d+)", value)
        if not matches:
            continue
        bus_no, device_no = (int(x) for x in matches.groups())
        abilities = ffi.new("CameraAbilities*")
        ability_idx = lib.gp_abilities_list_lookup_model(
            abilities_list_p, name.encode())
        lib.gp_abilities_list_get_abilities(abilities_list_p, ability_idx,
                                            abilities)
        if abilities.device_type == lib.GP_DEVICE_STILL_CAMERA:
            out.append(Camera(bus_no, device_no, lazy=True,
                              _abilities=abilities))
    lib.gp_list_free(camlist_p)
    lib.gp_port_info_list_free(port_list_p)
    lib.gp_abilities_list_free(abilities_list_p)
    return out
python
{ "resource": "" }
q8727
supported_cameras
train
def supported_cameras():
    """
    List the names of all cameras supported by libgphoto2, grouped by
    the name of their driver.
    """
    ctx = lib.gp_context_new()
    abilities_list_p = new_gp_object("CameraAbilitiesList")
    lib.gp_abilities_list_load(abilities_list_p, ctx)
    abilities = ffi.new("CameraAbilities*")
    out = []
    for idx in range(lib.gp_abilities_list_count(abilities_list_p)):
        lib.gp_abilities_list_get_abilities(abilities_list_p, idx,
                                            abilities)
        if abilities.device_type == lib.GP_DEVICE_STILL_CAMERA:
            libname = os.path.basename(ffi.string(abilities.library)
                                       .decode())
            out.append((ffi.string(abilities.model).decode(), libname))
    lib.gp_abilities_list_free(abilities_list_p)
    # each item is a (model, driver) pair; sort and group by driver name
    key_func = lambda item: item[1]
    out = sorted(out, key=key_func)
    return {k: tuple(x[0] for x in v)
            for k, v in itertools.groupby(out, key_func)}
python
{ "resource": "" }
q8728
VideoCaptureContext.stop
train
def stop(self):
    """Stop the capture."""
    self.camera._get_config()['actions']['movie'].set(False)
    self.videofile = self.camera._wait_for_event(
        event_type=lib.GP_EVENT_FILE_ADDED)
    if self._old_captarget != "Memory card":
        self.camera.config['settings']['capturetarget'].set(
            self._old_captarget)
python
{ "resource": "" }
q8729
Directory.path
train
def path(self):
    """Absolute path to the directory on the camera's filesystem."""
    if self.parent is None:
        return "/"
    else:
        return os.path.join(self.parent.path, self.name)
python
{ "resource": "" }
q8730
Directory.supported_operations
train
def supported_operations(self):
    """All directory operations supported by the camera."""
    return tuple(op for op in backend.DIR_OPS if self._dir_ops & op)
python
{ "resource": "" }
q8731
Directory.exists
train
def exists(self):
    """Check whether the directory exists on the camera."""
    if self.name in ("", "/") and self.parent is None:
        return True
    else:
        return self in self.parent.directories
python
{ "resource": "" }
q8732
Directory.files
train
def files(self):
    """Get a generator that yields all files in the directory."""
    filelist_p = new_gp_object("CameraList")
    lib.gp_camera_folder_list_files(self._cam._cam, self.path.encode(),
                                    filelist_p, self._cam._ctx)
    for idx in range(lib.gp_list_count(filelist_p)):
        fname = get_string(lib.gp_list_get_name, filelist_p, idx)
        yield File(name=fname, directory=self, camera=self._cam)
    lib.gp_list_free(filelist_p)
python
{ "resource": "" }
q8733
Directory.directories
train
def directories(self):
    """Get a generator that yields all subdirectories in the directory."""
    dirlist_p = new_gp_object("CameraList")
    lib.gp_camera_folder_list_folders(self._cam._cam, self.path.encode(),
                                      dirlist_p, self._cam._ctx)
    for idx in range(lib.gp_list_count(dirlist_p)):
        name = os.path.join(
            self.path, get_string(lib.gp_list_get_name, dirlist_p, idx))
        yield Directory(name=name, parent=self, camera=self._cam)
    lib.gp_list_free(dirlist_p)
python
{ "resource": "" }
q8734
Directory.create
train
def create(self):
    """Create the directory."""
    lib.gp_camera_folder_make_dir(
        self._cam._cam, self.parent.path.encode(), self.name.encode(),
        self._cam._ctx)
python
{ "resource": "" }
q8735
Directory.remove
train
def remove(self):
    """Remove the directory."""
    lib.gp_camera_folder_remove_dir(
        self._cam._cam, self.parent.path.encode(), self.name.encode(),
        self._cam._ctx)
python
{ "resource": "" }
q8736
Directory.upload
train
def upload(self, local_path):
    """
    Upload a file to the camera's permanent storage.

    :param local_path: Path to file to copy
    :type local_path: str/unicode
    """
    camerafile_p = ffi.new("CameraFile**")
    with open(local_path, 'rb') as fp:
        lib.gp_file_new_from_fd(camerafile_p, fp.fileno())
        lib.gp_camera_folder_put_file(
            self._cam._cam, self.path.encode() + b"/",
            os.path.basename(local_path).encode(),
            backend.FILE_TYPES['normal'], camerafile_p[0],
            self._cam._ctx)
python
{ "resource": "" }
q8737
File.supported_operations
train
def supported_operations(self):
    """All file operations supported by the camera."""
    return tuple(op for op in backend.FILE_OPS if self._operations & op)
python
{ "resource": "" }
q8738
File.dimensions
train
def dimensions(self):
    """
    Dimensions of the image.

    :rtype: :py:class:`ImageDimensions`
    """
    return ImageDimensions(self._info.file.width, self._info.file.height)
python
{ "resource": "" }
q8739
File.permissions
train
def permissions(self):
    """
    Permissions of the file.

    Can be "r-" (read-only), "-w" (write-only), "rw" (read-write) or
    "--" (no rights).

    :rtype: str
    """
    can_read = self._info.file.permissions & lib.GP_FILE_PERM_READ
    can_write = self._info.file.permissions & lib.GP_FILE_PERM_DELETE
    return "{0}{1}".format("r" if can_read else "-",
                           "w" if can_write else "-")
python
{ "resource": "" }
q8740
File.save
train
def save(self, target_path, ftype='normal'):
    """
    Save file content to a local file.

    :param target_path: Path to save remote file as.
    :type target_path: str/unicode
    :param ftype: Select 'view' on file.
    :type ftype: str
    """
    camfile_p = ffi.new("CameraFile**")
    with open(target_path, 'wb') as fp:
        lib.gp_file_new_from_fd(camfile_p, fp.fileno())
        lib.gp_camera_file_get(
            self._cam._cam, self.directory.path.encode(),
            self.name.encode(), backend.FILE_TYPES[ftype],
            camfile_p[0], self._cam._ctx)
python
{ "resource": "" }
q8741
File.get_data
train
def get_data(self, ftype='normal'):
    """
    Get file content as a bytestring.

    :param ftype: Select 'view' on file.
    :type ftype: str
    :return: File content
    :rtype: bytes
    """
    camfile_p = ffi.new("CameraFile**")
    lib.gp_file_new(camfile_p)
    lib.gp_camera_file_get(
        self._cam._cam, self.directory.path.encode(),
        self.name.encode(), backend.FILE_TYPES[ftype],
        camfile_p[0], self._cam._ctx)
    data_p = ffi.new("char**")
    length_p = ffi.new("unsigned long*")
    lib.gp_file_get_data_and_size(camfile_p[0], data_p, length_p)
    byt = bytes(ffi.buffer(data_p[0], length_p[0]))
    # gphoto2 camera files MUST be freed.
    lib.gp_file_free(camfile_p[0])
    # just to be safe.
    del data_p, length_p, camfile_p
    return byt
python
{ "resource": "" }
q8742
File.iter_data
train
def iter_data(self, chunk_size=2**16, ftype='normal'):
    """
    Get an iterator that yields chunks of the file content.

    :param chunk_size: Size of yielded chunks in bytes
    :type chunk_size: int
    :param ftype: Select 'view' on file.
    :type ftype: str
    :return: Iterator
    """
    self._check_type_supported(ftype)
    buf_p = ffi.new("char[{0}]".format(chunk_size))
    size_p = ffi.new("uint64_t*")
    offset_p = ffi.new("uint64_t*")
    for chunk_idx in range(int(math.ceil(self.size / chunk_size))):
        size_p[0] = chunk_size
        lib.gp_camera_file_read(
            self._cam._cam, self.directory.path.encode(),
            self.name.encode(), backend.FILE_TYPES[ftype],
            offset_p[0], buf_p, size_p, self._cam._ctx)
        yield ffi.buffer(buf_p, size_p[0])[:]
python
{ "resource": "" }
q8743
File.remove
train
def remove(self):
    """Remove file from device."""
    lib.gp_camera_file_delete(self._cam._cam, self.directory.path.encode(),
                              self.name.encode(), self._cam._ctx)
python
{ "resource": "" }
q8744
ConfigItem.set
train
def set(self, value):
    """
    Update value of the option.

    Only possible for options with :py:attr:`readonly` set to `False`.
    If :py:attr:`type` is `selection`, the value must be one of the
    :py:attr:`choices`. If :py:attr:`type` is `range`, the value must
    be in the range described by :py:attr:`range`.

    :param value: Value to set
    """
    if self.readonly:
        raise ValueError("Option is read-only.")
    val_p = None
    if self.type == 'selection':
        if value not in self.choices:
            raise ValueError("Invalid choice (valid: {0})".format(
                repr(self.choices)))
        val_p = ffi.new("const char[]", value.encode())
    elif self.type == 'text':
        if not isinstance(value, basestring):
            raise ValueError("Value must be a string.")
        val_p = ffi.new("char**")
        val_p[0] = ffi.new("char[]", value.encode())
    elif self.type == 'range':
        if value < self.range.min or value > self.range.max:
            raise ValueError("Value exceeds valid range ({0}-{1})."
                             .format(self.range.min, self.range.max))
        if value % self.range.step:
            raise ValueError("Value can only be changed in steps of {0}."
                             .format(self.range.step))
        val_p = ffi.new("float*")
        val_p[0] = value
    elif self.type == 'toggle':
        if not isinstance(value, bool):
            raise ValueError("Value must be bool.")
        val_p = ffi.new("int*")
        val_p[0] = int(value)
    elif self.type == 'date':
        val_p = ffi.new("int*")
        val_p[0] = value
    lib.gp_widget_set_value(self._widget, val_p)
    lib.gp_camera_set_config(self._cam._cam, self._root, self._cam._ctx)
    self.value = value
python
{ "resource": "" }
q8745
Camera.supported_operations
train
def supported_operations(self):
    """All operations supported by the camera."""
    return tuple(op for op in backend.CAM_OPS
                 if self._abilities.operations & op)
python
{ "resource": "" }
q8746
Camera.usb_info
train
def usb_info(self):
    """The camera's USB information."""
    return UsbInformation(self._abilities.usb_vendor,
                          self._abilities.usb_product,
                          self._abilities.usb_class,
                          self._abilities.usb_subclass,
                          self._abilities.usb_protocol)
python
{ "resource": "" }
q8747
Camera.config
train
def config(self):
    """
    Writeable configuration parameters.

    :rtype: dict
    """
    config = self._get_config()
    return {section: {itm.name: itm for itm in config[section].values()
                      if not itm.readonly}
            for section in config
            if 'settings' in section or section == 'other'}
python
{ "resource": "" }
q8748
Camera.storage_info
train
def storage_info(self):
    """Information about the camera's storage."""
    info_p = ffi.new("CameraStorageInformation**")
    num_info_p = ffi.new("int*")
    lib.gp_camera_get_storageinfo(self._cam, info_p, num_info_p, self._ctx)
    infos = []
    for idx in range(num_info_p[0]):
        out = SimpleNamespace()
        struc = (info_p[0] + idx)
        fields = struc.fields
        if lib.GP_STORAGEINFO_BASE & fields:
            out.directory = next(
                (d for d in self.list_all_directories()
                 if d.path == ffi.string(struc.basedir).decode()),
                None)
        if lib.GP_STORAGEINFO_LABEL & fields:
            out.label = ffi.string(struc.label).decode()
        if lib.GP_STORAGEINFO_DESCRIPTION & fields:
            out.description = ffi.string(struc.description).decode()
        if lib.GP_STORAGEINFO_STORAGETYPE & fields:
            stype = struc.type
            if lib.GP_STORAGEINFO_ST_FIXED_ROM & stype:
                out.type = 'fixed_rom'
            elif lib.GP_STORAGEINFO_ST_REMOVABLE_ROM & stype:
                out.type = 'removable_rom'
            elif lib.GP_STORAGEINFO_ST_FIXED_RAM & stype:
                out.type = 'fixed_ram'
            elif lib.GP_STORAGEINFO_ST_REMOVABLE_RAM & stype:
                out.type = 'removable_ram'
            else:
                out.type = 'unknown'
        if lib.GP_STORAGEINFO_ACCESS & fields:
            if lib.GP_STORAGEINFO_AC_READWRITE & struc.access:
                out.access = 'read-write'
            elif lib.GP_STORAGEINFO_AC_READONLY & struc.access:
                out.access = 'read-only'
            elif lib.GP_STORAGEINFO_AC_READONLY_WITH_DELETE & struc.access:
                out.access = 'read-delete'
        if lib.GP_STORAGEINFO_MAXCAPACITY & fields:
            out.capacity = int(struc.capacitykbytes)
        if lib.GP_STORAGEINFO_FREESPACEKBYTES & fields:
            out.free_space = int(struc.freekbytes)
        if lib.GP_STORAGEINFO_FREESPACEIMAGES & fields:
            out.remaining_images = int(struc.freeimages)
        infos.append(out)
    return infos
python
{ "resource": "" }
q8749
Camera.list_all_files
train
def list_all_files(self):
    """Utility method that yields all files on the device's file systems."""
    def list_files_recursively(directory):
        f_gen = itertools.chain(
            directory.files,
            *tuple(list_files_recursively(d)
                   for d in directory.directories))
        for f in f_gen:
            yield f
    return list_files_recursively(self.filesystem)
python
{ "resource": "" }
q8750
Camera.list_all_directories
train
def list_all_directories(self):
    """Utility method that yields all directories on the device's file
    systems.
    """
    def list_dirs_recursively(directory):
        if directory == self.filesystem:
            yield directory
        d_gen = itertools.chain(
            directory.directories,
            *tuple(list_dirs_recursively(d)
                   for d in directory.directories))
        for d in d_gen:
            yield d
    return list_dirs_recursively(self.filesystem)
python
{ "resource": "" }
q8751
Camera.capture
train
def capture(self, to_camera_storage=False):
    """
    Capture an image.

    Some cameras (mostly Canon and Nikon) support capturing to internal
    RAM. On these devices, you have to specify `to_camera_storage` if
    you want to save the images to the memory card. On devices that do
    not support saving to RAM, the only difference is that the file is
    automatically downloaded and deleted when set to `False`.

    :param to_camera_storage: Save image to the camera's internal storage
    :type to_camera_storage: bool
    :return: A :py:class:`File` if `to_camera_storage` was `True`,
        otherwise the captured image as a bytestring.
    :rtype: :py:class:`File` or bytes
    """
    target = self.config['settings']['capturetarget']
    if to_camera_storage and target.value != "Memory card":
        target.set("Memory card")
    elif not to_camera_storage and target.value != "Internal RAM":
        target.set("Internal RAM")
    lib.gp_camera_trigger_capture(self._cam, self._ctx)
    fobj = self._wait_for_event(event_type=lib.GP_EVENT_FILE_ADDED)
    if to_camera_storage:
        self._logger.info("File written to storage at {0}.".format(fobj))
        return fobj
    else:
        data = fobj.get_data()
        try:
            fobj.remove()
        except errors.CameraIOError:
            # That probably means the file is already gone from RAM,
            # so nothing to worry about.
            pass
        return data
python
{ "resource": "" }
q8752
Camera.capture_video
train
def capture_video(self, length):
    """
    Capture a video.

    This always writes to the memory card, since internal RAM is likely
    to run out of space very quickly. Currently this only works with
    Nikon cameras.

    :param length: Length of the video to capture in seconds.
    :type length: int
    :return: Video file
    :rtype: :py:class:`File`
    """
    with self.capture_video_context() as ctx:
        time.sleep(length)
    return ctx.videofile
python
{ "resource": "" }
q8753
Camera.get_preview
train
def get_preview(self):
    """
    Get a preview from the camera's viewport.

    This will usually be a JPEG image with the dimensions depending on
    the camera. You will need to call the exit() method manually after
    you are done capturing a live preview.

    :return: The preview image as a bytestring
    :rtype: bytes
    """
    lib.gp_camera_capture_preview(self._cam, self.__camfile_p[0],
                                  self._ctx)
    lib.gp_file_get_data_and_size(self.__camfile_p[0], self.__data_p,
                                  self.__length_p)
    return ffi.buffer(self.__data_p[0], self.__length_p[0])[:]
python
{ "resource": "" }
q8754
QueuedStorage.transfer
train
def transfer(self, name, cache_key=None):
    """
    Transfers the file with the given name to the remote storage
    backend by queuing the task.

    :param name: file name
    :type name: str
    :param cache_key: the cache key to set after a successful task run
    :type cache_key: str
    :rtype: task result
    """
    if cache_key is None:
        cache_key = self.get_cache_key(name)
    return self.task.delay(name, cache_key,
                           self.local_path, self.remote_path,
                           self.local_options, self.remote_options)
python
{ "resource": "" }
q8755
QueuedStorage.get_available_name
train
def get_available_name(self, name):
    """
    Returns a filename that's free on both the local and remote storage
    systems, and available for new content to be written to.

    :param name: file name
    :type name: str
    :rtype: str
    """
    local_available_name = self.local.get_available_name(name)
    remote_available_name = self.remote.get_available_name(name)

    if remote_available_name > local_available_name:
        return remote_available_name
    return local_available_name
python
{ "resource": "" }
q8756
QueryAnalyzer.generate_query_report
train
def generate_query_report(self, db_uri, parsed_query, db_name, collection_name):
    """Generates a comprehensive report on the raw query"""
    index_analysis = None
    recommendation = None
    namespace = parsed_query['ns']
    indexStatus = "unknown"

    index_cache_entry = self._ensure_index_cache(db_uri,
                                                 db_name,
                                                 collection_name)

    query_analysis = self._generate_query_analysis(parsed_query,
                                                   db_name,
                                                   collection_name)
    if ((query_analysis['analyzedFields'] != []) and
            query_analysis['supported']):
        index_analysis = self._generate_index_analysis(
            query_analysis, index_cache_entry['indexes'])
        indexStatus = index_analysis['indexStatus']
        if index_analysis['indexStatus'] != 'full':
            recommendation = self._generate_recommendation(query_analysis,
                                                           db_name,
                                                           collection_name)
            # a temporary fix to suppress faulty parsing of $regexes.
            # if the recommendation cannot be re-parsed into yaml, we assume
            # it is invalid.
            if not validate_yaml(recommendation['index']):
                recommendation = None
                query_analysis['supported'] = False

    # QUERY REPORT
    return OrderedDict({
        'queryMask': parsed_query['queryMask'],
        'indexStatus': indexStatus,
        'parsed': parsed_query,
        'namespace': namespace,
        'queryAnalysis': query_analysis,
        'indexAnalysis': index_analysis,
        'recommendation': recommendation
    })
python
{ "resource": "" }
q8757
QueryAnalyzer._ensure_index_cache
train
def _ensure_index_cache(self, db_uri, db_name, collection_name):
    """Adds a collection's index entries to the cache if not present"""
    if not self._check_indexes or db_uri is None:
        return {'indexes': None}
    if db_name not in self.get_cache():
        self._internal_map[db_name] = {}
    if collection_name not in self._internal_map[db_name]:
        indexes = []
        try:
            if self._index_cache_connection is None:
                self._index_cache_connection = pymongo.MongoClient(
                    db_uri,
                    document_class=OrderedDict,
                    read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED)
            db = self._index_cache_connection[db_name]
            indexes = db[collection_name].index_information()
        except:
            warning = 'Warning: unable to connect to ' + db_uri + "\n"
            sys.stderr.write(warning)
        else:
            internal_map_entry = {'indexes': indexes}
            self.get_cache()[db_name][collection_name] = internal_map_entry
    return self.get_cache()[db_name][collection_name]
python
{ "resource": "" }
q8758
QueryAnalyzer._generate_query_analysis
train
def _generate_query_analysis(self, parsed_query, db_name, collection_name):
    """Translates a raw query object into a Dex query analysis"""
    analyzed_fields = []
    field_count = 0
    supported = True
    sort_fields = []
    query_mask = None

    if 'command' in parsed_query and \
            parsed_query['command'] not in SUPPORTED_COMMANDS:
        supported = False
    else:
        # if 'orderby' in parsed_query:
        sort_component = parsed_query['orderby'] if 'orderby' in parsed_query else []
        sort_seq = 0
        for key in sort_component:
            sort_field = {'fieldName': key,
                          'fieldType': SORT_TYPE,
                          'seq': sort_seq}
            sort_fields.append(key)
            analyzed_fields.append(sort_field)
            field_count += 1
            sort_seq += 1

        query_component = parsed_query['query'] if 'query' in parsed_query else {}
        for key in query_component:
            if key not in sort_fields:
                field_type = UNSUPPORTED_TYPE
                if ((key not in UNSUPPORTED_QUERY_OPERATORS) and
                        (key not in COMPOSITE_QUERY_OPERATORS)):
                    try:
                        if query_component[key] == {}:
                            raise
                        nested_field_list = query_component[key].keys()
                    except:
                        field_type = EQUIV_TYPE
                    else:
                        for nested_field in nested_field_list:
                            if ((nested_field in RANGE_QUERY_OPERATORS) and
                                    (nested_field not in UNSUPPORTED_QUERY_OPERATORS)):
                                field_type = RANGE_TYPE
                            else:
                                supported = False
                                field_type = UNSUPPORTED_TYPE
                                break
                if field_type is UNSUPPORTED_TYPE:
                    supported = False
                analyzed_field = {'fieldName': key,
                                  'fieldType': field_type}
                analyzed_fields.append(analyzed_field)
                field_count += 1

        query_mask = parsed_query['queryMask']

    # QUERY ANALYSIS
    return OrderedDict({
        'analyzedFields': analyzed_fields,
        'fieldCount': field_count,
        'supported': supported,
        'queryMask': query_mask
    })
python
{ "resource": "" }
q8759
QueryAnalyzer._generate_index_analysis
train
def _generate_index_analysis(self, query_analysis, indexes):
    """Compares a query signature to the index cache to identify
    complete and partial indexes available to the query"""
    needs_recommendation = True
    full_indexes = []
    partial_indexes = []
    coverage = "unknown"

    if indexes is not None:
        for index_key in indexes.keys():
            index = indexes[index_key]
            index_report = self._generate_index_report(index,
                                                       query_analysis)
            if index_report['supported'] is True:
                if index_report['coverage'] == 'full':
                    full_indexes.append(index_report)
                    if index_report['idealOrder']:
                        needs_recommendation = False
                elif index_report['coverage'] == 'partial':
                    partial_indexes.append(index_report)

    if len(full_indexes) > 0:
        coverage = "full"
    elif (len(partial_indexes)) > 0:
        coverage = "partial"
    elif query_analysis['supported']:
        coverage = "none"

    # INDEX ANALYSIS
    return OrderedDict([('indexStatus', coverage),
                        ('fullIndexes', full_indexes),
                        ('partialIndexes', partial_indexes)])
python
{ "resource": "" }
q8760
QueryAnalyzer._generate_index_report
train
def _generate_index_report(self, index, query_analysis):
    """Analyzes an existing index against the results of query analysis"""
    all_fields = []
    equiv_fields = []
    sort_fields = []
    range_fields = []

    for query_field in query_analysis['analyzedFields']:
        all_fields.append(query_field['fieldName'])
        if query_field['fieldType'] is EQUIV_TYPE:
            equiv_fields.append(query_field['fieldName'])
        elif query_field['fieldType'] is SORT_TYPE:
            sort_fields.append(query_field['fieldName'])
        elif query_field['fieldType'] is RANGE_TYPE:
            range_fields.append(query_field['fieldName'])

    max_equiv_seq = len(equiv_fields)
    max_sort_seq = max_equiv_seq + len(sort_fields)
    max_range_seq = max_sort_seq + len(range_fields)

    coverage = 'none'
    query_fields_covered = 0
    query_field_count = query_analysis['fieldCount']
    supported = True
    ideal_order = True
    for index_field in index['key']:
        field_name = index_field[0]

        if index_field[1] == '2d':
            supported = False
            break

        if field_name not in all_fields:
            break

        if query_fields_covered == 0:
            coverage = 'partial'

        if query_fields_covered < max_equiv_seq:
            if field_name not in equiv_fields:
                ideal_order = False
        elif query_fields_covered < max_sort_seq:
            if field_name not in sort_fields:
                ideal_order = False
        elif query_fields_covered < max_range_seq:
            if field_name not in range_fields:
                ideal_order = False
        query_fields_covered += 1
    if query_fields_covered == query_field_count:
        coverage = 'full'

    # INDEX REPORT
    return OrderedDict({
        'coverage': coverage,
        'idealOrder': ideal_order,
        'queryFieldsCovered': query_fields_covered,
        'index': index,
        'supported': supported
    })
python
{ "resource": "" }
q8761
QueryAnalyzer._generate_recommendation
train
def _generate_recommendation(self, query_analysis, db_name, collection_name):
    """Generates an ideal query recommendation"""
    index_rec = '{'
    for query_field in query_analysis['analyzedFields']:
        if query_field['fieldType'] is EQUIV_TYPE:
            if len(index_rec) != 1:
                index_rec += ', '
            index_rec += '"' + query_field['fieldName'] + '": 1'
    for query_field in query_analysis['analyzedFields']:
        if query_field['fieldType'] is SORT_TYPE:
            if len(index_rec) != 1:
                index_rec += ', '
            index_rec += '"' + query_field['fieldName'] + '": 1'
    for query_field in query_analysis['analyzedFields']:
        if query_field['fieldType'] is RANGE_TYPE:
            if len(index_rec) != 1:
                index_rec += ', '
            index_rec += '"' + query_field['fieldName'] + '": 1'
    index_rec += '}'

    # RECOMMENDATION
    return OrderedDict([('index', index_rec),
                        ('shellCommand',
                         self.generate_shell_command(collection_name,
                                                     index_rec))])
python
{ "resource": "" }
q8762
ReportAggregation.add_query_occurrence
train
def add_query_occurrence(self, report):
    """Adds a report to the report aggregation"""
    initial_millis = int(report['parsed']['stats']['millis'])
    mask = report['queryMask']
    existing_report = self._get_existing_report(mask, report)

    if existing_report is not None:
        self._merge_report(existing_report, report)
    else:
        time = None
        if 'ts' in report['parsed']:
            time = report['parsed']['ts']
        self._reports.append(OrderedDict([
            ('namespace', report['namespace']),
            ('lastSeenDate', time),
            ('queryMask', mask),
            ('supported', report['queryAnalysis']['supported']),
            ('indexStatus', report['indexStatus']),
            ('recommendation', report['recommendation']),
            ('stats', OrderedDict([('count', 1),
                                   ('totalTimeMillis', initial_millis),
                                   ('avgTimeMillis', initial_millis)]))]))
python
{ "resource": "" }
q8763
ReportAggregation.get_reports
train
def get_reports(self):
    """Returns a minimized version of the aggregation"""
    return sorted(self._reports,
                  key=lambda x: x['stats']['totalTimeMillis'],
                  reverse=True)
python
{ "resource": "" }
q8764
ReportAggregation._get_existing_report
train
def _get_existing_report(self, mask, report):
    """Returns the aggregated report that matches report"""
    for existing_report in self._reports:
        if existing_report['namespace'] == report['namespace']:
            if mask == existing_report['queryMask']:
                return existing_report
    return None
python
{ "resource": "" }
q8765
ReportAggregation._merge_report
train
def _merge_report(self, target, new):
    """Merges a new report into the target report"""
    time = None
    if 'ts' in new['parsed']:
        time = new['parsed']['ts']

    if (target.get('lastSeenDate', None) and
            time and
            target['lastSeenDate'] < time):
        target['lastSeenDate'] = time

    query_millis = int(new['parsed']['stats']['millis'])
    target['stats']['totalTimeMillis'] += query_millis
    target['stats']['count'] += 1
    target['stats']['avgTimeMillis'] = \
        target['stats']['totalTimeMillis'] / target['stats']['count']
python
{ "resource": "" }
q8766
Parser.parse
train
def parse(self, input):
    """Passes input to each QueryLineHandler in use"""
    query = None
    for handler in self._line_handlers:
        try:
            query = handler.handle(input)
        except Exception as e:
            query = None
        finally:
            if query is not None:
                return query
    return None
python
{ "resource": "" }
q8767
Dex.generate_query_report
train
def generate_query_report(self, db_uri, query, db_name, collection_name):
    """Analyzes a single query"""
    return self._query_analyzer.generate_query_report(db_uri,
                                                      query,
                                                      db_name,
                                                      collection_name)
python
{ "resource": "" }
q8768
Dex.watch_logfile
train
def watch_logfile(self, logfile_path):
    """Analyzes queries from the tail of a given log file"""
    self._run_stats['logSource'] = logfile_path
    log_parser = LogParser()

    # For each new line in the logfile ...
    output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    try:
        firstLine = True
        for line in self._tail_file(open(logfile_path),
                                    WATCH_INTERVAL_SECONDS):
            if firstLine:
                self._run_stats['timeRange']['start'] = get_line_time(line)
            self._process_query(line, log_parser)
            self._run_stats['timeRange']['end'] = get_line_time(line)
            if time.time() >= output_time:
                self._output_aggregated_report(sys.stderr)
                output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    except KeyboardInterrupt:
        sys.stderr.write("Interrupt received\n")
    finally:
        self._output_aggregated_report(sys.stdout)
    return 0
python
{ "resource": "" }
q8769
Dex._tail_file
train
def _tail_file(self, file, interval):
    """Tails a file"""
    file.seek(0, 2)
    while True:
        where = file.tell()
        line = file.readline()
        if not line:
            time.sleep(interval)
            file.seek(where)
        else:
            yield line
python
{ "resource": "" }
q8770
Dex._tail_profile
train
def _tail_profile(self, db, interval):
    """Tails the system.profile collection"""
    latest_doc = None
    while latest_doc is None:
        time.sleep(interval)
        latest_doc = db['system.profile'].find_one()

    current_time = latest_doc['ts']

    while True:
        time.sleep(interval)
        cursor = db['system.profile'].find(
            {'ts': {'$gte': current_time}}).sort('ts', pymongo.ASCENDING)
        for doc in cursor:
            current_time = doc['ts']
            yield doc
python
{ "resource": "" }
q8771
Dex._tuplefy_namespace
train
def _tuplefy_namespace(self, namespace):
    """Converts a mongodb namespace to a db, collection tuple"""
    namespace_split = namespace.split('.', 1)
    if len(namespace_split) == 1:
        # we treat a single element as a collection name.
        # this also properly tuplefies '*'
        namespace_tuple = ('*', namespace_split[0])
    elif len(namespace_split) == 2:
        namespace_tuple = (namespace_split[0], namespace_split[1])
    else:
        return None
    return namespace_tuple
python
{ "resource": "" }
q8772
Dex._validate_namespaces
train
def _validate_namespaces(self, input_namespaces):
    """Converts a list of db namespaces to a list of namespace tuples,
    supporting basic commandline wildcards"""
    output_namespaces = []
    if input_namespaces == []:
        return output_namespaces
    elif '*' in input_namespaces:
        if len(input_namespaces) > 1:
            warning = 'Warning: Multiple namespaces are '
            warning += 'ignored when one namespace is "*"\n'
            sys.stderr.write(warning)
        return output_namespaces
    else:
        for namespace in input_namespaces:
            if not isinstance(namespace, unicode):
                namespace = unicode(namespace)
            namespace_tuple = self._tuplefy_namespace(namespace)
            if namespace_tuple is None:
                warning = 'Warning: Invalid namespace ' + namespace
                warning += ' will be ignored\n'
                sys.stderr.write(warning)
            else:
                if namespace_tuple not in output_namespaces:
                    output_namespaces.append(namespace_tuple)
                else:
                    warning = 'Warning: Duplicate namespace ' + namespace
                    warning += ' will be ignored\n'
                    sys.stderr.write(warning)
    return output_namespaces
python
{ "resource": "" }
q8773
Dex._namespace_requested
train
def _namespace_requested(self, namespace):
    """Checks whether the requested_namespaces contain the provided
    namespace"""
    if namespace is None:
        return False
    namespace_tuple = self._tuplefy_namespace(namespace)
    if namespace_tuple[0] in IGNORE_DBS:
        return False
    elif namespace_tuple[1] in IGNORE_COLLECTIONS:
        return False
    else:
        return self._tuple_requested(namespace_tuple)
python
{ "resource": "" }
q8774
Dex._tuple_requested
train
def _tuple_requested(self, namespace_tuple):
    """Helper for _namespace_requested. Supports limited wildcards"""
    if not isinstance(namespace_tuple[0], unicode):
        encoded_db = unicode(namespace_tuple[0])
    else:
        encoded_db = namespace_tuple[0]
    if not isinstance(namespace_tuple[1], unicode):
        encoded_coll = unicode(namespace_tuple[1])
    else:
        encoded_coll = namespace_tuple[1]

    if namespace_tuple is None:
        return False
    elif len(self._requested_namespaces) == 0:
        return True
    for requested_namespace in self._requested_namespaces:
        if (((requested_namespace[0] == u'*') or
             (encoded_db == requested_namespace[0])) and
                ((requested_namespace[1] == u'*') or
                 (encoded_coll == requested_namespace[1]))):
            return True
    return False
python
{ "resource": "" }
q8775
Dex._get_requested_databases
train
def _get_requested_databases(self):
    """Returns a list of databases requested, not including ignored dbs"""
    requested_databases = []
    if ((self._requested_namespaces is not None) and
            (self._requested_namespaces != [])):
        for requested_namespace in self._requested_namespaces:
            if requested_namespace[0] == '*':
                return []
            elif requested_namespace[0] not in IGNORE_DBS:
                requested_databases.append(requested_namespace[0])
    return requested_databases
python
{ "resource": "" }
q8776
FortiOSDriver.get_config
train
def get_config(self, retrieve="all"):
    """get_config implementation for FortiOS."""
    get_startup = retrieve == "all" or retrieve == "startup"
    get_running = retrieve == "all" or retrieve == "running"
    get_candidate = retrieve == "all" or retrieve == "candidate"

    if retrieve == "all" or get_running:
        result = self._execute_command_with_vdom('show')
        text_result = '\n'.join(result)
        return {
            'startup': u"",
            'running': py23_compat.text_type(text_result),
            'candidate': u"",
        }
    elif get_startup or get_candidate:
        return {
            'startup': u"",
            'running': u"",
            'candidate': u"",
        }
python
{ "resource": "" }
q8777
DelugeRPCClient.connect
train
def connect(self):
    """
    Connects to the Deluge instance
    """
    self._connect()
    logger.debug('Connected to Deluge, detecting daemon version')
    self._detect_deluge_version()
    logger.debug('Daemon version {} detected, logging in'.format(
        self.deluge_version))
    if self.deluge_version == 2:
        result = self.call('daemon.login', self.username, self.password,
                           client_version='deluge-client')
    else:
        result = self.call('daemon.login', self.username, self.password)
    logger.debug('Logged in with value %r' % result)
    self.connected = True
python
{ "resource": "" }
q8778
DelugeRPCClient.disconnect
train
def disconnect(self):
    """
    Disconnect from deluge
    """
    if self.connected:
        self._socket.close()
        self._socket = None
        self.connected = False
python
{ "resource": "" }
q8779
DelugeRPCClient.call
train
def call(self, method, *args, **kwargs):
    """
    Calls an RPC function
    """
    tried_reconnect = False
    for _ in range(2):
        try:
            self._send_call(self.deluge_version,
                            self.deluge_protocol_version,
                            method, *args, **kwargs)
            return self._receive_response(self.deluge_version,
                                          self.deluge_protocol_version)
        except (socket.error, ConnectionLostException,
                CallTimeoutException):
            if self.automatic_reconnect:
                if tried_reconnect:
                    raise FailedToReconnectException()
                else:
                    try:
                        self.reconnect()
                    except (socket.error, ConnectionLostException,
                            CallTimeoutException):
                        raise FailedToReconnectException()
                    tried_reconnect = True
            else:
                raise
python
{ "resource": "" }
q8780
StickerSet.to_array
train
def to_array(self):
    """
    Serializes this StickerSet to a dictionary.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(StickerSet, self).to_array()
    array['name'] = u(self.name)  # py2: type unicode, py3: type str
    array['title'] = u(self.title)  # py2: type unicode, py3: type str
    array['contains_masks'] = bool(self.contains_masks)  # type bool
    array['stickers'] = self._as_array(self.stickers)  # type list of Sticker
    return array
python
{ "resource": "" }
q8781
StickerSet.from_array
train
def from_array(array):
    """
    Deserialize a new StickerSet from a given dictionary.

    :return: new StickerSet instance.
    :rtype: StickerSet
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")
    from pytgbot.api_types.receivable.media import Sticker

    data = {}
    data['name'] = u(array.get('name'))
    data['title'] = u(array.get('title'))
    data['contains_masks'] = bool(array.get('contains_masks'))
    data['stickers'] = Sticker.from_array_list(array.get('stickers'),
                                               list_level=1)
    data['_raw'] = array
    return StickerSet(**data)
python
{ "resource": "" }
q8782
MaskPosition.to_array
train
def to_array(self):
    """
    Serializes this MaskPosition to a dictionary.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(MaskPosition, self).to_array()
    array['point'] = u(self.point)  # py2: type unicode, py3: type str
    array['x_shift'] = float(self.x_shift)  # type float
    array['y_shift'] = float(self.y_shift)  # type float
    array['scale'] = float(self.scale)  # type float
    return array
python
{ "resource": "" }
q8783
MaskPosition.from_array
train
def from_array(array):
    """
    Deserialize a new MaskPosition from a given dictionary.

    :return: new MaskPosition instance.
    :rtype: MaskPosition
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")

    data = {}
    data['point'] = u(array.get('point'))
    data['x_shift'] = float(array.get('x_shift'))
    data['y_shift'] = float(array.get('y_shift'))
    data['scale'] = float(array.get('scale'))
    data['_raw'] = array
    return MaskPosition(**data)
python
{ "resource": "" }
q8784
compile
train
def compile(cfg_path, out_path, executable=None, env=None, log=None):
    """
    Use ACE to compile a grammar.

    Args:
        cfg_path (str): the path to the ACE config file
        out_path (str): the path where the compiled grammar will be
            written
        executable (str, optional): the path to the ACE binary; if
            `None`, the `ace` command will be used
        env (dict, optional): environment variables to pass to the ACE
            subprocess
        log (file, optional): if given, the file, opened for writing,
            or stream to write ACE's stdout and stderr compile messages
    """
    try:
        check_call(
            [(executable or 'ace'), '-g', cfg_path, '-G', out_path],
            stdout=log, stderr=log, close_fds=True,
            env=(env or os.environ)
        )
    except (CalledProcessError, OSError):
        logging.error(
            'Failed to compile grammar with ACE. See {}'
            .format(log.name if log is not None else '<stderr>')
        )
        raise
python
{ "resource": "" }
q8785
AceProcess.close
train
def close(self):
    """
    Close the ACE process and return the process's exit code.
    """
    self.run_info['end'] = datetime.now()
    self._p.stdin.close()
    for line in self._p.stdout:
        if line.startswith('NOTE: tsdb run:'):
            self._read_run_info(line)
        else:
            logging.debug('ACE cleanup: {}'.format(line.rstrip()))
    retval = self._p.wait()
    return retval
python
{ "resource": "" }
q8786
loads
train
def loads(s, single=False):
    """
    Deserialize DMRX string representations

    Args:
        s (str): a DMRX string
        single (bool): if `True`, only return the first Xmrs object
    Returns:
        a generator of Xmrs objects (unless *single* is `True`)
    """
    corpus = etree.fromstring(s)
    if single:
        ds = _deserialize_dmrs(next(iter(corpus)))
    else:
        ds = (_deserialize_dmrs(dmrs_elem) for dmrs_elem in corpus)
    return ds
python
{ "resource": "" }
q8787
ParseResult.derivation
train
def derivation(self):
    """
    Deserialize and return a Derivation object for UDF- or
    JSON-formatted derivation data; otherwise return the original
    string.
    """
    drv = self.get('derivation')
    if drv is not None:
        if isinstance(drv, dict):
            drv = Derivation.from_dict(drv)
        elif isinstance(drv, stringtypes):
            drv = Derivation.from_string(drv)
    return drv
python
{ "resource": "" }
q8788
ParseResult.tree
train
def tree(self):
    """
    Deserialize and return a labeled syntax tree. The tree data may be
    a standalone datum, or embedded in the derivation.
    """
    tree = self.get('tree')
    if isinstance(tree, stringtypes):
        tree = SExpr.parse(tree).data
    elif tree is None:
        drv = self.get('derivation')
        if isinstance(drv, dict) and 'label' in drv:
            def _extract_tree(d):
                t = [d.get('label', '')]
                if 'tokens' in d:
                    t.append([d.get('form', '')])
                else:
                    for dtr in d.get('daughters', []):
                        t.append(_extract_tree(dtr))
                return t
            tree = _extract_tree(drv)
    return tree
python
{ "resource": "" }
q8789
ParseResult.mrs
train
def mrs(self):
    """
    Deserialize and return an Mrs object for simplemrs or JSON-formatted
    MRS data; otherwise return the original string.
    """
    mrs = self.get('mrs')
    if mrs is not None:
        if isinstance(mrs, dict):
            mrs = Mrs.from_dict(mrs)
        elif isinstance(mrs, stringtypes):
            mrs = simplemrs.loads_one(mrs)
    return mrs
python
{ "resource": "" }
q8790
ParseResult.eds
train
def eds(self):
    """
    Deserialize and return an Eds object for native- or JSON-formatted
    EDS data; otherwise return the original string.
    """
    _eds = self.get('eds')
    if _eds is not None:
        if isinstance(_eds, dict):
            _eds = eds.Eds.from_dict(_eds)
        elif isinstance(_eds, stringtypes):
            _eds = eds.loads_one(_eds)
    return _eds
python
{ "resource": "" }
q8791
ParseResult.dmrs
train
def dmrs(self):
    """
    Deserialize and return a Dmrs object for JSON-formatted DMRS data;
    otherwise return the original string.
    """
    dmrs = self.get('dmrs')
    if dmrs is not None:
        if isinstance(dmrs, dict):
            dmrs = Dmrs.from_dict(dmrs)
    return dmrs
python
{ "resource": "" }
q8792
ParseResponse.tokens
train
def tokens(self, tokenset='internal'):
    """
    Deserialize and return a YyTokenLattice object for the initial or
    internal token set, if provided, from the YY format or the
    JSON-formatted data; otherwise return the original string.

    Args:
        tokenset (str): return `'initial'` or `'internal'` tokens
            (default: `'internal'`)
    Returns:
        :class:`YyTokenLattice`
    """
    toks = self.get('tokens', {}).get(tokenset)
    if toks is not None:
        if isinstance(toks, stringtypes):
            toks = YyTokenLattice.from_string(toks)
        elif isinstance(toks, Sequence):
            toks = YyTokenLattice.from_list(toks)
    return toks
python
{ "resource": "" }
q8793
GameHighScore.to_array
train
def to_array(self):
    """
    Serializes this GameHighScore to a dictionary.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(GameHighScore, self).to_array()
    array['position'] = int(self.position)  # type int
    array['user'] = self.user.to_array()  # type User
    array['score'] = int(self.score)  # type int
    return array
python
{ "resource": "" }
q8794
GameHighScore.from_array
train
def from_array(array):
    """
    Deserialize a new GameHighScore from a given dictionary.

    :return: new GameHighScore instance.
    :rtype: GameHighScore
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")
    from pytgbot.api_types.receivable.peer import User

    data = {}
    data['position'] = int(array.get('position'))
    data['user'] = User.from_array(array.get('user'))
    data['score'] = int(array.get('score'))
    data['_raw'] = array
    return GameHighScore(**data)
python
{ "resource": "" }
q8795
valuemap
train
def valuemap(f):
    """
    Decorator to help PEG functions handle value conversions.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        if 'value' in kwargs:
            val = kwargs['value']
            del kwargs['value']
            _f = f(*args, **kwargs)
            def valued_f(*args, **kwargs):
                result = _f(*args, **kwargs)
                s, obj, span = result
                if callable(val):
                    return PegreResult(s, val(obj), span)
                else:
                    return PegreResult(s, val, span)
            return valued_f
        else:
            return f(*args, **kwargs)
    return wrapper
python
{ "resource": "" }
q8796
literal
train
def literal(x):
    """
    Create a PEG function to consume a literal.
    """
    xlen = len(x)
    msg = 'Expected: "{}"'.format(x)
    def match_literal(s, grm=None, pos=0):
        if s[:xlen] == x:
            return PegreResult(s[xlen:], x, (pos, pos+xlen))
        raise PegreError(msg, pos)
    return match_literal
python
{ "resource": "" }
q8797
regex
train
def regex(r):
    """
    Create a PEG function to match a regular expression.
    """
    if isinstance(r, stringtypes):
        p = re.compile(r)
    else:
        p = r
    msg = 'Expected to match: {}'.format(p.pattern)
    def match_regex(s, grm=None, pos=0):
        m = p.match(s)
        if m is not None:
            start, end = m.span()
            data = m.groupdict() if p.groupindex else m.group()
            return PegreResult(s[m.end():], data, (pos+start, pos+end))
        raise PegreError(msg, pos)
    return match_regex
python
{ "resource": "" }
q8798
nonterminal
train
def nonterminal(n):
    """
    Create a PEG function to match a nonterminal.
    """
    def match_nonterminal(s, grm=None, pos=0):
        if grm is None:
            grm = {}
        expr = grm[n]
        return expr(s, grm, pos)
    return match_nonterminal
python
{ "resource": "" }
q8799
and_next
train
def and_next(e):
    """
    Create a PEG function for positive lookahead.
    """
    def match_and_next(s, grm=None, pos=0):
        try:
            e(s, grm, pos)
        except PegreError as ex:
            raise PegreError('Positive lookahead failed', pos)
        else:
            return PegreResult(s, Ignore, (pos, pos))
    return match_and_next
python
{ "resource": "" }