def parent(self, index):
    '''Return parent of *index*.'''
    if not index.isValid():
        return QModelIndex()

    item = index.internalPointer()
    if not item:
        return QModelIndex()

    parent = item.parent
    if not parent or parent == self.root:
        return QModelIndex()

    return self.createIndex(parent.row, 0, parent)

def data(self, index, role):
    '''Return data for *index* according to *role*.'''
    if not index.isValid():
        return None

    column = index.column()
    item = index.internalPointer()

    if role == self.ITEM_ROLE:
        return item

    elif role == Qt.DisplayRole:
        if column == 0:
            return item.name
        elif column == 1:
            if item.size:
                return item.size
        elif column == 2:
            return item.type
        elif column == 3:
            if item.modified is not None:
                return item.modified.strftime('%c')

    elif role == Qt.DecorationRole:
        if column == 0:
            return self.iconFactory.icon(item)

    elif role == Qt.TextAlignmentRole:
        if column == 1:
            return Qt.AlignRight
        else:
            return Qt.AlignLeft

    return None

def headerData(self, section, orientation, role):
    '''Return label for *section* according to *orientation* and *role*.'''
    if orientation == Qt.Horizontal:
        if section < len(self.columns):
            column = self.columns[section]
            if role == Qt.DisplayRole:
                return column

    return None

def hasChildren(self, index):
    '''Return if *index* has children.

    Optimised to avoid loading children at this stage.

    '''
    if not index.isValid():
        item = self.root
    else:
        item = index.internalPointer()

    if not item:
        return False

    return item.mayHaveChildren()

def canFetchMore(self, index):
    '''Return if more data available for *index*.'''
    if not index.isValid():
        item = self.root
    else:
        item = index.internalPointer()

    return item.canFetchMore()

def fetchMore(self, index):
    '''Fetch additional data under *index*.'''
    if not index.isValid():
        item = self.root
    else:
        item = index.internalPointer()

    if item.canFetchMore():
        startIndex = len(item.children)
        additionalChildren = item.fetchChildren()
        endIndex = startIndex + len(additionalChildren) - 1
        if endIndex >= startIndex:
            self.beginInsertRows(index, startIndex, endIndex)
            for newChild in additionalChildren:
                item.addChild(newChild)
            self.endInsertRows()

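The three methods above implement Qt's incremental-population protocol. A minimal usage sketch, assuming a PySide-style binding and a FilesystemModel class assembled from these methods (the constructor and Directory root are illustrative, not from the original source):

    from PySide import QtGui

    app = QtGui.QApplication([])

    model = FilesystemModel(root=Directory('/'))  # hypothetical constructor
    view = QtGui.QTreeView()
    view.setModel(model)
    # QTreeView drives the laziness: hasChildren() decides whether to draw an
    # expand arrow, and canFetchMore()/fetchMore() run when a node is expanded.
    view.show()

    app.exec_()
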
def lessThan(self, left, right):
    '''Return ordering of *left* vs *right*.'''
    sourceModel = self.sourceModel()
    if sourceModel:
        leftItem = sourceModel.item(left)
        rightItem = sourceModel.item(right)

        if (isinstance(leftItem, Directory)
                and not isinstance(rightItem, Directory)):
            return self.sortOrder() == Qt.AscendingOrder

        elif (not isinstance(leftItem, Directory)
                and isinstance(rightItem, Directory)):
            return self.sortOrder() == Qt.DescendingOrder

    return super(FilesystemSortProxy, self).lessThan(left, right)

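This comparison keeps directories grouped ahead of files whichever column is sorted. A minimal wiring sketch (the model and view names are assumptions):

    proxy = FilesystemSortProxy()
    proxy.setSourceModel(model)   # the filesystem model defined above

    view.setModel(proxy)
    view.setSortingEnabled(True)  # lessThan() now arbitrates every sort
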
def pathIndex(self, path):
    '''Return index of item with *path*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return QModelIndex()

    return self.mapFromSource(sourceModel.pathIndex(path))

def item(self, index):
    '''Return item at *index*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return None

    return sourceModel.item(self.mapToSource(index))

def icon(self, index):
    '''Return icon for *index*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return None

    return sourceModel.icon(self.mapToSource(index))

def hasChildren(self, index):
    '''Return if *index* has children.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return False

    return sourceModel.hasChildren(self.mapToSource(index))

def canFetchMore(self, index):
    '''Return if more data available for *index*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return False

    return sourceModel.canFetchMore(self.mapToSource(index))

def fetchMore(self, index):
    '''Fetch additional data under *index*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return False

    return sourceModel.fetchMore(self.mapToSource(index))

def icon(self, specification):
    '''Return appropriate icon for *specification*.

    *specification* should be either:

        * An instance of :py:class:`riffle.model.Item`
        * One of the defined icon types (:py:class:`IconType`)

    '''
    if isinstance(specification, riffle.model.Item):
        specification = self.type(specification)

    icon = None

    if specification == IconType.Computer:
        icon = QtGui.QIcon(':riffle/icon/computer')
    elif specification == IconType.Mount:
        icon = QtGui.QIcon(':riffle/icon/drive')
    elif specification == IconType.Directory:
        icon = QtGui.QIcon(':riffle/icon/folder')
    elif specification == IconType.File:
        icon = QtGui.QIcon(':riffle/icon/file')
    elif specification == IconType.Collection:
        icon = QtGui.QIcon(':riffle/icon/collection')

    return icon

def type(self, item):
    '''Return appropriate icon type for *item*.'''
    iconType = IconType.Unknown

    if isinstance(item, riffle.model.Computer):
        iconType = IconType.Computer
    elif isinstance(item, riffle.model.Mount):
        iconType = IconType.Mount
    elif isinstance(item, riffle.model.Directory):
        iconType = IconType.Directory
    elif isinstance(item, riffle.model.File):
        iconType = IconType.File
    elif isinstance(item, riffle.model.Collection):
        iconType = IconType.Collection

    return iconType

import os
import subprocess
import sys
from multiprocessing import Value


def call(args, stdout=None, stderr=None, stdin=None, daemonize=False,
         preexec_fn=None, shell=False, cwd=None, env=None):
    """
    Run an external command in a separate process and detach it from the
    current process. Excepting `stdout`, `stderr`, and `stdin` all file
    descriptors are closed after forking. If `daemonize` is True then the
    parent process exits. All stdio is redirected to `os.devnull` unless
    specified. The `preexec_fn`, `shell`, `cwd`, and `env` parameters are
    the same as their `Popen` counterparts. Return the PID of the child
    process if not daemonized.
    """
    stream = lambda s, m: s is None and os.open(os.devnull, m) or s
    stdout = stream(stdout, os.O_WRONLY)
    stderr = stream(stderr, os.O_WRONLY)
    stdin = stream(stdin, os.O_RDONLY)

    # Shared integer so the grandchild's PID survives the double fork.
    shared_pid = Value('i', 0)
    pid = os.fork()

    if pid > 0:
        os.waitpid(pid, 0)
        child_pid = shared_pid.value
        del shared_pid
        if daemonize:
            sys.exit(0)
        return child_pid
    else:
        os.setsid()
        proc = subprocess.Popen(args, stdout=stdout, stderr=stderr,
                                stdin=stdin, close_fds=True,
                                preexec_fn=preexec_fn, shell=shell,
                                cwd=cwd, env=env)
        shared_pid.value = proc.pid
        os._exit(0)

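A usage sketch for call(), assuming the function above is importable; the commands are illustrative:

    # Launch a long-running process detached from this one and keep its PID.
    pid = call(['sleep', '60'])
    print('detached child pid: %s' % pid)

    # Daemonize instead: the child survives, the calling process exits here.
    # call(['my-server', '--port', '8080'], daemonize=True)
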
def _get_max_fd(self):
    """Return the maximum file descriptor value."""
    limits = resource.getrlimit(resource.RLIMIT_NOFILE)
    result = limits[1]
    if result == resource.RLIM_INFINITY:
        # `maxfd` is a module-level default used when the hard limit
        # is unbounded.
        result = maxfd
    return result

def _close_fd(self, fd):
    """Close a file descriptor if it is open."""
    try:
        os.close(fd)
    except OSError, exc:
        if exc.errno != errno.EBADF:
            msg = "Failed to close file descriptor {}: {}".format(fd, exc)
            raise Error(msg)

def _close_open_fds(self):
    """Close open file descriptors."""
    maxfd = self._get_max_fd()
    for fd in reversed(range(maxfd)):
        if fd not in self.exclude_fds:
            self._close_fd(fd)

def _redirect(self, stream, target):
    """Redirect a system stream to the provided target."""
    if target is None:
        target_fd = os.open(os.devnull, os.O_RDWR)
    else:
        target_fd = target.fileno()

    os.dup2(target_fd, stream.fileno())

def set_form_widgets_attrs(form, attrs):
    """Applies the given HTML attributes to each field widget of the given form.

    Example:

        set_form_widgets_attrs(my_form, {'class': 'clickable'})

    """
    for _, field in form.fields.items():
        attrs_ = dict(attrs)

        for name, val in attrs.items():
            if hasattr(val, '__call__'):
                attrs_[name] = val(field)

        field.widget.attrs = field.widget.build_attrs(attrs_)

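A usage sketch: callable attribute values receive each field, so per-field attributes can be derived on the fly (my_form is an assumed Django form instance):

    set_form_widgets_attrs(my_form, {
        'class': 'clickable',                # applied verbatim to every widget
        'title': lambda field: field.label,  # computed per field
    })
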
def get_model_class_from_string(model_path):
    """Returns a certain model as defined in a string formatted `<app_name>.<model_name>`.

    Example:

        model = get_model_class_from_string('myapp.MyModel')

    """
    try:
        app_name, model_name = model_path.split('.')
    except ValueError:
        raise ImproperlyConfigured(
            '`%s` must have the following format: `app_name.model_name`.' % model_path)

    if apps_get_model is None:
        model = get_model(app_name, model_name)
    else:
        try:
            model = apps_get_model(app_name, model_name)
        except (LookupError, ValueError):
            model = None

    if model is None:
        raise ImproperlyConfigured(
            '`%s` refers to a model `%s` that has not been installed.' % (model_path, model_name))

    return model

def get_site_url(request=None):
    """Tries to get a site URL from environment and settings
    in the following order:

    1. (SITE_PROTO / SITE_SCHEME) + SITE_DOMAIN
    2. SITE_URL
    3. Django Sites contrib
    4. Request object

    :param HttpRequest request: Request object to deduce URL from.
    :rtype: str

    """
    env = partial(environ.get)
    settings_ = partial(getattr, settings)

    domain = None
    scheme = None
    url = None

    for src in (env, settings_):
        if url is None:
            url = src('SITE_URL', None)
        if domain is None:
            domain = src('SITE_DOMAIN', None)
        if scheme is None:
            scheme = src('SITE_PROTO', src('SITE_SCHEME', None))

    if domain is None and url is not None:
        scheme, domain = url.split('://')[:2]

    if domain is None:
        site = get_current_site(request or DomainGetter(domain))
        domain = site.domain

    if scheme is None and request:
        scheme = request.scheme

    if domain is None:
        domain = 'undefined-domain.local'

    if scheme is None:
        scheme = 'http'

    domain = domain.rstrip('/')

    return '%s://%s' % (scheme, domain)

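A usage sketch inside a Django view (the view name and path are illustrative):

    def welcome_email_context(request):
        site_url = get_site_url(request=request)  # e.g. 'https://example.com'
        return {'activation_link': '%s/activate/' % site_url}
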
def import_app_module(app_name, module_name):
    """Returns a module from a given app by its name.

    :param str app_name:
    :param str module_name:
    :rtype: module or None

    """
    name_split = app_name.split('.')
    if name_split[-1][0].isupper():
        # Seems that we have app config class path here.
        app_name = '.'.join(name_split[:-2])

    module = import_module(app_name)

    try:
        sub_module = import_module('%s.%s' % (app_name, module_name))
        return sub_module
    except:
        # The same bubbling strategy as in autodiscover_modules().
        if module_has_submodule(module, module_name):  # Module is in a package.
            raise
        return None

def import_project_modules(module_name):
    """Imports modules from registered apps using given module name
    and returns them as a list.

    :param str module_name:
    :rtype: list

    """
    from django.conf import settings

    submodules = []
    for app in settings.INSTALLED_APPS:
        module = import_app_module(app, module_name)
        if module is not None:
            submodules.append(module)

    return submodules

def include_(parser, token):
    """Similar to built-in ``include`` template tag, but allowing
    template variables to be used in template name and a fallback template,
    thus making the tag more dynamic.

    .. warning:: Requires Django 1.8+

    Example:

        {% load etc_misc %}
        {% include_ "sub_{{ postfix_var }}.html" fallback "default.html" %}

    """
    bits = token.split_contents()

    dynamic = False

    # We fallback to built-in `include` if a template name contains no variables.
    if len(bits) >= 2:
        dynamic = '{{' in bits[1]

    if dynamic:
        fallback = None
        bits_new = []

        for bit in bits:
            if fallback is True:  # This bit is a `fallback` argument.
                fallback = bit
                continue

            if bit == 'fallback':
                fallback = True
            else:
                bits_new.append(bit)

        if fallback:
            fallback = parser.compile_filter(construct_relative_path_(parser, fallback))

        token.contents = ' '.join(bits_new)

    token.contents = token.contents.replace('include_', 'include')
    include_node = do_include(parser, token)

    if dynamic:
        # swap simple include with dynamic
        include_node = DynamicIncludeNode(
            include_node.template,
            extra_context=include_node.extra_context,
            isolated_context=include_node.isolated_context,
            fallback=fallback or None,
        )

    return include_node

def repositories(self):
    """
    Yield all repository objects found in the repo files in the
    specified repo folder.

    :return:
    """
    for repo_path in self.path.glob('*.repo'):
        for id, repository in self._get_repo_file(repo_path).repositories:
            yield id, repository

def _get_repo_file(self, repo_path):
    """
    Lazy load RepoFile objects on demand.

    :param repo_path:
    :return:
    """
    if repo_path not in self._repo_files:
        self._repo_files[repo_path] = RepoFile(repo_path)

    return self._repo_files[repo_path]

def from_url(url):
    """
    Given a URL, return a package.

    :param url:
    :return:
    """
    package_data = HTTPClient().http_request(url=url, decode=None)
    return Package(raw_data=package_data)

def dependencies(self):
    """
    Read the contents of the rpm itself.

    :return:
    """
    cpio = self.rpm.gzip_file.read()
    content = cpio.read()
    # Note: parsing of the payload is not implemented here; the method
    # currently always returns an empty list.
    return []

def gravatar_get_url(obj, size=65, default='identicon'):
    """Returns Gravatar image URL for a given string or UserModel.

    Example:

        {% load gravatar %}
        {% gravatar_get_url user_model %}

    :param UserModel, str obj:
    :param int size:
    :param str default:
    :return:
    """
    return get_gravatar_url(obj, size=size, default=default)

def gravatar_get_img(obj, size=65, default='identicon'):
    """Returns Gravatar image HTML tag for a given string or UserModel.

    Example:

        {% load gravatar %}
        {% gravatar_get_img user_model %}

    :param UserModel, str obj:
    :param int size:
    :param str default:
    :return:
    """
    url = get_gravatar_url(obj, size=size, default=default)
    if url:
        return safe('<img src="%s" class="gravatar">' % url)
    return ''

def parse(cls, xml_path):
    """
    Parses an xml_path with the inherited xml parser.

    :param xml_path:
    :return:
    """
    parser = etree.XMLParser(target=cls.xml_parse())
    return etree.parse(xml_path, parser)

def load(self):
    """
    Load the repo database from the remote source, and then parse it.

    :return:
    """
    data = self.http_request(self.location())
    self._parse(data)
    return self

def register_task(self, task_def):
    '''
    Register a task from a JSON definition.

    :param task_def: JSON string defining a gbdx task
    '''
    r = self.session.post(
        self.task_url,
        data=task_def,
        headers={'Content-Type': 'application/json',
                 'Accept': 'application/json'}
    )

    # task_def is a JSON string; parse it to recover the task name.
    task_dict = json.loads(task_def)

    if r.status_code == 200:
        return r.status_code, 'Task %s registered' % task_dict['name']
    else:
        return r.status_code, 'Task %s was not registered: %s' % (task_dict['name'], r.text)

def delete_task(self, task_name):
    '''
    Delete a task from the platform's registry.

    :param task_name: name of the task to delete
    '''
    response = self.session.delete('%s/%s' % (self.task_url, task_name))

    if response.status_code == 200:
        return response.status_code, 'Task %s deleted' % task_name
    elif response.status_code == 400:
        return response.status_code, None  # Task isn't registered.
    else:
        return response.status_code, 'Task %s was not deleted: %s' % (task_name, response.text)

def get_input_string_port(self, port_name, default=None):
    """
    Get input string port value.

    :param port_name:
    :param default:
    :return:
    :rtype:
    """
    if self.__string_input_ports:
        return self.__string_input_ports.get(port_name, default)
    return default

def set_output_string_port(self, port_name, value):
    """
    Set output string port value.

    :param port_name:
    :param value:
    :return:
    :rtype:
    """
    if not self.__string_output_ports:
        self.__string_output_ports = {}

    self.__string_output_ports[port_name] = value

def finalize(self, success_or_fail, message=''):
    """
    :param success_or_fail: string that is 'success' or 'fail'
    :param message:
    """
    self.logit.debug('String OutputPorts: %s' % self.__string_output_ports)

    if self.__string_output_ports:
        with open(os.path.join(self.output_path, 'ports.json'), 'w') as opf:
            json.dump(self.__string_output_ports, opf, indent=4)
        self.logit.debug('Ports.json written to %s' % os.path.join(self.output_path, 'ports.json'))

    with open(os.path.join(self.base_path, 'status.json'), 'w') as sf:
        json.dump({'status': success_or_fail, 'reason': message}, sf, indent=4)

def list_files(self, extensions=None):
    """
    List the port's contents by file type, or all.

    :param extensions: string extensions, single string or list of extensions.
    :return: A list of full path names of each file.
    """
    if self.type.lower() != 'directory':
        raise ValueError("Port type is not == directory")

    filesystem_location = self.path

    # Note: both branches return during the first os.walk() iteration, so
    # only the top-level directory is inspected.
    for root, dirs, files in os.walk(filesystem_location):
        if extensions is None:
            return [os.path.join(root, f) for f in files]
        elif not isinstance(extensions, list):
            extensions = [extensions]

        subset_files = []
        for f in files:
            for extension in extensions:
                if f.lower().endswith(extension.lower()):
                    subset_files.append(os.path.join(root, f))
                    break

        return subset_files

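A usage sketch, assuming port is a directory-type port object exposing list_files() as defined above:

    tifs = port.list_files(extensions=['.tif', '.tiff'])
    for path in tifs:
        print(path)
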
def is_valid_filesys(path):
    """Checks if the path is correct and exists: must be absolute, a directory, and not a file."""
    if os.path.isabs(path) and os.path.isdir(path) and \
            not os.path.isfile(path):
        return True
    else:
        raise LocalPortValidationError(
            'Port value %s is not a valid filesystem location' % path
        )

def is_valid_s3_url(url):
    """Checks if the url contains S3. Not an accurate validation of the url."""
    # Skip if the url starts with source: (gbdxtools syntax)
    if url.startswith('source:'):
        return True

    scheme, netloc, path, _, _, _ = urlparse(url)

    port_except = RemotePortValidationError(
        'Port value %s is not a valid s3 location' % url
    )

    if len(scheme) < 2:
        raise port_except

    if 's3' in scheme or 's3' in netloc or 's3' in path:
        return True
    else:
        raise port_except

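A quick sketch of what passes and what raises (the values are illustrative):

    is_valid_s3_url('s3://gbd-customer-data/acct-id/run/port')  # True
    is_valid_s3_url('source:some_task:output_port')  # True (gbdxtools syntax)
    is_valid_s3_url('/tmp/local/path')  # raises RemotePortValidationError
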
def invoke(self):
    """
    Execute the command from the arguments.

    :return: None or Error
    """
    for key in self.FUNCTION_KEYS.keys():
        if self._arguments[key] is True:
            self.FUNCTION_KEYS[key]()

def _register_anonymous_task(self):
    """
    Register the anonymous task, or overwrite it.

    :return: success or fail message.
    """
    is_overwrite = self._arguments.get('--overwrite')

    task_name = "CloudHarness_Anonymous_Task"
    task_srv = TaskService()

    if is_overwrite:
        # Delete the task first
        code, message = task_srv.delete_task(task_name)
        # ignore status if deleted, or not registered
        if code not in [200, 400]:
            raise TaskRegistryError(message)

    task_def_file = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        'gbdx_task_template',
        'task_definition.json'
    )

    with open(task_def_file, 'r') as f:
        code, message = task_srv.register_task(f.read())

    if code == 200:
        print(message)
    elif code == 409:
        print('Task already exists')
    else:
        raise TaskRegistryError(message)

def _create_app(self):
    """
    Method for creating a new Application Template.

    USAGE:
        cloud-harness create <dir_name> [--destination=<path>]
    """
    template_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        self.TEMPLATE_FOLDER,
        self.TEMPLATE_FILENAME
    )

    new_dir = self._arguments['<dir_name>']

    # Make new application directory
    override_destination = self._arguments.get('--destination', None)

    if override_destination is not None:
        if override_destination == '':
            raise ValueError('Destination path is empty')

        # Check if the new destination is abs and exists.
        if os.path.isabs(override_destination) and os.path.isdir(override_destination):
            new_dir = os.path.join(override_destination, new_dir)
        else:
            # Create a path from the cwd, then check if it is valid and exists.
            override_path = os.path.join(os.getcwd(), override_destination)
            if not os.path.isabs(override_path) or not os.path.isdir(override_path):
                raise ValueError('New path parameter %s is not a directory' % override_destination)
            new_dir = os.path.join(override_path, new_dir)
    else:
        if os.path.isabs(new_dir) or os.path.sep in new_dir:
            raise ValueError("Directory name is invalid")

        # No override, put the folder in the cwd.
        new_dir = os.path.join(os.getcwd(), new_dir)

    os.makedirs(new_dir)

    new_file_path = os.path.join(new_dir, self.DEFAULT_NEW_APP_FILENAME)

    # Copy the template to the new application location.
    shutil.copyfile(template_path, new_file_path)

    printer('New Application created at %s' % new_file_path)

def _run_app(self):
    """
    Method for running a custom Application Template.

    NOTES:
        * The default name of the application is app.py. So this function is
          going to look for app.py, unless the --file option is provided with
          a different file name.
        * The generated source bundle will package everything in the
          work_path. If large files are not required for the application
          source, they need to be ignored. Use a file called "pkg_ignore"
          to identify folders and files to ignore.

    USAGE:
        cloud-harness run <file_name> [--remote] [--verbose] [--upload] [--download] [--dry-run]
    """
    is_remote_run = self._arguments.get('--remote')
    filename = self._arguments.get('<file_name>')
    upload_ports = self._arguments.get('--upload')
    download_ports = self._arguments.get('--download')
    is_verbose = self._arguments.get('--verbose')

    # A dry run means: allow ports to be pushed up, but don't allow
    # execution and monitoring.
    is_dry_run = self._arguments.get('--dry-run')

    if download_ports:
        # TODO temporary until implemented.
        raise NotImplementedError("Downloading of output ports is not implemented yet.")

    # Check if the filename passed is actually a class object (gbdxtools functionality)
    if not isinstance(filename, str) and issubclass(filename, TaskTemplate):
        template_class = filename
        template_file = inspect.getfile(template_class)
        config_file = self._write_config_file(template_file)
    else:
        template_file = self._get_template_abs_path(filename)

        if not os.path.isfile(template_file):
            raise ValueError('The location %s does not exist' % template_file)

        config_file = self._write_config_file(template_file)
        template_class = self._get_class(template_file)

    with template_class() as template:

        if is_remote_run:
            # Means the user is running with --remote, push to S3 and submit workflow.
            task = template.task

            # Set the source bundle directory to where the template_file is.
            task.source_bundle.value = os.path.join(
                os.path.dirname(template_file), 'tmp_%s' % str(uuid.uuid4()))

            task.run_name = '{task_name}_src'.format(task_name=task.name)

            src_bundle_dir = task.source_bundle.value

            # Create source bundle to be executed on the GBDX platform
            self._archive_source(os.path.dirname(src_bundle_dir), src_bundle_dir)

            port_service = PortService(task)

            if upload_ports:
                # Push all port data to S3
                port_service.upload_input_ports()
            else:
                # Only push source bundle port
                port_service.upload_input_ports(port_list=[self.SOURCE_BUNDLE_PORT])

            # Delete source bundle directory and config after upload.
            shutil.rmtree(src_bundle_dir)
            os.remove(config_file)

            # Get the new task object with uploaded port locations.
            task = port_service.task

            # Validate task
            task.is_valid(remote=True)

            workflow = Workflow(task)

            if is_verbose:
                temp_wf = workflow.json
                printer(temp_wf)

            if is_dry_run:
                return task

            try:
                workflow.execute()
                printer(workflow.id)
            except Exception as e:
                printer(e.message)
                template.reason = "Execution Failed: %s" % e.message
                return

            # Monitor events of workflow
            is_done = workflow.monitor_run()

            if not is_done:
                template.reason = "Execution Failed during Run"

            if download_ports:
                # TODO port_service.download_output_port()
                pass
        else:
            # For local and Docker container execution.
            # Check that all output locations exist.
            template.check_and_create_outputs()

            # Validate task
            template.task.is_valid()

            if is_verbose:
                printer(template.task.json())
                all_ports = template.task.ports[0] + template.task.ports[1]
                printer([port.__str__() for port in all_ports])

            if is_dry_run:
                template.reason = "Execution Skipped"
                return

            # Run Task Locally
            try:
                template.invoke()
            except Exception as e:
                template.reason = "Failed Exception: %s" % e

        if template.reason is None or template.reason == '':
            template.reason = "Execution Completed"

def _write_config_file(template_file):
    """
    Write a config file to the source bundle location to identify the entry point.

    :param template_file: path to the task template subclass (executable)
    """
    config_filename = '.cloud_harness_config.json'
    config_path = os.path.dirname(template_file)

    filename = os.path.split(template_file)[1]
    if filename.endswith('.pyc'):
        filename = filename[:-1]

    config_file = os.path.join(config_path, config_filename)

    with open(config_file, 'w') as f:
        f.write(json.dumps({'task_filename': filename}))

    return config_file

def _get_class(template_file):
    """
    Import the file and inspect for subclass of TaskTemplate.

    :param template_file: filename to import.
    """
    with warnings.catch_warnings():
        # suppress warning from importing
        warnings.filterwarnings("ignore", category=RuntimeWarning)
        template_module = imp.load_source('module.name', template_file)

    # Find the subclass of TaskTemplate
    for name, data in inspect.getmembers(template_module, inspect.isclass):
        if issubclass(data, TaskTemplate) and data.__name__ != TaskTemplate.__name__:
            return data

def _get_template_abs_path(filename):
    """
    Return a valid absolute path. filename can be relative or absolute.
    """
    if os.path.isabs(filename) and os.path.isfile(filename):
        return filename
    else:
        return os.path.join(os.getcwd(), filename)

def upload(self, source_files, s3_folder=None):
    """
    Upload a list of files to a user's account location.

    :param source_files: list of files to upload, or single file name
    :param s3_folder: the user location to upload to.
    """
    if s3_folder is None:
        folder = self.prefix
    else:
        folder = '%s/%s' % (self.prefix, s3_folder)

    if isinstance(source_files, list):
        for file_tuple in source_files:
            self.__upload_file(file_tuple, folder)
    elif isinstance(source_files, tuple):
        self.__upload_file(source_files, folder)
    else:
        raise ValueError("Source Files must be a tuple or list of tuples: (filename, keyname)")

def download(self, local_port_path, key_names):  # pragma: no cover
    """
    Download all files from a user's account location.

    :param local_port_path: the local path where the data is to download to
    :param key_names: can start with self.prefix or taken as relative to prefix.

    Example:
        local_port_path = /home/user/myworkflow/input_images/ (sync all data in this folder)
        s3_folder = myworkflow/input_images/ (location on s3 that will be synced to local path)
    """
    if not os.path.isdir(local_port_path):
        raise ValueError("Download path does not exist: %s" % local_port_path)

    if not isinstance(key_names, list):
        key_names = [key_names]

    for key_name in key_names:
        is_folder = key_name.endswith('/')

        # strip leading and trailing slashes
        key_name = key_name.lstrip('/').rstrip('/')

        key_parts = key_name.split('/')

        # Key names from the list function will include the account prefix
        # and any folder namespace.
        if key_parts[0] == self.prefix:
            path = os.path.join(local_port_path, *key_parts[1:])
            if not is_folder:
                folder_path = os.path.join(local_port_path, *key_parts[1:-1])
            get_key_name = key_name
        else:
            path = os.path.join(local_port_path, *key_parts)
            if not is_folder:
                folder_path = os.path.join(local_port_path, *key_parts[:-1])
            get_key_name = '%s/%s' % (self.prefix, key_name)

        if is_folder and not os.path.isdir(path):
            # A directory that doesn't exist
            os.makedirs(path)
        else:
            if not os.path.isdir(folder_path):
                os.makedirs(folder_path)
            # Assume it is a file
            self.__download_file(path, get_key_name)

def list(self, s3_folder='', full_key_data=False):
    """Get a list of keys for the account."""
    if not s3_folder.startswith('/'):
        s3_folder = '/' + s3_folder

    s3_prefix = self.prefix + s3_folder

    bucket_data = self.client.list_objects(Bucket=self.bucket, Prefix=s3_prefix)

    if full_key_data:
        return bucket_data['Contents']
    else:
        return [k['Key'] for k in bucket_data['Contents']]

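Note that boto3's list_objects returns at most 1000 keys per call. If an account can exceed that, a paginator covers the full listing; a hedged sketch under that assumption, mirroring the names used above:

    def list_all_keys(client, bucket, s3_prefix):
        # Walk every result page instead of trusting a single list_objects call.
        keys = []
        paginator = client.get_paginator('list_objects')
        for page in paginator.paginate(Bucket=bucket, Prefix=s3_prefix):
            keys.extend(k['Key'] for k in page.get('Contents', []))
        return keys
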
def _build_worklfow_json(self):
    """
    Build a workflow definition from the cloud_harness task.
    """
    wf_json = {'tasks': [], 'name': 'cloud-harness_%s' % str(uuid.uuid4())}

    task_def = json.loads(self.task_template.json())

    d = {
        "name": task_def['name'],
        "outputs": [],
        "inputs": [],
        "taskType": task_def['taskType']
    }

    # Add input ports
    for port in self.task_template.input_ports:
        port_value = port.value
        if port_value is False:
            port_value = 'false'
        if port_value is True:
            port_value = 'true'
        d['inputs'].append({
            "name": port._name,
            "value": port_value
        })

    # Add output ports
    for port in self.task_template.output_ports:
        d['outputs'].append({
            "name": port._name
        })

    # Add task to workflow
    wf_json['tasks'].append(d)

    # Add port to be saved
    for port in self.task_template.output_ports:
        # Add save data locations
        if hasattr(port, 'stageToS3') and port.stageToS3:
            save_location = '{customer_storage}/{run_name}/{port}'.format(
                customer_storage=self.storage.location,
                run_name=self.task_template.run_name,
                port=port.name
            )

            new_task = dict(**self.STAGE_TO_S3)

            new_task['inputs'] = [
                {'name': 'data', 'source': '%s:%s' % (task_def['name'], port._name)},
                {'name': 'destination', 'value': save_location}
            ]

            wf_json['tasks'].append(new_task)

    return wf_json

def execute(self, override_wf_json=None):
    """
    Execute the cloud_harness task.
    """
    r = self.gbdx.post(
        self.URL,
        json=self.json if override_wf_json is None else override_wf_json
    )

    try:
        r.raise_for_status()
    except:
        print("GBDX API Status Code: %s" % r.status_code)
        print("GBDX API Response: %s" % r.text)
        self.id = None
        return

    self.id = r.json()['id']

    self._refresh_status()

def monitor_run(self):  # pragma: no cover
    """
    Monitor the workflow's events and display a spinner while running.
    """
    spinner = itertools.cycle(['-', '/', '|', '\\'])

    while not self.complete:
        for i in xrange(300):
            sys.stdout.write(spinner.next())
            sys.stdout.flush()
            sys.stdout.write('\b')
            time.sleep(0.03)

    if self.succeeded:
        sys.stdout.write("\nWorkflow completed successfully\n")
        return True
    else:
        sys.stdout.write("\nWorkflow failed: %s\n" % self.status)
        return False

def finalize(self, success_or_fail, message=''):
    """
    :param success_or_fail: string that is 'success' or 'fail'
    :param message:
    """
    if not self.__remote_run:
        return json.dumps({'status': success_or_fail, 'reason': message}, indent=4)
    else:
        super(TaskTemplate, self).finalize(success_or_fail, message)

def check_and_create_outputs(self):
    """
    Iterate through the task outputs. Scenarios:
        - User is running locally: check that output folders exist.
        - User is running remotely: when the docker container runs on the
          filesystem, check that output folders exist.
        - Else, do nothing.

    :return: None
    """
    if self.task is None:
        raise TaskTemplateError('A task must be initialized before running a TaskTemplate subclass.')

    for output_port in self.task.output_ports:
        # Make the dir
        if output_port.type == 'directory':
            try:
                is_file = os.path.isabs(output_port.value) and not os.path.isfile(output_port.value)
                is_remote = output_port.is_valid_s3_url(output_port.value)
            except LocalPortValidationError:
                is_file = False
                is_remote = None
            except RemotePortValidationError:
                is_remote = False

            self.logit.debug('Create Outputs: %s -> is_filesys %s, is_valid_s3_url %s' %
                             (output_port.name, is_file, is_remote))

            if is_file and not is_remote:
                try:
                    os.makedirs(output_port.value)
                except OSError as e:
                    self.logit.exception(e)
                    if 'File exists' not in e.strerror:
                        raise e

def upload_input_ports(self, port_list=None, exclude_list=None):
    """
    Takes the workflow value for each port and does the following:

    * If local filesystem -> uploads local files to S3. The S3 location will be:
        gbd-customer-data/<acct_id>/<workflow_name>/<task_name>/<port_name>/
    * If S3 url -> do nothing.

    :returns the updated workflow with S3 urls.
    """
    input_ports = self._task.input_ports

    for port in input_ports:

        # If port list is not None, then only allow port names in the list
        if port_list and port.name not in port_list:
            continue

        # Exclude ports as provided
        if exclude_list and port.name in exclude_list:
            continue

        # port_value = port.get('value', None)

        # Check if the port value is a valid file system location
        if not port.value or not os.path.isabs(port.value) or not os.path.isdir(port.value):
            continue

        # The prefix for each key that is uploaded, not including the acct id.
        prefix = '{run_name}/{port}'.format(
            run_name=self._task.run_name,
            # task=self._task.name,
            port=port.name
        )

        port_files = self._get_port_files(port.value, prefix)

        # Update the port value with an S3 url
        port.value = '%s/%s' % (self.s3_root, prefix)

        if len(port_files) == 0:
            printer('Port %s is empty, push to S3 skipped' % port.name)
        else:
            self.storage.upload(port_files)
            printer('Port %s pushed to account storage, %s files' % (port.name, len(port_files)))

def _get_port_files(local_path, prefix):
    """
    Find files under local_path and return tuples of filenames and key names.

    :param local_path: the local path to search for files
    :param prefix: the S3 prefix for each key name on S3
    """
    source_files = []

    for root, dirs, files in os.walk(local_path, topdown=False):
        for name in files:
            fname = os.path.join(root, name)
            key_name = '%s/%s' % (prefix, fname[len(local_path) + 1:])
            source_files.append((fname, key_name))

    return source_files

def archive(folder, dry_run=False):
    "Move an active project to the archive."
    # error handling on archive_dir already done in main()
    for f in folder:
        if not os.path.exists(f):
            bail('folder does not exist: ' + f)

    _archive_safe(folder, PROJ_ARCHIVE, dry_run=dry_run)

def _mkdir(p):
    "The equivalent of 'mkdir -p' in shell."
    isdir = os.path.isdir

    stack = [os.path.abspath(p)]
    while not isdir(stack[-1]):
        parent_dir = os.path.dirname(stack[-1])
        stack.append(parent_dir)

    while stack:
        p = stack.pop()
        if not isdir(p):
            os.mkdir(p)

def list(pattern=()):
    "List the contents of the archive directory."
    # strategy: pick the intersection of all the patterns the user provides
    globs = ['*{0}*'.format(p) for p in pattern] + ['*']
    matches = []
    offset = len(PROJ_ARCHIVE) + 1

    for suffix in globs:
        glob_pattern = os.path.join(PROJ_ARCHIVE, '*', '*', suffix)
        matches.append(set(
            f[offset:] for f in glob.glob(glob_pattern)
        ))

    matches = reduce(lambda x, y: x.intersection(y), matches)
    for m in sorted(matches):
        print(m)

def restore(folder):
    "Restore a project from the archive."
    if os.path.isdir(folder):
        bail('a folder of the same name already exists!')

    pattern = os.path.join(PROJ_ARCHIVE, '*', '*', folder)
    matches = glob.glob(pattern)

    if not matches:
        bail('no project matches: ' + folder)

    if len(matches) > 1:
        print('Warning: multiple matches, picking the most recent', file=sys.stderr)

    source = sorted(matches)[-1]
    print(source, '-->', folder)
    shutil.move(source, '.')

def new(cls, access_token, environment='prod'):
    '''Create new storage service client.

    Arguments:
        environment(str): The service environment to be used for the client,
            'prod' or 'dev'.
        access_token(str): The access token used to authenticate with the
            service.

    Returns:
        A storage_service.Client instance

    '''
    api_client = ApiClient.new(access_token, environment)
    return cls(api_client)

def list(self, path):
    '''List the entities found directly under the given path.

    Args:
        path (str): The path of the entity to be listed. Must start with a '/'.

    Returns:
        The list of entity names directly under the given path:

            u'/12345/folder_1'

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path)

    entity = self.api_client.get_entity_by_query(path=path)
    if entity['entity_type'] not in self.__BROWSABLE_TYPES:
        raise StorageArgumentException('The entity type "{0}" cannot be '
                                       'listed'.format(entity['entity_type']))
    entity_uuid = entity['uuid']

    file_names = []

    # get files
    more_pages = True
    page_number = 1
    while more_pages:
        response = self.api_client.list_folder_content(
            entity_uuid, page=page_number, ordering='name')
        more_pages = response['next'] is not None
        page_number += 1
        for child in response['results']:
            pattern = '/{name}' if child['entity_type'] == 'folder' else '{name}'
            file_names.append(pattern.format(name=child['name']))

    return file_names

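A usage sketch: folders come back with a leading slash, files without (the path and names are illustrative):

    names = client.list('/12345/folder_1')
    # e.g. ['/sub_folder_a', 'notes.txt'] -- '/sub_folder_a' is a folder,
    # 'notes.txt' is a file directly under '/12345/folder_1'.
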
def download_file(self, path, target_path):
    '''Download a file from storage service to local disk.

    Existing files on the target path will be overwritten.
    The download is not recursive, as it only works on files.

    Args:
        path (str): The path of the entity to be downloaded. Must start with a '/'.

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path)

    entity = self.api_client.get_entity_by_query(path=path)
    if entity['entity_type'] != 'file':
        raise StorageArgumentException('Only file entities can be downloaded')

    signed_url = self.api_client.get_signed_url(entity['uuid'])
    response = self.api_client.download_signed_url(signed_url)

    with open(target_path, "wb") as output:
        for chunk in response.iter_content(chunk_size=1024):
            output.write(chunk)

def exists(self, path):
    '''Check if a certain path exists in the storage service.

    Args:
        path (str): The path to be checked

    Returns:
        True if the path exists, False otherwise

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path)

    try:
        metadata = self.api_client.get_entity_by_query(path=path)
    except StorageNotFoundException:
        return False

    return metadata and 'uuid' in metadata

def get_parent(self, path):
    '''Get the parent entity of the entity pointed by the given path.

    Args:
        path (str): The path of the entity whose parent is needed

    Returns:
        A JSON object of the parent entity if found.

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path, projects_allowed=False)

    path_steps = [step for step in path.split('/') if step]
    del path_steps[-1]
    parent_path = '/{0}'.format('/'.join(path_steps))

    return self.api_client.get_entity_by_query(path=parent_path)

def mkdir(self, path):
    '''Create a folder in the storage service pointed by the given path.

    Args:
        path (str): The path of the folder to be created

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path, projects_allowed=False)

    parent_metadata = self.get_parent(path)
    self.api_client.create_folder(path.split('/')[-1], parent_metadata['uuid'])

def upload_file(self, local_file, dest_path, mimetype):
    '''Upload local file content to a storage service destination folder.

    Args:
        local_file(str)
        dest_path(str): absolute Storage service path; the '/project' prefix
            is essential, and the suffix should be the name the file will
            have in the destination folder, i.e.: /project/folder/.../file_name
        mimetype(str): set the contentType attribute

    Returns:
        The uuid of created file entity as string

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(dest_path)

    # get the paths of the target dir and the target file name
    if dest_path.endswith('/'):
        raise StorageArgumentException('Must specify target file name in dest_path argument')

    if local_file.endswith(os.path.sep):
        raise StorageArgumentException('Must specify source file name in local_file'
                                       ' argument, directory upload not supported')

    # create the file container
    new_file = self.api_client.create_file(
        name=dest_path.split('/').pop(),
        content_type=mimetype,
        parent=self.get_parent(dest_path)['uuid']
    )

    etag = self.api_client.upload_file_content(new_file['uuid'], source=local_file)
    new_file['etag'] = etag

    return new_file

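A usage sketch (the paths and mimetype are illustrative):

    new_file = client.upload_file(
        local_file='./results.csv',
        dest_path='/my_project/analysis/results.csv',
        mimetype='text/csv',
    )
    print(new_file['uuid'], new_file['etag'])
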
def delete(self, path):
    '''Delete an entity from the storage service using its path.

    Args:
        path(str): The path of the entity to be deleted

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path, projects_allowed=False)

    entity = self.api_client.get_entity_by_query(path=path)

    if entity['entity_type'] in self.__BROWSABLE_TYPES:
        # At this point it can only be a folder
        contents = self.api_client.list_folder_content(entity['uuid'])
        if contents['count'] > 0:
            raise StorageArgumentException(
                'This method cannot delete non-empty folder. Please empty the folder first.')
        self.api_client.delete_folder(entity['uuid'])
    elif entity['entity_type'] == 'file':
        self.api_client.delete_file(entity['uuid'])

def __validate_storage_path(cls, path, projects_allowed=True):
    '''Validate a string as a valid storage path.'''
    if not path or not isinstance(path, str) or path[0] != '/' or path == '/':
        raise StorageArgumentException(
            'The path must be a string, start with a slash (/), and be longer'
            ' than 1 character.')

    if not projects_allowed and len([elem for elem in path.split('/') if elem]) == 1:
        raise StorageArgumentException(
            'This method does not accept projects in the path.')

def is_valid(self, remote=False):
    """
    Check cloud-harness code is valid. Task schema validation is left to the
    API endpoint.

    :param remote: Flag indicating if the task is being run on the platform or not.
    :return: is valid or not.
    """
    if len(self.input_ports) < 1:
        return False

    if remote:
        # Ignore output ports as the values will be overridden.
        ports = [
            port for port in self.input_ports
            if port.type == 'directory'
        ]
        for port in ports:
            # Will raise exception if the port is invalid.
            port.is_valid_s3_url(port.value)
    else:
        all_ports = self.ports[0] + self.ports[1]
        ports = [
            port for port in all_ports
            if port.type == 'directory' and port.name != 'source_bundle'
        ]
        for port in ports:
            # Will raise exception if the port is invalid.
            port.is_valid_filesys(port.value)

    return True

def median_min_distance(data, metric):
    """This function computes a graph of nearest-neighbors for each sample
    point in 'data' and returns the median of the distribution of distances
    between those nearest-neighbors, the distance metric being specified
    by 'metric'.

    Parameters
    ----------
    data : array of shape (n_samples, n_features)
        The data-set, a fraction of whose sample points will be extracted
        by density sampling.

    metric : string
        The distance metric used to determine the nearest-neighbor to
        each data-point. The DistanceMetric class defined in scikit-learn's
        library lists all available metrics.

    Returns
    -------
    median_min_dist : float
        The median of the distribution of distances between nearest-neighbors.
    """
    data = np.atleast_2d(data)

    nearest_distances = kneighbors_graph(data, 1, mode='distance',
                                         metric=metric,
                                         include_self=False).data

    median_min_dist = np.median(nearest_distances, overwrite_input=True)

    return round(median_min_dist, 4)

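A usage sketch on random data, assuming the function above is importable:

    import numpy as np

    data = np.random.RandomState(0).rand(1000, 2)
    print(median_min_distance(data, 'manhattan'))  # a small float, rounded to 4 places
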
def get_local_densities(data, kernel_mult=2.0, metric='manhattan'):
    """For each sample point of the data-set 'data', estimate a local density
    in feature space by counting the number of neighboring data-points
    within a particular region centered around that sample point.

    Parameters
    ----------
    data : array of shape (n_samples, n_features)
        The data-set, a fraction of whose sample points will be extracted
        by density sampling.

    kernel_mult : float, optional (default = 2.0)
        The kernel multiplier, which determines (in terms of the median of
        the distribution of distances among nearest neighbors) the extent
        of the regions centered around each sample point to consider for
        the computation of the local density associated to that particular
        sample point.

    metric : string, optional (default = 'manhattan')
        The distance metric used to determine the nearest-neighbor to
        each data-point. The DistanceMetric class defined in scikit-learn's
        library lists all available metrics.

    Returns
    -------
    local_densities : array of shape (n_samples,)
        The i-th entry of this vector corresponds to the local density of
        the i-th sample point in the order of the rows of 'data'.
    """
    data = np.atleast_2d(data)

    assert isinstance(kernel_mult, numbers.Real) and kernel_mult > 0

    kernel_width = kernel_mult * median_min_distance(data, metric)

    N_samples = data.shape[0]

    if 8.0 * get_chunk_size(N_samples, 1) > N_samples:
        A = radius_neighbors_graph(data, kernel_width, mode='connectivity',
                                   metric=metric, include_self=True)
        rows, _ = A.nonzero()

        with NamedTemporaryFile('w', delete=True, dir='./') as file_name:
            fp = np.memmap(file_name, dtype=int, mode='w+', shape=rows.shape)
            fp[:] = rows[:]
            _, counts = np.unique(fp, return_counts=True)

        local_densities = np.zeros(N_samples, dtype=int)
        for i in xrange(N_samples):
            local_densities[i] = counts[i]
    else:
        local_densities = np.zeros(N_samples, dtype=int)

        chunks_size = get_chunk_size(N_samples, 2)
        for i in xrange(0, N_samples, chunks_size):
            chunk = data[i:min(i + chunks_size, N_samples)]

            D = pairwise_distances(chunk, data, metric, n_jobs=1)
            D = (D <= kernel_width)

            local_densities[i + np.arange(min(chunks_size, N_samples - i))] = D.sum(axis=1)

    return local_densities

def density_sampling(data, local_densities=None, metric='manhattan',
                     kernel_mult=2.0, outlier_percentile=0.01,
                     target_percentile=0.05, desired_samples=None):
    """The i-th sample point of the data-set 'data' is selected by density
    sampling with a probability given by:

                                  | 0 if outlier_density > LD[i];
    P(keep the i-th data-point) = | 1 if outlier_density <= LD[i] <= target_density;
                                  | target_density / LD[i] if LD[i] > target_density.

    Here 'LD[i]' denotes the local density of the i-th sample point of the
    data-set, whereas 'outlier_density' and 'target_density' are computed
    as particular percentiles of that distribution of local densities.

    Parameters
    ----------
    data : array of shape (n_samples, n_features)
        The data-set, a fraction of whose sample points will be extracted
        by density sampling.

    local_densities : array of shape (n_samples,), optional (default = None)
        The i-th entry of this vector corresponds to the local density of
        the i-th sample point in the order of the rows of 'data'.

    metric : string, optional (default = 'manhattan')
        The distance metric used to determine the nearest-neighbor to
        each data-point. The DistanceMetric class defined in scikit-learn's
        library lists all available metrics.

    kernel_mult : float, optional (default = 2.0)
        The kernel multiplier, which determines (in terms of the median of
        the distribution of distances among nearest neighbors) the extent
        of the regions centered around each sample point to consider for
        the computation of the local density associated to that particular
        sample point.

    outlier_percentile : float, optional (default = 0.01)
        Specify the outlier density as a percentile of the distribution
        of local densities.

    target_percentile : float, optional (default = 0.05)
        Specify the target density as a percentile of the distribution of
        local densities. Relevant only if 'desired_samples' is left
        unspecified.

    desired_samples : int, optional (default = None)
        The number of samples to be selected from the whole data-set such
        that members of rare populations and members of more common
        populations are roughly equally represented. To that purpose,
        a target density is computed so as to select about
        'desired_samples' data-points.

    Returns
    -------
    samples_kept : array of shape (n_selected_samples,)
        If the i-th sample point of 'data' has been selected by a given
        instance of density sampling, number 'i' is featured in the array
        returned by the present function.
    """
    random_state = np.random.RandomState()

    data = np.atleast_2d(data)

    for x in (kernel_mult, outlier_percentile, target_percentile):
        assert isinstance(x, numbers.Real) and x > 0

    for x in (outlier_percentile, target_percentile):
        assert x <= 1.0

    if local_densities is None:
        local_densities = get_local_densities(data, kernel_mult, metric)

    if reduce(operator.mul, local_densities.shape, 1) != max(local_densities.shape):
        raise ValueError("\nERROR: Density_Sampling: density_sampling: problem with "
                         "the dimensions of the vector of local densities provided.\n")
    else:
        local_densities = np.reshape(local_densities, local_densities.size)

    outlier_density = np.percentile(local_densities, outlier_percentile)
    target_density = np.percentile(local_densities, target_percentile)

    samples_kept = np.where(local_densities > outlier_density)[0]
    N_kept = samples_kept.size

    local_densities = local_densities[samples_kept]

    if desired_samples is None:
        probs = np.divide(target_density + 0.0, local_densities)
        ind = np.where(probs > random_state.uniform(size=N_kept))[0]
        samples_kept = samples_kept[ind]
    elif desired_samples <= N_kept:
        sorted_densities = np.sort(local_densities)

        temp = np.reciprocal(sorted_densities[::-1].astype(float))
        cdf = np.cumsum(temp)[::-1]

        target_density = (desired_samples + 0.0) / cdf[0]

        if target_density > sorted_densities[0]:
            temp = desired_samples - np.arange(1.0, N_kept + 1.0)
            possible_targets = np.divide(temp, cdf)

            ind = np.argmax(possible_targets < sorted_densities)
            target_density = possible_targets[ind]

        probs = np.divide(target_density + 0.0, local_densities)
        ind = np.where(probs > random_state.uniform(size=N_kept))[0]
        samples_kept = samples_kept[ind]
    else:
        print("\nERROR: Density_Sampling: density_sampling: 'desired_samples' has been "
              "assigned a value of {desired_samples}, larger than {N_kept}, "
              "the number of samples whose local densities are high enough "
              "(i.e. excluded are the local densities in the lowest {outlier_percentile} "
              "percentile).\n".format(**locals()))
        exit(1)

    return samples_kept

Creates a new cross-service client.
def new(cls, access_token, environment='prod'): '''Creates a new cross-service client.''' return cls( storage_client=StorageClient.new(access_token, environment=environment))
Create a new storage service REST client.
def new(cls, access_token, environment='prod'):
    '''Create a new storage service REST client.

    Arguments:
        access_token: The access token used to authenticate with the
            service
        environment: The service environment to be used for the client

    Returns:
        A storage_service.api.ApiClient instance

    Example:
        >>> storage_client = ApiClient.new(my_access_token)

    '''
    request = RequestBuilder \
        .request(environment) \
        .to_service(cls.SERVICE_NAME, cls.SERVICE_VERSION) \
        .throw(
            StorageForbiddenException,
            lambda resp: 'You are forbidden to do this.'
            if resp.status_code == 403 else None
        ) \
        .throw(
            StorageNotFoundException,
            lambda resp: 'The entity is not found'
            if resp.status_code == 404 else None
        ) \
        .throw(
            StorageException,
            lambda resp: 'Server response: {0} - {1}'.format(resp.status_code, resp.text)
            if not resp.ok else None
        )

    authenticated_request = request.with_token(access_token)

    return cls(request, authenticated_request)
Remove empty (None) valued keywords and self from function parameters
def _prep_params(params): '''Remove empty (None) valued keywords and self from function parameters''' return {k: v for (k, v) in params.items() if v is not None and k != 'self'}
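A quick illustration of what the helper strips, assuming it receives a copy of locals() taken inside a method:

# Inside a method, locals() contains 'self' plus the keyword arguments.
params = {'self': object(), 'name': 'myfile', 'page': None, 'page_size': 100}
print(_prep_params(params))
# {'name': 'myfile', 'page_size': 100} -- 'self' and None-valued keys dropped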
Get generic entity by UUID.
def get_entity_details(self, entity_id): '''Get generic entity by UUID. Args: entity_id (str): The UUID of the requested entity. Returns: A dictionary describing the entity:: { u'collab_id': 2271, u'created_by': u'303447', u'created_on': u'2017-03-10T12:50:06.077891Z', u'description': u'', u'entity_type': u'project', u'modified_by': u'303447', u'modified_on': u'2017-03-10T12:50:06.077946Z', u'name': u'2271', u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682' } Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(entity_id): raise StorageArgumentException( 'Invalid UUID for entity_id: {0}'.format(entity_id)) return self._authenticated_request \ .to_endpoint('entity/{}/'.format(entity_id)) \ .return_body() \ .get()
Retrieve entity by query param which can be either uuid/path/metadata.
def get_entity_by_query(self, uuid=None, path=None, metadata=None):
    '''Retrieve entity by query param which can be either uuid/path/metadata.

    Args:
        uuid (str): The UUID of the requested entity.
        path (str): The path of the requested entity.
        metadata (dict): A dictionary of one metadata {key: value} pair of
            the requested entity.

    Returns:
        The details of the entity, if found::

            {
                u'content_type': u'plain/text',
                u'created_by': u'303447',
                u'created_on': u'2017-03-13T10:52:23.275087Z',
                u'description': u'',
                u'entity_type': u'file',
                u'modified_by': u'303447',
                u'modified_on': u'2017-03-13T10:52:23.275126Z',
                u'name': u'myfile',
                u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
                u'uuid': u'e2c25c1b-f6a9-4cf6-b8d2-271e628a9a56'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not (uuid or path or metadata):
        raise StorageArgumentException('No parameter given for the query.')
    if uuid and not is_valid_uuid(uuid):
        raise StorageArgumentException(
            'Invalid UUID for uuid: {0}'.format(uuid))

    params = locals().copy()
    if metadata:
        if not isinstance(metadata, dict):
            raise StorageArgumentException('The metadata needs to be provided'
                                           ' as a dictionary.')
        key, value = next(iter(metadata.items()))
        params[key] = value
        del params['metadata']
    params = self._prep_params(params)

    return self._authenticated_request \
        .to_endpoint('entity/') \
        .with_params(params) \
        .return_body() \
        .get()
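Hedged examples of the three query modes, assuming client is an authenticated instance of this class; the UUID, path, and metadata values are placeholders:

# By UUID:
entity = client.get_entity_by_query(uuid='e2c25c1b-f6a9-4cf6-b8d2-271e628a9a56')

# By path:
entity = client.get_entity_by_query(path='/2271/myfolder/myfile')

# By a single metadata {key: value} pair:
entity = client.get_entity_by_query(metadata={'foo': '100'})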
Set metadata for an entity.
def set_metadata(self, entity_type, entity_id, metadata): '''Set metadata for an entity. Args: entity_type (str): Type of the entity. Admitted values: ['project', 'folder', 'file']. entity_id (str): The UUID of the entity to be modified. metadata (dict): A dictionary of key/value pairs to be written as metadata. Warning: It will replace all existing metadata with the provided dictionary. Returns: A dictionary of the updated metadata:: { u'bar': u'200', u'foo': u'100' } Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(entity_id): raise StorageArgumentException( 'Invalid UUID for entity_id: {0}'.format(entity_id)) if not isinstance(metadata, dict): raise StorageArgumentException('The metadata was not provided as a ' 'dictionary') return self._authenticated_request \ .to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \ .with_json_body(metadata) \ .return_body() \ .post()
Get metadata of an entity.
def get_metadata(self, entity_type, entity_id):
    '''Get metadata of an entity.

    Args:
        entity_type (str): Type of the entity. Admitted values: ['project',
            'folder', 'file'].
        entity_id (str): The UUID of the requested entity.

    Returns:
        A dictionary of the metadata::

            {
                u'bar': u'200',
                u'foo': u'100'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(entity_id):
        raise StorageArgumentException(
            'Invalid UUID for entity_id: {0}'.format(entity_id))

    return self._authenticated_request \
        .to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
        .return_body() \
        .get()
Update the metadata of an entity.
def update_metadata(self, entity_type, entity_id, metadata): '''Update the metadata of an entity. Existing non-modified metadata will not be affected. Args: entity_type (str): Type of the entity. Admitted values: 'project', 'folder', 'file'. entity_id (str): The UUID of the entity to be modified. metadata (dict): A dictionary of key/value pairs to be written as metadata. Returns: A dictionary of the updated object metadata:: { u'bar': u'200', u'foo': u'100' } Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(entity_id): raise StorageArgumentException( 'Invalid UUID for entity_id: {0}'.format(entity_id)) if not isinstance(metadata, dict): raise StorageArgumentException('The metadata was not provided as a ' 'dictionary') return self._authenticated_request \ .to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \ .with_json_body(metadata) \ .return_body() \ .put()
Delete the selected metadata entries of an entity.
def delete_metadata(self, entity_type, entity_id, metadata_keys):
    '''Delete the selected metadata entries of an entity.

    Only deletes selected metadata keys, for a complete wipe, use
    set_metadata.

    Args:
        entity_type (str): Type of the entity. Admitted values: ['project',
            'folder', 'file'].
        entity_id (str): The UUID of the entity to be modified.
        metadata_keys (list): A list of metadata keys to be deleted.

    Returns:
        A dictionary of the updated object metadata::

            {
                u'bar': u'200',
                u'foo': u'100'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(entity_id):
        raise StorageArgumentException(
            'Invalid UUID for entity_id: {0}'.format(entity_id))

    if not isinstance(metadata_keys, list):
        raise StorageArgumentException('The metadata keys were not provided '
                                       'as a list')

    return self._authenticated_request \
        .to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
        .with_json_body({'keys': metadata_keys}) \
        .return_body() \
        .delete()
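To make the set/update/delete semantics concrete, a sketch assuming client is an authenticated instance of this class and file_id is an existing file UUID (both placeholders):

client.set_metadata('file', file_id, {'foo': '100'})     # metadata is now exactly {'foo': '100'}
client.update_metadata('file', file_id, {'bar': '200'})  # merged: {'foo': '100', 'bar': '200'}
client.delete_metadata('file', file_id, ['foo'])         # remaining: {'bar': '200'}
client.set_metadata('file', file_id, {'baz': '300'})     # full wipe and replace: {'baz': '300'}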
List all the projects the user has access to.
def list_projects(self, hpc=None, access=None, name=None, collab_id=None,
                  page_size=DEFAULT_PAGE_SIZE, page=None, ordering=None):
    '''List all the projects the user has access to.

    This function does not retrieve all results, pages have to be
    manually retrieved by the caller.

    Args:
        hpc (bool): If 'true', the result will contain only the HPC
            projects (Unicore projects).
        access (str): If provided, the result will contain only projects
            where the user has the provided access. Admitted values:
            ['read', 'write'].
        name (str): Filter on the project name.
        collab_id (int): Filter on the collab id.
        page_size (int): Number of elements per page.
        page (int): Number of the page.
        ordering (str): Indicate on which fields to sort the result.
            Prepend '-' to invert order. Multiple values can be provided.
            Ordering is supported on: ['name', 'created_on', 'modified_on'].
            Example: ordering='name,created_on'

    Returns:
        A dictionary of the results::

            {
                u'count': 256,
                u'next': u'http://link.to.next/page',
                u'previous': None,
                u'results': [{u'collab_id': 2079,
                              u'created_by': u'258666',
                              u'created_on': u'2017-02-23T15:09:27.626973Z',
                              u'description': u'',
                              u'entity_type': u'project',
                              u'modified_by': u'258666',
                              u'modified_on': u'2017-02-23T15:09:27.627025Z',
                              u'name': u'2079',
                              u'uuid': u'64a6ad2e-acd1-44a3-a4cd-6bd96e3da2b0'}]
            }

    Raises:
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    return self._authenticated_request \
        .to_endpoint('project/') \
        .with_params(self._prep_params(locals())) \
        .return_body() \
        .get()
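Because only one page is returned per call, callers who need everything iterate over the pages themselves; a sketch assuming client is an authenticated instance of this class:

projects = []
page = 1
while True:
    resp = client.list_projects(access='write', ordering='name', page=page)
    projects.extend(resp['results'])
    if resp['next'] is None:
        break
    page += 1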
Get information on a given project
def get_project_details(self, project_id): '''Get information on a given project Args: project_id (str): The UUID of the requested project. Returns: A dictionary describing the project:: { u'collab_id': 2271, u'created_by': u'303447', u'created_on': u'2017-03-10T12:50:06.077891Z', u'description': u'', u'entity_type': u'project', u'modified_by': u'303447', u'modified_on': u'2017-03-10T12:50:06.077946Z', u'name': u'2271', u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682' } Raises: StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(project_id): raise StorageArgumentException( 'Invalid UUID for project_id: {0}'.format(project_id)) return self._authenticated_request \ .to_endpoint('project/{}/'.format(project_id)) \ .return_body() \ .get()
Create a new project.
def create_project(self, collab_id): '''Create a new project. Args: collab_id (int): The id of the collab the project should be created in. Returns: A dictionary of details of the created project:: { u'collab_id': 12998, u'created_by': u'303447', u'created_on': u'2017-03-21T14:06:32.293902Z', u'description': u'', u'entity_type': u'project', u'modified_by': u'303447', u'modified_on': u'2017-03-21T14:06:32.293967Z', u'name': u'12998', u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40' } Raises: StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' return self._authenticated_request \ .to_endpoint('project/') \ .with_json_body(self._prep_params(locals())) \ .return_body() \ .post()
Delete a project. It will recursively delete all the content.
def delete_project(self, project): '''Delete a project. It will recursively delete all the content. Args: project (str): The UUID of the project to be deleted. Returns: None Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: 403 StorageNotFoundException: 404 HTTPError: other non-20x error codes ''' if not is_valid_uuid(project): raise StorageArgumentException( 'Invalid UUID for project: {0}'.format(project)) self._authenticated_request \ .to_endpoint('project/{}/'.format(project)) \ .delete()
Create a new folder.
def create_folder(self, name, parent):
    '''Create a new folder.

    Args:
        name (str): The name of the folder.
        parent (str): The UUID of the parent entity. The parent must be a
            project or a folder.

    Returns:
        A dictionary of details of the created folder::

            {
                u'created_by': u'303447',
                u'created_on': u'2017-03-21T14:06:32.293902Z',
                u'description': u'',
                u'entity_type': u'folder',
                u'modified_by': u'303447',
                u'modified_on': u'2017-03-21T14:06:32.293967Z',
                u'name': u'myfolder',
                u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
                u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(parent):
        raise StorageArgumentException(
            'Invalid UUID for parent: {0}'.format(parent))

    return self._authenticated_request \
        .to_endpoint('folder/') \
        .with_json_body(self._prep_params(locals())) \
        .return_body() \
        .post()
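A sketch chaining project and folder creation, assuming client is an authenticated instance of this class; the collab id is a placeholder:

project = client.create_project(collab_id=2271)
folder = client.create_folder('myfolder', project['uuid'])
subfolder = client.create_folder('raw_data', folder['uuid'])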
Get information on a given folder.
def get_folder_details(self, folder): '''Get information on a given folder. Args: folder (str): The UUID of the requested folder. Returns: A dictionary of the folder details if found:: { u'created_by': u'303447', u'created_on': u'2017-03-21T14:06:32.293902Z', u'description': u'', u'entity_type': u'folder', u'modified_by': u'303447', u'modified_on': u'2017-03-21T14:06:32.293967Z', u'name': u'myfolder', u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682', u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40' } Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(folder): raise StorageArgumentException( 'Invalid UUID for folder: {0}'.format(folder)) return self._authenticated_request \ .to_endpoint('folder/{}/'.format(folder)) \ .return_body() \ .get()
List files and folders (not recursively) contained in the folder.
def list_folder_content(self, folder, name=None, entity_type=None, content_type=None, page_size=DEFAULT_PAGE_SIZE, page=None, ordering=None): '''List files and folders (not recursively) contained in the folder. This function does not retrieve all results, pages have to be manually retrieved by the caller. Args: folder (str): The UUID of the requested folder. name (str): Optional filter on entity name. entity_type (str): Optional filter on entity type. Admitted values: ['file', 'folder']. content_type (str): Optional filter on entity content type (only files are returned). page_size (int): Number of elements per page. page (int): Number of the page. ordering (str): Indicate on which fields to sort the result. Prepend '-' to invert order. Multiple values can be provided. Ordering is supported on: ['name', 'created_on', 'modified_on']. Example: 'ordering=name,created_on' Returns: A dictionary of the results:: { u'count': 1, u'next': None, u'previous': None, u'results': [{u'content_type': u'plain/text', u'created_by': u'303447', u'created_on': u'2017-03-13T10:17:01.688472Z', u'description': u'', u'entity_type': u'file', u'modified_by': u'303447', u'modified_on': u'2017-03-13T10:17:01.688632Z', u'name': u'file_1', u'parent': u'eac11058-4ae0-4ea9-ada8-d3ea23887509', u'uuid': u'0e17eaac-cb00-4336-b9d7-657026844281'}] } Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(folder): raise StorageArgumentException( 'Invalid UUID for folder: {0}'.format(folder)) params = self._prep_params(locals()) del params['folder'] # not a query parameter return self._authenticated_request \ .to_endpoint('folder/{}/children/'.format(folder)) \ .with_params(params) \ .return_body() \ .get()
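A hedged example listing only the files of a folder, most recently modified first; the folder UUID is a placeholder and client an authenticated instance of this class:

listing = client.list_folder_content(
    'eac11058-4ae0-4ea9-ada8-d3ea23887509',
    entity_type='file',
    ordering='-modified_on')
for child in listing['results']:
    print(child['name'], child['uuid'])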
Delete a folder. It will recursively delete all the content.
def delete_folder(self, folder):
    '''Delete a folder. It will recursively delete all the content.

    Args:
        folder (str): The UUID of the folder to be deleted.

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: 403
        StorageNotFoundException: 404
        HTTPError: other non-20x error codes
    '''
    if not is_valid_uuid(folder):
        raise StorageArgumentException(
            'Invalid UUID for folder: {0}'.format(folder))

    self._authenticated_request \
        .to_endpoint('folder/{}/'.format(folder)) \
        .delete()
Upload a file content. The file entity must already exist.
def upload_file_content(self, file_id, etag=None, source=None, content=None):
    '''Upload a file content. The file entity must already exist.

    If an ETag is provided the file stored on the server is verified
    against it. If it does not match, StorageException is raised.
    This means the client needs to update its knowledge of the resource
    before attempting to update again. This can be used for optimistic
    concurrency control.

    Args:
        file_id (str): The UUID of the file whose content is written.
        etag (str): The etag to match the contents against.
        source (str): The path of the local file whose content is to be
            uploaded.
        content (str): A string of the content to be uploaded.

    Note:
        ETags should be enclosed in double quotes::

            my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'

    Returns:
        The ETag of the file upload::

            '"71e1ed9ee52e565a56aec66bc648a32c"'

    Raises:
        IOError: The source cannot be opened.
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(file_id):
        raise StorageArgumentException(
            'Invalid UUID for file_id: {0}'.format(file_id))
    if not (source or content) or (source and content):
        raise StorageArgumentException('Exactly one of source or content '
                                       'has to be provided.')

    resp = self._authenticated_request \
        .to_endpoint('file/{}/content/upload/'.format(file_id)) \
        .with_body(content or open(source, 'rb')) \
        .with_headers({'If-Match': etag} if etag else {}) \
        .post()

    if 'ETag' not in resp.headers:
        raise StorageException('No ETag received from the service after the upload')

    return resp.headers['ETag']
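The If-Match handshake enables a simple optimistic-concurrency loop; a sketch assuming client is an authenticated instance of this class and file_id an existing file UUID:

# First write: nothing to match against yet.
etag = client.upload_file_content(file_id, content='version 1')

# A later write succeeds only if the server still holds our ETag; if a
# concurrent writer changed the file in between, StorageException is
# raised and we must refresh our view before retrying.
try:
    etag = client.upload_file_content(file_id, etag=etag, content='version 2')
except StorageException:
    etag, _ = client.download_file_content(file_id)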
Copy file content from source file to target file.
def copy_file_content(self, file_id, source_file): '''Copy file content from source file to target file. Args: file_id (str): The UUID of the file whose content is written. source_file (str): The UUID of the file whose content is copied. Returns: None Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(file_id): raise StorageArgumentException( 'Invalid UUID for file_id: {0}'.format(file_id)) if not is_valid_uuid(source_file): raise StorageArgumentException( 'Invalid UUID for source_file: {0}'.format(source_file)) self._authenticated_request \ .to_endpoint('file/{}/content/'.format(file_id)) \ .with_headers({'X-Copy-From': source_file}) \ .put()
Download file content.
def download_file_content(self, file_id, etag=None):
    '''Download file content.

    Args:
        file_id (str): The UUID of the file whose content is requested
        etag (str): If the content is not changed since the provided ETag,
            the content won't be downloaded. If the content is changed, it
            will be downloaded and returned with its new ETag.

    Note:
        ETags should be enclosed in double quotes::

            my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'

    Returns:
        A tuple of ETag and content (etag, content) if the content was
        retrieved. If an etag was provided and the content has not
        changed, (None, None) is returned::

            ('"71e1ed9ee52e565a56aec66bc648a32c"', 'Hello world!')

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(file_id):
        raise StorageArgumentException(
            'Invalid UUID for file_id: {0}'.format(file_id))

    headers = {'Accept': '*/*'}
    if etag:
        headers['If-None-Match'] = etag

    resp = self._authenticated_request \
        .to_endpoint('file/{}/content/'.format(file_id)) \
        .with_headers(headers) \
        .get()

    if resp.status_code == 304:
        return (None, None)

    if 'ETag' not in resp.headers:
        raise StorageException('No ETag received from the service with the download')

    return (resp.headers['ETag'], resp.content)
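The If-None-Match handshake supports a minimal local cache; a sketch assuming client is an authenticated instance of this class and file_id an existing file UUID:

etag, content = client.download_file_content(file_id)

# Later: transfer the body again only if it changed on the server.
new_etag, new_content = client.download_file_content(file_id, etag=etag)
if new_etag is None:
    pass  # 304 Not Modified: keep using the cached content
else:
    etag, content = new_etag, new_content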
Get a signed unauthenticated URL.
def get_signed_url(self, file_id): '''Get a signed unauthenticated URL. It can be used to download the file content without the need for a token. The signed URL expires after 5 seconds. Args: file_id (str): The UUID of the file to get the link for. Returns: The signed url as a string Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(file_id): raise StorageArgumentException( 'Invalid UUID for file_id: {0}'.format(file_id)) return self._authenticated_request \ .to_endpoint('file/{}/content/secure_link/'.format(file_id)) \ .return_body() \ .get()['signed_url']
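Since the link expires after 5 seconds, it should be consumed immediately; a sketch using the third-party requests library (an assumption, not part of this client):

import requests

url = client.get_signed_url(file_id)
resp = requests.get(url)  # no Authorization header needed
resp.raise_for_status()
data = resp.content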
Delete a file.
def delete_file(self, file_id): '''Delete a file. Args: file_id (str): The UUID of the file to delete. Returns: None Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server response code 404 StorageException: other 400-600 error codes ''' if not is_valid_uuid(file_id): raise StorageArgumentException( 'Invalid UUID for file_id: {0}'.format(file_id)) self._authenticated_request \ .to_endpoint('file/{}/'.format(file_id)) \ .delete()
pymongo expects a dict
def emit(self, record):
    """
    pymongo expects a dict
    """
    msg = self.format(record)
    if not isinstance(msg, dict):
        msg = json.loads(msg)
    # insert() is deprecated in pymongo; insert_one() is the supported
    # call for storing a single document.
    self.collection.insert_one(msg)
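A hedged wiring example, assuming the emit method above belongs to a logging.Handler subclass named MongoHandler (the class name is illustrative) whose collection attribute is a pymongo collection:

import logging

logger = logging.getLogger('storage')
handler = MongoHandler()  # hypothetical handler class holding self.collection
# The formatter must produce either a dict or a JSON string; a naive
# JSON template is used here for illustration only.
handler.setFormatter(logging.Formatter('{"msg": "%(message)s"}'))
logger.addHandler(handler)
logger.warning('upload failed')  # ends up as a document in MongoDB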
Sets the service name and version the request should target
def to_service(self, service, version): '''Sets the service name and version the request should target Args: service (str): The name of the service as displayed in the services.json file version (str): The version of the service as displayed in the services.json file Returns: The request builder instance in order to chain calls ''' service_url = self._service_locator.get_service_url(service, version) return self.__copy_and_set('service_url', self.__strip_trailing_slashes(service_url))
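A minimal chaining sketch built from the builder calls seen earlier in this client; the service name 'storage' and version 'v1' are assumptions:

body = RequestBuilder \
    .request('prod') \
    .to_service('storage', 'v1') \
    .with_token(my_access_token) \
    .to_endpoint('entity/') \
    .return_body() \
    .get()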