| signature (string, 8-3.44k chars) | body (string, 0-1.41M chars) | docstring (string, 1-122k chars) | id (string, 5-17 chars) |
|---|---|---|---|
def set_hash(self, file_hash, hash_alg):
|
self.file_hash = file_hash<EOL>self.hash_alg = hash_alg<EOL>
|
Set the hash value and algorithm for the contents of the file.
:param file_hash: str hash value
:param hash_alg: str name of the hash algorithm (e.g. md5)
|
f3927:c3:m1
|
@staticmethod<EOL><INDENT>def get_hash_from_upload(upload, target_algorithm=HashUtil.HASH_NAME):<DEDENT>
|
hash_info = upload.get('<STR_LIT>')<EOL>if hash_info:<EOL><INDENT>return hash_info<EOL><DEDENT>hashes_array = upload.get('<STR_LIT>')<EOL>if hashes_array:<EOL><INDENT>for hash_info in hashes_array:<EOL><INDENT>algorithm = hash_info.get('<STR_LIT>')<EOL>if algorithm == target_algorithm:<EOL><INDENT>return hash_info<EOL><DEDENT><DEDENT><DEDENT>return None<EOL>
|
Find hash value in upload dictionary.
Older upload format stores a single hash in 'hash' property.
The newer upload format stores multiple hashes under the 'hashes' property; for that format we look for a particular algorithm.
:param upload: dictionary: contains hash data in DukeDS upload format.
:param target_algorithm: str: name of the algorithm to look for if there is more than one hash
:return: dictionary of hash information, keys: "algorithm" and "value"
|
f3927:c3:m3
|
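A minimal runnable sketch of this lookup, assuming the elided `<STR_LIT>` keys are 'hash', 'hashes', and 'algorithm' as the docstring suggests:

```python
def get_hash_from_upload(upload, target_algorithm='md5'):
    # Older format: a single hash dict under 'hash'.
    hash_info = upload.get('hash')
    if hash_info:
        return hash_info
    # Newer format: a list of hash dicts under 'hashes'; pick the requested algorithm.
    for hash_info in upload.get('hashes', []):
        if hash_info.get('algorithm') == target_algorithm:
            return hash_info
    return None

old_style = {'hash': {'algorithm': 'md5', 'value': 'abc123'}}
new_style = {'hashes': [{'algorithm': 'sha256', 'value': 'f00d'},
                        {'algorithm': 'md5', 'value': 'abc123'}]}
assert get_hash_from_upload(old_style)['value'] == 'abc123'
assert get_hash_from_upload(new_style)['value'] == 'abc123'
```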
def __init__(self, json_data):
|
self.id = json_data['<STR_LIT:id>']<EOL>self.username = json_data['<STR_LIT:username>']<EOL>self.full_name = json_data['<STR_LIT>']<EOL>self.email = json_data['<STR_LIT:email>']<EOL>self.first_name = json_data['<STR_LIT>']<EOL>self.last_name = json_data['<STR_LIT>']<EOL>
|
Set properties based on json_data.
:param json_data: dict JSON data containing user info
|
f3927:c4:m0
|
def __init__(self, json_data):
|
self.id = json_data['<STR_LIT:id>']<EOL>self.name = json_data['<STR_LIT:name>']<EOL>self.description = json_data['<STR_LIT:description>']<EOL>self.is_deprecated = json_data['<STR_LIT>']<EOL>
|
Set properties based on json_data.
:param json_data: dict JSON data containing auth_role info
|
f3927:c5:m0
|
def __init__(self, project_id, data):
|
self.project_id = project_id<EOL>self.data = data<EOL>
|
Specify the project_id and the array of item dictionaries.
:param project_id: str: uuid of the project
:param data: [object]: DukeDS recursive project children
|
f3927:c6:m0
|
def _get_children_for_parent(self, parent_id):
|
children = []<EOL>for child in self.data:<EOL><INDENT>parent = child['<STR_LIT>']<EOL>if parent['<STR_LIT:id>'] == parent_id:<EOL><INDENT>children.append(child)<EOL><DEDENT><DEDENT>return children<EOL>
|
Given a parent uuid return a list of dictionaries.
:param parent_id: str: uuid of the parent
:return: [dict]: children in this list with parent_id parent
|
f3927:c6:m1
|
def get_tree(self):
|
return self.get_tree_recur(self.project_id, '<STR_LIT>')<EOL>
|
Return array of RemoteFolders (with appropriate children) and RemoteFiles based on the values from the constructor.
:return: [RemoteFolder/RemoteFile]
|
f3927:c6:m2
|
def get_tree_recur(self, parent_id, parent_path):
|
children = []<EOL>for child_data in self._get_children_for_parent(parent_id):<EOL><INDENT>if child_data['<STR_LIT>'] == KindType.folder_str:<EOL><INDENT>folder = RemoteFolder(child_data, parent_path)<EOL>for grand_child in self.get_tree_recur(child_data['<STR_LIT:id>'], folder.remote_path):<EOL><INDENT>folder.add_child(grand_child)<EOL><DEDENT>children.append(folder)<EOL><DEDENT>else:<EOL><INDENT>file = RemoteFile(child_data, parent_path)<EOL>children.append(file)<EOL><DEDENT><DEDENT>return children<EOL>
|
Recursively create an array of RemoteFolders/RemoteFiles.
:param parent_id: str: uuid of the parent to find children for
:param parent_path: str: remote path of parent to build child paths
:return: [RemoteFolder/RemoteFile]
|
f3927:c6:m3
|
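A self-contained sketch of the same parent-id driven recursion, using plain dicts in place of RemoteFolder/RemoteFile (the real key names are elided as `<STR_LIT>` above, so the keys here are illustrative):

```python
def children_of(data, parent_id):
    # Mirror of _get_children_for_parent: linear scan matching on the parent's id.
    return [c for c in data if c['parent_id'] == parent_id]

def build_tree(data, parent_id, parent_path):
    nodes = []
    for child in children_of(data, parent_id):
        path = parent_path + '/' + child['name']
        node = {'name': child['name'], 'path': path, 'children': []}
        if child['kind'] == 'folder':
            # Folders recurse to collect their own children.
            node['children'] = build_tree(data, child['id'], path)
        nodes.append(node)
    return nodes

flat = [{'id': 'f1', 'parent_id': 'p1', 'kind': 'folder', 'name': 'docs'},
        {'id': 'a1', 'parent_id': 'f1', 'kind': 'file', 'name': 'readme.txt'}]
tree = build_tree(flat, 'p1', '')
assert tree[0]['children'][0]['path'] == '/docs/readme.txt'
```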
def __init__(self, json_data):
|
self.id = json_data['<STR_LIT:id>']<EOL>self.service_id = json_data['<STR_LIT>']<EOL>self.name = json_data['<STR_LIT:name>']<EOL>self.is_deprecated = json_data['<STR_LIT>']<EOL>self.is_default = json_data['<STR_LIT>']<EOL>self.login_initiation_url = json_data['<STR_LIT>']<EOL>
|
Set properties based on json_data.
:param json_data: dict JSON data containing auth provider info
|
f3927:c7:m0
|
def __init__(self, json_data):
|
self.id = json_data['<STR_LIT:id>']<EOL>self.name = json_data['<STR_LIT:name>']<EOL>self.size = json_data['<STR_LIT:size>']<EOL>self.file_url = json_data['<STR_LIT>']<EOL>self.hashes = json_data['<STR_LIT>']<EOL>self.ancestors = json_data['<STR_LIT>']<EOL>self.json_data = json_data<EOL>self.kind = KindType.file_str<EOL>
|
Set properties based on json_data.
:param json_data: dict JSON data containing file info
|
f3927:c10:m0
|
def __init__(self, include_paths, exclude_paths):
|
if include_paths and exclude_paths:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>path_filter = IncludeAll()<EOL>if include_paths:<EOL><INDENT>path_filter = IncludeFilter(include_paths)<EOL><DEDENT>elif exclude_paths:<EOL><INDENT>path_filter = ExcludeFilter(exclude_paths)<EOL><DEDENT>self.filter = path_filter<EOL>self.seen_paths = set()<EOL>
|
Creates a path filter based on either include_paths or exclude_paths; raises an error if both are filled in.
:param include_paths: [str]: list of paths that should be included
:param exclude_paths: [str]: list of paths that should be excluded
|
f3928:c0:m0
|
def include_path(self, path):
|
self.seen_paths.add(path)<EOL>return self.filter.include(path)<EOL>
|
Determine whether this path should be included based on the include_paths or exclude_paths.
Keeps track of paths seen to allow finding unused filters.
:param path: str: remote path to be filtered
:return: bool: True if we should include the path
|
f3928:c0:m1
|
def reset_seen_paths(self):
|
self.seen_paths = set()<EOL>
|
Clear the list of paths seen via the include_path method.
|
f3928:c0:m2
|
def get_unused_paths(self):
|
return [path for path in self.filter.paths if path not in self.seen_paths]<EOL>
|
Return the include_paths or exclude_paths entries that were not used via the include_path method.
:return: [str] list of filtering paths that were not used.
|
f3928:c0:m3
|
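A simplified, runnable sketch of the PathFilter behavior described above; exact-match filtering stands in for the real IncludeFilter/ExcludeFilter classes, which may match on path prefixes:

```python
class SimplePathFilter:
    def __init__(self, include_paths=None, exclude_paths=None):
        # Mutually exclusive lists, mirroring the constructor above.
        if include_paths and exclude_paths:
            raise ValueError("Specify include_paths or exclude_paths, not both.")
        self.include_paths = include_paths
        self.exclude_paths = exclude_paths
        self.seen_paths = set()

    def include_path(self, path):
        # Track every path seen so unused filter entries can be reported later.
        self.seen_paths.add(path)
        if self.include_paths is not None:
            return path in self.include_paths
        if self.exclude_paths is not None:
            return path not in self.exclude_paths
        return True  # IncludeAll behavior when neither list is given

    def get_unused_paths(self):
        paths = self.include_paths or self.exclude_paths or []
        return [p for p in paths if p not in self.seen_paths]

pf = SimplePathFilter(include_paths=['data/a.txt', 'data/b.txt'])
assert pf.include_path('data/a.txt') is True
assert pf.include_path('data/c.txt') is False
assert pf.get_unused_paths() == ['data/b.txt']
```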
@staticmethod<EOL><INDENT>def is_child(child_path, parent_path):<DEDENT>
|
parent_dir = os.path.join(parent_path, '<STR_LIT>')<EOL>child_dir = os.path.join(child_path, '<STR_LIT>')<EOL>return child_dir.startswith(parent_dir)<EOL>
|
Is parent_path a parent (or grandparent) directory of child_path?
:param child_path: str: remote file path
:param parent_path: str: remote file path
:return: bool: True when parent_path is child_path's parent
|
f3928:c1:m0
|
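Assuming the elided literal is the empty string, `os.path.join(path, '')` appends a trailing separator, which keeps sibling names that share a prefix from matching; a quick check:

```python
import os

def is_child(child_path, parent_path):
    # Joining with '' adds a trailing os.sep: '/data/foo' -> '/data/foo/'.
    parent_dir = os.path.join(parent_path, '')
    child_dir = os.path.join(child_path, '')
    return child_dir.startswith(parent_dir)

assert is_child('/data/foo/bar.txt', '/data/foo')
assert not is_child('/data/foobar', '/data/foo')  # shared prefix alone is not enough
```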
@staticmethod<EOL><INDENT>def parent_child_paths(path, some_path):<DEDENT>
|
return PathFilterUtil.is_child(path, some_path) or PathFilterUtil.is_child(some_path, path)<EOL>
|
Is path a parent of some_path, or some_path a parent of path?
:param path: str: remote file path
:param some_path: str: remote file path
:return: bool: True when either path is a parent of the other
|
f3928:c1:m1
|
@staticmethod<EOL><INDENT>def strip_trailing_slash(paths):<DEDENT>
|
return [path.rstrip(os.sep) for path in paths]<EOL>
|
Remove trailing slash from a list of paths.
:param paths: [str]: paths to fix
:return: [str]: stripped paths
|
f3928:c1:m2
|
def __init__(self, path_filter, visitor):
|
self.path_filter = path_filter<EOL>self.visitor = visitor<EOL>self.skipped_folder_paths = set()<EOL>self.filtered_project = FilteredProject(self.include, visitor)<EOL>
|
Setup to allow visitor to visit files/folders from a project that pass path_filter.
:param path_filter: PathFilter: determines which items are sent to visitor
:param visitor: object: has visit_project, visit_folder, and visit_file methods that will be called from the run method
|
f3928:c5:m0
|
def run(self, project):
|
self.filtered_project.walk_project(project)<EOL>
|
Walk the project, calling the visitor's visit_* methods for included items.
:param project: RemoteProject: project to visit folders/files of.
|
f3928:c5:m1
|
def include(self, item):
|
return self.path_filter.include_path(item.remote_path)<EOL>
|
Method that determines which items the visitor sees.
:param item: RemoteProject/RemoteFolder/RemoteItem: item to have its remote_path checked
:return: bool: True if the item is to be included
|
f3928:c5:m2
|
def execute_task_async(task_func, task_id, context):
|
try:<EOL><INDENT>result = task_func(context)<EOL>return task_id, result<EOL><DEDENT>except:<EOL><INDENT>raise Exception("<STR_LIT>".join(traceback.format_exception(*sys.exc_info())))<EOL><DEDENT>
|
Global function run for Task; multiprocessing requires a top-level function so it can be pickled.
:param task_func: function: function to run (must be pickle-able)
:param task_id: int: unique id of this task
:param context: object: single argument to task_func (must be pickle-able)
:return: (task_id, object): return passed in task id and result object
|
f3929:m0
|
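Why stringify the traceback? Exceptions that cross a multiprocessing boundary are pickled, and the original traceback is lost in transit; baking the formatted traceback into a fresh Exception's message preserves it. A sketch, assuming the elided join separator is the empty string:

```python
import sys
import traceback

def execute_task_async(task_func, task_id, context):
    try:
        return task_id, task_func(context)
    except Exception:
        # Capture the full traceback as text so it survives pickling
        # back to the parent process.
        raise Exception("".join(traceback.format_exception(*sys.exc_info())))

def boom(_):
    raise ValueError("bad input")

try:
    execute_task_async(boom, 1, None)
except Exception as err:
    assert "ValueError: bad input" in str(err)
```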
def __init__(self, task_id, wait_for_task_id, command):
|
self.id = task_id<EOL>self.wait_for_task_id = wait_for_task_id<EOL>self.command = command<EOL>self.func = command.func<EOL>
|
Setup task so it can be executed.
:param task_id: int: unique id of this task
:param wait_for_task_id: int: unique id of the task that this one is waiting for
:param command: object with foreground setup/teardown methods and background function
|
f3929:c0:m0
|
def before_run(self, parent_task_result):
|
self.command.before_run(parent_task_result)<EOL>
|
Run in main process before run method.
:param parent_task_result: object: result of previous task or None if no previous task
|
f3929:c0:m1
|
def create_context(self, message_queue):
|
return self.command.create_context(message_queue, self.id)<EOL>
|
Run serially in the main process before the run method.
:param message_queue: Queue: queue the task can use to send messages back to the main process
:return object: context object passing state to the background process
|
f3929:c0:m2
|
def after_run(self, results):
|
return self.command.after_run(results)<EOL>
|
Run in main process after run method.
:param results: object: results from run method.
|
f3929:c0:m3
|
def on_message(self, data):
|
self.command.on_message(data)<EOL>
|
Call on_message on our command passing data
:param data: object: parameter passed to the on_message member of this task's command
|
f3929:c0:m4
|
def add(self, task):
|
wait_id = task.wait_for_task_id<EOL>task_list = self.wait_id_to_task.get(wait_id, [])<EOL>task_list.append(task)<EOL>self.wait_id_to_task[wait_id] = task_list<EOL>
|
Add this task to the lookup based on its wait_for_task_id property.
:param task: Task: task to add to the list
|
f3929:c1:m1
|
def get_next_tasks(self, finished_task_id):
|
return self.wait_id_to_task.get(finished_task_id, [])<EOL>
|
Return list of tasks that were waiting for finished_task_id.
:param finished_task_id: int: task id for some task that has just finished
:return: [Task]: tasks waiting for finished_task_id
|
f3929:c1:m2
|
def __init__(self, executor):
|
self.waiting_task_list = WaitingTaskList()<EOL>self.executor = executor<EOL>self.next_id = <NUM_LIT:1><EOL>
|
Setup runner to use executor to run its tasks.
:param executor: TaskExecutor: actually executes tasks and returns their results
|
f3929:c2:m0
|
def _claim_next_id(self):
|
next_id = self.next_id<EOL>self.next_id += <NUM_LIT:1><EOL>return next_id<EOL>
|
Convenience method to generate sequential ids for tasks.
:return: int: the next unique task id
|
f3929:c2:m1
|
def add(self, parent_task_id, command):
|
task_id = self._claim_next_id()<EOL>self.waiting_task_list.add(Task(task_id, parent_task_id, command))<EOL>return task_id<EOL>
|
Create a task for the command that will wait for parent_task_id before starting.
:param parent_task_id: int: id of task to wait for or None if it can start immediately
:param command: TaskCommand: contains data function to run
:return: int: task id we created for this command
|
f3929:c2:m2
|
def get_next_tasks(self, finished_task_id):
|
return self.waiting_task_list.get_next_tasks(finished_task_id)<EOL>
|
Get the next set of tasks for a finished_task_id
:param finished_task_id: int: task id of a task that finished
:return: [Task]: tasks that were waiting for the finished_task_id task to finish
|
f3929:c2:m3
|
def run(self):
|
for task in self.get_next_tasks(None):<EOL><INDENT>self.executor.add_task(task, None)<EOL><DEDENT>while not self.executor.is_done():<EOL><INDENT>done_task_and_result = self.executor.wait_for_tasks()<EOL>for task, task_result in done_task_and_result:<EOL><INDENT>self._add_sub_tasks_to_executor(task, task_result)<EOL><DEDENT><DEDENT>
|
Runs all tasks in this runner on the executor.
Blocks until all tasks have been completed.
|
f3929:c2:m4
|
def _add_sub_tasks_to_executor(self, parent_task, parent_task_result):
|
for sub_task in self.waiting_task_list.get_next_tasks(parent_task.id):<EOL><INDENT>self.executor.add_task(sub_task, parent_task_result)<EOL><DEDENT>
|
Add all subtasks for parent_task to the executor.
:param parent_task: Task: task that has just finished
:param parent_task_result: object: result of task that is finished
|
f3929:c2:m5
|
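The scheduling core is a map from prerequisite task id to the tasks waiting on it; the None key holds tasks with no prerequisite. A minimal sketch of the WaitingTaskList pattern:

```python
class MiniWaitingTaskList:
    def __init__(self):
        self.wait_id_to_task = {}

    def add(self, task_id, wait_for_task_id):
        # Group tasks under the id they wait for (None = can start immediately).
        self.wait_id_to_task.setdefault(wait_for_task_id, []).append(task_id)

    def get_next_tasks(self, finished_task_id):
        return self.wait_id_to_task.get(finished_task_id, [])

tasks = MiniWaitingTaskList()
tasks.add(1, None)  # no prerequisite
tasks.add(2, 1)     # waits for task 1
tasks.add(3, 1)     # also waits for task 1
assert tasks.get_next_tasks(None) == [1]
assert tasks.get_next_tasks(1) == [2, 3]
```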
def __init__(self, tasks_at_once):
|
self.pool = multiprocessing.Pool()<EOL>self.tasks = deque()<EOL>self.task_id_to_task = {}<EOL>self.pending_results = []<EOL>self.tasks_at_once = tasks_at_once<EOL>self.message_queue = multiprocessing.Manager().Queue()<EOL>
|
Setup to run tasks in background limiting to tasks_at_once processes.
:param tasks_at_once: int: number of tasks we can run at once
|
f3929:c3:m0
|
def add_task(self, task, parent_task_result):
|
self.tasks.append((task, parent_task_result))<EOL>self.task_id_to_task[task.id] = task<EOL>
|
Add a task to run with the specified result from this task's parent (can be None)
:param task: Task: task that should be run
:param parent_task_result: object: value to be passed to task for setup
|
f3929:c3:m1
|
def is_done(self):
|
return not self._has_more_tasks() and not self._has_more_pending_results()<EOL>
|
Have we exhausted all tasks?
:return: bool: True if we have finished all tasks and their pending results
|
f3929:c3:m2
|
def wait_for_tasks(self):
|
finished_tasks_and_results = []<EOL>while len(finished_tasks_and_results) == <NUM_LIT:0>:<EOL><INDENT>if self.is_done():<EOL><INDENT>break<EOL><DEDENT>self.start_tasks()<EOL>self.process_all_messages_in_queue()<EOL>finished_tasks_and_results = self.get_finished_results()<EOL><DEDENT>return finished_tasks_and_results<EOL>
|
Wait for one or more tasks to finish or return empty list if we are done.
Starts new tasks if we have fewer than tasks_at_once currently running.
:return: [(Task,object)]: list of (task,result) for finished tasks
|
f3929:c3:m5
|
def start_tasks(self):
|
while self.tasks_at_once > len(self.pending_results) and self._has_more_tasks():<EOL><INDENT>task, parent_result = self.tasks.popleft()<EOL>self.execute_task(task, parent_result)<EOL><DEDENT>
|
Start however many tasks we can based on our limits and what we have left to finish.
|
f3929:c3:m6
|
def execute_task(self, task, parent_result):
|
task.before_run(parent_result)<EOL>context = task.create_context(self.message_queue)<EOL>pending_result = self.pool.apply_async(execute_task_async, (task.func, task.id, context))<EOL>self.pending_results.append(pending_result)<EOL>
|
Run a single task in another process saving the result to our list of pending results.
:param task: Task: function and data we can run in another process
:param parent_result: object: result from our parent task
|
f3929:c3:m7
|
def process_all_messages_in_queue(self):
|
keep_checking = True<EOL>while keep_checking:<EOL><INDENT>keep_checking = self.process_single_message_from_queue()<EOL><DEDENT>
|
Process all messages in the queue coming from tasks.
|
f3929:c3:m8
|
def process_single_message_from_queue(self):
|
try:<EOL><INDENT>message = self.message_queue.get_nowait()<EOL>task_id, data = message<EOL>task = self.task_id_to_task[task_id]<EOL>task.on_message(data)<EOL>return True<EOL><DEDENT>except queue.Empty:<EOL><INDENT>return False<EOL><DEDENT>
|
Tries to read a single message from the queue and let the associated task process it.
:return: bool: True if we processed a message, otherwise False
|
f3929:c3:m9
|
def get_finished_results(self):
|
task_and_results = []<EOL>for pending_result in self.pending_results:<EOL><INDENT>if pending_result.ready():<EOL><INDENT>ret = pending_result.get()<EOL>task_id, result = ret<EOL>task = self.task_id_to_task[task_id]<EOL>self.process_all_messages_in_queue()<EOL>task.after_run(result)<EOL>task_and_results.append((task, result))<EOL>self.pending_results.remove(pending_result)<EOL><DEDENT><DEDENT>return task_and_results<EOL>
|
Go through pending results and retrieve results for tasks that are done, running each finished task's after_run in the main process.
:return: [(Task, object)]: list of (task, result) for finished tasks
|
f3929:c3:m10
|
def upload_async(data_service_auth_data, config, upload_id,<EOL>filename, index, num_chunks_to_send, progress_queue):
|
auth = DataServiceAuth(config)<EOL>auth.set_auth_data(data_service_auth_data)<EOL>data_service = DataServiceApi(auth, config.url)<EOL>sender = ChunkSender(data_service, upload_id, filename, config.upload_bytes_per_chunk, index, num_chunks_to_send,<EOL>progress_queue)<EOL>try:<EOL><INDENT>sender.send()<EOL><DEDENT>except:<EOL><INDENT>error_msg = "<STR_LIT>".join(traceback.format_exception(*sys.exc_info()))<EOL>progress_queue.error(error_msg)<EOL><DEDENT>
|
Method run in another process called from ParallelChunkProcessor.make_and_start_process.
:param data_service_auth_data: tuple of auth data for rebuilding DataServiceAuth
:param config: dds.Config configuration settings to use during upload
:param upload_id: uuid unique id of the 'upload' we are uploading chunks into
:param filename: str path to file whose contents we will be uploading
:param index: int offset into filename where we will start sending bytes from (multiply by upload_bytes_per_chunk for the byte offset)
:param num_chunks_to_send: int number of chunks of config.upload_bytes_per_chunk size to send.
:param progress_queue: ProgressQueue queue to send notifications of progress or errors
|
f3930:m0
|
def __init__(self, config, data_service, local_file, watcher, file_upload_post_processor=None):
|
self.config = config<EOL>self.data_service = data_service<EOL>self.upload_operations = FileUploadOperations(self.data_service, watcher)<EOL>self.file_upload_post_processor = file_upload_post_processor<EOL>self.local_file = local_file<EOL>self.upload_id = None<EOL>self.watcher = watcher<EOL>
|
Setup for sending to remote store.
:param config: ddsc.config.Config user configuration settings from YAML file/environment
:param data_service: DataServiceApi data service we are sending the content to.
:param local_file: LocalFile file we are sending to remote store
:param watcher: ProgressPrinter we notify of our progress
:param file_upload_post_processor: object: has run(data_service, file_response) method to run after upload
|
f3930:c0:m0
|
def upload(self, project_id, parent_kind, parent_id):
|
path_data = self.local_file.get_path_data()<EOL>hash_data = path_data.get_hash()<EOL>self.upload_id = self.upload_operations.create_upload(project_id, path_data, hash_data,<EOL>storage_provider_id=self.config.storage_provider_id)<EOL>ParallelChunkProcessor(self).run()<EOL>parent_data = ParentData(parent_kind, parent_id)<EOL>remote_file_data = self.upload_operations.finish_upload(self.upload_id, hash_data, parent_data,<EOL>self.local_file.remote_id)<EOL>if self.file_upload_post_processor:<EOL><INDENT>self.file_upload_post_processor.run(self.data_service, remote_file_data)<EOL><DEDENT>return remote_file_data['<STR_LIT:id>']<EOL>
|
Upload file contents to project within a specified parent.
:param project_id: str project uuid
:param parent_kind: str type of parent ('dds-project' or 'dds-folder')
:param parent_id: str uuid of parent
:return: str uuid of the newly uploaded file
|
f3930:c0:m1
|
def __init__(self, parent_kind, parent_id):
|
self.kind = parent_kind<EOL>self.id = parent_id<EOL>
|
DukeDS info about a parent.
:param parent_kind: str: 'dds-project' or 'dds-folder'
:param parent_id: str: uuid of the parent
|
f3930:c1:m0
|
def __init__(self, data_service, waiting_monitor):
|
self.data_service = data_service<EOL>self.waiting_monitor = waiting_monitor<EOL>
|
Setup with specified data service we will communicate with.
:param data_service: DataServiceApi data service we are uploading the file to.
:param waiting_monitor: object with started_waiting() and done_waiting() methods called when waiting for
project to become ready to upload file chunks
|
f3930:c2:m0
|
def _create_upload(self, project_id, path_data, hash_data, remote_filename=None, storage_provider_id=None,<EOL>chunked=True):
|
if not remote_filename:<EOL><INDENT>remote_filename = path_data.name()<EOL><DEDENT>mime_type = path_data.mime_type()<EOL>size = path_data.size()<EOL>def func():<EOL><INDENT>return self.data_service.create_upload(project_id, remote_filename, mime_type, size,<EOL>hash_data.value, hash_data.alg,<EOL>storage_provider_id=storage_provider_id,<EOL>chunked=chunked)<EOL><DEDENT>resp = retry_until_resource_is_consistent(func, self.waiting_monitor)<EOL>return resp.json()<EOL>
|
Create upload for uploading multiple chunks or the non-chunked variety (includes upload url).
:param project_id: str: uuid of the project
:param path_data: PathData: holds file system data about the file we are uploading
:param hash_data: HashData: contains hash alg and value for the file we are uploading
:param remote_filename: str: name to use for our remote file (defaults to the path_data basename)
:param storage_provider_id: str: optional storage provider id
:param chunked: bool: should we create a chunked upload
:return: dict: JSON response describing the created upload
|
f3930:c2:m1
|
def create_upload(self, project_id, path_data, hash_data, remote_filename=None, storage_provider_id=None):
|
upload_response = self._create_upload(project_id, path_data, hash_data, remote_filename=remote_filename,<EOL>storage_provider_id=storage_provider_id, chunked=True)<EOL>return upload_response['<STR_LIT:id>']<EOL>
|
Create a chunked upload id to pass to create_file_chunk_url to create upload urls.
:param project_id: str: uuid of the project
:param path_data: PathData: holds file system data about the file we are uploading
:param hash_data: HashData: contains hash alg and value for the file we are uploading
:param remote_filename: str: name to use for our remote file (defaults to the path_data basename)
:param storage_provider_id: str: optional storage provider id
:return: str: uuid for the upload
|
f3930:c2:m2
|
def create_upload_and_chunk_url(self, project_id, path_data, hash_data, remote_filename=None,<EOL>storage_provider_id=None):
|
upload_response = self._create_upload(project_id, path_data, hash_data, remote_filename=remote_filename,<EOL>storage_provider_id=storage_provider_id, chunked=False)<EOL>return upload_response['<STR_LIT:id>'], upload_response['<STR_LIT>']<EOL>
|
Create a non-chunked upload that returns the upload id and upload url. This type of upload doesn't allow
additional upload urls. For single-chunk files this method is more efficient than
create_upload/create_file_chunk_url.
:param project_id: str: uuid of the project
:param path_data: PathData: holds file system data about the file we are uploading
:param hash_data: HashData: contains hash alg and value for the file we are uploading
:param remote_filename: str: name to use for our remote file (defaults to the path_data basename)
:param storage_provider_id: str: optional storage provider id
:return: str, dict: uuid for the upload, upload chunk url dict
|
f3930:c2:m3
|
def create_file_chunk_url(self, upload_id, chunk_num, chunk):
|
chunk_len = len(chunk)<EOL>hash_data = HashData.create_from_chunk(chunk)<EOL>one_based_index = chunk_num + <NUM_LIT:1><EOL>def func():<EOL><INDENT>return self.data_service.create_upload_url(upload_id, one_based_index, chunk_len,<EOL>hash_data.value, hash_data.alg)<EOL><DEDENT>resp = retry_until_resource_is_consistent(func, self.waiting_monitor)<EOL>return resp.json()<EOL>
|
Create a url for uploading a particular chunk to the datastore.
:param upload_id: str: uuid of the upload this chunk is for
:param chunk_num: int: where in the file does this chunk go (0-based index)
:param chunk: bytes: data we are going to upload
:return: dict: upload url details (http verb, host, url, headers) used to send the chunk
|
f3930:c2:m4
|
def send_file_external(self, url_json, chunk):
|
http_verb = url_json['<STR_LIT>']<EOL>host = url_json['<STR_LIT:host>']<EOL>url = url_json['<STR_LIT:url>']<EOL>http_headers = url_json['<STR_LIT>']<EOL>resp = self._send_file_external_with_retry(http_verb, host, url, http_headers, chunk)<EOL>if resp.status_code != <NUM_LIT:200> and resp.status_code != <NUM_LIT>:<EOL><INDENT>raise ValueError("<STR_LIT>" + str(resp.status_code) + host + url)<EOL><DEDENT>
|
Send chunk to external store specified in url_json.
Raises ValueError on upload failure.
:param url_json: dict contains where/how to upload chunk
:param chunk: data to be uploaded
|
f3930:c2:m5
|
def _send_file_external_with_retry(self, http_verb, host, url, http_headers, chunk):
|
count = <NUM_LIT:0><EOL>retry_times = <NUM_LIT:1><EOL>if http_verb == '<STR_LIT>':<EOL><INDENT>retry_times = SEND_EXTERNAL_PUT_RETRY_TIMES<EOL><DEDENT>while True:<EOL><INDENT>try:<EOL><INDENT>return self.data_service.send_external(http_verb, host, url, http_headers, chunk)<EOL><DEDENT>except requests.exceptions.ConnectionError:<EOL><INDENT>count += <NUM_LIT:1><EOL>if count < retry_times:<EOL><INDENT>if count == <NUM_LIT:1>: <EOL><INDENT>self._show_retry_warning(host)<EOL><DEDENT>time.sleep(SEND_EXTERNAL_RETRY_SECONDS)<EOL>self.data_service.recreate_requests_session()<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT><DEDENT>
|
Send chunk to host, url using http_verb. If http_verb is PUT and a connection error occurs
retry a few times. Pauses between retries. Raises if unsuccessful.
|
f3930:c2:m6
|
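A hedged sketch of this retry policy: only PUTs are retried (assuming the elided verb literal is 'PUT', per the docstring), with a pause between attempts and a one-time warning. The constant values below are illustrative, not the module's real settings:

```python
import time
import requests

SEND_EXTERNAL_PUT_RETRY_TIMES = 5   # illustrative value
SEND_EXTERNAL_RETRY_SECONDS = 10    # illustrative value

def send_with_retry(send_func, http_verb):
    # Only PUT requests get multiple attempts; everything else fails fast.
    retry_times = SEND_EXTERNAL_PUT_RETRY_TIMES if http_verb == 'PUT' else 1
    count = 0
    while True:
        try:
            return send_func()
        except requests.exceptions.ConnectionError:
            count += 1
            if count >= retry_times:
                raise  # out of retries: propagate the original error
            if count == 1:
                print("Connection to host failed; retrying...")
            time.sleep(SEND_EXTERNAL_RETRY_SECONDS)
            # The real code also recreates its requests session between attempts.
```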
@staticmethod<EOL><INDENT>def _show_retry_warning(host):<DEDENT>
|
sys.stderr.write("<STR_LIT>".format(host))<EOL>sys.stderr.flush()<EOL>
|
Displays a message on stderr that we lost connection to a host and will retry.
:param host: str: name of the host we are trying to communicate with
|
f3930:c2:m7
|
def finish_upload(self, upload_id, hash_data, parent_data, remote_file_id):
|
self.data_service.complete_upload(upload_id, hash_data.value, hash_data.alg)<EOL>if remote_file_id:<EOL><INDENT>result = self.data_service.update_file(remote_file_id, upload_id)<EOL>return result.json()<EOL><DEDENT>else:<EOL><INDENT>result = self.data_service.create_file(parent_data.kind, parent_data.id, upload_id)<EOL>return result.json()<EOL><DEDENT>
|
Complete the upload and create or update the file.
:param upload_id: str: uuid of the upload we are completing
:param hash_data: HashData: hash info about the file
:param parent_data: ParentData: info about the parent of this file
:param remote_file_id: str: uuid of this file if it already exists or None if it is a new file
:return: dict: DukeDS details about this file
|
f3930:c2:m8
|
def __init__(self, file_uploader):
|
self.config = file_uploader.config<EOL>self.data_service = file_uploader.data_service<EOL>self.upload_id = file_uploader.upload_id<EOL>self.watcher = file_uploader.watcher<EOL>self.local_file = file_uploader.local_file<EOL>
|
Send chunks in the file specified in file_uploader to the remote data service using multiple processes.
:param file_uploader: FileUploader contains all data we need to upload chunks of a file.
|
f3930:c3:m0
|
def run(self):
|
processes = []<EOL>progress_queue = ProgressQueue(Queue())<EOL>num_chunks = ParallelChunkProcessor.determine_num_chunks(self.config.upload_bytes_per_chunk,<EOL>self.local_file.size)<EOL>work_parcels = ParallelChunkProcessor.make_work_parcels(self.config.upload_workers, num_chunks)<EOL>for (index, num_items) in work_parcels:<EOL><INDENT>processes.append(self.make_and_start_process(index, num_items, progress_queue))<EOL><DEDENT>wait_for_processes(processes, num_chunks, progress_queue, self.watcher, self.local_file)<EOL>
|
Sends contents of a local file to a remote data service.
|
f3930:c3:m1
|
@staticmethod<EOL><INDENT>def determine_num_chunks(chunk_size, file_size):<DEDENT>
|
if file_size == <NUM_LIT:0>:<EOL><INDENT>return <NUM_LIT:1><EOL><DEDENT>return int(math.ceil(float(file_size) / float(chunk_size)))<EOL>
|
Figure out how many pieces we are sending the file in.
NOTE: duke-data-service requires an empty chunk to be uploaded for empty files.
|
f3930:c3:m2
|
@staticmethod<EOL><INDENT>def make_work_parcels(upload_workers, num_chunks):<DEDENT>
|
chunks_per_worker = int(math.ceil(float(num_chunks) / float(upload_workers)))<EOL>return ParallelChunkProcessor.divide_work(range(num_chunks), chunks_per_worker)<EOL>
|
Make groups so we can split up num_chunks into similar sizes.
Rounds up while trying to keep work evenly split, so it may not use all workers;
with too few items to distribute it can result in fewer than upload_workers processes.
:param upload_workers: int target number of workers
:param num_chunks: int number of total items we need to send
:return [(index, num_items)] - an array of tuples where each element will be handled in a separate process.
|
f3930:c3:m3
|
@staticmethod<EOL><INDENT>def divide_work(list_of_indexes, batch_size):<DEDENT>
|
grouped_indexes = [list_of_indexes[i:i + batch_size] for i in range(<NUM_LIT:0>, len(list_of_indexes), batch_size)]<EOL>return [(batch[<NUM_LIT:0>], len(batch)) for batch in grouped_indexes]<EOL>
|
Given a sequential list of indexes split them into batches of roughly batch_size.
:param list_of_indexes: [int] list of indexes to be divided up
:param batch_size: int: target number of items per batch (the final batch may be smaller)
:return: [(int,int)] list of (index, num_items) to be processed
|
f3930:c3:m4
|
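A worked example of the chunk arithmetic above: a 10 MB file with 1 MB chunks yields 10 chunks, which four workers split as 3/3/3/1 (the last worker gets the remainder):

```python
import math

def determine_num_chunks(chunk_size, file_size):
    if file_size == 0:
        return 1  # DukeDS requires one empty chunk for empty files
    return int(math.ceil(float(file_size) / float(chunk_size)))

def divide_work(list_of_indexes, batch_size):
    # Slice sequential indexes into batches, then keep (start_index, count) pairs.
    groups = [list_of_indexes[i:i + batch_size]
              for i in range(0, len(list_of_indexes), batch_size)]
    return [(batch[0], len(batch)) for batch in groups]

def make_work_parcels(upload_workers, num_chunks):
    chunks_per_worker = int(math.ceil(float(num_chunks) / float(upload_workers)))
    return divide_work(list(range(num_chunks)), chunks_per_worker)

assert determine_num_chunks(1024 * 1024, 10 * 1024 * 1024) == 10
assert make_work_parcels(4, 10) == [(0, 3), (3, 3), (6, 3), (9, 1)]
assert determine_num_chunks(1024 * 1024, 0) == 1  # empty file still sends one chunk
```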
def make_and_start_process(self, index, num_items, progress_queue):
|
process = Process(target=upload_async,<EOL>args=(self.data_service.auth.get_auth_data(), self.config, self.upload_id,<EOL>self.local_file.path, index, num_items, progress_queue))<EOL>process.start()<EOL>return process<EOL>
|
Create and start a process to upload num_items chunks from our file starting at index.
:param index: int offset into file (must be multiplied by upload_bytes_per_chunk to get actual location)
:param num_items: int number of chunks to send
:param progress_queue: ProgressQueue queue to send notifications of progress or errors
|
f3930:c3:m5
|
def __init__(self, data_service, upload_id, filename, chunk_size, index, num_chunks_to_send, progress_queue):
|
self.data_service = data_service<EOL>self.upload_operations = FileUploadOperations(self.data_service, None)<EOL>self.upload_id = upload_id<EOL>self.filename = filename<EOL>self.chunk_size = chunk_size<EOL>self.index = index<EOL>self.num_chunks_to_send = num_chunks_to_send<EOL>self.progress_queue = progress_queue<EOL>
|
Sends num_chunks_to_send from filename at offset index*chunk_size.
:param data_service: DataServiceApi remote service we will be uploading to
:param upload_id: str upload uuid we are sending chunks part of
:param filename: str path to file on disk we are uploading parts of
:param chunk_size: int size of block we will upload
:param index: int index into filename content (must multiply by chunk_size during seek)
:param num_chunks_to_send: how many chunks of chunk_size should we upload
:param progress_queue: ProgressQueue queue we will send updates or errors to.
|
f3930:c4:m0
|
def send(self):
|
sent_chunks = <NUM_LIT:0><EOL>chunk_num = self.index<EOL>with open(self.filename, '<STR_LIT:rb>') as infile:<EOL><INDENT>infile.seek(self.index * self.chunk_size)<EOL>while sent_chunks != self.num_chunks_to_send:<EOL><INDENT>chunk = infile.read(self.chunk_size)<EOL>self._send_chunk(chunk, chunk_num)<EOL>self.progress_queue.processed(<NUM_LIT:1>)<EOL>chunk_num += <NUM_LIT:1><EOL>sent_chunks += <NUM_LIT:1><EOL><DEDENT><DEDENT>
|
For each chunk we need to send, create upload url and send bytes. Raises exception on error.
|
f3930:c4:m1
|
def _send_chunk(self, chunk, chunk_num):
|
url_info = self.upload_operations.create_file_chunk_url(self.upload_id, chunk_num, chunk)<EOL>self.upload_operations.send_file_external(url_info, chunk)<EOL>
|
Send a single chunk to the remote service.
:param chunk: bytes data we are uploading
:param chunk_num: int number associated with this chunk
|
f3930:c4:m2
|
def __init__(self, message, warning=False):
|
Exception.__init__(self, message)<EOL>self.message = message<EOL>self.warning = warning<EOL>
|
Setup error.
:param message: str reason for the error
:param warning: boolean is this just a warning
|
f3931:c0:m0
|
def __init__(self, message):
|
Exception.__init__(self, message)<EOL>
|
:param message: str reason for the error
|
f3931:c1:m0
|
def __init__(self, message):
|
Exception.__init__(self, message)<EOL>
|
:param message: str reason for the error
|
f3931:c2:m0
|
def __init__(self, url, api_token):
|
self.url = url<EOL>self.json_headers = {<EOL>'<STR_LIT:Content-Type>': '<STR_LIT:application/json>',<EOL>'<STR_LIT>': api_token<EOL>}<EOL>
|
Setup url we will be talking to.
:param url: str url of the service including "/api/v1" portion
:param api_token: str token used to authenticate requests to the service
|
f3931:c3:m0
|
def make_url(self, destination, extra='<STR_LIT>'):
|
return '<STR_LIT>'.format(self.url, destination, extra)<EOL>
|
Build url based on destination with optional suffix (extra).
:param destination: str base suffix(with slashes)
:param extra: str optional suffix
|
f3931:c3:m1
|
def get_existing_item(self, item):
|
params = {<EOL>'<STR_LIT>': item.project_id,<EOL>'<STR_LIT>': item.from_user_id,<EOL>'<STR_LIT>': item.to_user_id,<EOL>}<EOL>resp = requests.get(self.make_url(item.destination), headers=self.json_headers, params=params)<EOL>self.check_response(resp)<EOL>return resp<EOL>
|
Look up item in the remote service based on keys.
:param item: D4S2Item data contains keys we will use for lookup.
:return: requests.Response containing the successful result
|
f3931:c3:m2
|
def create_item(self, item):
|
item_dict = {<EOL>'<STR_LIT>': item.project_id,<EOL>'<STR_LIT>': item.from_user_id,<EOL>'<STR_LIT>': item.to_user_id,<EOL>'<STR_LIT>': item.auth_role,<EOL>'<STR_LIT>': item.user_message<EOL>}<EOL>if item.share_user_ids:<EOL><INDENT>item_dict['<STR_LIT>'] = item.share_user_ids<EOL><DEDENT>data = json.dumps(item_dict)<EOL>resp = requests.post(self.make_url(item.destination), headers=self.json_headers, data=data)<EOL>self.check_response(resp)<EOL>return resp<EOL>
|
Create a new item in D4S2 service for item at the specified destination.
:param item: D4S2Item data to use for creating a D4S2 item
:return: requests.Response containing the successful result
|
f3931:c3:m3
|
def send_item(self, destination, item_id, force_send):
|
data = json.dumps({<EOL>'<STR_LIT>': force_send,<EOL>})<EOL>url_suffix = "<STR_LIT>".format(item_id)<EOL>resp = requests.post(self.make_url(destination, url_suffix), headers=self.json_headers, data=data)<EOL>self.check_response(resp)<EOL>return resp<EOL>
|
Run send method for item_id at destination.
:param destination: str which type of operation we are doing (SHARE_DESTINATION or DELIVER_DESTINATION)
:param item_id: str D4S2 service id representing the item we want to send
:param force_send: bool it's ok to email the item again
:return: requests.Response containing the successful result
|
f3931:c3:m4
|
def check_response(self, response):
|
if response.status_code == <NUM_LIT>:<EOL><INDENT>raise D4S2Error(UNAUTHORIZED_MESSAGE)<EOL><DEDENT>if not <NUM_LIT:200> <= response.status_code < <NUM_LIT>:<EOL><INDENT>raise D4S2Error("<STR_LIT>".format(response.url, response.status_code,<EOL>response.text))<EOL><DEDENT>
|
Raises error if the response isn't successful.
:param response: requests.Response response to be checked
|
f3931:c3:m5
|
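A sketch of the response check above, assuming the elided numeric literals are 401 and 300 (the unauthorized status and the end of the 2xx success range):

```python
class D4S2Error(Exception):
    pass

def check_response(response):
    if response.status_code == 401:  # assumed literal: unauthorized
        raise D4S2Error("Unauthorized: check your D4S2 API token.")
    if not 200 <= response.status_code < 300:  # assumed upper bound of success range
        raise D4S2Error("Request to {} failed: {} {}".format(
            response.url, response.status_code, response.text))

class FakeResponse:  # stand-in for requests.Response, just to exercise the check
    status_code, url, text = 404, "https://example.org/api", "not found"

try:
    check_response(FakeResponse())
except D4S2Error as err:
    assert "404" in str(err)
```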
def __init__(self, destination, from_user_id, to_user_id, project_id, project_name, auth_role, user_message,<EOL>share_user_ids):
|
self.destination = destination<EOL>self.from_user_id = from_user_id<EOL>self.to_user_id = to_user_id<EOL>self.project_id = project_id<EOL>self.project_name = project_name<EOL>self.auth_role = auth_role<EOL>self.user_message = user_message<EOL>self.share_user_ids = share_user_ids<EOL>
|
Save data for use with send method.
:param destination: str type of message we are sending (SHARE_DESTINATION or DELIVER_DESTINATION)
:param from_user_id: str uuid(duke-data-service) of the user who is sending the share/delivery
:param to_user_id: str uuid(duke-data-service) of the user who is receiving the share/delivery
:param project_id: str uuid(duke-data-service) of project we are sharing
:param project_name: str name of the project (sent for debugging purposes)
:param auth_role: str authorization role to give to the user (determines which email to send)
:param user_message: str user message to send with the share/delivery
:param share_user_ids: [str] users to share the project with once ownership is transferred (only for delivery)
|
f3931:c4:m0
|
def send(self, api, force_send):
|
item_id = self.get_existing_item_id(api)<EOL>if not item_id:<EOL><INDENT>item_id = self.create_item_returning_id(api)<EOL>api.send_item(self.destination, item_id, force_send)<EOL><DEDENT>else:<EOL><INDENT>if force_send:<EOL><INDENT>api.send_item(self.destination, item_id, force_send)<EOL><DEDENT>else:<EOL><INDENT>item_type = D4S2Api.DEST_TO_NAME.get(self.destination, "<STR_LIT>")<EOL>msg = "<STR_LIT>"<EOL>raise D4S2Error(msg.format(item_type), warning=True)<EOL><DEDENT><DEDENT>
|
Send this item using api.
:param api: D4S2Api sends messages to D4S2
:param force_send: bool should we send even if the item already exists
|
f3931:c4:m1
|
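A hedged sketch of this send flow: reuse an existing D4S2 item when one is found, otherwise create one; an existing item is only re-sent when force_send is True. FakeApi and the destination name are stand-ins, not the real D4S2Api:

```python
class D4S2Error(Exception):
    def __init__(self, message, warning=False):
        super().__init__(message)
        self.warning = warning

def send(api, destination, force_send):
    item_id = api.get_existing_item_id()
    if not item_id:
        # New item: create it, then trigger the email.
        item_id = api.create_item_returning_id()
        api.send_item(destination, item_id, force_send)
    elif force_send:
        api.send_item(destination, item_id, force_send)
    else:
        raise D4S2Error("Item was already sent; use force to re-send.", warning=True)

class FakeApi:  # stand-in, just to exercise the flow
    def get_existing_item_id(self):
        return None
    def create_item_returning_id(self):
        return "item-1"
    def send_item(self, destination, item_id, force_send):
        print("sent", destination, item_id)

send(FakeApi(), "deliveries", force_send=False)  # prints: sent deliveries item-1
```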
def get_existing_item_id(self, api):
|
resp = api.get_existing_item(self)<EOL>items = resp.json()<EOL>num_items = len(items)<EOL>if num_items == <NUM_LIT:0>:<EOL><INDENT>return None<EOL><DEDENT>else:<EOL><INDENT>return items[<NUM_LIT:0>]['<STR_LIT:id>']<EOL><DEDENT>
|
Look up the id for this item via the D4S2 service.
:param api: D4S2Api object that communicates with the D4S2 server.
:return str id of this item or None if not found
|
f3931:c4:m2
|
def create_item_returning_id(self, api):
|
resp = api.create_item(self)<EOL>item = resp.json()<EOL>return item['<STR_LIT:id>']<EOL>
|
Create this item in the D4S2 service.
:param api: D4S2Api object that communicates with the D4S2 server.
:return str newly created id for this item
|
f3931:c4:m3
|
def __init__(self, config, remote_store, print_func):
|
self.config = config<EOL>auth = DataServiceAuth(self.config)<EOL>api_token = auth.get_auth()<EOL>self.api = D4S2Api(config.d4s2_url, api_token)<EOL>self.remote_store = remote_store<EOL>self.print_func = print_func<EOL>
|
Setup for sharing a project and sending email on the D4S2 service.
:param config: Config configuration specifying which remote_store to use.
:param remote_store: RemoteStore remote store we will be sharing a project from
:param print_func: func used to print output somewhere
|
f3931:c5:m0
|
def share(self, project, to_user, force_send, auth_role, user_message):
|
if self._is_current_user(to_user):<EOL><INDENT>raise ShareWithSelfError(SHARE_WITH_SELF_MESSAGE.format("<STR_LIT>"))<EOL><DEDENT>if not to_user.email:<EOL><INDENT>self._raise_user_missing_email_exception("<STR_LIT>")<EOL><DEDENT>self.set_user_project_permission(project, to_user, auth_role)<EOL>return self._share_project(D4S2Api.SHARE_DESTINATION, project, to_user, force_send, auth_role, user_message)<EOL>
|
Send mail and give user specified access to the project.
:param project: RemoteProject project to share
:param to_user: RemoteUser user to receive email/access
:param force_send: bool re-send the email even if the share already exists
:param auth_role: str project role eg 'project_admin' to give to the user
:param user_message: str message to be sent with the share
:return: str email address the share was sent to
|
f3931:c5:m1
|
def set_user_project_permission(self, project, user, auth_role):
|
self.remote_store.set_user_project_permission(project, user, auth_role)<EOL>
|
Give user access permissions for a project.
:param project: RemoteProject project to update permissions on
:param user: RemoteUser user to receive permissions
:param auth_role: str project role eg 'project_admin'
|
f3931:c5:m2
|
def deliver(self, project, new_project_name, to_user, share_users, force_send, path_filter, user_message):
|
if self._is_current_user(to_user):<EOL><INDENT>raise ShareWithSelfError(SHARE_WITH_SELF_MESSAGE.format("<STR_LIT>"))<EOL><DEDENT>if not to_user.email:<EOL><INDENT>self._raise_user_missing_email_exception("<STR_LIT>")<EOL><DEDENT>self.remove_user_permission(project, to_user)<EOL>if new_project_name:<EOL><INDENT>project = self._copy_project(project, new_project_name, path_filter)<EOL><DEDENT>return self._share_project(D4S2Api.DELIVER_DESTINATION, project, to_user,<EOL>force_send, user_message=user_message, share_users=share_users)<EOL>
|
Remove to_user's access to project, copy it to new_project_name if that is not None,
and send a message to the service to email the user so they can accept access.
:param project: RemoteProject pre-existing project to be delivered
:param new_project_name: str name of non-existing project to copy project_name to, if None we don't copy
:param to_user: RemoteUser user we are handing over the project to
:param share_users: [RemoteUser] who will have project shared with them once to_user accepts the project
:param force_send: boolean enables resending of email for existing projects
:param path_filter: PathFilter: filters what files are shared
:param user_message: str message to be sent with the share
:return: str email address the delivery was sent to
|
f3931:c5:m3
|
def remove_user_permission(self, project, user):
|
self.remote_store.revoke_user_project_permission(project, user)<EOL>
|
Take away user's access to project.
:param project: RemoteProject project to remove permissions on
:param user: RemoteUser user who should no longer have access
|
f3931:c5:m4
|
def _share_project(self, destination, project, to_user, force_send, auth_role='<STR_LIT>', user_message='<STR_LIT>',<EOL>share_users=None):
|
from_user = self.remote_store.get_current_user()<EOL>share_user_ids = None<EOL>if share_users:<EOL><INDENT>share_user_ids = [share_user.id for share_user in share_users]<EOL><DEDENT>item = D4S2Item(destination=destination,<EOL>from_user_id=from_user.id,<EOL>to_user_id=to_user.id,<EOL>project_id=project.id,<EOL>project_name=project.name,<EOL>auth_role=auth_role,<EOL>user_message=user_message,<EOL>share_user_ids=share_user_ids)<EOL>item.send(self.api, force_send)<EOL>return to_user.email<EOL>
|
Send message to remote service to email/share project with to_user.
:param destination: str which type of sharing we are doing (SHARE_DESTINATION or DELIVER_DESTINATION)
:param project: RemoteProject project we are sharing
:param to_user: RemoteUser user we are sharing with
:param force_send: bool re-send the email even if the item already exists
:param auth_role: str project role eg 'project_admin' email is customized based on this setting.
:param user_message: str message to be sent with the share
:param share_users: [RemoteUser] users to have this project shared with after delivery (delivery only)
:return: str email address that should receive a message soon
|
f3931:c5:m5
|
def _copy_project(self, project, new_project_name, path_filter):
|
temp_directory = tempfile.mkdtemp()<EOL>new_project_name_or_id = ProjectNameOrId.create_from_name(new_project_name)<EOL>remote_project = self.remote_store.fetch_remote_project(new_project_name_or_id)<EOL>if remote_project:<EOL><INDENT>raise ValueError("<STR_LIT>".format(new_project_name))<EOL><DEDENT>activity = CopyActivity(self.remote_store.data_service, project, new_project_name)<EOL>self._download_project(activity, project, temp_directory, path_filter)<EOL>self._upload_project(activity, new_project_name, temp_directory)<EOL>activity.finished()<EOL>shutil.rmtree(temp_directory)<EOL>return self.remote_store.fetch_remote_project(new_project_name_or_id, must_exist=True)<EOL>
|
Copy the pre-existing project to a non-existing project named new_project_name.
:param project: remotestore.RemoteProject project to copy from
:param new_project_name: str project to copy to
:param path_filter: PathFilter: filters what files are shared
:return: RemoteProject new project we copied data to
|
f3931:c5:m6
|
def _download_project(self, activity, project, temp_directory, path_filter):
|
self.print_func("<STR_LIT>".format(project.name))<EOL>project_download = ProjectDownload(self.remote_store, project, temp_directory, path_filter,<EOL>file_download_pre_processor=DownloadedFileRelations(activity))<EOL>project_download.run()<EOL>
|
Download the project to temp_directory.
:param activity: CopyActivity: info about the copy activity we are downloading for
:param project: remotestore.RemoteProject project to download
:param temp_directory: str path to directory we can download into
:param path_filter: PathFilter: filters what files are shared
|
f3931:c5:m7
|
def _upload_project(self, activity, project_name, temp_directory):
|
self.print_func("<STR_LIT>".format(project_name))<EOL>items_to_send = [os.path.join(temp_directory, item) for item in os.listdir(os.path.abspath(temp_directory))]<EOL>project_name_or_id = ProjectNameOrId.create_from_name(project_name)<EOL>project_upload = ProjectUpload(self.config, project_name_or_id, items_to_send,<EOL>file_upload_post_processor=UploadedFileRelations(activity))<EOL>project_upload.run()<EOL>
|
Upload the contents of temp_directory into project_name.
:param activity: CopyActivity: info about the copy activity we are uploading for
:param project_name: str project name we will upload files to
:param temp_directory: str path to directory whose files we will upload
|
f3931:c5:m8
|
def _is_current_user(self, some_user):
|
current_user = self.remote_store.get_current_user()<EOL>return current_user.id == some_user.id<EOL>
|
Is the specified user the current user?
:param some_user: RemoteUser user we want to check against the current user
:return: boolean: True if the current user is the passed in user
|
f3931:c5:m9
|
def __init__(self, data_service, project, new_project_name):
|
self.data_service = data_service<EOL>self.name = "<STR_LIT>".format(project.name)<EOL>self.desc = "<STR_LIT>".format(project.name, new_project_name,<EOL>get_internal_version_str())<EOL>self.started = self._current_timestamp_str()<EOL>result = data_service.create_activity(self.name, self.desc, started_on=self.started)<EOL>self.id = result.json()['<STR_LIT:id>']<EOL>self.remote_path_to_file_version_id = {}<EOL>
|
Create an activity for our copy operation so users can trace back where the copied files came from.
:param data_service: DataServiceApi: service used to create the activity
:param project: RemoteProject project we will download files from
:param new_project_name: str project name we will upload files into
|
f3931:c6:m0
|
def finished(self):
|
self.data_service.update_activity(self.id, self.name, self.desc,<EOL>started_on=self.started,<EOL>ended_on=self._current_timestamp_str())<EOL>
|
Mark the activity as finished.
|
f3931:c6:m1
|
def __init__(self, activity):
|
self.activity = activity<EOL>
|
:param activity: CopyActivity: info about the activity associated with the files we are downloading
|
f3931:c7:m0
|
def run(self, data_service, project_file):
|
remote_path = project_file.path<EOL>file_dict = data_service.get_file(project_file.id).json()<EOL>file_version_id = file_dict['<STR_LIT>']['<STR_LIT:id>']<EOL>data_service.create_used_relation(self.activity.id, KindType.file_str, file_version_id)<EOL>self.activity.remote_path_to_file_version_id[remote_path] = file_version_id<EOL>
|
Attach a remote file to the activity with a 'used' relationship.
:param data_service: DataServiceApi: service used to attach relationship
:param project_file: ProjectFile: contains details about a file we will attach
|
f3931:c7:m1
|
def __init__(self, activity):
|
self.activity = activity<EOL>
|
:param activity: CopyActivity: info about the activity associated with the files we are uploading
|
f3931:c8:m0
|
def run(self, data_service, file_details):
|
file_version_id = file_details['<STR_LIT>']['<STR_LIT:id>']<EOL>data_service.create_was_generated_by_relation(self.activity.id, KindType.file_str, file_version_id)<EOL>used_entity_id = self._lookup_used_entity_id(file_details)<EOL>data_service.create_was_derived_from_relation(used_entity_id, KindType.file_str,<EOL>file_version_id, KindType.file_str)<EOL>
|
Attach a remote file to the activity with a 'was generated by' relationship.
:param data_service: DataServiceApi: service used to attach relationship
:param file_details: dict: response from DukeDS POST to /files/ containing current_version id
|
f3931:c8:m1
|
def _lookup_used_entity_id(self, file_details):
|
<EOL>name_parts = [ancestor['<STR_LIT:name>'] for ancestor in file_details['<STR_LIT>']<EOL>if ancestor['<STR_LIT>'] == KindType.folder_str]<EOL>name_parts.append(file_details['<STR_LIT:name>'])<EOL>remote_path = os.sep.join(name_parts)<EOL>return self.activity.remote_path_to_file_version_id[remote_path]<EOL>
|
Return the file_version_id associated with the path from file_details.
The file_version_id is looked up from a dictionary in the activity.
:param file_details: dict: response from DukeDS POST to /files/
:return: str: file_version_id uuid
|
f3931:c8:m2
|
def __init__(self, file_exclude_regex):
|
if file_exclude_regex:<EOL><INDENT>self.exclude_regex = re.compile(file_exclude_regex)<EOL><DEDENT>else:<EOL><INDENT>self.exclude_regex = None<EOL><DEDENT>
|
Set exclusion regex to be used when filtering.
Pass empty string to include everything.
:param file_exclude_regex: str: regex that matches files we want to exclude
|
f3932:c0:m0
|
def include(self, filename, is_file):
|
if self.exclude_regex and is_file:<EOL><INDENT>if self.exclude_regex.match(filename):<EOL><INDENT>return False<EOL><DEDENT>return True<EOL><DEDENT>else:<EOL><INDENT>return True<EOL><DEDENT>
|
Determines if a file should be included in a project for uploading.
If file_exclude_regex is empty it will include everything.
:param filename: str: filename to match it should not include directory
:param is_file: bool: is this a file; if not, this method always returns True
:return: boolean: True if we should include the file.
|
f3932:c0:m1
|
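A runnable sketch of this filter; the exclusion pattern below is illustrative (the real regex is configured by the caller):

```python
import re

class FileFilter:
    def __init__(self, file_exclude_regex):
        # An empty pattern disables exclusion entirely.
        self.exclude_regex = re.compile(file_exclude_regex) if file_exclude_regex else None

    def include(self, filename, is_file):
        # Only bare filenames of regular files are tested; directories always pass.
        if self.exclude_regex and is_file:
            return not self.exclude_regex.match(filename)
        return True

f = FileFilter(r'^\..*|.*\.pyc$')   # illustrative: exclude dotfiles and .pyc files
assert not f.include('.DS_Store', is_file=True)
assert not f.include('cache.pyc', is_file=True)
assert f.include('notes.txt', is_file=True)
assert f.include('.git', is_file=False)  # directories are always included
```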
def add_filename_pattern(self, dir_name, pattern):
|
full_pattern = '<STR_LIT>'.format(dir_name, os.sep, pattern)<EOL>filename_regex = fnmatch.translate(full_pattern)<EOL>self.regex_list.append(re.compile(filename_regex))<EOL>
|
Adds a Unix shell-style wildcard pattern underneath the specified directory.
:param dir_name: str: directory that contains the pattern
:param pattern: str: Unix shell-style wildcard pattern
|
f3932:c1:m1
|
def include(self, path):
|
for regex_item in self.regex_list:<EOL><INDENT>if regex_item.match(path):<EOL><INDENT>return False<EOL><DEDENT><DEDENT>return True<EOL>
|
Returns False if any pattern matches the path.
:param path: str: filename path to test
:return: boolean: True if we should include this path
|
f3932:c1:m2
|
def load_directory(self, top_path, followlinks):
|
for dir_name, child_dirs, child_files in os.walk(top_path, followlinks=followlinks):<EOL><INDENT>for child_filename in child_files:<EOL><INDENT>if child_filename == DDS_IGNORE_FILENAME:<EOL><INDENT>pattern_lines = self._read_non_empty_lines(dir_name, child_filename)<EOL>self.add_patterns(dir_name, pattern_lines)<EOL><DEDENT><DEDENT><DEDENT>
|
Traverse top_path directory and save patterns in any .ddsignore files found.
:param top_path: str: directory name we should traverse looking for ignore files
:param followlinks: boolean: should we traverse symbolic links
|
f3932:c2:m1
|
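A sketch of how a .ddsignore wildcard becomes an anchored regex via fnmatch.translate. The exact format string is elided in the body above, so the dir/sep/pattern joining shown here is an assumption consistent with the parameters:

```python
import fnmatch
import os
import re

def make_ignore_regex(dir_name, pattern):
    # Anchor the wildcard under its directory: '<dir><sep><pattern>' (assumed layout),
    # then translate the shell-style pattern into a compiled regex.
    full_pattern = '{}{}{}'.format(dir_name, os.sep, pattern)
    return re.compile(fnmatch.translate(full_pattern))

regex = make_ignore_regex('/data/project', '*.tmp')
assert regex.match(os.path.join('/data/project', 'scratch.tmp'))
assert not regex.match(os.path.join('/data/other', 'scratch.tmp'))
```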