signature | body | docstring | id
|---|---|---|---|
def get_auth_provider_affiliates(self, auth_provider_id, full_name_contains=None, email=None, username=None):
|
data = {}
if full_name_contains:
    data['<STR_LIT>'] = full_name_contains
if email:
    data['email'] = email
if username:
    data['username'] = username
return self._get_collection("<STR_LIT>".format(auth_provider_id), data)
|
List affiliates for a specific auth provider.
:param auth_provider_id: str: uuid of the auth provider to list affiliates of
:param full_name_contains: str: filters affiliates whose full name contains this value
:param email: str: filters affiliates by this email address
:param username: str: filters affiliates by this username
:return: requests.Response containing the successful result
|
f3912:c4:m64
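A minimal usage sketch of the affiliate listing call above, assuming data_service is an already-authenticated instance of this client class; the provider uuid and the 'results' envelope key are illustrative assumptions, not confirmed by this dump:

provider_id = "aca35ba7-0000-0000-0000-000000000000"  # hypothetical uuid
response = data_service.get_auth_provider_affiliates(provider_id, full_name_contains="Smith")
for affiliate in response.json()["results"]:  # 'results' key is an assumption
    print(affiliate["username"], affiliate["full_name"])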
|
def get_auth_provider_affiliate(self, auth_provider_id, username):
|
return self._get_single_item('<STR_LIT>'.format(auth_provider_id, username), {})
|
Fetch a single affiliate by username for an auth provider.
:param auth_provider_id: str: uuid of the auth provider to fetch the affiliate from
:param username: str: unique username of the affiliate to fetch
:return: requests.Response containing the successful result
|
f3912:c4:m65
|
def auth_provider_add_user(self, auth_provider_id, username):
|
url = "<STR_LIT>".format(auth_provider_id, username)<EOL>return self._post(url, {})<EOL>
|
Transform an institutional affiliate's UID, such as a Duke NetID, into a DDS-specific user identity;
can be used by clients prior to calling DDS APIs that require a DDS user in the request payload.
Returns user details. Can be safely called multiple times.
:param auth_provider_id: str: id of an auth provider that supports adding users
:param username: str: netid we wish to register with DukeDS
:return: requests.Response containing the successful result
|
f3912:c4:m66
|
def _get_page_size(self):
|
config = self.auth.config
return config.page_size
|
Return how many items we should include in each page for multi-page DukeDS results
:return: int
|
f3912:c4:m67
|
def __init__(self, base_response, merge_array_field_name):
|
self.base_response = base_response
self.merge_array_field_name = merge_array_field_name
self.combined_json = self.base_response.json()
|
Set up with a primary response that will answer all methods/properties except json().
:param base_response: requests.Response containing the successful result that will answer methods/properties
:param merge_array_field_name: str: name of the array field in the JSON data to merge when add_response is called
|
f3912:c5:m0
|
def __getattr__(self, attr):
|
return getattr(self.base_response, attr)
|
Forward all attribute lookups to the base_response property.
|
f3912:c5:m1
|
def json(self):
|
return self.combined_json
|
Return JSON created by merging the merge_array_field_name values of the base response and any added responses.
:return: dict: combined dictionary from multiple responses
|
f3912:c5:m2
|
def add_response(self, response):
|
key = self.merge_array_field_name
response_json = response.json()
value = self.combined_json[key]
self.combined_json[self.merge_array_field_name] = value + response_json[key]
|
Merge the array field from response.json() into the data returned by our json().
:param response: requests.Response containing the successful JSON result to be merged
|
f3912:c5:m3
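A standalone sketch of the page-merging pattern implemented by this class, using plain dicts in place of requests.Response objects; the class and field names here are illustrative, not the library's API:

class CombinedPages:
    """Accumulate one array field across several paged JSON payloads."""
    def __init__(self, first_page, field_name):
        self.field_name = field_name
        self.combined = dict(first_page)

    def add_page(self, page):
        # Append this page's array onto the accumulated array.
        self.combined[self.field_name] = self.combined[self.field_name] + page[self.field_name]

page1 = {"results": [1, 2], "count": 4}
page2 = {"results": [3, 4], "count": 4}
pages = CombinedPages(page1, "results")
pages.add_page(page2)
assert pages.combined["results"] == [1, 2, 3, 4]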
|
def add_func(context):
|
values = context.values
if context.message_data:
    context.send_message(context.message_data)
v1, v2 = values
return v1 + v2
|
Function run by AddCommand
:param context: context holding the two values to add and optional message_data
:return: sum of values
|
f3918:m1
|
def download_file_part_run(download_context):
|
destination_dir, file_url_data_dict, seek_amt, bytes_to_read = download_context.params
project_file = ProjectFile(file_url_data_dict)
local_path = project_file.get_local_path(destination_dir)
retry_chunk_downloader = RetryChunkDownloader(project_file, local_path,
                                              seek_amt, bytes_to_read,
                                              download_context)
retry_chunk_downloader.run()
return '<STR_LIT>'
|
Function run in a background process to download part of a file.
:param download_context: DownloadContext: contains data service setup and params
(destination directory, file url data, seek offset, and bytes to read).
|
f3925:m0
|
def __init__(self, remote_store, project, dest_directory, path_filter, file_download_pre_processor=None):
|
self.remote_store = remote_store
self.project = project
self.dest_directory = dest_directory
self.path_filter = path_filter
self.file_download_pre_processor = file_download_pre_processor
|
Setup for downloading a remote project.
:param remote_store: RemoteStore: which remote store to download the project from
:param project: RemoteProject: project to download
:param dest_directory: str: path to where we will save the project contents
:param path_filter: PathFilter: determines which files will be downloaded
:param file_download_pre_processor: object: has run(data_service, RemoteFile) method to run before downloading
|
f3925:c0:m0
|
def run(self):
|
files_to_download = self.get_files_to_download()
total_files_size = self.get_total_files_size(files_to_download)
if self.file_download_pre_processor:
    self.run_preprocessor(files_to_download)
self.try_create_dir(self.dest_directory)
watcher = ProgressPrinter(total_files_size, msg_verb='<STR_LIT>')
self.download_files(files_to_download, watcher)
watcher.finished()
warnings = self.check_warnings()
if warnings:
    watcher.show_warning(warnings)
|
Download the contents of the specified project name or id to dest_directory.
|
f3925:c0:m1
|
def run_preprocessor(self, files_to_download):
|
for project_file in files_to_download:
    self.file_download_pre_processor.run(self.remote_store.data_service, project_file)
|
Run file_download_pre_processor for each file we are about to download.
:param files_to_download: [ProjectFile]: files that will be downloaded
|
f3925:c0:m6
|
def try_create_dir(self, path):
|
if not os.path.exists(path):
    os.mkdir(path)
elif not os.path.isdir(path):
    raise ValueError("<STR_LIT>" + path + "<STR_LIT>")
|
Try to create a directory if it doesn't exist and raise error if there is a non-directory with the same name.
:param path: str path to the directory
|
f3925:c0:m8
|
def get_data_service_auth_data(self):
|
return self.remote_store.data_service.auth.get_auth_data()
|
Serialize data_service setup into something that can be passed to another process.
:return: tuple of data service settings
|
f3925:c1:m1
|
def __init__(self, settings, params, message_queue, task_id):
|
self.data_service_auth_data = settings.get_data_service_auth_data()
self.config = settings.config
self.params = params
self.message_queue = message_queue
self.task_id = task_id
|
Setup context so it can be passed to a background process.
:param settings: DownloadSettings: project level info
:param params: tuple: values specific to the function being run
:param message_queue: Queue: queue the background process can send messages to us on
:param task_id: int: id of this command's task so messages will be routed correctly
|
f3925:c2:m0
|
def send_message(self, data):
|
self.message_queue.put((self.task_id, data))
|
Sends a message to the command's on_message(data) method.
:param data: object: data sent to on_message
|
f3925:c2:m3
|
def __init__(self, settings, file_urls, watcher):
|
self.settings = settings
self.file_urls = file_urls
self.dest_directory = settings.dest_directory
self.bytes_per_chunk = self.settings.config.download_bytes_per_chunk
self.watcher = watcher
|
:param settings: DownloadSettings: settings with config and destination directory
:param file_urls: [ddsc.sdk.client.ProjectFileUrl]: file urls to be downloaded
:param watcher: object that receives progress updates while downloading
|
f3925:c3:m0
|
def _get_parent_remote_paths(self):
|
parent_paths = set([item.get_remote_parent_path() for item in self.file_urls])
if '<STR_LIT>' in parent_paths:
    parent_paths.remove('<STR_LIT>')
return parent_paths
|
Get the set of remote folders based on the list of all file urls.
:return: set([str]): set of remote folders (that contain files)
|
f3925:c3:m1
|
def make_local_directories(self):
|
for remote_path in self._get_parent_remote_paths():
    local_path = os.path.join(self.dest_directory, remote_path)
    self._assure_dir_exists(local_path)
|
Create directories necessary to download the files into dest_directory
|
f3925:c3:m2
|
def make_big_empty_files(self):
|
for file_url in self.file_urls:
    local_path = file_url.get_local_path(self.dest_directory)
    with open(local_path, "wb") as outfile:
        if file_url.size > 0:
            outfile.seek(int(file_url.size) - 1)
            outfile.write(b'<STR_LIT>')
|
Write out an empty file of the final size so the workers can seek to where they should write and write their data.
|
f3925:c3:m3
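A standalone sketch of the pre-allocation trick used above: seeking to size - 1 and writing a single byte extends a fresh file to its final length, so workers can later seek and write their chunks in place (path and size below are illustrative):

import os
import tempfile

def make_empty_file(path, size):
    # Create a file of the final size up front.
    with open(path, "wb") as outfile:
        if size > 0:
            outfile.seek(size - 1)
            outfile.write(b"\0")

path = os.path.join(tempfile.mkdtemp(), "placeholder.bin")
make_empty_file(path, 1024)
assert os.path.getsize(path) == 1024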
|
def make_ranges(self, file_url):
|
size = file_url.size
bytes_per_chunk = self.determine_bytes_per_chunk(size)
start = 0
ranges = []
while size > 0:
    amount = bytes_per_chunk
    if amount > size:
        amount = size
    ranges.append((start, start + amount - 1))
    start += amount
    size -= amount
return ranges
|
Divide file_url's size into an array of inclusive byte ranges to be downloaded by workers.
:param file_url: ProjectFileUrl: file url to download
:return: [(int,int)]: array of (start, end) tuples
|
f3925:c3:m8
|
def determine_bytes_per_chunk(self, size):
|
workers = self.settings.config.download_workers
if not workers or workers == 'None':
    workers = 1
bytes_per_chunk = int(math.ceil(size / float(workers)))
if bytes_per_chunk < self.bytes_per_chunk:
    bytes_per_chunk = self.bytes_per_chunk
return bytes_per_chunk
|
Calculate the chunk size a worker should download.
The last worker may download less than this depending on file size.
:param size: int: size of the file in bytes
:return: int: byte size for a worker
|
f3925:c3:m9
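A standalone sketch of the chunking math from make_ranges and determine_bytes_per_chunk combined; the worker count and minimum chunk size are illustrative values, not the library's configuration:

import math

def make_ranges(size, workers=4, min_bytes_per_chunk=100):
    # Spread the file across workers, never going below the minimum chunk size.
    bytes_per_chunk = max(int(math.ceil(size / float(workers))), min_bytes_per_chunk)
    ranges = []
    start = 0
    while size > 0:
        amount = min(bytes_per_chunk, size)
        ranges.append((start, start + amount - 1))
        start += amount
        size -= amount
    return ranges

print(make_ranges(1000))  # [(0, 249), (250, 499), (500, 749), (750, 999)]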
|
@staticmethod
def _assure_dir_exists(path):
|
if not os.path.exists(path):
    os.makedirs(path)
|
If path doesn't exist create it and any necessary parent directories.
:param path: str: path to a directory to create
|
f3925:c3:m10
|
def split_file_urls_by_size(self, size):
|
large_items = []
small_items = []
for file_url in self.file_urls:
    if file_url.size >= size:
        large_items.append(file_url)
    else:
        small_items.append(file_url)
return large_items, small_items
|
Return a tuple that contains a list of large files and a list of small files based on the size parameter.
:param size: int: size (in bytes) that determines if a file is large or small
:return: ([ProjectFileUrl],[ProjectFileUrl]): (large file urls, small file urls)
|
f3925:c3:m11
|
def check_downloaded_files_sizes(self):
|
for file_url in self.file_urls:
    local_path = file_url.get_local_path(self.dest_directory)
    self.check_file_size(file_url.size, local_path)
|
Make sure the file sizes are correct. Since we manually create the files this will only catch overruns.
Raises ValueError if there is a problematic file.
|
f3925:c3:m12
|
@staticmethod
def check_file_size(file_size, path):
|
stat_info = os.stat(path)
if stat_info.st_size != file_size:
    format_str = "<STR_LIT>"
    msg = format_str.format(path, stat_info.st_size, file_size)
    raise ValueError(msg)
|
Raise an error if we didn't get all of the file.
:param file_size: int: expected size of this file
:param path: str: path where we downloaded the file to
|
f3925:c3:m13
|
def __init__(self, settings, file_url, seek_amt, bytes_to_read):
|
self.settings = settings
self.file_url = file_url
self.seek_amt = seek_amt
self.bytes_to_read = bytes_to_read
self.func = download_file_part_run
|
Setup passing in all necessary data to download part of a file.
:param settings: DownloadSettings: project level info
:param file_url: ProjectFileUrl: file to download part of
:param seek_amt: int: offset to seek to before writing
:param bytes_to_read: int: number of bytes to download
|
f3925:c4:m0
|
def create_context(self, message_queue, task_id):
|
params = (self.settings.dest_directory, self.file_url.json_data, self.seek_amt, self.bytes_to_read)
return DownloadContext(self.settings, params, message_queue, task_id)
|
Create the context needed by download_file_part_run (DukeDS connection info and file part params).
:param message_queue: Queue: queue the background process can send messages to us on
:param task_id: int: id of this command's task so messages will be routed correctly
:return: DownloadContext
f3925:c4:m2
|
def get_url_and_headers_for_range(self, file_download):
|
headers = self.get_range_headers()
if file_download.http_headers:
    headers.update(file_download.http_headers)
separator = "<STR_LIT>"
if not file_download.url.startswith("/"):
    separator = "/"
url = '<STR_LIT>'.format(file_download.host, separator, file_download.url)
return url, headers
|
Return url and headers to use for downloading part of a file, adding range headers.
:param file_download: FileDownload: contains data about file we will download
:return: str, dict: url to download and headers to use
|
f3925:c5:m3
|
def download_chunk(self, url, headers):
|
response = requests.get(url, headers=headers, stream=True)
if response.status_code == SWIFT_EXPIRED_STATUS_CODE or response.status_code == S3_EXPIRED_STATUS_CODE:
    raise DownloadInconsistentError(response.text)
response.raise_for_status()
self.actual_bytes_read = 0
self._write_response_to_file(response)
self._verify_download_complete()
|
Download part of a file and write to our file
:param url: str: URL to download this file
:param headers: dict: headers used to download this file chunk
|
f3925:c5:m5
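A minimal standalone sketch of a ranged, streamed download with requests, matching the Range-header approach above; the file at outfile_path is assumed to already exist at full size (as prepared by make_big_empty_files), and error handling beyond raise_for_status is omitted:

import requests

def download_range(url, start, end, outfile_path, chunk_size=64 * 1024):
    # Ask the server for an inclusive byte range and stream it into place.
    headers = {"Range": "bytes={}-{}".format(start, end)}
    response = requests.get(url, headers=headers, stream=True)
    response.raise_for_status()
    bytes_read = 0
    with open(outfile_path, "r+b") as outfile:
        outfile.seek(start)
        for chunk in response.iter_content(chunk_size=chunk_size):
            if chunk:
                outfile.write(chunk)
                bytes_read += len(chunk)
    return bytes_read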
|
def _write_response_to_file(self, response):
|
with open(self.local_path, '<STR_LIT>') as outfile:
    outfile.seek(self.seek_amt)
    for chunk in response.iter_content(chunk_size=self.bytes_per_chunk):
        if chunk:
            outfile.write(chunk)
            self._on_bytes_read(len(chunk))
|
Write response to the appropriate section of the file at self.local_path.
:param response: requests.Response: response containing stream-able data
|
f3925:c5:m6
|
def _on_bytes_read(self, num_bytes_read):
|
self.actual_bytes_read += num_bytes_read
if self.actual_bytes_read > self.bytes_to_read:
    raise TooLargeChunkDownloadError(self.actual_bytes_read, self.bytes_to_read, self.local_path)
self.download_context.send_processed_message(num_bytes_read)
|
Record our progress so we can validate that we receive all the data
:param num_bytes_read: int: number of bytes we received as part of one chunk
|
f3925:c5:m7
|
def _verify_download_complete(self):
|
if self.actual_bytes_read > self.bytes_to_read:
    raise TooLargeChunkDownloadError(self.actual_bytes_read, self.bytes_to_read, self.local_path)
elif self.actual_bytes_read < self.bytes_to_read:
    raise PartialChunkDownloadError(self.actual_bytes_read, self.bytes_to_read, self.local_path)
|
Make sure we received all the data
|
f3925:c5:m8
|
def revert_progress(self):
|
undo_size = self.actual_bytes_read * -1
self.download_context.send_processed_message(undo_size)
|
Update the progress monitor with a negative number so it stays accurate after this failed download.
|
f3925:c5:m9
|
def _name_to_child_map(children):
|
name_to_child = {}
for child in children:
    name_to_child[child.name] = child
return name_to_child
|
Create a map of name to child based on a list.
:param children: [LocalFolder/LocalFile]: list of children
:return: map child.name -> child
|
f3926:m0
|
def _update_remote_children(remote_parent, children):
|
name_to_child = _name_to_child_map(children)
for remote_child in remote_parent.children:
    local_child = name_to_child.get(remote_child.name)
    if local_child:
        local_child.update_remote_ids(remote_child)
|
Update remote_ids of children by matching them to the remote parent's children by name.
:param remote_parent: RemoteProject/RemoteFolder that has children
:param children: [LocalFolder/LocalFile]: children to set remote_ids on based on remote children
|
f3926:m1
|
def _build_project_tree(path, followsymlinks, file_filter):
|
result = None
if os.path.isfile(path):
    result = LocalFile(path)
else:
    result = _build_folder_tree(os.path.abspath(path), followsymlinks, file_filter)
return result
|
Build a tree of LocalFolder with children, or just a LocalFile, based on a path.
:param path: str: path to a file or directory to walk
:param followsymlinks: bool: should we follow symlinks when walking
:param file_filter: FileFilter: include method returns True if we should include a file/folder
:return: the top node of the tree, LocalFile or LocalFolder
|
f3926:m2
|
def _build_folder_tree(top_abspath, followsymlinks, file_filter):
|
path_to_content = {}
child_to_parent = {}
ignore_file_patterns = IgnoreFilePatterns(file_filter)
ignore_file_patterns.load_directory(top_abspath, followsymlinks)
for dir_name, child_dirs, child_files in os.walk(top_abspath, followlinks=followsymlinks):
    abspath = os.path.abspath(dir_name)
    folder = LocalFolder(abspath)
    path_to_content[abspath] = folder
    parent_path = child_to_parent.get(abspath)
    if parent_path:
        path_to_content[parent_path].add_child(folder)
    remove_child_dirs = []
    for child_dir in child_dirs:
        abs_child_path = os.path.abspath(os.path.join(dir_name, child_dir))
        if ignore_file_patterns.include(abs_child_path, is_file=False):
            child_to_parent[abs_child_path] = abspath
        else:
            remove_child_dirs.append(child_dir)
    for remove_child_dir in remove_child_dirs:
        child_dirs.remove(remove_child_dir)
    for child_filename in child_files:
        abs_child_filename = os.path.join(dir_name, child_filename)
        if ignore_file_patterns.include(abs_child_filename, is_file=True):
            folder.add_child(LocalFile(abs_child_filename))
return path_to_content.get(top_abspath)
|
Build a tree of LocalFolder with children based on a path.
:param top_abspath: str path to a directory to walk
:param followsymlinks: bool should we follow symlinks when walking
:param file_filter: FileFilter: include method returns True if we should include a file/folder
:return: the top node of the tree LocalFolder
|
f3926:m3
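A standalone sketch of the in-place pruning pattern used above: removing entries from child_dirs while os.walk is running prevents descent into those directories (the hidden-file rule here is an illustrative stand-in for IgnoreFilePatterns):

import os

def walk_skipping_hidden(top):
    # Collect file paths, pruning hidden directories in place so
    # os.walk never descends into them.
    paths = []
    for dir_name, child_dirs, child_files in os.walk(top):
        child_dirs[:] = [d for d in child_dirs if not d.startswith(".")]
        for filename in child_files:
            if not filename.startswith("."):
                paths.append(os.path.join(dir_name, filename))
    return paths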
|
def __init__(self, followsymlinks, file_exclude_regex):
|
self.remote_id = '<STR_LIT>'
self.kind = KindType.project_str
self.children = []
self.sent_to_remote = False
self.followsymlinks = followsymlinks
self.file_filter = FileFilter(file_exclude_regex)
|
Creates a list of local file system content that can be sent to a remote project.
:param followsymlinks: bool follow symbolic links when looking for content
:param file_exclude_regex: str: regex that should be used to filter out files we do not want to upload
|
f3926:c0:m0
|
def add_path(self, path):
|
abspath = os.path.abspath(path)
self.children.append(_build_project_tree(abspath, self.followsymlinks, self.file_filter))
|
Add the path and any children files/folders to the list of content.
:param path: str path to add
|
f3926:c0:m1
|
def add_paths(self, path_list):
|
for path in path_list:
    self.add_path(path)
|
Add a list of paths to the list of content.
:param path_list: [str] list of file system paths
|
f3926:c0:m2
|
def update_remote_ids(self, remote_project):
|
if remote_project:
    self.remote_id = remote_project.id
    _update_remote_children(remote_project, self.children)
|
Compare against remote_project, saving off the uuids of matching content.
:param remote_project: RemoteProject project to compare against
|
f3926:c0:m3
|
def set_remote_id_after_send(self, remote_id):
|
self.remote_id = remote_id
self.sent_to_remote = True
|
Save remote_id after creating on remote side.
:param remote_id: str uuid of the project
|
f3926:c0:m4
|
def __init__(self, path):
|
self.path = os.path.abspath(path)
self.name = os.path.basename(self.path)
self.children = []
self.remote_id = '<STR_LIT>'
self.is_file = False
self.kind = KindType.folder_str
self.sent_to_remote = False
|
Setup folder based on a path.
:param path: str path to filesystem directory
|
f3926:c1:m0
|
def add_child(self, child):
|
self.children.append(child)
|
Add a child to this folder.
:param child: LocalFolder/LocalFile to add
|
f3926:c1:m1
|
def update_remote_ids(self, remote_folder):
|
self.remote_id = remote_folder.id
_update_remote_children(remote_folder, self.children)
|
Set remote id based on remote_folder and check children against this folder's children.
:param remote_folder: RemoteFolder to compare against
|
f3926:c1:m2
|
def set_remote_id_after_send(self, remote_id):
|
self.sent_to_remote = True
self.remote_id = remote_id
|
Set remote id after we sent this folder to a remote store.
:param remote_id: str uuid of this folder created on remote store
|
f3926:c1:m3
|
def __init__(self, path):
|
self.path = os.path.abspath(path)
self.path_data = PathData(self.path)
self.name = self.path_data.name()
self.size = self.path_data.size()
self.mimetype = self.path_data.mime_type()
self.need_to_send = True
self.remote_id = '<STR_LIT>'
self.is_file = True
self.kind = KindType.file_str
self.sent_to_remote = False
|
Setup file based on filesystem path.
:param path: path to a file on the filesystem
|
f3926:c2:m0
|
def get_path_data(self):
|
return self.path_data
|
Return PathData created from internal path.
|
f3926:c2:m1
|
def get_hash_value(self):
|
return self.path_data.get_hash().value
|
Return the current hash value for our path.
:return: str: hash value
|
f3926:c2:m2
|
def update_remote_ids(self, remote_file):
|
self.remote_id = remote_file.id
hash_data = self.path_data.get_hash()
if hash_data.matches(remote_file.hash_alg, remote_file.file_hash):
    self.need_to_send = False
|
Based on a remote file, assign our remote_id and compare hash info to decide if we need to send.
:param remote_file: RemoteFile: remote data to pull remote_id from
|
f3926:c2:m3
|
def set_remote_id_after_send(self, remote_id):
|
self.sent_to_remote = True
self.remote_id = remote_id
|
Set remote_id to specific value after this file has been sent to remote store.
:param remote_id: str uuid of the file in the remote store
|
f3926:c2:m4
|
def count_chunks(self, bytes_per_chunk):
|
chunks = math.ceil(float(self.size) / float(bytes_per_chunk))
return max(chunks, 1)
|
Based on the size of the file determine how many chunks we will need to upload.
For empty files 1 chunk is returned (DukeDS requires an empty chunk for empty files).
:param bytes_per_chunk: int: chunk size in bytes to split the file into
:return: int: number of chunks that will need to be sent
|
f3926:c2:m5
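A quick standalone check of the chunk-count arithmetic, including the empty-file case; the chunk size is an illustrative value:

import math

def count_chunks(size, bytes_per_chunk):
    # Round up, but always report at least one chunk
    # (empty files still require one empty chunk).
    return max(int(math.ceil(float(size) / float(bytes_per_chunk))), 1)

assert count_chunks(0, 100) == 1    # empty file -> one empty chunk
assert count_chunks(100, 100) == 1  # exact fit
assert count_chunks(101, 100) == 2  # remainder needs another chunk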
|
def __init__(self, hash_util):
|
alg, value = hash_util.hexdigest()
self.alg = alg
self.value = value
|
Create hash info from hash_util with data already loaded.
:param hash_util: HashUtil with data populated
|
f3926:c3:m0
|
def matches(self, hash_alg, hash_value):
|
return self.alg == hash_alg and self.value == hash_value
|
Does our algorithm and hash value match the specified arguments.
:param hash_alg: str: hash algorithm
:param hash_value: str: hash value
:return: boolean
|
f3926:c3:m1
|
@staticmethod
def create_from_path(path):
|
hash_util = HashUtil()
hash_util.add_file(path)
return HashData(hash_util)
|
Hash the local file at path and return HashData with results.
:param path: str: path to file we will hash
:return: HashData: hash alg and value
|
f3926:c3:m2
|
@staticmethod
def create_from_chunk(chunk):
|
hash_util = HashUtil()
hash_util.add_chunk(chunk)
return HashData(hash_util)
|
Hash chunk and return HashData with results.
:param chunk: bytes/str: data to hash
:return: HashData: hash alg and value
|
f3926:c3:m3
|
def __init__(self, path):
|
self.path = path
|
Setup with path pointing to existing file.
:param path: str: path
|
f3926:c4:m0
|
def name(self):
|
return os.path.basename(self.path)
|
Get the name portion of the file (remove the directory).
:return: str: filename
|
f3926:c4:m1
|
def mime_type(self):
|
mime_type, encoding = mimetypes.guess_type(self.path)
if not mime_type:
    mime_type = '<STR_LIT>'
return mime_type
|
Guess the mimetype of a file or 'application/octet-stream' if unable to guess.
:return: str: mimetype
|
f3926:c4:m2
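A quick standalone check of the fallback behavior, since mimetypes.guess_type returns (None, None) when it cannot guess; the filenames are illustrative:

import mimetypes

for path in ("notes.txt", "archive.tar.gz", "mystery.xyz123"):
    mime_type, encoding = mimetypes.guess_type(path)
    print(path, mime_type or "application/octet-stream", encoding)
# notes.txt text/plain None
# archive.tar.gz application/x-tar gzip
# mystery.xyz123 application/octet-stream None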
|
def size(self):
|
return os.path.getsize(self.path)
|
Return the file size.
:return: int: size of file
|
f3926:c4:m3
|
def get_hash(self):
|
return HashData.create_from_path(self.path)
|
Create HashData for the file
:return: HashData: alg and value of contents of the file
|
f3926:c4:m4
|
def read_whole_file(self):
|
chunk = None
with open(self.path, 'rb') as infile:
    chunk = infile.read()
return chunk
|
Slurp the whole file into memory.
Should only be used with relatively small files.
:return: bytes/str: file contents
|
f3926:c4:m5
|
def add_file(self, filename, block_size=<NUM_LIT>):
|
with open(filename, "rb") as f:
    for chunk in iter(lambda: f.read(block_size), b"<STR_LIT>"):
        self.hash.update(chunk)
|
Add an entire file to this hash.
:param filename: str filename of the file to hash
:param block_size: int size of chunks when reading the file
|
f3926:c5:m1
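A standalone sketch of the two-argument iter read loop above, hashing a file in fixed-size blocks so large files never have to fit in memory; the md5 algorithm and block size are illustrative choices, since the library's own values are masked in this dump:

import hashlib

def hash_file(filename, block_size=64 * 1024):
    # f.read returns b"" at EOF, which stops the two-argument iter.
    digest = hashlib.md5()
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(block_size), b""):
            digest.update(chunk)
    return "md5", digest.hexdigest()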
|
def add_chunk(self, chunk):
|
self.hash.update(chunk)
|
Add a single block of memory to the hash.
:param chunk: bytes/str: data to hash
|
f3926:c5:m2
|
def hexdigest(self):
|
return HashUtil.HASH_NAME, self.hash.hexdigest()
|
Return a hash pair.
:return: (str,str) -> (algorithm, value)
|
f3926:c5:m3
|
def __init__(self, config, data_service=None):
|
self.config = config
if data_service:
    self.data_service = data_service
else:
    auth = DataServiceAuth(self.config)
    self.data_service = DataServiceApi(auth, self.config.url)
|
Setup to allow fetching project tree.
:param config: ddsc.config.Config settings to use for connecting to the dataservice.
:param data_service: DataServiceApi: optional param to specify an existing DataServiceApi object
|
f3927:c0:m0
|
def fetch_remote_project(self, project_name_or_id, must_exist=False, include_children=True):
|
project = self._get_my_project(project_name_or_id)
if project:
    if include_children:
        self._add_project_children(project, PROJECT_LIST_EXCLUDE_RESPONSE_FIELDS)
else:
    if must_exist:
        project_description = project_name_or_id.description()
        raise NotFoundError(u'<STR_LIT>'.format(project_description))
return project
|
Retrieve the project via project name or id.
:param project_name_or_id: ProjectNameOrId: name or id of the project to fetch
:param must_exist: bool: should we raise an error if the project doesn't exist
:param include_children: bool: should we read children (folders/files)
:return: RemoteProject: project requested or None if not found (and must_exist=False)
|
f3927:c0:m1
|
def fetch_remote_project_by_id(self, id):
|
response = self.data_service.get_project_by_id(id).json()
return RemoteProject(response)
|
Retrieve a project via its id.
:param id: str: id of project from data service
:return: RemoteProject we downloaded
|
f3927:c0:m2
|
def _get_my_project(self, project_name_or_id):
|
response = self.data_service.get_projects().json()
for project in response['<STR_LIT>']:
    if project_name_or_id.contained_in_dict(project):
        return RemoteProject(project)
return None
|
Return project tree root for project_name_or_id.
:param project_name_or_id: ProjectNameOrId name or id of the project to lookup
:return: RemoteProject project we found or None
|
f3927:c0:m3
|
def _add_project_children(self, project, exclude_response_fields=None):
|
response = self.data_service.get_project_children(project.id, '<STR_LIT>', exclude_response_fields).json()
project_children = RemoteProjectChildren(project.id, response['<STR_LIT>'])
for child in project_children.get_tree():
    project.add_child(child)
|
Add the rest of the project tree from the remote store to the project object.
:param project: RemoteProject: root of the project tree to add children to
:param exclude_response_fields: [str]: list of fields to exclude in the children response items
|
f3927:c0:m4
|
def lookup_or_register_user_by_email_or_username(self, email, username):
|
if username:
    return self.get_or_register_user_by_username(username)
else:
    return self.lookup_user_by_email(email)
|
Lookup user by email or username. Only fill in one field.
For username it will try to register if not found.
:param email: str: email address of the user
:param username: str: netid of the user to find
:return: RemoteUser
|
f3927:c0:m5
|
def lookup_user_by_name(self, full_name):
|
res = self.data_service.get_users_by_full_name(full_name)
json_data = res.json()
results = json_data['<STR_LIT>']
found_cnt = len(results)
if found_cnt == 0:
    raise NotFoundError("<STR_LIT>" + full_name)
elif found_cnt > 1:
    raise ValueError("<STR_LIT>" + full_name)
user = RemoteUser(results[0])
if user.full_name.lower() != full_name.lower():
    raise NotFoundError("<STR_LIT>" + full_name)
return user
|
Query remote store for a single user with the name full_name or raise an error.
:param full_name: str: user's full name separated by a space
:return: RemoteUser user info for single user with full_name
|
f3927:c0:m6
|
def lookup_user_by_username(self, username):
|
matches = self.fetch_users(username=username)
if not matches:
    raise NotFoundError('<STR_LIT>'.format(username))
if len(matches) > 1:
    raise ValueError('<STR_LIT>'.format(username))
return matches[0]
|
Find the single user who has this username; raises NotFoundError if none, ValueError if more than one.
:param username: str username we are looking for
:return: RemoteUser: user we found
|
f3927:c0:m7
|
def get_or_register_user_by_username(self, username):
|
try:
    return self.lookup_user_by_username(username)
except NotFoundError:
    return self.register_user_by_username(username)
|
Try to lookup user by username. If not found try registering the user.
:param username: str: username to lookup
:return: RemoteUser: user we found
|
f3927:c0:m8
|
def register_user_by_username(self, username):
|
current_providers = [prov.id for prov in self.get_auth_providers() if not prov.is_deprecated]
if not current_providers:
    raise ValueError("<STR_LIT>")
auth_provider_id = current_providers[0]
return self._register_user_by_username(auth_provider_id, username)
|
Tries to register user with the first non-deprecated auth provider.
Raises ValueError if the data service doesn't have any non-deprecated providers.
:param username: str: netid of the user we are trying to register
:return: RemoteUser: user that was created for our netid
|
f3927:c0:m9
|
def _register_user_by_username(self, auth_provider_id, username):
|
user_json = self.data_service.auth_provider_add_user(auth_provider_id, username).json()
return RemoteUser(user_json)
|
Tries to register a user who has a valid netid but isn't registered with DukeDS yet under auth_provider_id.
:param auth_provider_id: str: id from RemoteAuthProvider to use for registering
:param username: str: netid of the user we are trying to register
:return: RemoteUser: user that was created for our netid
|
f3927:c0:m10
|
def get_auth_providers(self):
|
providers = []
response = self.data_service.get_auth_providers().json()
for data in response['<STR_LIT>']:
    providers.append(RemoteAuthProvider(data))
return providers
|
Return the list of authorization providers.
:return: [RemoteAuthProvider]: list of remote auth providers
|
f3927:c0:m11
|
def lookup_user_by_email(self, email):
|
matches = self.fetch_users(email=email)
if not matches:
    raise NotFoundError('<STR_LIT>'.format(email))
if len(matches) > 1:
    raise ValueError('<STR_LIT>'.format(email))
return matches[0]
|
Find the single user who has this email; raises NotFoundError if none, ValueError if more than one.
:param email: str email we are looking for
:return: RemoteUser user we found
|
f3927:c0:m12
|
def get_current_user(self):
|
response = self.data_service.get_current_user().json()
return RemoteUser(response)
|
Fetch info about the current user.
:return: RemoteUser: user we are logged in as (auth determines this)
|
f3927:c0:m13
|
def fetch_users(self, email=None, username=None):
|
users = []
result = self.data_service.get_users(email=email, username=username)
user_list_json = result.json()
for user_json in user_list_json['<STR_LIT>']:
    users.append(RemoteUser(user_json))
return users
|
Retrieves users with optional email and/or username filtering from data service.
:param email: str: optional email to filter by
:param username: str: optional username to filter by
:return: [RemoteUser] list of all users we downloaded
|
f3927:c0:m14
|
def fetch_user(self, id):
|
response = self.data_service.get_user_by_id(id).json()
return RemoteUser(response)
|
Retrieves user from data service having a specific id
:param id: str id of user from data service
:return: RemoteUser user we downloaded
|
f3927:c0:m15
|
def set_user_project_permission(self, project, user, auth_role):
|
self.data_service.set_user_project_permission(project.id, user.id, auth_role)
|
Update remote store giving user auth_role permissions on project.
:param project: RemoteProject: project to give permissions on
:param user: RemoteUser: user who we are giving permissions to
:param auth_role: str: type of authorization to give the user (e.g. project_admin)
|
f3927:c0:m16
|
def revoke_user_project_permission(self, project, user):
|
try:
    self.data_service.get_user_project_permission(project.id, user.id)
    self.data_service.revoke_user_project_permission(project.id, user.id)
except DataServiceError as e:
    if e.status_code != <NUM_LIT>:
        raise
|
Update remote store for user, removing their permissions on project.
:param project: RemoteProject: project to remove permissions from
:param user: RemoteUser: user whose permissions we are removing
|
f3927:c0:m17
|
def download_file(self, remote_file, path, watcher):
|
url_json = self.data_service.get_file_url(remote_file.id).json()
http_verb = url_json['<STR_LIT>']
host = url_json['host']
url = url_json['url']
http_headers = url_json['<STR_LIT>']
response = self.data_service.receive_external(http_verb, host, url, http_headers)
with open(path, 'wb') as f:
    for chunk in response.iter_content(chunk_size=DOWNLOAD_FILE_CHUNK_SIZE):
        if chunk:
            f.write(chunk)
            watcher.transferring_item(remote_file, increment_amt=len(chunk))
|
Download a remote file associated with the remote uuid (file_id) into local path.
:param remote_file: RemoteFile: file to retrieve
:param path: str: file system path to save the contents to
:param watcher: object implementing transferring_item(item, increment_amt) that updates the UI
|
f3927:c0:m18
|
def get_project_names(self):
|
names = []
response = self.data_service.get_projects().json()
for project in response['<STR_LIT>']:
    names.append(project['name'])
return names
|
Return a list of names of the remote projects owned by this user.
:return: [str]: the list of project names
|
f3927:c0:m19
|
def get_projects_details(self):
|
return self.data_service.get_projects().json()['<STR_LIT>']
|
Return a list of top-level details for all projects.
|
f3927:c0:m20
|
def get_projects_with_auth_role(self, auth_role):
|
user = self.get_current_user()
projects = []
response = self.data_service.get_projects().json()
for project in response['<STR_LIT>']:
    project_id = project['id']
    permissions = self.data_service.get_user_project_permission(project_id, user.id).json()
    if auth_role == permissions['<STR_LIT>']['id']:
        projects.append(project)
return projects
|
Return the projects, among those the current user can access, where the user has the specified auth role.
:param auth_role: str: auth role we are filtering for
:return: [dict]: list of projects that have auth_role permissions for the current user
|
f3927:c0:m21
|
def delete_project(self, project_name_or_id):
|
project = self._get_my_project(project_name_or_id)
if project:
    self.data_service.delete_project(project.id)
else:
    raise ValueError("<STR_LIT>".format(project_name_or_id.description()))
|
Find the project with project_name_or_id and delete it, raising an error if not found.
:param project_name_or_id: ProjectNameOrId: name or id of the project we want to be deleted
|
f3927:c0:m22
|
def get_active_auth_roles(self, context):
|
response = self.data_service.get_auth_roles(context).json()
return self.get_active_auth_roles_from_json(response)
|
Retrieve non-deprecated authorization roles based on a context.
Context should be RemoteAuthRole.PROJECT_CONTEXT or RemoteAuthRole.SYSTEM_CONTEXT.
:param context: str: context for which auth roles to retrieve
:return: [RemoteAuthRole]: list of active auth_role objects
|
f3927:c0:m23
|
@staticmethod
def get_active_auth_roles_from_json(json_data):
|
result = []
for auth_role_properties in json_data['<STR_LIT>']:
    auth_role = RemoteAuthRole(auth_role_properties)
    if not auth_role.is_deprecated:
        result.append(auth_role)
return result
|
Given a JSON response containing a list of authorization roles, return the active ones
as an array of RemoteAuthRole objects.
:param json_data: dict: data from DDS in auth_role format
:return: [RemoteAuthRole] list of active auth_role objects
|
f3927:c0:m24
|
def get_project_files(self, project):
|
files = []
result = self.data_service.get_project_files(project.id)
files_json = result.json()
for file_json in files_json['<STR_LIT>']:
    files.append(ProjectFile(file_json))
return files
|
Returns a list of project files (files in the project including their download links)
:param project: RemoteProject
:return: [ProjectFile]: files in the specified project
|
f3927:c0:m25
|
def get_file_url(self, file_id):
|
return RemoteFileUrl(self.data_service.get_file_url(file_id).json())
|
Given a file id return the RemoteFileUrl (file download url)
:param file_id: str: DukeDS file uuid
:return: RemoteFileUrl
|
f3927:c0:m26
|
def __init__(self, json_data):
|
self.id = json_data['id']
self.kind = json_data['<STR_LIT>']
self.name = json_data['name']
self.description = json_data['description']
self.is_deleted = json_data['<STR_LIT>']
self.children = []
self.remote_path = '<STR_LIT>'
|
Set properties based on json_data.
:param json_data: dict JSON data containing project info
|
f3927:c1:m0
|
def add_child(self, child):
|
self.children.append(child)
|
Add a file or folder to our remote project.
:param child: RemoteFolder/RemoteFile child to add.
|
f3927:c1:m1
|
def get_project_name_or_id(self):
|
return ProjectNameOrId.create_from_remote_project(self)
|
:return: ProjectNameOrId: contains key of id
|
f3927:c1:m2
|
def __init__(self, json_data, parent_remote_path):
|
self.id = json_data['id']
self.kind = json_data['<STR_LIT>']
self.name = json_data['name']
self.is_deleted = json_data['<STR_LIT>']
self.children = []
self.remote_path = os.path.join(parent_remote_path, self.name)
|
Set properties based on json_data.
:param json_data: dict JSON data containing folder info
:param parent_remote_path: str: remote path to this folder's parent
|
f3927:c2:m0
|
def add_child(self, child):
|
self.children.append(child)
|
Add remote file or folder to this folder.
:param child: RemoteFolder or RemoteFile to add
|
f3927:c2:m1
|
def __init__(self, json_data, parent_remote_path):
|
self.id = json_data['id']
self.file_version_id = json_data['<STR_LIT>']['id']
self.kind = json_data['<STR_LIT>']
self.name = json_data['name']
self.path = self.name
self.is_deleted = json_data['<STR_LIT>']
upload = RemoteFile.get_upload_from_json(json_data)
self.size = upload['size']
self.file_hash = None
self.hash_alg = None
hash_data = RemoteFile.get_hash_from_upload(upload)
if hash_data:
    self.file_hash = hash_data.get('value')
    self.hash_alg = hash_data.get('<STR_LIT>')
self.remote_path = os.path.join(parent_remote_path, self.name)
|
Set properties based on json_data.
:param json_data: dict JSON data containing file info
:param parent_remote_path: str: remote path to this file's parent
|
f3927:c3:m0
|