code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
server = self._servers.get(server_name) return server.get_messages_count_in_buffer()
def get_server_unread_messages_count(self, server_name=None)
Gets count of unread messages from server
6.535281
5.928357
1.102376
if not val.startswith('-'): return to_int(val) value = _invert(to_bin_str_from_int_string(bits, bin(to_int(val[1:])))) return int(value, 2) + 1
def to_twos_comp(val, bits)
compute the 2's compliment of int value val
7.532503
7.132182
1.056129
self.uint(1, name, value, align)
def u8(self, name, value=None, align=None)
Add an unsigned 1 byte integer field to template. This is an convenience method that simply calls `Uint` keyword with predefined length.
9.7331
7.193233
1.353091
self.uint(2, name, value, align)
def u16(self, name, value=None, align=None)
Add an unsigned 2 byte integer field to template. This is an convenience method that simply calls `Uint` keyword with predefined length.
10.078196
7.30392
1.379834
self.uint(3, name, value, align)
def u24(self, name, value=None, align=None)
Add an unsigned 3 byte integer field to template. This is an convenience method that simply calls `Uint` keyword with predefined length.
10.010938
6.731399
1.4872
self.uint(4, name, value, align)
def u32(self, name, value=None, align=None)
Add an unsigned 4 byte integer field to template. This is an convenience method that simply calls `Uint` keyword with predefined length.
9.09144
6.801206
1.336739
self.uint(5, name, value, align)
def u40(self, name, value=None, align=None)
Add an unsigned 5 byte integer field to template. This is an convenience method that simply calls `Uint` keyword with predefined length.
11.076948
6.556525
1.689454
self.uint(8, name, value, align)
def u64(self, name, value=None, align=None)
Add an unsigned 8 byte integer field to template. This is an convenience method that simply calls `Uint` keyword with predefined length.
9.34686
6.958958
1.343141
self.uint(16, name, value, align)
def u128(self, name, value=None, align=None)
Add an unsigned 16 byte integer field to template. This is an convenience method that simply calls `Uint` keyword with predefined length.
8.004209
5.94874
1.34553
self.int(1, name, value, align)
def i8(self, name, value=None, align=None)
Add an 1 byte integer field to template. This is an convenience method that simply calls `Int` keyword with predefined length.
9.549657
8.096901
1.179421
self.int(4, name, value, align)
def i32(self, name, value=None, align=None)
Add an 32 byte integer field to template. This is an convenience method that simply calls `Int` keyword with predefined length.
9.18522
8.193423
1.121048
self._new_list(size, name) BuiltIn().run_keyword(type, '', *parameters) self._end_list()
def array(self, size, type, name, *parameters)
Define a new array of given `size` and containing fields of type `type`. `name` if the name of this array element. The `type` is the name of keyword that is executed as the contents of the array and optional extra parameters are passed as arguments to this keyword. Examples: | Array | 8 | u16 | myArray | | u32 | length | | Array | length | someStruct | myArray | <argument for someStruct> |
9.277406
21.012222
0.441524
self.new_struct('Container', name, 'length=%s' % length) BuiltIn().run_keyword(type, *parameters) self.end_struct()
def container(self, name, length, type, *parameters)
Define a container with given length. This is a convenience method creating a `Struct` with `length` containing fields defined in `type`.
7.51222
9.714873
0.77327
# TODO: check we are inside a bag! self._start_bag_case(size) BuiltIn().run_keyword(kw, *parameters) self._end_bag_case()
def case(self, size, kw, *parameters)
An element inside a bag started with `Start Bag`. The first argument is size which can be absolute value like `1`, a range like `0-3`, or just `*` to accept any number of elements. Examples: | Start bag | intBag | | case | 0-1 | u8 | foo | 42 | | case | 0-2 | u8 | bar | 1 | | End bag |
8.490276
8.630988
0.983697
test_name = BuiltIn().replace_variables('${TEST NAME}') outputdir = BuiltIn().replace_variables('${OUTPUTDIR}') path = os.path.join(outputdir, test_name + '.seqdiag') SeqdiagGenerator().compile(path, self._message_sequence)
def embed_seqdiag_sequence(self)
Create a message sequence diagram png file to output folder and embed the image to log file. You need to have seqdiag installed to create the sequence diagram. See http://blockdiag.com/en/seqdiag/
7.326994
5.721775
1.280546
originalValue = originalValue[0] calculatedValue = calculatedValue[0] # error is zero if not originalValue and not calculatedValue: return 0.0 return abs(calculatedValue - originalValue)/ ((abs(originalValue) + abs(calculatedValue))/2) * 100
def local_error(self, originalValue, calculatedValue)
Calculates the error between the two given values. :param list originalValue: List containing the values of the original data. :param list calculatedValue: List containing the values of the calculated TimeSeries that corresponds to originalValue. :return: Returns the error measure of the two given values. :rtype: numeric
3.416266
4.417773
0.7733
injected_parts = '' for part in self.parts: injected = part.tostring(inject) tei_tag = next( (attribute for attribute in part.attributes if attribute.key == "tei-tag"), None) if tei_tag and tei_tag.text == "w" and injected_parts: # make sure words can be tokenized correctly if injected_parts and injected_parts[-1] != ' ': injected_parts += ' ' injected_parts += injected.strip() + ' ' else: injected_parts += injected return inject(self, injected_parts)
def tostring(self, inject)
Convert an element to a single string and allow the passed inject method to place content before any element.
4.16238
3.907511
1.065225
parser = CommandParser(get_internal_version_str()) parser.register_list_command(self._setup_run_command(ListCommand)) parser.register_upload_command(self._setup_run_command(UploadCommand)) parser.register_add_user_command(self._setup_run_command(AddUserCommand)) parser.register_remove_user_command(self._setup_run_command(RemoveUserCommand)) parser.register_download_command(self._setup_run_command(DownloadCommand)) parser.register_share_command(self._setup_run_command(ShareCommand)) parser.register_deliver_command(self._setup_run_command(DeliverCommand)) parser.register_delete_command(self._setup_run_command(DeleteCommand)) parser.register_list_auth_roles_command(self._setup_run_command(ListAuthRolesCommand)) return parser
def _create_parser(self)
Create a parser hooking up the command methods below to be run when chosen. :return: CommandParser parser with commands attached.
2.210933
2.180381
1.014012
try: check_version() except VersionException as err: print(str(err), file=sys.stderr) time.sleep(TWO_SECONDS)
def _check_pypi_version(self)
When the version is out of date or we have trouble retrieving it print a error to stderr and pause.
7.742011
4.745362
1.63149
verify_terminal_encoding(sys.stdout.encoding) self._check_pypi_version() config = create_config(allow_insecure_config_file=args.allow_insecure_config_file) self.show_error_stack_trace = config.debug_mode command = command_constructor(config) command.run(args)
def _run_command(self, command_constructor, args)
Run command_constructor and call run(args) on the resulting object :param command_constructor: class of an object that implements run(args) :param args: object arguments for specific command created by CommandParser
7.153402
7.398435
0.966881
to_users = [] remaining_emails = [] if not emails else list(emails) remaining_usernames = [] if not usernames else list(usernames) for user in self.remote_store.fetch_users(): if user.email in remaining_emails: to_users.append(user) remaining_emails.remove(user.email) elif user.username in remaining_usernames: to_users.append(user) remaining_usernames.remove(user.username) if remaining_emails or remaining_usernames: unable_to_find_users = ','.join(remaining_emails + remaining_usernames) msg = "Unable to find users for the following email/usernames: {}".format(unable_to_find_users) raise ValueError(msg) return to_users
def make_user_list(self, emails, usernames)
Given a list of emails and usernames fetch DukeDS user info. Parameters that are None will be skipped. :param emails: [str]: list of emails (can be null) :param usernames: [str]: list of usernames(netid) :return: [RemoteUser]: details about any users referenced the two parameters
2.131612
2.169607
0.982488
project_name_or_id = self.create_project_name_or_id_from_args(args) folders = args.folders # list of local files/folders to upload into the project follow_symlinks = args.follow_symlinks # should we follow symlinks when traversing folders dry_run = args.dry_run # do not upload anything, instead print out what you would upload project_upload = ProjectUpload(self.config, project_name_or_id, folders, follow_symlinks=follow_symlinks) if dry_run: print(project_upload.dry_run_report()) else: print(project_upload.get_differences_summary()) if project_upload.needs_to_upload(): project_upload.run() print('\n') print(project_upload.get_upload_report()) print('\n') print(project_upload.get_url_msg())
def run(self, args)
Upload contents of folders to a project with project_name on remote store. If follow_symlinks we will traverse symlinked directories. If content is already on remote site it will not be sent. :param args: Namespace arguments parsed from the command line.
3.460133
3.153376
1.097279
project_name_or_id = self.create_project_name_or_id_from_args(args) folder = args.folder # path to a folder to download data into # Default to project name with spaces replaced with '_' if not specified if not folder: folder = replace_invalid_path_chars(project_name_or_id.value.replace(' ', '_')) destination_path = format_destination_path(folder) path_filter = PathFilter(args.include_paths, args.exclude_paths) project = self.fetch_project(args, must_exist=True) project_download = ProjectDownload(self.remote_store, project, destination_path, path_filter) project_download.run()
def run(self, args)
Download a project based on passed in args. :param args: Namespace arguments parsed from the command line.
4.993935
4.702271
1.062026
email = args.email # email of person to give permissions, will be None if username is specified username = args.username # username of person to give permissions, will be None if email is specified auth_role = args.auth_role # type of permission(project_admin) project = self.fetch_project(args, must_exist=True, include_children=False) user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username) self.remote_store.set_user_project_permission(project, user, auth_role) print(u'Gave user {} {} permissions for project {}.'.format(user.full_name, auth_role, project.name))
def run(self, args)
Give the user with user_full_name the auth_role permissions on the remote project with project_name. :param args Namespace arguments parsed from the command line
4.823104
4.120839
1.170418
email = args.email # email of person to remove permissions from (None if username specified) username = args.username # username of person to remove permissions from (None if email is specified) project = self.fetch_project(args, must_exist=True, include_children=False) user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username) self.remote_store.revoke_user_project_permission(project, user) print(u'Removed permissions from user {} for project {}.'.format(user.full_name, project.name))
def run(self, args)
Remove permissions from the user with user_full_name or email on the remote project with project_name. :param args Namespace arguments parsed from the command line
4.829822
4.137706
1.16727
email = args.email # email of person to send email to username = args.username # username of person to send email to, will be None if email is specified force_send = args.resend # is this a resend so we should force sending auth_role = args.auth_role # authorization role(project permissions) to give to the user msg_file = args.msg_file # message file who's contents will be sent with the share message = read_argument_file_contents(msg_file) print("Sharing project.") to_user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username) try: project = self.fetch_project(args, must_exist=True, include_children=False) dest_email = self.service.share(project, to_user, force_send, auth_role, message) print("Share email message sent to " + dest_email) except D4S2Error as ex: if ex.warning: print(ex.message) else: raise
def run(self, args)
Gives user permission based on auth_role arg and sends email to that user. :param args Namespace arguments parsed from the command line
6.164611
5.723034
1.077158
email = args.email # email of person to deliver to, will be None if username is specified username = args.username # username of person to deliver to, will be None if email is specified copy_project = args.copy_project # should we deliver a copy of the project force_send = args.resend # is this a resend so we should force sending msg_file = args.msg_file # message file who's contents will be sent with the delivery share_usernames = args.share_usernames # usernames who will have this project shared once it is accepted share_emails = args.share_emails # emails of users who will have this project shared once it is accepted message = read_argument_file_contents(msg_file) project = self.fetch_project(args, must_exist=True, include_children=False) share_users = self.make_user_list(share_emails, share_usernames) print("Delivering project.") new_project_name = None if copy_project: new_project_name = self.get_new_project_name(project.name) to_user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username) try: path_filter = PathFilter(args.include_paths, args.exclude_paths) dest_email = self.service.deliver(project, new_project_name, to_user, share_users, force_send, path_filter, message) print("Delivery email message sent to " + dest_email) except D4S2Error as ex: if ex.warning: print(ex.message) else: raise
def run(self, args)
Begins process that will transfer the project to another user. Send delivery message to D4S2 service specifying a project and a user. When user accepts delivery they receive access and we lose admin privileges. :param args Namespace arguments parsed from the command line
4.427431
3.976667
1.113352
timestamp_str = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M') return "{} {}".format(project_name, timestamp_str)
def get_new_project_name(self, project_name)
Return a unique project name for the copy. :param project_name: str: name of project we will copy :return: str
3.86268
4.449745
0.868068
long_format = args.long_format # project_name and auth_role args are mutually exclusive if args.project_name or args.project_id: project = self.fetch_project(args, must_exist=True, include_children=True) self.print_project_details(project, long_format) else: self.print_project_list_details(args.auth_role, long_format)
def run(self, args)
Lists project names. :param args Namespace arguments parsed from the command line
4.577225
4.901262
0.933887
if filter_auth_role: projects_details = self.remote_store.get_projects_with_auth_role(auth_role=filter_auth_role) else: projects_details = self.remote_store.get_projects_details() if projects_details: for projects_detail in projects_details: print(self.get_project_info_line(projects_detail, long_format)) else: print(NO_PROJECTS_FOUND_MESSAGE)
def print_project_list_details(self, filter_auth_role, long_format)
Prints project names to stdout for all projects or just those with the specified auth_role :param filter_auth_role: str: optional auth_role to filter project list
2.469126
2.575125
0.958837
project = self.fetch_project(args, must_exist=True, include_children=False) if not args.force: delete_prompt = "Are you sure you wish to delete {} (y/n)?".format(project.name) if not boolean_input_prompt(delete_prompt): return self.remote_store.delete_project(self.create_project_name_or_id_from_args(args))
def run(self, args)
Deletes a single project specified by project_name in args. :param args Namespace arguments parsed from the command line
5.090596
4.804492
1.059549
auth_roles = self.remote_store.get_active_auth_roles(RemoteAuthRole.PROJECT_CONTEXT) if auth_roles: for auth_role in auth_roles: print(auth_role.id, "-", auth_role.description) else: print("No authorization roles found.")
def run(self, args)
Prints out non deprecated project-type auth roles. :param args Namespace arguments parsed from the command line
6.29502
4.97483
1.265374
cert_paths = environ.get("TXAWS_CERTS_PATH", DEFAULT_CERTS_PATH).split(":") certificate_authority_map = {} for path in cert_paths: if not path: continue for cert_file_name in glob(os.path.join(path, "*.pem")): # There might be some dead symlinks in there, so let's make sure # it's real. if not os.path.exists(cert_file_name): continue cert_file = open(cert_file_name) data = cert_file.read() cert_file.close() x509 = load_certificate(FILETYPE_PEM, data) digest = x509.digest("sha1") # Now, de-duplicate in case the same cert has multiple names. certificate_authority_map[digest] = x509 values = certificate_authority_map.values() if len(values) == 0: raise exception.CertsNotFoundError("Could not find any .pem files.") return values
def get_ca_certs(environ=os.environ)
Retrieve a list of CAs at either the DEFAULT_CERTS_PATH or the env override, TXAWS_CERTS_PATH. In order to find .pem files, this function checks first for presence of the TXAWS_CERTS_PATH environment variable that should point to a directory containing cert files. In the absense of this variable, the module-level DEFAULT_CERTS_PATH will be used instead. Note that both of these variables have have multiple paths in them, just like the familiar PATH environment variable (separated by colons).
3.348873
2.937666
1.139977
# get the defined subset of error values errorValues = self._get_error_values(startingPercentage, endPercentage, startDate, endDate) errorValues = filter(lambda item: item is None, errorValues) if errorValues[0] is None: return 1.0 share = 1.0 / float(len(errorValues)) product = 1.0 for errorValue in errorValues: # never multiply with zero! if 0 == errorValue: continue product *= errorValue**share return product
def _calculate(self, startingPercentage, endPercentage, startDate, endDate)
This is the error calculation function that gets called by :py:meth:`BaseErrorMeasure.get_error`. Both parameters will be correct at this time. :param float startingPercentage: Defines the start of the interval. This has to be a value in [0.0, 100.0]. It represents the value, where the error calculation should be started. 25.0 for example means that the first 25% of all calculated errors will be ignored. :param float endPercentage: Defines the end of the interval. This has to be a value in [0.0, 100.0]. It represents the value, after which all error values will be ignored. 90.0 for example means that the last 10% of all local errors will be ignored. :param float startDate: Epoch representing the start date used for error calculation. :param float endDate: Epoch representing the end date used in the error calculation. :return: Returns a float representing the error. :rtype: float
4.485005
4.468203
1.00376
if time_tuple: return time.strftime("%Y-%m-%dT%H:%M:%SZ", time_tuple) else: return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def iso8601time(time_tuple)
Format time_tuple as a ISO8601 time string. :param time_tuple: Either None, to use the current time, or a tuple tuple.
1.723535
1.95912
0.879749
url = url.strip() parsed = urlparse(url) scheme = parsed[0] path = urlunparse(("", "") + parsed[2:]) host = parsed[1] if ":" in host: host, port = host.split(":") try: port = int(port) except ValueError: # A non-numeric port was given, it will be replaced with # an appropriate default value if defaultPort is True port = None else: port = None if port is None and defaultPort: if scheme == "https": port = 443 else: port = 80 if path == "": path = "/" return (str(scheme), str(host), port, str(path))
def parse(url, defaultPort=True)
Split the given URL into the scheme, host, port, and path. @type url: C{str} @param url: An URL to parse. @type defaultPort: C{bool} @param defaultPort: Whether to return the default port associated with the scheme in the given url, when the url doesn't specify one. @return: A four-tuple of the scheme, host, port, and path of the URL. All of these are C{str} instances except for port, which is an C{int}.
2.595608
2.85948
0.90772
return inject(self, '\n'.join(document.tostring(inject) for document in self.documents))
def tostring(self, inject)
Get the entire text content as str
11.042267
11.109397
0.993957
def retry_function(*args, **kwds): showed_status_msg = False status_watcher = args[0] while True: try: result = func(*args, **kwds) if showed_status_msg: status_watcher.set_status_message('') return result except DataServiceError as dse: if dse.status_code == 503: if not showed_status_msg: message = SERVICE_DOWN_MESSAGE.format(datetime.datetime.utcnow()) status_watcher.set_status_message(message) showed_status_msg = True time.sleep(SERVICE_DOWN_RETRY_SECONDS) else: raise return retry_function
def retry_when_service_down(func)
Decorator that will retry a function while it fails with status code 503 Assumes the first argument to the fuction will be an object with a set_status_message method. :param func: function: will be called until it doesn't fail with DataServiceError status 503 :return: value returned by func
2.605973
2.458415
1.060021
waiting = False while True: try: resp = func() if waiting and monitor: monitor.done_waiting() return resp except DSResourceNotConsistentError: if not waiting and monitor: monitor.start_waiting() waiting = True time.sleep(RESOURCE_NOT_CONSISTENT_RETRY_SECONDS)
def retry_until_resource_is_consistent(func, monitor)
Runs func, if func raises DSResourceNotConsistentError will retry func indefinitely. Notifies monitor if we have to wait(only happens if DukeDS API raises DSResourceNotConsistentError. :param func: func(): function to run :param monitor: object: has start_waiting() and done_waiting() methods when waiting for non-consistent resource :return: whatever func returns
4.319673
2.975784
1.451608
if self.legacy_auth(): return self._auth if not self.auth_expired(): return self._auth self.claim_new_token() return self._auth
def get_auth(self)
Gets an active token refreshing it if necessary. :return: str valid active authentication token.
7.264563
6.321466
1.14919
# Intentionally doing this manually so we don't have a chicken and egg problem with DataServiceApi. headers = { 'Content-Type': ContentType.json, 'User-Agent': self.user_agent_str, } data = { "agent_key": self.config.agent_key, "user_key": self.config.user_key, } url_suffix = "/software_agents/api_token" url = self.config.url + url_suffix response = requests.post(url, headers=headers, data=json.dumps(data)) if response.status_code == 404: if not self.config.agent_key: raise MissingInitialSetupError() else: raise SoftwareAgentNotFoundError() elif response.status_code == 503: raise DataServiceError(response, url_suffix, data) elif response.status_code != 201: raise AuthTokenCreationError(response) resp_json = response.json() self._auth = resp_json['api_token'] self._expires = resp_json['expires_on']
def claim_new_token(self)
Update internal state to have a new token using a no authorization data service.
3.629598
3.422189
1.060607
if self._auth and self._expires: now_with_skew = time.time() + AUTH_TOKEN_CLOCK_SKEW_MAX return now_with_skew > self._expires return True
def auth_expired(self)
Compare the expiration value of our current token including a CLOCK_SKEW. :return: true if the token has expired
6.187682
5.658545
1.093511
url = self.base_url + url_suffix send_data = data if content_type == ContentType.json: send_data = json.dumps(data) headers = { 'Content-Type': content_type, 'User-Agent': self.user_agent_str, } if self.auth: headers['Authorization'] = self.auth.get_auth() return url, send_data, headers
def _url_parts(self, url_suffix, data, content_type)
Format the url data based on config_type. :param url_suffix: str URL path we are sending a GET/POST/PUT to :param data: object data we are sending :param content_type: str from ContentType that determines how we format the data :return: complete url, formatted data, and headers for sending
2.452847
2.373684
1.03335
(url, data_str, headers) = self._url_parts(url_suffix, data, content_type=content_type) resp = self.http.post(url, data_str, headers=headers) return self._check_err(resp, url_suffix, data, allow_pagination=False)
def _post(self, url_suffix, data, content_type=ContentType.json)
Send POST request to API at url_suffix with post_data. Raises error if x-total-pages is contained in the response. :param url_suffix: str URL path we are sending a POST to :param data: object data we are sending :param content_type: str from ContentType that determines how we format the data :return: requests.Response containing the result
3.935789
4.725022
0.832967
(url, data_str, headers) = self._url_parts(url_suffix, data, content_type=content_type) resp = self.http.put(url, data_str, headers=headers) return self._check_err(resp, url_suffix, data, allow_pagination=False)
def _put(self, url_suffix, data, content_type=ContentType.json)
Send PUT request to API at url_suffix with post_data. Raises error if x-total-pages is contained in the response. :param url_suffix: str URL path we are sending a PUT to :param data: object data we are sending :param content_type: str from ContentType that determines how we format the data :return: requests.Response containing the result
3.918533
4.791942
0.817734
(url, data_str, headers) = self._url_parts(url_suffix, data, content_type=content_type) resp = self.http.get(url, headers=headers, params=data_str) return self._check_err(resp, url_suffix, data, allow_pagination=False)
def _get_single_item(self, url_suffix, data, content_type=ContentType.json)
Send GET request to API at url_suffix with post_data. Raises error if x-total-pages is contained in the response. :param url_suffix: str URL path we are sending a GET to :param url_data: object data we are sending :param content_type: str from ContentType that determines how we format the data :return: requests.Response containing the result
3.968876
4.837405
0.820456
data_with_per_page = dict(data) data_with_per_page['page'] = page_num data_with_per_page['per_page'] = self._get_page_size() (url, data_str, headers) = self._url_parts(url_suffix, data_with_per_page, content_type=ContentType.form) resp = self.http.get(url, headers=headers, params=data_str) return self._check_err(resp, url_suffix, data, allow_pagination=True)
def _get_single_page(self, url_suffix, data, page_num)
Send GET request to API at url_suffix with post_data adding page and per_page parameters to retrieve a single page. Page size is determined by config.page_size. :param url_suffix: str URL path we are sending a GET to :param data: object data we are sending :param page_num: int: page number to fetch :return: requests.Response containing the result
3.551291
3.450121
1.029323
response = self._get_single_page(url_suffix, data, page_num=1) total_pages_str = response.headers.get('x-total-pages') if total_pages_str: total_pages = int(total_pages_str) if total_pages > 1: multi_response = MultiJSONResponse(base_response=response, merge_array_field_name="results") for page in range(2, total_pages + 1): additional_response = self._get_single_page(url_suffix, data, page_num=page) multi_response.add_response(additional_response) return multi_response return response
def _get_collection(self, url_suffix, data)
Performs GET for all pages based on x-total-pages in first response headers. Merges the json() 'results' arrays. If x-total-pages is missing or 1 just returns the response without fetching multiple pages. :param url_suffix: str URL path we are sending a GET to :param data: object data we are sending :return: requests.Response containing the result
2.763544
2.40376
1.149675
total_pages = resp.headers.get('x-total-pages') if not allow_pagination and total_pages: raise UnexpectedPagingReceivedError() if 200 <= resp.status_code < 300: return resp if resp.status_code == 404: if resp.json().get("code") == "resource_not_consistent": raise DSResourceNotConsistentError(resp, url_suffix, data) raise DataServiceError(resp, url_suffix, data)
def _check_err(resp, url_suffix, data, allow_pagination)
Raise DataServiceError if the response wasn't successful. :param resp: requests.Response back from the request :param url_suffix: str url to include in an error message :param data: data payload we sent :param allow_pagination: when False and response headers contains 'x-total-pages' raises an error. :return: requests.Response containing the successful result
3.44153
3.155139
1.09077
data = { "name": project_name, "description": desc } return self._post("/projects", data)
def create_project(self, project_name, desc)
Send POST to /projects creating a new project with the specified name and desc. Raises DataServiceError on error. :param project_name: str name of the project :param desc: str description of the project :return: requests.Response containing the successful result
2.968914
3.505705
0.846881
data = { 'name': folder_name, 'parent': { 'kind': parent_kind_str, 'id': parent_uuid } } return self._post("/folders", data)
def create_folder(self, folder_name, parent_kind_str, parent_uuid)
Send POST to /folders to create a new folder with specified name and parent. :param folder_name: str name of the new folder :param parent_kind_str: str type of parent folder has(dds-folder,dds-project) :param parent_uuid: str uuid of the parent object :return: requests.Response containing the successful result
2.420655
2.863977
0.845207
return self._get_children('projects', project_id, name_contains, exclude_response_fields)
def get_project_children(self, project_id, name_contains, exclude_response_fields=None)
Send GET to /projects/{project_id}/children filtering by a name. :param project_id: str uuid of the project :param name_contains: str name to filter folders by (if not None this method works recursively) :param exclude_response_fields: [str]: list of fields to exclude in the response items :return: requests.Response containing the successful result
3.385796
6.19416
0.546611
data = {} if name_contains is not None: data['name_contains'] = name_contains if exclude_response_fields: data['exclude_response_fields'] = ' '.join(exclude_response_fields) url_prefix = "/{}/{}/children".format(parent_name, parent_id) return self._get_collection(url_prefix, data)
def _get_children(self, parent_name, parent_id, name_contains, exclude_response_fields=None)
Send GET message to /<parent_name>/<parent_id>/children to fetch info about children(files and folders) :param parent_name: str 'projects' or 'folders' :param parent_id: str uuid of project or folder :param name_contains: name filtering (if not None this method works recursively) :param exclude_response_fields: [str]: list of fields to exclude in the response items :return: requests.Response containing the successful result
2.39625
2.590733
0.924932
data = { "name": filename, "content_type": content_type, "size": size, "hash": { "value": hash_value, "algorithm": hash_alg }, "chunked": chunked, } if storage_provider_id: data['storage_provider'] = {'id': storage_provider_id} return self._post("/projects/" + project_id + "/uploads", data)
def create_upload(self, project_id, filename, content_type, size, hash_value, hash_alg, storage_provider_id=None, chunked=True)
Post to /projects/{project_id}/uploads to create a uuid for uploading chunks. NOTE: The optional hash_value and hash_alg parameters are being removed from the DukeDS API. :param project_id: str uuid of the project we are uploading data for. :param filename: str name of the file we want to upload :param content_type: str mime type of the file :param size: int size of the file in bytes :param hash_value: str hash value of the entire file :param hash_alg: str algorithm used to create hash_value :param storage_provider_id: str optional storage provider id :param chunked: is the uploaded file made up of multiple chunks. When False a single upload url is returned. For more see https://github.com/Duke-Translational-Bioinformatics/duke-data-service/blob/develop/api_design/DDS-1182-nonchucked_upload_api_design.md :return: requests.Response containing the successful result
1.67099
2.151157
0.776787
if number < 1: raise ValueError("Chunk number must be > 0") data = { "number": number, "size": size, "hash": { "value": hash_value, "algorithm": hash_alg } } return self._put("/uploads/" + upload_id + "/chunks", data)
def create_upload_url(self, upload_id, number, size, hash_value, hash_alg)
Given an upload created by create_upload retrieve a url where we can upload a chunk. :param upload_id: uuid of the upload :param number: int incrementing number of the upload (1-based index) :param size: int size of the chunk in bytes :param hash_value: str hash value of chunk :param hash_alg: str algorithm used to create hash :return: requests.Response containing the successful result
2.460474
2.541329
0.968184
data = { "hash[value]": hash_value, "hash[algorithm]": hash_alg } return self._put("/uploads/" + upload_id + "/complete", data, content_type=ContentType.form)
def complete_upload(self, upload_id, hash_value, hash_alg)
Mark the upload we created in create_upload complete. :param upload_id: str uuid of the upload to complete. :param hash_value: str hash value of chunk :param hash_alg: str algorithm used to create hash :return: requests.Response containing the successful result
3.714387
4.263373
0.871232
data = { "parent": { "kind": parent_kind, "id": parent_id }, "upload": { "id": upload_id } } return self._post("/files/", data)
def create_file(self, parent_kind, parent_id, upload_id)
Create a new file after completing an upload. :param parent_kind: str kind of parent(dds-folder,dds-project) :param parent_id: str uuid of parent :param upload_id: str uuid of complete upload :return: requests.Response containing the successful result
2.225802
2.878061
0.773368
put_data = { "upload[id]": upload_id, } return self._put("/files/" + file_id, put_data, content_type=ContentType.form)
def update_file(self, file_id, upload_id)
Send PUT request to /files/{file_id} to update the file contents to upload_id and sets a label. :param file_id: str uuid of file :param upload_id: str uuid of the upload where all the file chunks where uploaded :param label: str short display label for the file :return: requests.Response containing the successful result
5.466552
6.14762
0.889214
if http_verb == 'PUT': return self.http.put(host + url, data=chunk, headers=http_headers) elif http_verb == 'POST': return self.http.post(host + url, data=chunk, headers=http_headers) else: raise ValueError("Unsupported http_verb:" + http_verb)
def send_external(self, http_verb, host, url, http_headers, chunk)
Used with create_upload_url to send a chunk the the possibly external object store. :param http_verb: str PUT or POST :param host: str host we are sending the chunk to :param url: str url to use when sending :param http_headers: object headers to send with the request :param chunk: content to send :return: requests.Response containing the successful result
1.991205
1.946772
1.022824
if http_verb == 'GET': return self.http.get(host + url, headers=http_headers, stream=True) else: raise ValueError("Unsupported http_verb:" + http_verb)
def receive_external(self, http_verb, host, url, http_headers)
Retrieve a streaming request for a file. :param http_verb: str GET is only supported right now :param host: str host we are requesting the file from :param url: str url to ask the host for :param http_headers: object headers to send with the request :return: requests.Response containing the successful result
3.061968
3.02562
1.012013
data = {} if full_name: data['full_name_contains'] = full_name if email: data['email'] = email if username: data['username'] = username return self._get_collection('/users', data)
def get_users(self, full_name=None, email=None, username=None)
Send GET request to /users for users with optional full_name, email, and/or username filtering. :param full_name: str name of the user we are searching for :param email: str: optional email to filter by :param username: str: optional username to filter by :return: requests.Response containing the successful result
2.443543
2.696527
0.906182
put_data = { "auth_role[id]": auth_role } return self._put("/projects/" + project_id + "/permissions/" + user_id, put_data, content_type=ContentType.form)
def set_user_project_permission(self, project_id, user_id, auth_role)
Send PUT request to /projects/{project_id}/permissions/{user_id/ with auth_role value. :param project_id: str uuid of the project :param user_id: str uuid of the user :param auth_role: str project role eg 'project_admin' :return: requests.Response containing the successful result
5.185935
5.356636
0.968133
data = { "agent_key": agent_key, "user_key": user_key, } return self._post("/software_agents/api_token", data)
def get_api_token(self, agent_key, user_key)
Send POST request to get an auth token. This method doesn't require auth obviously. :param agent_key: str agent key (who is acting on behalf of the user) :param user_key: str secret user key :return: requests.Response containing the successful result
3.531117
4.274688
0.826053
data = { "to_users[][id]": to_user_ids, } return self._post("/projects/" + project_id + "/transfers", data, content_type=ContentType.form)
def create_project_transfer(self, project_id, to_user_ids)
Send POST request to initiate transfer of a project to the specified user ids :param project_id: str uuid of the project :param to_users: list of user uuids to receive the project :return: requests.Response containing the successful result
5.126266
6.272715
0.817232
data = {} if status_comment: data["status_comment"] = status_comment path = "/project_transfers/{}/{}".format(transfer_id, action) return self._put(path, data, content_type=ContentType.form)
def _process_project_transfer(self, action, transfer_id, status_comment)
Send PUT request to one of the project transfer action endpoints :param action: str name of the action (reject/accept/cancel) :param transfer_id: str uuid of the project_transfer :param status_comment: str comment about the action, optional :return: requests.Response containing the successful result
4.037332
4.308099
0.937149
data = { "name": activity_name, "description": desc, "started_on": started_on, "ended_on": ended_on } return self._post("/activities", data)
def create_activity(self, activity_name, desc=None, started_on=None, ended_on=None)
Send POST to /activities creating a new activity with the specified name and desc. Raises DataServiceError on error. :param activity_name: str name of the activity :param desc: str description of the activity (optional) :param started_on: str datetime when the activity started (optional) :param ended_on: str datetime when the activity ended (optional) :return: requests.Response containing the successful result
1.953787
2.247669
0.86925
put_data = { "name": activity_name, "description": desc, "started_on": started_on, "ended_on": ended_on } return self._put("/activities/" + activity_id, put_data)
def update_activity(self, activity_id, activity_name=None, desc=None, started_on=None, ended_on=None)
Send PUT request to /activities/{activity_id} to update the activity metadata. Raises ValueError if at least one field is not updated. :param activity_id: str uuid of activity :param activity_name: str new name of the activity (optional) :param desc: str description of the activity (optional) :param started_on: str date the updated activity began on (optional) :param ended_on: str date the updated activity ended on (optional) :return: requests.Response containing the successful result
2.040198
2.401746
0.849465
return self._create_activity_relation(activity_id, entity_kind, entity_id, ActivityRelationTypes.USED)
def create_used_relation(self, activity_id, entity_kind, entity_id)
Create a was used by relationship between an activity and a entity(file). :param activity_id: str: uuid of the activity :param entity_kind: str: kind of entity('dds-file') :param entity_id: str: uuid of the entity :return: requests.Response containing the successful result
3.215236
5.057864
0.63569
return self._create_activity_relation(activity_id, entity_kind, entity_id, ActivityRelationTypes.WAS_GENERATED_BY)
def create_was_generated_by_relation(self, activity_id, entity_kind, entity_id)
Create a was generated by relationship between an activity and a entity(file). :param activity_id: str: uuid of the activity :param entity_kind: str: kind of entity('dds-file') :param entity_id: str: uuid of the entity :return: requests.Response containing the successful result
2.835146
3.931308
0.721171
return self._create_activity_relation(activity_id, entity_kind, entity_id, ActivityRelationTypes.WAS_INVALIDATED_BY)
def create_was_invalidated_by_relation(self, activity_id, entity_kind, entity_id)
Create a was invalidated by relationship between an activity and a entity(file). :param activity_id: str: uuid of the activity :param entity_kind: str: kind of entity('dds-file') :param entity_id: str: uuid of the entity :return: requests.Response containing the successful result
2.735484
3.777744
0.724105
data = { "used_entity": { "id": used_entity_id, "kind": used_entity_kind }, "generated_entity": { "id": generated_entity_id, "kind": generated_entity_kind } } return self._post("/relations/was_derived_from", data)
def create_was_derived_from_relation(self, used_entity_id, used_entity_kind, generated_entity_id, generated_entity_kind)
Create a was derived from relation. :param used_entity_id: str: uuid of the used entity (file_version_id) :param used_entity_kind: str: kind of entity ('dds-file') :param generated_entity_id: uuid of the generated entity (file_version_id) :param generated_entity_kind: str: kind of entity ('dds-file') :return: requests.Response containing the successful result
1.772677
2.075364
0.854152
data = {} if full_name_contains: data['full_name_contains'] = full_name_contains if email: data['email'] = email if username: data['username'] = username return self._get_collection("/auth_providers/{}/affiliates/".format(auth_provider_id), data)
def get_auth_provider_affiliates(self, auth_provider_id, full_name_contains=None, email=None, username=None)
List affiliates for a specific auth provider. :param auth_provider_id: str: uuid of the auth provider to list affiliates of :param full_name_contains: str: filters affiliates for this name :param email: str: filters affiliates for this email address :param username: str: filters affiliates for this username :return: requests.Response containing the successful result
1.997756
2.274783
0.878218
url = "/auth_providers/{}/affiliates/{}/dds_user/".format(auth_provider_id, username) return self._post(url, {})
def auth_provider_add_user(self, auth_provider_id, username)
Transform an institutional affiliates UID, such as a Duke NetID, to a DDS specific user identity; can be used by clients prior to calling DDS APIs that require a DDS user in the request payload. Returns user details. Can be safely called multiple times. :param auth_provider_id: str: auth provider who supports user adding :param username: str: netid we wish to register with DukeDS :return: requests.Response containing the successful result
7.202199
6.627524
1.08671
key = self.merge_array_field_name response_json = response.json() value = self.combined_json[key] self.combined_json[self.merge_array_field_name] = value + response_json[key]
def add_response(self, response)
Add data from json() to data returned by json() :param response: requests.Response containing the successful JSON result to be merged
5.541059
5.087242
1.089207
try: ts = load_data(filename, format_file) validator = CloudUPDRSDataFrameValidator() if validator.is_valid(ts): return ts else: logging.error('Error loading data, wrong format.') return None except IOError as e: ierr = "({}): {}".format(e.errno, e.strerror) logging.error("load data, file not found, I/O error %s", ierr) except ValueError as verr: logging.error("load data ValueError ->%s", verr.message) except: logging.error("Unexpected error on load data method: %s", sys.exc_info()[0])
def load(self, filename, format_file='cloudupdrs')
This is a general load data method where the format of data to load can be passed as a parameter, :param str filename: The path to load data from :param str format_file: format of the file. Default is CloudUPDRS. Set to mpower for mpower data. :return dataframe: data_frame.x, data_frame.y, data_frame.z: x, y, z components of the acceleration \ data_frame.index is the datetime-like index
4.151287
4.128023
1.005635
method_class = self.registry.get(call.action, call.version) method = method_class(*args, **kwargs) if not method.is_available(): raise APIError(400, "InvalidAction", "The action %s is not " "valid for this web service." % call.action) else: return method
def get_method(self, call, *args, **kwargs)
Return the L{Method} instance to invoke for the given L{Call}. @param args: Positional arguments to pass to the method constructor. @param kwargs: Keyword arguments to pass to the method constructor.
4.350993
4.92627
0.883223
request.id = str(uuid4()) deferred = maybeDeferred(self._validate, request) deferred.addCallback(self.execute) def write_response(response): request.setHeader("Content-Length", str(len(response))) request.setHeader("Content-Type", self.content_type) # Prevent browsers from trying to guess a different content type. request.setHeader("X-Content-Type-Options", "nosniff") request.write(response) request.finish() return response def write_error(failure): if failure.check(APIError): status = failure.value.status # Don't log the stack traces for 4xx responses. if status < 400 or status >= 500: log.err(failure) else: log.msg("status: %s message: %s" % ( status, safe_str(failure.value))) body = failure.value.response if body is None: body = self.dump_error(failure.value, request) else: # If the error is a generic one (not an APIError), log the # message , but don't send it back to the client, as it could # contain sensitive information. Send a generic server error # message instead. log.err(failure) body = "Server error" status = 500 request.setResponseCode(status) write_response(body) deferred.addCallback(write_response) deferred.addErrback(write_error) return deferred
def handle(self, request)
Handle an HTTP request for executing an API call. This method authenticates the request checking its signature, and then calls the C{execute} method, passing it a L{Call} object set with the principal for the authenticated user and the generic parameters extracted from the request. @param request: The L{HTTPRequest} to handle.
3.125983
3.153463
0.991286
method = self.get_method(call) deferred = maybeDeferred(self.authorize, method, call) deferred.addCallback(lambda _: method.invoke(call)) return deferred.addCallback(self.dump_result)
def execute(self, call)
Execute an API L{Call}. At this point the request has been authenticated and C{call.principal} is set with the L{Principal} for the L{User} requesting the call. @return: The response to write in the request for the given L{Call}. @raises: An L{APIError} in case the execution fails, sporting an error message the HTTP status code to return.
5.252373
5.900368
0.890177
params = dict((k, v[-1]) for k, v in request.args.iteritems()) args, rest = self.schema.extract(params) # Get rid of Signature so it doesn't mess with signature verification params.pop("Signature") result = { "transport_args": { "action": args.Action, "access_key_id": args.AWSAccessKeyId, "timestamp": args.Timestamp, "expires": args.Expires, "version": args.Version, "signature_method": args.SignatureMethod, "signature": args.Signature, "signature_version": args.SignatureVersion}, "handler_args": rest, "raw_args": params } return result
def get_call_arguments(self, request)
Get call arguments from a request. Override this if you want to use a wire format different from AWS's. The return value is a dictionary with three keys: 'transport_args', 'handler_args', and 'raw_args'. The value of 'transport_args' must be a dictionary with the following keys: - action - access_key_id - timestamp - expires - version - signature_method - signature - signature_version The value of 'handler_args' should be the application arguments that are meant to be passed to the action handler. The value of 'raw_args', the unprocessed arguments, are used for signature verification. This should be the same dictionary of data that the client used to sign the request. Note that this data must not contain the signature itself.
3.994408
2.648198
1.508349
call_arguments = self.get_call_arguments(request) args = call_arguments["transport_args"] rest = call_arguments["handler_args"] params = call_arguments["raw_args"] self._validate_generic_parameters(args) def create_call(principal): self._validate_principal(principal, args) self._validate_signature(request, principal, args, params) return Call(raw_params=rest, principal=principal, action=args["action"], version=args["version"], id=request.id) deferred = maybeDeferred(self.get_principal, args["access_key_id"]) deferred.addCallback(create_call) return deferred
def _validate(self, request)
Validate an L{HTTPRequest} before executing it. The following conditions are checked: - The request contains all the generic parameters. - The action specified in the request is a supported one. - The signature mechanism is a supported one. - The provided signature matches the one calculated using the locally stored secret access key for the user. - The signature hasn't expired. @return: The validated L{Call}, set with its default arguments and the the principal of the accessing L{User}.
5.188373
4.33491
1.196881
utc_now = self.get_utc_time() if getattr(self, "actions", None) is not None: # Check the deprecated 'actions' attribute if not args["action"] in self.actions: raise APIError(400, "InvalidAction", "The action %s is not " "valid for this web service." % args["action"]) else: self.registry.check(args["action"], args["version"]) if not args["signature_version"] in self.signature_versions: raise APIError(403, "InvalidSignature", "SignatureVersion '%s' " "not supported" % args["signature_version"]) if args["expires"] and args["timestamp"]: raise APIError(400, "InvalidParameterCombination", "The parameter Timestamp cannot be used with " "the parameter Expires") if args["expires"] and args["expires"] < utc_now: raise APIError(400, "RequestExpired", "Request has expired. Expires date is %s" % ( args["expires"].strftime(self.time_format))) if (args["timestamp"] and args["timestamp"] + timedelta(minutes=15) < utc_now): raise APIError(400, "RequestExpired", "Request has expired. Timestamp date is %s" % ( args["timestamp"].strftime(self.time_format)))
def _validate_generic_parameters(self, args)
Validate the generic request parameters. @param args: Parsed schema arguments. @raises APIError: In the following cases: - Action is not included in C{self.actions} - SignatureVersion is not included in C{self.signature_versions} - Expires and Timestamp are present - Expires is before the current time - Timestamp is older than 15 minutes.
3.020621
2.599394
1.162048
creds = AWSCredentials(principal.access_key, principal.secret_key) endpoint = AWSServiceEndpoint() endpoint.set_method(request.method) endpoint.set_canonical_host(request.getHeader("Host")) path = request.path if self.path is not None: path = "%s/%s" % (self.path.rstrip("/"), path.lstrip("/")) endpoint.set_path(path) signature = Signature(creds, endpoint, params, signature_method=args["signature_method"], signature_version=args["signature_version"] ) if signature.compute() != args["signature"]: raise APIError(403, "SignatureDoesNotMatch", "The request signature we calculated does not " "match the signature you provided. Check your " "key and signing method.")
def _validate_signature(self, request, principal, args, params)
Validate the signature.
3.340056
3.33442
1.00169
if not request.args: request.setHeader("Content-Type", "text/plain") return self.get_status_text() else: self.handle(request) return NOT_DONE_YET
def render_GET(self, request)
Handle a GET request.
3.872023
3.725823
1.03924
from twisted.internet.main import installReactor from twisted.internet.selectreactor import SelectReactor class ExitCodeReactor(SelectReactor): def stop(self, exitStatus=0): super(ExitCodeReactor, self).stop() self.exitStatus = exitStatus def run(self, *args, **kwargs): super(ExitCodeReactor, self).run(*args, **kwargs) return self.exitStatus reactor = ExitCodeReactor() installReactor(reactor) return reactor
def get_exitcode_reactor()
This is only neccesary until a fix like the one outlined here is implemented for Twisted: http://twistedmatrix.com/trac/ticket/2182
2.299058
2.249112
1.022207
kDate = sign((b'AWS4' + key), dateStamp) kRegion = sign(kDate, regionName) kService = sign(kRegion, serviceName) kSigning = sign(kService, b'aws4_request') return kSigning
def getSignatureKey(key, dateStamp, regionName, serviceName)
Generate the signing key for AWS V4 requests. @param key: The secret key to use. @type key: L{bytes} @param dateStamp: The UTC date and time, serialized as an AWS date stamp. @type dateStamp: L{bytes} @param regionName: The name of the region. @type regionName: L{bytes} @param serviceName: The name of the service to which the request will be sent. @type serviceName: L{bytes} @return: The signature. @rtype: L{bytes}
1.862677
2.698488
0.690267
path = urllib.quote(parsed.path) canonical_parsed = parsed._replace(path=path, params='', query='', fragment='') return urlparse.urlunparse(canonical_parsed)
def _make_canonical_uri(parsed)
Return the canonical URI for a parsed URL. @param parsed: The parsed URL from which to extract the canonical URI @type parsed: L{urlparse.ParseResult} @return: The canonical URI. @rtype: L{str}
4.10245
5.411431
0.758108
query_params = urlparse.parse_qs(parsed.query, keep_blank_values=True) sorted_query_params = sorted((k, v) for k, vs in query_params.items() for v in vs) return urllib.urlencode(sorted_query_params)
def _make_canonical_query_string(parsed)
Return the canonical query string for a parsed URL. @param parsed: The parsed URL from which to extract the canonical query string. @type parsed: L{urlparse.ParseResult} @return: The canonical query string. @rtype: L{str}
2.379659
2.817748
0.844525
pairs = [] for name in headers_to_sign: if name not in headers: continue values = headers[name] if not isinstance(values, (list, tuple)): values = [values] comma_values = b','.join(' '.join(line.strip().split()) for value in values for line in value.splitlines()) pairs.append((name.lower(), comma_values)) sorted_pairs = sorted(b'%s:%s' % (name, value) for name, value in sorted(pairs)) return b'\n'.join(sorted_pairs) + b'\n'
def _make_canonical_headers(headers, headers_to_sign)
Return canonicalized headers. @param headers: The request headers. @type headers: L{dict} @param headers_to_sign: A sequence of header names that should be signed. @type headers_to_sign: A sequence of L{bytes} @return: The canonicalized headers. @rtype: L{bytes}
2.794983
2.872044
0.973168
return b";".join(header.lower() for header in sorted(headers_to_sign) if header in headers)
def _make_signed_headers(headers, headers_to_sign)
Return a semicolon-delimited list of headers to sign. @param headers: The request headers. @type headers: L{dict} @param headers_to_sign: A sequence of header names that should be signed. @type headers_to_sign: L{bytes} @return: The semicolon-delimited list of headers. @rtype: L{bytes}
5.773762
6.361914
0.907551
date_stamp = makeDateStamp(instant) amz_date = makeAMZDate(instant) scope = _CredentialScope( date_stamp=date_stamp, region=region, service=service ) signable = _SignableAWS4HMAC256Token( amz_date, scope, canonical_request, ) signature = signable.signature( getSignatureKey(credentials.secret_key, date_stamp, region, service) ) v4credential = _Credential( access_key=credentials.access_key, credential_scope=scope, ) return ( b"%s " % (_SignableAWS4HMAC256Token.ALGORITHM,) + b", ".join([ b"Credential=%s" % (v4credential.serialize(),), b"SignedHeaders=%s" % (canonical_request.signed_headers,), b"Signature=%s" % (signature,), ]))
def _make_authorization_header(region, service, canonical_request, credentials, instant)
Construct an AWS version 4 authorization value for use in an C{Authorization} header. @param region: The AWS region name (e.g., C{'us-east-1'}). @type region: L{str} @param service: The AWS service's name (e.g., C{'s3'}). @type service: L{str} @param canonical_request: The canonical form of the request. @type canonical_request: L{_CanonicalRequest} (use L{_CanonicalRequest.from_payload_and_headers}) @param credentials: The AWS credentials. @type credentials: L{txaws.credentials.AWSCredentials} @param instant: The current UTC date and time @type instant: A naive local L{datetime.datetime} (as returned by L{datetime.datetime.utcnow}) @return: A value suitable for use in an C{Authorization} header @rtype: L{bytes}
3.253084
3.435935
0.946783
result = [] for step in xrange(0, steps): fpart = (steps - step) * first lpart = (step + 1) * last value = (fpart + lpart) / float(steps + 1) result.append(value) return result
def linear_interpolation(first, last, steps)
Interpolates all missing values using linear interpolation. :param numeric first: Start value for the interpolation. :param numeric last: End Value for the interpolation :param integer steps: Number of missing values that have to be calculated. :return: Returns a list of floats containing only the missing values. :rtype: list :todo: Define a more general interface!
3.518515
4.226773
0.832435
data_service = upload_context.make_data_service() project_name = upload_context.project_name_or_id.get_name_or_raise() result = data_service.create_project(project_name, project_name) return result.json()['id']
def upload_project_run(upload_context)
Function run by CreateProjectCommand to create the project. Runs in a background process. :param upload_context: UploadContext: contains data service setup and project name to create.
4.527114
3.940655
1.148823
data_service = upload_context.make_data_service() folder_name, parent_kind, parent_remote_id = upload_context.params result = data_service.create_folder(folder_name, parent_kind, parent_remote_id) return result.json()['id']
def upload_folder_run(upload_context)
Function run by CreateFolderCommand to create the folder. Runs in a background process. :param upload_context: UploadContext: contains data service setup and folder details.
4.461246
4.045926
1.102652
data_service = upload_context.make_data_service() parent_data, path_data, remote_file_id = upload_context.params # The small file will fit into one chunk so read into memory and hash it. chunk = path_data.read_whole_file() hash_data = path_data.get_hash() # Talk to data service uploading chunk and creating the file. upload_operations = FileUploadOperations(data_service, upload_context) upload_id, url_info = upload_operations.create_upload_and_chunk_url( upload_context.project_id, path_data, hash_data, storage_provider_id=upload_context.config.storage_provider_id) upload_operations.send_file_external(url_info, chunk) return upload_operations.finish_upload(upload_id, hash_data, parent_data, remote_file_id)
def create_small_file(upload_context)
Function run by CreateSmallFileCommand to create the file. Runs in a background process. :param upload_context: UploadContext: contains data service setup and file details. :return dict: DukeDS file data
5.952675
5.8229
1.022287
auth = DataServiceAuth(config) auth.set_auth_data(data_service_auth_data) return DataServiceApi(auth, config.url)
def rebuild_data_service(config, data_service_auth_data)
Deserialize value into DataServiceApi object. :param config: :param data_service_auth_data: :return:
4.460732
4.273439
1.043827
# Walk project adding small items to runner saving large items to large_items ProjectWalker.walk_project(local_project, self) # Run small items in parallel self.runner.run() # Run parts of each large item in parallel self.upload_large_items()
def run(self, local_project)
Upload a project by uploading project, folders, and small files then uploading the large files. :param local_project: LocalProject: project to upload
17.29928
13.667549
1.265719
if self.is_large_file(item): self.large_items.append((item, parent)) else: self.small_item_task_builder.visit_file(item, parent)
def visit_file(self, item, parent)
If file is large add it to the large items to be processed after small task list. else file is small add it to the small task list.
5.307974
2.991979
1.774068
for local_file, parent in self.large_items: if local_file.need_to_send: self.process_large_file(local_file, parent)
def upload_large_items(self)
Upload files that were too large.
6.328906
5.651639
1.119835
file_content_sender = FileUploader(self.settings.config, self.settings.data_service, local_file, self.settings.watcher, self.settings.file_upload_post_processor) remote_id = file_content_sender.upload(self.settings.project_id, parent.kind, parent.remote_id) local_file.set_remote_id_after_send(remote_id)
def process_large_file(self, local_file, parent)
Upload a single file using multiple processes to upload multiple chunks at the same time. Updates local_file with it's remote_id when done. :param local_file: LocalFile: file we are uploading :param parent: LocalFolder/LocalProject: parent of the file
6.170608
5.710527
1.080567
if not item.remote_id: command = CreateProjectCommand(self.settings, item) self.task_runner_add(None, item, command) else: self.settings.project_id = item.remote_id
def visit_project(self, item)
Adds create project command to task runner if project doesn't already exist.
7.194037
4.763407
1.510271