sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def set_user_project_permission(self, project_id, user_id, auth_role):
"""
Send PUT request to /projects/{project_id}/permissions/{user_id/ with auth_role value.
:param project_id: str uuid of the project
:param user_id: str uuid of the user
:param auth_role: str project role eg 'project_admin'
:return: requests.Response containing the successful result
"""
put_data = {
"auth_role[id]": auth_role
}
return self._put("/projects/" + project_id + "/permissions/" + user_id, put_data,
content_type=ContentType.form) | Send PUT request to /projects/{project_id}/permissions/{user_id/ with auth_role value.
:param project_id: str uuid of the project
:param user_id: str uuid of the user
:param auth_role: str project role eg 'project_admin'
:return: requests.Response containing the successful result | entailment |
def get_api_token(self, agent_key, user_key):
"""
Send POST request to get an auth token.
This method doesn't require auth obviously.
:param agent_key: str agent key (who is acting on behalf of the user)
:param user_key: str secret user key
:return: requests.Response containing the successful result
"""
data = {
"agent_key": agent_key,
"user_key": user_key,
}
return self._post("/software_agents/api_token", data) | Send POST request to get an auth token.
This method doesn't require auth obviously.
:param agent_key: str agent key (who is acting on behalf of the user)
:param user_key: str secret user key
:return: requests.Response containing the successful result | entailment |
def create_project_transfer(self, project_id, to_user_ids):
"""
Send POST request to initiate transfer of a project to the specified user ids
:param project_id: str uuid of the project
:param to_users: list of user uuids to receive the project
:return: requests.Response containing the successful result
"""
data = {
"to_users[][id]": to_user_ids,
}
return self._post("/projects/" + project_id + "/transfers", data,
content_type=ContentType.form) | Send POST request to initiate transfer of a project to the specified user ids
:param project_id: str uuid of the project
:param to_users: list of user uuids to receive the project
:return: requests.Response containing the successful result | entailment |
def _process_project_transfer(self, action, transfer_id, status_comment):
"""
Send PUT request to one of the project transfer action endpoints
:param action: str name of the action (reject/accept/cancel)
:param transfer_id: str uuid of the project_transfer
:param status_comment: str comment about the action, optional
:return: requests.Response containing the successful result
"""
data = {}
if status_comment:
data["status_comment"] = status_comment
path = "/project_transfers/{}/{}".format(transfer_id, action)
return self._put(path, data, content_type=ContentType.form) | Send PUT request to one of the project transfer action endpoints
:param action: str name of the action (reject/accept/cancel)
:param transfer_id: str uuid of the project_transfer
:param status_comment: str comment about the action, optional
:return: requests.Response containing the successful result | entailment |
def create_activity(self, activity_name, desc=None, started_on=None, ended_on=None):
"""
Send POST to /activities creating a new activity with the specified name and desc.
Raises DataServiceError on error.
:param activity_name: str name of the activity
:param desc: str description of the activity (optional)
:param started_on: str datetime when the activity started (optional)
:param ended_on: str datetime when the activity ended (optional)
:return: requests.Response containing the successful result
"""
data = {
"name": activity_name,
"description": desc,
"started_on": started_on,
"ended_on": ended_on
}
return self._post("/activities", data) | Send POST to /activities creating a new activity with the specified name and desc.
Raises DataServiceError on error.
:param activity_name: str name of the activity
:param desc: str description of the activity (optional)
:param started_on: str datetime when the activity started (optional)
:param ended_on: str datetime when the activity ended (optional)
:return: requests.Response containing the successful result | entailment |
def update_activity(self, activity_id, activity_name=None, desc=None,
started_on=None, ended_on=None):
"""
Send PUT request to /activities/{activity_id} to update the activity metadata.
Raises ValueError if at least one field is not updated.
:param activity_id: str uuid of activity
:param activity_name: str new name of the activity (optional)
:param desc: str description of the activity (optional)
:param started_on: str date the updated activity began on (optional)
:param ended_on: str date the updated activity ended on (optional)
:return: requests.Response containing the successful result
"""
put_data = {
"name": activity_name,
"description": desc,
"started_on": started_on,
"ended_on": ended_on
}
return self._put("/activities/" + activity_id, put_data) | Send PUT request to /activities/{activity_id} to update the activity metadata.
Raises ValueError if at least one field is not updated.
:param activity_id: str uuid of activity
:param activity_name: str new name of the activity (optional)
:param desc: str description of the activity (optional)
:param started_on: str date the updated activity began on (optional)
:param ended_on: str date the updated activity ended on (optional)
:return: requests.Response containing the successful result | entailment |
def create_used_relation(self, activity_id, entity_kind, entity_id):
"""
Create a was used by relationship between an activity and a entity(file).
:param activity_id: str: uuid of the activity
:param entity_kind: str: kind of entity('dds-file')
:param entity_id: str: uuid of the entity
:return: requests.Response containing the successful result
"""
return self._create_activity_relation(activity_id, entity_kind, entity_id, ActivityRelationTypes.USED) | Create a was used by relationship between an activity and a entity(file).
:param activity_id: str: uuid of the activity
:param entity_kind: str: kind of entity('dds-file')
:param entity_id: str: uuid of the entity
:return: requests.Response containing the successful result | entailment |
def create_was_generated_by_relation(self, activity_id, entity_kind, entity_id):
"""
Create a was generated by relationship between an activity and a entity(file).
:param activity_id: str: uuid of the activity
:param entity_kind: str: kind of entity('dds-file')
:param entity_id: str: uuid of the entity
:return: requests.Response containing the successful result
"""
return self._create_activity_relation(activity_id, entity_kind, entity_id, ActivityRelationTypes.WAS_GENERATED_BY) | Create a was generated by relationship between an activity and a entity(file).
:param activity_id: str: uuid of the activity
:param entity_kind: str: kind of entity('dds-file')
:param entity_id: str: uuid of the entity
:return: requests.Response containing the successful result | entailment |
def create_was_invalidated_by_relation(self, activity_id, entity_kind, entity_id):
"""
Create a was invalidated by relationship between an activity and a entity(file).
:param activity_id: str: uuid of the activity
:param entity_kind: str: kind of entity('dds-file')
:param entity_id: str: uuid of the entity
:return: requests.Response containing the successful result
"""
return self._create_activity_relation(activity_id, entity_kind, entity_id, ActivityRelationTypes.WAS_INVALIDATED_BY) | Create a was invalidated by relationship between an activity and a entity(file).
:param activity_id: str: uuid of the activity
:param entity_kind: str: kind of entity('dds-file')
:param entity_id: str: uuid of the entity
:return: requests.Response containing the successful result | entailment |
def create_was_derived_from_relation(self, used_entity_id, used_entity_kind,
generated_entity_id, generated_entity_kind):
"""
Create a was derived from relation.
:param used_entity_id: str: uuid of the used entity (file_version_id)
:param used_entity_kind: str: kind of entity ('dds-file')
:param generated_entity_id: uuid of the generated entity (file_version_id)
:param generated_entity_kind: str: kind of entity ('dds-file')
:return: requests.Response containing the successful result
"""
data = {
"used_entity": {
"id": used_entity_id,
"kind": used_entity_kind
},
"generated_entity": {
"id": generated_entity_id,
"kind": generated_entity_kind
}
}
return self._post("/relations/was_derived_from", data) | Create a was derived from relation.
:param used_entity_id: str: uuid of the used entity (file_version_id)
:param used_entity_kind: str: kind of entity ('dds-file')
:param generated_entity_id: uuid of the generated entity (file_version_id)
:param generated_entity_kind: str: kind of entity ('dds-file')
:return: requests.Response containing the successful result | entailment |
def get_auth_provider_affiliates(self, auth_provider_id, full_name_contains=None, email=None, username=None):
"""
List affiliates for a specific auth provider.
:param auth_provider_id: str: uuid of the auth provider to list affiliates of
:param full_name_contains: str: filters affiliates for this name
:param email: str: filters affiliates for this email address
:param username: str: filters affiliates for this username
:return: requests.Response containing the successful result
"""
data = {}
if full_name_contains:
data['full_name_contains'] = full_name_contains
if email:
data['email'] = email
if username:
data['username'] = username
return self._get_collection("/auth_providers/{}/affiliates/".format(auth_provider_id), data) | List affiliates for a specific auth provider.
:param auth_provider_id: str: uuid of the auth provider to list affiliates of
:param full_name_contains: str: filters affiliates for this name
:param email: str: filters affiliates for this email address
:param username: str: filters affiliates for this username
:return: requests.Response containing the successful result | entailment |
def auth_provider_add_user(self, auth_provider_id, username):
"""
Transform an institutional affiliates UID, such as a Duke NetID, to a DDS specific user identity;
can be used by clients prior to calling DDS APIs that require a DDS user in the request payload.
Returns user details. Can be safely called multiple times.
:param auth_provider_id: str: auth provider who supports user adding
:param username: str: netid we wish to register with DukeDS
:return: requests.Response containing the successful result
"""
url = "/auth_providers/{}/affiliates/{}/dds_user/".format(auth_provider_id, username)
return self._post(url, {}) | Transform an institutional affiliates UID, such as a Duke NetID, to a DDS specific user identity;
can be used by clients prior to calling DDS APIs that require a DDS user in the request payload.
Returns user details. Can be safely called multiple times.
:param auth_provider_id: str: auth provider who supports user adding
:param username: str: netid we wish to register with DukeDS
:return: requests.Response containing the successful result | entailment |
def add_response(self, response):
"""
Add data from json() to data returned by json()
:param response: requests.Response containing the successful JSON result to be merged
"""
key = self.merge_array_field_name
response_json = response.json()
value = self.combined_json[key]
self.combined_json[self.merge_array_field_name] = value + response_json[key] | Add data from json() to data returned by json()
:param response: requests.Response containing the successful JSON result to be merged | entailment |
def load(self, filename, format_file='cloudupdrs'):
"""
This is a general load data method where the format of data to load can be passed as a parameter,
:param str filename: The path to load data from
:param str format_file: format of the file. Default is CloudUPDRS. Set to mpower for mpower data.
:return dataframe: data_frame.x, data_frame.y, data_frame.z: x, y, z components of the acceleration \
data_frame.index is the datetime-like index
"""
try:
ts = load_data(filename, format_file)
validator = CloudUPDRSDataFrameValidator()
if validator.is_valid(ts):
return ts
else:
logging.error('Error loading data, wrong format.')
return None
except IOError as e:
ierr = "({}): {}".format(e.errno, e.strerror)
logging.error("load data, file not found, I/O error %s", ierr)
except ValueError as verr:
logging.error("load data ValueError ->%s", verr.message)
except:
logging.error("Unexpected error on load data method: %s", sys.exc_info()[0]) | This is a general load data method where the format of data to load can be passed as a parameter,
:param str filename: The path to load data from
:param str format_file: format of the file. Default is CloudUPDRS. Set to mpower for mpower data.
:return dataframe: data_frame.x, data_frame.y, data_frame.z: x, y, z components of the acceleration \
data_frame.index is the datetime-like index | entailment |
def get_method(self, call, *args, **kwargs):
"""Return the L{Method} instance to invoke for the given L{Call}.
@param args: Positional arguments to pass to the method constructor.
@param kwargs: Keyword arguments to pass to the method constructor.
"""
method_class = self.registry.get(call.action, call.version)
method = method_class(*args, **kwargs)
if not method.is_available():
raise APIError(400, "InvalidAction", "The action %s is not "
"valid for this web service." % call.action)
else:
return method | Return the L{Method} instance to invoke for the given L{Call}.
@param args: Positional arguments to pass to the method constructor.
@param kwargs: Keyword arguments to pass to the method constructor. | entailment |
def handle(self, request):
"""Handle an HTTP request for executing an API call.
This method authenticates the request checking its signature, and then
calls the C{execute} method, passing it a L{Call} object set with the
principal for the authenticated user and the generic parameters
extracted from the request.
@param request: The L{HTTPRequest} to handle.
"""
request.id = str(uuid4())
deferred = maybeDeferred(self._validate, request)
deferred.addCallback(self.execute)
def write_response(response):
request.setHeader("Content-Length", str(len(response)))
request.setHeader("Content-Type", self.content_type)
# Prevent browsers from trying to guess a different content type.
request.setHeader("X-Content-Type-Options", "nosniff")
request.write(response)
request.finish()
return response
def write_error(failure):
if failure.check(APIError):
status = failure.value.status
# Don't log the stack traces for 4xx responses.
if status < 400 or status >= 500:
log.err(failure)
else:
log.msg("status: %s message: %s" % (
status, safe_str(failure.value)))
body = failure.value.response
if body is None:
body = self.dump_error(failure.value, request)
else:
# If the error is a generic one (not an APIError), log the
# message , but don't send it back to the client, as it could
# contain sensitive information. Send a generic server error
# message instead.
log.err(failure)
body = "Server error"
status = 500
request.setResponseCode(status)
write_response(body)
deferred.addCallback(write_response)
deferred.addErrback(write_error)
return deferred | Handle an HTTP request for executing an API call.
This method authenticates the request checking its signature, and then
calls the C{execute} method, passing it a L{Call} object set with the
principal for the authenticated user and the generic parameters
extracted from the request.
@param request: The L{HTTPRequest} to handle. | entailment |
def execute(self, call):
"""Execute an API L{Call}.
At this point the request has been authenticated and C{call.principal}
is set with the L{Principal} for the L{User} requesting the call.
@return: The response to write in the request for the given L{Call}.
@raises: An L{APIError} in case the execution fails, sporting an error
message the HTTP status code to return.
"""
method = self.get_method(call)
deferred = maybeDeferred(self.authorize, method, call)
deferred.addCallback(lambda _: method.invoke(call))
return deferred.addCallback(self.dump_result) | Execute an API L{Call}.
At this point the request has been authenticated and C{call.principal}
is set with the L{Principal} for the L{User} requesting the call.
@return: The response to write in the request for the given L{Call}.
@raises: An L{APIError} in case the execution fails, sporting an error
message the HTTP status code to return. | entailment |
def get_call_arguments(self, request):
"""
Get call arguments from a request. Override this if you want to use a
wire format different from AWS's.
The return value is a dictionary with three keys: 'transport_args',
'handler_args', and 'raw_args'.
The value of 'transport_args' must be a dictionary with the following
keys:
- action
- access_key_id
- timestamp
- expires
- version
- signature_method
- signature
- signature_version
The value of 'handler_args' should be the application arguments that
are meant to be passed to the action handler.
The value of 'raw_args', the unprocessed arguments, are used for
signature verification. This should be the same dictionary of data that
the client used to sign the request. Note that this data must not
contain the signature itself.
"""
params = dict((k, v[-1]) for k, v in request.args.iteritems())
args, rest = self.schema.extract(params)
# Get rid of Signature so it doesn't mess with signature verification
params.pop("Signature")
result = {
"transport_args": {
"action": args.Action,
"access_key_id": args.AWSAccessKeyId,
"timestamp": args.Timestamp,
"expires": args.Expires,
"version": args.Version,
"signature_method": args.SignatureMethod,
"signature": args.Signature,
"signature_version": args.SignatureVersion},
"handler_args": rest,
"raw_args": params
}
return result | Get call arguments from a request. Override this if you want to use a
wire format different from AWS's.
The return value is a dictionary with three keys: 'transport_args',
'handler_args', and 'raw_args'.
The value of 'transport_args' must be a dictionary with the following
keys:
- action
- access_key_id
- timestamp
- expires
- version
- signature_method
- signature
- signature_version
The value of 'handler_args' should be the application arguments that
are meant to be passed to the action handler.
The value of 'raw_args', the unprocessed arguments, are used for
signature verification. This should be the same dictionary of data that
the client used to sign the request. Note that this data must not
contain the signature itself. | entailment |
def _validate(self, request):
"""Validate an L{HTTPRequest} before executing it.
The following conditions are checked:
- The request contains all the generic parameters.
- The action specified in the request is a supported one.
- The signature mechanism is a supported one.
- The provided signature matches the one calculated using the locally
stored secret access key for the user.
- The signature hasn't expired.
@return: The validated L{Call}, set with its default arguments and the
the principal of the accessing L{User}.
"""
call_arguments = self.get_call_arguments(request)
args = call_arguments["transport_args"]
rest = call_arguments["handler_args"]
params = call_arguments["raw_args"]
self._validate_generic_parameters(args)
def create_call(principal):
self._validate_principal(principal, args)
self._validate_signature(request, principal, args, params)
return Call(raw_params=rest,
principal=principal,
action=args["action"],
version=args["version"],
id=request.id)
deferred = maybeDeferred(self.get_principal, args["access_key_id"])
deferred.addCallback(create_call)
return deferred | Validate an L{HTTPRequest} before executing it.
The following conditions are checked:
- The request contains all the generic parameters.
- The action specified in the request is a supported one.
- The signature mechanism is a supported one.
- The provided signature matches the one calculated using the locally
stored secret access key for the user.
- The signature hasn't expired.
@return: The validated L{Call}, set with its default arguments and the
the principal of the accessing L{User}. | entailment |
def _validate_generic_parameters(self, args):
"""Validate the generic request parameters.
@param args: Parsed schema arguments.
@raises APIError: In the following cases:
- Action is not included in C{self.actions}
- SignatureVersion is not included in C{self.signature_versions}
- Expires and Timestamp are present
- Expires is before the current time
- Timestamp is older than 15 minutes.
"""
utc_now = self.get_utc_time()
if getattr(self, "actions", None) is not None:
# Check the deprecated 'actions' attribute
if not args["action"] in self.actions:
raise APIError(400, "InvalidAction", "The action %s is not "
"valid for this web service." % args["action"])
else:
self.registry.check(args["action"], args["version"])
if not args["signature_version"] in self.signature_versions:
raise APIError(403, "InvalidSignature", "SignatureVersion '%s' "
"not supported" % args["signature_version"])
if args["expires"] and args["timestamp"]:
raise APIError(400, "InvalidParameterCombination",
"The parameter Timestamp cannot be used with "
"the parameter Expires")
if args["expires"] and args["expires"] < utc_now:
raise APIError(400,
"RequestExpired",
"Request has expired. Expires date is %s" % (
args["expires"].strftime(self.time_format)))
if (args["timestamp"]
and args["timestamp"] + timedelta(minutes=15) < utc_now):
raise APIError(400,
"RequestExpired",
"Request has expired. Timestamp date is %s" % (
args["timestamp"].strftime(self.time_format))) | Validate the generic request parameters.
@param args: Parsed schema arguments.
@raises APIError: In the following cases:
- Action is not included in C{self.actions}
- SignatureVersion is not included in C{self.signature_versions}
- Expires and Timestamp are present
- Expires is before the current time
- Timestamp is older than 15 minutes. | entailment |
def _validate_signature(self, request, principal, args, params):
"""Validate the signature."""
creds = AWSCredentials(principal.access_key, principal.secret_key)
endpoint = AWSServiceEndpoint()
endpoint.set_method(request.method)
endpoint.set_canonical_host(request.getHeader("Host"))
path = request.path
if self.path is not None:
path = "%s/%s" % (self.path.rstrip("/"), path.lstrip("/"))
endpoint.set_path(path)
signature = Signature(creds, endpoint, params,
signature_method=args["signature_method"],
signature_version=args["signature_version"]
)
if signature.compute() != args["signature"]:
raise APIError(403, "SignatureDoesNotMatch",
"The request signature we calculated does not "
"match the signature you provided. Check your "
"key and signing method.") | Validate the signature. | entailment |
def render_GET(self, request):
"""Handle a GET request."""
if not request.args:
request.setHeader("Content-Type", "text/plain")
return self.get_status_text()
else:
self.handle(request)
return NOT_DONE_YET | Handle a GET request. | entailment |
def get_exitcode_reactor():
"""
This is only neccesary until a fix like the one outlined here is
implemented for Twisted:
http://twistedmatrix.com/trac/ticket/2182
"""
from twisted.internet.main import installReactor
from twisted.internet.selectreactor import SelectReactor
class ExitCodeReactor(SelectReactor):
def stop(self, exitStatus=0):
super(ExitCodeReactor, self).stop()
self.exitStatus = exitStatus
def run(self, *args, **kwargs):
super(ExitCodeReactor, self).run(*args, **kwargs)
return self.exitStatus
reactor = ExitCodeReactor()
installReactor(reactor)
return reactor | This is only neccesary until a fix like the one outlined here is
implemented for Twisted:
http://twistedmatrix.com/trac/ticket/2182 | entailment |
def getSignatureKey(key, dateStamp, regionName, serviceName):
"""
Generate the signing key for AWS V4 requests.
@param key: The secret key to use.
@type key: L{bytes}
@param dateStamp: The UTC date and time, serialized as an AWS date
stamp.
@type dateStamp: L{bytes}
@param regionName: The name of the region.
@type regionName: L{bytes}
@param serviceName: The name of the service to which the request
will be sent.
@type serviceName: L{bytes}
@return: The signature.
@rtype: L{bytes}
"""
kDate = sign((b'AWS4' + key), dateStamp)
kRegion = sign(kDate, regionName)
kService = sign(kRegion, serviceName)
kSigning = sign(kService, b'aws4_request')
return kSigning | Generate the signing key for AWS V4 requests.
@param key: The secret key to use.
@type key: L{bytes}
@param dateStamp: The UTC date and time, serialized as an AWS date
stamp.
@type dateStamp: L{bytes}
@param regionName: The name of the region.
@type regionName: L{bytes}
@param serviceName: The name of the service to which the request
will be sent.
@type serviceName: L{bytes}
@return: The signature.
@rtype: L{bytes} | entailment |
def _make_canonical_uri(parsed):
"""
Return the canonical URI for a parsed URL.
@param parsed: The parsed URL from which to extract the canonical
URI
@type parsed: L{urlparse.ParseResult}
@return: The canonical URI.
@rtype: L{str}
"""
path = urllib.quote(parsed.path)
canonical_parsed = parsed._replace(path=path,
params='', query='', fragment='')
return urlparse.urlunparse(canonical_parsed) | Return the canonical URI for a parsed URL.
@param parsed: The parsed URL from which to extract the canonical
URI
@type parsed: L{urlparse.ParseResult}
@return: The canonical URI.
@rtype: L{str} | entailment |
def _make_canonical_query_string(parsed):
"""
Return the canonical query string for a parsed URL.
@param parsed: The parsed URL from which to extract the canonical
query string.
@type parsed: L{urlparse.ParseResult}
@return: The canonical query string.
@rtype: L{str}
"""
query_params = urlparse.parse_qs(parsed.query, keep_blank_values=True)
sorted_query_params = sorted((k, v)
for k, vs in query_params.items()
for v in vs)
return urllib.urlencode(sorted_query_params) | Return the canonical query string for a parsed URL.
@param parsed: The parsed URL from which to extract the canonical
query string.
@type parsed: L{urlparse.ParseResult}
@return: The canonical query string.
@rtype: L{str} | entailment |
def _make_canonical_headers(headers, headers_to_sign):
"""
Return canonicalized headers.
@param headers: The request headers.
@type headers: L{dict}
@param headers_to_sign: A sequence of header names that should be
signed.
@type headers_to_sign: A sequence of L{bytes}
@return: The canonicalized headers.
@rtype: L{bytes}
"""
pairs = []
for name in headers_to_sign:
if name not in headers:
continue
values = headers[name]
if not isinstance(values, (list, tuple)):
values = [values]
comma_values = b','.join(' '.join(line.strip().split())
for value in values
for line in value.splitlines())
pairs.append((name.lower(), comma_values))
sorted_pairs = sorted(b'%s:%s' % (name, value)
for name, value in sorted(pairs))
return b'\n'.join(sorted_pairs) + b'\n' | Return canonicalized headers.
@param headers: The request headers.
@type headers: L{dict}
@param headers_to_sign: A sequence of header names that should be
signed.
@type headers_to_sign: A sequence of L{bytes}
@return: The canonicalized headers.
@rtype: L{bytes} | entailment |
def _make_signed_headers(headers, headers_to_sign):
"""
Return a semicolon-delimited list of headers to sign.
@param headers: The request headers.
@type headers: L{dict}
@param headers_to_sign: A sequence of header names that should be
signed.
@type headers_to_sign: L{bytes}
@return: The semicolon-delimited list of headers.
@rtype: L{bytes}
"""
return b";".join(header.lower() for header in sorted(headers_to_sign)
if header in headers) | Return a semicolon-delimited list of headers to sign.
@param headers: The request headers.
@type headers: L{dict}
@param headers_to_sign: A sequence of header names that should be
signed.
@type headers_to_sign: L{bytes}
@return: The semicolon-delimited list of headers.
@rtype: L{bytes} | entailment |
def _make_authorization_header(region,
service,
canonical_request,
credentials,
instant):
"""
Construct an AWS version 4 authorization value for use in an
C{Authorization} header.
@param region: The AWS region name (e.g., C{'us-east-1'}).
@type region: L{str}
@param service: The AWS service's name (e.g., C{'s3'}).
@type service: L{str}
@param canonical_request: The canonical form of the request.
@type canonical_request: L{_CanonicalRequest} (use
L{_CanonicalRequest.from_payload_and_headers})
@param credentials: The AWS credentials.
@type credentials: L{txaws.credentials.AWSCredentials}
@param instant: The current UTC date and time
@type instant: A naive local L{datetime.datetime} (as returned by
L{datetime.datetime.utcnow})
@return: A value suitable for use in an C{Authorization} header
@rtype: L{bytes}
"""
date_stamp = makeDateStamp(instant)
amz_date = makeAMZDate(instant)
scope = _CredentialScope(
date_stamp=date_stamp,
region=region,
service=service
)
signable = _SignableAWS4HMAC256Token(
amz_date,
scope,
canonical_request,
)
signature = signable.signature(
getSignatureKey(credentials.secret_key,
date_stamp,
region,
service)
)
v4credential = _Credential(
access_key=credentials.access_key,
credential_scope=scope,
)
return (
b"%s " % (_SignableAWS4HMAC256Token.ALGORITHM,) +
b", ".join([
b"Credential=%s" % (v4credential.serialize(),),
b"SignedHeaders=%s" % (canonical_request.signed_headers,),
b"Signature=%s" % (signature,),
])) | Construct an AWS version 4 authorization value for use in an
C{Authorization} header.
@param region: The AWS region name (e.g., C{'us-east-1'}).
@type region: L{str}
@param service: The AWS service's name (e.g., C{'s3'}).
@type service: L{str}
@param canonical_request: The canonical form of the request.
@type canonical_request: L{_CanonicalRequest} (use
L{_CanonicalRequest.from_payload_and_headers})
@param credentials: The AWS credentials.
@type credentials: L{txaws.credentials.AWSCredentials}
@param instant: The current UTC date and time
@type instant: A naive local L{datetime.datetime} (as returned by
L{datetime.datetime.utcnow})
@return: A value suitable for use in an C{Authorization} header
@rtype: L{bytes} | entailment |
def linear_interpolation(first, last, steps):
"""Interpolates all missing values using linear interpolation.
:param numeric first: Start value for the interpolation.
:param numeric last: End Value for the interpolation
:param integer steps: Number of missing values that have to be calculated.
:return: Returns a list of floats containing only the missing values.
:rtype: list
:todo: Define a more general interface!
"""
result = []
for step in xrange(0, steps):
fpart = (steps - step) * first
lpart = (step + 1) * last
value = (fpart + lpart) / float(steps + 1)
result.append(value)
return result | Interpolates all missing values using linear interpolation.
:param numeric first: Start value for the interpolation.
:param numeric last: End Value for the interpolation
:param integer steps: Number of missing values that have to be calculated.
:return: Returns a list of floats containing only the missing values.
:rtype: list
:todo: Define a more general interface! | entailment |
def upload_project_run(upload_context):
"""
Function run by CreateProjectCommand to create the project.
Runs in a background process.
:param upload_context: UploadContext: contains data service setup and project name to create.
"""
data_service = upload_context.make_data_service()
project_name = upload_context.project_name_or_id.get_name_or_raise()
result = data_service.create_project(project_name, project_name)
return result.json()['id'] | Function run by CreateProjectCommand to create the project.
Runs in a background process.
:param upload_context: UploadContext: contains data service setup and project name to create. | entailment |
def upload_folder_run(upload_context):
"""
Function run by CreateFolderCommand to create the folder.
Runs in a background process.
:param upload_context: UploadContext: contains data service setup and folder details.
"""
data_service = upload_context.make_data_service()
folder_name, parent_kind, parent_remote_id = upload_context.params
result = data_service.create_folder(folder_name, parent_kind, parent_remote_id)
return result.json()['id'] | Function run by CreateFolderCommand to create the folder.
Runs in a background process.
:param upload_context: UploadContext: contains data service setup and folder details. | entailment |
def create_small_file(upload_context):
"""
Function run by CreateSmallFileCommand to create the file.
Runs in a background process.
:param upload_context: UploadContext: contains data service setup and file details.
:return dict: DukeDS file data
"""
data_service = upload_context.make_data_service()
parent_data, path_data, remote_file_id = upload_context.params
# The small file will fit into one chunk so read into memory and hash it.
chunk = path_data.read_whole_file()
hash_data = path_data.get_hash()
# Talk to data service uploading chunk and creating the file.
upload_operations = FileUploadOperations(data_service, upload_context)
upload_id, url_info = upload_operations.create_upload_and_chunk_url(
upload_context.project_id, path_data, hash_data, storage_provider_id=upload_context.config.storage_provider_id)
upload_operations.send_file_external(url_info, chunk)
return upload_operations.finish_upload(upload_id, hash_data, parent_data, remote_file_id) | Function run by CreateSmallFileCommand to create the file.
Runs in a background process.
:param upload_context: UploadContext: contains data service setup and file details.
:return dict: DukeDS file data | entailment |
def rebuild_data_service(config, data_service_auth_data):
"""
Deserialize value into DataServiceApi object.
:param config:
:param data_service_auth_data:
:return:
"""
auth = DataServiceAuth(config)
auth.set_auth_data(data_service_auth_data)
return DataServiceApi(auth, config.url) | Deserialize value into DataServiceApi object.
:param config:
:param data_service_auth_data:
:return: | entailment |
def run(self, local_project):
"""
Upload a project by uploading project, folders, and small files then uploading the large files.
:param local_project: LocalProject: project to upload
"""
# Walk project adding small items to runner saving large items to large_items
ProjectWalker.walk_project(local_project, self)
# Run small items in parallel
self.runner.run()
# Run parts of each large item in parallel
self.upload_large_items() | Upload a project by uploading project, folders, and small files then uploading the large files.
:param local_project: LocalProject: project to upload | entailment |
def visit_file(self, item, parent):
"""
If file is large add it to the large items to be processed after small task list.
else file is small add it to the small task list.
"""
if self.is_large_file(item):
self.large_items.append((item, parent))
else:
self.small_item_task_builder.visit_file(item, parent) | If file is large add it to the large items to be processed after small task list.
else file is small add it to the small task list. | entailment |
def upload_large_items(self):
"""
Upload files that were too large.
"""
for local_file, parent in self.large_items:
if local_file.need_to_send:
self.process_large_file(local_file, parent) | Upload files that were too large. | entailment |
def process_large_file(self, local_file, parent):
"""
Upload a single file using multiple processes to upload multiple chunks at the same time.
Updates local_file with it's remote_id when done.
:param local_file: LocalFile: file we are uploading
:param parent: LocalFolder/LocalProject: parent of the file
"""
file_content_sender = FileUploader(self.settings.config, self.settings.data_service, local_file,
self.settings.watcher, self.settings.file_upload_post_processor)
remote_id = file_content_sender.upload(self.settings.project_id, parent.kind, parent.remote_id)
local_file.set_remote_id_after_send(remote_id) | Upload a single file using multiple processes to upload multiple chunks at the same time.
Updates local_file with it's remote_id when done.
:param local_file: LocalFile: file we are uploading
:param parent: LocalFolder/LocalProject: parent of the file | entailment |
def visit_project(self, item):
"""
Adds create project command to task runner if project doesn't already exist.
"""
if not item.remote_id:
command = CreateProjectCommand(self.settings, item)
self.task_runner_add(None, item, command)
else:
self.settings.project_id = item.remote_id | Adds create project command to task runner if project doesn't already exist. | entailment |
def visit_folder(self, item, parent):
"""
Adds create folder command to task runner if folder doesn't already exist.
"""
if not item.remote_id:
command = CreateFolderCommand(self.settings, item, parent)
self.task_runner_add(parent, item, command) | Adds create folder command to task runner if folder doesn't already exist. | entailment |
def visit_file(self, item, parent):
"""
If file is small add create small file command otherwise raise error.
Large files shouldn't be passed to SmallItemUploadTaskBuilder.
"""
if item.need_to_send:
if item.size > self.settings.config.upload_bytes_per_chunk:
msg = "Programmer Error: Trying to upload large file as small item size:{} name:{}"
raise ValueError(msg.format(item.size, item.name))
else:
command = CreateSmallFileCommand(self.settings, item, parent,
self.settings.file_upload_post_processor)
self.task_runner_add(parent, item, command) | If file is small add create small file command otherwise raise error.
Large files shouldn't be passed to SmallItemUploadTaskBuilder. | entailment |
def task_runner_add(self, parent, item, command):
"""
Add command to task runner with parent's task id createing a task id for item/command.
Save this item's id to a lookup.
:param parent: object: parent of item
:param item: object: item we are running command on
:param command: parallel TaskCommand we want to have run
"""
parent_task_id = self.item_to_id.get(parent)
task_id = self.task_runner.add(parent_task_id, command)
self.item_to_id[item] = task_id | Add command to task runner with parent's task id createing a task id for item/command.
Save this item's id to a lookup.
:param parent: object: parent of item
:param item: object: item we are running command on
:param command: parallel TaskCommand we want to have run | entailment |
def after_run(self, result_id):
"""
Save uuid associated with project we just created.
:param result_id: str: uuid of the project
"""
self.local_project.set_remote_id_after_send(result_id)
self.settings.project_id = result_id | Save uuid associated with project we just created.
:param result_id: str: uuid of the project | entailment |
def create_context(self, message_queue, task_id):
"""
Create values to be used by upload_folder_run function.
:param message_queue: Queue: queue background process can send messages to us on
:param task_id: int: id of this command's task so message will be routed correctly
"""
params = (self.remote_folder.name, self.parent.kind, self.parent.remote_id)
return UploadContext(self.settings, params, message_queue, task_id) | Create values to be used by upload_folder_run function.
:param message_queue: Queue: queue background process can send messages to us on
:param task_id: int: id of this command's task so message will be routed correctly | entailment |
def create_context(self, message_queue, task_id):
"""
Create values to be used by create_small_file function.
:param message_queue: Queue: queue background process can send messages to us on
:param task_id: int: id of this command's task so message will be routed correctly
"""
parent_data = ParentData(self.parent.kind, self.parent.remote_id)
path_data = self.local_file.get_path_data()
params = parent_data, path_data, self.local_file.remote_id
return UploadContext(self.settings, params, message_queue, task_id) | Create values to be used by create_small_file function.
:param message_queue: Queue: queue background process can send messages to us on
:param task_id: int: id of this command's task so message will be routed correctly | entailment |
def after_run(self, remote_file_data):
"""
Save uuid of file to our LocalFile
:param remote_file_data: dict: DukeDS file data
"""
if self.file_upload_post_processor:
self.file_upload_post_processor.run(self.settings.data_service, remote_file_data)
remote_file_id = remote_file_data['id']
self.settings.watcher.transferring_item(self.local_file)
self.local_file.set_remote_id_after_send(remote_file_id) | Save uuid of file to our LocalFile
:param remote_file_data: dict: DukeDS file data | entailment |
def on_message(self, started_waiting):
"""
Receives started_waiting boolean from create_small_file method and notifies project_status_monitor in settings.
:param started_waiting: boolean: True when we start waiting, False when done
"""
watcher = self.settings.watcher
if started_waiting:
watcher.start_waiting()
else:
watcher.done_waiting() | Receives started_waiting boolean from create_small_file method and notifies project_status_monitor in settings.
:param started_waiting: boolean: True when we start waiting, False when done | entailment |
def _visit_recur(self, item):
"""
Recursively visits children of item.
:param item: object: project, folder or file we will add to upload_items if necessary.
"""
if item.kind == KindType.file_str:
if item.need_to_send:
self.add_upload_item(item.path)
else:
if item.kind == KindType.project_str:
pass
else:
if not item.remote_id:
self.add_upload_item(item.path)
for child in item.children:
self._visit_recur(child) | Recursively visits children of item.
:param item: object: project, folder or file we will add to upload_items if necessary. | entailment |
def add_project_name_arg(arg_parser, required, help_text):
"""
Adds project_name parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to.
:param help_text: str label displayed in usage
"""
arg_parser.add_argument("-p", '--project-name',
metavar='ProjectName',
type=to_unicode,
dest='project_name',
help=help_text,
required=required) | Adds project_name parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to.
:param help_text: str label displayed in usage | entailment |
def add_project_name_or_id_arg(arg_parser, required=True, help_text_suffix="manage"):
"""
Adds project name or project id argument. These two are mutually exclusive.
:param arg_parser:
:param required:
:param help_text:
:return:
"""
project_name_or_id = arg_parser.add_mutually_exclusive_group(required=required)
name_help_text = "Name of the project to {}.".format(help_text_suffix)
add_project_name_arg(project_name_or_id, required=False, help_text=name_help_text)
id_help_text = "ID of the project to {}.".format(help_text_suffix)
add_project_id_arg(project_name_or_id, required=False, help_text=id_help_text) | Adds project name or project id argument. These two are mutually exclusive.
:param arg_parser:
:param required:
:param help_text:
:return: | entailment |
def _paths_must_exists(path):
"""
Raises error if path doesn't exist.
:param path: str path to check
:return: str same path passed in
"""
path = to_unicode(path)
if not os.path.exists(path):
raise argparse.ArgumentTypeError("{} is not a valid file/folder.".format(path))
return path | Raises error if path doesn't exist.
:param path: str path to check
:return: str same path passed in | entailment |
def _path_has_ok_chars(path):
"""
Validate path for invalid characters.
:param path: str possible filesystem path
:return: path if it was ok otherwise raises error
"""
basename = os.path.basename(path)
if any([bad_char in basename for bad_char in INVALID_PATH_CHARS]):
raise argparse.ArgumentTypeError("{} contains invalid characters for a directory.".format(path))
return path | Validate path for invalid characters.
:param path: str possible filesystem path
:return: path if it was ok otherwise raises error | entailment |
def _add_auth_role_arg(arg_parser, default_permissions):
"""
Adds optional auth_role parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to.
:param default_permissions: default value to use for this argument
"""
help_text = "Specifies which project permissions to give to the user. Example: 'project_admin'. "
help_text += "See command list_auth_roles for AuthRole values."
arg_parser.add_argument("--auth-role",
metavar='AuthRole',
type=to_unicode,
dest='auth_role',
help=help_text,
default=default_permissions) | Adds optional auth_role parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to.
:param default_permissions: default value to use for this argument | entailment |
def _add_project_filter_auth_role_arg(arg_parser):
"""
Adds optional auth_role filtering parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to.
"""
help_text = "Filters project listing to just those projects with the specified role. "
help_text += "See command list_auth_roles for AuthRole values."
arg_parser.add_argument("--auth-role",
metavar='AuthRole',
type=to_unicode,
dest='auth_role',
help=help_text,
default=None) | Adds optional auth_role filtering parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to. | entailment |
def _add_resend_arg(arg_parser, resend_help):
"""
Adds resend parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to.
:param type_str
"""
arg_parser.add_argument("--resend",
action='store_true',
default=False,
dest='resend',
help=resend_help) | Adds resend parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to.
:param type_str | entailment |
def _add_include_arg(arg_parser):
"""
Adds optional repeatable include parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to.
"""
arg_parser.add_argument("--include",
metavar='Path',
action='append',
type=to_unicode,
dest='include_paths',
help="Specifies a single path to include. This argument can be repeated.",
default=[]) | Adds optional repeatable include parameter to a parser.
:param arg_parser: ArgumentParser parser to add this argument to. | entailment |
def _add_message_file(arg_parser, help_text):
"""
Add mesage file argument with help_text to arg_parser.
:param arg_parser: ArgumentParser parser to add this argument to.
:param help_text: str: help text for this argument
"""
arg_parser.add_argument('--msg-file',
type=argparse.FileType('r'),
help=help_text) | Add mesage file argument with help_text to arg_parser.
:param arg_parser: ArgumentParser parser to add this argument to.
:param help_text: str: help text for this argument | entailment |
def register_upload_command(self, upload_func):
"""
Add the upload command to the parser and call upload_func(project_name, folders, follow_symlinks) when chosen.
:param upload_func: func Called when this option is chosen: upload_func(project_name, folders, follow_symlinks).
"""
description = "Uploads local files and folders to a remote host."
upload_parser = self.subparsers.add_parser('upload', description=description)
_add_dry_run(upload_parser, help_text="Instead of uploading displays a list of folders/files that "
"need to be uploaded.")
add_project_name_or_id_arg(upload_parser, help_text_suffix="upload files/folders to.")
_add_folders_positional_arg(upload_parser)
_add_follow_symlinks_arg(upload_parser)
upload_parser.set_defaults(func=upload_func) | Add the upload command to the parser and call upload_func(project_name, folders, follow_symlinks) when chosen.
:param upload_func: func Called when this option is chosen: upload_func(project_name, folders, follow_symlinks). | entailment |
def register_add_user_command(self, add_user_func):
"""
Add the add-user command to the parser and call add_user_func(project_name, user_full_name, auth_role)
when chosen.
:param add_user_func: func Called when this option is chosen: upload_func(project_name, user_full_name, auth_role).
"""
description = "Gives user permission to access a remote project."
add_user_parser = self.subparsers.add_parser('add-user', description=description)
add_project_name_or_id_arg(add_user_parser, help_text_suffix="add a user to")
user_or_email = add_user_parser.add_mutually_exclusive_group(required=True)
add_user_arg(user_or_email)
add_email_arg(user_or_email)
_add_auth_role_arg(add_user_parser, default_permissions='project_admin')
add_user_parser.set_defaults(func=add_user_func) | Add the add-user command to the parser and call add_user_func(project_name, user_full_name, auth_role)
when chosen.
:param add_user_func: func Called when this option is chosen: upload_func(project_name, user_full_name, auth_role). | entailment |
def register_remove_user_command(self, remove_user_func):
"""
Add the remove-user command to the parser and call remove_user_func(project_name, user_full_name) when chosen.
:param remove_user_func: func Called when this option is chosen: remove_user_func(project_name, user_full_name).
"""
description = "Removes user permission to access a remote project."
remove_user_parser = self.subparsers.add_parser('remove-user', description=description)
add_project_name_or_id_arg(remove_user_parser, help_text_suffix="remove a user from")
user_or_email = remove_user_parser.add_mutually_exclusive_group(required=True)
add_user_arg(user_or_email)
add_email_arg(user_or_email)
remove_user_parser.set_defaults(func=remove_user_func) | Add the remove-user command to the parser and call remove_user_func(project_name, user_full_name) when chosen.
:param remove_user_func: func Called when this option is chosen: remove_user_func(project_name, user_full_name). | entailment |
def register_download_command(self, download_func):
"""
Add 'download' command for downloading a project to a directory.
For non empty directories it will download remote files replacing local files.
:param download_func: function to run when user choses this option
"""
description = "Download the contents of a remote remote project to a local folder."
download_parser = self.subparsers.add_parser('download', description=description)
add_project_name_or_id_arg(download_parser, help_text_suffix="download")
_add_folder_positional_arg(download_parser)
include_or_exclude = download_parser.add_mutually_exclusive_group(required=False)
_add_include_arg(include_or_exclude)
_add_exclude_arg(include_or_exclude)
download_parser.set_defaults(func=download_func) | Add 'download' command for downloading a project to a directory.
For non empty directories it will download remote files replacing local files.
:param download_func: function to run when user choses this option | entailment |
def register_share_command(self, share_func):
"""
Add 'share' command for adding view only project permissions and sending email via another service.
:param share_func: function to run when user choses this option
"""
description = "Share a project with another user with specified permissions. " \
"Sends the other user an email message via D4S2 service. " \
"If not specified this command gives user download permissions."
share_parser = self.subparsers.add_parser('share', description=description)
add_project_name_or_id_arg(share_parser)
user_or_email = share_parser.add_mutually_exclusive_group(required=True)
add_user_arg(user_or_email)
add_email_arg(user_or_email)
_add_auth_role_arg(share_parser, default_permissions='file_downloader')
_add_resend_arg(share_parser, "Resend share")
_add_message_file(share_parser, "Filename containing a message to be sent with the share. "
"Pass - to read from stdin.")
share_parser.set_defaults(func=share_func) | Add 'share' command for adding view only project permissions and sending email via another service.
:param share_func: function to run when user choses this option | entailment |
def register_deliver_command(self, deliver_func):
"""
Add 'deliver' command for transferring a project to another user.,
:param deliver_func: function to run when user choses this option
"""
description = "Initiate delivery of a project to another user. Removes other user's current permissions. " \
"Send message to D4S2 service to send email and allow access to the project once user " \
"acknowledges receiving the data."
deliver_parser = self.subparsers.add_parser('deliver', description=description)
add_project_name_or_id_arg(deliver_parser)
user_or_email = deliver_parser.add_mutually_exclusive_group(required=True)
add_user_arg(user_or_email)
add_email_arg(user_or_email)
add_share_usernames_arg(deliver_parser)
add_share_emails_arg(deliver_parser)
_add_copy_project_arg(deliver_parser)
_add_resend_arg(deliver_parser, "Resend delivery")
include_or_exclude = deliver_parser.add_mutually_exclusive_group(required=False)
_add_include_arg(include_or_exclude)
_add_exclude_arg(include_or_exclude)
_add_message_file(deliver_parser, "Filename containing a message to be sent with the delivery. "
"Pass - to read from stdin.")
deliver_parser.set_defaults(func=deliver_func) | Add 'deliver' command for transferring a project to another user.,
:param deliver_func: function to run when user choses this option | entailment |
def register_list_command(self, list_func):
"""
Add 'list' command to get a list of projects or details about one project.
:param list_func: function: run when user choses this option.
"""
description = "Show a list of project names or folders/files of a single project."
list_parser = self.subparsers.add_parser('list', description=description)
project_name_or_auth_role = list_parser.add_mutually_exclusive_group(required=False)
_add_project_filter_auth_role_arg(project_name_or_auth_role)
add_project_name_or_id_arg(project_name_or_auth_role, required=False,
help_text_suffix="show details for")
_add_long_format_option(list_parser, 'Display long format.')
list_parser.set_defaults(func=list_func) | Add 'list' command to get a list of projects or details about one project.
:param list_func: function: run when user choses this option. | entailment |
def register_delete_command(self, delete_func):
"""
Add 'delete' command delete a project from the remote store.
:param delete_func: function: run when user choses this option.
"""
description = "Permanently delete a project."
delete_parser = self.subparsers.add_parser('delete', description=description)
add_project_name_or_id_arg(delete_parser, help_text_suffix="delete")
_add_force_arg(delete_parser, "Do not prompt before deleting.")
delete_parser.set_defaults(func=delete_func) | Add 'delete' command delete a project from the remote store.
:param delete_func: function: run when user choses this option. | entailment |
def register_list_auth_roles_command(self, list_auth_roles_func):
"""
Add 'list_auth_roles' command to list project authorization roles that can be used with add_user.
:param list_auth_roles_func: function: run when user choses this option.
"""
description = "List authorization roles for use with add_user command."
list_auth_roles_parser = self.subparsers.add_parser('list-auth-roles', description=description)
list_auth_roles_parser.set_defaults(func=list_auth_roles_func) | Add 'list_auth_roles' command to list project authorization roles that can be used with add_user.
:param list_auth_roles_func: function: run when user choses this option. | entailment |
def run_command(self, args):
"""
Parse command line arguments and run function registered for the appropriate command.
:param args: [str] command line arguments
"""
parsed_args = self.parser.parse_args(args)
if hasattr(parsed_args, 'func'):
parsed_args.func(parsed_args)
else:
self.parser.print_help() | Parse command line arguments and run function registered for the appropriate command.
:param args: [str] command line arguments | entailment |
def resample_signal(self, data_frame):
"""
Convenience method for frequency conversion and resampling of data frame.
Object must have a DatetimeIndex. After re-sampling, this methods interpolate the time magnitude sum
acceleration values and the x,y,z values of the data frame acceleration
:param data_frame: the data frame to resample
:type data_frame: pandas.DataFrame
:return: the resampled data frame
:rtype: pandas.DataFrame
"""
df_resampled = data_frame.resample(str(1 / self.sampling_frequency) + 'S').mean()
f = interpolate.interp1d(data_frame.td, data_frame.mag_sum_acc)
new_timestamp = np.arange(data_frame.td[0], data_frame.td[-1], 1.0 / self.sampling_frequency)
df_resampled.mag_sum_acc = f(new_timestamp)
logging.debug("resample signal")
return df_resampled.interpolate(method='linear') | Convenience method for frequency conversion and resampling of data frame.
Object must have a DatetimeIndex. After re-sampling, this methods interpolate the time magnitude sum
acceleration values and the x,y,z values of the data frame acceleration
:param data_frame: the data frame to resample
:type data_frame: pandas.DataFrame
:return: the resampled data frame
:rtype: pandas.DataFrame | entailment |
def filter_signal(self, data_frame, ts='mag_sum_acc'):
"""
This method filters a data frame signal as suggested in :cite:`Kassavetis2015`. First step is to high \
pass filter the data frame using a \
`Butterworth <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.signal.butter.html>`_ \
digital and analog filter. Then this method
filters the data frame along one-dimension using a \
`digital filter <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html>`_.
:param data_frame: the input data frame
:type data_frame: pandas.DataFrame
:param ts: time series name of data frame to filter
:type ts: str
:return data_frame: adds a column named 'filtered_signal' to the data frame
:rtype data_frame: pandas.DataFrame
"""
b, a = signal.butter(self.filter_order, 2*self.cutoff_frequency/self.sampling_frequency,'high', analog=False)
filtered_signal = signal.lfilter(b, a, data_frame[ts].values)
data_frame['filtered_signal'] = filtered_signal
logging.debug("filter signal")
return data_frame | This method filters a data frame signal as suggested in :cite:`Kassavetis2015`. First step is to high \
pass filter the data frame using a \
`Butterworth <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.signal.butter.html>`_ \
digital and analog filter. Then this method
filters the data frame along one-dimension using a \
`digital filter <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html>`_.
:param data_frame: the input data frame
:type data_frame: pandas.DataFrame
:param ts: time series name of data frame to filter
:type ts: str
:return data_frame: adds a column named 'filtered_signal' to the data frame
:rtype data_frame: pandas.DataFrame | entailment |
def fft_signal(self, data_frame):
"""
This method perform Fast Fourier Transform on the data frame using a \
`hanning window <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.signal.hann.html>`_
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: data frame with a 'filtered_singal', 'transformed_signal' and 'dt' columns
:rtype: pandas.DataFrame
"""
signal_length = len(data_frame.filtered_signal.values)
ll = int(signal_length / 2 - self.window / 2)
rr = int(signal_length / 2 + self.window / 2)
msa = data_frame.filtered_signal[ll:rr].values
hann_window = signal.hann(self.window)
msa_window = (msa * hann_window)
transformed_signal = fft(msa_window)
data = {'filtered_signal': msa_window, 'transformed_signal': transformed_signal,
'dt': data_frame.td[ll:rr].values}
data_frame_fft = pd.DataFrame(data, index=data_frame.index[ll:rr],
columns=['filtered_signal', 'transformed_signal', 'dt'])
logging.debug("fft signal")
return data_frame_fft | This method perform Fast Fourier Transform on the data frame using a \
`hanning window <https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.signal.hann.html>`_
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: data frame with a 'filtered_singal', 'transformed_signal' and 'dt' columns
:rtype: pandas.DataFrame | entailment |
def amplitude_by_fft(self, data_frame):
"""
This methods extract the fft components and sum the ones from lower to upper freq as per \
:cite:`Kassavetis2015`
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return ampl: the ampl
:rtype ampl: float
:return freq: the freq
:rtype freq: float
"""
signal_length = len(data_frame.filtered_signal)
normalised_transformed_signal = data_frame.transformed_signal.values / signal_length
k = np.arange(signal_length)
T = signal_length / self.sampling_frequency
f = k / T # two sides frequency range
f = f[range(int(signal_length / 2))] # one side frequency range
ts = normalised_transformed_signal[range(int(signal_length / 2))]
ampl = sum(abs(ts[(f > self.lower_frequency) & (f < self.upper_frequency)]))
freq = f[abs(ts).argmax(axis=0)]
logging.debug("tremor ampl calculated")
return ampl, freq | This methods extract the fft components and sum the ones from lower to upper freq as per \
:cite:`Kassavetis2015`
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return ampl: the ampl
:rtype ampl: float
:return freq: the freq
:rtype freq: float | entailment |
def amplitude_by_welch(self, data_frame):
"""
This methods uses the Welch method :cite:`Welch1967` to obtain the power spectral density, this is a robust
alternative to using fft_signal & amplitude
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: the ampl
:rtype ampl: float
:return: the freq
:rtype freq: float
"""
frq, Pxx_den = signal.welch(data_frame.filtered_signal.values, self.sampling_frequency, nperseg=self.window)
freq = frq[Pxx_den.argmax(axis=0)]
ampl = sum(Pxx_den[(frq > self.lower_frequency) & (frq < self.upper_frequency)])
logging.debug("tremor amplitude by welch calculated")
return ampl, freq | This methods uses the Welch method :cite:`Welch1967` to obtain the power spectral density, this is a robust
alternative to using fft_signal & amplitude
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: the ampl
:rtype ampl: float
:return: the freq
:rtype freq: float | entailment |
def approximate_entropy(self, x, m=None, r=None):
"""
As in tsfresh \
`approximate_entropy <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L1601>`_
Implements a `vectorized approximate entropy algorithm <https://en.wikipedia.org/wiki/Approximate_entropy>`_
For short time-series this method is highly dependent on the parameters,
but should be stable for N > 2000, see :cite:`Yentes2013`. Other shortcomings and alternatives discussed in \
:cite:`Richman2000`
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param m: Length of compared run of data
:type m: int
:param r: Filtering level, must be positive
:type r: float
:return: Approximate entropy
:rtype: float
"""
if m is None or r is None:
m = 2
r = 0.3
entropy = feature_calculators.approximate_entropy(x, m, r)
logging.debug("approximate entropy by tsfresh calculated")
return entropy | As in tsfresh \
`approximate_entropy <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L1601>`_
Implements a `vectorized approximate entropy algorithm <https://en.wikipedia.org/wiki/Approximate_entropy>`_
For short time-series this method is highly dependent on the parameters,
but should be stable for N > 2000, see :cite:`Yentes2013`. Other shortcomings and alternatives discussed in \
:cite:`Richman2000`
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param m: Length of compared run of data
:type m: int
:param r: Filtering level, must be positive
:type r: float
:return: Approximate entropy
:rtype: float | entailment |
def autocorrelation(self, x, lag):
"""
As in tsfresh `autocorrelation <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L1457>`_
Calculates the autocorrelation of the specified lag, according to the `formula <https://en.wikipedia.org/wiki/\
Autocorrelation#Estimation>`_:
.. math::
\\frac{1}{(n-l)\sigma^{2}} \\sum_{t=1}^{n-l}(X_{t}-\\mu )(X_{t+l}-\\mu)
where :math:`n` is the length of the time series :math:`X_i`, :math:`\sigma^2` its variance and :math:`\mu` its
mean. `l` denotes the lag.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param lag: the lag
:type lag: int
:return: the value of this feature
:rtype: float
"""
# This is important: If a series is passed, the product below is calculated
# based on the index, which corresponds to squaring the series.
if lag is None:
lag = 0
_autoc = feature_calculators.autocorrelation(x, lag)
logging.debug("autocorrelation by tsfresh calculated")
return _autoc | As in tsfresh `autocorrelation <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L1457>`_
Calculates the autocorrelation of the specified lag, according to the `formula <https://en.wikipedia.org/wiki/\
Autocorrelation#Estimation>`_:
.. math::
\\frac{1}{(n-l)\sigma^{2}} \\sum_{t=1}^{n-l}(X_{t}-\\mu )(X_{t+l}-\\mu)
where :math:`n` is the length of the time series :math:`X_i`, :math:`\sigma^2` its variance and :math:`\mu` its
mean. `l` denotes the lag.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param lag: the lag
:type lag: int
:return: the value of this feature
:rtype: float | entailment |
def partial_autocorrelation(self, x, param=None):
"""
As in tsfresh `partial_autocorrelation <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/\
feature_extraction/feature_calculators.py#L308>`_
Calculates the value of the partial autocorrelation function at the given lag. The lag `k` partial \
autocorrelation of a time series :math:`\\lbrace x_t, t = 1 \\ldots T \\rbrace` equals the partial correlation \
of :math:`x_t` and \
:math:`x_{t-k}`, adjusted for the intermediate variables \
:math:`\\lbrace x_{t-1}, \\ldots, x_{t-k+1} \\rbrace` (:cite:`Wilson2015`). \
Following `this notes <https://onlinecourses.science.psu.edu/stat510/node/62>`_, it can be defined as
.. math::
\\alpha_k = \\frac{ Cov(x_t, x_{t-k} | x_{t-1}, \\ldots, x_{t-k+1})}
{\\sqrt{ Var(x_t | x_{t-1}, \\ldots, x_{t-k+1}) Var(x_{t-k} | x_{t-1}, \\ldots, x_{t-k+1} )}}
with (a) :math:`x_t = f(x_{t-1}, \\ldots, x_{t-k+1})` and (b) :math:`x_{t-k} = f(x_{t-1}, \\ldots, x_{t-k+1})` \
being AR(k-1) models that can be fitted by OLS. Be aware that in (a), the regression is done on past values to \
predict :math:`x_t` whereas in (b), future values are used to calculate the past value :math:`x_{t-k}`.\
It is said in :cite:`Wilson2015` that "for an AR(p), the partial autocorrelations [ :math:`\\alpha_k` ] \
will be nonzero for `k<=p` and zero for `k>p`."\
With this property, it is used to determine the lag of an AR-Process.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"lag": val} with int val indicating the lag to be returned
:type param: list
:return: the value of this feature
:rtype: float
"""
if param is None:
param = [{'lag': 3}, {'lag': 5}, {'lag': 6}]
_partialc = feature_calculators.partial_autocorrelation(x, param)
logging.debug("partial autocorrelation by tsfresh calculated")
return _partialc | As in tsfresh `partial_autocorrelation <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/\
feature_extraction/feature_calculators.py#L308>`_
Calculates the value of the partial autocorrelation function at the given lag. The lag `k` partial \
autocorrelation of a time series :math:`\\lbrace x_t, t = 1 \\ldots T \\rbrace` equals the partial correlation \
of :math:`x_t` and \
:math:`x_{t-k}`, adjusted for the intermediate variables \
:math:`\\lbrace x_{t-1}, \\ldots, x_{t-k+1} \\rbrace` (:cite:`Wilson2015`). \
Following `this notes <https://onlinecourses.science.psu.edu/stat510/node/62>`_, it can be defined as
.. math::
\\alpha_k = \\frac{ Cov(x_t, x_{t-k} | x_{t-1}, \\ldots, x_{t-k+1})}
{\\sqrt{ Var(x_t | x_{t-1}, \\ldots, x_{t-k+1}) Var(x_{t-k} | x_{t-1}, \\ldots, x_{t-k+1} )}}
with (a) :math:`x_t = f(x_{t-1}, \\ldots, x_{t-k+1})` and (b) :math:`x_{t-k} = f(x_{t-1}, \\ldots, x_{t-k+1})` \
being AR(k-1) models that can be fitted by OLS. Be aware that in (a), the regression is done on past values to \
predict :math:`x_t` whereas in (b), future values are used to calculate the past value :math:`x_{t-k}`.\
It is said in :cite:`Wilson2015` that "for an AR(p), the partial autocorrelations [ :math:`\\alpha_k` ] \
will be nonzero for `k<=p` and zero for `k>p`."\
With this property, it is used to determine the lag of an AR-Process.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"lag": val} with int val indicating the lag to be returned
:type param: list
:return: the value of this feature
:rtype: float | entailment |
def ratio_value_number_to_time_series_length(self, x):
"""
As in tsfresh `ratio_value_number_to_time_series_length <https://github.com/blue-yonder/tsfresh/blob/master\
/tsfresh/feature_extraction/feature_calculators.py#L830>`_
Returns a factor which is 1 if all values in the time series occur only once,
and below one if this is not the case.
In principle, it just returns: # unique values / # values
:param x: the time series to calculate the feature of
:type x: pandas.Series
:return: the value of this feature
:rtype: float
"""
ratio = feature_calculators.ratio_value_number_to_time_series_length(x)
logging.debug("ratio value number to time series length by tsfresh calculated")
return ratio | As in tsfresh `ratio_value_number_to_time_series_length <https://github.com/blue-yonder/tsfresh/blob/master\
/tsfresh/feature_extraction/feature_calculators.py#L830>`_
Returns a factor which is 1 if all values in the time series occur only once,
and below one if this is not the case.
In principle, it just returns: # unique values / # values
:param x: the time series to calculate the feature of
:type x: pandas.Series
:return: the value of this feature
:rtype: float | entailment |
def change_quantiles(self, x, ql=None, qh=None, isabs=None, f_agg=None):
"""
As in tsfresh `change_quantiles <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/\
feature_extraction/feature_calculators.py#L1248>`_
First fixes a corridor given by the quantiles ql and qh of the distribution of x. Then calculates the \
average, absolute value of consecutive changes of the series x inside this corridor. Think about selecting \
a corridor on the y-Axis and only calculating the mean of the absolute change of the time series inside \
this corridor.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param ql: the lower quantile of the corridor
:type ql: float
:param qh: the higher quantile of the corridor
:type qh: float
:param isabs: should the absolute differences be taken?
:type isabs: bool
:param f_agg: the aggregator function that is applied to the differences in the bin
:type f_agg: str, name of a numpy function (e.g. mean, var, std, median)
:return: the value of this feature
:rtype: float
"""
if ql is None or qh is None or isabs is None or f_agg is None:
f_agg = 'mean'
isabs = True
qh = 0.2
ql = 0.0
quantile = feature_calculators.change_quantiles(x, ql, qh, isabs, f_agg)
logging.debug("change_quantiles by tsfresh calculated")
return quantile | As in tsfresh `change_quantiles <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/\
feature_extraction/feature_calculators.py#L1248>`_
First fixes a corridor given by the quantiles ql and qh of the distribution of x. Then calculates the \
average, absolute value of consecutive changes of the series x inside this corridor. Think about selecting \
a corridor on the y-Axis and only calculating the mean of the absolute change of the time series inside \
this corridor.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param ql: the lower quantile of the corridor
:type ql: float
:param qh: the higher quantile of the corridor
:type qh: float
:param isabs: should the absolute differences be taken?
:type isabs: bool
:param f_agg: the aggregator function that is applied to the differences in the bin
:type f_agg: str, name of a numpy function (e.g. mean, var, std, median)
:return: the value of this feature
:rtype: float | entailment |
def number_peaks(self, x, n=None):
"""
As in tsfresh `number_peaks <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L1003>`_
Calculates the number of peaks of at least support n in the time series x. A peak of support n is defined \
as a subsequence of x where a value occurs, which is bigger than its n neighbours to the left and to the right.
Hence in the sequence
>>> x = [3, 0, 0, 4, 0, 0, 13]
4 is a peak of support 1 and 2 because in the subsequences
>>> [0, 4, 0]
>>> [0, 0, 4, 0, 0]
4 is still the highest value. Here, 4 is not a peak of support 3 because 13 is the 3th neighbour to the \
right of 4 and its bigger than 4.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param n: the support of the peak
:type n: int
:return: the value of this feature
:rtype: float
"""
if n is None:
n = 5
peaks = feature_calculators.number_peaks(x, n)
logging.debug("agg linear trend by tsfresh calculated")
return peaks | As in tsfresh `number_peaks <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L1003>`_
Calculates the number of peaks of at least support n in the time series x. A peak of support n is defined \
as a subsequence of x where a value occurs, which is bigger than its n neighbours to the left and to the right.
Hence in the sequence
>>> x = [3, 0, 0, 4, 0, 0, 13]
4 is a peak of support 1 and 2 because in the subsequences
>>> [0, 4, 0]
>>> [0, 0, 4, 0, 0]
4 is still the highest value. Here, 4 is not a peak of support 3 because 13 is the 3th neighbour to the \
right of 4 and its bigger than 4.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param n: the support of the peak
:type n: int
:return: the value of this feature
:rtype: float | entailment |
def agg_linear_trend(self, x, param=None):
"""
As in tsfresh `agg_inear_trend <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/\
feature_extraction/feature_calculators.py#L1727>`_
Calculates a linear least-squares regression for values of the time series that were aggregated over chunks\
versus the sequence from 0 up to the number of chunks minus one.
This feature assumes the signal to be uniformly sampled. It will not use the time stamps to fit the model.
The parameters attr controls which of the characteristics are returned. Possible extracted attributes are\
"pvalue", "rvalue", "intercept", "slope", "stderr", see the documentation of linregress for more \
information.
The chunksize is regulated by "chunk_len". It specifies how many time series values are in each chunk.
Further, the aggregation function is controlled by "f_agg", which can use "max", "min" or , "mean", "median"
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"attr": x, "chunk_len": l, "f_agg": f} with x, f a str and l an int
:type param: list
:return: the different feature values
:rtype: pandas.Series
"""
if param is None:
param = [{'attr': 'intercept', 'chunk_len': 5, 'f_agg': 'min'},
{'attr': 'rvalue', 'chunk_len': 10, 'f_agg': 'var'},
{'attr': 'intercept', 'chunk_len': 10, 'f_agg': 'min'}]
agg = feature_calculators.agg_linear_trend(x, param)
logging.debug("agg linear trend by tsfresh calculated")
return list(agg) | As in tsfresh `agg_inear_trend <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/\
feature_extraction/feature_calculators.py#L1727>`_
Calculates a linear least-squares regression for values of the time series that were aggregated over chunks\
versus the sequence from 0 up to the number of chunks minus one.
This feature assumes the signal to be uniformly sampled. It will not use the time stamps to fit the model.
The parameters attr controls which of the characteristics are returned. Possible extracted attributes are\
"pvalue", "rvalue", "intercept", "slope", "stderr", see the documentation of linregress for more \
information.
The chunksize is regulated by "chunk_len". It specifies how many time series values are in each chunk.
Further, the aggregation function is controlled by "f_agg", which can use "max", "min" or , "mean", "median"
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"attr": x, "chunk_len": l, "f_agg": f} with x, f a str and l an int
:type param: list
:return: the different feature values
:rtype: pandas.Series | entailment |
def spkt_welch_density(self, x, param=None):
"""
As in tsfresh `spkt_welch_density <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/\
feature_extraction/feature_calculators.py#L1162>`_ . This feature calculator estimates the cross power \
spectral density of the time series x at different frequencies. To do so, the time series is first shifted \
from the time domain to the frequency domain. \
The feature calculators returns the power spectrum of the different frequencies.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"coeff": x} with x int
:type param: list
:return: the different feature values
:rtype: pandas.Series
"""
if param is None:
param = [{'coeff': 2}, {'coeff': 5}, {'coeff': 8}]
welch = feature_calculators.spkt_welch_density(x, param)
logging.debug("spkt welch density by tsfresh calculated")
return list(welch) | As in tsfresh `spkt_welch_density <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/\
feature_extraction/feature_calculators.py#L1162>`_ . This feature calculator estimates the cross power \
spectral density of the time series x at different frequencies. To do so, the time series is first shifted \
from the time domain to the frequency domain. \
The feature calculators returns the power spectrum of the different frequencies.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"coeff": x} with x int
:type param: list
:return: the different feature values
:rtype: pandas.Series | entailment |
def percentage_of_reoccurring_datapoints_to_all_datapoints(self, x):
"""
As in tsfresh `percentage_of_reoccurring_datapoints_to_all_datapoints <https://github.com/blue-yonder/tsfresh/\
blob/master/tsfresh/feature_extraction/feature_calculators.py#L739>`_ \
Returns the percentage of unique values, that are present in the time series more than once.\
len(different values occurring more than once) / len(different values)\
This means the percentage is normalized to the number of unique values, in contrast to the \
percentage_of_reoccurring_values_to_all_values.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:return: the value of this feature
:rtype: float
"""
_perc = feature_calculators.percentage_of_reoccurring_datapoints_to_all_datapoints(x)
logging.debug("percentage of reoccurring datapoints to all datapoints by tsfresh calculated")
return _perc | As in tsfresh `percentage_of_reoccurring_datapoints_to_all_datapoints <https://github.com/blue-yonder/tsfresh/\
blob/master/tsfresh/feature_extraction/feature_calculators.py#L739>`_ \
Returns the percentage of unique values, that are present in the time series more than once.\
len(different values occurring more than once) / len(different values)\
This means the percentage is normalized to the number of unique values, in contrast to the \
percentage_of_reoccurring_values_to_all_values.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:return: the value of this feature
:rtype: float | entailment |
def abs_energy(self, x):
"""
As in tsfresh `abs_energy <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L390>`_ \
Returns the absolute energy of the time series which is the sum over the squared values\
.. math::
E=\\sum_{i=1,\ldots, n}x_i^2
:param x: the time series to calculate the feature of
:type x: pandas.Series
:return: the value of this feature
:rtype: float
"""
_energy = feature_calculators.abs_energy(x)
logging.debug("abs energy by tsfresh calculated")
return _energy | As in tsfresh `abs_energy <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L390>`_ \
Returns the absolute energy of the time series which is the sum over the squared values\
.. math::
E=\\sum_{i=1,\ldots, n}x_i^2
:param x: the time series to calculate the feature of
:type x: pandas.Series
:return: the value of this feature
:rtype: float | entailment |
def fft_aggregated(self, x, param=None):
"""
As in tsfresh `fft_aggregated <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L896>`_
Returns the spectral centroid (mean), variance, skew, and kurtosis of the absolute fourier transform spectrum.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"aggtype": s} where s str and in ["centroid", "variance",
"skew", "kurtosis"]
:type param: list
:return: the different feature values
:rtype: pandas.Series
"""
if param is None:
param = [{'aggtype': 'centroid'}]
_fft_agg = feature_calculators.fft_aggregated(x, param)
logging.debug("fft aggregated by tsfresh calculated")
return list(_fft_agg) | As in tsfresh `fft_aggregated <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L896>`_
Returns the spectral centroid (mean), variance, skew, and kurtosis of the absolute fourier transform spectrum.
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"aggtype": s} where s str and in ["centroid", "variance",
"skew", "kurtosis"]
:type param: list
:return: the different feature values
:rtype: pandas.Series | entailment |
def fft_coefficient(self, x, param=None):
"""
As in tsfresh `fft_coefficient <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L852>`_ \
Calculates the fourier coefficients of the one-dimensional discrete Fourier Transform for real input by fast \
fourier transformation algorithm
.. math::
A_k = \\sum_{m=0}^{n-1} a_m \\exp \\left \\{ -2 \\pi i \\frac{m k}{n} \\right \\}, \\qquad k = 0, \\ldots , n-1.
The resulting coefficients will be complex, this feature calculator can return the real part (attr=="real"), \
the imaginary part (attr=="imag), the absolute value (attr=""abs) and the angle in degrees (attr=="angle).
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"coeff": x, "attr": s} with x int and x >= 0, s str and in ["real", "imag"\
, "abs", "angle"]
:type param: list
:return: the different feature values
:rtype: pandas.Series
"""
if param is None:
param = [{'attr': 'abs', 'coeff': 44}, {'attr': 'abs', 'coeff': 63}, {'attr': 'abs', 'coeff': 0},
{'attr': 'real', 'coeff': 0}, {'attr': 'real', 'coeff': 23}]
_fft_coef = feature_calculators.fft_coefficient(x, param)
logging.debug("fft coefficient by tsfresh calculated")
return list(_fft_coef) | As in tsfresh `fft_coefficient <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/\
feature_calculators.py#L852>`_ \
Calculates the fourier coefficients of the one-dimensional discrete Fourier Transform for real input by fast \
fourier transformation algorithm
.. math::
A_k = \\sum_{m=0}^{n-1} a_m \\exp \\left \\{ -2 \\pi i \\frac{m k}{n} \\right \\}, \\qquad k = 0, \\ldots , n-1.
The resulting coefficients will be complex, this feature calculator can return the real part (attr=="real"), \
the imaginary part (attr=="imag), the absolute value (attr=""abs) and the angle in degrees (attr=="angle).
:param x: the time series to calculate the feature of
:type x: pandas.Series
:param param: contains dictionaries {"coeff": x, "attr": s} with x int and x >= 0, s str and in ["real", "imag"\
, "abs", "angle"]
:type param: list
:return: the different feature values
:rtype: pandas.Series | entailment |
def dc_remove_signal(self, data_frame):
"""
Removes the dc component of the signal as per :cite:`Kassavetis2015`
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: the data frame with dc remove signal field
:rtype: pandas.DataFrame
"""
mean_signal = np.mean(data_frame.mag_sum_acc)
data_frame['dc_mag_sum_acc'] = data_frame.mag_sum_acc - mean_signal
logging.debug("dc remove signal")
return data_frame | Removes the dc component of the signal as per :cite:`Kassavetis2015`
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: the data frame with dc remove signal field
:rtype: pandas.DataFrame | entailment |
def bradykinesia(self, data_frame, method='fft'):
"""
This method calculates the bradykinesia amplitude of the data frame. It accepts two different methods, \
'fft' and 'welch'. First the signal gets re-sampled, dc removed and then high pass filtered.
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:param method: fft or welch.
:type method: str
:return ampl: the amplitude of the Bradykinesia
:rtype ampl: float
:return freq: the frequency of the Bradykinesia
:rtype freq: float
"""
try:
data_frame_resampled = self.resample_signal(data_frame)
data_frame_dc = self.dc_remove_signal(data_frame_resampled)
data_frame_filtered = self.filter_signal(data_frame_dc, 'dc_mag_sum_acc')
if method == 'fft':
data_frame_fft = self.fft_signal(data_frame_filtered)
return self.amplitude_by_fft(data_frame_fft)
else:
return self.amplitude_by_welch(data_frame_filtered)
except ValueError as verr:
logging.error("TremorProcessor bradykinesia ValueError ->%s", verr.message)
except:
logging.error("Unexpected error on TemorProcessor bradykinesia: %s", sys.exc_info()[0]) | This method calculates the bradykinesia amplitude of the data frame. It accepts two different methods, \
'fft' and 'welch'. First the signal gets re-sampled, dc removed and then high pass filtered.
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:param method: fft or welch.
:type method: str
:return ampl: the amplitude of the Bradykinesia
:rtype ampl: float
:return freq: the frequency of the Bradykinesia
:rtype freq: float | entailment |
def extract_features(self, data_frame, pre=''):
"""
This method extracts all the features available to the Tremor Processor class.
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: amplitude_by_fft, frequency_by_fft, amplitude_by_welch, frequency_by_fft, bradykinesia_amplitude_by_fft, \
bradykinesia_frequency_by_fft, bradykinesia_amplitude_by_welch, bradykinesia_frequency_by_welch, \
magnitude_approximate_entropy, magnitude_autocorrelation_lag_8, magnitude_autocorrelation_lag_9, \
magnitude_partial_autocorrelation_lag_3, magnitude_partial_autocorrelation_lag_5, \
magnitude_partial_autocorrelation_lag_6, magnitude_minimum, magnitude_mean, \
magnitude_ratio_value_number_to_time_series_length, magnitude_change_quantiles, magnitude_number_peaks, \
magnitude_agg_linear_trend_min_chunk_len_5_attr_intercept, \
magnitude_agg_linear_trend_var_chunk_len_10_attr_rvalue, \
magnitude_agg_linear_trend_min_chunk_len_10_attr_intercept, \
magnitude_spkt_welch_density_coeff_2, magnitude_spkt_welch_density_coeff_5, \
magnitude_spkt_welch_density_coeff_8, magnitude_percentage_of_reoccurring_datapoints_to_all_datapoints, \
magnitude_abs_energy, magnitude_fft_aggregated_centroid, magnitude_fft_aggregated_centroid, \
magnitude_fft_coefficient_abs_coeff_44, magnitude_fft_coefficient_abs_coeff_63, \
magnitude_fft_coefficient_abs_coeff_0, magnitude_fft_coefficient_real_coeff_0, \
magnitude_fft_coefficient_real_coeff_23, magnitude_sum_values
:rtype: list
"""
try:
magnitude_partial_autocorrelation = self.partial_autocorrelation(data_frame.mag_sum_acc)
magnitude_agg_linear = self.agg_linear_trend(data_frame.mag_sum_acc)
magnitude_spkt_welch_density = self.spkt_welch_density(data_frame.mag_sum_acc)
magnitude_fft_coefficient = self.fft_coefficient(data_frame.mag_sum_acc)
return {pre+'amplitude_by_fft': self.amplitude(data_frame)[0],
pre+'frequency_by_fft': self.amplitude(data_frame)[1],
pre+'amplitude_by_welch': self.amplitude(data_frame, 'welch')[0],
pre+'frequency_by_welch': self.amplitude(data_frame, 'welch')[1],
pre+'bradykinesia_amplitude_by_fft': self.bradykinesia(data_frame)[0],
pre+'bradykinesia_frequency_by_fft': self.bradykinesia(data_frame)[1],
pre+'bradykinesia_amplitude_by_welch': self.bradykinesia(data_frame, 'welch')[0],
pre+'bradykinesia_frequency_by_welch': self.bradykinesia(data_frame, 'welch')[1],
pre+'magnitude_approximate_entropy': self.approximate_entropy(data_frame.mag_sum_acc),
pre+'magnitude_autocorrelation_lag_8': self.autocorrelation(data_frame.mag_sum_acc, 8),
pre+'magnitude_autocorrelation_lag_9': self.autocorrelation(data_frame.mag_sum_acc, 9),
pre+'magnitude_partial_autocorrelation_lag_3': magnitude_partial_autocorrelation[0][1],
pre+'magnitude_partial_autocorrelation_lag_5': magnitude_partial_autocorrelation[1][1],
pre+'magnitude_partial_autocorrelation_lag_6': magnitude_partial_autocorrelation[2][1],
pre+'magnitude_minimum': self.minimum(data_frame.mag_sum_acc),
pre+'magnitude_mean': self.mean(data_frame.mag_sum_acc),
pre+'magnitude_ratio_value_number_to_time_series_length':
self.ratio_value_number_to_time_series_length(data_frame.mag_sum_acc),
pre+'magnitude_change_quantiles': self.change_quantiles(data_frame.mag_sum_acc),
pre+'magnitude_number_peaks': self.number_peaks(data_frame.mag_sum_acc),
pre+'magnitude_agg_linear_trend_min_chunk_len_5_attr_intercept': magnitude_agg_linear[0][1],
pre+'magnitude_agg_linear_trend_var_chunk_len_10_attr_rvalue': magnitude_agg_linear[1][1],
pre+'magnitude_agg_linear_trend_min_chunk_len_10_attr_intercept': magnitude_agg_linear[2][1],
pre+'magnitude_spkt_welch_density_coeff_2': magnitude_spkt_welch_density[0][1],
pre+'magnitude_spkt_welch_density_coeff_5': magnitude_spkt_welch_density[1][1],
pre+'magnitude_spkt_welch_density_coeff_8': magnitude_spkt_welch_density[2][1],
pre+'magnitude_percentage_of_reoccurring_datapoints_to_all_datapoints':
self.percentage_of_reoccurring_datapoints_to_all_datapoints(data_frame.mag_sum_acc),
pre+'magnitude_abs_energy': self.abs_energy(data_frame.mag_sum_acc),
pre+'magnitude_fft_aggregated_centroid': self.fft_aggregated(data_frame.mag_sum_acc)[0][1],
pre+'magnitude_fft_coefficient_abs_coeff_44': magnitude_fft_coefficient[0][1],
pre+'magnitude_fft_coefficient_abs_coeff_63': magnitude_fft_coefficient[1][1],
pre+'magnitude_fft_coefficient_abs_coeff_0': magnitude_fft_coefficient[2][1],
pre+'magnitude_fft_coefficient_real_coeff_0': magnitude_fft_coefficient[3][1],
pre+'magnitude_fft_coefficient_real_coeff_23': magnitude_fft_coefficient[4][1],
pre+'magnitude_sum_values': self.sum_values(data_frame.mag_sum_acc)}
except:
logging.error("Error on TremorProcessor process, extract features: %s", sys.exc_info()[0]) | This method extracts all the features available to the Tremor Processor class.
:param data_frame: the data frame
:type data_frame: pandas.DataFrame
:return: amplitude_by_fft, frequency_by_fft, amplitude_by_welch, frequency_by_fft, bradykinesia_amplitude_by_fft, \
bradykinesia_frequency_by_fft, bradykinesia_amplitude_by_welch, bradykinesia_frequency_by_welch, \
magnitude_approximate_entropy, magnitude_autocorrelation_lag_8, magnitude_autocorrelation_lag_9, \
magnitude_partial_autocorrelation_lag_3, magnitude_partial_autocorrelation_lag_5, \
magnitude_partial_autocorrelation_lag_6, magnitude_minimum, magnitude_mean, \
magnitude_ratio_value_number_to_time_series_length, magnitude_change_quantiles, magnitude_number_peaks, \
magnitude_agg_linear_trend_min_chunk_len_5_attr_intercept, \
magnitude_agg_linear_trend_var_chunk_len_10_attr_rvalue, \
magnitude_agg_linear_trend_min_chunk_len_10_attr_intercept, \
magnitude_spkt_welch_density_coeff_2, magnitude_spkt_welch_density_coeff_5, \
magnitude_spkt_welch_density_coeff_8, magnitude_percentage_of_reoccurring_datapoints_to_all_datapoints, \
magnitude_abs_energy, magnitude_fft_aggregated_centroid, magnitude_fft_aggregated_centroid, \
magnitude_fft_coefficient_abs_coeff_44, magnitude_fft_coefficient_abs_coeff_63, \
magnitude_fft_coefficient_abs_coeff_0, magnitude_fft_coefficient_real_coeff_0, \
magnitude_fft_coefficient_real_coeff_23, magnitude_sum_values
:rtype: list | entailment |
def main(argv, reactor=None):
"""Run the client GUI.
Typical use:
>>> sys.exit(main(sys.argv))
@param argv: The arguments to run it with, e.g. sys.argv.
@param reactor: The reactor to use. Must be compatible with gtk as this
module uses gtk API"s.
@return exitcode: The exit code it returned, as per sys.exit.
"""
if reactor is None:
from twisted.internet import gtk2reactor
gtk2reactor.install()
from twisted.internet import reactor
try:
AWSStatusIndicator(reactor)
gobject.set_application_name("aws-status")
reactor.run()
except ValueError:
# In this case, the user cancelled, and the exception bubbled to here.
pass | Run the client GUI.
Typical use:
>>> sys.exit(main(sys.argv))
@param argv: The arguments to run it with, e.g. sys.argv.
@param reactor: The reactor to use. Must be compatible with gtk as this
module uses gtk API"s.
@return exitcode: The exit code it returned, as per sys.exit. | entailment |
def parse_options(arguments):
"""Parse command line arguments.
The parsing logic is fairly simple. It can only parse long-style
parameters of the form::
--key value
Several parameters can be defined in the environment and will be used
unless explicitly overridden with command-line arguments. The access key,
secret and endpoint values will be loaded from C{AWS_ACCESS_KEY_ID},
C{AWS_SECRET_ACCESS_KEY} and C{AWS_ENDPOINT} environment variables.
@param arguments: A list of command-line arguments. The first item is
expected to be the name of the program being run.
@raises OptionError: Raised if incorrectly formed command-line arguments
are specified, or if required command-line arguments are not present.
@raises UsageError: Raised if C{--help} is present in command-line
arguments.
@return: A C{dict} with key/value pairs extracted from the argument list.
"""
arguments = arguments[1:]
options = {}
while arguments:
key = arguments.pop(0)
if key in ("-h", "--help"):
raise UsageError("Help requested.")
if key.startswith("--"):
key = key[2:]
try:
value = arguments.pop(0)
except IndexError:
raise OptionError("'--%s' is missing a value." % key)
options[key] = value
else:
raise OptionError("Encountered unexpected value '%s'." % key)
default_key = os.environ.get("AWS_ACCESS_KEY_ID")
if "key" not in options and default_key:
options["key"] = default_key
default_secret = os.environ.get("AWS_SECRET_ACCESS_KEY")
if "secret" not in options and default_secret:
options["secret"] = default_secret
default_endpoint = os.environ.get("AWS_ENDPOINT")
if "endpoint" not in options and default_endpoint:
options["endpoint"] = default_endpoint
for name in ("key", "secret", "endpoint", "action"):
if name not in options:
raise OptionError(
"The '--%s' command-line argument is required." % name)
return options | Parse command line arguments.
The parsing logic is fairly simple. It can only parse long-style
parameters of the form::
--key value
Several parameters can be defined in the environment and will be used
unless explicitly overridden with command-line arguments. The access key,
secret and endpoint values will be loaded from C{AWS_ACCESS_KEY_ID},
C{AWS_SECRET_ACCESS_KEY} and C{AWS_ENDPOINT} environment variables.
@param arguments: A list of command-line arguments. The first item is
expected to be the name of the program being run.
@raises OptionError: Raised if incorrectly formed command-line arguments
are specified, or if required command-line arguments are not present.
@raises UsageError: Raised if C{--help} is present in command-line
arguments.
@return: A C{dict} with key/value pairs extracted from the argument list. | entailment |
def get_command(arguments, output=None):
"""Parse C{arguments} and configure a L{Command} instance.
An access key, secret key, endpoint and action are required. Additional
parameters included with the request are passed as parameters to the
method call. For example, the following command will create a L{Command}
object that can invoke the C{DescribeRegions} method with the optional
C{RegionName.0} parameter included in the request::
txaws-discover --key KEY --secret SECRET --endpoint URL \
--action DescribeRegions --RegionName.0 us-west-1
@param arguments: The command-line arguments to parse.
@raises OptionError: Raised if C{arguments} can't be used to create a
L{Command} object.
@return: A L{Command} instance configured to make an EC2 API method call.
"""
options = parse_options(arguments)
key = options.pop("key")
secret = options.pop("secret")
endpoint = options.pop("endpoint")
action = options.pop("action")
return Command(key, secret, endpoint, action, options, output) | Parse C{arguments} and configure a L{Command} instance.
An access key, secret key, endpoint and action are required. Additional
parameters included with the request are passed as parameters to the
method call. For example, the following command will create a L{Command}
object that can invoke the C{DescribeRegions} method with the optional
C{RegionName.0} parameter included in the request::
txaws-discover --key KEY --secret SECRET --endpoint URL \
--action DescribeRegions --RegionName.0 us-west-1
@param arguments: The command-line arguments to parse.
@raises OptionError: Raised if C{arguments} can't be used to create a
L{Command} object.
@return: A L{Command} instance configured to make an EC2 API method call. | entailment |
def main(arguments, output=None, testing_mode=None):
"""
Entry point parses command-line arguments, runs the specified EC2 API
method and prints the response to the screen.
@param arguments: Command-line arguments, typically retrieved from
C{sys.argv}.
@param output: Optionally, a stream to write output to.
@param testing_mode: Optionally, a condition that specifies whether or not
to run in test mode. When the value is true a reactor will not be run
or stopped, to prevent interfering with the test suite.
"""
def run_command(arguments, output, reactor):
if output is None:
output = sys.stdout
try:
command = get_command(arguments, output)
except UsageError:
print >>output, USAGE_MESSAGE.strip()
if reactor:
reactor.callLater(0, reactor.stop)
except Exception, e:
print >>output, "ERROR:", str(e)
if reactor:
reactor.callLater(0, reactor.stop)
else:
deferred = command.run()
if reactor:
deferred.addCallback(lambda ignored: reactor.stop())
if not testing_mode:
from twisted.internet import reactor
reactor.callLater(0, run_command, arguments, output, reactor)
reactor.run()
else:
run_command(arguments, output, None) | Entry point parses command-line arguments, runs the specified EC2 API
method and prints the response to the screen.
@param arguments: Command-line arguments, typically retrieved from
C{sys.argv}.
@param output: Optionally, a stream to write output to.
@param testing_mode: Optionally, a condition that specifies whether or not
to run in test mode. When the value is true a reactor will not be run
or stopped, to prevent interfering with the test suite. | entailment |
def execute(self, timeSeries):
"""Creates a new TimeSeries containing the smoothed and forcasted values.
:return: TimeSeries object containing the smoothed TimeSeries,
including the forecasted values.
:rtype: TimeSeries
:note: The first normalized value is chosen as the starting point.
"""
# determine the number of values to forecast, if necessary
self._calculate_values_to_forecast(timeSeries)
# extract the required parameters, performance improvement
alpha = self._parameters["smoothingFactor"]
valuesToForecast = self._parameters["valuesToForecast"]
# initialize some variables
resultList = []
estimator = None
lastT = None
# "It's always about performance!"
append = resultList.append
# smooth the existing TimeSeries data
for idx in xrange(len(timeSeries)):
# get the current to increase performance
t = timeSeries[idx]
# get the initial estimate
if estimator is None:
estimator = t[1]
continue
# add the first value to the resultList without any correction
if 0 == len(resultList):
append([t[0], estimator])
lastT = t
continue
# calculate the error made during the last estimation
error = lastT[1] - estimator
# calculate the new estimator, based on the last occured value, the error and the smoothingFactor
estimator = estimator + alpha * error
# save the current value for the next iteration
lastT = t
# add an entry to the result
append([t[0], estimator])
# forecast additional values if requested
if valuesToForecast > 0:
currentTime = resultList[-1][0]
normalizedTimeDiff = currentTime - resultList[-2][0]
for idx in xrange(valuesToForecast):
currentTime += normalizedTimeDiff
# reuse everything
error = lastT[1] - estimator
estimator = estimator + alpha * error
# add a forecasted value
append([currentTime, estimator])
# set variables for next iteration
lastT = resultList[-1]
# return a TimeSeries, containing the result
return TimeSeries.from_twodim_list(resultList) | Creates a new TimeSeries containing the smoothed and forcasted values.
:return: TimeSeries object containing the smoothed TimeSeries,
including the forecasted values.
:rtype: TimeSeries
:note: The first normalized value is chosen as the starting point. | entailment |
def _get_parameter_intervals(self):
"""Returns the intervals for the methods parameter.
Only parameters with defined intervals can be used for optimization!
:return: Returns a dictionary containing the parameter intervals, using the parameter
name as key, while the value hast the following format:
[minValue, maxValue, minIntervalClosed, maxIntervalClosed]
- minValue
Minimal value for the parameter
- maxValue
Maximal value for the parameter
- minIntervalClosed
:py:const:`True`, if minValue represents a valid value for the parameter.
:py:const:`False` otherwise.
- maxIntervalClosed:
:py:const:`True`, if maxValue represents a valid value for the parameter.
:py:const:`False` otherwise.
:rtype: dictionary
"""
parameterIntervals = {}
parameterIntervals["smoothingFactor"] = [0.0, 1.0, False, False]
parameterIntervals["trendSmoothingFactor"] = [0.0, 1.0, False, False]
return parameterIntervals | Returns the intervals for the methods parameter.
Only parameters with defined intervals can be used for optimization!
:return: Returns a dictionary containing the parameter intervals, using the parameter
name as key, while the value hast the following format:
[minValue, maxValue, minIntervalClosed, maxIntervalClosed]
- minValue
Minimal value for the parameter
- maxValue
Maximal value for the parameter
- minIntervalClosed
:py:const:`True`, if minValue represents a valid value for the parameter.
:py:const:`False` otherwise.
- maxIntervalClosed:
:py:const:`True`, if maxValue represents a valid value for the parameter.
:py:const:`False` otherwise.
:rtype: dictionary | entailment |
def execute(self, timeSeries):
"""Creates a new TimeSeries containing the smoothed values.
:return: TimeSeries object containing the smoothed TimeSeries,
including the forecasted values.
:rtype: TimeSeries
:note: The first normalized value is chosen as the starting point.
"""
# determine the number of values to forecast, if necessary
self._calculate_values_to_forecast(timeSeries)
# extract the required parameters, performance improvement
alpha = self._parameters["smoothingFactor"]
beta = self._parameters["trendSmoothingFactor"]
# initialize some variables
resultList = []
estimator = None
trend = None
lastT = None
# "It's always about performance!"
append = resultList.append
# smooth the existing TimeSeries data
for idx in xrange(len(timeSeries)):
# get the current to increase performance
t = timeSeries[idx]
# get the initial estimate
if estimator is None:
estimator = t[1]
lastT = t
continue
# add the first value to the resultList without any correction
if 0 == len(resultList):
append([t[0], estimator])
trend = t[1] - lastT[1]
# store current values for next iteration
lastT = t
lastEstimator = estimator
continue
# calculate the new estimator and trend, based on the last occured value, the error and the smoothingFactor
estimator = alpha * t[1] + (1 - alpha) * (estimator + trend)
trend = beta * (estimator - lastEstimator) + (1 - beta) * trend
# add an entry to the result
append([t[0], estimator])
# store current values for next iteration
lastT = t
lastEstimator = estimator
# forecast additional values if requested
if self._parameters["valuesToForecast"] > 0:
currentTime = resultList[-1][0]
normalizedTimeDiff = currentTime - resultList[-2][0]
for idx in xrange(1, self._parameters["valuesToForecast"] + 1):
currentTime += normalizedTimeDiff
# reuse everything
forecast = estimator + idx * trend
# add a forecasted value
append([currentTime, forecast])
# return a TimeSeries, containing the result
return TimeSeries.from_twodim_list(resultList) | Creates a new TimeSeries containing the smoothed values.
:return: TimeSeries object containing the smoothed TimeSeries,
including the forecasted values.
:rtype: TimeSeries
:note: The first normalized value is chosen as the starting point. | entailment |
def execute(self, timeSeries):
"""Creates a new TimeSeries containing the smoothed values.
:param TimeSeries timeSeries: TimeSeries containing hte data.
:return: TimeSeries object containing the exponentially smoothed TimeSeries,
including the forecasted values.
:rtype: TimeSeries
:note: Currently the first normalized value is simply chosen as the starting point.
"""
# determine the number of values to forecast, if necessary
self._calculate_values_to_forecast(timeSeries)
seasonLength = self.get_parameter("seasonLength")
if len(timeSeries) < seasonLength:
raise ValueError("The time series must contain at least one full season.")
alpha = self.get_parameter("smoothingFactor")
beta = self.get_parameter("trendSmoothingFactor")
gamma = self.get_parameter("seasonSmoothingFactor")
seasonValues = self.initSeasonFactors(timeSeries)
resultList = []
lastEstimator = 0
for idx in xrange(len(timeSeries)):
t = timeSeries[idx][0]
x_t = timeSeries[idx][1]
if idx == 0:
lastTrend = self.initialTrendSmoothingFactors(timeSeries)
lastEstimator = x_t
resultList.append([t, x_t])
continue
lastSeasonValue = seasonValues[idx % seasonLength]
estimator = alpha * x_t/lastSeasonValue + (1 - alpha) * (lastEstimator + lastTrend)
lastTrend = beta * (estimator - lastEstimator) + (1 - beta) * lastTrend
seasonValues[idx % seasonLength] = gamma * x_t/estimator + (1 - gamma) * lastSeasonValue
lastEstimator = estimator
resultList.append([t, estimator])
resultList += self._calculate_forecast(timeSeries, resultList, seasonValues, [lastEstimator, lastSeasonValue, lastTrend])
return TimeSeries.from_twodim_list(resultList) | Creates a new TimeSeries containing the smoothed values.
:param TimeSeries timeSeries: TimeSeries containing hte data.
:return: TimeSeries object containing the exponentially smoothed TimeSeries,
including the forecasted values.
:rtype: TimeSeries
:note: Currently the first normalized value is simply chosen as the starting point. | entailment |
def _calculate_forecast(self, originalTimeSeries, smoothedData, seasonValues, lastSmoothingParams):
"""Calculates the actual forecasted based on the input data.
:param TimeSeries timeSeries: TimeSeries containing hte data.
:param list smoothedData: Contains the smoothed time series data.
:param list seasonValues: Contains the seasonal values for the forecast.
:param list lastSmoothingParams: List containing the last [estimator, season value, trend] calculated during
smoothing the TimeSeries.
:return: Returns a list containing forecasted values
"""
forecastResults = []
lastEstimator, lastSeasonValue, lastTrend = lastSmoothingParams
seasonLength = self.get_parameter("seasonLength")
#Forecasting. Determine the time difference between two points for extrapolation
currentTime = smoothedData[-1][0]
normalizedTimeDiff = currentTime - smoothedData[-2][0]
for m in xrange(1, self._parameters["valuesToForecast"] + 1):
currentTime += normalizedTimeDiff
lastSeasonValue = seasonValues[(len(originalTimeSeries) + m - 2) % seasonLength]
forecast = (lastEstimator + m * lastTrend) * lastSeasonValue
forecastResults.append([currentTime, forecast])
return forecastResults | Calculates the actual forecasted based on the input data.
:param TimeSeries timeSeries: TimeSeries containing hte data.
:param list smoothedData: Contains the smoothed time series data.
:param list seasonValues: Contains the seasonal values for the forecast.
:param list lastSmoothingParams: List containing the last [estimator, season value, trend] calculated during
smoothing the TimeSeries.
:return: Returns a list containing forecasted values | entailment |
def initSeasonFactors(self, timeSeries):
""" Computes the initial season smoothing factors.
:return: Returns a list of season vectors of length "seasonLength".
:rtype: list
"""
seasonLength = self.get_parameter("seasonLength")
try:
seasonValues = self.get_parameter("seasonValues")
assert seasonLength == len(seasonValues), "Preset Season Values have to have to be of season's length"
return seasonValues
except KeyError:
pass
seasonValues = []
completeCycles = len(timeSeries) / seasonLength
A = {} #cache values for A_j
for i in xrange(seasonLength):
c_i = 0
for j in xrange(completeCycles):
if j not in A:
A[j] = self.computeA(j, timeSeries)
c_i += timeSeries[(seasonLength * j) + i][1] / A[j] #wikipedia suggests j-1, but we worked with indices in the first place
c_i /= completeCycles
seasonValues.append(c_i)
return seasonValues | Computes the initial season smoothing factors.
:return: Returns a list of season vectors of length "seasonLength".
:rtype: list | entailment |
def initialTrendSmoothingFactors(self, timeSeries):
""" Calculate the initial Trend smoothing Factor b0.
Explanation:
http://en.wikipedia.org/wiki/Exponential_smoothing#Triple_exponential_smoothing
:return: Returns the initial trend smoothing factor b0
"""
result = 0.0
seasonLength = self.get_parameter("seasonLength")
k = min(len(timeSeries) - seasonLength, seasonLength) #In case of only one full season, use average trend of the months that we have twice
for i in xrange(0, k):
result += (timeSeries[seasonLength + i][1] - timeSeries[i][1]) / seasonLength
return result / k | Calculate the initial Trend smoothing Factor b0.
Explanation:
http://en.wikipedia.org/wiki/Exponential_smoothing#Triple_exponential_smoothing
:return: Returns the initial trend smoothing factor b0 | entailment |
def computeA(self, j, timeSeries):
""" Calculates A_j. Aj is the average value of x in the jth cycle of your data
:return: A_j
:rtype: numeric
"""
seasonLength = self.get_parameter("seasonLength")
A_j = 0
for i in range(seasonLength):
A_j += timeSeries[(seasonLength * (j)) + i][1]
return A_j / seasonLength | Calculates A_j. Aj is the average value of x in the jth cycle of your data
:return: A_j
:rtype: numeric | entailment |
def _split_quoted(text):
"""
Split a unicode string on *SPACE* characters.
Splitting is not done at *SPACE* characters occurring within matched
*QUOTATION MARK*s. *REVERSE SOLIDUS* can be used to remove all
interpretation from the following character.
:param unicode text: The string to split.
:return: A two-tuple of unicode giving the two split pieces.
"""
quoted = False
escaped = False
result = []
for i, ch in enumerate(text):
if escaped:
escaped = False
result.append(ch)
elif ch == u'\\':
escaped = True
elif ch == u'"':
quoted = not quoted
elif not quoted and ch == u' ':
return u"".join(result), text[i:].lstrip()
else:
result.append(ch)
return u"".join(result), u"" | Split a unicode string on *SPACE* characters.
Splitting is not done at *SPACE* characters occurring within matched
*QUOTATION MARK*s. *REVERSE SOLIDUS* can be used to remove all
interpretation from the following character.
:param unicode text: The string to split.
:return: A two-tuple of unicode giving the two split pieces. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.