desc stringlengths 3 26.7k | decl stringlengths 11 7.89k | bodies stringlengths 8 553k |
|---|---|---|
'This action is only used by the Amazon EC2 Container Service
agent, and it is not intended for use outside of the agent.
Sent to acknowledge that a task changed states.
:type cluster: string
:param cluster: The short name or full Amazon Resource Name (ARN) of
the cluster that hosts the task.
:type task: string
:param ... | def submit_task_state_change(self, cluster=None, task=None, status=None, reason=None):
| params = {}
if (cluster is not None):
params['cluster'] = cluster
if (task is not None):
params['task'] = task
if (status is not None):
params['status'] = status
if (reason is not None):
params['reason'] = reason
return self._make_request(action='SubmitTaskStateCh... |
'Initialize the handler.
We have extended the constructor to accept a username/password
for SMTP authentication.'
def __init__(self, mailhost, username, password, fromaddr, toaddrs, subject):
    """Initialize the handler.

    Extends SMTPHandler's constructor with a username/password pair
    that is later used for SMTP authentication when sending records.
    """
    super(AuthSMTPHandler, self).__init__(mailhost, fromaddr, toaddrs, subject)
    # Credentials consumed by smtp.login() at emit time.
    self.password = password
    self.username = username
'Emit a record.
Format the record and send it to the specified addressees.
It would be really nice if I could add authorization to this class
without having to resort to cut and paste inheritance but, no.'
| def emit(self, record):
| try:
port = self.mailport
if (not port):
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port)
smtp.login(self.username, self.password)
msg = self.format(record)
msg = ('From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s' % (se... |
'Load the string from an initial value, this should be the
raw hashed password.'
def __init__(self, str=None, hashfunc=None):
    """Load the string from an initial value.

    :param str: the raw hashed password (parameter name shadows the
        builtin ``str``; kept for backward compatibility with callers).
    :param hashfunc: optional hash function; only stored when truthy so
        the class-level default is otherwise left in place.
    """
    self.str = str
    if hashfunc:
        self.hashfunc = hashfunc
'From the command line, use `create-subscription`.
Creates a trail that specifies the settings for delivery of
log data to an Amazon S3 bucket.
:type name: string
:param name: Specifies the name of the trail.
:type s3_bucket_name: string
:param s3_bucket_name: Specifies the name of the Amazon S3 bucket
designated for p... | def create_trail(self, name, s3_bucket_name, s3_key_prefix=None, sns_topic_name=None, include_global_service_events=None, cloud_watch_logs_log_group_arn=None, cloud_watch_logs_role_arn=None):
| params = {'Name': name, 'S3BucketName': s3_bucket_name}
if (s3_key_prefix is not None):
params['S3KeyPrefix'] = s3_key_prefix
if (sns_topic_name is not None):
params['SnsTopicName'] = sns_topic_name
if (include_global_service_events is not None):
params['IncludeGlobalServiceEvent... |
'Deletes a trail.
:type name: string
:param name: The name of a trail to be deleted.'
def delete_trail(self, name):
    """Deletes a trail.

    :type name: string
    :param name: The name of the trail to be deleted.
    """
    body = json.dumps({'Name': name})
    return self.make_request(action='DeleteTrail', body=body)
'Retrieves settings for the trail associated with the current
region for your account.
:type trail_name_list: list
:param trail_name_list: A list of names of the trails to retrieve settings for.'
def describe_trails(self, trail_name_list=None):
    """Retrieves settings for the trails associated with the current
    region for your account.

    :type trail_name_list: list
    :param trail_name_list: Names of the trails to look up; when
        omitted, the request carries no filter.
    """
    params = {} if trail_name_list is None else {'trailNameList': trail_name_list}
    return self.make_request(action='DescribeTrails', body=json.dumps(params))
'Returns a JSON-formatted list of information about the
specified trail. Fields include information on delivery
errors, Amazon SNS and Amazon S3 errors, and start and stop
logging times for each trail.
:type name: string
:param name: The name of the trail for which you are requesting the
current status.'
def get_trail_status(self, name):
    """Returns a JSON-formatted list of information about the specified
    trail: delivery errors, Amazon SNS/S3 errors, and start/stop
    logging times.

    :type name: string
    :param name: The name of the trail whose status is requested.
    """
    body = json.dumps({'Name': name})
    return self.make_request(action='GetTrailStatus', body=body)
'Looks up API activity events captured by CloudTrail that
create, update, or delete resources in your account. Events
for a region can be looked up for the times in which you had
CloudTrail turned on in that region during the last seven
days. Lookup supports five different attributes: time range
(defined by a start tim... | def lookup_events(self, lookup_attributes=None, start_time=None, end_time=None, max_results=None, next_token=None):
| params = {}
if (lookup_attributes is not None):
params['LookupAttributes'] = lookup_attributes
if (start_time is not None):
params['StartTime'] = start_time
if (end_time is not None):
params['EndTime'] = end_time
if (max_results is not None):
params['MaxResults'] = ma... |
'Starts the recording of AWS API calls and log file delivery
for a trail.
:type name: string
:param name: The name of the trail for which CloudTrail logs AWS API
calls.'
def start_logging(self, name):
    """Starts the recording of AWS API calls and log file delivery for
    a trail.

    :type name: string
    :param name: The name of the trail to start logging for.
    """
    body = json.dumps({'Name': name})
    return self.make_request(action='StartLogging', body=body)
'Suspends the recording of AWS API calls and log file delivery
for the specified trail. Under most circumstances, there is no
need to use this action. You can update a trail without
stopping it first. This action is the only way to stop
recording.
:type name: string
:param name: Communicates to CloudTrail the name of t... | def stop_logging(self, name):
| params = {'Name': name}
return self.make_request(action='StopLogging', body=json.dumps(params))
|
'From the command line, use `update-subscription`.
Updates the settings that specify delivery of log files.
Changes to a trail do not require stopping the CloudTrail
service. Use this action to designate an existing bucket for
log delivery. If the existing bucket has previously been a
target for CloudTrail log files, a... | def update_trail(self, name, s3_bucket_name=None, s3_key_prefix=None, sns_topic_name=None, include_global_service_events=None, cloud_watch_logs_log_group_arn=None, cloud_watch_logs_role_arn=None):
| params = {'Name': name}
if (s3_bucket_name is not None):
params['S3BucketName'] = s3_bucket_name
if (s3_key_prefix is not None):
params['S3KeyPrefix'] = s3_key_prefix
if (sns_topic_name is not None):
params['SnsTopicName'] = sns_topic_name
if (include_global_service_events is... |
'This class method recurses through request data dictionary and removes
any default values.
:type data: dict
:param data: Specifies request parameters with default values to be removed.'
@classmethod
def _normalize_request_dict(cls, data):
    """Recursively remove default-valued entries from a request dict.

    Entries whose value is ``None`` or an empty dict are deleted in
    place. Nested dicts are normalized first, so dicts emptied by the
    recursion are dropped as well.

    :type data: dict
    :param data: request parameters to prune in place.
    """
    for key in list(data):
        if isinstance(data[key], dict):
            cls._normalize_request_dict(data[key])
        if data[key] in (None, {}):
            del data[key]
'This method wraps around make_request() to normalize and serialize the
dictionary with request parameters.
:type action: string
:param action: Specifies an SWF action.
:type data: dict
:param data: Specifies request parameters associated with the action.'
def json_request(self, action, data, object_hook=None):
    """Normalize, serialize and dispatch a request-parameter dict via
    make_request().

    :type action: string
    :param action: the SWF action to invoke.
    :type data: dict
    :param data: request parameters; default values are pruned in place
        before serialization.
    :param object_hook: optional JSON decoding hook forwarded to
        make_request.
    """
    self._normalize_request_dict(data)
    serialized = json.dumps(data)
    return self.make_request(action, serialized, object_hook)
':raises: ``SWFResponseError`` if response status is not 200.'
| def make_request(self, action, body='', object_hook=None):
| headers = {'X-Amz-Target': ('%s.%s' % (self.ServiceName, action)), 'Host': self.region.endpoint, 'Content-Type': 'application/json; charset=UTF-8', 'Content-Encoding': 'amz-1.0', 'Content-Length': str(len(body))}
http_request = self.build_base_http_request('POST', '/', '/', {}, headers, body, None)
respo... |
'Used by workers to get an ActivityTask from the specified
activity taskList. This initiates a long poll, where the
service holds the HTTP connection open and responds as soon as
a task becomes available. The maximum time the service holds
on to the request before responding is 60 seconds. If no task
is available withi... | def poll_for_activity_task(self, domain, task_list, identity=None):
| return self.json_request('PollForActivityTask', {'domain': domain, 'taskList': {'name': task_list}, 'identity': identity})
|
'Used by workers to tell the service that the ActivityTask
identified by the taskToken completed successfully with a
result (if provided).
:type task_token: string
:param task_token: The taskToken of the ActivityTask.
:type result: string
:param result: The result of the activity task. It is a free
form string that is ... | def respond_activity_task_completed(self, task_token, result=None):
| return self.json_request('RespondActivityTaskCompleted', {'taskToken': task_token, 'result': result})
|
'Used by workers to tell the service that the ActivityTask
identified by the taskToken has failed with reason (if
specified).
:type task_token: string
:param task_token: The taskToken of the ActivityTask.
:type details: string
:param details: Optional detailed information about the failure.
:type reason: string
:param ... | def respond_activity_task_failed(self, task_token, details=None, reason=None):
| return self.json_request('RespondActivityTaskFailed', {'taskToken': task_token, 'details': details, 'reason': reason})
|
'Used by workers to tell the service that the ActivityTask
identified by the taskToken was successfully
canceled. Additional details can be optionally provided using
the details argument.
:type task_token: string
:param task_token: The taskToken of the ActivityTask.
:type details: string
:param details: Optional detail... | def respond_activity_task_canceled(self, task_token, details=None):
| return self.json_request('RespondActivityTaskCanceled', {'taskToken': task_token, 'details': details})
|
'Used by activity workers to report to the service that the
ActivityTask represented by the specified taskToken is still
making progress. The worker can also (optionally) specify
details of the progress, for example percent complete, using
the details parameter. This action can also be used by the
worker as a mechanism... | def record_activity_task_heartbeat(self, task_token, details=None):
| return self.json_request('RecordActivityTaskHeartbeat', {'taskToken': task_token, 'details': details})
|
'Used by deciders to get a DecisionTask from the specified
decision taskList. A decision task may be returned for any
open workflow execution that is using the specified task
list. The task includes a paginated view of the history of the
workflow execution. The decider should use the workflow type
and the history to de... | def poll_for_decision_task(self, domain, task_list, identity=None, maximum_page_size=None, next_page_token=None, reverse_order=None):
| return self.json_request('PollForDecisionTask', {'domain': domain, 'taskList': {'name': task_list}, 'identity': identity, 'maximumPageSize': maximum_page_size, 'nextPageToken': next_page_token, 'reverseOrder': reverse_order})
|
'Used by deciders to tell the service that the DecisionTask
identified by the taskToken has successfully completed.
The decisions argument specifies the list of decisions
made while processing the task.
:type task_token: string
:param task_token: The taskToken of the ActivityTask.
:type decisions: list
:param decisions... | def respond_decision_task_completed(self, task_token, decisions=None, execution_context=None):
| return self.json_request('RespondDecisionTaskCompleted', {'taskToken': task_token, 'decisions': decisions, 'executionContext': execution_context})
|
'Records a WorkflowExecutionCancelRequested event in the
currently running workflow execution identified by the given
domain, workflowId, and runId. This logically requests the
cancellation of the workflow execution as a whole. It is up to
the decider to take appropriate actions when it receives an
execution history wi... | def request_cancel_workflow_execution(self, domain, workflow_id, run_id=None):
| return self.json_request('RequestCancelWorkflowExecution', {'domain': domain, 'workflowId': workflow_id, 'runId': run_id})
|
'Starts an execution of the workflow type in the specified
domain using the provided workflowId and input data.
:type domain: string
:param domain: The name of the domain in which the workflow
execution is created.
:type workflow_id: string
:param workflow_id: The user defined identifier associated with
the workflow ex... | def start_workflow_execution(self, domain, workflow_id, workflow_name, workflow_version, task_list=None, child_policy=None, execution_start_to_close_timeout=None, input=None, tag_list=None, task_start_to_close_timeout=None):
| return self.json_request('StartWorkflowExecution', {'domain': domain, 'workflowId': workflow_id, 'workflowType': {'name': workflow_name, 'version': workflow_version}, 'taskList': {'name': task_list}, 'childPolicy': child_policy, 'executionStartToCloseTimeout': execution_start_to_close_timeout, 'input': input, 'tagL... |
'Records a WorkflowExecutionSignaled event in the workflow
execution history and creates a decision task for the workflow
execution identified by the given domain, workflowId and
runId. The event is recorded with the specified user defined
signalName and input (if provided).
:type domain: string
:param domain: The name... | def signal_workflow_execution(self, domain, signal_name, workflow_id, input=None, run_id=None):
| return self.json_request('SignalWorkflowExecution', {'domain': domain, 'signalName': signal_name, 'workflowId': workflow_id, 'input': input, 'runId': run_id})
|
'Records a WorkflowExecutionTerminated event and forces closure
of the workflow execution identified by the given domain,
runId, and workflowId. The child policy, registered with the
workflow type or specified when starting this execution, is
applied to any open child workflow executions of this workflow
execution.
:ty... | def terminate_workflow_execution(self, domain, workflow_id, child_policy=None, details=None, reason=None, run_id=None):
| return self.json_request('TerminateWorkflowExecution', {'domain': domain, 'workflowId': workflow_id, 'childPolicy': child_policy, 'details': details, 'reason': reason, 'runId': run_id})
|
'Registers a new activity type along with its configuration
settings in the specified domain.
:type domain: string
:param domain: The name of the domain in which this activity is
to be registered.
:type name: string
:param name: The name of the activity type within the domain.
:type version: string
:param version: The ... | def register_activity_type(self, domain, name, version, task_list=None, default_task_heartbeat_timeout=None, default_task_schedule_to_close_timeout=None, default_task_schedule_to_start_timeout=None, default_task_start_to_close_timeout=None, description=None):
| return self.json_request('RegisterActivityType', {'domain': domain, 'name': name, 'version': version, 'defaultTaskList': {'name': task_list}, 'defaultTaskHeartbeatTimeout': default_task_heartbeat_timeout, 'defaultTaskScheduleToCloseTimeout': default_task_schedule_to_close_timeout, 'defaultTaskScheduleToStartTimeout... |
'Deprecates the specified activity type. After an activity
type has been deprecated, you cannot create new tasks of
that activity type. Tasks of this type that were scheduled
before the type was deprecated will continue to run.
:type domain: string
:param domain: The name of the domain in which the activity
type is reg... | def deprecate_activity_type(self, domain, activity_name, activity_version):
| return self.json_request('DeprecateActivityType', {'domain': domain, 'activityType': {'name': activity_name, 'version': activity_version}})
|
'Registers a new workflow type and its configuration settings
in the specified domain.
:type domain: string
:param domain: The name of the domain in which to register
the workflow type.
:type name: string
:param name: The name of the workflow type.
:type version: string
:param version: The version of the workflow type.... | def register_workflow_type(self, domain, name, version, task_list=None, default_child_policy=None, default_execution_start_to_close_timeout=None, default_task_start_to_close_timeout=None, description=None):
| return self.json_request('RegisterWorkflowType', {'domain': domain, 'name': name, 'version': version, 'defaultTaskList': {'name': task_list}, 'defaultChildPolicy': default_child_policy, 'defaultExecutionStartToCloseTimeout': default_execution_start_to_close_timeout, 'defaultTaskStartToCloseTimeout': default_task_st... |
'Deprecates the specified workflow type. After a workflow type
has been deprecated, you cannot create new executions of that
type. Executions that were started before the type was
deprecated will continue to run. A deprecated workflow type
may still be used when calling visibility actions.
:type domain: string
:param d... | def deprecate_workflow_type(self, domain, workflow_name, workflow_version):
| return self.json_request('DeprecateWorkflowType', {'domain': domain, 'workflowType': {'name': workflow_name, 'version': workflow_version}})
|
'Registers a new domain.
:type name: string
:param name: Name of the domain to register. The name must be unique.
:type workflow_execution_retention_period_in_days: string
:param workflow_execution_retention_period_in_days: Specifies
the duration *in days* for which the record (including the
history) of workflow execut... | def register_domain(self, name, workflow_execution_retention_period_in_days, description=None):
| return self.json_request('RegisterDomain', {'name': name, 'workflowExecutionRetentionPeriodInDays': workflow_execution_retention_period_in_days, 'description': description})
|
'Deprecates the specified domain. After a domain has been
deprecated it cannot be used to create new workflow executions
or register new types. However, you can still use visibility
actions on this domain. Deprecating a domain also deprecates
all activity and workflow types registered in the
domain. Executions that wer... | def deprecate_domain(self, name):
| return self.json_request('DeprecateDomain', {'name': name})
|
'Returns information about all activities registered in the
specified domain that match the specified name and
registration status. The result includes information like
creation date, current status of the activity, etc. The
results may be split into multiple pages. To retrieve
subsequent pages, make the call again usi... | def list_activity_types(self, domain, registration_status, name=None, maximum_page_size=None, next_page_token=None, reverse_order=None):
| return self.json_request('ListActivityTypes', {'domain': domain, 'name': name, 'registrationStatus': registration_status, 'maximumPageSize': maximum_page_size, 'nextPageToken': next_page_token, 'reverseOrder': reverse_order})
|
'Returns information about the specified activity type. This
includes configuration settings provided at registration time
as well as other general information about the type.
:type domain: string
:param domain: The name of the domain in which the activity
type is registered.
:type activity_name: string
:param activity... | def describe_activity_type(self, domain, activity_name, activity_version):
| return self.json_request('DescribeActivityType', {'domain': domain, 'activityType': {'name': activity_name, 'version': activity_version}})
|
'Returns information about workflow types in the specified
domain. The results may be split into multiple pages that can
be retrieved by making the call repeatedly.
:type domain: string
:param domain: The name of the domain in which the workflow
types have been registered.
:type registration_status: string
:param regis... | def list_workflow_types(self, domain, registration_status, maximum_page_size=None, name=None, next_page_token=None, reverse_order=None):
| return self.json_request('ListWorkflowTypes', {'domain': domain, 'name': name, 'registrationStatus': registration_status, 'maximumPageSize': maximum_page_size, 'nextPageToken': next_page_token, 'reverseOrder': reverse_order})
|
'Returns information about the specified workflow type. This
includes configuration settings specified when the type was
registered and other information such as creation date,
current status, etc.
:type domain: string
:param domain: The name of the domain in which this workflow
type is registered.
:type workflow_name:... | def describe_workflow_type(self, domain, workflow_name, workflow_version):
| return self.json_request('DescribeWorkflowType', {'domain': domain, 'workflowType': {'name': workflow_name, 'version': workflow_version}})
|
'Returns information about the specified workflow execution
including its type and some statistics.
:type domain: string
:param domain: The name of the domain containing the
workflow execution.
:type run_id: string
:param run_id: A system generated unique identifier for the
workflow execution.
:type workflow_id: string... | def describe_workflow_execution(self, domain, run_id, workflow_id):
| return self.json_request('DescribeWorkflowExecution', {'domain': domain, 'execution': {'runId': run_id, 'workflowId': workflow_id}})
|
'Returns the history of the specified workflow execution. The
results may be split into multiple pages. To retrieve
subsequent pages, make the call again using the nextPageToken
returned by the initial call.
:type domain: string
:param domain: The name of the domain containing the
workflow execution.
:type run_id: stri... | def get_workflow_execution_history(self, domain, run_id, workflow_id, maximum_page_size=None, next_page_token=None, reverse_order=None):
| return self.json_request('GetWorkflowExecutionHistory', {'domain': domain, 'execution': {'runId': run_id, 'workflowId': workflow_id}, 'maximumPageSize': maximum_page_size, 'nextPageToken': next_page_token, 'reverseOrder': reverse_order})
|
'Returns the number of open workflow executions within the
given domain that meet the specified filtering criteria.
.. note:
workflow_id, workflow_name/workflow_version and tag are mutually
exclusive. You can specify at most one of these in a request.
:type domain: string
:param domain: The name of the domain containin... | def count_open_workflow_executions(self, domain, latest_date, oldest_date, tag=None, workflow_id=None, workflow_name=None, workflow_version=None):
| return self.json_request('CountOpenWorkflowExecutions', {'domain': domain, 'startTimeFilter': {'oldestDate': oldest_date, 'latestDate': latest_date}, 'typeFilter': {'name': workflow_name, 'version': workflow_version}, 'executionFilter': {'workflowId': workflow_id}, 'tagFilter': {'tag': tag}})
|
'Returns the list of open workflow executions within the
given domain that meet the specified filtering criteria.
.. note:
workflow_id, workflow_name/workflow_version
and tag are mutually exclusive. You can specify at most
one of these in a request.
:type domain: string
:param domain: The name of the domain containing ... | def list_open_workflow_executions(self, domain, oldest_date, latest_date=None, tag=None, workflow_id=None, workflow_name=None, workflow_version=None, maximum_page_size=None, next_page_token=None, reverse_order=None):
| return self.json_request('ListOpenWorkflowExecutions', {'domain': domain, 'startTimeFilter': {'oldestDate': oldest_date, 'latestDate': latest_date}, 'tagFilter': {'tag': tag}, 'typeFilter': {'name': workflow_name, 'version': workflow_version}, 'executionFilter': {'workflowId': workflow_id}, 'maximumPageSize': maxim... |
'Returns the number of closed workflow executions within the
given domain that meet the specified filtering criteria.
.. note:
close_status, workflow_id, workflow_name/workflow_version
and tag are mutually exclusive. You can specify at most
one of these in a request.
.. note:
start_latest_date/start_oldest_date and
clo... | def count_closed_workflow_executions(self, domain, start_latest_date=None, start_oldest_date=None, close_latest_date=None, close_oldest_date=None, close_status=None, tag=None, workflow_id=None, workflow_name=None, workflow_version=None):
| return self.json_request('CountClosedWorkflowExecutions', {'domain': domain, 'startTimeFilter': {'oldestDate': start_oldest_date, 'latestDate': start_latest_date}, 'closeTimeFilter': {'oldestDate': close_oldest_date, 'latestDate': close_latest_date}, 'closeStatusFilter': {'status': close_status}, 'tagFilter': {'tag... |
'Returns the number of closed workflow executions within the
given domain that meet the specified filtering criteria.
.. note:
close_status, workflow_id, workflow_name/workflow_version
and tag are mutually exclusive. You can specify at most
one of these in a request.
.. note:
start_latest_date/start_oldest_date and
clo... | def list_closed_workflow_executions(self, domain, start_latest_date=None, start_oldest_date=None, close_latest_date=None, close_oldest_date=None, close_status=None, tag=None, workflow_id=None, workflow_name=None, workflow_version=None, maximum_page_size=None, next_page_token=None, reverse_order=None):
| return self.json_request('ListClosedWorkflowExecutions', {'domain': domain, 'startTimeFilter': {'oldestDate': start_oldest_date, 'latestDate': start_latest_date}, 'closeTimeFilter': {'oldestDate': close_oldest_date, 'latestDate': close_latest_date}, 'executionFilter': {'workflowId': workflow_id}, 'closeStatusFilter... |
'Returns the list of domains registered in the account. The
results may be split into multiple pages. To retrieve
subsequent pages, make the call again using the nextPageToken
returned by the initial call.
:type registration_status: string
:param registration_status: Specifies the registration status
of the domains to ... | def list_domains(self, registration_status, maximum_page_size=None, next_page_token=None, reverse_order=None):
| return self.json_request('ListDomains', {'registrationStatus': registration_status, 'maximumPageSize': maximum_page_size, 'nextPageToken': next_page_token, 'reverseOrder': reverse_order})
|
'Returns information about the specified domain including
description and status.
:type name: string
:param name: The name of the domain to describe.
:raises: UnknownResourceFault, SWFOperationNotPermittedError'
| def describe_domain(self, name):
| return self.json_request('DescribeDomain', {'name': name})
|
'Returns the estimated number of decision tasks in the
specified task list. The count returned is an approximation
and is not guaranteed to be exact. If you specify a task list
that no decision task was ever scheduled in then 0 will be
returned.
:type domain: string
:param domain: The name of the domain that contains t... | def count_pending_decision_tasks(self, domain, task_list):
| return self.json_request('CountPendingDecisionTasks', {'domain': domain, 'taskList': {'name': task_list}})
|
'Returns the estimated number of activity tasks in the
specified task list. The count returned is an approximation
and is not guaranteed to be exact. If you specify a task list
that no activity task was ever scheduled in then 0 will be
returned.
:type domain: string
:param domain: The name of the domain that contains t... | def count_pending_activity_tasks(self, domain, task_list):
| return self.json_request('CountPendingActivityTasks', {'domain': domain, 'taskList': {'name': task_list}})
|
'Schedules an activity task.
:type activity_id: string
:param activity_id: The activityId of the type of the activity
being scheduled.
:type activity_type_name: string
:param activity_type_name: The name of the type of the activity
being scheduled.
:type activity_type_version: string
:param activity_type_version: The v... | def schedule_activity_task(self, activity_id, activity_type_name, activity_type_version, task_list=None, control=None, heartbeat_timeout=None, schedule_to_close_timeout=None, schedule_to_start_timeout=None, start_to_close_timeout=None, input=None):
| o = {}
o['decisionType'] = 'ScheduleActivityTask'
attrs = o['scheduleActivityTaskDecisionAttributes'] = {}
attrs['activityId'] = activity_id
attrs['activityType'] = {'name': activity_type_name, 'version': activity_type_version}
if (task_list is not None):
attrs['taskList'] = {'name': tas... |
'Attempts to cancel a previously scheduled activity task. If
the activity task was scheduled but has not been assigned to a
worker, then it will be canceled. If the activity task was
already assigned to a worker, then the worker will be informed
that cancellation has been requested in the response to
RecordActivityTask... | def request_cancel_activity_task(self, activity_id):
| o = {}
o['decisionType'] = 'RequestCancelActivityTask'
attrs = o['requestCancelActivityTaskDecisionAttributes'] = {}
attrs['activityId'] = activity_id
self._data.append(o)
|
'Records a MarkerRecorded event in the history. Markers can be
used for adding custom information in the history for instance
to let deciders know that they do not need to look at the
history beyond the marker event.'
def record_marker(self, marker_name, details=None):
    """Append a RecordMarker decision.

    Markers add custom information to the history, for instance to let
    deciders know they need not look at events beyond the marker.

    :param marker_name: name of the marker to record.
    :param details: optional details attached to the marker.
    """
    attributes = {'markerName': marker_name}
    if details is not None:
        attributes['details'] = details
    self._data.append({
        'decisionType': 'RecordMarker',
        'recordMarkerDecisionAttributes': attributes,
    })
'Closes the workflow execution and records a WorkflowExecutionCompleted
event in the history'
def complete_workflow_execution(self, result=None):
    """Append a CompleteWorkflowExecution decision, which closes the
    workflow execution and records a WorkflowExecutionCompleted event.

    :param result: optional result of the workflow execution.
    """
    attributes = {}
    if result is not None:
        attributes['result'] = result
    self._data.append({
        'decisionType': 'CompleteWorkflowExecution',
        'completeWorkflowExecutionDecisionAttributes': attributes,
    })
'Closes the workflow execution and records a
WorkflowExecutionFailed event in the history.'
def fail_workflow_execution(self, reason=None, details=None):
    """Append a FailWorkflowExecution decision, which closes the
    workflow execution and records a WorkflowExecutionFailed event.

    :param reason: optional descriptive reason for the failure.
    :param details: optional details of the failure.
    """
    attributes = {}
    if reason is not None:
        attributes['reason'] = reason
    if details is not None:
        attributes['details'] = details
    self._data.append({
        'decisionType': 'FailWorkflowExecution',
        'failWorkflowExecutionDecisionAttributes': attributes,
    })
'Closes the workflow execution and records a WorkflowExecutionCanceled
event in the history.'
def cancel_workflow_executions(self, details=None):
    """Append a CancelWorkflowExecution decision, which closes the
    workflow execution and records a WorkflowExecutionCanceled event.

    :param details: optional details of the cancellation.
    """
    o = {}
    o['decisionType'] = 'CancelWorkflowExecution'
    # Bug fix: the SWF API attribute key is the singular
    # 'cancelWorkflowExecutionDecisionAttributes'; the previous
    # plural spelling was silently ignored by the service.
    attrs = o['cancelWorkflowExecutionDecisionAttributes'] = {}
    if (details is not None):
        attrs['details'] = details
    self._data.append(o)
'Closes the workflow execution and starts a new workflow execution of
the same type using the same workflow id and a unique run Id. A
WorkflowExecutionContinuedAsNew event is recorded in the history.'
| def continue_as_new_workflow_execution(self, child_policy=None, execution_start_to_close_timeout=None, input=None, tag_list=None, task_list=None, start_to_close_timeout=None, workflow_type_version=None):
| o = {}
o['decisionType'] = 'ContinueAsNewWorkflowExecution'
attrs = o['continueAsNewWorkflowExecutionDecisionAttributes'] = {}
if (child_policy is not None):
attrs['childPolicy'] = child_policy
if (execution_start_to_close_timeout is not None):
attrs['executionStartToCloseTimeout'] =... |
'Starts a timer for this workflow execution and records a TimerStarted
event in the history. This timer will fire after the specified delay
and record a TimerFired event.'
def start_timer(self, start_to_fire_timeout, timer_id, control=None):
    """Append a StartTimer decision, which records a TimerStarted event;
    the timer fires after the specified delay and records TimerFired.

    :param start_to_fire_timeout: delay (in seconds, as a string)
        before the timer fires.
    :param timer_id: unique identifier of the timer.
    :param control: optional data attached for use by the decider.
    """
    attributes = {
        'startToFireTimeout': start_to_fire_timeout,
        'timerId': timer_id,
    }
    if control is not None:
        attributes['control'] = control
    self._data.append({
        'decisionType': 'StartTimer',
        'startTimerDecisionAttributes': attributes,
    })
'Cancels a previously started timer and records a TimerCanceled
event in the history.'
def cancel_timer(self, timer_id):
    """Append a CancelTimer decision, which cancels a previously
    started timer and records a TimerCanceled event.

    :param timer_id: identifier of the timer to cancel.
    """
    self._data.append({
        'decisionType': 'CancelTimer',
        'cancelTimerDecisionAttributes': {'timerId': timer_id},
    })
'Requests a signal to be delivered to the specified external workflow
execution and records a SignalExternalWorkflowExecutionInitiated
event in the history.'
| def signal_external_workflow_execution(self, workflow_id, signal_name, run_id=None, control=None, input=None):
| o = {}
o['decisionType'] = 'SignalExternalWorkflowExecution'
attrs = o['signalExternalWorkflowExecutionDecisionAttributes'] = {}
attrs['workflowId'] = workflow_id
attrs['signalName'] = signal_name
if (run_id is not None):
attrs['runId'] = run_id
if (control is not None):
attr... |
'Requests that a request be made to cancel the specified
external workflow execution and records a
RequestCancelExternalWorkflowExecutionInitiated event in the
history.'
| def request_cancel_external_workflow_execution(self, workflow_id, control=None, run_id=None):
| o = {}
o['decisionType'] = 'RequestCancelExternalWorkflowExecution'
attrs = o['requestCancelExternalWorkflowExecutionDecisionAttributes'] = {}
attrs['workflowId'] = workflow_id
if (control is not None):
attrs['control'] = control
if (run_id is not None):
attrs['runId'] = run_id
... |
'Requests that a child workflow execution be started and
records a StartChildWorkflowExecutionInitiated event in the
history. The child workflow execution is a separate workflow
execution with its own history.'
| def start_child_workflow_execution(self, workflow_type_name, workflow_type_version, workflow_id, child_policy=None, control=None, execution_start_to_close_timeout=None, input=None, tag_list=None, task_list=None, task_start_to_close_timeout=None):
| o = {}
o['decisionType'] = 'StartChildWorkflowExecution'
attrs = o['startChildWorkflowExecutionDecisionAttributes'] = {}
attrs['workflowType'] = {'name': workflow_type_name, 'version': workflow_type_version}
attrs['workflowId'] = workflow_id
if (child_policy is not None):
attrs['childPol... |
'DescribeDomain.'
@wraps(Layer1.describe_domain)
def describe(self):
    """DescribeDomain: fetch description and status for this domain."""
    swf = self._swf
    return swf.describe_domain(self.name)
@wraps(Layer1.deprecate_domain)
def deprecate(self):
    """Deprecate this domain via the SWF DeprecateDomain call."""
    self._swf.deprecate_domain(self.name)
|
@wraps(Layer1.register_domain)
def register(self):
    """Register this domain (SWF RegisterDomain) with its retention and description."""
    # Layer1 expects the retention period as a string.
    retention = str(self.retention)
    self._swf.register_domain(self.name, retention, self.description)
|
'ListActivityTypes.'
| @wraps(Layer1.list_activity_types)
def activities(self, status='REGISTERED', **kwargs):
| act_types = self._swf.list_activity_types(self.name, status, **kwargs)
act_objects = []
for act_args in act_types['typeInfos']:
act_ident = act_args['activityType']
del act_args['activityType']
act_args.update(act_ident)
act_args.update({'aws_access_key_id': self.aws_access_k... |
'ListWorkflowTypes.'
| @wraps(Layer1.list_workflow_types)
def workflows(self, status='REGISTERED', **kwargs):
| wf_types = self._swf.list_workflow_types(self.name, status, **kwargs)
wf_objects = []
for wf_args in wf_types['typeInfos']:
wf_ident = wf_args['workflowType']
del wf_args['workflowType']
wf_args.update(wf_ident)
wf_args.update({'aws_access_key_id': self.aws_access_key_id, 'aw... |
'List list open/closed executions.
For a full list of available parameters refer to
:py:func:`boto.swf.layer1.Layer1.list_closed_workflow_executions` and
:py:func:`boto.swf.layer1.Layer1.list_open_workflow_executions`'
| def executions(self, closed=False, **kwargs):
| if closed:
executions = self._swf.list_closed_workflow_executions(self.name, **kwargs)
else:
if ('oldest_date' not in kwargs):
kwargs['oldest_date'] = (time.time() - (3600 * 24))
executions = self._swf.list_open_workflow_executions(self.name, **kwargs)
exe_objects = []
... |
@wraps(Layer1.count_pending_activity_tasks)
def count_pending_activity_tasks(self, task_list):
    """Return the CountPendingActivityTasks result for *task_list* in this domain."""
    return self._swf.count_pending_activity_tasks(self.name, task_list)
|
@wraps(Layer1.count_pending_decision_tasks)
def count_pending_decision_tasks(self, task_list):
    """Return the CountPendingDecisionTasks result for *task_list* in this domain."""
    return self._swf.count_pending_decision_tasks(self.name, task_list)
|
def run(self):
    """Entry point that concrete subclasses must override.

    Raises:
        NotImplementedError: always, on the base class.
    """
    raise NotImplementedError()
|
@wraps(Layer1.respond_activity_task_canceled)
def cancel(self, task_token=None, details=None):
    """RespondActivityTaskCanceled, defaulting to the last polled task token."""
    token = self.last_tasktoken if task_token is None else task_token
    return self._swf.respond_activity_task_canceled(token, details)
|
@wraps(Layer1.respond_activity_task_completed)
def complete(self, task_token=None, result=None):
    """RespondActivityTaskCompleted, defaulting to the last polled task token."""
    token = self.last_tasktoken if task_token is None else task_token
    return self._swf.respond_activity_task_completed(token, result)
|
@wraps(Layer1.respond_activity_task_failed)
def fail(self, task_token=None, details=None, reason=None):
    """RespondActivityTaskFailed, defaulting to the last polled task token."""
    token = self.last_tasktoken if task_token is None else task_token
    return self._swf.respond_activity_task_failed(token, details, reason)
|
@wraps(Layer1.record_activity_task_heartbeat)
def heartbeat(self, task_token=None, details=None):
    """RecordActivityTaskHeartbeat, defaulting to the last polled task token."""
    token = self.last_tasktoken if task_token is None else task_token
    return self._swf.record_activity_task_heartbeat(token, details)
|
@wraps(Layer1.poll_for_activity_task)
def poll(self, **kwargs):
    """PollForActivityTask on this worker's domain.

    A ``task_list`` keyword overrides ``self.task_list`` for this poll
    only.  The returned task's token is remembered in
    ``self.last_tasktoken`` so the respond_* helpers can default to it.
    """
    # dict.pop with a default replaces the original in/get/del dance.
    task_list = kwargs.pop('task_list', self.task_list)
    task = self._swf.poll_for_activity_task(self.domain, task_list, **kwargs)
    self.last_tasktoken = task.get('taskToken')
    return task
|
@wraps(Layer1.respond_decision_task_completed)
def complete(self, task_token=None, decisions=None, **kwargs):
    """RespondDecisionTaskCompleted, accepting a Layer1Decisions helper.

    Defaults the token to the last polled decision task's token.
    """
    if isinstance(decisions, Layer1Decisions):
        # Unwrap the helper down to its raw decision list.
        decisions = decisions._data
    token = task_token if task_token is not None else self.last_tasktoken
    return self._swf.respond_decision_task_completed(token, decisions, **kwargs)
|
@wraps(Layer1.poll_for_decision_task)
def poll(self, **kwargs):
    """PollForDecisionTask on this decider's domain.

    A ``task_list`` keyword overrides ``self.task_list`` for this poll
    only.  The returned decision task's token is remembered in
    ``self.last_tasktoken`` so ``complete`` can default to it.
    """
    # dict.pop with a default replaces the original in/get/del dance.
    task_list = kwargs.pop('task_list', self.task_list)
    decision_task = self._swf.poll_for_decision_task(self.domain, task_list, **kwargs)
    self.last_tasktoken = decision_task.get('taskToken')
    return decision_task
|
@wraps(Layer1.describe_workflow_type)
def describe(self):
    """Fetch this workflow type's details via SWF DescribeWorkflowType."""
    return self._swf.describe_workflow_type(self.domain, self.name, self.version)
|
@wraps(Layer1.register_workflow_type)
def register(self, **kwargs):
    """RegisterWorkflowType with sensible defaults; kwargs override them."""
    args = {
        'default_execution_start_to_close_timeout': '3600',
        'default_task_start_to_close_timeout': '300',
        'default_child_policy': 'TERMINATE',
    }
    # Caller-supplied values win over the defaults above.
    args.update(kwargs)
    self._swf.register_workflow_type(self.domain, self.name, self.version, **args)
|
@wraps(Layer1.deprecate_workflow_type)
def deprecate(self):
    """Deprecate this workflow type via SWF DeprecateWorkflowType."""
    self._swf.deprecate_workflow_type(self.domain, self.name, self.version)
|
'StartWorkflowExecution.'
| @wraps(Layer1.start_workflow_execution)
def start(self, **kwargs):
| if ('workflow_id' in kwargs):
workflow_id = kwargs['workflow_id']
del kwargs['workflow_id']
else:
workflow_id = ('%s-%s-%i' % (self.name, self.version, time.time()))
for def_attr in ('task_list', 'child_policy'):
kwargs[def_attr] = kwargs.get(def_attr, getattr(self, def_attr)... |
@wraps(Layer1.signal_workflow_execution)
def signal(self, signame, **kwargs):
    """Send *signame* to this execution via SWF SignalWorkflowExecution."""
    self._swf.signal_workflow_execution(self.domain, signame, self.workflowId, **kwargs)
|
@wraps(Layer1.terminate_workflow_execution)
def terminate(self, **kwargs):
    """Terminate this execution via SWF TerminateWorkflowExecution."""
    return self._swf.terminate_workflow_execution(self.domain, self.workflowId, **kwargs)
|
@wraps(Layer1.get_workflow_execution_history)
def history(self, **kwargs):
    """Return the 'events' list from GetWorkflowExecutionHistory."""
    response = self._swf.get_workflow_execution_history(
        self.domain, self.runId, self.workflowId, **kwargs)
    return response['events']
|
@wraps(Layer1.describe_workflow_execution)
def describe(self):
    """Fetch this execution's details via SWF DescribeWorkflowExecution."""
    return self._swf.describe_workflow_execution(self.domain, self.runId, self.workflowId)
|
@wraps(Layer1.request_cancel_workflow_execution)
def request_cancel(self):
    """Request cancellation of this execution (RequestCancelWorkflowExecution)."""
    return self._swf.request_cancel_workflow_execution(self.domain, self.workflowId, self.runId)
|
@wraps(Layer1.deprecate_activity_type)
def deprecate(self):
    """Deprecate this activity type via SWF DeprecateActivityType."""
    return self._swf.deprecate_activity_type(self.domain, self.name, self.version)
|
@wraps(Layer1.describe_activity_type)
def describe(self):
    """Fetch this activity type's details via SWF DescribeActivityType."""
    return self._swf.describe_activity_type(self.domain, self.name, self.version)
|
@wraps(Layer1.register_activity_type)
def register(self, **kwargs):
    """RegisterActivityType with sensible timeout defaults; kwargs override them."""
    args = {
        'default_task_heartbeat_timeout': '600',
        'default_task_schedule_to_close_timeout': '3900',
        'default_task_schedule_to_start_timeout': '300',
        'default_task_start_to_close_timeout': '3600',
    }
    # Caller-supplied values win over the defaults above.
    args.update(kwargs)
    self._swf.register_activity_type(self.domain, self.name, self.version, **args)
|
@classmethod
def Inventory(cls):
    """Return a ServerSet holding one Server for each Server object persisted in the db."""
    servers = ServerSet()
    for server in cls.find():
        servers.append(server)
    return servers
|
def set_config(self, config):
    """Attach an SDB-backed config and persist it under this server's id.

    :param config: config object supporting dump_to_sdb(domain, item_name).
    """
    self._config = config
    # Persist immediately to the 'botoConfigs' SDB domain, keyed by server id.
    config.dump_to_sdb('botoConfigs', self.id)
|
def attach_volume(self, volume, device='/dev/sdp'):
    """Attach an EBS volume to this server.

    :param volume: boto.ec2.volume.Volume instance, or a raw volume id string.
    :param device: device name to attach as (default '/dev/sdp').
    """
    # Accept either a Volume object (has .id) or a plain volume-id string.
    volume_id = getattr(volume, 'id', volume)
    return self.ec2.attach_volume(volume_id=volume_id,
                                  instance_id=self.instance_id,
                                  device=device)
|
def detach_volume(self, volume):
    """Detach an EBS volume from this server.

    :param volume: boto.ec2.volume.Volume instance, or a raw volume id string.
    """
    # Accept either a Volume object (has .id) or a plain volume-id string.
    volume_id = getattr(volume, 'id', volume)
    return self.ec2.detach_volume(volume_id=volume_id,
                                  instance_id=self.instance_id)
|
def add_source_identifier_to_subscription(self, subscription_name, source_identifier):
    """Add a source identifier to an existing RDS event notification subscription.

    :param subscription_name: name of the RDS event notification subscription
        to add the source identifier to.
    :param source_identifier: identifier of the event source to add.
    """
    params = {
        'SubscriptionName': subscription_name,
        'SourceIdentifier': source_identifier,
    }
    return self._make_request(action='AddSourceIdentifierToSubscription',
                              verb='POST', path='/', params=params)
|
def add_tags_to_resource(self, resource_name, tags):
    """Add metadata tags to an Amazon RDS resource (AddTagsToResource).

    Tags can be used for cost-allocation reporting and in IAM policy
    Condition statements.

    :param resource_name: the Amazon RDS resource to tag.
    :param tags: sequence of (key, value) tag pairs.
    """
    params = {'ResourceName': resource_name}
    # Serialize tags as Tags.member.N.Key / Tags.member.N.Value entries.
    self.build_complex_list_params(params, tags, 'Tags.member', ('Key', 'Value'))
    return self._make_request(action='AddTagsToResource',
                              verb='POST', path='/', params=params)
|
'Enables ingress to a DBSecurityGroup using one of two forms of
authorization. First, EC2 or VPC security groups can be added
to the DBSecurityGroup if the application using the database
is running on EC2 or VPC instances. Second, IP ranges are
available if the application accessing your database is
running on the Inte... | def authorize_db_security_group_ingress(self, db_security_group_name, cidrip=None, ec2_security_group_name=None, ec2_security_group_id=None, ec2_security_group_owner_id=None):
| params = {'DBSecurityGroupName': db_security_group_name}
if (cidrip is not None):
params['CIDRIP'] = cidrip
if (ec2_security_group_name is not None):
params['EC2SecurityGroupName'] = ec2_security_group_name
if (ec2_security_group_id is not None):
params['EC2SecurityGroupId'] = ec... |
'Copies the specified DBSnapshot. The source DBSnapshot must be
in the "available" state.
:type source_db_snapshot_identifier: string
:param source_db_snapshot_identifier: The identifier for the source DB
snapshot.
Constraints:
+ Must be the identifier for a valid system snapshot in the "available"
state.
Example: `rds... | def copy_db_snapshot(self, source_db_snapshot_identifier, target_db_snapshot_identifier, tags=None):
| params = {'SourceDBSnapshotIdentifier': source_db_snapshot_identifier, 'TargetDBSnapshotIdentifier': target_db_snapshot_identifier}
if (tags is not None):
self.build_complex_list_params(params, tags, 'Tags.member', ('Key', 'Value'))
return self._make_request(action='CopyDBSnapshot', verb='POST', pat... |
'Creates a new DB instance.
:type db_name: string
:param db_name: The meaning of this parameter differs according to the
database engine you use.
**MySQL**
The name of the database to create when the DB instance is created. If
this parameter is not specified, no database is created in the DB
instance.
Constraints:
+ Mu... | def create_db_instance(self, db_instance_identifier, allocated_storage, db_instance_class, engine, master_username, master_user_password, db_name=None, db_security_groups=None, vpc_security_group_ids=None, availability_zone=None, db_subnet_group_name=None, preferred_maintenance_window=None, db_parameter_group_name=None... | params = {'DBInstanceIdentifier': db_instance_identifier, 'AllocatedStorage': allocated_storage, 'DBInstanceClass': db_instance_class, 'Engine': engine, 'MasterUsername': master_username, 'MasterUserPassword': master_user_password}
if (db_name is not None):
params['DBName'] = db_name
if (db_security... |
'Creates a DB instance that acts as a read replica of a source
DB instance.
All read replica DB instances are created as Single-AZ
deployments with backups disabled. All other DB instance
attributes (including DB security groups and DB parameter
groups) are inherited from the source DB instance, except as
specified bel... | def create_db_instance_read_replica(self, db_instance_identifier, source_db_instance_identifier, db_instance_class=None, availability_zone=None, port=None, auto_minor_version_upgrade=None, iops=None, option_group_name=None, publicly_accessible=None, tags=None):
| params = {'DBInstanceIdentifier': db_instance_identifier, 'SourceDBInstanceIdentifier': source_db_instance_identifier}
if (db_instance_class is not None):
params['DBInstanceClass'] = db_instance_class
if (availability_zone is not None):
params['AvailabilityZone'] = availability_zone
if (... |
'Creates a new DB parameter group.
A DB parameter group is initially created with the default
parameters for the database engine used by the DB instance. To
provide custom values for any of the parameters, you must
modify the group after creating it using
ModifyDBParameterGroup . Once you\'ve created a DB parameter
gro... | def create_db_parameter_group(self, db_parameter_group_name, db_parameter_group_family, description, tags=None):
| params = {'DBParameterGroupName': db_parameter_group_name, 'DBParameterGroupFamily': db_parameter_group_family, 'Description': description}
if (tags is not None):
self.build_complex_list_params(params, tags, 'Tags.member', ('Key', 'Value'))
return self._make_request(action='CreateDBParameterGroup', ... |
'Creates a new DB security group. DB security groups control
access to a DB instance.
:type db_security_group_name: string
:param db_security_group_name: The name for the DB security group. This
value is stored as a lowercase string.
Constraints:
+ Must be 1 to 255 alphanumeric characters
+ First character must be a le... | def create_db_security_group(self, db_security_group_name, db_security_group_description, tags=None):
| params = {'DBSecurityGroupName': db_security_group_name, 'DBSecurityGroupDescription': db_security_group_description}
if (tags is not None):
self.build_complex_list_params(params, tags, 'Tags.member', ('Key', 'Value'))
return self._make_request(action='CreateDBSecurityGroup', verb='POST', path='/', ... |
def create_db_snapshot(self, db_snapshot_identifier, db_instance_identifier, tags=None):
    """Create a DBSnapshot; the source DBInstance must be in the 'available' state.

    :param db_snapshot_identifier: identifier for the new DB snapshot
        (1-255 alphanumeric characters or hyphens, starting with a letter).
    :param db_instance_identifier: identifier of the source DB instance.
    :param tags: optional sequence of (key, value) tag pairs.
    """
    params = {
        'DBSnapshotIdentifier': db_snapshot_identifier,
        'DBInstanceIdentifier': db_instance_identifier,
    }
    if tags is not None:
        # Serialize tags as Tags.member.N.Key / Tags.member.N.Value entries.
        self.build_complex_list_params(params, tags, 'Tags.member', ('Key', 'Value'))
    return self._make_request(action='CreateDBSnapshot',
                              verb='POST', path='/', params=params)
|
'Creates a new DB subnet group. DB subnet groups must contain
at least one subnet in at least two AZs in the region.
:type db_subnet_group_name: string
:param db_subnet_group_name: The name for the DB subnet group. This
value is stored as a lowercase string.
Constraints: Must contain no more than 255 alphanumeric chara... | def create_db_subnet_group(self, db_subnet_group_name, db_subnet_group_description, subnet_ids, tags=None):
| params = {'DBSubnetGroupName': db_subnet_group_name, 'DBSubnetGroupDescription': db_subnet_group_description}
self.build_list_params(params, subnet_ids, 'SubnetIds.member')
if (tags is not None):
self.build_complex_list_params(params, tags, 'Tags.member', ('Key', 'Value'))
return self._make_requ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.