partition: stringclasses (3 values)
func_name: stringlengths (1 to 134)
docstring: stringlengths (1 to 46.9k)
path: stringlengths (4 to 223)
original_string: stringlengths (75 to 104k)
code: stringlengths (75 to 104k)
docstring_tokens: listlengths (1 to 1.97k)
repo: stringlengths (7 to 55)
language: stringclasses (1 value)
url: stringlengths (87 to 315)
code_tokens: listlengths (19 to 28.4k)
sha: stringlengths (40 to 40)
train
ApiGenerator._add_auth
Add Auth configuration to the Swagger file, if necessary
samtranslator/model/api/api_generator.py
def _add_auth(self):
    """
    Add Auth configuration to the Swagger file, if necessary
    """

    if not self.auth:
        return

    if self.auth and not self.definition_body:
        raise InvalidResourceException(self.logical_id,
                                       "Auth works only with inline Swagger specified in "
                                       "'DefinitionBody' property")

    # Make sure keys in the dict are recognized
    if not all(key in AuthProperties._fields for key in self.auth.keys()):
        raise InvalidResourceException(
            self.logical_id, "Invalid value for 'Auth' property")

    if not SwaggerEditor.is_valid(self.definition_body):
        raise InvalidResourceException(self.logical_id,
                                       "Unable to add Auth configuration because "
                                       "'DefinitionBody' does not contain a valid Swagger")

    swagger_editor = SwaggerEditor(self.definition_body)
    auth_properties = AuthProperties(**self.auth)
    authorizers = self._get_authorizers(auth_properties.Authorizers, auth_properties.DefaultAuthorizer)

    if authorizers:
        swagger_editor.add_authorizers(authorizers)
        self._set_default_authorizer(swagger_editor, authorizers, auth_properties.DefaultAuthorizer)

    # Assign the Swagger back to template
    self.definition_body = swagger_editor.swagger
[ "Add", "Auth", "configuration", "to", "the", "Swagger", "file", "if", "necessary" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L251-L281
[ "def", "_add_auth", "(", "self", ")", ":", "if", "not", "self", ".", "auth", ":", "return", "if", "self", ".", "auth", "and", "not", "self", ".", "definition_body", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Auth work...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
ApiGenerator._add_gateway_responses
Add Gateway Response configuration to the Swagger file, if necessary
samtranslator/model/api/api_generator.py
def _add_gateway_responses(self):
    """
    Add Gateway Response configuration to the Swagger file, if necessary
    """

    if not self.gateway_responses:
        return

    if self.gateway_responses and not self.definition_body:
        raise InvalidResourceException(
            self.logical_id, "GatewayResponses works only with inline Swagger specified in "
                             "'DefinitionBody' property")

    # Make sure keys in the dict are recognized
    for responses_key, responses_value in self.gateway_responses.items():
        for response_key in responses_value.keys():
            if response_key not in GatewayResponseProperties:
                raise InvalidResourceException(
                    self.logical_id,
                    "Invalid property '{}' in 'GatewayResponses' property '{}'".format(response_key, responses_key))

    if not SwaggerEditor.is_valid(self.definition_body):
        raise InvalidResourceException(
            self.logical_id, "Unable to add Auth configuration because "
                             "'DefinitionBody' does not contain a valid Swagger")

    swagger_editor = SwaggerEditor(self.definition_body)

    gateway_responses = {}
    for response_type, response in self.gateway_responses.items():
        gateway_responses[response_type] = ApiGatewayResponse(
            api_logical_id=self.logical_id,
            response_parameters=response.get('ResponseParameters', {}),
            response_templates=response.get('ResponseTemplates', {}),
            status_code=response.get('StatusCode', None)
        )

    if gateway_responses:
        swagger_editor.add_gateway_responses(gateway_responses)

    # Assign the Swagger back to template
    self.definition_body = swagger_editor.swagger
[ "Add", "Gateway", "Response", "configuration", "to", "the", "Swagger", "file", "if", "necessary" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L283-L324
[ "def", "_add_gateway_responses", "(", "self", ")", ":", "if", "not", "self", ".", "gateway_responses", ":", "return", "if", "self", ".", "gateway_responses", "and", "not", "self", ".", "definition_body", ":", "raise", "InvalidResourceException", "(", "self", "."...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
ApiGenerator._get_permission
Constructs and returns the Lambda Permission resource allowing the Authorizer to invoke the function. :returns: the permission resource :rtype: model.lambda_.LambdaPermission
samtranslator/model/api/api_generator.py
def _get_permission(self, authorizer_name, authorizer_lambda_function_arn):
    """Constructs and returns the Lambda Permission resource allowing the Authorizer to invoke the function.

    :returns: the permission resource
    :rtype: model.lambda_.LambdaPermission
    """
    rest_api = ApiGatewayRestApi(self.logical_id, depends_on=self.depends_on, attributes=self.resource_attributes)
    api_id = rest_api.get_runtime_attr('rest_api_id')

    partition = ArnGenerator.get_partition_name()
    resource = '${__ApiId__}/authorizers/*'
    source_arn = fnSub(ArnGenerator.generate_arn(partition=partition, service='execute-api', resource=resource),
                       {"__ApiId__": api_id})

    lambda_permission = LambdaPermission(self.logical_id + authorizer_name + 'AuthorizerPermission',
                                         attributes=self.passthrough_resource_attributes)
    lambda_permission.Action = 'lambda:invokeFunction'
    lambda_permission.FunctionName = authorizer_lambda_function_arn
    lambda_permission.Principal = 'apigateway.amazonaws.com'
    lambda_permission.SourceArn = source_arn

    return lambda_permission
[ "Constructs", "and", "returns", "the", "Lambda", "Permission", "resource", "allowing", "the", "Authorizer", "to", "invoke", "the", "function", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L360-L381
[ "def", "_get_permission", "(", "self", ",", "authorizer_name", ",", "authorizer_lambda_function_arn", ")", ":", "rest_api", "=", "ApiGatewayRestApi", "(", "self", ".", "logical_id", ",", "depends_on", "=", "self", ".", "depends_on", ",", "attributes", "=", "self",...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
ApiGenerator._set_endpoint_configuration
Sets endpoint configuration property of AWS::ApiGateway::RestApi resource :param rest_api: RestApi resource :param string/dict value: Value to be set
samtranslator/model/api/api_generator.py
def _set_endpoint_configuration(self, rest_api, value):
    """
    Sets endpoint configuration property of AWS::ApiGateway::RestApi resource
    :param rest_api: RestApi resource
    :param string/dict value: Value to be set
    """

    rest_api.EndpointConfiguration = {"Types": [value]}
    rest_api.Parameters = {"endpointConfigurationTypes": value}
[ "Sets", "endpoint", "configuration", "property", "of", "AWS", "::", "ApiGateway", "::", "RestApi", "resource", ":", "param", "rest_api", ":", "RestApi", "resource", ":", "param", "string", "/", "dict", "value", ":", "Value", "to", "be", "set" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L416-L424
[ "def", "_set_endpoint_configuration", "(", "self", ",", "rest_api", ",", "value", ")", ":", "rest_api", ".", "EndpointConfiguration", "=", "{", "\"Types\"", ":", "[", "value", "]", "}", "rest_api", ".", "Parameters", "=", "{", "\"endpointConfigurationTypes\"", "...
cccb0c96b5c91e53355ebc07e542467303a5eedd
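For concreteness, a minimal sketch of the shape that _set_endpoint_configuration writes onto the RestApi resource. The SimpleNamespace stand-in and the "REGIONAL" value are illustrative assumptions, not part of the dataset record.

from types import SimpleNamespace

# Stand-in object; only the two attributes written by
# _set_endpoint_configuration matter for this illustration.
rest_api = SimpleNamespace()

value = "REGIONAL"
rest_api.EndpointConfiguration = {"Types": [value]}
rest_api.Parameters = {"endpointConfigurationTypes": value}

print(rest_api.EndpointConfiguration)  # {'Types': ['REGIONAL']}
print(rest_api.Parameters)             # {'endpointConfigurationTypes': 'REGIONAL'}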
train
retry
The retry function will keep retrying `task_to_try` until either: (1) it returns None, then retry() finishes (2) `max_attempts` is reached, then retry() raises an exception. (3) if retrying one more time will cause total wait time to go above: `expiration_duration`, then retry() raises an exception Beware that any exception raised by task_to_try won't get surfaced until (2) or (3) is satisfied. At step n, it sleeps for [0, delay), where delay is defined as the following: `delay = min(max_delay, multiplier * (backoff_coefficient ** (n - 1))) * time_unit` seconds Additionally, if you enable jitter, for each retry, the function will instead sleep for: random.random() * sleep, that is [0, sleep) seconds. :param time_unit: This field represents a fraction of a second, which is used as a multiplier to compute the amount of time to sleep. :type time_unit: float :param multiplier: The initial wait duration for the first retry. :type multiplier: float :param backoff_coefficient: the base value for exponential retry. :type backoff_coefficient: float :param max_delay: The maximum amount of time to wait per try. :type max_delay: float :param max_attempts: This method will retry up to this value. :type max_attempts: int :param expiration_duration: the maximum amount of time retry can wait. :type expiration_duration: float :param enable_jitter: Setting this to true will add jitter. :type enable_jitter: bool
examples/apps/greengrass-hello-world/greengrass_ipc_python_sdk/utils/exponential_backoff.py
def retry(time_unit, multiplier, backoff_coefficient, max_delay, max_attempts, expiration_duration, enable_jitter):
    """
    The retry function will keep retrying `task_to_try` until either:
    (1) it returns None, then retry() finishes
    (2) `max_attempts` is reached, then retry() raises an exception.
    (3) if retrying one more time will cause total wait time to go above: `expiration_duration`,
        then retry() raises an exception
    Beware that any exception raised by task_to_try won't get surfaced until (2) or (3) is satisfied.

    At step n, it sleeps for [0, delay), where delay is defined as the following:
    `delay = min(max_delay, multiplier * (backoff_coefficient ** (n - 1))) * time_unit` seconds
    Additionally, if you enable jitter, for each retry, the function will instead sleep for:
    random.random() * sleep, that is [0, sleep) seconds.

    :param time_unit: This field represents a fraction of a second, which is used as a
                      multiplier to compute the amount of time to sleep.
    :type time_unit: float
    :param multiplier: The initial wait duration for the first retry.
    :type multiplier: float
    :param backoff_coefficient: the base value for exponential retry.
    :type backoff_coefficient: float
    :param max_delay: The maximum amount of time to wait per try.
    :type max_delay: float
    :param max_attempts: This method will retry up to this value.
    :type max_attempts: int
    :param expiration_duration: the maximum amount of time retry can wait.
    :type expiration_duration: float
    :param enable_jitter: Setting this to true will add jitter.
    :type enable_jitter: bool
    """
    def deco_retry(task_to_try):
        @wraps(task_to_try)
        def retry_impl(*args, **kwargs):
            total_wait_time = 0
            have_tried = 0
            retry_errors = []
            while have_tried < max_attempts:
                try:
                    task_to_try(*args, **kwargs)
                    return
                except Exception as e:
                    retry_errors.append(e)
                    going_to_sleep_for = min(max_delay, multiplier * (backoff_coefficient ** have_tried))
                    if enable_jitter:
                        going_to_sleep_for = random.random() * going_to_sleep_for
                    duration = going_to_sleep_for * time_unit
                    if total_wait_time + duration > expiration_duration:
                        raise RetryTimeoutException(task_to_try.__name__, have_tried, max_attempts, total_wait_time,
                                                    multiplier, backoff_coefficient, enable_jitter, retry_errors)
                    runtime_logger.warn('Retrying [{0}], going to sleep for {1} seconds, exception stacktrace:\n{2}'
                                        .format(task_to_try.__name__, duration, traceback.format_exc()))
                    time.sleep(duration)
                    total_wait_time += duration
                    have_tried += 1
            raise RetryTimeoutException(task_to_try.__name__, have_tried, max_attempts, total_wait_time,
                                        multiplier, backoff_coefficient, enable_jitter, retry_errors)
        return retry_impl
    return deco_retry
[ "The", "retry", "function", "will", "keep", "retrying", "task_to_try", "until", "either", ":", "(", "1", ")", "it", "returns", "None", "then", "retry", "()", "finishes", "(", "2", ")", "max_attempts", "is", "reached", "then", "retry", "()", "raises", "an",...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/examples/apps/greengrass-hello-world/greengrass_ipc_python_sdk/utils/exponential_backoff.py#L47-L116
[ "def", "retry", "(", "time_unit", ",", "multiplier", ",", "backoff_coefficient", ",", "max_delay", ",", "max_attempts", ",", "expiration_duration", ",", "enable_jitter", ")", ":", "def", "deco_retry", "(", "task_to_try", ")", ":", "@", "wraps", "(", "task_to_try...
cccb0c96b5c91e53355ebc07e542467303a5eedd
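A quick usage sketch for the retry decorator above. The import path, parameter values, and the decorated function are illustrative assumptions, not part of the dataset record.

# Hypothetical usage of the retry decorator; assumes the example app's
# package root is on sys.path so the module shown in the path column imports.
from greengrass_ipc_python_sdk.utils.exponential_backoff import retry

@retry(time_unit=1.0,             # delays are expressed in whole seconds
       multiplier=1.0,            # first retry waits about 1 * time_unit seconds
       backoff_coefficient=2.0,   # each retry doubles the base delay
       max_delay=30.0,            # cap of 30 * time_unit seconds per attempt
       max_attempts=5,            # stop after 5 tries
       expiration_duration=60.0,  # or once total sleep would exceed 60 seconds
       enable_jitter=True)        # sleep a random fraction of the computed delay
def publish_message():
    # Any exception raised here triggers another attempt until one of the
    # stop conditions described in the decorator's docstring is met, after
    # which RetryTimeoutException is raised with the collected errors.
    raise RuntimeError("transient failure")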
train
to_cloudformation
Returns the Lambda function, role, and event resources to which this SAM Function corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list
samtranslator/model/sam_resources.py
def to_cloudformation(self, **kwargs):
    """Returns the Lambda function, role, and event resources to which this SAM Function corresponds.

    :param dict kwargs: already-converted resources that may need to be modified when converting this \
    macro to pure CloudFormation
    :returns: a list of vanilla CloudFormation Resources, to which this Function expands
    :rtype: list
    """
    resources = []
    intrinsics_resolver = kwargs["intrinsics_resolver"]

    if self.DeadLetterQueue:
        self._validate_dlq()

    lambda_function = self._construct_lambda_function()
    resources.append(lambda_function)

    lambda_alias = None
    if self.AutoPublishAlias:
        alias_name = self._get_resolved_alias_name("AutoPublishAlias", self.AutoPublishAlias, intrinsics_resolver)
        lambda_version = self._construct_version(lambda_function, intrinsics_resolver=intrinsics_resolver)
        lambda_alias = self._construct_alias(alias_name, lambda_function, lambda_version)
        resources.append(lambda_version)
        resources.append(lambda_alias)

    if self.DeploymentPreference:
        self._validate_deployment_preference_and_add_update_policy(
            kwargs.get('deployment_preference_collection', None),
            lambda_alias, intrinsics_resolver)

    managed_policy_map = kwargs.get('managed_policy_map', {})
    if not managed_policy_map:
        raise Exception('Managed policy map is empty, but should not be.')

    execution_role = None
    if lambda_function.Role is None:
        execution_role = self._construct_role(managed_policy_map)
        lambda_function.Role = execution_role.get_runtime_attr('arn')
        resources.append(execution_role)

    try:
        resources += self._generate_event_resources(lambda_function, execution_role, kwargs['event_resources'],
                                                    lambda_alias=lambda_alias)
    except InvalidEventException as e:
        raise InvalidResourceException(self.logical_id, e.message)

    return resources
[ "Returns", "the", "Lambda", "function", "role", "and", "event", "resources", "to", "which", "this", "SAM", "Function", "corresponds", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L80-L126
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "resources", "=", "[", "]", "intrinsics_resolver", "=", "kwargs", "[", "\"intrinsics_resolver\"", "]", "if", "self", ".", "DeadLetterQueue", ":", "self", ".", "_validate_dlq", "(", "...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
_get_resolved_alias_name
Alias names can be supplied as an intrinsic function. This method tries to extract alias name from a reference to a parameter. If it cannot completely resolve (ie. if a complex intrinsic function was used), then this method raises an exception. If alias name is just a plain string, it will return as is :param dict or string original_alias_value: Value of Alias property as provided by the customer :param samtranslator.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Instance of the resolver that knows how to resolve parameter references :return string: Alias name :raises InvalidResourceException: If the value is a complex intrinsic function that cannot be resolved
samtranslator/model/sam_resources.py
def _get_resolved_alias_name(self, property_name, original_alias_value, intrinsics_resolver):
    """
    Alias names can be supplied as an intrinsic function. This method tries to extract alias name from a
    reference to a parameter. If it cannot completely resolve (ie. if a complex intrinsic function was used),
    then this method raises an exception. If alias name is just a plain string, it will return as is

    :param dict or string original_alias_value: Value of Alias property as provided by the customer
    :param samtranslator.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Instance of the resolver
        that knows how to resolve parameter references
    :return string: Alias name
    :raises InvalidResourceException: If the value is a complex intrinsic function that cannot be resolved
    """

    # Try to resolve.
    resolved_alias_name = intrinsics_resolver.resolve_parameter_refs(original_alias_value)

    if not isinstance(resolved_alias_name, string_types):
        # This is still a dictionary which means we are not able to completely resolve intrinsics
        raise InvalidResourceException(self.logical_id,
                                       "'{}' must be a string or a Ref to a template parameter"
                                       .format(property_name))

    return resolved_alias_name
[ "Alias", "names", "can", "be", "supplied", "as", "an", "intrinsic", "function", ".", "This", "method", "tries", "to", "extract", "alias", "name", "from", "a", "reference", "to", "a", "parameter", ".", "If", "it", "cannot", "completely", "resolve", "(", "ie...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L128-L150
[ "def", "_get_resolved_alias_name", "(", "self", ",", "property_name", ",", "original_alias_value", ",", "intrinsics_resolver", ")", ":", "# Try to resolve.", "resolved_alias_name", "=", "intrinsics_resolver", ".", "resolve_parameter_refs", "(", "original_alias_value", ")", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
_construct_lambda_function
Constructs and returns the Lambda function. :returns: a list containing the Lambda function and execution role resources :rtype: list
samtranslator/model/sam_resources.py
def _construct_lambda_function(self):
    """Constructs and returns the Lambda function.

    :returns: a list containing the Lambda function and execution role resources
    :rtype: list
    """
    lambda_function = LambdaFunction(self.logical_id, depends_on=self.depends_on,
                                     attributes=self.resource_attributes)

    if self.FunctionName:
        lambda_function.FunctionName = self.FunctionName

    lambda_function.Handler = self.Handler
    lambda_function.Runtime = self.Runtime
    lambda_function.Description = self.Description
    lambda_function.MemorySize = self.MemorySize
    lambda_function.Timeout = self.Timeout
    lambda_function.VpcConfig = self.VpcConfig
    lambda_function.Role = self.Role
    lambda_function.Environment = self.Environment
    lambda_function.Code = self._construct_code_dict()
    lambda_function.KmsKeyArn = self.KmsKeyArn
    lambda_function.ReservedConcurrentExecutions = self.ReservedConcurrentExecutions
    lambda_function.Tags = self._construct_tag_list(self.Tags)
    lambda_function.Layers = self.Layers

    if self.Tracing:
        lambda_function.TracingConfig = {"Mode": self.Tracing}

    if self.DeadLetterQueue:
        lambda_function.DeadLetterConfig = {"TargetArn": self.DeadLetterQueue['TargetArn']}

    return lambda_function
[ "Constructs", "and", "returns", "the", "Lambda", "function", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L152-L184
[ "def", "_construct_lambda_function", "(", "self", ")", ":", "lambda_function", "=", "LambdaFunction", "(", "self", ".", "logical_id", ",", "depends_on", "=", "self", ".", "depends_on", ",", "attributes", "=", "self", ".", "resource_attributes", ")", "if", "self"...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
_construct_role
Constructs a Lambda execution role based on this SAM function's Policies property. :returns: the generated IAM Role :rtype: model.iam.IAMRole
samtranslator/model/sam_resources.py
def _construct_role(self, managed_policy_map):
    """Constructs a Lambda execution role based on this SAM function's Policies property.

    :returns: the generated IAM Role
    :rtype: model.iam.IAMRole
    """
    execution_role = IAMRole(self.logical_id + 'Role', attributes=self.get_passthrough_resource_attributes())
    execution_role.AssumeRolePolicyDocument = IAMRolePolicies.lambda_assume_role_policy()

    managed_policy_arns = [ArnGenerator.generate_aws_managed_policy_arn('service-role/AWSLambdaBasicExecutionRole')]
    if self.Tracing:
        managed_policy_arns.append(ArnGenerator.generate_aws_managed_policy_arn('AWSXrayWriteOnlyAccess'))

    function_policies = FunctionPolicies({"Policies": self.Policies},
                                         # No support for policy templates in the "core"
                                         policy_template_processor=None)
    policy_documents = []

    if self.DeadLetterQueue:
        policy_documents.append(IAMRolePolicies.dead_letter_queue_policy(
            self.dead_letter_queue_policy_actions[self.DeadLetterQueue['Type']],
            self.DeadLetterQueue['TargetArn']))

    for index, policy_entry in enumerate(function_policies.get()):

        if policy_entry.type is PolicyTypes.POLICY_STATEMENT:

            policy_documents.append({
                'PolicyName': execution_role.logical_id + 'Policy' + str(index),
                'PolicyDocument': policy_entry.data
            })
        elif policy_entry.type is PolicyTypes.MANAGED_POLICY:

            # There are three options:
            #   Managed Policy Name (string): Try to convert to Managed Policy ARN
            #   Managed Policy Arn (string): Insert it directly into the list
            #   Intrinsic Function (dict): Insert it directly into the list
            #
            # When you insert into managed_policy_arns list, de-dupe to prevent same ARN from showing up twice
            #
            policy_arn = policy_entry.data
            if isinstance(policy_entry.data, string_types) and policy_entry.data in managed_policy_map:
                policy_arn = managed_policy_map[policy_entry.data]

            # De-Duplicate managed policy arns before inserting. Mainly useful
            # when customer specifies a managed policy which is already inserted
            # by SAM, such as AWSLambdaBasicExecutionRole
            if policy_arn not in managed_policy_arns:
                managed_policy_arns.append(policy_arn)

        else:
            # Policy Templates are not supported here in the "core"
            raise InvalidResourceException(
                self.logical_id,
                "Policy at index {} in the 'Policies' property is not valid".format(index))

    execution_role.ManagedPolicyArns = list(managed_policy_arns)
    execution_role.Policies = policy_documents or None
    execution_role.PermissionsBoundary = self.PermissionsBoundary

    return execution_role
[ "Constructs", "a", "Lambda", "execution", "role", "based", "on", "this", "SAM", "function", "s", "Policies", "property", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L186-L246
[ "def", "_construct_role", "(", "self", ",", "managed_policy_map", ")", ":", "execution_role", "=", "IAMRole", "(", "self", ".", "logical_id", "+", "'Role'", ",", "attributes", "=", "self", ".", "get_passthrough_resource_attributes", "(", ")", ")", "execution_role"...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
_validate_dlq
Validates whether the DeadLetterQueue LogicalId is validation :raise: InvalidResourceException
samtranslator/model/sam_resources.py
def _validate_dlq(self):
    """Validates whether the DeadLetterQueue LogicalId is validation

    :raise: InvalidResourceException
    """
    # Validate required logical ids
    valid_dlq_types = str(list(self.dead_letter_queue_policy_actions.keys()))
    if not self.DeadLetterQueue.get('Type') or not self.DeadLetterQueue.get('TargetArn'):
        raise InvalidResourceException(self.logical_id,
                                       "'DeadLetterQueue' requires Type and TargetArn properties to be specified"
                                       .format(valid_dlq_types))

    # Validate required Types
    if not self.DeadLetterQueue['Type'] in self.dead_letter_queue_policy_actions:
        raise InvalidResourceException(self.logical_id,
                                       "'DeadLetterQueue' requires Type of {}".format(valid_dlq_types))
[ "Validates", "whether", "the", "DeadLetterQueue", "LogicalId", "is", "validation", ":", "raise", ":", "InvalidResourceException" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L248-L262
[ "def", "_validate_dlq", "(", "self", ")", ":", "# Validate required logical ids", "valid_dlq_types", "=", "str", "(", "list", "(", "self", ".", "dead_letter_queue_policy_actions", ".", "keys", "(", ")", ")", ")", "if", "not", "self", ".", "DeadLetterQueue", ".",...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
_generate_event_resources
Generates and returns the resources associated with this function's events. :param model.lambda_.LambdaFunction lambda_function: generated Lambda function :param iam.IAMRole execution_role: generated Lambda execution role :param implicit_api: Global Implicit API resource where the implicit APIs get attached to, if necessary :param implicit_api_stage: Global implicit API stage resource where implicit APIs get attached to, if necessary :param event_resources: All the event sources associated with this Lambda function :param model.lambda_.LambdaAlias lambda_alias: Optional Lambda Alias resource if we want to connect the event sources to this alias :returns: a list containing the function's event resources :rtype: list
samtranslator/model/sam_resources.py
def _generate_event_resources(self, lambda_function, execution_role, event_resources, lambda_alias=None):
    """Generates and returns the resources associated with this function's events.

    :param model.lambda_.LambdaFunction lambda_function: generated Lambda function
    :param iam.IAMRole execution_role: generated Lambda execution role
    :param implicit_api: Global Implicit API resource where the implicit APIs get attached to, if necessary
    :param implicit_api_stage: Global implicit API stage resource where implicit APIs get attached to, if necessary
    :param event_resources: All the event sources associated with this Lambda function
    :param model.lambda_.LambdaAlias lambda_alias: Optional Lambda Alias resource if we want to connect the
        event sources to this alias

    :returns: a list containing the function's event resources
    :rtype: list
    """
    resources = []
    if self.Events:
        for logical_id, event_dict in self.Events.items():
            try:
                eventsource = self.event_resolver.resolve_resource_type(event_dict).from_dict(
                    lambda_function.logical_id + logical_id, event_dict, logical_id)
            except TypeError as e:
                raise InvalidEventException(logical_id, "{}".format(e))

            kwargs = {
                # When Alias is provided, connect all event sources to the alias and *not* the function
                'function': lambda_alias or lambda_function,
                'role': execution_role,
            }

            for name, resource in event_resources[logical_id].items():
                kwargs[name] = resource

            resources += eventsource.to_cloudformation(**kwargs)

    return resources
[ "Generates", "and", "returns", "the", "resources", "associated", "with", "this", "function", "s", "events", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L276-L309
[ "def", "_generate_event_resources", "(", "self", ",", "lambda_function", ",", "execution_role", ",", "event_resources", ",", "lambda_alias", "=", "None", ")", ":", "resources", "=", "[", "]", "if", "self", ".", "Events", ":", "for", "logical_id", ",", "event_d...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
_construct_version
Constructs a Lambda Version resource that will be auto-published when CodeUri of the function changes. Old versions will not be deleted without a direct reference from the CloudFormation template. :param model.lambda_.LambdaFunction function: Lambda function object that is being connected to a version :param model.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Class that can help resolve references to parameters present in CodeUri. It is a common usecase to set S3Key of Code to be a template parameter. Need to resolve the values otherwise we will never detect a change in Code dict :return: Lambda function Version resource
samtranslator/model/sam_resources.py
def _construct_version(self, function, intrinsics_resolver):
    """Constructs a Lambda Version resource that will be auto-published when CodeUri of the function changes.
    Old versions will not be deleted without a direct reference from the CloudFormation template.

    :param model.lambda_.LambdaFunction function: Lambda function object that is being connected to a version
    :param model.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Class that can help resolve
        references to parameters present in CodeUri. It is a common usecase to set S3Key of Code to be a
        template parameter. Need to resolve the values otherwise we will never detect a change in Code dict
    :return: Lambda function Version resource
    """
    code_dict = function.Code
    if not code_dict:
        raise ValueError("Lambda function code must be a valid non-empty dictionary")

    if not intrinsics_resolver:
        raise ValueError("intrinsics_resolver is required for versions creation")

    # Resolve references to template parameters before creating hash. This will *not* resolve all intrinsics
    # because we cannot resolve runtime values like Arn of a resource. For purposes of detecting changes, this
    # is good enough. Here is why:
    #
    # When using intrinsic functions there are two cases when has must change:
    #   - Value of the template parameter changes
    #   - (or) LogicalId of a referenced resource changes ie. !GetAtt NewResource.Arn
    #
    # Later case will already change the hash because some value in the Code dictionary changes. We handle the
    # first case by resolving references to template parameters. It is okay even if these references are
    # present inside another intrinsic such as !Join. The resolver will replace the reference with the parameter's
    # value and keep all other parts of !Join identical. This will still trigger a change in the hash.
    code_dict = intrinsics_resolver.resolve_parameter_refs(code_dict)

    # Construct the LogicalID of Lambda version by appending 10 characters of SHA of CodeUri. This is necessary
    # to trigger creation of a new version every time code location changes. Since logicalId changes, CloudFormation
    # will drop the old version and create a new one for us. We set a DeletionPolicy on the version resource to
    # prevent CloudFormation from actually deleting the underlying version resource
    #
    # SHA Collisions: For purposes of triggering a new update, we are concerned about just the difference previous
    # and next hashes. The chances that two subsequent hashes collide is fairly low.
    prefix = "{id}Version".format(id=self.logical_id)
    logical_id = logical_id_generator.LogicalIdGenerator(prefix, code_dict).gen()

    attributes = self.get_passthrough_resource_attributes()
    if attributes is None:
        attributes = {}
    attributes["DeletionPolicy"] = "Retain"

    lambda_version = LambdaVersion(logical_id=logical_id, attributes=attributes)
    lambda_version.FunctionName = function.get_runtime_attr('name')
    lambda_version.Description = self.VersionDescription

    return lambda_version
[ "Constructs", "a", "Lambda", "Version", "resource", "that", "will", "be", "auto", "-", "published", "when", "CodeUri", "of", "the", "function", "changes", ".", "Old", "versions", "will", "not", "be", "deleted", "without", "a", "direct", "reference", "from", ...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L321-L371
[ "def", "_construct_version", "(", "self", ",", "function", ",", "intrinsics_resolver", ")", ":", "code_dict", "=", "function", ".", "Code", "if", "not", "code_dict", ":", "raise", "ValueError", "(", "\"Lambda function code must be a valid non-empty dictionary\"", ")", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
_construct_alias
Constructs a Lambda Alias for the given function and pointing to the given version :param string name: Name of the alias :param model.lambda_.LambdaFunction function: Lambda function object to associate the alias with :param model.lambda_.LambdaVersion version: Lambda version object to associate the alias with :return: Lambda alias object :rtype model.lambda_.LambdaAlias
samtranslator/model/sam_resources.py
def _construct_alias(self, name, function, version):
    """Constructs a Lambda Alias for the given function and pointing to the given version

    :param string name: Name of the alias
    :param model.lambda_.LambdaFunction function: Lambda function object to associate the alias with
    :param model.lambda_.LambdaVersion version: Lambda version object to associate the alias with
    :return: Lambda alias object
    :rtype model.lambda_.LambdaAlias
    """

    if not name:
        raise InvalidResourceException(self.logical_id, "Alias name is required to create an alias")

    logical_id = "{id}Alias{suffix}".format(id=function.logical_id, suffix=name)
    alias = LambdaAlias(logical_id=logical_id, attributes=self.get_passthrough_resource_attributes())
    alias.Name = name
    alias.FunctionName = function.get_runtime_attr('name')
    alias.FunctionVersion = version.get_runtime_attr("version")

    return alias
[ "Constructs", "a", "Lambda", "Alias", "for", "the", "given", "function", "and", "pointing", "to", "the", "given", "version" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L373-L392
[ "def", "_construct_alias", "(", "self", ",", "name", ",", "function", ",", "version", ")", ":", "if", "not", "name", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Alias name is required to create an alias\"", ")", "logical_id", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
SamApi.to_cloudformation
Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list
samtranslator/model/sam_resources.py
def to_cloudformation(self, **kwargs):
    """Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds.

    :param dict kwargs: already-converted resources that may need to be modified when converting this \
    macro to pure CloudFormation
    :returns: a list of vanilla CloudFormation Resources, to which this Function expands
    :rtype: list
    """
    resources = []

    api_generator = ApiGenerator(self.logical_id,
                                 self.CacheClusterEnabled,
                                 self.CacheClusterSize,
                                 self.Variables,
                                 self.depends_on,
                                 self.DefinitionBody,
                                 self.DefinitionUri,
                                 self.Name,
                                 self.StageName,
                                 endpoint_configuration=self.EndpointConfiguration,
                                 method_settings=self.MethodSettings,
                                 binary_media=self.BinaryMediaTypes,
                                 minimum_compression_size=self.MinimumCompressionSize,
                                 cors=self.Cors,
                                 auth=self.Auth,
                                 gateway_responses=self.GatewayResponses,
                                 access_log_setting=self.AccessLogSetting,
                                 canary_setting=self.CanarySetting,
                                 tracing_enabled=self.TracingEnabled,
                                 resource_attributes=self.resource_attributes,
                                 passthrough_resource_attributes=self.get_passthrough_resource_attributes())

    rest_api, deployment, stage, permissions = api_generator.to_cloudformation()

    resources.extend([rest_api, deployment, stage])
    resources.extend(permissions)

    return resources
[ "Returns", "the", "API", "Gateway", "RestApi", "Deployment", "and", "Stage", "to", "which", "this", "SAM", "Api", "corresponds", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L456-L493
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "resources", "=", "[", "]", "api_generator", "=", "ApiGenerator", "(", "self", ".", "logical_id", ",", "self", ".", "CacheClusterEnabled", ",", "self", ".", "CacheClusterSize", ",", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
SamApplication._construct_nested_stack
Constructs a AWS::CloudFormation::Stack resource
samtranslator/model/sam_resources.py
def _construct_nested_stack(self):
    """Constructs a AWS::CloudFormation::Stack resource
    """
    nested_stack = NestedStack(self.logical_id, depends_on=self.depends_on,
                               attributes=self.get_passthrough_resource_attributes())
    nested_stack.Parameters = self.Parameters
    nested_stack.NotificationArns = self.NotificationArns
    application_tags = self._get_application_tags()
    nested_stack.Tags = self._construct_tag_list(self.Tags, application_tags)
    nested_stack.TimeoutInMinutes = self.TimeoutInMinutes
    nested_stack.TemplateURL = self.TemplateUrl if self.TemplateUrl else ""

    return nested_stack
def _construct_nested_stack(self): """Constructs a AWS::CloudFormation::Stack resource """ nested_stack = NestedStack(self.logical_id, depends_on=self.depends_on, attributes=self.get_passthrough_resource_attributes()) nested_stack.Parameters = self.Parameters nested_stack.NotificationArns = self.NotificationArns application_tags = self._get_application_tags() nested_stack.Tags = self._construct_tag_list(self.Tags, application_tags) nested_stack.TimeoutInMinutes = self.TimeoutInMinutes nested_stack.TemplateURL = self.TemplateUrl if self.TemplateUrl else "" return nested_stack
[ "Constructs", "a", "AWS", "::", "CloudFormation", "::", "Stack", "resource" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L583-L595
[ "def", "_construct_nested_stack", "(", "self", ")", ":", "nested_stack", "=", "NestedStack", "(", "self", ".", "logical_id", ",", "depends_on", "=", "self", ".", "depends_on", ",", "attributes", "=", "self", ".", "get_passthrough_resource_attributes", "(", ")", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
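The nested stack assembled above passes the application's Parameters, NotificationArns, Tags, TimeoutInMinutes and TemplateURL straight through to CloudFormation. A minimal illustration of the resulting resource shape, with made-up values:

# Illustrative only: the AWS::CloudFormation::Stack resource that _construct_nested_stack builds.
example_nested_stack = {
    "MyApplication": {
        "Type": "AWS::CloudFormation::Stack",
        "Properties": {
            "TemplateURL": "https://s3.amazonaws.com/my-bucket/packaged-app.yaml",
            "Parameters": {"TableName": "my-table"},
            "NotificationArns": ["arn:aws:sns:us-east-1:123456789012:deployments"],
            "Tags": [{"Key": "env", "Value": "dev"}],
            "TimeoutInMinutes": 60,
        },
    }
}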
train
SamApplication._get_application_tags
Adds tags to the stack if this resource is using the serverless app repo
samtranslator/model/sam_resources.py
def _get_application_tags(self): """Adds tags to the stack if this resource is using the serverless app repo """ application_tags = {} if isinstance(self.Location, dict): if (self.APPLICATION_ID_KEY in self.Location.keys() and self.Location[self.APPLICATION_ID_KEY] is not None): application_tags[self._SAR_APP_KEY] = self.Location[self.APPLICATION_ID_KEY] if (self.SEMANTIC_VERSION_KEY in self.Location.keys() and self.Location[self.SEMANTIC_VERSION_KEY] is not None): application_tags[self._SAR_SEMVER_KEY] = self.Location[self.SEMANTIC_VERSION_KEY] return application_tags
def _get_application_tags(self): """Adds tags to the stack if this resource is using the serverless app repo """ application_tags = {} if isinstance(self.Location, dict): if (self.APPLICATION_ID_KEY in self.Location.keys() and self.Location[self.APPLICATION_ID_KEY] is not None): application_tags[self._SAR_APP_KEY] = self.Location[self.APPLICATION_ID_KEY] if (self.SEMANTIC_VERSION_KEY in self.Location.keys() and self.Location[self.SEMANTIC_VERSION_KEY] is not None): application_tags[self._SAR_SEMVER_KEY] = self.Location[self.SEMANTIC_VERSION_KEY] return application_tags
[ "Adds", "tags", "to", "the", "stack", "if", "this", "resource", "is", "using", "the", "serverless", "app", "repo" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L597-L608
[ "def", "_get_application_tags", "(", "self", ")", ":", "application_tags", "=", "{", "}", "if", "isinstance", "(", "self", ".", "Location", ",", "dict", ")", ":", "if", "(", "self", ".", "APPLICATION_ID_KEY", "in", "self", ".", "Location", ".", "keys", "...
cccb0c96b5c91e53355ebc07e542467303a5eedd
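A standalone sketch of the tagging behaviour above. The real tag keys come from class constants (_SAR_APP_KEY, _SAR_SEMVER_KEY) whose values are not shown in this record, so placeholders are used here; treating the Location keys as the literal strings "ApplicationId" and "SemanticVersion" is likewise an assumption based on the documented SAM property names.

# Mirrors the branching in _get_application_tags; the two tag-key values are placeholders.
SAR_APP_KEY = "<application-id tag key>"
SAR_SEMVER_KEY = "<semantic-version tag key>"

def application_tags_for(location):
    tags = {}
    if isinstance(location, dict):
        if location.get("ApplicationId") is not None:
            tags[SAR_APP_KEY] = location["ApplicationId"]
        if location.get("SemanticVersion") is not None:
            tags[SAR_SEMVER_KEY] = location["SemanticVersion"]
    return tags

# A Location pointing at the Serverless Application Repository yields both tags;
# a plain template URL string yields none.
print(application_tags_for({"ApplicationId": "arn:aws:serverlessrepo:us-east-1:123456789012:applications/my-app",
                            "SemanticVersion": "1.0.0"}))
print(application_tags_for("https://s3.amazonaws.com/my-bucket/template.yaml"))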
train
SamLayerVersion.to_cloudformation
Returns the Lambda layer to which this SAM Layer corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list
samtranslator/model/sam_resources.py
def to_cloudformation(self, **kwargs): """Returns the Lambda layer to which this SAM Layer corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] # Append any CFN resources: intrinsics_resolver = kwargs["intrinsics_resolver"] resources.append(self._construct_lambda_layer(intrinsics_resolver)) return resources
def to_cloudformation(self, **kwargs): """Returns the Lambda layer to which this SAM Layer corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] # Append any CFN resources: intrinsics_resolver = kwargs["intrinsics_resolver"] resources.append(self._construct_lambda_layer(intrinsics_resolver)) return resources
[ "Returns", "the", "Lambda", "layer", "to", "which", "this", "SAM", "Layer", "corresponds", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L628-L642
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "resources", "=", "[", "]", "# Append any CFN resources:", "intrinsics_resolver", "=", "kwargs", "[", "\"intrinsics_resolver\"", "]", "resources", ".", "append", "(", "self", ".", "_cons...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
SamLayerVersion._construct_lambda_layer
Constructs and returns the Lambda function. :returns: a list containing the Lambda function and execution role resources :rtype: list
samtranslator/model/sam_resources.py
def _construct_lambda_layer(self, intrinsics_resolver): """Constructs and returns the Lambda function. :returns: a list containing the Lambda function and execution role resources :rtype: list """ # Resolve intrinsics if applicable: self.LayerName = self._resolve_string_parameter(intrinsics_resolver, self.LayerName, 'LayerName') self.LicenseInfo = self._resolve_string_parameter(intrinsics_resolver, self.LicenseInfo, 'LicenseInfo') self.Description = self._resolve_string_parameter(intrinsics_resolver, self.Description, 'Description') self.RetentionPolicy = self._resolve_string_parameter(intrinsics_resolver, self.RetentionPolicy, 'RetentionPolicy') retention_policy_value = self._get_retention_policy_value() attributes = self.get_passthrough_resource_attributes() if attributes is None: attributes = {} attributes['DeletionPolicy'] = retention_policy_value old_logical_id = self.logical_id new_logical_id = logical_id_generator.LogicalIdGenerator(old_logical_id, self.to_dict()).gen() self.logical_id = new_logical_id lambda_layer = LambdaLayerVersion(self.logical_id, depends_on=self.depends_on, attributes=attributes) # Changing the LayerName property: when a layer is published, it is given an Arn # example: arn:aws:lambda:us-west-2:123456789012:layer:MyLayer:1 # where MyLayer is the LayerName property if it exists; otherwise, it is the # LogicalId of this resource. Since a LayerVersion is an immutable resource, when # CloudFormation updates this resource, it will ALWAYS create a new version then # delete the old version if the logical ids match. What this does is change the # logical id of every layer (so a `DeletionPolicy: Retain` can work) and set the # LayerName property of the layer so that the Arn will still always be the same # with the exception of an incrementing version number. if not self.LayerName: self.LayerName = old_logical_id lambda_layer.LayerName = self.LayerName lambda_layer.Description = self.Description lambda_layer.Content = construct_s3_location_object(self.ContentUri, self.logical_id, 'ContentUri') lambda_layer.CompatibleRuntimes = self.CompatibleRuntimes lambda_layer.LicenseInfo = self.LicenseInfo return lambda_layer
def _construct_lambda_layer(self, intrinsics_resolver): """Constructs and returns the Lambda function. :returns: a list containing the Lambda function and execution role resources :rtype: list """ # Resolve intrinsics if applicable: self.LayerName = self._resolve_string_parameter(intrinsics_resolver, self.LayerName, 'LayerName') self.LicenseInfo = self._resolve_string_parameter(intrinsics_resolver, self.LicenseInfo, 'LicenseInfo') self.Description = self._resolve_string_parameter(intrinsics_resolver, self.Description, 'Description') self.RetentionPolicy = self._resolve_string_parameter(intrinsics_resolver, self.RetentionPolicy, 'RetentionPolicy') retention_policy_value = self._get_retention_policy_value() attributes = self.get_passthrough_resource_attributes() if attributes is None: attributes = {} attributes['DeletionPolicy'] = retention_policy_value old_logical_id = self.logical_id new_logical_id = logical_id_generator.LogicalIdGenerator(old_logical_id, self.to_dict()).gen() self.logical_id = new_logical_id lambda_layer = LambdaLayerVersion(self.logical_id, depends_on=self.depends_on, attributes=attributes) # Changing the LayerName property: when a layer is published, it is given an Arn # example: arn:aws:lambda:us-west-2:123456789012:layer:MyLayer:1 # where MyLayer is the LayerName property if it exists; otherwise, it is the # LogicalId of this resource. Since a LayerVersion is an immutable resource, when # CloudFormation updates this resource, it will ALWAYS create a new version then # delete the old version if the logical ids match. What this does is change the # logical id of every layer (so a `DeletionPolicy: Retain` can work) and set the # LayerName property of the layer so that the Arn will still always be the same # with the exception of an incrementing version number. if not self.LayerName: self.LayerName = old_logical_id lambda_layer.LayerName = self.LayerName lambda_layer.Description = self.Description lambda_layer.Content = construct_s3_location_object(self.ContentUri, self.logical_id, 'ContentUri') lambda_layer.CompatibleRuntimes = self.CompatibleRuntimes lambda_layer.LicenseInfo = self.LicenseInfo return lambda_layer
[ "Constructs", "and", "returns", "the", "Lambda", "function", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L644-L688
[ "def", "_construct_lambda_layer", "(", "self", ",", "intrinsics_resolver", ")", ":", "# Resolve intrinsics if applicable:", "self", ".", "LayerName", "=", "self", ".", "_resolve_string_parameter", "(", "intrinsics_resolver", ",", "self", ".", "LayerName", ",", "'LayerNa...
cccb0c96b5c91e53355ebc07e542467303a5eedd
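The interesting bit above is naming: the layer's logical id is regenerated from a hash of the resource so that a new LayerVersion is created on every change (letting DeletionPolicy: Retain keep old versions), while LayerName is pinned to the original logical id so the published ARN stays stable apart from the version number. A small illustrative sketch of that behaviour; the hash suffix is made up:

# Illustrative only: stable LayerName, changing logical id.
old_logical_id = "MyLayer"
content_hash = "abc123def0"               # hypothetical hash of the resource definition
new_logical_id = old_logical_id + content_hash

layer_name = None                         # the template did not set LayerName explicitly
if not layer_name:
    layer_name = old_logical_id           # fall back to the original logical id

# The published ARN keeps the form arn:aws:lambda:<region>:<account>:layer:MyLayer:<version>;
# only the trailing version number increments across updates.
print(new_logical_id, layer_name)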
train
SamLayerVersion._get_retention_policy_value
Sets the deletion policy on this resource. The default is 'Retain'. :return: value for the DeletionPolicy attribute.
samtranslator/model/sam_resources.py
def _get_retention_policy_value(self): """ Sets the deletion policy on this resource. The default is 'Retain'. :return: value for the DeletionPolicy attribute. """ if self.RetentionPolicy is None or self.RetentionPolicy.lower() == self.RETAIN.lower(): return self.RETAIN elif self.RetentionPolicy.lower() == self.DELETE.lower(): return self.DELETE elif self.RetentionPolicy.lower() not in self.retention_policy_options: raise InvalidResourceException(self.logical_id, "'{}' must be one of the following options: {}." .format('RetentionPolicy', [self.RETAIN, self.DELETE]))
def _get_retention_policy_value(self): """ Sets the deletion policy on this resource. The default is 'Retain'. :return: value for the DeletionPolicy attribute. """ if self.RetentionPolicy is None or self.RetentionPolicy.lower() == self.RETAIN.lower(): return self.RETAIN elif self.RetentionPolicy.lower() == self.DELETE.lower(): return self.DELETE elif self.RetentionPolicy.lower() not in self.retention_policy_options: raise InvalidResourceException(self.logical_id, "'{}' must be one of the following options: {}." .format('RetentionPolicy', [self.RETAIN, self.DELETE]))
[ "Sets", "the", "deletion", "policy", "on", "this", "resource", ".", "The", "default", "is", "Retain", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L690-L704
[ "def", "_get_retention_policy_value", "(", "self", ")", ":", "if", "self", ".", "RetentionPolicy", "is", "None", "or", "self", ".", "RetentionPolicy", ".", "lower", "(", ")", "==", "self", ".", "RETAIN", ".", "lower", "(", ")", ":", "return", "self", "."...
cccb0c96b5c91e53355ebc07e542467303a5eedd
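A self-contained mirror of the mapping above, handy for seeing the accepted inputs at a glance; the exception type is simplified to ValueError here, whereas the real code raises InvalidResourceException.

RETAIN, DELETE = "Retain", "Delete"

def retention_policy_value(retention_policy):
    # None or "retain" (any case) -> Retain; "delete" (any case) -> Delete; otherwise reject.
    if retention_policy is None or retention_policy.lower() == RETAIN.lower():
        return RETAIN
    if retention_policy.lower() == DELETE.lower():
        return DELETE
    raise ValueError("'RetentionPolicy' must be one of the following options: {}.".format([RETAIN, DELETE]))

assert retention_policy_value(None) == "Retain"
assert retention_policy_value("delete") == "Delete"
assert retention_policy_value("RETAIN") == "Retain"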
train
order_flowers
Performs dialog management and fulfillment for ordering flowers. Beyond fulfillment, the implementation of this intent demonstrates the use of the elicitSlot dialog action in slot validation and re-prompting.
examples/apps/lex-order-flowers-python/lambda_function.py
def order_flowers(intent_request): """ Performs dialog management and fulfillment for ordering flowers. Beyond fulfillment, the implementation of this intent demonstrates the use of the elicitSlot dialog action in slot validation and re-prompting. """ flower_type = get_slots(intent_request)["FlowerType"] date = get_slots(intent_request)["PickupDate"] time = get_slots(intent_request)["PickupTime"] source = intent_request['invocationSource'] if source == 'DialogCodeHook': # Perform basic validation on the supplied input slots. # Use the elicitSlot dialog action to re-prompt for the first violation detected. slots = get_slots(intent_request) validation_result = validate_order_flowers(flower_type, date, time) if not validation_result['isValid']: slots[validation_result['violatedSlot']] = None return elicit_slot(intent_request['sessionAttributes'], intent_request['currentIntent']['name'], slots, validation_result['violatedSlot'], validation_result['message']) # Pass the price of the flowers back through session attributes to be used in various prompts defined # on the bot model. output_session_attributes = intent_request['sessionAttributes'] if flower_type is not None: output_session_attributes['Price'] = len(flower_type) * 5 # Elegant pricing model return delegate(output_session_attributes, get_slots(intent_request)) # Order the flowers, and rely on the goodbye message of the bot to define the message to the end user. # In a real bot, this would likely involve a call to a backend service. return close(intent_request['sessionAttributes'], 'Fulfilled', {'contentType': 'PlainText', 'content': 'Thanks, your order for {} has been placed and will be ready for pickup by {} on {}'.format(flower_type, time, date)})
def order_flowers(intent_request): """ Performs dialog management and fulfillment for ordering flowers. Beyond fulfillment, the implementation of this intent demonstrates the use of the elicitSlot dialog action in slot validation and re-prompting. """ flower_type = get_slots(intent_request)["FlowerType"] date = get_slots(intent_request)["PickupDate"] time = get_slots(intent_request)["PickupTime"] source = intent_request['invocationSource'] if source == 'DialogCodeHook': # Perform basic validation on the supplied input slots. # Use the elicitSlot dialog action to re-prompt for the first violation detected. slots = get_slots(intent_request) validation_result = validate_order_flowers(flower_type, date, time) if not validation_result['isValid']: slots[validation_result['violatedSlot']] = None return elicit_slot(intent_request['sessionAttributes'], intent_request['currentIntent']['name'], slots, validation_result['violatedSlot'], validation_result['message']) # Pass the price of the flowers back through session attributes to be used in various prompts defined # on the bot model. output_session_attributes = intent_request['sessionAttributes'] if flower_type is not None: output_session_attributes['Price'] = len(flower_type) * 5 # Elegant pricing model return delegate(output_session_attributes, get_slots(intent_request)) # Order the flowers, and rely on the goodbye message of the bot to define the message to the end user. # In a real bot, this would likely involve a call to a backend service. return close(intent_request['sessionAttributes'], 'Fulfilled', {'contentType': 'PlainText', 'content': 'Thanks, your order for {} has been placed and will be ready for pickup by {} on {}'.format(flower_type, time, date)})
[ "Performs", "dialog", "management", "and", "fulfillment", "for", "ordering", "flowers", ".", "Beyond", "fulfillment", "the", "implementation", "of", "this", "intent", "demonstrates", "the", "use", "of", "the", "elicitSlot", "dialog", "action", "in", "slot", "valid...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/examples/apps/lex-order-flowers-python/lambda_function.py#L119-L158
[ "def", "order_flowers", "(", "intent_request", ")", ":", "flower_type", "=", "get_slots", "(", "intent_request", ")", "[", "\"FlowerType\"", "]", "date", "=", "get_slots", "(", "intent_request", ")", "[", "\"PickupDate\"", "]", "time", "=", "get_slots", "(", "...
cccb0c96b5c91e53355ebc07e542467303a5eedd
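For context, the handler above is driven by an Amazon Lex event. A trimmed, hypothetical example of the event shape it reads; only the keys the code actually touches are shown:

# Hypothetical Lex event, reduced to the fields order_flowers (and dispatch) consume.
example_intent_request = {
    "userId": "user-123",
    "invocationSource": "DialogCodeHook",      # or "FulfillmentCodeHook" for fulfillment
    "sessionAttributes": {},
    "currentIntent": {
        "name": "OrderFlowers",
        "slots": {
            "FlowerType": "roses",
            "PickupDate": "2019-01-01",
            "PickupTime": "16:00",
        },
    },
}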
train
dispatch
Called when the user specifies an intent for this bot.
examples/apps/lex-order-flowers-python/lambda_function.py
def dispatch(intent_request): """ Called when the user specifies an intent for this bot. """ logger.debug('dispatch userId={}, intentName={}'.format(intent_request['userId'], intent_request['currentIntent']['name'])) intent_name = intent_request['currentIntent']['name'] # Dispatch to your bot's intent handlers if intent_name == 'OrderFlowers': return order_flowers(intent_request) raise Exception('Intent with name ' + intent_name + ' not supported')
def dispatch(intent_request): """ Called when the user specifies an intent for this bot. """ logger.debug('dispatch userId={}, intentName={}'.format(intent_request['userId'], intent_request['currentIntent']['name'])) intent_name = intent_request['currentIntent']['name'] # Dispatch to your bot's intent handlers if intent_name == 'OrderFlowers': return order_flowers(intent_request) raise Exception('Intent with name ' + intent_name + ' not supported')
[ "Called", "when", "the", "user", "specifies", "an", "intent", "for", "this", "bot", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/examples/apps/lex-order-flowers-python/lambda_function.py#L164-L177
[ "def", "dispatch", "(", "intent_request", ")", ":", "logger", ".", "debug", "(", "'dispatch userId={}, intentName={}'", ".", "format", "(", "intent_request", "[", "'userId'", "]", ",", "intent_request", "[", "'currentIntent'", "]", "[", "'name'", "]", ")", ")", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
PushEventSource._construct_permission
Constructs the Lambda Permission resource allowing the source service to invoke the function this event source triggers. :returns: the permission resource :rtype: model.lambda_.LambdaPermission
samtranslator/model/eventsources/push.py
def _construct_permission(self, function, source_arn=None, source_account=None, suffix="", event_source_token=None): """Constructs the Lambda Permission resource allowing the source service to invoke the function this event source triggers. :returns: the permission resource :rtype: model.lambda_.LambdaPermission """ lambda_permission = LambdaPermission(self.logical_id + 'Permission' + suffix, attributes=function.get_passthrough_resource_attributes()) try: # Name will not be available for Alias resources function_name_or_arn = function.get_runtime_attr("name") except NotImplementedError: function_name_or_arn = function.get_runtime_attr("arn") lambda_permission.Action = 'lambda:invokeFunction' lambda_permission.FunctionName = function_name_or_arn lambda_permission.Principal = self.principal lambda_permission.SourceArn = source_arn lambda_permission.SourceAccount = source_account lambda_permission.EventSourceToken = event_source_token return lambda_permission
def _construct_permission(self, function, source_arn=None, source_account=None, suffix="", event_source_token=None): """Constructs the Lambda Permission resource allowing the source service to invoke the function this event source triggers. :returns: the permission resource :rtype: model.lambda_.LambdaPermission """ lambda_permission = LambdaPermission(self.logical_id + 'Permission' + suffix, attributes=function.get_passthrough_resource_attributes()) try: # Name will not be available for Alias resources function_name_or_arn = function.get_runtime_attr("name") except NotImplementedError: function_name_or_arn = function.get_runtime_attr("arn") lambda_permission.Action = 'lambda:invokeFunction' lambda_permission.FunctionName = function_name_or_arn lambda_permission.Principal = self.principal lambda_permission.SourceArn = source_arn lambda_permission.SourceAccount = source_account lambda_permission.EventSourceToken = event_source_token return lambda_permission
[ "Constructs", "the", "Lambda", "Permission", "resource", "allowing", "the", "source", "service", "to", "invoke", "the", "function", "this", "event", "source", "triggers", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L43-L66
[ "def", "_construct_permission", "(", "self", ",", "function", ",", "source_arn", "=", "None", ",", "source_account", "=", "None", ",", "suffix", "=", "\"\"", ",", "event_source_token", "=", "None", ")", ":", "lambda_permission", "=", "LambdaPermission", "(", "...
cccb0c96b5c91e53355ebc07e542467303a5eedd
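The permission built above surfaces in the output template as an AWS::Lambda::Permission whose properties come straight from the arguments. An illustrative rendering for an S3-triggered function; the logical ids, principal and values here are hypothetical:

# Illustrative only: the shape of the resource _construct_permission produces.
example_permission = {
    "MyFunctionS3TriggerPermission": {              # event logical id + "Permission" + suffix
        "Type": "AWS::Lambda::Permission",
        "Properties": {
            "Action": "lambda:invokeFunction",
            "FunctionName": {"Ref": "MyFunction"},  # falls back to the ARN for Alias resources
            "Principal": "s3.amazonaws.com",
            "SourceAccount": {"Ref": "AWS::AccountId"},
        },
    }
}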
train
Schedule.to_cloudformation
Returns the CloudWatch Events Rule and Lambda Permission to which this Schedule event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this pull event expands :rtype: list
samtranslator/model/eventsources/push.py
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Events Rule and Lambda Permission to which this Schedule event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this pull event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") resources = [] events_rule = EventsRule(self.logical_id) resources.append(events_rule) events_rule.ScheduleExpression = self.Schedule events_rule.Targets = [self._construct_target(function)] source_arn = events_rule.get_runtime_attr("arn") if CONDITION in function.resource_attributes: events_rule.set_resource_attribute(CONDITION, function.resource_attributes[CONDITION]) resources.append(self._construct_permission(function, source_arn=source_arn)) return resources
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Events Rule and Lambda Permission to which this Schedule event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this pull event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") resources = [] events_rule = EventsRule(self.logical_id) resources.append(events_rule) events_rule.ScheduleExpression = self.Schedule events_rule.Targets = [self._construct_target(function)] source_arn = events_rule.get_runtime_attr("arn") if CONDITION in function.resource_attributes: events_rule.set_resource_attribute(CONDITION, function.resource_attributes[CONDITION]) resources.append(self._construct_permission(function, source_arn=source_arn)) return resources
[ "Returns", "the", "CloudWatch", "Events", "Rule", "and", "Lambda", "Permission", "to", "which", "this", "Schedule", "event", "source", "corresponds", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L78-L103
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "resources...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
CloudWatchEvent._construct_target
Constructs the Target property for the CloudWatch Events Rule. :returns: the Target property :rtype: dict
samtranslator/model/eventsources/push.py
def _construct_target(self, function): """Constructs the Target property for the CloudWatch Events Rule. :returns: the Target property :rtype: dict """ target = { 'Arn': function.get_runtime_attr("arn"), 'Id': self.logical_id + 'LambdaTarget' } if self.Input is not None: target['Input'] = self.Input if self.InputPath is not None: target['InputPath'] = self.InputPath return target
def _construct_target(self, function): """Constructs the Target property for the CloudWatch Events Rule. :returns: the Target property :rtype: dict """ target = { 'Arn': function.get_runtime_attr("arn"), 'Id': self.logical_id + 'LambdaTarget' } if self.Input is not None: target['Input'] = self.Input if self.InputPath is not None: target['InputPath'] = self.InputPath return target
[ "Constructs", "the", "Target", "property", "for", "the", "CloudWatch", "Events", "Rule", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L159-L174
[ "def", "_construct_target", "(", "self", ",", "function", ")", ":", "target", "=", "{", "'Arn'", ":", "function", ".", "get_runtime_attr", "(", "\"arn\"", ")", ",", "'Id'", ":", "self", ".", "logical_id", "+", "'LambdaTarget'", "}", "if", "self", ".", "I...
cccb0c96b5c91e53355ebc07e542467303a5eedd
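Concretely, the target added to the Events Rule above is just a small dict. For a function MyFunction and an event with logical id MyFunctionTimer it would look roughly like this; rendering the runtime "arn" attribute as Fn::GetAtt is an assumption:

# Illustrative only: the Target entry placed on the CloudWatch Events Rule.
example_target = {
    "Arn": {"Fn::GetAtt": ["MyFunction", "Arn"]},   # assumed rendering of the function's arn attribute
    "Id": "MyFunctionTimerLambdaTarget",            # logical_id + "LambdaTarget"
    # "Input" / "InputPath" are only added when set on the event source.
}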
train
S3.to_cloudformation
Returns the Lambda Permission resource allowing S3 to invoke the function this event source triggers. :param dict kwargs: S3 bucket resource :returns: a list of vanilla CloudFormation Resources, to which this S3 event expands :rtype: list
samtranslator/model/eventsources/push.py
def to_cloudformation(self, **kwargs): """Returns the Lambda Permission resource allowing S3 to invoke the function this event source triggers. :param dict kwargs: S3 bucket resource :returns: a list of vanilla CloudFormation Resources, to which this S3 event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") if 'bucket' not in kwargs or kwargs['bucket'] is None: raise TypeError("Missing required keyword argument: bucket") if 'bucket_id' not in kwargs or kwargs['bucket_id'] is None: raise TypeError("Missing required keyword argument: bucket_id") bucket = kwargs['bucket'] bucket_id = kwargs['bucket_id'] resources = [] source_account = ref('AWS::AccountId') permission = self._construct_permission(function, source_account=source_account) if CONDITION in permission.resource_attributes: self._depend_on_lambda_permissions_using_tag(bucket, permission) else: self._depend_on_lambda_permissions(bucket, permission) resources.append(permission) # NOTE: `bucket` here is a dictionary representing the S3 Bucket resource in your SAM template. If there are # multiple S3 Events attached to the same bucket, we will update the Bucket resource with notification # configuration for each event. This is the reason why we continue to use existing bucket dict and append onto # it. # # NOTE: There is some fragile logic here where we will append multiple resources to output # SAM template but de-dupe them when merging into output CFN template. This is scary because the order of # merging is literally "last one wins", which works fine because we linearly loop through the template once. # The de-dupe happens inside `samtranslator.translator.Translator.translate` method when merging results of # to_cloudformation() to output template. self._inject_notification_configuration(function, bucket) resources.append(S3Bucket.from_dict(bucket_id, bucket)) return resources
def to_cloudformation(self, **kwargs): """Returns the Lambda Permission resource allowing S3 to invoke the function this event source triggers. :param dict kwargs: S3 bucket resource :returns: a list of vanilla CloudFormation Resources, to which this S3 event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") if 'bucket' not in kwargs or kwargs['bucket'] is None: raise TypeError("Missing required keyword argument: bucket") if 'bucket_id' not in kwargs or kwargs['bucket_id'] is None: raise TypeError("Missing required keyword argument: bucket_id") bucket = kwargs['bucket'] bucket_id = kwargs['bucket_id'] resources = [] source_account = ref('AWS::AccountId') permission = self._construct_permission(function, source_account=source_account) if CONDITION in permission.resource_attributes: self._depend_on_lambda_permissions_using_tag(bucket, permission) else: self._depend_on_lambda_permissions(bucket, permission) resources.append(permission) # NOTE: `bucket` here is a dictionary representing the S3 Bucket resource in your SAM template. If there are # multiple S3 Events attached to the same bucket, we will update the Bucket resource with notification # configuration for each event. This is the reason why we continue to use existing bucket dict and append onto # it. # # NOTE: There is some fragile logic here where we will append multiple resources to output # SAM template but de-dupe them when merging into output CFN template. This is scary because the order of # merging is literally "last one wins", which works fine because we linearly loop through the template once. # The de-dupe happens inside `samtranslator.translator.Translator.translate` method when merging results of # to_cloudformation() to output template. self._inject_notification_configuration(function, bucket) resources.append(S3Bucket.from_dict(bucket_id, bucket)) return resources
[ "Returns", "the", "Lambda", "Permission", "resource", "allowing", "S3", "to", "invoke", "the", "function", "this", "event", "source", "triggers", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L197-L241
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "if", "'...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
S3._depend_on_lambda_permissions
Make the S3 bucket depend on the Lambda Permissions resource because when S3 adds a Notification Configuration, it will check whether it has permissions to access Lambda. This will fail if the Lambda::Permissions is not already applied for this bucket to invoke the Lambda. :param dict bucket: Dictionary representing the bucket in SAM template. This is a raw dictionary and not a "resource" object :param model.lambda_.lambda_permission permission: Lambda Permission resource that needs to be created before the bucket. :return: Modified Bucket dictionary
samtranslator/model/eventsources/push.py
def _depend_on_lambda_permissions(self, bucket, permission): """ Make the S3 bucket depends on Lambda Permissions resource because when S3 adds a Notification Configuration, it will check whether it has permissions to access Lambda. This will fail if the Lambda::Permissions is not already applied for this bucket to invoke the Lambda. :param dict bucket: Dictionary representing the bucket in SAM template. This is a raw dictionary and not a "resource" object :param model.lambda_.lambda_permission permission: Lambda Permission resource that needs to be created before the bucket. :return: Modified Bucket dictionary """ depends_on = bucket.get("DependsOn", []) # DependsOn can be either a list of strings or a scalar string if isinstance(depends_on, string_types): depends_on = [depends_on] depends_on_set = set(depends_on) depends_on_set.add(permission.logical_id) bucket["DependsOn"] = list(depends_on_set) return bucket
def _depend_on_lambda_permissions(self, bucket, permission): """ Make the S3 bucket depends on Lambda Permissions resource because when S3 adds a Notification Configuration, it will check whether it has permissions to access Lambda. This will fail if the Lambda::Permissions is not already applied for this bucket to invoke the Lambda. :param dict bucket: Dictionary representing the bucket in SAM template. This is a raw dictionary and not a "resource" object :param model.lambda_.lambda_permission permission: Lambda Permission resource that needs to be created before the bucket. :return: Modified Bucket dictionary """ depends_on = bucket.get("DependsOn", []) # DependsOn can be either a list of strings or a scalar string if isinstance(depends_on, string_types): depends_on = [depends_on] depends_on_set = set(depends_on) depends_on_set.add(permission.logical_id) bucket["DependsOn"] = list(depends_on_set) return bucket
[ "Make", "the", "S3", "bucket", "depends", "on", "Lambda", "Permissions", "resource", "because", "when", "S3", "adds", "a", "Notification", "Configuration", "it", "will", "check", "whether", "it", "has", "permissions", "to", "access", "Lambda", ".", "This", "wi...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L243-L266
[ "def", "_depend_on_lambda_permissions", "(", "self", ",", "bucket", ",", "permission", ")", ":", "depends_on", "=", "bucket", ".", "get", "(", "\"DependsOn\"", ",", "[", "]", ")", "# DependsOn can be either a list of strings or a scalar string", "if", "isinstance", "(...
cccb0c96b5c91e53355ebc07e542467303a5eedd
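A self-contained sketch of the DependsOn merge above: a scalar DependsOn string is normalised to a list, the permission's logical id is added, and duplicates are dropped (list order is not guaranteed because a set is used).

def add_depends_on(bucket, permission_logical_id):
    # Mirrors _depend_on_lambda_permissions, with plain str in place of six's string_types.
    depends_on = bucket.get("DependsOn", [])
    if isinstance(depends_on, str):
        depends_on = [depends_on]
    bucket["DependsOn"] = list(set(depends_on) | {permission_logical_id})
    return bucket

bucket = {"Type": "AWS::S3::Bucket", "DependsOn": "SomeOtherResource"}
print(add_depends_on(bucket, "MyFunctionS3Permission")["DependsOn"])
# e.g. ['SomeOtherResource', 'MyFunctionS3Permission'] (order may vary)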
train
S3._depend_on_lambda_permissions_using_tag
Since conditional DependsOn is not supported, this undocumented way of implicitly making a dependency through tags is used. See https://stackoverflow.com/questions/34607476/cloudformation-apply-condition-on-dependson It is done by using Ref wrapped in a conditional Fn::If. Using Ref implies a dependency, so CloudFormation will automatically wait once it reaches that function, the same as if you were using a DependsOn.
samtranslator/model/eventsources/push.py
def _depend_on_lambda_permissions_using_tag(self, bucket, permission): """ Since conditional DependsOn is not supported this undocumented way of implicitely making dependency through tags is used. See https://stackoverflow.com/questions/34607476/cloudformation-apply-condition-on-dependson It is done by using Ref wrapped in a conditional Fn::If. Using Ref implies a dependency, so CloudFormation will automatically wait once it reaches that function, the same as if you were using a DependsOn. """ properties = bucket.get('Properties', None) if properties is None: properties = {} bucket['Properties'] = properties tags = properties.get('Tags', None) if tags is None: tags = [] properties['Tags'] = tags dep_tag = { 'sam:ConditionalDependsOn:' + permission.logical_id: { 'Fn::If': [ permission.resource_attributes[CONDITION], ref(permission.logical_id), 'no dependency' ] } } properties['Tags'] = tags + get_tag_list(dep_tag) return bucket
def _depend_on_lambda_permissions_using_tag(self, bucket, permission): """ Since conditional DependsOn is not supported this undocumented way of implicitely making dependency through tags is used. See https://stackoverflow.com/questions/34607476/cloudformation-apply-condition-on-dependson It is done by using Ref wrapped in a conditional Fn::If. Using Ref implies a dependency, so CloudFormation will automatically wait once it reaches that function, the same as if you were using a DependsOn. """ properties = bucket.get('Properties', None) if properties is None: properties = {} bucket['Properties'] = properties tags = properties.get('Tags', None) if tags is None: tags = [] properties['Tags'] = tags dep_tag = { 'sam:ConditionalDependsOn:' + permission.logical_id: { 'Fn::If': [ permission.resource_attributes[CONDITION], ref(permission.logical_id), 'no dependency' ] } } properties['Tags'] = tags + get_tag_list(dep_tag) return bucket
[ "Since", "conditional", "DependsOn", "is", "not", "supported", "this", "undocumented", "way", "of", "implicitely", "making", "dependency", "through", "tags", "is", "used", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L268-L297
[ "def", "_depend_on_lambda_permissions_using_tag", "(", "self", ",", "bucket", ",", "permission", ")", ":", "properties", "=", "bucket", ".", "get", "(", "'Properties'", ",", "None", ")", "if", "properties", "is", "None", ":", "properties", "=", "{", "}", "bu...
cccb0c96b5c91e53355ebc07e542467303a5eedd
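The workaround above effectively appends a tag whose value is a conditional Ref to the permission. Assuming get_tag_list renders a dict as the usual [{"Key": ..., "Value": ...}] list (an assumption, since that helper is not shown in this record), the injected tag looks roughly like this:

# Illustrative only: the conditional dependency tag appended to the bucket's Properties.Tags.
example_dependency_tag = {
    "Key": "sam:ConditionalDependsOn:MyFunctionS3Permission",
    "Value": {
        "Fn::If": [
            "MyFunctionCondition",                  # the permission's Condition attribute
            {"Ref": "MyFunctionS3Permission"},      # Ref implies a dependency when the condition holds
            "no dependency",
        ]
    },
}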
train
SNS.to_cloudformation
Returns the Lambda Permission resource allowing SNS to invoke the function this event source triggers. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this SNS event expands :rtype: list
samtranslator/model/eventsources/push.py
def to_cloudformation(self, **kwargs): """Returns the Lambda Permission resource allowing SNS to invoke the function this event source triggers. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this SNS event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") return [self._construct_permission(function, source_arn=self.Topic), self._inject_subscription(function, self.Topic, self.FilterPolicy)]
def to_cloudformation(self, **kwargs): """Returns the Lambda Permission resource allowing SNS to invoke the function this event source triggers. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this SNS event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") return [self._construct_permission(function, source_arn=self.Topic), self._inject_subscription(function, self.Topic, self.FilterPolicy)]
[ "Returns", "the", "Lambda", "Permission", "resource", "allowing", "SNS", "to", "invoke", "the", "function", "this", "event", "source", "triggers", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L350-L363
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "return", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
Api.resources_to_link
If this API Event Source refers to an explicit API resource, resolve the reference and grab necessary data from the explicit API
samtranslator/model/eventsources/push.py
def resources_to_link(self, resources): """ If this API Event Source refers to an explicit API resource, resolve the reference and grab necessary data from the explicit API """ rest_api_id = self.RestApiId if isinstance(rest_api_id, dict) and "Ref" in rest_api_id: rest_api_id = rest_api_id["Ref"] # If RestApiId is a resource in the same template, then we try find the StageName by following the reference # Otherwise we default to a wildcard. This stage name is solely used to construct the permission to # allow this stage to invoke the Lambda function. If we are unable to resolve the stage name, we will # simply permit all stages to invoke this Lambda function # This hack is necessary because customers could use !ImportValue, !Ref or other intrinsic functions which # can be sometimes impossible to resolve (ie. when it has cross-stack references) permitted_stage = "*" stage_suffix = "AllStages" explicit_api = None if isinstance(rest_api_id, string_types): if rest_api_id in resources \ and "Properties" in resources[rest_api_id] \ and "StageName" in resources[rest_api_id]["Properties"]: explicit_api = resources[rest_api_id]["Properties"] permitted_stage = explicit_api["StageName"] # Stage could be a intrinsic, in which case leave the suffix to default value if isinstance(permitted_stage, string_types): if not permitted_stage: raise InvalidResourceException(rest_api_id, 'StageName cannot be empty.') stage_suffix = permitted_stage else: stage_suffix = "Stage" else: # RestApiId is a string, not an intrinsic, but we did not find a valid API resource for this ID raise InvalidEventException(self.relative_id, "RestApiId property of Api event must reference a valid " "resource in the same template.") return { 'explicit_api': explicit_api, 'explicit_api_stage': { 'permitted_stage': permitted_stage, 'suffix': stage_suffix } }
def resources_to_link(self, resources): """ If this API Event Source refers to an explicit API resource, resolve the reference and grab necessary data from the explicit API """ rest_api_id = self.RestApiId if isinstance(rest_api_id, dict) and "Ref" in rest_api_id: rest_api_id = rest_api_id["Ref"] # If RestApiId is a resource in the same template, then we try find the StageName by following the reference # Otherwise we default to a wildcard. This stage name is solely used to construct the permission to # allow this stage to invoke the Lambda function. If we are unable to resolve the stage name, we will # simply permit all stages to invoke this Lambda function # This hack is necessary because customers could use !ImportValue, !Ref or other intrinsic functions which # can be sometimes impossible to resolve (ie. when it has cross-stack references) permitted_stage = "*" stage_suffix = "AllStages" explicit_api = None if isinstance(rest_api_id, string_types): if rest_api_id in resources \ and "Properties" in resources[rest_api_id] \ and "StageName" in resources[rest_api_id]["Properties"]: explicit_api = resources[rest_api_id]["Properties"] permitted_stage = explicit_api["StageName"] # Stage could be a intrinsic, in which case leave the suffix to default value if isinstance(permitted_stage, string_types): if not permitted_stage: raise InvalidResourceException(rest_api_id, 'StageName cannot be empty.') stage_suffix = permitted_stage else: stage_suffix = "Stage" else: # RestApiId is a string, not an intrinsic, but we did not find a valid API resource for this ID raise InvalidEventException(self.relative_id, "RestApiId property of Api event must reference a valid " "resource in the same template.") return { 'explicit_api': explicit_api, 'explicit_api_stage': { 'permitted_stage': permitted_stage, 'suffix': stage_suffix } }
[ "If", "this", "API", "Event", "Source", "refers", "to", "an", "explicit", "API", "resource", "resolve", "the", "reference", "and", "grab", "necessary", "data", "from", "the", "explicit", "API" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L392-L439
[ "def", "resources_to_link", "(", "self", ",", "resources", ")", ":", "rest_api_id", "=", "self", ".", "RestApiId", "if", "isinstance", "(", "rest_api_id", ",", "dict", ")", "and", "\"Ref\"", "in", "rest_api_id", ":", "rest_api_id", "=", "rest_api_id", "[", "...
cccb0c96b5c91e53355ebc07e542467303a5eedd
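The linking data returned above has two typical shapes, sketched here with made-up values: one where the referenced API's StageName is a literal string, and one where RestApiId is an unresolvable intrinsic so every stage is permitted.

# RestApiId refers to an API in the same template whose StageName is the literal "Prod":
linked_with_stage = {
    "explicit_api": {"StageName": "Prod"},     # the referenced API's Properties
    "explicit_api_stage": {"permitted_stage": "Prod", "suffix": "Prod"},
}

# RestApiId is an intrinsic (e.g. Fn::ImportValue) that cannot be resolved here,
# so the defaults are kept and the permission covers all stages:
linked_all_stages = {
    "explicit_api": None,
    "explicit_api_stage": {"permitted_stage": "*", "suffix": "AllStages"},
}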
train
Api.to_cloudformation
If the Api event source has a RestApi property, then simply return the Lambda Permission resource allowing API Gateway to call the function. If no RestApi is provided, then additionally inject the path, method, and the x-amazon-apigateway-integration into the Swagger body for a provided implicit API. :param dict kwargs: a dict containing the implicit RestApi to be modified, should no explicit RestApi \ be provided. :returns: a list of vanilla CloudFormation Resources, to which this Api event expands :rtype: list
samtranslator/model/eventsources/push.py
def to_cloudformation(self, **kwargs): """If the Api event source has a RestApi property, then simply return the Lambda Permission resource allowing API Gateway to call the function. If no RestApi is provided, then additionally inject the path, method, and the x-amazon-apigateway-integration into the Swagger body for a provided implicit API. :param dict kwargs: a dict containing the implicit RestApi to be modified, should no explicit RestApi \ be provided. :returns: a list of vanilla CloudFormation Resources, to which this Api event expands :rtype: list """ resources = [] function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") if self.Method is not None: # Convert to lower case so that user can specify either GET or get self.Method = self.Method.lower() resources.extend(self._get_permissions(kwargs)) explicit_api = kwargs['explicit_api'] if explicit_api.get("__MANAGE_SWAGGER"): self._add_swagger_integration(explicit_api, function) return resources
def to_cloudformation(self, **kwargs): """If the Api event source has a RestApi property, then simply return the Lambda Permission resource allowing API Gateway to call the function. If no RestApi is provided, then additionally inject the path, method, and the x-amazon-apigateway-integration into the Swagger body for a provided implicit API. :param dict kwargs: a dict containing the implicit RestApi to be modified, should no explicit RestApi \ be provided. :returns: a list of vanilla CloudFormation Resources, to which this Api event expands :rtype: list """ resources = [] function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") if self.Method is not None: # Convert to lower case so that user can specify either GET or get self.Method = self.Method.lower() resources.extend(self._get_permissions(kwargs)) explicit_api = kwargs['explicit_api'] if explicit_api.get("__MANAGE_SWAGGER"): self._add_swagger_integration(explicit_api, function) return resources
[ "If", "the", "Api", "event", "source", "has", "a", "RestApi", "property", "then", "simply", "return", "the", "Lambda", "Permission", "resource", "allowing", "API", "Gateway", "to", "call", "the", "function", ".", "If", "no", "RestApi", "is", "provided", "the...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L441-L468
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "resources", "=", "[", "]", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argume...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
Api._add_swagger_integration
Adds the path and method for this Api event source to the Swagger body for the provided RestApi. :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi to which the path and method should be added.
samtranslator/model/eventsources/push.py
def _add_swagger_integration(self, api, function): """Adds the path and method for this Api event source to the Swagger body for the provided RestApi. :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi to which the path and method should be added. """ swagger_body = api.get("DefinitionBody") if swagger_body is None: return function_arn = function.get_runtime_attr('arn') partition = ArnGenerator.get_partition_name() uri = fnSub('arn:' + partition + ':apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/' + make_shorthand(function_arn) + '/invocations') editor = SwaggerEditor(swagger_body) if editor.has_integration(self.Path, self.Method): # Cannot add the Lambda Integration, if it is already present raise InvalidEventException( self.relative_id, 'API method "{method}" defined multiple times for path "{path}".'.format( method=self.Method, path=self.Path)) condition = None if CONDITION in function.resource_attributes: condition = function.resource_attributes[CONDITION] editor.add_lambda_integration(self.Path, self.Method, uri, self.Auth, api.get('Auth'), condition=condition) if self.Auth: method_authorizer = self.Auth.get('Authorizer') if method_authorizer: api_auth = api.get('Auth') api_authorizers = api_auth and api_auth.get('Authorizers') if method_authorizer != 'AWS_IAM': if not api_authorizers: raise InvalidEventException( self.relative_id, 'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] ' 'because the related API does not define any Authorizers.'.format( authorizer=method_authorizer, method=self.Method, path=self.Path)) if method_authorizer != 'NONE' and not api_authorizers.get(method_authorizer): raise InvalidEventException( self.relative_id, 'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] ' 'because it wasn\'t defined in the API\'s Authorizers.'.format( authorizer=method_authorizer, method=self.Method, path=self.Path)) if method_authorizer == 'NONE' and not api_auth.get('DefaultAuthorizer'): raise InvalidEventException( self.relative_id, 'Unable to set Authorizer on API method [{method}] for path [{path}] because \'NONE\' ' 'is only a valid value when a DefaultAuthorizer on the API is specified.'.format( method=self.Method, path=self.Path)) editor.add_auth_to_method(api=api, path=self.Path, method_name=self.Method, auth=self.Auth) api["DefinitionBody"] = editor.swagger
def _add_swagger_integration(self, api, function): """Adds the path and method for this Api event source to the Swagger body for the provided RestApi. :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi to which the path and method should be added. """ swagger_body = api.get("DefinitionBody") if swagger_body is None: return function_arn = function.get_runtime_attr('arn') partition = ArnGenerator.get_partition_name() uri = fnSub('arn:' + partition + ':apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/' + make_shorthand(function_arn) + '/invocations') editor = SwaggerEditor(swagger_body) if editor.has_integration(self.Path, self.Method): # Cannot add the Lambda Integration, if it is already present raise InvalidEventException( self.relative_id, 'API method "{method}" defined multiple times for path "{path}".'.format( method=self.Method, path=self.Path)) condition = None if CONDITION in function.resource_attributes: condition = function.resource_attributes[CONDITION] editor.add_lambda_integration(self.Path, self.Method, uri, self.Auth, api.get('Auth'), condition=condition) if self.Auth: method_authorizer = self.Auth.get('Authorizer') if method_authorizer: api_auth = api.get('Auth') api_authorizers = api_auth and api_auth.get('Authorizers') if method_authorizer != 'AWS_IAM': if not api_authorizers: raise InvalidEventException( self.relative_id, 'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] ' 'because the related API does not define any Authorizers.'.format( authorizer=method_authorizer, method=self.Method, path=self.Path)) if method_authorizer != 'NONE' and not api_authorizers.get(method_authorizer): raise InvalidEventException( self.relative_id, 'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] ' 'because it wasn\'t defined in the API\'s Authorizers.'.format( authorizer=method_authorizer, method=self.Method, path=self.Path)) if method_authorizer == 'NONE' and not api_auth.get('DefaultAuthorizer'): raise InvalidEventException( self.relative_id, 'Unable to set Authorizer on API method [{method}] for path [{path}] because \'NONE\' ' 'is only a valid value when a DefaultAuthorizer on the API is specified.'.format( method=self.Method, path=self.Path)) editor.add_auth_to_method(api=api, path=self.Path, method_name=self.Method, auth=self.Auth) api["DefinitionBody"] = editor.swagger
[ "Adds", "the", "path", "and", "method", "for", "this", "Api", "event", "source", "to", "the", "Swagger", "body", "for", "the", "provided", "RestApi", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L507-L567
[ "def", "_add_swagger_integration", "(", "self", ",", "api", ",", "function", ")", ":", "swagger_body", "=", "api", ".", "get", "(", "\"DefinitionBody\"", ")", "if", "swagger_body", "is", "None", ":", "return", "function_arn", "=", "function", ".", "get_runtime...
cccb0c96b5c91e53355ebc07e542467303a5eedd
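The x-amazon-apigateway-integration added above points API Gateway at the function through a Fn::Sub'd invocation ARN. Assuming make_shorthand renders the function ARN as a ${...} substitution (an assumption; that helper is not shown in this record), the URI ends up looking roughly like this:

# Illustrative only: the integration URI injected into the Swagger body.
example_integration_uri = {
    "Fn::Sub": (
        "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/"
        "${MyFunction.Arn}/invocations"
    )
}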
train
IntrinsicsResolver.resolve_parameter_refs
Resolves references to parameters within the given dictionary recursively. Other intrinsic functions such as !GetAtt, !Sub or !Ref to non-parameters will be left untouched. Result is a dictionary where parameter values are inlined. Don't pass this dictionary directly into transform's output because it changes the template structure by inlining parameter values. :param input: Any primitive type (dict, array, string etc) whose values might contain intrinsic functions :return: A copy of a dictionary with parameter references replaced by actual value.
samtranslator/intrinsics/resolver.py
def resolve_parameter_refs(self, input): """ Resolves references to parameters within the given dictionary recursively. Other intrinsic functions such as !GetAtt, !Sub or !Ref to non-parameters will be left untouched. Result is a dictionary where parameter values are inlined. Don't pass this dictionary directly into transform's output because it changes the template structure by inlining parameter values. :param input: Any primitive type (dict, array, string etc) whose values might contain intrinsic functions :return: A copy of a dictionary with parameter references replaced by actual value. """ return self._traverse(input, self.parameters, self._try_resolve_parameter_refs)
def resolve_parameter_refs(self, input): """ Resolves references to parameters within the given dictionary recursively. Other intrinsic functions such as !GetAtt, !Sub or !Ref to non-parameters will be left untouched. Result is a dictionary where parameter values are inlined. Don't pass this dictionary directly into transform's output because it changes the template structure by inlining parameter values. :param input: Any primitive type (dict, array, string etc) whose values might contain intrinsic functions :return: A copy of a dictionary with parameter references replaced by actual value. """ return self._traverse(input, self.parameters, self._try_resolve_parameter_refs)
[ "Resolves", "references", "to", "parameters", "within", "the", "given", "dictionary", "recursively", ".", "Other", "intrinsic", "functions", "such", "as", "!GetAtt", "!Sub", "or", "!Ref", "to", "non", "-", "parameters", "will", "be", "left", "untouched", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L30-L41
[ "def", "resolve_parameter_refs", "(", "self", ",", "input", ")", ":", "return", "self", ".", "_traverse", "(", "input", ",", "self", ".", "parameters", ",", "self", ".", "_try_resolve_parameter_refs", ")" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
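A small, self-contained sketch of what the parameter resolution above does to a template fragment: Refs to known parameters are inlined, every other intrinsic is left alone. This mirrors the documented behaviour rather than the actual _traverse implementation.

def inline_parameter_refs(node, parameters):
    # Recursively replace {"Ref": "<param>"} with the parameter's value when the
    # parameter is known; leave GetAtt, Sub and Refs to resources untouched.
    if isinstance(node, dict):
        if list(node.keys()) == ["Ref"] and node["Ref"] in parameters:
            return parameters[node["Ref"]]
        return {key: inline_parameter_refs(value, parameters) for key, value in node.items()}
    if isinstance(node, list):
        return [inline_parameter_refs(value, parameters) for value in node]
    return node

fragment = {"Variables": {"TABLE": {"Ref": "TableName"}, "FN_ARN": {"Fn::GetAtt": ["MyFunction", "Arn"]}}}
print(inline_parameter_refs(fragment, {"TableName": "my-table"}))
# {'Variables': {'TABLE': 'my-table', 'FN_ARN': {'Fn::GetAtt': ['MyFunction', 'Arn']}}}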
train
IntrinsicsResolver.resolve_sam_resource_refs
Customers can provide a reference to a "derived" SAM resource such as Alias of a Function or Stage of an API resource. This method recursively walks the tree, converting all derived references to the real resource name, if it is present. Example: {"Ref": "MyFunction.Alias"} -> {"Ref": "MyFunctionAliasLive"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param SupportedResourceReferences supported_resource_refs: Object that contains information about the resource references supported in this SAM template, along with the value they should resolve to. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise
samtranslator/intrinsics/resolver.py
def resolve_sam_resource_refs(self, input, supported_resource_refs): """ Customers can provide a reference to a "derived" SAM resource such as Alias of a Function or Stage of an API resource. This method recursively walks the tree, converting all derived references to the real resource name, if it is present. Example: {"Ref": "MyFunction.Alias"} -> {"Ref": "MyFunctionAliasLive"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param SupportedResourceReferences supported_resource_refs: Object that contains information about the resource references supported in this SAM template, along with the value they should resolve to. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise """ return self._traverse(input, supported_resource_refs, self._try_resolve_sam_resource_refs)
def resolve_sam_resource_refs(self, input, supported_resource_refs): """ Customers can provide a reference to a "derived" SAM resource such as Alias of a Function or Stage of an API resource. This method recursively walks the tree, converting all derived references to the real resource name, if it is present. Example: {"Ref": "MyFunction.Alias"} -> {"Ref": "MyFunctionAliasLive"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param SupportedResourceReferences supported_resource_refs: Object that contains information about the resource references supported in this SAM template, along with the value they should resolve to. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise """ return self._traverse(input, supported_resource_refs, self._try_resolve_sam_resource_refs)
[ "Customers", "can", "provide", "a", "reference", "to", "a", "derived", "SAM", "resource", "such", "as", "Alias", "of", "a", "Function", "or", "Stage", "of", "an", "API", "resource", ".", "This", "method", "recursively", "walks", "the", "tree", "converting", ...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L43-L65
[ "def", "resolve_sam_resource_refs", "(", "self", ",", "input", ",", "supported_resource_refs", ")", ":", "return", "self", ".", "_traverse", "(", "input", ",", "supported_resource_refs", ",", "self", ".", "_try_resolve_sam_resource_refs", ")" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
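A hedged, standalone sketch of the "derived resource" rewrite described above. The real SupportedResourceReferences object is richer; a plain dict mapping "LogicalId.Property" strings to generated logical ids is assumed here purely for illustration:

# Illustrative only: rewrite {"Ref": "MyFunction.Alias"} style references using a lookup map.
def rewrite_derived_refs(node, ref_map):
    if isinstance(node, dict):
        if list(node.keys()) == ["Ref"] and node["Ref"] in ref_map:
            return {"Ref": ref_map[node["Ref"]]}
        return {k: rewrite_derived_refs(v, ref_map) for k, v in node.items()}
    if isinstance(node, list):
        return [rewrite_derived_refs(v, ref_map) for v in node]
    return node

print(rewrite_derived_refs({"Target": {"Ref": "MyFunction.Alias"}},
                           {"MyFunction.Alias": "MyFunctionAliasLive"}))
# -> {'Target': {'Ref': 'MyFunctionAliasLive'}}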
IntrinsicsResolver.resolve_sam_resource_id_refs
Some SAM resources have their logical ids mutated from the original id that the customer writes in the template. This method recursively walks the tree and updates these logical ids from the old value to the new value that is generated by SAM. Example: {"Ref": "MyLayer"} -> {"Ref": "MyLayerABC123"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise
samtranslator/intrinsics/resolver.py
def resolve_sam_resource_id_refs(self, input, supported_resource_id_refs): """ Some SAM resources have their logical ids mutated from the original id that the customer writes in the template. This method recursively walks the tree and updates these logical ids from the old value to the new value that is generated by SAM. Example: {"Ref": "MyLayer"} -> {"Ref": "MyLayerABC123"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise """ return self._traverse(input, supported_resource_id_refs, self._try_resolve_sam_resource_id_refs)
def resolve_sam_resource_id_refs(self, input, supported_resource_id_refs): """ Some SAM resources have their logical ids mutated from the original id that the customer writes in the template. This method recursively walks the tree and updates these logical ids from the old value to the new value that is generated by SAM. Example: {"Ref": "MyLayer"} -> {"Ref": "MyLayerABC123"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise """ return self._traverse(input, supported_resource_id_refs, self._try_resolve_sam_resource_id_refs)
[ "Some", "SAM", "resources", "have", "their", "logical", "ids", "mutated", "from", "the", "original", "id", "that", "the", "customer", "writes", "in", "the", "template", ".", "This", "method", "recursively", "walks", "the", "tree", "and", "updates", "these", ...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L67-L88
[ "def", "resolve_sam_resource_id_refs", "(", "self", ",", "input", ",", "supported_resource_id_refs", ")", ":", "return", "self", ".", "_traverse", "(", "input", ",", "supported_resource_id_refs", ",", "self", ".", "_try_resolve_sam_resource_id_refs", ")" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
IntrinsicsResolver._traverse
Driver method that performs the actual traversal of input and calls the appropriate `resolver_method` to perform the resolution. :param input: Any primitive type (dict, array, string etc) whose value might contain an intrinsic function :param resolution_data: Data that will help with resolution. For example, when resolving parameter references, this object will contain a dictionary of parameter names and their values. :param resolver_method: Method that will be called to actually resolve an intrinsic function. This method is called with the parameters `(input, resolution_data)`. :return: Modified `input` with intrinsics resolved
samtranslator/intrinsics/resolver.py
def _traverse(self, input, resolution_data, resolver_method): """ Driver method that performs the actual traversal of input and calls the appropriate `resolver_method` to perform the resolution. :param input: Any primitive type (dict, array, string etc) whose value might contain an intrinsic function :param resolution_data: Data that will help with resolution. For example, when resolving parameter references, this object will contain a dictionary of parameter names and their values. :param resolver_method: Method that will be called to actually resolve an intrinsic function. This method is called with the parameters `(input, resolution_data)`. :return: Modified `input` with intrinsics resolved """ # There is no data to help with resolution. Skip the traversal altogether if len(resolution_data) == 0: return input # # Traversal Algorithm: # # Imagine the input dictionary/list as a tree. We are doing a Pre-Order tree traversal here where we first # process the root node before going to its children. Dict and Lists are the only two iterable nodes. # Everything else is a leaf node. # # We do a Pre-Order traversal to handle the case where `input` contains intrinsic function as its only child # ie. input = {"Ref": "foo"}. # # We will try to resolve the intrinsics if we can, otherwise return the original input. In some cases, resolving # an intrinsic will result in a terminal state ie. {"Ref": "foo"} could resolve to a string "bar". In other # cases, resolving intrinsics is only partial and we might need to continue traversing the tree (ex: Fn::Sub) # to handle nested intrinsics. All of these cases lend well towards a Pre-Order traversal where we try and # process the intrinsic, which results in a modified sub-tree to traverse. # input = resolver_method(input, resolution_data) if isinstance(input, dict): return self._traverse_dict(input, resolution_data, resolver_method) elif isinstance(input, list): return self._traverse_list(input, resolution_data, resolver_method) else: # We can iterate only over dict or list types. Primitive types are terminals return input
def _traverse(self, input, resolution_data, resolver_method): """ Driver method that performs the actual traversal of input and calls the appropriate `resolver_method` to perform the resolution. :param input: Any primitive type (dict, array, string etc) whose value might contain an intrinsic function :param resolution_data: Data that will help with resolution. For example, when resolving parameter references, this object will contain a dictionary of parameter names and their values. :param resolver_method: Method that will be called to actually resolve an intrinsic function. This method is called with the parameters `(input, resolution_data)`. :return: Modified `input` with intrinsics resolved """ # There is no data to help with resolution. Skip the traversal altogether if len(resolution_data) == 0: return input # # Traversal Algorithm: # # Imagine the input dictionary/list as a tree. We are doing a Pre-Order tree traversal here where we first # process the root node before going to its children. Dict and Lists are the only two iterable nodes. # Everything else is a leaf node. # # We do a Pre-Order traversal to handle the case where `input` contains intrinsic function as its only child # ie. input = {"Ref": "foo"}. # # We will try to resolve the intrinsics if we can, otherwise return the original input. In some cases, resolving # an intrinsic will result in a terminal state ie. {"Ref": "foo"} could resolve to a string "bar". In other # cases, resolving intrinsics is only partial and we might need to continue traversing the tree (ex: Fn::Sub) # to handle nested intrinsics. All of these cases lend well towards a Pre-Order traversal where we try and # process the intrinsic, which results in a modified sub-tree to traverse. # input = resolver_method(input, resolution_data) if isinstance(input, dict): return self._traverse_dict(input, resolution_data, resolver_method) elif isinstance(input, list): return self._traverse_list(input, resolution_data, resolver_method) else: # We can iterate only over dict or list types. Primitive types are terminals return input
[ "Driver", "method", "that", "performs", "the", "actual", "traversal", "of", "input", "and", "calls", "the", "appropriate", "resolver_method", "when", "to", "perform", "the", "resolution", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L90-L132
[ "def", "_traverse", "(", "self", ",", "input", ",", "resolution_data", ",", "resolver_method", ")", ":", "# There is data to help with resolution. Skip the traversal altogether", "if", "len", "(", "resolution_data", ")", "==", "0", ":", "return", "input", "#", "# Trav...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
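A self-contained sketch of the pre-order traversal the record above explains: the resolver runs on a node first, and only then do we recurse into whatever it returned. Unlike the real helpers, which mutate dicts and lists in place, this version rebuilds them; the resolve_ref callback is invented for the example.

def traverse(node, data, resolver):
    node = resolver(node, data)          # pre-order: resolve the root first
    if isinstance(node, dict):
        return {k: traverse(v, data, resolver) for k, v in node.items()}
    if isinstance(node, list):
        return [traverse(v, data, resolver) for v in node]
    return node                          # primitives are terminals

def resolve_ref(node, params):
    if isinstance(node, dict) and list(node.keys()) == ["Ref"]:
        return params.get(node["Ref"], node)
    return node

print(traverse({"Env": [{"Ref": "Stage"}]}, {"Stage": "prod"}, resolve_ref))
# -> {'Env': ['prod']}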
IntrinsicsResolver._traverse_dict
Traverse a dictionary to resolve intrinsic functions on every value :param input_dict: Input dictionary to traverse :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified dictionary with values resolved
samtranslator/intrinsics/resolver.py
def _traverse_dict(self, input_dict, resolution_data, resolver_method): """ Traverse a dictionary to resolve intrinsic functions on every value :param input_dict: Input dictionary to traverse :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified dictionary with values resolved """ for key, value in input_dict.items(): input_dict[key] = self._traverse(value, resolution_data, resolver_method) return input_dict
def _traverse_dict(self, input_dict, resolution_data, resolver_method): """ Traverse a dictionary to resolve intrinsic functions on every value :param input_dict: Input dictionary to traverse :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified dictionary with values resolved """ for key, value in input_dict.items(): input_dict[key] = self._traverse(value, resolution_data, resolver_method) return input_dict
[ "Traverse", "a", "dictionary", "to", "resolve", "intrinsic", "functions", "on", "every", "value" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L134-L146
[ "def", "_traverse_dict", "(", "self", ",", "input_dict", ",", "resolution_data", ",", "resolver_method", ")", ":", "for", "key", ",", "value", "in", "input_dict", ".", "items", "(", ")", ":", "input_dict", "[", "key", "]", "=", "self", ".", "_traverse", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
IntrinsicsResolver._traverse_list
Traverse a list to resolve intrinsic functions on every element :param input_list: List of input :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified list with intrinsic functions resolved
samtranslator/intrinsics/resolver.py
def _traverse_list(self, input_list, resolution_data, resolver_method): """ Traverse a list to resolve intrinsic functions on every element :param input_list: List of input :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified list with intrinsic functions resolved """ for index, value in enumerate(input_list): input_list[index] = self._traverse(value, resolution_data, resolver_method) return input_list
def _traverse_list(self, input_list, resolution_data, resolver_method): """ Traverse a list to resolve intrinsic functions on every element :param input_list: List of input :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified list with intrinsic functions resolved """ for index, value in enumerate(input_list): input_list[index] = self._traverse(value, resolution_data, resolver_method) return input_list
[ "Traverse", "a", "list", "to", "resolve", "intrinsic", "functions", "on", "every", "element" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L148-L160
[ "def", "_traverse_list", "(", "self", ",", "input_list", ",", "resolution_data", ",", "resolver_method", ")", ":", "for", "index", ",", "value", "in", "enumerate", "(", "input_list", ")", ":", "input_list", "[", "index", "]", "=", "self", ".", "_traverse", ...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
IntrinsicsResolver._try_resolve_parameter_refs
Try to resolve parameter references on the given input object. The object could be of any type. If the input is not in the format used by intrinsics (ie. dictionary with one key), input is returned unmodified. If the single key in the dictionary is one of the supported intrinsic function types, go ahead and try to resolve it. :param input: Input object to resolve :param parameters: Parameter values used for ref substitution :return:
samtranslator/intrinsics/resolver.py
def _try_resolve_parameter_refs(self, input, parameters): """ Try to resolve parameter references on the given input object. The object could be of any type. If the input is not in the format used by intrinsics (ie. dictionary with one key), input is returned unmodified. If the single key in the dictionary is one of the supported intrinsic function types, go ahead and try to resolve it. :param input: Input object to resolve :param parameters: Parameter values used for ref substitution :return: """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_parameter_refs(input, parameters)
def _try_resolve_parameter_refs(self, input, parameters): """ Try to resolve parameter references on the given input object. The object could be of any type. If the input is not in the format used by intrinsics (ie. dictionary with one key), input is returned unmodified. If the single key in the dictionary is one of the supported intrinsic function types, go ahead and try to resolve it. :param input: Input object to resolve :param parameters: Parameter values used for ref substitution :return: """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_parameter_refs(input, parameters)
[ "Try", "to", "resolve", "parameter", "references", "on", "the", "given", "input", "object", ".", "The", "object", "could", "be", "of", "any", "type", ".", "If", "the", "input", "is", "not", "in", "the", "format", "used", "by", "intrinsics", "(", "ie", ...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L162-L177
[ "def", "_try_resolve_parameter_refs", "(", "self", ",", "input", ",", "parameters", ")", ":", "if", "not", "self", ".", "_is_intrinsic_dict", "(", "input", ")", ":", "return", "input", "function_type", "=", "list", "(", "input", ".", "keys", "(", ")", ")",...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
IntrinsicsResolver._try_resolve_sam_resource_refs
Try to resolve SAM resource references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param SupportedResourceReferences supported_resource_refs: Object containing information about available resource references and the values they resolve to. :return: Modified input dictionary with references resolved
samtranslator/intrinsics/resolver.py
def _try_resolve_sam_resource_refs(self, input, supported_resource_refs): """ Try to resolve SAM resource references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param SupportedResourceReferences supported_resource_refs: Object containing information about available resource references and the values they resolve to. :return: Modified input dictionary with references resolved """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_resource_refs(input, supported_resource_refs)
def _try_resolve_sam_resource_refs(self, input, supported_resource_refs): """ Try to resolve SAM resource references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param SupportedResourceReferences supported_resource_refs: Object containing information about available resource references and the values they resolve to. :return: Modified input dictionary with references resolved """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_resource_refs(input, supported_resource_refs)
[ "Try", "to", "resolve", "SAM", "resource", "references", "on", "the", "given", "template", ".", "If", "the", "given", "object", "looks", "like", "one", "of", "the", "supported", "intrinsics", "it", "calls", "the", "appropriate", "resolution", "on", "it", "."...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L179-L194
[ "def", "_try_resolve_sam_resource_refs", "(", "self", ",", "input", ",", "supported_resource_refs", ")", ":", "if", "not", "self", ".", "_is_intrinsic_dict", "(", "input", ")", ":", "return", "input", "function_type", "=", "list", "(", "input", ".", "keys", "(...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
IntrinsicsResolver._try_resolve_sam_resource_id_refs
Try to resolve SAM resource id references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return: Modified input dictionary with id references resolved
samtranslator/intrinsics/resolver.py
def _try_resolve_sam_resource_id_refs(self, input, supported_resource_id_refs): """ Try to resolve SAM resource id references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return: Modified input dictionary with id references resolved """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_resource_id_refs(input, supported_resource_id_refs)
def _try_resolve_sam_resource_id_refs(self, input, supported_resource_id_refs): """ Try to resolve SAM resource id references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return: Modified input dictionary with id references resolved """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_resource_id_refs(input, supported_resource_id_refs)
[ "Try", "to", "resolve", "SAM", "resource", "id", "references", "on", "the", "given", "template", ".", "If", "the", "given", "object", "looks", "like", "one", "of", "the", "supported", "intrinsics", "it", "calls", "the", "appropriate", "resolution", "on", "it...
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L196-L210
[ "def", "_try_resolve_sam_resource_id_refs", "(", "self", ",", "input", ",", "supported_resource_id_refs", ")", ":", "if", "not", "self", ".", "_is_intrinsic_dict", "(", "input", ")", ":", "return", "input", "function_type", "=", "list", "(", "input", ".", "keys"...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
IntrinsicsResolver._is_intrinsic_dict
Can the input represent an intrinsic function in it? :param input: Object to be checked :return: True, if the input contains a supported intrinsic function. False otherwise
samtranslator/intrinsics/resolver.py
def _is_intrinsic_dict(self, input): """ Can the input represent an intrinsic function in it? :param input: Object to be checked :return: True, if the input contains a supported intrinsic function. False otherwise """ # All intrinsic functions are dictionaries with just one key return isinstance(input, dict) \ and len(input) == 1 \ and list(input.keys())[0] in self.supported_intrinsics
def _is_intrinsic_dict(self, input): """ Can the input represent an intrinsic function in it? :param input: Object to be checked :return: True, if the input contains a supported intrinsic function. False otherwise """ # All intrinsic functions are dictionaries with just one key return isinstance(input, dict) \ and len(input) == 1 \ and list(input.keys())[0] in self.supported_intrinsics
[ "Can", "the", "input", "represent", "an", "intrinsic", "function", "in", "it?" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L212-L222
[ "def", "_is_intrinsic_dict", "(", "self", ",", "input", ")", ":", "# All intrinsic functions are dictionaries with just one key", "return", "isinstance", "(", "input", ",", "dict", ")", "and", "len", "(", "input", ")", "==", "1", "and", "list", "(", "input", "."...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
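A quick standalone check of the single-key rule the predicate above encodes; the supported-intrinsic names used here are examples, not the library's actual registry.

supported = {"Ref", "Fn::Sub", "Fn::GetAtt"}

def is_intrinsic_dict(node):
    return isinstance(node, dict) and len(node) == 1 and next(iter(node)) in supported

print(is_intrinsic_dict({"Ref": "MyParam"}))              # True
print(is_intrinsic_dict({"Ref": "A", "Fn::Sub": "B"}))    # False: more than one key
print(is_intrinsic_dict({"Fn::ImportValue": "X"}))        # False: not in `supported`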
CloudWatchLogs.to_cloudformation
Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list
samtranslator/model/eventsources/cloudwatchlogs.py
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") source_arn = self.get_source_arn() permission = self._construct_permission(function, source_arn=source_arn) subscription_filter = self.get_subscription_filter(function, permission) resources = [permission, subscription_filter] return resources
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") source_arn = self.get_source_arn() permission = self._construct_permission(function, source_arn=source_arn) subscription_filter = self.get_subscription_filter(function, permission) resources = [permission, subscription_filter] return resources
[ "Returns", "the", "CloudWatch", "Logs", "Subscription", "Filter", "and", "Lambda", "Permission", "to", "which", "this", "CloudWatch", "Logs", "event", "source", "corresponds", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/cloudwatchlogs.py#L18-L36
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "source_ar...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
PolicyTemplatesProcessor.convert
Converts the given template to IAM-ready policy statement by substituting template parameters with the given values. :param template_name: Name of the template :param parameter_values: Values for all parameters of the template :return dict: Dictionary containing policy statement :raises ValueError: If the given inputs don't represent valid template :raises InsufficientParameterValues: If the parameter values don't have values for all required parameters
samtranslator/policy_template_processor/processor.py
def convert(self, template_name, parameter_values): """ Converts the given template to IAM-ready policy statement by substituting template parameters with the given values. :param template_name: Name of the template :param parameter_values: Values for all parameters of the template :return dict: Dictionary containing policy statement :raises ValueError: If the given inputs don't represent valid template :raises InsufficientParameterValues: If the parameter values don't have values for all required parameters """ if not self.has(template_name): raise TemplateNotFoundException(template_name) template = self.get(template_name) return template.to_statement(parameter_values)
def convert(self, template_name, parameter_values): """ Converts the given template to IAM-ready policy statement by substituting template parameters with the given values. :param template_name: Name of the template :param parameter_values: Values for all parameters of the template :return dict: Dictionary containing policy statement :raises ValueError: If the given inputs don't represent valid template :raises InsufficientParameterValues: If the parameter values don't have values for all required parameters """ if not self.has(template_name): raise TemplateNotFoundException(template_name) template = self.get(template_name) return template.to_statement(parameter_values)
[ "Converts", "the", "given", "template", "to", "IAM", "-", "ready", "policy", "statement", "by", "substituting", "template", "parameters", "with", "the", "given", "values", "." ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/policy_template_processor/processor.py#L84-L100
[ "def", "convert", "(", "self", ",", "template_name", ",", "parameter_values", ")", ":", "if", "not", "self", ".", "has", "(", "template_name", ")", ":", "raise", "TemplateNotFoundException", "(", "template_name", ")", "template", "=", "self", ".", "get", "("...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
PolicyTemplatesProcessor._is_valid_templates_dict
Is this a valid policy template dictionary :param dict policy_templates_dict: Data to be validated :param dict schema: Optional, dictionary containing JSON Schema representing policy template :return: True, if it is valid. :raises ValueError: If the template dictionary doesn't match up with the schema
samtranslator/policy_template_processor/processor.py
def _is_valid_templates_dict(policy_templates_dict, schema=None): """ Is this a valid policy template dictionary :param dict policy_templates_dict: Data to be validated :param dict schema: Optional, dictionary containing JSON Schema representing policy template :return: True, if it is valid. :raises ValueError: If the template dictionary doesn't match up with the schema """ if not schema: schema = PolicyTemplatesProcessor._read_schema() try: jsonschema.validate(policy_templates_dict, schema) except ValidationError as ex: # Stringifying the exception will give us useful error message raise ValueError(str(ex)) return True
def _is_valid_templates_dict(policy_templates_dict, schema=None): """ Is this a valid policy template dictionary :param dict policy_templates_dict: Data to be validated :param dict schema: Optional, dictionary containing JSON Schema representing policy template :return: True, if it is valid. :raises ValueError: If the template dictionary doesn't match up with the schema """ if not schema: schema = PolicyTemplatesProcessor._read_schema() try: jsonschema.validate(policy_templates_dict, schema) except ValidationError as ex: # Stringifying the exception will give us useful error message raise ValueError(str(ex)) return True
[ "Is", "this", "a", "valid", "policy", "template", "dictionary" ]
awslabs/serverless-application-model
python
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/policy_template_processor/processor.py#L103-L122
[ "def", "_is_valid_templates_dict", "(", "policy_templates_dict", ",", "schema", "=", "None", ")", ":", "if", "not", "schema", ":", "schema", "=", "PolicyTemplatesProcessor", ".", "_read_schema", "(", ")", "try", ":", "jsonschema", ".", "validate", "(", "policy_t...
cccb0c96b5c91e53355ebc07e542467303a5eedd
train
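The same validate-and-rewrap pattern shown above, applied to a toy schema. This requires the jsonschema package; the schema itself is made up for the example and is not the real policy-template schema.

import jsonschema
from jsonschema.exceptions import ValidationError

schema = {"type": "object",
          "properties": {"Version": {"type": "string"}},
          "required": ["Version"]}

def is_valid(doc):
    try:
        jsonschema.validate(doc, schema)
    except ValidationError as ex:
        raise ValueError(str(ex))   # stringify so callers get a readable message
    return True

print(is_valid({"Version": "2012-10-17"}))   # True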
RenderEngine.render_chart_to_file
Render a chart or page to local html files. :param chart: A Chart or Page object :param path: The destination file which the html code is written to :param template_name: The name of the template file.
pyecharts/render/engine.py
def render_chart_to_file(self, template_name: str, chart: Any, path: str): """ Render a chart or page to local html files. :param chart: A Chart or Page object :param path: The destination file which the html code is written to :param template_name: The name of the template file. """ tpl = self.env.get_template(template_name) html = tpl.render(chart=self.generate_js_link(chart)) write_utf8_html_file(path, self._reg_replace(html))
def render_chart_to_file(self, template_name: str, chart: Any, path: str): """ Render a chart or page to local html files. :param chart: A Chart or Page object :param path: The destination file which the html code is written to :param template_name: The name of the template file. """ tpl = self.env.get_template(template_name) html = tpl.render(chart=self.generate_js_link(chart)) write_utf8_html_file(path, self._reg_replace(html))
[ "Render", "a", "chart", "or", "page", "to", "local", "html", "files", "." ]
pyecharts/pyecharts
python
https://github.com/pyecharts/pyecharts/blob/02050acb0e94bb9453b88a25028de7a0ce23f125/pyecharts/render/engine.py#L36-L46
[ "def", "render_chart_to_file", "(", "self", ",", "template_name", ":", "str", ",", "chart", ":", "Any", ",", "path", ":", "str", ")", ":", "tpl", "=", "self", ".", "env", ".", "get_template", "(", "template_name", ")", "html", "=", "tpl", ".", "render"...
02050acb0e94bb9453b88a25028de7a0ce23f125
train
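A generic form of the get-template / render / write-UTF-8 flow shown above, using jinja2 directly; the template directory and file name are invented for the example, and this is a sketch rather than pyecharts' own engine.

from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("templates"))

def render_to_file(template_name, path, **context):
    html = env.get_template(template_name).render(**context)
    with open(path, "w", encoding="utf-8") as f:   # explicit UTF-8, like write_utf8_html_file
        f.write(html)

# render_to_file("simple_chart.html", "render.html", chart=my_chart)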
decode_base64
Decode base64, padding being optional. :param data: Base64 data as an ASCII byte string :returns: The decoded byte string.
pyecharts/render/snapshot.py
def decode_base64(data: str) -> bytes: """Decode base64, padding being optional. :param data: Base64 data as an ASCII byte string :returns: The decoded byte string. """ missing_padding = len(data) % 4 if missing_padding != 0: data += "=" * (4 - missing_padding) return base64.decodebytes(data.encode("utf-8"))
def decode_base64(data: str) -> bytes: """Decode base64, padding being optional. :param data: Base64 data as an ASCII byte string :returns: The decoded byte string. """ missing_padding = len(data) % 4 if missing_padding != 0: data += "=" * (4 - missing_padding) return base64.decodebytes(data.encode("utf-8"))
[ "Decode", "base64", "padding", "being", "optional", "." ]
pyecharts/pyecharts
python
https://github.com/pyecharts/pyecharts/blob/02050acb0e94bb9453b88a25028de7a0ce23f125/pyecharts/render/snapshot.py#L58-L67
[ "def", "decode_base64", "(", "data", ":", "str", ")", "->", "bytes", ":", "missing_padding", "=", "len", "(", "data", ")", "%", "4", "if", "missing_padding", "!=", "0", ":", "data", "+=", "\"=\"", "*", "(", "4", "-", "missing_padding", ")", "return", ...
02050acb0e94bb9453b88a25028de7a0ce23f125
train
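The padding repair decode_base64 performs, applied by hand to one sample string so the effect is visible:

import base64

data = "aGVsbG8"                               # "hello" in base64 with its "=" stripped
padded = data + "=" * (-len(data) % 4)         # restore the optional padding
print(base64.decodebytes(padded.encode("utf-8")))   # b'hello'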
Tree._set_collapse_interval
Collapse nodes at the given interval; when there are too many nodes, this keeps the display from becoming cluttered. :param data: node data :param interval: the collapse interval
pyecharts/charts/basic_charts/tree.py
def _set_collapse_interval(data, interval): """ Collapse nodes at the given interval; when there are too many nodes, this keeps the display from becoming cluttered. :param data: node data :param interval: the collapse interval """ if interval <= 0: return data if data and isinstance(data, list): for d in data: children = d.get("children", None) if children and interval > 0: for index, value in enumerate(children): if index % interval == 0: value.update(collapsed="false") return data
def _set_collapse_interval(data, interval): """ Collapse nodes at the given interval; when there are too many nodes, this keeps the display from becoming cluttered. :param data: node data :param interval: the collapse interval """ if interval <= 0: return data if data and isinstance(data, list): for d in data: children = d.get("children", None) if children and interval > 0: for index, value in enumerate(children): if index % interval == 0: value.update(collapsed="false") return data
[ "间隔折叠节点,当节点过多时可以解决节点显示过杂间隔。", ":", "param", "data", ":", "节点数据", ":", "param", "interval", ":", "指定间隔" ]
pyecharts/pyecharts
python
https://github.com/pyecharts/pyecharts/blob/02050acb0e94bb9453b88a25028de7a0ce23f125/pyecharts/charts/basic_charts/tree.py#L19-L35
[ "def", "_set_collapse_interval", "(", "data", ",", "interval", ")", ":", "if", "interval", "<=", "0", ":", "return", "data", "if", "data", "and", "isinstance", "(", "data", ",", "list", ")", ":", "for", "d", "in", "data", ":", "children", "=", "d", "...
02050acb0e94bb9453b88a25028de7a0ce23f125
train
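A standalone illustration of the interval marking done above, on a tiny tree whose node names are made up for the example:

data = [{"name": "root", "children": [{"name": "c%d" % i} for i in range(6)]}]

interval = 2
for node in data:
    for index, child in enumerate(node.get("children", [])):
        if index % interval == 0:
            child.update(collapsed="false")    # same string flag the chart option uses above

print([c.get("collapsed") for c in data[0]["children"]])
# -> ['false', None, 'false', None, 'false', None]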
parse_pin
Parses a string and returns a pin-num.
ports/nrf/boards/make-pins.py
def parse_pin(name_str): """Parses a string and returns a pin-num.""" if len(name_str) < 1: raise ValueError("Expecting pin name to be at least 4 characters.") if name_str[0] != 'P': raise ValueError("Expecting pin name to start with P") pin_str = name_str[1:].split('/')[0] if not pin_str.isdigit(): raise ValueError("Expecting numeric pin number.") return int(pin_str)
def parse_pin(name_str): """Parses a string and returns a pin-num.""" if len(name_str) < 1: raise ValueError("Expecting pin name to be at least 4 characters.") if name_str[0] != 'P': raise ValueError("Expecting pin name to start with P") pin_str = name_str[1:].split('/')[0] if not pin_str.isdigit(): raise ValueError("Expecting numeric pin number.") return int(pin_str)
[ "Parses", "a", "string", "and", "returns", "a", "pin", "-", "num", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/ports/nrf/boards/make-pins.py#L14-L23
[ "def", "parse_pin", "(", "name_str", ")", ":", "if", "len", "(", "name_str", ")", "<", "1", ":", "raise", "ValueError", "(", "\"Expecting pin name to be at least 4 charcters.\"", ")", "if", "name_str", "[", "0", "]", "!=", "'P'", ":", "raise", "ValueError", ...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
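The same parsing steps as parse_pin above, unrolled on one sample name. The "P<num>/<alt-name>" shape is assumed from the code, not taken from the board csv files.

name = "P13/NFC2"                 # hypothetical pin name
assert name[0] == "P"
pin_str = name[1:].split("/")[0]  # keep only the numeric part before any "/"
print(int(pin_str))               # 13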
AlternateFunction.ptr
Returns the numbered function (i.e. USART6) for this AF.
ports/nrf/boards/make-pins.py
def ptr(self): """Returns the numbered function (i.e. USART6) for this AF.""" if self.fn_num is None: return self.func return '{:s}{:d}'.format(self.func, self.fn_num)
def ptr(self): """Returns the numbered function (i.e. USART6) for this AF.""" if self.fn_num is None: return self.func return '{:s}{:d}'.format(self.func, self.fn_num)
[ "Returns", "the", "numbered", "function", "(", "i", ".", "e", ".", "USART6", ")", "for", "this", "AF", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/ports/nrf/boards/make-pins.py#L61-L65
[ "def", "ptr", "(", "self", ")", ":", "if", "self", ".", "fn_num", "is", "None", ":", "return", "self", ".", "func", "return", "'{:s}{:d}'", ".", "format", "(", "self", ".", "func", ",", "self", ".", "fn_num", ")" ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
AlternateFunction.print
Prints the C representation of this AF.
ports/nrf/boards/make-pins.py
def print(self): """Prints the C representation of this AF.""" if self.supported: print(' AF', end='') else: print(' //', end='') fn_num = self.fn_num if fn_num is None: fn_num = 0 print('({:2d}, {:8s}, {:2d}, {:10s}, {:8s}), // {:s}'.format(self.idx, self.func, fn_num, self.pin_type, self.ptr(), self.af_str))
def print(self): """Prints the C representation of this AF.""" if self.supported: print(' AF', end='') else: print(' //', end='') fn_num = self.fn_num if fn_num is None: fn_num = 0 print('({:2d}, {:8s}, {:2d}, {:10s}, {:8s}), // {:s}'.format(self.idx, self.func, fn_num, self.pin_type, self.ptr(), self.af_str))
[ "Prints", "the", "C", "representation", "of", "this", "AF", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/ports/nrf/boards/make-pins.py#L70-L80
[ "def", "print", "(", "self", ")", ":", "if", "self", ".", "supported", ":", "print", "(", "' AF'", ",", "end", "=", "''", ")", "else", ":", "print", "(", "' //'", ",", "end", "=", "''", ")", "fn_num", "=", "self", ".", "fn_num", "if", "fn_num",...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
run_loop
Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects
examples/switch.py
def run_loop(leds=all_leds): """ Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects """ print('Loop started.\nPress Ctrl+C to break out of the loop.') while 1: try: if switch(): [led.on() for led in leds] else: [led.off() for led in leds] except OSError: # VCPInterrupt # Ctrl+C in interpreter mode. break
def run_loop(leds=all_leds): """ Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects """ print('Loop started.\nPress Ctrl+C to break out of the loop.') while 1: try: if switch(): [led.on() for led in leds] else: [led.off() for led in leds] except OSError: # VCPInterrupt # Ctrl+C in interpreter mode. break
[ "Start", "the", "loop", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/examples/switch.py#L27-L42
[ "def", "run_loop", "(", "leds", "=", "all_leds", ")", ":", "print", "(", "'Loop started.\\nPress Ctrl+C to break out of the loop.'", ")", "while", "1", ":", "try", ":", "if", "switch", "(", ")", ":", "[", "led", ".", "on", "(", ")", "for", "led", "in", "...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
find_c_file
Search vpaths for the c file that matches the provided object_file. :param str obj_file: object file to find the matching c file for :param List[str] vpath: List of base paths, similar to gcc vpath :return: str path to c file or None
py/makemoduledefs.py
def find_c_file(obj_file, vpath): """ Search vpaths for the c file that matches the provided object_file. :param str obj_file: object file to find the matching c file for :param List[str] vpath: List of base paths, similar to gcc vpath :return: str path to c file or None """ c_file = None relative_c_file = os.path.splitext(obj_file)[0] + ".c" relative_c_file = relative_c_file.lstrip('/\\') for p in vpath: possible_c_file = os.path.join(p, relative_c_file) if os.path.exists(possible_c_file): c_file = possible_c_file break return c_file
def find_c_file(obj_file, vpath): """ Search vpaths for the c file that matches the provided object_file. :param str obj_file: object file to find the matching c file for :param List[str] vpath: List of base paths, similar to gcc vpath :return: str path to c file or None """ c_file = None relative_c_file = os.path.splitext(obj_file)[0] + ".c" relative_c_file = relative_c_file.lstrip('/\\') for p in vpath: possible_c_file = os.path.join(p, relative_c_file) if os.path.exists(possible_c_file): c_file = possible_c_file break return c_file
[ "Search", "vpaths", "for", "the", "c", "file", "that", "matches", "the", "provided", "object_file", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/py/makemoduledefs.py#L22-L38
[ "def", "find_c_file", "(", "obj_file", ",", "vpath", ")", ":", "c_file", "=", "None", "relative_c_file", "=", "os", ".", "path", ".", "splitext", "(", "obj_file", ")", "[", "0", "]", "+", "\".c\"", "relative_c_file", "=", "relative_c_file", ".", "lstrip", ...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
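The path arithmetic find_c_file relies on, shown on one hypothetical object file; each candidate would then be tested with os.path.exists against the real tree.

import os.path

obj_file = "/py/objstr.o"
rel = (os.path.splitext(obj_file)[0] + ".c").lstrip("/\\")
print(rel)                                        # py/objstr.c
print([os.path.join(p, rel) for p in ["..", "../.."]])
# -> ['../py/objstr.c', '../../py/objstr.c'] (on a POSIX path separator)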
find_module_registrations
Find any MP_REGISTER_MODULE definitions in the provided c file. :param str c_file: path to c file to check :return: List[(module_name, obj_module, enabled_define)]
py/makemoduledefs.py
def find_module_registrations(c_file): """ Find any MP_REGISTER_MODULE definitions in the provided c file. :param str c_file: path to c file to check :return: List[(module_name, obj_module, enabled_define)] """ global pattern if c_file is None: # No c file to match the object file, skip return set() with io.open(c_file, encoding='utf-8') as c_file_obj: return set(re.findall(pattern, c_file_obj.read()))
def find_module_registrations(c_file): """ Find any MP_REGISTER_MODULE definitions in the provided c file. :param str c_file: path to c file to check :return: List[(module_name, obj_module, enabled_define)] """ global pattern if c_file is None: # No c file to match the object file, skip return set() with io.open(c_file, encoding='utf-8') as c_file_obj: return set(re.findall(pattern, c_file_obj.read()))
[ "Find", "any", "MP_REGISTER_MODULE", "definitions", "in", "the", "provided", "c", "file", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/py/makemoduledefs.py#L41-L54
[ "def", "find_module_registrations", "(", "c_file", ")", ":", "global", "pattern", "if", "c_file", "is", "None", ":", "# No c file to match the object file, skip", "return", "set", "(", ")", "with", "io", ".", "open", "(", "c_file", ",", "encoding", "=", "'utf-8'...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
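Illustrative only: a simplified pattern for the MP_REGISTER_MODULE(...) lines the function above scans for. The real module-level `pattern` in makemoduledefs.py may differ from this one.

import re

pattern = re.compile(r"MP_REGISTER_MODULE\((.*?),\s*(.*?),\s*(.*?)\);")
src = "MP_REGISTER_MODULE(MP_QSTR_uhashlib, mp_module_uhashlib, MICROPY_PY_UHASHLIB);"
print(set(pattern.findall(src)))
# -> {('MP_QSTR_uhashlib', 'mp_module_uhashlib', 'MICROPY_PY_UHASHLIB')}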
generate_module_table_header
Generate header with module table entries for builtin modules. :param List[(module_name, obj_module, enabled_define)] modules: module defs :return: None
py/makemoduledefs.py
def generate_module_table_header(modules): """ Generate header with module table entries for builtin modules. :param List[(module_name, obj_module, enabled_define)] modules: module defs :return: None """ # Print header file for all external modules. mod_defs = [] print("// Automatically generated by makemoduledefs.py.\n") for module_name, obj_module, enabled_define in modules: mod_def = "MODULE_DEF_{}".format(module_name.upper()) mod_defs.append(mod_def) print(( "#if ({enabled_define})\n" " extern const struct _mp_obj_module_t {obj_module};\n" " #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }},\n" "#else\n" " #define {mod_def}\n" "#endif\n" ).format(module_name=module_name, obj_module=obj_module, enabled_define=enabled_define, mod_def=mod_def) ) print("\n#define MICROPY_REGISTERED_MODULES \\") for mod_def in mod_defs: print(" {mod_def} \\".format(mod_def=mod_def)) print("// MICROPY_REGISTERED_MODULES")
def generate_module_table_header(modules): """ Generate header with module table entries for builtin modules. :param List[(module_name, obj_module, enabled_define)] modules: module defs :return: None """ # Print header file for all external modules. mod_defs = [] print("// Automatically generated by makemoduledefs.py.\n") for module_name, obj_module, enabled_define in modules: mod_def = "MODULE_DEF_{}".format(module_name.upper()) mod_defs.append(mod_def) print(( "#if ({enabled_define})\n" " extern const struct _mp_obj_module_t {obj_module};\n" " #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }},\n" "#else\n" " #define {mod_def}\n" "#endif\n" ).format(module_name=module_name, obj_module=obj_module, enabled_define=enabled_define, mod_def=mod_def) ) print("\n#define MICROPY_REGISTERED_MODULES \\") for mod_def in mod_defs: print(" {mod_def} \\".format(mod_def=mod_def)) print("// MICROPY_REGISTERED_MODULES")
[ "Generate", "header", "with", "module", "table", "entries", "for", "builtin", "modules", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/py/makemoduledefs.py#L57-L86
[ "def", "generate_module_table_header", "(", "modules", ")", ":", "# Print header file for all external modules.", "mod_defs", "=", "[", "]", "print", "(", "\"// Automatically generated by makemoduledefs.py.\\n\"", ")", "for", "module_name", ",", "obj_module", ",", "enabled_de...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
readfiles
Reads test files
tools/gen-cpydiff.py
def readfiles(): """ Reads test files """ tests = list(filter(lambda x: x.endswith('.py'), os.listdir(TESTPATH))) tests.sort() files = [] for test in tests: text = open(TESTPATH + test, 'r').read() try: class_, desc, cause, workaround, code = [x.rstrip() for x in \ list(filter(None, re.split(SPLIT, text)))] output = Output(test, class_, desc, cause, workaround, code, '', '', '') files.append(output) except IndexError: print('Incorrect format in file ' + TESTPATH + test) return files
def readfiles(): """ Reads test files """ tests = list(filter(lambda x: x.endswith('.py'), os.listdir(TESTPATH))) tests.sort() files = [] for test in tests: text = open(TESTPATH + test, 'r').read() try: class_, desc, cause, workaround, code = [x.rstrip() for x in \ list(filter(None, re.split(SPLIT, text)))] output = Output(test, class_, desc, cause, workaround, code, '', '', '') files.append(output) except IndexError: print('Incorrect format in file ' + TESTPATH + test) return files
[ "Reads", "test", "files" ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L63-L80
[ "def", "readfiles", "(", ")", ":", "tests", "=", "list", "(", "filter", "(", "lambda", "x", ":", "x", ".", "endswith", "(", "'.py'", ")", ",", "os", ".", "listdir", "(", "TESTPATH", ")", ")", ")", "tests", ".", "sort", "(", ")", "files", "=", "...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
uimports
converts CPython module names into MicroPython equivalents
tools/gen-cpydiff.py
def uimports(code): """ converts CPython module names into MicroPython equivalents """ for uimport in UIMPORTLIST: uimport = bytes(uimport, 'utf8') code = code.replace(uimport, b'u' + uimport) return code
def uimports(code): """ converts CPython module names into MicroPython equivalents """ for uimport in UIMPORTLIST: uimport = bytes(uimport, 'utf8') code = code.replace(uimport, b'u' + uimport) return code
[ "converts", "CPython", "module", "names", "into", "MicroPython", "equivalents" ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L82-L87
[ "def", "uimports", "(", "code", ")", ":", "for", "uimport", "in", "UIMPORTLIST", ":", "uimport", "=", "bytes", "(", "uimport", ",", "'utf8'", ")", "code", "=", "code", ".", "replace", "(", "uimport", ",", "b'u'", "+", "uimport", ")", "return", "code" ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
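Effect of the prefixing uimports performs, assuming for the example that UIMPORTLIST contains ('struct', 'collections'); the real list is defined elsewhere in the script.

code = b"import struct\nimport collections\n"
for name in ("struct", "collections"):
    code = code.replace(bytes(name, "utf8"), b"u" + bytes(name, "utf8"))
print(code)   # b'import ustruct\nimport ucollections\n'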
indent
indents paragraphs of text for rst formatting
tools/gen-cpydiff.py
def indent(block, spaces): """ indents paragraphs of text for rst formatting """ new_block = '' for line in block.split('\n'): new_block += spaces + line + '\n' return new_block
def indent(block, spaces): """ indents paragraphs of text for rst formatting """ new_block = '' for line in block.split('\n'): new_block += spaces + line + '\n' return new_block
[ "indents", "paragraphs", "of", "text", "for", "rst", "formatting" ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L116-L121
[ "def", "indent", "(", "block", ",", "spaces", ")", ":", "new_block", "=", "''", "for", "line", "in", "block", ".", "split", "(", "'\\n'", ")", ":", "new_block", "+=", "spaces", "+", "line", "+", "'\\n'", "return", "new_block" ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
gen_table
creates a table given any set of columns
tools/gen-cpydiff.py
def gen_table(contents): """ creates a table given any set of columns """ xlengths = [] ylengths = [] for column in contents: col_len = 0 for entry in column: lines = entry.split('\n') for line in lines: col_len = max(len(line) + 2, col_len) xlengths.append(col_len) for i in range(len(contents[0])): ymax = 0 for j in range(len(contents)): ymax = max(ymax, len(contents[j][i].split('\n'))) ylengths.append(ymax) table_divider = '+' + ''.join(['-' * i + '+' for i in xlengths]) + '\n' table = table_divider for i in range(len(ylengths)): row = [column[i] for column in contents] row = [entry + '\n' * (ylengths[i]-len(entry.split('\n'))) for entry in row] row = [entry.split('\n') for entry in row] for j in range(ylengths[i]): k = 0 for entry in row: width = xlengths[k] table += ''.join(['| {:{}}'.format(entry[j], width - 1)]) k += 1 table += '|\n' table += table_divider return table + '\n'
def gen_table(contents): """ creates a table given any set of columns """ xlengths = [] ylengths = [] for column in contents: col_len = 0 for entry in column: lines = entry.split('\n') for line in lines: col_len = max(len(line) + 2, col_len) xlengths.append(col_len) for i in range(len(contents[0])): ymax = 0 for j in range(len(contents)): ymax = max(ymax, len(contents[j][i].split('\n'))) ylengths.append(ymax) table_divider = '+' + ''.join(['-' * i + '+' for i in xlengths]) + '\n' table = table_divider for i in range(len(ylengths)): row = [column[i] for column in contents] row = [entry + '\n' * (ylengths[i]-len(entry.split('\n'))) for entry in row] row = [entry.split('\n') for entry in row] for j in range(ylengths[i]): k = 0 for entry in row: width = xlengths[k] table += ''.join(['| {:{}}'.format(entry[j], width - 1)]) k += 1 table += '|\n' table += table_divider return table + '\n'
[ "creates", "a", "table", "given", "any", "set", "of", "columns" ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L123-L154
[ "def", "gen_table", "(", "contents", ")", ":", "xlengths", "=", "[", "]", "ylengths", "=", "[", "]", "for", "column", "in", "contents", ":", "col_len", "=", "0", "for", "entry", "in", "column", ":", "lines", "=", "entry", ".", "split", "(", "'\\n'", ...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
gen_rst
creates restructured text documents to display tests
tools/gen-cpydiff.py
def gen_rst(results): """ creates restructured text documents to display tests """ # make sure the destination directory exists try: os.mkdir(DOCPATH) except OSError as e: if e.args[0] != errno.EEXIST and e.args[0] != errno.EISDIR: raise toctree = [] class_ = [] for output in results: section = output.class_.split(',') for i in range(len(section)): section[i] = section[i].rstrip() if section[i] in CLASSMAP: section[i] = CLASSMAP[section[i]] if i >= len(class_) or section[i] != class_[i]: if i == 0: filename = section[i].replace(' ', '_').lower() rst = open(DOCPATH + filename + '.rst', 'w') rst.write(HEADER) rst.write(section[i] + '\n') rst.write(RSTCHARS[0] * len(section[i])) rst.write(time.strftime("\nGenerated %a %d %b %Y %X UTC\n\n", time.gmtime())) toctree.append(filename) else: rst.write(section[i] + '\n') rst.write(RSTCHARS[min(i, len(RSTCHARS)-1)] * len(section[i])) rst.write('\n\n') class_ = section rst.write('.. _cpydiff_%s:\n\n' % output.name.rsplit('.', 1)[0]) rst.write(output.desc + '\n') rst.write('~' * len(output.desc) + '\n\n') if output.cause != 'Unknown': rst.write('**Cause:** ' + output.cause + '\n\n') if output.workaround != 'Unknown': rst.write('**Workaround:** ' + output.workaround + '\n\n') rst.write('Sample code::\n\n' + indent(output.code, TAB) + '\n') output_cpy = indent(''.join(output.output_cpy[0:2]), TAB).rstrip() output_cpy = ('::\n\n' if output_cpy != '' else '') + output_cpy output_upy = indent(''.join(output.output_upy[0:2]), TAB).rstrip() output_upy = ('::\n\n' if output_upy != '' else '') + output_upy table = gen_table([['CPy output:', output_cpy], ['uPy output:', output_upy]]) rst.write(table) template = open(INDEXTEMPLATE, 'r') index = open(DOCPATH + INDEX, 'w') index.write(HEADER) index.write(template.read()) for section in INDEXPRIORITY: if section in toctree: index.write(indent(section + '.rst', TAB)) toctree.remove(section) for section in toctree: index.write(indent(section + '.rst', TAB))
def gen_rst(results): """ creates restructured text documents to display tests """ # make sure the destination directory exists try: os.mkdir(DOCPATH) except OSError as e: if e.args[0] != errno.EEXIST and e.args[0] != errno.EISDIR: raise toctree = [] class_ = [] for output in results: section = output.class_.split(',') for i in range(len(section)): section[i] = section[i].rstrip() if section[i] in CLASSMAP: section[i] = CLASSMAP[section[i]] if i >= len(class_) or section[i] != class_[i]: if i == 0: filename = section[i].replace(' ', '_').lower() rst = open(DOCPATH + filename + '.rst', 'w') rst.write(HEADER) rst.write(section[i] + '\n') rst.write(RSTCHARS[0] * len(section[i])) rst.write(time.strftime("\nGenerated %a %d %b %Y %X UTC\n\n", time.gmtime())) toctree.append(filename) else: rst.write(section[i] + '\n') rst.write(RSTCHARS[min(i, len(RSTCHARS)-1)] * len(section[i])) rst.write('\n\n') class_ = section rst.write('.. _cpydiff_%s:\n\n' % output.name.rsplit('.', 1)[0]) rst.write(output.desc + '\n') rst.write('~' * len(output.desc) + '\n\n') if output.cause != 'Unknown': rst.write('**Cause:** ' + output.cause + '\n\n') if output.workaround != 'Unknown': rst.write('**Workaround:** ' + output.workaround + '\n\n') rst.write('Sample code::\n\n' + indent(output.code, TAB) + '\n') output_cpy = indent(''.join(output.output_cpy[0:2]), TAB).rstrip() output_cpy = ('::\n\n' if output_cpy != '' else '') + output_cpy output_upy = indent(''.join(output.output_upy[0:2]), TAB).rstrip() output_upy = ('::\n\n' if output_upy != '' else '') + output_upy table = gen_table([['CPy output:', output_cpy], ['uPy output:', output_upy]]) rst.write(table) template = open(INDEXTEMPLATE, 'r') index = open(DOCPATH + INDEX, 'w') index.write(HEADER) index.write(template.read()) for section in INDEXPRIORITY: if section in toctree: index.write(indent(section + '.rst', TAB)) toctree.remove(section) for section in toctree: index.write(indent(section + '.rst', TAB))
[ "creates", "restructured", "text", "documents", "to", "display", "tests" ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L156-L213
[ "def", "gen_rst", "(", "results", ")", ":", "# make sure the destination directory exists", "try", ":", "os", ".", "mkdir", "(", "DOCPATH", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "args", "[", "0", "]", "!=", "errno", ".", "EEXIST", "an...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
main
Main function
tools/gen-cpydiff.py
def main(): """ Main function """ # set search path so that test scripts find the test modules (and no other ones) os.environ['PYTHONPATH'] = TESTPATH os.environ['MICROPYPATH'] = TESTPATH files = readfiles() results = run_tests(files) gen_rst(results)
def main(): """ Main function """ # set search path so that test scripts find the test modules (and no other ones) os.environ['PYTHONPATH'] = TESTPATH os.environ['MICROPYPATH'] = TESTPATH files = readfiles() results = run_tests(files) gen_rst(results)
[ "Main", "function" ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L215-L224
[ "def", "main", "(", ")", ":", "# set search path so that test scripts find the test modules (and no other ones)", "os", ".", "environ", "[", "'PYTHONPATH'", "]", "=", "TESTPATH", "os", ".", "environ", "[", "'MICROPYPATH'", "]", "=", "TESTPATH", "files", "=", "readfile...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
init
Initializes the found DFU device so that we can program it.
tools/pydfu.py
def init(): """Initializes the found DFU device so that we can program it.""" global __dev, __cfg_descr devices = get_dfu_devices(idVendor=__VID, idProduct=__PID) if not devices: raise ValueError('No DFU device found') if len(devices) > 1: raise ValueError("Multiple DFU devices found") __dev = devices[0] __dev.set_configuration() # Claim DFU interface usb.util.claim_interface(__dev, __DFU_INTERFACE) # Find the DFU configuration descriptor, either in the device or interfaces __cfg_descr = None for cfg in __dev.configurations(): __cfg_descr = find_dfu_cfg_descr(cfg.extra_descriptors) if __cfg_descr: break for itf in cfg.interfaces(): __cfg_descr = find_dfu_cfg_descr(itf.extra_descriptors) if __cfg_descr: break # Get device into idle state for attempt in range(4): status = get_status() if status == __DFU_STATE_DFU_IDLE: break elif (status == __DFU_STATE_DFU_DOWNLOAD_IDLE or status == __DFU_STATE_DFU_UPLOAD_IDLE): abort_request() else: clr_status()
def init(): """Initializes the found DFU device so that we can program it.""" global __dev, __cfg_descr devices = get_dfu_devices(idVendor=__VID, idProduct=__PID) if not devices: raise ValueError('No DFU device found') if len(devices) > 1: raise ValueError("Multiple DFU devices found") __dev = devices[0] __dev.set_configuration() # Claim DFU interface usb.util.claim_interface(__dev, __DFU_INTERFACE) # Find the DFU configuration descriptor, either in the device or interfaces __cfg_descr = None for cfg in __dev.configurations(): __cfg_descr = find_dfu_cfg_descr(cfg.extra_descriptors) if __cfg_descr: break for itf in cfg.interfaces(): __cfg_descr = find_dfu_cfg_descr(itf.extra_descriptors) if __cfg_descr: break # Get device into idle state for attempt in range(4): status = get_status() if status == __DFU_STATE_DFU_IDLE: break elif (status == __DFU_STATE_DFU_DOWNLOAD_IDLE or status == __DFU_STATE_DFU_UPLOAD_IDLE): abort_request() else: clr_status()
[ "Initializes", "the", "found", "DFU", "device", "so", "that", "we", "can", "program", "it", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L92-L126
[ "def", "init", "(", ")", ":", "global", "__dev", ",", "__cfg_descr", "devices", "=", "get_dfu_devices", "(", "idVendor", "=", "__VID", ",", "idProduct", "=", "__PID", ")", "if", "not", "devices", ":", "raise", "ValueError", "(", "'No DFU device found'", ")",...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
mass_erase
Performs a MASS erase (i.e. erases the entire device).
tools/pydfu.py
def mass_erase(): """Performs a MASS erase (i.e. erases the entire device.""" # Send DNLOAD with first byte=0x41 __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, "\x41", __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
def mass_erase(): """Performs a MASS erase (i.e. erases the entire device.""" # Send DNLOAD with first byte=0x41 __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, "\x41", __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
[ "Performs", "a", "MASS", "erase", "(", "i", ".", "e", ".", "erases", "the", "entire", "device", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L148-L160
[ "def", "mass_erase", "(", ")", ":", "# Send DNLOAD with first byte=0x41", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "0", ",", "__DFU_INTERFACE", ",", "\"\\x41\"", ",", "__TIMEOUT", ")", "# Execute last command", "if", "get_status", "(", ...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
page_erase
Erases a single page.
tools/pydfu.py
def page_erase(addr): """Erases a single page.""" if __verbose: print("Erasing page: 0x%x..." % (addr)) # Send DNLOAD with first byte=0x41 and page address buf = struct.pack("<BI", 0x41, addr) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
def page_erase(addr): """Erases a single page.""" if __verbose: print("Erasing page: 0x%x..." % (addr)) # Send DNLOAD with first byte=0x41 and page address buf = struct.pack("<BI", 0x41, addr) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
[ "Erases", "a", "single", "page", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L163-L179
[ "def", "page_erase", "(", "addr", ")", ":", "if", "__verbose", ":", "print", "(", "\"Erasing page: 0x%x...\"", "%", "(", "addr", ")", ")", "# Send DNLOAD with first byte=0x41 and page address", "buf", "=", "struct", ".", "pack", "(", "\"<BI\"", ",", "0x41", ",",...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
set_address
Sets the address for the next operation.
tools/pydfu.py
def set_address(addr): """Sets the address for the next operation.""" # Send DNLOAD with first byte=0x21 and page address buf = struct.pack("<BI", 0x21, addr) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: set address failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: set address failed")
def set_address(addr): """Sets the address for the next operation.""" # Send DNLOAD with first byte=0x21 and page address buf = struct.pack("<BI", 0x21, addr) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: set address failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: set address failed")
[ "Sets", "the", "address", "for", "the", "next", "operation", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L182-L194
[ "def", "set_address", "(", "addr", ")", ":", "# Send DNLOAD with first byte=0x21 and page address", "buf", "=", "struct", ".", "pack", "(", "\"<BI\"", ",", "0x21", ",", "addr", ")", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "0", ","...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
write_memory
Writes a buffer into memory. This routine assumes that memory has already been erased.
tools/pydfu.py
def write_memory(addr, buf, progress=None, progress_addr=0, progress_size=0): """Writes a buffer into memory. This routine assumes that memory has already been erased. """ xfer_count = 0 xfer_bytes = 0 xfer_total = len(buf) xfer_base = addr while xfer_bytes < xfer_total: if __verbose and xfer_count % 512 == 0: print ("Addr 0x%x %dKBs/%dKBs..." % (xfer_base + xfer_bytes, xfer_bytes // 1024, xfer_total // 1024)) if progress and xfer_count % 2 == 0: progress(progress_addr, xfer_base + xfer_bytes - progress_addr, progress_size) # Set mem write address set_address(xfer_base+xfer_bytes) # Send DNLOAD with fw data chunk = min(__cfg_descr.wTransferSize, xfer_total-xfer_bytes) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf[xfer_bytes:xfer_bytes + chunk], __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: write memory failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: write memory failed") xfer_count += 1 xfer_bytes += chunk
def write_memory(addr, buf, progress=None, progress_addr=0, progress_size=0): """Writes a buffer into memory. This routine assumes that memory has already been erased. """ xfer_count = 0 xfer_bytes = 0 xfer_total = len(buf) xfer_base = addr while xfer_bytes < xfer_total: if __verbose and xfer_count % 512 == 0: print ("Addr 0x%x %dKBs/%dKBs..." % (xfer_base + xfer_bytes, xfer_bytes // 1024, xfer_total // 1024)) if progress and xfer_count % 2 == 0: progress(progress_addr, xfer_base + xfer_bytes - progress_addr, progress_size) # Set mem write address set_address(xfer_base+xfer_bytes) # Send DNLOAD with fw data chunk = min(__cfg_descr.wTransferSize, xfer_total-xfer_bytes) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf[xfer_bytes:xfer_bytes + chunk], __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: write memory failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: write memory failed") xfer_count += 1 xfer_bytes += chunk
[ "Writes", "a", "buffer", "into", "memory", ".", "This", "routine", "assumes", "that", "memory", "has", "already", "been", "erased", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L197-L233
[ "def", "write_memory", "(", "addr", ",", "buf", ",", "progress", "=", "None", ",", "progress_addr", "=", "0", ",", "progress_size", "=", "0", ")", ":", "xfer_count", "=", "0", "xfer_bytes", "=", "0", "xfer_total", "=", "len", "(", "buf", ")", "xfer_bas...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
write_page
Writes a single page. This routine assumes that memory has already been erased.
tools/pydfu.py
def write_page(buf, xfer_offset): """Writes a single page. This routine assumes that memory has already been erased. """ xfer_base = 0x08000000 # Set mem write address set_address(xfer_base+xfer_offset) # Send DNLOAD with fw data __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: write memory failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: write memory failed") if __verbose: print ("Write: 0x%x " % (xfer_base + xfer_offset))
def write_page(buf, xfer_offset): """Writes a single page. This routine assumes that memory has already been erased. """ xfer_base = 0x08000000 # Set mem write address set_address(xfer_base+xfer_offset) # Send DNLOAD with fw data __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: write memory failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: write memory failed") if __verbose: print ("Write: 0x%x " % (xfer_base + xfer_offset))
[ "Writes", "a", "single", "page", ".", "This", "routine", "assumes", "that", "memory", "has", "already", "been", "erased", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L236-L258
[ "def", "write_page", "(", "buf", ",", "xfer_offset", ")", ":", "xfer_base", "=", "0x08000000", "# Set mem write address", "set_address", "(", "xfer_base", "+", "xfer_offset", ")", "# Send DNLOAD with fw data", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
exit_dfu
Exit DFU mode, and start running the program.
tools/pydfu.py
def exit_dfu(): """Exit DFU mode, and start running the program.""" # set jump address set_address(0x08000000) # Send DNLOAD with 0 length to exit DFU __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, None, __TIMEOUT) try: # Execute last command if get_status() != __DFU_STATE_DFU_MANIFEST: print("Failed to reset device") # Release device usb.util.dispose_resources(__dev) except: pass
def exit_dfu(): """Exit DFU mode, and start running the program.""" # set jump address set_address(0x08000000) # Send DNLOAD with 0 length to exit DFU __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, None, __TIMEOUT) try: # Execute last command if get_status() != __DFU_STATE_DFU_MANIFEST: print("Failed to reset device") # Release device usb.util.dispose_resources(__dev) except: pass
[ "Exit", "DFU", "mode", "and", "start", "running", "the", "program", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L261-L279
[ "def", "exit_dfu", "(", ")", ":", "# set jump address", "set_address", "(", "0x08000000", ")", "# Send DNLOAD with 0 length to exit DFU", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "0", ",", "__DFU_INTERFACE", ",", "None", ",", "__TIMEOUT"...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
consume
Parses the struct defined by `fmt` from `data`, stores the parsed fields into a named tuple using `names`. Returns the named tuple, and the data with the struct stripped off.
tools/pydfu.py
def consume(fmt, data, names): """Parses the struct defined by `fmt` from `data`, stores the parsed fields into a named tuple using `names`. Returns the named tuple, and the data with the struct stripped off.""" size = struct.calcsize(fmt) return named(struct.unpack(fmt, data[:size]), names), data[size:]
def consume(fmt, data, names): """Parses the struct defined by `fmt` from `data`, stores the parsed fields into a named tuple using `names`. Returns the named tuple, and the data with the struct stripped off.""" size = struct.calcsize(fmt) return named(struct.unpack(fmt, data[:size]), names), data[size:]
[ "Parses", "the", "struct", "defined", "by", "fmt", "from", "data", "stores", "the", "parsed", "fields", "into", "a", "named", "tuple", "using", "names", ".", "Returns", "the", "named", "tuple", "and", "the", "data", "with", "the", "struct", "stripped", "of...
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L287-L292
[ "def", "consume", "(", "fmt", ",", "data", ",", "names", ")", ":", "size", "=", "struct", ".", "calcsize", "(", "fmt", ")", "return", "named", "(", "struct", ".", "unpack", "(", "fmt", ",", "data", "[", ":", "size", "]", ")", ",", "names", ")", ...
8031b7a25c21fb864fe9dd1fa40740030be66c11
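A small walk-through of consume from tools/pydfu.py, assuming consume and its named helper are importable from that file; the format string and field names below are illustrative:

import struct

# Build a little-endian header (uint16 vendor id, uint32 size) followed by a payload.
blob = struct.pack('<HI', 0x0483, 1024) + b'payload'
header, rest = consume('<HI', blob, 'vendor size')
# header behaves like {'vendor': 0x0483, 'size': 1024}; rest == b'payload'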
train
read_dfu_file
Reads a DFU file, and parses the individual elements from the file. Returns an array of elements. Each element is a dictionary with the following keys: num - The element index address - The address that the element data should be written to. size - The size of the element data. data - The element data. If an error occurs while parsing the file, then None is returned.
tools/pydfu.py
def read_dfu_file(filename): """Reads a DFU file, and parses the individual elements from the file. Returns an array of elements. Each element is a dictionary with the following keys: num - The element index address - The address that the element data should be written to. size - The size of the element ddata. data - The element data. If an error occurs while parsing the file, then None is returned. """ print("File: {}".format(filename)) with open(filename, 'rb') as fin: data = fin.read() crc = compute_crc(data[:-4]) elements = [] # Decode the DFU Prefix # # <5sBIB # < little endian # 5s char[5] signature "DfuSe" # B uint8_t version 1 # I uint32_t size Size of the DFU file (not including suffix) # B uint8_t targets Number of targets dfu_prefix, data = consume('<5sBIB', data, 'signature version size targets') print (" %(signature)s v%(version)d, image size: %(size)d, " "targets: %(targets)d" % dfu_prefix) for target_idx in range(dfu_prefix['targets']): # Decode the Image Prefix # # <6sBI255s2I # < little endian # 6s char[6] signature "Target" # B uint8_t altsetting # I uint32_t named bool indicating if a name was used # 255s char[255] name name of the target # I uint32_t size size of image (not incl prefix) # I uint32_t elements Number of elements in the image img_prefix, data = consume('<6sBI255s2I', data, 'signature altsetting named name ' 'size elements') img_prefix['num'] = target_idx if img_prefix['named']: img_prefix['name'] = cstring(img_prefix['name']) else: img_prefix['name'] = '' print(' %(signature)s %(num)d, alt setting: %(altsetting)s, ' 'name: "%(name)s", size: %(size)d, elements: %(elements)d' % img_prefix) target_size = img_prefix['size'] target_data, data = data[:target_size], data[target_size:] for elem_idx in range(img_prefix['elements']): # Decode target prefix # < little endian # I uint32_t element address # I uint32_t element size elem_prefix, target_data = consume('<2I', target_data, 'addr size') elem_prefix['num'] = elem_idx print(' %(num)d, address: 0x%(addr)08x, size: %(size)d' % elem_prefix) elem_size = elem_prefix['size'] elem_data = target_data[:elem_size] target_data = target_data[elem_size:] elem_prefix['data'] = elem_data elements.append(elem_prefix) if len(target_data): print("target %d PARSE ERROR" % target_idx) # Decode DFU Suffix # < little endian # H uint16_t device Firmware version # H uint16_t product # H uint16_t vendor # H uint16_t dfu 0x11a (DFU file format version) # 3s char[3] ufd 'UFD' # B uint8_t len 16 # I uint32_t crc32 dfu_suffix = named(struct.unpack('<4H3sBI', data[:16]), 'device product vendor dfu ufd len crc') print (' usb: %(vendor)04x:%(product)04x, device: 0x%(device)04x, ' 'dfu: 0x%(dfu)04x, %(ufd)s, %(len)d, 0x%(crc)08x' % dfu_suffix) if crc != dfu_suffix['crc']: print("CRC ERROR: computed crc32 is 0x%08x" % crc) return data = data[16:] if data: print("PARSE ERROR") return return elements
def read_dfu_file(filename): """Reads a DFU file, and parses the individual elements from the file. Returns an array of elements. Each element is a dictionary with the following keys: num - The element index address - The address that the element data should be written to. size - The size of the element ddata. data - The element data. If an error occurs while parsing the file, then None is returned. """ print("File: {}".format(filename)) with open(filename, 'rb') as fin: data = fin.read() crc = compute_crc(data[:-4]) elements = [] # Decode the DFU Prefix # # <5sBIB # < little endian # 5s char[5] signature "DfuSe" # B uint8_t version 1 # I uint32_t size Size of the DFU file (not including suffix) # B uint8_t targets Number of targets dfu_prefix, data = consume('<5sBIB', data, 'signature version size targets') print (" %(signature)s v%(version)d, image size: %(size)d, " "targets: %(targets)d" % dfu_prefix) for target_idx in range(dfu_prefix['targets']): # Decode the Image Prefix # # <6sBI255s2I # < little endian # 6s char[6] signature "Target" # B uint8_t altsetting # I uint32_t named bool indicating if a name was used # 255s char[255] name name of the target # I uint32_t size size of image (not incl prefix) # I uint32_t elements Number of elements in the image img_prefix, data = consume('<6sBI255s2I', data, 'signature altsetting named name ' 'size elements') img_prefix['num'] = target_idx if img_prefix['named']: img_prefix['name'] = cstring(img_prefix['name']) else: img_prefix['name'] = '' print(' %(signature)s %(num)d, alt setting: %(altsetting)s, ' 'name: "%(name)s", size: %(size)d, elements: %(elements)d' % img_prefix) target_size = img_prefix['size'] target_data, data = data[:target_size], data[target_size:] for elem_idx in range(img_prefix['elements']): # Decode target prefix # < little endian # I uint32_t element address # I uint32_t element size elem_prefix, target_data = consume('<2I', target_data, 'addr size') elem_prefix['num'] = elem_idx print(' %(num)d, address: 0x%(addr)08x, size: %(size)d' % elem_prefix) elem_size = elem_prefix['size'] elem_data = target_data[:elem_size] target_data = target_data[elem_size:] elem_prefix['data'] = elem_data elements.append(elem_prefix) if len(target_data): print("target %d PARSE ERROR" % target_idx) # Decode DFU Suffix # < little endian # H uint16_t device Firmware version # H uint16_t product # H uint16_t vendor # H uint16_t dfu 0x11a (DFU file format version) # 3s char[3] ufd 'UFD' # B uint8_t len 16 # I uint32_t crc32 dfu_suffix = named(struct.unpack('<4H3sBI', data[:16]), 'device product vendor dfu ufd len crc') print (' usb: %(vendor)04x:%(product)04x, device: 0x%(device)04x, ' 'dfu: 0x%(dfu)04x, %(ufd)s, %(len)d, 0x%(crc)08x' % dfu_suffix) if crc != dfu_suffix['crc']: print("CRC ERROR: computed crc32 is 0x%08x" % crc) return data = data[16:] if data: print("PARSE ERROR") return return elements
[ "Reads", "a", "DFU", "file", "and", "parses", "the", "individual", "elements", "from", "the", "file", ".", "Returns", "an", "array", "of", "elements", ".", "Each", "element", "is", "a", "dictionary", "with", "the", "following", "keys", ":", "num", "-", "...
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L305-L398
[ "def", "read_dfu_file", "(", "filename", ")", ":", "print", "(", "\"File: {}\"", ".", "format", "(", "filename", ")", ")", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "fin", ":", "data", "=", "fin", ".", "read", "(", ")", "crc", "=", "...
8031b7a25c21fb864fe9dd1fa40740030be66c11
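A hedged usage sketch for read_dfu_file above; 'firmware.dfu' is a placeholder path and the loop just echoes the element dictionaries the function returns:

# Parse a DfuSe file and report each element before any flashing is attempted.
elements = read_dfu_file('firmware.dfu')   # placeholder file name
if elements:
    for elem in elements:
        print('element %d: addr=0x%08x size=%d' % (elem['num'], elem['addr'], elem['size']))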
train
get_dfu_devices
Returns a list of USB devices which are currently in DFU mode. Additional filters (like idProduct and idVendor) can be passed in to refine the search.
tools/pydfu.py
def get_dfu_devices(*args, **kwargs): """Returns a list of USB device which are currently in DFU mode. Additional filters (like idProduct and idVendor) can be passed in to refine the search. """ # convert to list for compatibility with newer pyusb return list(usb.core.find(*args, find_all=True, custom_match=FilterDFU(), **kwargs))
def get_dfu_devices(*args, **kwargs): """Returns a list of USB device which are currently in DFU mode. Additional filters (like idProduct and idVendor) can be passed in to refine the search. """ # convert to list for compatibility with newer pyusb return list(usb.core.find(*args, find_all=True, custom_match=FilterDFU(), **kwargs))
[ "Returns", "a", "list", "of", "USB", "device", "which", "are", "currently", "in", "DFU", "mode", ".", "Additional", "filters", "(", "like", "idProduct", "and", "idVendor", ")", "can", "be", "passed", "in", "to", "refine", "the", "search", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L413-L420
[ "def", "get_dfu_devices", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# convert to list for compatibility with newer pyusb", "return", "list", "(", "usb", ".", "core", ".", "find", "(", "*", "args", ",", "find_all", "=", "True", ",", "custom_match", ...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
get_memory_layout
Returns an array which identifies the memory layout. Each entry of the array will contain a dictionary with the following keys: addr - Address of this memory segment last_addr - Last address contained within the memory segment. size - size of the segment, in bytes num_pages - number of pages in the segment page_size - size of each page, in bytes
tools/pydfu.py
def get_memory_layout(device): """Returns an array which identifies the memory layout. Each entry of the array will contain a dictionary with the following keys: addr - Address of this memory segment last_addr - Last address contained within the memory segment. size - size of the segment, in bytes num_pages - number of pages in the segment page_size - size of each page, in bytes """ cfg = device[0] intf = cfg[(0, 0)] mem_layout_str = get_string(device, intf.iInterface) mem_layout = mem_layout_str.split('/') result = [] for mem_layout_index in range(1, len(mem_layout), 2): addr = int(mem_layout[mem_layout_index], 0) segments = mem_layout[mem_layout_index + 1].split(',') seg_re = re.compile(r'(\d+)\*(\d+)(.)(.)') for segment in segments: seg_match = seg_re.match(segment) num_pages = int(seg_match.groups()[0], 10) page_size = int(seg_match.groups()[1], 10) multiplier = seg_match.groups()[2] if multiplier == 'K': page_size *= 1024 if multiplier == 'M': page_size *= 1024 * 1024 size = num_pages * page_size last_addr = addr + size - 1 result.append(named((addr, last_addr, size, num_pages, page_size), "addr last_addr size num_pages page_size")) addr += size return result
def get_memory_layout(device): """Returns an array which identifies the memory layout. Each entry of the array will contain a dictionary with the following keys: addr - Address of this memory segment last_addr - Last address contained within the memory segment. size - size of the segment, in bytes num_pages - number of pages in the segment page_size - size of each page, in bytes """ cfg = device[0] intf = cfg[(0, 0)] mem_layout_str = get_string(device, intf.iInterface) mem_layout = mem_layout_str.split('/') result = [] for mem_layout_index in range(1, len(mem_layout), 2): addr = int(mem_layout[mem_layout_index], 0) segments = mem_layout[mem_layout_index + 1].split(',') seg_re = re.compile(r'(\d+)\*(\d+)(.)(.)') for segment in segments: seg_match = seg_re.match(segment) num_pages = int(seg_match.groups()[0], 10) page_size = int(seg_match.groups()[1], 10) multiplier = seg_match.groups()[2] if multiplier == 'K': page_size *= 1024 if multiplier == 'M': page_size *= 1024 * 1024 size = num_pages * page_size last_addr = addr + size - 1 result.append(named((addr, last_addr, size, num_pages, page_size), "addr last_addr size num_pages page_size")) addr += size return result
[ "Returns", "an", "array", "which", "identifies", "the", "memory", "layout", ".", "Each", "entry", "of", "the", "array", "will", "contain", "a", "dictionary", "with", "the", "following", "keys", ":", "addr", "-", "Address", "of", "this", "memory", "segment", ...
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L423-L455
[ "def", "get_memory_layout", "(", "device", ")", ":", "cfg", "=", "device", "[", "0", "]", "intf", "=", "cfg", "[", "(", "0", ",", "0", ")", "]", "mem_layout_str", "=", "get_string", "(", "device", ",", "intf", ".", "iInterface", ")", "mem_layout", "=...
8031b7a25c21fb864fe9dd1fa40740030be66c11
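The interface string parsed by get_memory_layout typically looks like '@Internal Flash  /0x08000000/04*016Kg,...'. Below is a standalone sketch of the same segment parsing without a USB device attached; the function name and sample string are illustrative, not part of this dump:

import re

def parse_layout_string(mem_layout_str):
    # Mirrors the segment parsing inside get_memory_layout(), minus the USB access.
    mem_layout = mem_layout_str.split('/')
    seg_re = re.compile(r'(\d+)\*(\d+)(.)(.)')
    result = []
    for i in range(1, len(mem_layout), 2):
        addr = int(mem_layout[i], 0)
        for segment in mem_layout[i + 1].split(','):
            m = seg_re.match(segment)
            num_pages, page_size, mult = int(m.group(1)), int(m.group(2)), m.group(3)
            page_size *= {'K': 1024, 'M': 1024 * 1024}.get(mult, 1)
            size = num_pages * page_size
            result.append((addr, addr + size - 1, size, num_pages, page_size))
            addr += size
    return result

print(parse_layout_string('@Internal Flash  /0x08000000/04*016Kg,01*064Kg,07*128Kg'))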
train
list_dfu_devices
Prints a list of devices detected in DFU mode.
tools/pydfu.py
def list_dfu_devices(*args, **kwargs): """Prints a lits of devices detected in DFU mode.""" devices = get_dfu_devices(*args, **kwargs) if not devices: print("No DFU capable devices found") return for device in devices: print("Bus {} Device {:03d}: ID {:04x}:{:04x}" .format(device.bus, device.address, device.idVendor, device.idProduct)) layout = get_memory_layout(device) print("Memory Layout") for entry in layout: print(" 0x{:x} {:2d} pages of {:3d}K bytes" .format(entry['addr'], entry['num_pages'], entry['page_size'] // 1024))
def list_dfu_devices(*args, **kwargs): """Prints a lits of devices detected in DFU mode.""" devices = get_dfu_devices(*args, **kwargs) if not devices: print("No DFU capable devices found") return for device in devices: print("Bus {} Device {:03d}: ID {:04x}:{:04x}" .format(device.bus, device.address, device.idVendor, device.idProduct)) layout = get_memory_layout(device) print("Memory Layout") for entry in layout: print(" 0x{:x} {:2d} pages of {:3d}K bytes" .format(entry['addr'], entry['num_pages'], entry['page_size'] // 1024))
[ "Prints", "a", "lits", "of", "devices", "detected", "in", "DFU", "mode", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L458-L473
[ "def", "list_dfu_devices", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "devices", "=", "get_dfu_devices", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "not", "devices", ":", "print", "(", "\"No DFU capable devices found\"", ")", "return",...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
write_elements
Writes the indicated elements into the target memory, erasing as needed.
tools/pydfu.py
def write_elements(elements, mass_erase_used, progress=None): """Writes the indicated elements into the target memory, erasing as needed. """ mem_layout = get_memory_layout(__dev) for elem in elements: addr = elem['addr'] size = elem['size'] data = elem['data'] elem_size = size elem_addr = addr if progress: progress(elem_addr, 0, elem_size) while size > 0: write_size = size if not mass_erase_used: for segment in mem_layout: if addr >= segment['addr'] and \ addr <= segment['last_addr']: # We found the page containing the address we want to # write, erase it page_size = segment['page_size'] page_addr = addr & ~(page_size - 1) if addr + write_size > page_addr + page_size: write_size = page_addr + page_size - addr page_erase(page_addr) break write_memory(addr, data[:write_size], progress, elem_addr, elem_size) data = data[write_size:] addr += write_size size -= write_size if progress: progress(elem_addr, addr - elem_addr, elem_size)
def write_elements(elements, mass_erase_used, progress=None): """Writes the indicated elements into the target memory, erasing as needed. """ mem_layout = get_memory_layout(__dev) for elem in elements: addr = elem['addr'] size = elem['size'] data = elem['data'] elem_size = size elem_addr = addr if progress: progress(elem_addr, 0, elem_size) while size > 0: write_size = size if not mass_erase_used: for segment in mem_layout: if addr >= segment['addr'] and \ addr <= segment['last_addr']: # We found the page containing the address we want to # write, erase it page_size = segment['page_size'] page_addr = addr & ~(page_size - 1) if addr + write_size > page_addr + page_size: write_size = page_addr + page_size - addr page_erase(page_addr) break write_memory(addr, data[:write_size], progress, elem_addr, elem_size) data = data[write_size:] addr += write_size size -= write_size if progress: progress(elem_addr, addr - elem_addr, elem_size)
[ "Writes", "the", "indicated", "elements", "into", "the", "target", "memory", "erasing", "as", "needed", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L476-L510
[ "def", "write_elements", "(", "elements", ",", "mass_erase_used", ",", "progress", "=", "None", ")", ":", "mem_layout", "=", "get_memory_layout", "(", "__dev", ")", "for", "elem", "in", "elements", ":", "addr", "=", "elem", "[", "'addr'", "]", "size", "=",...
8031b7a25c21fb864fe9dd1fa40740030be66c11
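Taken together, the pydfu helpers above support a flashing flow like the sketch below; this mirrors the file's own main(), assumes a board attached in DFU mode, and uses a placeholder firmware path:

init()                                     # find and claim the DFU device
elements = read_dfu_file('firmware.dfu')   # placeholder file name
if elements:
    write_elements(elements, mass_erase_used=False, progress=cli_progress)
exit_dfu()                                 # jump to the freshly written program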
train
cli_progress
Prints a progress report suitable for use on the command line.
tools/pydfu.py
def cli_progress(addr, offset, size): """Prints a progress report suitable for use on the command line.""" width = 25 done = offset * width // size print("\r0x{:08x} {:7d} [{}{}] {:3d}% " .format(addr, size, '=' * done, ' ' * (width - done), offset * 100 // size), end="") try: sys.stdout.flush() except OSError: pass # Ignore Windows CLI "WinError 87" on Python 3.6 if offset == size: print("")
def cli_progress(addr, offset, size): """Prints a progress report suitable for use on the command line.""" width = 25 done = offset * width // size print("\r0x{:08x} {:7d} [{}{}] {:3d}% " .format(addr, size, '=' * done, ' ' * (width - done), offset * 100 // size), end="") try: sys.stdout.flush() except OSError: pass # Ignore Windows CLI "WinError 87" on Python 3.6 if offset == size: print("")
[ "Prints", "a", "progress", "report", "suitable", "for", "use", "on", "the", "command", "line", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L513-L525
[ "def", "cli_progress", "(", "addr", ",", "offset", ",", "size", ")", ":", "width", "=", "25", "done", "=", "offset", "*", "width", "//", "size", "print", "(", "\"\\r0x{:08x} {:7d} [{}{}] {:3d}% \"", ".", "format", "(", "addr", ",", "size", ",", "'='", "*...
8031b7a25c21fb864fe9dd1fa40740030be66c11
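A standalone demo of cli_progress, assuming it is imported from tools/pydfu.py; the address and sizes are arbitrary illustrative numbers:

import time

total = 4096
for done in range(0, total + 1, 512):
    cli_progress(0x08000000, done, total)  # redraws the same line until done == total
    time.sleep(0.05)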
train
main
Test program for verifying this file's functionality.
tools/pydfu.py
def main(): """Test program for verifying this files functionality.""" global __verbose # Parse CMD args parser = argparse.ArgumentParser(description='DFU Python Util') #parser.add_argument("path", help="file path") parser.add_argument( "-l", "--list", help="list available DFU devices", action="store_true", default=False ) parser.add_argument( "-m", "--mass-erase", help="mass erase device", action="store_true", default=False ) parser.add_argument( "-u", "--upload", help="read file from DFU device", dest="path", default=False ) parser.add_argument( "-v", "--verbose", help="increase output verbosity", action="store_true", default=False ) args = parser.parse_args() __verbose = args.verbose if args.list: list_dfu_devices(idVendor=__VID, idProduct=__PID) return init() if args.mass_erase: print ("Mass erase...") mass_erase() if args.path: elements = read_dfu_file(args.path) if not elements: return print("Writing memory...") write_elements(elements, args.mass_erase, progress=cli_progress) print("Exiting DFU...") exit_dfu() return print("No command specified")
def main(): """Test program for verifying this files functionality.""" global __verbose # Parse CMD args parser = argparse.ArgumentParser(description='DFU Python Util') #parser.add_argument("path", help="file path") parser.add_argument( "-l", "--list", help="list available DFU devices", action="store_true", default=False ) parser.add_argument( "-m", "--mass-erase", help="mass erase device", action="store_true", default=False ) parser.add_argument( "-u", "--upload", help="read file from DFU device", dest="path", default=False ) parser.add_argument( "-v", "--verbose", help="increase output verbosity", action="store_true", default=False ) args = parser.parse_args() __verbose = args.verbose if args.list: list_dfu_devices(idVendor=__VID, idProduct=__PID) return init() if args.mass_erase: print ("Mass erase...") mass_erase() if args.path: elements = read_dfu_file(args.path) if not elements: return print("Writing memory...") write_elements(elements, args.mass_erase, progress=cli_progress) print("Exiting DFU...") exit_dfu() return print("No command specified")
[ "Test", "program", "for", "verifying", "this", "files", "functionality", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L528-L583
[ "def", "main", "(", ")", ":", "global", "__verbose", "# Parse CMD args", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'DFU Python Util'", ")", "#parser.add_argument(\"path\", help=\"file path\")", "parser", ".", "add_argument", "(", "\"-l\...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
parse_port_pin
Parses a string and returns a (port-num, pin-num) tuple.
ports/stm32/boards/make-pins.py
def parse_port_pin(name_str): """Parses a string and returns a (port-num, pin-num) tuple.""" if len(name_str) < 3: raise ValueError("Expecting pin name to be at least 3 charcters.") if name_str[0] != 'P': raise ValueError("Expecting pin name to start with P") if name_str[1] < 'A' or name_str[1] > 'K': raise ValueError("Expecting pin port to be between A and K") port = ord(name_str[1]) - ord('A') pin_str = name_str[2:] if not pin_str.isdigit(): raise ValueError("Expecting numeric pin number.") return (port, int(pin_str))
def parse_port_pin(name_str): """Parses a string and returns a (port-num, pin-num) tuple.""" if len(name_str) < 3: raise ValueError("Expecting pin name to be at least 3 charcters.") if name_str[0] != 'P': raise ValueError("Expecting pin name to start with P") if name_str[1] < 'A' or name_str[1] > 'K': raise ValueError("Expecting pin port to be between A and K") port = ord(name_str[1]) - ord('A') pin_str = name_str[2:] if not pin_str.isdigit(): raise ValueError("Expecting numeric pin number.") return (port, int(pin_str))
[ "Parses", "a", "string", "and", "returns", "a", "(", "port", "-", "num", "pin", "-", "num", ")", "tuple", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/ports/stm32/boards/make-pins.py#L33-L45
[ "def", "parse_port_pin", "(", "name_str", ")", ":", "if", "len", "(", "name_str", ")", "<", "3", ":", "raise", "ValueError", "(", "\"Expecting pin name to be at least 3 charcters.\"", ")", "if", "name_str", "[", "0", "]", "!=", "'P'", ":", "raise", "ValueError...
8031b7a25c21fb864fe9dd1fa40740030be66c11
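Hypothetical calls to the stm32 parse_port_pin above, showing the (port-num, pin-num) mapping:

print(parse_port_pin('PA5'))    # -> (0, 5)
print(parse_port_pin('PK15'))   # -> (10, 15)
# Names that do not start with 'P', use a port past 'K', or have a non-numeric pin raise ValueError.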
train
AlternateFunction.print
Prints the C representation of this AF.
ports/stm32/boards/make-pins.py
def print(self): """Prints the C representation of this AF.""" cond_var = None if self.supported: cond_var = conditional_var('{}{}'.format(self.func, self.fn_num)) print_conditional_if(cond_var) print(' AF', end='') else: print(' //', end='') fn_num = self.fn_num if fn_num is None: fn_num = 0 print('({:2d}, {:8s}, {:2d}, {:10s}, {:8s}), // {:s}'.format(self.idx, self.func, fn_num, self.pin_type, self.ptr(), self.af_str)) print_conditional_endif(cond_var)
def print(self): """Prints the C representation of this AF.""" cond_var = None if self.supported: cond_var = conditional_var('{}{}'.format(self.func, self.fn_num)) print_conditional_if(cond_var) print(' AF', end='') else: print(' //', end='') fn_num = self.fn_num if fn_num is None: fn_num = 0 print('({:2d}, {:8s}, {:2d}, {:10s}, {:8s}), // {:s}'.format(self.idx, self.func, fn_num, self.pin_type, self.ptr(), self.af_str)) print_conditional_endif(cond_var)
[ "Prints", "the", "C", "representation", "of", "this", "AF", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/ports/stm32/boards/make-pins.py#L121-L135
[ "def", "print", "(", "self", ")", ":", "cond_var", "=", "None", "if", "self", ".", "supported", ":", "cond_var", "=", "conditional_var", "(", "'{}{}'", ".", "format", "(", "self", ".", "func", ",", "self", ".", "fn_num", ")", ")", "print_conditional_if",...
8031b7a25c21fb864fe9dd1fa40740030be66c11
train
parse_port_pin
Parses a string and returns a (port, gpio_bit) tuple.
ports/cc3200/boards/make-pins.py
def parse_port_pin(name_str): """Parses a string and returns a (port, gpio_bit) tuple.""" if len(name_str) < 3: raise ValueError("Expecting pin name to be at least 3 characters") if name_str[:2] != 'GP': raise ValueError("Expecting pin name to start with GP") if not name_str[2:].isdigit(): raise ValueError("Expecting numeric GPIO number") port = int(int(name_str[2:]) / 8) gpio_bit = 1 << int(int(name_str[2:]) % 8) return (port, gpio_bit)
def parse_port_pin(name_str): """Parses a string and returns a (port, gpio_bit) tuple.""" if len(name_str) < 3: raise ValueError("Expecting pin name to be at least 3 characters") if name_str[:2] != 'GP': raise ValueError("Expecting pin name to start with GP") if not name_str[2:].isdigit(): raise ValueError("Expecting numeric GPIO number") port = int(int(name_str[2:]) / 8) gpio_bit = 1 << int(int(name_str[2:]) % 8) return (port, gpio_bit)
[ "Parses", "a", "string", "and", "returns", "a", "(", "port", "gpio_bit", ")", "tuple", "." ]
micropython/micropython
python
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/ports/cc3200/boards/make-pins.py#L20-L30
[ "def", "parse_port_pin", "(", "name_str", ")", ":", "if", "len", "(", "name_str", ")", "<", "3", ":", "raise", "ValueError", "(", "\"Expecting pin name to be at least 3 characters\"", ")", "if", "name_str", "[", ":", "2", "]", "!=", "'GP'", ":", "raise", "Va...
8031b7a25c21fb864fe9dd1fa40740030be66c11
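Hypothetical calls to the cc3200 parse_port_pin above, showing the (port, gpio_bit) mapping:

print(parse_port_pin('GP09'))   # -> (1, 2): port 9 // 8 == 1, bit 1 << (9 % 8) == 2
print(parse_port_pin('GP30'))   # -> (3, 64)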
train
Backend.run_node
Simply run one operator and return the results. Args: outputs_info: a list of tuples, which contains the element type and shape of each output. The first element of the tuple is the dtype, and the second element is the shape. More use cases can be found in https://github.com/onnx/onnx/blob/master/onnx/backend/test/runner/__init__.py
onnx/backend/base.py
def run_node(cls, node, # type: NodeProto inputs, # type: Any device='CPU', # type: Text outputs_info=None, # type: Optional[Sequence[Tuple[numpy.dtype, Tuple[int, ...]]]] **kwargs # type: Dict[Text, Any] ): # type: (...) -> Optional[Tuple[Any, ...]] '''Simple run one operator and return the results. Args: outputs_info: a list of tuples, which contains the element type and shape of each output. First element of the tuple is the dtype, and the second element is the shape. More use case can be found in https://github.com/onnx/onnx/blob/master/onnx/backend/test/runner/__init__.py ''' # TODO Remove Optional from return type if 'opset_version' in kwargs: special_context = c_checker.CheckerContext() special_context.ir_version = IR_VERSION special_context.opset_imports = {'': kwargs['opset_version']} # type: ignore onnx.checker.check_node(node, special_context) else: onnx.checker.check_node(node) return None
def run_node(cls, node, # type: NodeProto inputs, # type: Any device='CPU', # type: Text outputs_info=None, # type: Optional[Sequence[Tuple[numpy.dtype, Tuple[int, ...]]]] **kwargs # type: Dict[Text, Any] ): # type: (...) -> Optional[Tuple[Any, ...]] '''Simple run one operator and return the results. Args: outputs_info: a list of tuples, which contains the element type and shape of each output. First element of the tuple is the dtype, and the second element is the shape. More use case can be found in https://github.com/onnx/onnx/blob/master/onnx/backend/test/runner/__init__.py ''' # TODO Remove Optional from return type if 'opset_version' in kwargs: special_context = c_checker.CheckerContext() special_context.ir_version = IR_VERSION special_context.opset_imports = {'': kwargs['opset_version']} # type: ignore onnx.checker.check_node(node, special_context) else: onnx.checker.check_node(node) return None
[ "Simple", "run", "one", "operator", "and", "return", "the", "results", ".", "Args", ":", "outputs_info", ":", "a", "list", "of", "tuples", "which", "contains", "the", "element", "type", "and", "shape", "of", "each", "output", ".", "First", "element", "of",...
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/backend/base.py#L89-L111
[ "def", "run_node", "(", "cls", ",", "node", ",", "# type: NodeProto", "inputs", ",", "# type: Any", "device", "=", "'CPU'", ",", "# type: Text", "outputs_info", "=", "None", ",", "# type: Optional[Sequence[Tuple[numpy.dtype, Tuple[int, ...]]]]", "*", "*", "kwargs", "#...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
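A hedged sketch of calling the base-class run_node above; it only checker-validates the node and returns None, whereas a concrete backend would return output arrays. The Relu node and input array are illustrative:

import numpy as np
from onnx import helper
from onnx.backend.base import Backend

node = helper.make_node('Relu', inputs=['x'], outputs=['y'])
x = np.array([-1.0, 2.0], dtype=np.float32)
result = Backend.run_node(node, [x])   # None for this reference base class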
train
load_external_data_for_tensor
Load data from an external file for tensor. @params tensor: a TensorProto object. base_dir: directory that contains the external data.
onnx/external_data_helper.py
def load_external_data_for_tensor(tensor, base_dir): # type: (TensorProto, Text) -> None """ Load data from an external file for tensor. @params tensor: a TensorProto object. base_dir: directory that contains the external data. """ if tensor.HasField("raw_data"): # already loaded return info = ExternalDataInfo(tensor) file_location = _sanitize_path(info.location) external_data_file_path = os.path.join(base_dir, file_location) with open(external_data_file_path, 'rb') as data_file: if info.offset: data_file.seek(info.offset) if info.length: tensor.raw_data = data_file.read(info.length) else: tensor.raw_data = data_file.read()
def load_external_data_for_tensor(tensor, base_dir): # type: (TensorProto, Text) -> None """ Load data from an external file for tensor. @params tensor: a TensorProto object. base_dir: directory that contains the external data. """ if tensor.HasField("raw_data"): # already loaded return info = ExternalDataInfo(tensor) file_location = _sanitize_path(info.location) external_data_file_path = os.path.join(base_dir, file_location) with open(external_data_file_path, 'rb') as data_file: if info.offset: data_file.seek(info.offset) if info.length: tensor.raw_data = data_file.read(info.length) else: tensor.raw_data = data_file.read()
[ "Load", "data", "from", "an", "external", "file", "for", "tensor", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L32-L54
[ "def", "load_external_data_for_tensor", "(", "tensor", ",", "base_dir", ")", ":", "# type: (TensorProto, Text) -> None", "if", "tensor", ".", "HasField", "(", "\"raw_data\"", ")", ":", "# already loaded", "return", "info", "=", "ExternalDataInfo", "(", "tensor", ")", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
train
load_external_data_for_model
Loads external tensors into model @params model: ModelProto to load external data to base_dir: directory that contains external data
onnx/external_data_helper.py
def load_external_data_for_model(model, base_dir): # type: (ModelProto, Text) -> None """ Loads external tensors into model @params model: ModelProto to load external data to base_dir: directory that contains external data """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): load_external_data_for_tensor(tensor, base_dir)
def load_external_data_for_model(model, base_dir): # type: (ModelProto, Text) -> None """ Loads external tensors into model @params model: ModelProto to load external data to base_dir: directory that contains external data """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): load_external_data_for_tensor(tensor, base_dir)
[ "Loads", "external", "tensors", "into", "model" ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L57-L67
[ "def", "load_external_data_for_model", "(", "model", ",", "base_dir", ")", ":", "# type: (ModelProto, Text) -> None", "for", "tensor", "in", "_get_all_tensors", "(", "model", ")", ":", "if", "uses_external_data", "(", "tensor", ")", ":", "load_external_data_for_tensor",...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
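A usage sketch for the loader above, assuming the onnx package is installed; 'model.onnx' and the base directory are placeholder paths:

import onnx
from onnx.external_data_helper import load_external_data_for_model

model = onnx.load('model.onnx')                     # placeholder path
load_external_data_for_model(model, base_dir='.')   # fills raw_data for tensors stored externally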
train
convert_model_to_external_data
Call to set all tensors as external data. save_model saves all the tensor data as external data after calling this function. @params model: ModelProto to be converted. all_tensors_to_one_file: If true, save all tensors to one external file specified by location. If false, save each tensor to a file named with the tensor name. location: specify the external file that all tensors are saved to. If not specified, the model name is used.
onnx/external_data_helper.py
def convert_model_to_external_data(model, all_tensors_to_one_file=True, location=None): # type: (ModelProto, bool, Optional[Text]) -> None """ call to set all tensors as external data. save_model saves all the tensors data as external data after calling this function. @params model: ModelProto to be converted. all_tensors_to_one_file: If true, save all tensors to one external file specified by location. If false, save each tensor to a file named with the tensor name. location: specify the external file that all tensors to save to. If not specified, will use the model name. """ if all_tensors_to_one_file: file_name = Text(uuid.uuid1()) if location: file_name = location for tensor in _get_all_tensors(model): set_external_data(tensor, file_name) else: for tensor in _get_all_tensors(model): set_external_data(tensor, tensor.name)
def convert_model_to_external_data(model, all_tensors_to_one_file=True, location=None): # type: (ModelProto, bool, Optional[Text]) -> None """ call to set all tensors as external data. save_model saves all the tensors data as external data after calling this function. @params model: ModelProto to be converted. all_tensors_to_one_file: If true, save all tensors to one external file specified by location. If false, save each tensor to a file named with the tensor name. location: specify the external file that all tensors to save to. If not specified, will use the model name. """ if all_tensors_to_one_file: file_name = Text(uuid.uuid1()) if location: file_name = location for tensor in _get_all_tensors(model): set_external_data(tensor, file_name) else: for tensor in _get_all_tensors(model): set_external_data(tensor, tensor.name)
[ "call", "to", "set", "all", "tensors", "as", "external", "data", ".", "save_model", "saves", "all", "the", "tensors", "data", "as", "external", "data", "after", "calling", "this", "function", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L92-L111
[ "def", "convert_model_to_external_data", "(", "model", ",", "all_tensors_to_one_file", "=", "True", ",", "location", "=", "None", ")", ":", "# type: (ModelProto, bool, Optional[Text]) -> None", "if", "all_tensors_to_one_file", ":", "file_name", "=", "Text", "(", "uuid", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
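A usage sketch for convert_model_to_external_data above; per its own docstring, a subsequent save_model writes the tensor payloads to the named side file. The model and file names are placeholders:

import onnx
from onnx.external_data_helper import convert_model_to_external_data

model = onnx.load('big_model.onnx')              # placeholder path
convert_model_to_external_data(model, all_tensors_to_one_file=True,
                               location='big_model.bin')  # placeholder side-file name
onnx.save(model, 'big_model_external.onnx')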
train
convert_model_from_external_data
call to set all tensors data as embedded data. save_model saves all the tensors data as embedded data after calling this function. @params model: ModelProto to be converted.
onnx/external_data_helper.py
def convert_model_from_external_data(model): # type: (ModelProto) -> None """ call to set all tensors data as embedded data. save_model saves all the tensors data as embedded data after calling this function. @params model: ModelProto to be converted. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): if not tensor.HasField("raw_data"): raise ValueError("raw_data field doesn't exist.") del tensor.external_data[:] tensor.data_location = TensorProto.DEFAULT
def convert_model_from_external_data(model): # type: (ModelProto) -> None """ call to set all tensors data as embedded data. save_model saves all the tensors data as embedded data after calling this function. @params model: ModelProto to be converted. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): if not tensor.HasField("raw_data"): raise ValueError("raw_data field doesn't exist.") del tensor.external_data[:] tensor.data_location = TensorProto.DEFAULT
[ "call", "to", "set", "all", "tensors", "data", "as", "embedded", "data", ".", "save_model", "saves", "all", "the", "tensors", "data", "as", "embedded", "data", "after", "calling", "this", "function", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L114-L125
[ "def", "convert_model_from_external_data", "(", "model", ")", ":", "# type: (ModelProto) -> None", "for", "tensor", "in", "_get_all_tensors", "(", "model", ")", ":", "if", "uses_external_data", "(", "tensor", ")", ":", "if", "not", "tensor", ".", "HasField", "(", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
train
save_external_data
Write tensor data to an external file according to information in the `external_data` field. @params tensor: Tensor object to be serialized base_path: System path of a folder where tensor data is to be stored
onnx/external_data_helper.py
def save_external_data(tensor, base_path): # type: (TensorProto, Text) -> None """ Write tensor data to an external file according to information in the `external_data` field. @params tensor: Tensor object to be serialized base_path: System path of a folder where tensor data is to be stored """ info = ExternalDataInfo(tensor) external_data_file_path = os.path.join(base_path, info.location) # Retrieve the tensor's data from raw_data or load external file if not tensor.HasField("raw_data"): raise ValueError("raw_data field doesn't exist.") # Create file if it doesn't exist if not os.path.isfile(external_data_file_path): open(external_data_file_path, 'ab').close() # Open file for reading and writing at random locations ('r+b') with open(external_data_file_path, 'r+b') as data_file: data_file.seek(0, 2) if info.offset is not None: # Pad file to required offset if needed file_size = data_file.tell() if info.offset > file_size: data_file.write(b"\0" * (info.offset - file_size)) data_file.seek(info.offset) offset = data_file.tell() data_file.write(tensor.raw_data) set_external_data(tensor, info.location, offset, data_file.tell() - offset)
def save_external_data(tensor, base_path): # type: (TensorProto, Text) -> None """ Write tensor data to an external file according to information in the `external_data` field. @params tensor: Tensor object to be serialized base_path: System path of a folder where tensor data is to be stored """ info = ExternalDataInfo(tensor) external_data_file_path = os.path.join(base_path, info.location) # Retrieve the tensor's data from raw_data or load external file if not tensor.HasField("raw_data"): raise ValueError("raw_data field doesn't exist.") # Create file if it doesn't exist if not os.path.isfile(external_data_file_path): open(external_data_file_path, 'ab').close() # Open file for reading and writing at random locations ('r+b') with open(external_data_file_path, 'r+b') as data_file: data_file.seek(0, 2) if info.offset is not None: # Pad file to required offset if needed file_size = data_file.tell() if info.offset > file_size: data_file.write(b"\0" * (info.offset - file_size)) data_file.seek(info.offset) offset = data_file.tell() data_file.write(tensor.raw_data) set_external_data(tensor, info.location, offset, data_file.tell() - offset)
[ "Write", "tensor", "data", "to", "an", "external", "file", "according", "to", "information", "in", "the", "external_data", "field", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L128-L159
[ "def", "save_external_data", "(", "tensor", ",", "base_path", ")", ":", "# type: (TensorProto, Text) -> None", "info", "=", "ExternalDataInfo", "(", "tensor", ")", "external_data_file_path", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "info", ".", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
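A minimal, self-contained sketch of save_external_data, with a made-up tensor and file name: it appends one tensor's raw bytes to ./weights.bin and lets the helper record the offset and length.

import numpy as np
from onnx import numpy_helper
from onnx.external_data_helper import set_external_data, save_external_data

tensor = numpy_helper.from_array(np.ones((2, 2), dtype=np.float32), name="w")  # raw_data is set
set_external_data(tensor, location="weights.bin")  # mark where the bytes should live (placeholder name)
save_external_data(tensor, base_path=".")          # append raw_data to ./weights.bin
tensor.ClearField("raw_data")                      # keep only the external reference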
train
_get_attribute_tensors
Create an iterator of tensors from node attributes of an ONNX model.
onnx/external_data_helper.py
def _get_attribute_tensors(onnx_model_proto): # type: (ModelProto) -> Iterable[TensorProto] """Create an iterator of tensors from node attributes of an ONNX model.""" for node in onnx_model_proto.graph.node: for attribute in node.attribute: if attribute.HasField("t"): yield attribute.t for tensor in attribute.tensors: yield tensor
def _get_attribute_tensors(onnx_model_proto): # type: (ModelProto) -> Iterable[TensorProto] """Create an iterator of tensors from node attributes of an ONNX model.""" for node in onnx_model_proto.graph.node: for attribute in node.attribute: if attribute.HasField("t"): yield attribute.t for tensor in attribute.tensors: yield tensor
[ "Create", "an", "iterator", "of", "tensors", "from", "node", "attributes", "of", "an", "ONNX", "model", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L174-L181
[ "def", "_get_attribute_tensors", "(", "onnx_model_proto", ")", ":", "# type: (ModelProto) -> Iterable[TensorProto]", "for", "node", "in", "onnx_model_proto", ".", "graph", ".", "node", ":", "for", "attribute", "in", "node", ".", "attribute", ":", "if", "attribute", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
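The leading underscore marks this as a module-internal helper, but a hedged sketch of what it yields can still be useful: tensors embedded in node attributes (for example Constant nodes), as opposed to graph initializers. The model path is a placeholder.

import onnx
from onnx.external_data_helper import _get_attribute_tensors  # internal helper, may change

model = onnx.load("model.onnx")  # assumed path
print(sum(1 for _ in _get_attribute_tensors(model)), "tensors stored in node attributes")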
train
remove_external_data_field
Remove a field from a Tensor's external_data key-value store. Modifies the tensor object in place. @params tensor: Tensor object from which the value will be removed; field_key: The key of the field to be removed
onnx/external_data_helper.py
def remove_external_data_field(tensor, field_key): # type: (TensorProto, Text) -> None """ Remove a field from a Tensor's external_data key-value store. Modifies tensor object in place. @params tensor: Tensor object from which value will be removed field_key: The key of the field to be removed """ for (i, field) in enumerate(tensor.external_data): if field.key == field_key: del tensor.external_data[i]
def remove_external_data_field(tensor, field_key): # type: (TensorProto, Text) -> None """ Remove a field from a Tensor's external_data key-value store. Modifies tensor object in place. @params tensor: Tensor object from which value will be removed field_key: The key of the field to be removed """ for (i, field) in enumerate(tensor.external_data): if field.key == field_key: del tensor.external_data[i]
[ "Remove", "a", "field", "from", "a", "Tensor", "s", "external_data", "key", "-", "value", "store", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L197-L209
[ "def", "remove_external_data_field", "(", "tensor", ",", "field_key", ")", ":", "# type: (TensorProto, Text) -> None", "for", "(", "i", ",", "field", ")", "in", "enumerate", "(", "tensor", ".", "external_data", ")", ":", "if", "field", ".", "key", "==", "field...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
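A self-contained sketch of remove_external_data_field; the tensor, the location, and the extra "checksum" entry are all invented for illustration.

from onnx import TensorProto
from onnx.external_data_helper import set_external_data, remove_external_data_field

tensor = TensorProto(name="w")
set_external_data(tensor, location="weights.bin")   # adds the location entry (placeholder name)
entry = tensor.external_data.add()                  # add a hypothetical extra field
entry.key, entry.value = "checksum", "deadbeef"
remove_external_data_field(tensor, "checksum")      # strip it again
assert all(f.key != "checksum" for f in tensor.external_data)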
train
write_external_data_tensors
Write external data of all tensors to files on disk. Note: This function also strips basepath information from all tensors' external_data fields. @params model: Model object which is the source of tensors to serialize. filepath: System path to the directory which should be treated as base path for external data. @return The modified model object.
onnx/external_data_helper.py
def write_external_data_tensors(model, filepath): # type: (ModelProto, Text) -> ModelProto """ Write external data of all tensors to files on disk. Note: This function also strips basepath information from all tensors' external_data fields. @params model: Model object which is the source of tensors to serialize. filepath: System path to the directory which should be treated as base path for external data. @return The modified model object. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): save_external_data(tensor, filepath) tensor.ClearField(str('raw_data')) return model
def write_external_data_tensors(model, filepath): # type: (ModelProto, Text) -> ModelProto """ Write external data of all tensors to files on disk. Note: This function also strips basepath information from all tensors' external_data fields. @params model: Model object which is the source of tensors to serialize. filepath: System path to the directory which should be treated as base path for external data. @return The modified model object. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): save_external_data(tensor, filepath) tensor.ClearField(str('raw_data')) return model
[ "Write", "external", "data", "of", "all", "tensors", "to", "files", "on", "disk", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L212-L230
[ "def", "write_external_data_tensors", "(", "model", ",", "filepath", ")", ":", "# type: (ModelProto, Text) -> ModelProto", "for", "tensor", "in", "_get_all_tensors", "(", "model", ")", ":", "if", "uses_external_data", "(", "tensor", ")", ":", "save_external_data", "("...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
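A hedged end-to-end sketch: mark every initializer that carries raw_data as external, write the bytes to disk, and save a small model file alongside them. All paths and the per-tensor file naming are placeholders.

import os
import onnx
from onnx.external_data_helper import set_external_data, write_external_data_tensors

model = onnx.load("big_model.onnx")  # assumed input with embedded weights
os.makedirs("exported", exist_ok=True)
for i, tensor in enumerate(model.graph.initializer):
    if tensor.HasField("raw_data"):
        set_external_data(tensor, location="tensor_{}.bin".format(i))
write_external_data_tensors(model, "exported")  # writes the .bin files and strips raw_data
onnx.save(model, os.path.join("exported", "big_model.onnx"))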
train
PkgWriter._import
Imports a stdlib path and returns a handle to it, e.g. self._import("typing", "Optional") -> "Optional"
tools/protoc-gen-mypy.py
def _import(self, path, name): # type: (Text, Text) -> Text """Imports a stdlib path and returns a handle to it eg. self._import("typing", "Optional") -> "Optional" """ imp = path.replace('/', '.') self.imports[imp].add(name) return name
def _import(self, path, name): # type: (Text, Text) -> Text """Imports a stdlib path and returns a handle to it eg. self._import("typing", "Optional") -> "Optional" """ imp = path.replace('/', '.') self.imports[imp].add(name) return name
[ "Imports", "a", "stdlib", "path", "and", "returns", "a", "handle", "to", "it", "eg", ".", "self", ".", "_import", "(", "typing", "Optional", ")", "-", ">", "Optional" ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/tools/protoc-gen-mypy.py#L74-L81
[ "def", "_import", "(", "self", ",", "path", ",", "name", ")", ":", "# type: (Text, Text) -> Text", "imp", "=", "path", ".", "replace", "(", "'/'", ",", "'.'", ")", "self", ".", "imports", "[", "imp", "]", ".", "add", "(", "name", ")", "return", "name...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
train
PkgWriter._import_message
Import a referenced message and return a handle
tools/protoc-gen-mypy.py
def _import_message(self, type_name): # type: (d.FieldDescriptorProto) -> Text """Import a referenced message and return a handle""" name = cast(Text, type_name) if name[0] == '.' and name[1].isupper() and name[2].islower(): # Message defined in this file return name[1:] message_fd = self.descriptors.message_to_fd[name] if message_fd.name == self.fd.name: # message defined in this package split = name.split('.') for i, segment in enumerate(split): if segment and segment[0].isupper() and segment[1].islower(): return ".".join(split[i:]) # Not in package. Must import split = name.split(".") for i, segment in enumerate(split): if segment and segment[0].isupper() and segment[1].islower(): assert message_fd.name.endswith('.proto') import_name = self._import(message_fd.name[:-6].replace('-', '_') + "_pb2", segment) remains = ".".join(split[i + 1:]) if not remains: return import_name raise AssertionError("Don't support nested imports yet") # return new_nested_import(import_name, remains) raise AssertionError("Could not parse local name " + name)
def _import_message(self, type_name): # type: (d.FieldDescriptorProto) -> Text """Import a referenced message and return a handle""" name = cast(Text, type_name) if name[0] == '.' and name[1].isupper() and name[2].islower(): # Message defined in this file return name[1:] message_fd = self.descriptors.message_to_fd[name] if message_fd.name == self.fd.name: # message defined in this package split = name.split('.') for i, segment in enumerate(split): if segment and segment[0].isupper() and segment[1].islower(): return ".".join(split[i:]) # Not in package. Must import split = name.split(".") for i, segment in enumerate(split): if segment and segment[0].isupper() and segment[1].islower(): assert message_fd.name.endswith('.proto') import_name = self._import(message_fd.name[:-6].replace('-', '_') + "_pb2", segment) remains = ".".join(split[i + 1:]) if not remains: return import_name raise AssertionError("Don't support nested imports yet") # return new_nested_import(import_name, remains) raise AssertionError("Could not parse local name " + name)
[ "Import", "a", "referenced", "message", "and", "return", "a", "handle" ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/tools/protoc-gen-mypy.py#L83-L112
[ "def", "_import_message", "(", "self", ",", "type_name", ")", ":", "# type: (d.FieldDescriptorProto) -> Text", "name", "=", "cast", "(", "Text", ",", "type_name", ")", "if", "name", "[", "0", "]", "==", "'.'", "and", "name", "[", "1", "]", ".", "isupper", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
train
mypy_type_check.run
Run command.
setup.py
def run(self): """Run command.""" onnx_script = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "tools/mypy-onnx.py")) returncode = subprocess.call([sys.executable, onnx_script]) sys.exit(returncode)
def run(self): """Run command.""" onnx_script = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "tools/mypy-onnx.py")) returncode = subprocess.call([sys.executable, onnx_script]) sys.exit(returncode)
[ "Run", "command", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/setup.py#L252-L256
[ "def", "run", "(", "self", ")", ":", "onnx_script", "=", "os", ".", "path", ".", "realpath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ",", "\...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
train
make_node
Construct a NodeProto. Arguments: op_type (string): The name of the operator to construct inputs (list of string): list of input names outputs (list of string): list of output names name (string, default None): optional unique identifier for NodeProto doc_string (string, default None): optional documentation string for NodeProto domain (string, default None): optional domain for NodeProto. If it's None, we will just use default domain (which is empty) **kwargs (dict): the attributes of the node. The acceptable values are documented in :func:`make_attribute`.
onnx/helper.py
def make_node( op_type, # type: Text inputs, # type: Sequence[Text] outputs, # type: Sequence[Text] name=None, # type: Optional[Text] doc_string=None, # type: Optional[Text] domain=None, # type: Optional[Text] **kwargs # type: Any ): # type: (...) -> NodeProto """Construct a NodeProto. Arguments: op_type (string): The name of the operator to construct inputs (list of string): list of input names outputs (list of string): list of output names name (string, default None): optional unique identifier for NodeProto doc_string (string, default None): optional documentation string for NodeProto domain (string, default None): optional domain for NodeProto. If it's None, we will just use default domain (which is empty) **kwargs (dict): the attributes of the node. The acceptable values are documented in :func:`make_attribute`. """ node = NodeProto() node.op_type = op_type node.input.extend(inputs) node.output.extend(outputs) if name: node.name = name if doc_string: node.doc_string = doc_string if domain is not None: node.domain = domain if kwargs: node.attribute.extend( make_attribute(key, value) for key, value in sorted(kwargs.items())) return node
def make_node( op_type, # type: Text inputs, # type: Sequence[Text] outputs, # type: Sequence[Text] name=None, # type: Optional[Text] doc_string=None, # type: Optional[Text] domain=None, # type: Optional[Text] **kwargs # type: Any ): # type: (...) -> NodeProto """Construct a NodeProto. Arguments: op_type (string): The name of the operator to construct inputs (list of string): list of input names outputs (list of string): list of output names name (string, default None): optional unique identifier for NodeProto doc_string (string, default None): optional documentation string for NodeProto domain (string, default None): optional domain for NodeProto. If it's None, we will just use default domain (which is empty) **kwargs (dict): the attributes of the node. The acceptable values are documented in :func:`make_attribute`. """ node = NodeProto() node.op_type = op_type node.input.extend(inputs) node.output.extend(outputs) if name: node.name = name if doc_string: node.doc_string = doc_string if domain is not None: node.domain = domain if kwargs: node.attribute.extend( make_attribute(key, value) for key, value in sorted(kwargs.items())) return node
[ "Construct", "a", "NodeProto", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L20-L57
[ "def", "make_node", "(", "op_type", ",", "# type: Text", "inputs", ",", "# type: Sequence[Text]", "outputs", ",", "# type: Sequence[Text]", "name", "=", "None", ",", "# type: Optional[Text]", "doc_string", "=", "None", ",", "# type: Optional[Text]", "domain", "=", "No...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
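A short sketch of make_node with made-up tensor names; keyword arguments beyond the documented parameters are turned into attributes via make_attribute.

from onnx import helper

relu = helper.make_node("Relu", inputs=["x"], outputs=["y"], name="relu_0")
pad = helper.make_node("Pad", inputs=["y"], outputs=["y_padded"],
                       mode="constant", pads=[0, 0, 1, 1])  # kwargs become attributes
print(relu.op_type, [a.name for a in pad.attribute])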
train
make_operatorsetid
Construct an OperatorSetIdProto. Arguments: domain (string): The domain of the operator set id version (integer): Version of operator set id
onnx/helper.py
def make_operatorsetid( domain, # type: Text version, # type: int ): # type: (...) -> OperatorSetIdProto """Construct an OperatorSetIdProto. Arguments: domain (string): The domain of the operator set id version (integer): Version of operator set id """ operatorsetid = OperatorSetIdProto() operatorsetid.domain = domain operatorsetid.version = version return operatorsetid
def make_operatorsetid( domain, # type: Text version, # type: int ): # type: (...) -> OperatorSetIdProto """Construct an OperatorSetIdProto. Arguments: domain (string): The domain of the operator set id version (integer): Version of operator set id """ operatorsetid = OperatorSetIdProto() operatorsetid.domain = domain operatorsetid.version = version return operatorsetid
[ "Construct", "an", "OperatorSetIdProto", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L60-L73
[ "def", "make_operatorsetid", "(", "domain", ",", "# type: Text", "version", ",", "# type: int", ")", ":", "# type: (...) -> OperatorSetIdProto", "operatorsetid", "=", "OperatorSetIdProto", "(", ")", "operatorsetid", ".", "domain", "=", "domain", "operatorsetid", ".", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
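A hedged sketch showing the usual destination of an OperatorSetIdProto: the opset_imports of a model. The empty graph is only there to keep the snippet self-contained.

from onnx import helper

graph = helper.make_graph([], "empty_graph", inputs=[], outputs=[])
opset = helper.make_operatorsetid("", 9)  # "" selects the default ONNX domain
model = helper.make_model(graph, opset_imports=[opset])
print(model.opset_import)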
train
_to_bytes_or_false
An internal helper to convert the input to bytes or to False. The criteria for conversion are as follows and should be Python 2 and 3 compatible: - If val is py2 str or py3 bytes: return the bytes unchanged - If val is py2 unicode or py3 str: return val.encode('utf-8') - Otherwise, return False
onnx/helper.py
def _to_bytes_or_false(val): # type: (Union[Text, bytes]) -> Union[bytes, bool] """An internal graph to convert the input to a bytes or to False. The criteria for conversion is as follows and should be python 2 and 3 compatible: - If val is py2 str or py3 bytes: return bytes - If val is py2 unicode or py3 str: return val.decode('utf-8') - Otherwise, return False """ if isinstance(val, bytes): return val else: try: return val.encode('utf-8') except AttributeError: return False
def _to_bytes_or_false(val): # type: (Union[Text, bytes]) -> Union[bytes, bool] """An internal graph to convert the input to a bytes or to False. The criteria for conversion is as follows and should be python 2 and 3 compatible: - If val is py2 str or py3 bytes: return bytes - If val is py2 unicode or py3 str: return val.decode('utf-8') - Otherwise, return False """ if isinstance(val, bytes): return val else: try: return val.encode('utf-8') except AttributeError: return False
[ "An", "internal", "graph", "to", "convert", "the", "input", "to", "a", "bytes", "or", "to", "False", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L179-L194
[ "def", "_to_bytes_or_false", "(", "val", ")", ":", "# type: (Union[Text, bytes]) -> Union[bytes, bool]", "if", "isinstance", "(", "val", ",", "bytes", ")", ":", "return", "val", "else", ":", "try", ":", "return", "val", ".", "encode", "(", "'utf-8'", ")", "exc...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
train
make_attribute
Makes an AttributeProto based on the value type.
onnx/helper.py
def make_attribute( key, # type: Text value, # type: Any doc_string=None # type: Optional[Text] ): # type: (...) -> AttributeProto """Makes an AttributeProto based on the value type.""" attr = AttributeProto() attr.name = key if doc_string: attr.doc_string = doc_string is_iterable = isinstance(value, collections.Iterable) bytes_or_false = _to_bytes_or_false(value) # First, singular cases # float if isinstance(value, float): attr.f = value attr.type = AttributeProto.FLOAT # integer elif isinstance(value, numbers.Integral): attr.i = cast(int, value) attr.type = AttributeProto.INT # string elif bytes_or_false: assert isinstance(bytes_or_false, bytes) attr.s = bytes_or_false attr.type = AttributeProto.STRING elif isinstance(value, TensorProto): attr.t.CopyFrom(value) attr.type = AttributeProto.TENSOR elif isinstance(value, GraphProto): attr.g.CopyFrom(value) attr.type = AttributeProto.GRAPH # third, iterable cases elif is_iterable: byte_array = [_to_bytes_or_false(v) for v in value] if all(isinstance(v, float) for v in value): attr.floats.extend(value) attr.type = AttributeProto.FLOATS elif all(isinstance(v, numbers.Integral) for v in value): # Turn np.int32/64 into Python built-in int. attr.ints.extend(int(v) for v in value) attr.type = AttributeProto.INTS elif all(byte_array): attr.strings.extend(cast(List[bytes], byte_array)) attr.type = AttributeProto.STRINGS elif all(isinstance(v, TensorProto) for v in value): attr.tensors.extend(value) attr.type = AttributeProto.TENSORS elif all(isinstance(v, GraphProto) for v in value): attr.graphs.extend(value) attr.type = AttributeProto.GRAPHS else: raise ValueError( "You passed in an iterable attribute but I cannot figure out " "its applicable type.") else: raise ValueError( 'Value "{}" is not valid attribute data type.'.format(value)) return attr
def make_attribute( key, # type: Text value, # type: Any doc_string=None # type: Optional[Text] ): # type: (...) -> AttributeProto """Makes an AttributeProto based on the value type.""" attr = AttributeProto() attr.name = key if doc_string: attr.doc_string = doc_string is_iterable = isinstance(value, collections.Iterable) bytes_or_false = _to_bytes_or_false(value) # First, singular cases # float if isinstance(value, float): attr.f = value attr.type = AttributeProto.FLOAT # integer elif isinstance(value, numbers.Integral): attr.i = cast(int, value) attr.type = AttributeProto.INT # string elif bytes_or_false: assert isinstance(bytes_or_false, bytes) attr.s = bytes_or_false attr.type = AttributeProto.STRING elif isinstance(value, TensorProto): attr.t.CopyFrom(value) attr.type = AttributeProto.TENSOR elif isinstance(value, GraphProto): attr.g.CopyFrom(value) attr.type = AttributeProto.GRAPH # third, iterable cases elif is_iterable: byte_array = [_to_bytes_or_false(v) for v in value] if all(isinstance(v, float) for v in value): attr.floats.extend(value) attr.type = AttributeProto.FLOATS elif all(isinstance(v, numbers.Integral) for v in value): # Turn np.int32/64 into Python built-in int. attr.ints.extend(int(v) for v in value) attr.type = AttributeProto.INTS elif all(byte_array): attr.strings.extend(cast(List[bytes], byte_array)) attr.type = AttributeProto.STRINGS elif all(isinstance(v, TensorProto) for v in value): attr.tensors.extend(value) attr.type = AttributeProto.TENSORS elif all(isinstance(v, GraphProto) for v in value): attr.graphs.extend(value) attr.type = AttributeProto.GRAPHS else: raise ValueError( "You passed in an iterable attribute but I cannot figure out " "its applicable type.") else: raise ValueError( 'Value "{}" is not valid attribute data type.'.format(value)) return attr
[ "Makes", "an", "AttributeProto", "based", "on", "the", "value", "type", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L197-L256
[ "def", "make_attribute", "(", "key", ",", "# type: Text", "value", ",", "# type: Any", "doc_string", "=", "None", "# type: Optional[Text]", ")", ":", "# type: (...) -> AttributeProto", "attr", "=", "AttributeProto", "(", ")", "attr", ".", "name", "=", "key", "if",...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
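A sketch of the dispatch rules above, using arbitrary attribute names; each call picks the AttributeProto type from the Python value it is given.

from onnx import helper, AttributeProto

a_int = helper.make_attribute("axis", 1)               # -> INT
a_float = helper.make_attribute("epsilon", 1e-5)       # -> FLOAT
a_str = helper.make_attribute("mode", "constant")      # -> STRING
a_ints = helper.make_attribute("pads", [0, 0, 1, 1])   # -> INTS
assert a_ints.type == AttributeProto.INTS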
train
make_tensor_value_info
Makes a ValueInfoProto based on the data type and shape.
onnx/helper.py
def make_tensor_value_info( name, # type: Text elem_type, # type: int shape, # type: Optional[Sequence[Union[Text, int]]] doc_string="", # type: Text shape_denotation=None, # type: Optional[List[Text]] ): # type: (...) -> ValueInfoProto """Makes a ValueInfoProto based on the data type and shape.""" value_info_proto = ValueInfoProto() value_info_proto.name = name if doc_string: value_info_proto.doc_string = doc_string tensor_type_proto = value_info_proto.type.tensor_type tensor_type_proto.elem_type = elem_type tensor_shape_proto = tensor_type_proto.shape if shape is not None: # You might think this is a no-op (extending a normal Python # list by [] certainly is), but protobuf lists work a little # differently; if a field is never set, it is omitted from the # resulting protobuf; a list that is explicitly set to be # empty will get an (empty) entry in the protobuf. This # difference is visible to our consumers, so make sure we emit # an empty shape! tensor_shape_proto.dim.extend([]) if shape_denotation: if len(shape_denotation) != len(shape): raise ValueError( 'Invalid shape_denotation. ' 'Must be of the same length as shape.') for i, d in enumerate(shape): dim = tensor_shape_proto.dim.add() if d is None: pass elif isinstance(d, integer_types): dim.dim_value = d elif isinstance(d, text_type): dim.dim_param = d else: raise ValueError( 'Invalid item in shape: {}. ' 'Needs to of integer_types or text_type.'.format(d)) if shape_denotation: dim.denotation = shape_denotation[i] return value_info_proto
def make_tensor_value_info( name, # type: Text elem_type, # type: int shape, # type: Optional[Sequence[Union[Text, int]]] doc_string="", # type: Text shape_denotation=None, # type: Optional[List[Text]] ): # type: (...) -> ValueInfoProto """Makes a ValueInfoProto based on the data type and shape.""" value_info_proto = ValueInfoProto() value_info_proto.name = name if doc_string: value_info_proto.doc_string = doc_string tensor_type_proto = value_info_proto.type.tensor_type tensor_type_proto.elem_type = elem_type tensor_shape_proto = tensor_type_proto.shape if shape is not None: # You might think this is a no-op (extending a normal Python # list by [] certainly is), but protobuf lists work a little # differently; if a field is never set, it is omitted from the # resulting protobuf; a list that is explicitly set to be # empty will get an (empty) entry in the protobuf. This # difference is visible to our consumers, so make sure we emit # an empty shape! tensor_shape_proto.dim.extend([]) if shape_denotation: if len(shape_denotation) != len(shape): raise ValueError( 'Invalid shape_denotation. ' 'Must be of the same length as shape.') for i, d in enumerate(shape): dim = tensor_shape_proto.dim.add() if d is None: pass elif isinstance(d, integer_types): dim.dim_value = d elif isinstance(d, text_type): dim.dim_param = d else: raise ValueError( 'Invalid item in shape: {}. ' 'Needs to of integer_types or text_type.'.format(d)) if shape_denotation: dim.denotation = shape_denotation[i] return value_info_proto
[ "Makes", "a", "ValueInfoProto", "based", "on", "the", "data", "type", "and", "shape", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L290-L340
[ "def", "make_tensor_value_info", "(", "name", ",", "# type: Text", "elem_type", ",", "# type: int", "shape", ",", "# type: Optional[Sequence[Union[Text, int]]]", "doc_string", "=", "\"\"", ",", "# type: Text", "shape_denotation", "=", "None", ",", "# type: Optional[List[Tex...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
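A sketch with a mixed shape: string entries become symbolic dim_param dimensions and integers become fixed dim_value dimensions. The names are arbitrary.

from onnx import helper, TensorProto

x = helper.make_tensor_value_info("x", TensorProto.FLOAT, ["batch", 3, 224, 224])
print(x.type.tensor_type.shape)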
train
strip_doc_string
Empties `doc_string` field on any nested protobuf messages
onnx/helper.py
def strip_doc_string(proto): # type: (google.protobuf.message.Message) -> None """ Empties `doc_string` field on any nested protobuf messages """ assert isinstance(proto, google.protobuf.message.Message) for descriptor in proto.DESCRIPTOR.fields: if descriptor.name == 'doc_string': proto.ClearField(descriptor.name) elif descriptor.type == descriptor.TYPE_MESSAGE: if descriptor.label == descriptor.LABEL_REPEATED: for x in getattr(proto, descriptor.name): strip_doc_string(x) elif proto.HasField(descriptor.name): strip_doc_string(getattr(proto, descriptor.name))
def strip_doc_string(proto): # type: (google.protobuf.message.Message) -> None """ Empties `doc_string` field on any nested protobuf messages """ assert isinstance(proto, google.protobuf.message.Message) for descriptor in proto.DESCRIPTOR.fields: if descriptor.name == 'doc_string': proto.ClearField(descriptor.name) elif descriptor.type == descriptor.TYPE_MESSAGE: if descriptor.label == descriptor.LABEL_REPEATED: for x in getattr(proto, descriptor.name): strip_doc_string(x) elif proto.HasField(descriptor.name): strip_doc_string(getattr(proto, descriptor.name))
[ "Empties", "doc_string", "field", "on", "any", "nested", "protobuf", "messages" ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L538-L551
[ "def", "strip_doc_string", "(", "proto", ")", ":", "# type: (google.protobuf.message.Message) -> None", "assert", "isinstance", "(", "proto", ",", "google", ".", "protobuf", ".", "message", ".", "Message", ")", "for", "descriptor", "in", "proto", ".", "DESCRIPTOR", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
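A hedged sketch: load a model, recursively clear every doc_string, and save the smaller result. File names are placeholders.

import onnx
from onnx.helper import strip_doc_string

model = onnx.load("model.onnx")           # assumed path
strip_doc_string(model)                   # clears doc_string on the model, graph, nodes, ...
onnx.save(model, "model_stripped.onnx")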
train
to_array
Converts a tensor def object to a numpy array. Inputs: tensor: a TensorProto object. Returns: arr: the converted array.
onnx/numpy_helper.py
def to_array(tensor): # type: (TensorProto) -> np.ndarray[Any] """Converts a tensor def object to a numpy array. Inputs: tensor: a TensorProto object. Returns: arr: the converted array. """ if tensor.HasField("segment"): raise ValueError( "Currently not supporting loading segments.") if tensor.data_type == TensorProto.UNDEFINED: raise ValueError("The data type is not defined.") tensor_dtype = tensor.data_type np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[tensor_dtype] storage_type = mapping.TENSOR_TYPE_TO_STORAGE_TENSOR_TYPE[tensor_dtype] storage_np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[storage_type] storage_field = mapping.STORAGE_TENSOR_TYPE_TO_FIELD[storage_type] dims = tensor.dims if tensor.data_type == TensorProto.STRING: utf8_strings = getattr(tensor, storage_field) ss = list(s.decode('utf-8') for s in utf8_strings) return np.asarray(ss).astype(np_dtype).reshape(dims) if tensor.HasField("raw_data"): # Raw_bytes support: using frombuffer. return np.frombuffer( tensor.raw_data, dtype=np_dtype).reshape(dims) else: data = getattr(tensor, storage_field), # type: Sequence[np.complex64] if (tensor_dtype == TensorProto.COMPLEX64 or tensor_dtype == TensorProto.COMPLEX128): data = combine_pairs_to_complex(data) return ( np.asarray( data, dtype=storage_np_dtype) .astype(np_dtype) .reshape(dims) )
def to_array(tensor): # type: (TensorProto) -> np.ndarray[Any] """Converts a tensor def object to a numpy array. Inputs: tensor: a TensorProto object. Returns: arr: the converted array. """ if tensor.HasField("segment"): raise ValueError( "Currently not supporting loading segments.") if tensor.data_type == TensorProto.UNDEFINED: raise ValueError("The data type is not defined.") tensor_dtype = tensor.data_type np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[tensor_dtype] storage_type = mapping.TENSOR_TYPE_TO_STORAGE_TENSOR_TYPE[tensor_dtype] storage_np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[storage_type] storage_field = mapping.STORAGE_TENSOR_TYPE_TO_FIELD[storage_type] dims = tensor.dims if tensor.data_type == TensorProto.STRING: utf8_strings = getattr(tensor, storage_field) ss = list(s.decode('utf-8') for s in utf8_strings) return np.asarray(ss).astype(np_dtype).reshape(dims) if tensor.HasField("raw_data"): # Raw_bytes support: using frombuffer. return np.frombuffer( tensor.raw_data, dtype=np_dtype).reshape(dims) else: data = getattr(tensor, storage_field), # type: Sequence[np.complex64] if (tensor_dtype == TensorProto.COMPLEX64 or tensor_dtype == TensorProto.COMPLEX128): data = combine_pairs_to_complex(data) return ( np.asarray( data, dtype=storage_np_dtype) .astype(np_dtype) .reshape(dims) )
[ "Converts", "a", "tensor", "def", "object", "to", "a", "numpy", "array", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/numpy_helper.py#L24-L66
[ "def", "to_array", "(", "tensor", ")", ":", "# type: (TensorProto) -> np.ndarray[Any]", "if", "tensor", ".", "HasField", "(", "\"segment\"", ")", ":", "raise", "ValueError", "(", "\"Currently not supporting loading segments.\"", ")", "if", "tensor", ".", "data_type", ...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
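A round-trip sketch pairing to_array with from_array from the same module; the array contents are arbitrary.

import numpy as np
from onnx import numpy_helper

original = np.arange(6, dtype=np.float32).reshape(2, 3)
tensor = numpy_helper.from_array(original, name="w")
assert np.array_equal(numpy_helper.to_array(tensor), original)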
train
from_array
Converts a numpy array to a tensor def. Inputs: arr: a numpy array. name: (optional) the name of the tensor. Returns: tensor_def: the converted tensor def.
onnx/numpy_helper.py
def from_array(arr, name=None): # type: (np.ndarray[Any], Optional[Text]) -> TensorProto """Converts a numpy array to a tensor def. Inputs: arr: a numpy array. name: (optional) the name of the tensor. Returns: tensor_def: the converted tensor def. """ tensor = TensorProto() tensor.dims.extend(arr.shape) if name: tensor.name = name if arr.dtype == np.object: # Special care for strings. tensor.data_type = mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype] # TODO: Introduce full string support. # We flatten the array in case there are 2-D arrays are specified # We throw the error below if we have a 3-D array or some kind of other # object. If you want more complex shapes then follow the below instructions. # Unlike other types where the shape is automatically inferred from # nested arrays of values, the only reliable way now to feed strings # is to put them into a flat array then specify type astype(np.object) # (otherwise all strings may have different types depending on their length) # and then specify shape .reshape([x, y, z]) flat_array = arr.flatten() for e in flat_array: if isinstance(e, text_type): tensor.string_data.append(e.encode('utf-8')) elif isinstance(e, np.ndarray): for s in e: if isinstance(s, text_type): tensor.string_data.append(s.encode('utf-8')) else: raise NotImplementedError( "Unrecognized object in the object array, expect a string, or array of bytes: ", str(type(e))) return tensor # For numerical types, directly use numpy raw bytes. try: dtype = mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype] except KeyError: raise RuntimeError( "Numpy data type not understood yet: {}".format(str(arr.dtype))) tensor.data_type = dtype tensor.raw_data = arr.tobytes() # note: tobytes() is only after 1.9. return tensor
def from_array(arr, name=None): # type: (np.ndarray[Any], Optional[Text]) -> TensorProto """Converts a numpy array to a tensor def. Inputs: arr: a numpy array. name: (optional) the name of the tensor. Returns: tensor_def: the converted tensor def. """ tensor = TensorProto() tensor.dims.extend(arr.shape) if name: tensor.name = name if arr.dtype == np.object: # Special care for strings. tensor.data_type = mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype] # TODO: Introduce full string support. # We flatten the array in case there are 2-D arrays are specified # We throw the error below if we have a 3-D array or some kind of other # object. If you want more complex shapes then follow the below instructions. # Unlike other types where the shape is automatically inferred from # nested arrays of values, the only reliable way now to feed strings # is to put them into a flat array then specify type astype(np.object) # (otherwise all strings may have different types depending on their length) # and then specify shape .reshape([x, y, z]) flat_array = arr.flatten() for e in flat_array: if isinstance(e, text_type): tensor.string_data.append(e.encode('utf-8')) elif isinstance(e, np.ndarray): for s in e: if isinstance(s, text_type): tensor.string_data.append(s.encode('utf-8')) else: raise NotImplementedError( "Unrecognized object in the object array, expect a string, or array of bytes: ", str(type(e))) return tensor # For numerical types, directly use numpy raw bytes. try: dtype = mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype] except KeyError: raise RuntimeError( "Numpy data type not understood yet: {}".format(str(arr.dtype))) tensor.data_type = dtype tensor.raw_data = arr.tobytes() # note: tobytes() is only after 1.9. return tensor
[ "Converts", "a", "numpy", "array", "to", "a", "tensor", "def", "." ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/numpy_helper.py#L69-L117
[ "def", "from_array", "(", "arr", ",", "name", "=", "None", ")", ":", "# type: (np.ndarray[Any], Optional[Text]) -> TensorProto", "tensor", "=", "TensorProto", "(", ")", "tensor", ".", "dims", ".", "extend", "(", "arr", ".", "shape", ")", "if", "name", ":", "...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
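A small sketch of the dtype and shape mapping performed by from_array; the tensor name is arbitrary.

import numpy as np
from onnx import numpy_helper, TensorProto

tensor = numpy_helper.from_array(np.zeros((4, 4), dtype=np.int64), name="indices")
assert tensor.data_type == TensorProto.INT64
assert list(tensor.dims) == [4, 4]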
train
_serialize
Serialize an in-memory proto to bytes @params proto is an in-memory proto, such as a ModelProto, TensorProto, etc. @return Serialized proto in bytes
onnx/__init__.py
def _serialize(proto): # type: (Union[bytes, google.protobuf.message.Message]) -> bytes ''' Serialize a in-memory proto to bytes @params proto is a in-memory proto, such as a ModelProto, TensorProto, etc @return Serialized proto in bytes ''' if isinstance(proto, bytes): return proto elif hasattr(proto, 'SerializeToString') and callable(proto.SerializeToString): result = proto.SerializeToString() return result else: raise ValueError('No SerializeToString method is detected. ' 'neither proto is a str.\ntype is {}'.format(type(proto)))
def _serialize(proto): # type: (Union[bytes, google.protobuf.message.Message]) -> bytes ''' Serialize a in-memory proto to bytes @params proto is a in-memory proto, such as a ModelProto, TensorProto, etc @return Serialized proto in bytes ''' if isinstance(proto, bytes): return proto elif hasattr(proto, 'SerializeToString') and callable(proto.SerializeToString): result = proto.SerializeToString() return result else: raise ValueError('No SerializeToString method is detected. ' 'neither proto is a str.\ntype is {}'.format(type(proto)))
[ "Serialize", "a", "in", "-", "memory", "proto", "to", "bytes" ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/__init__.py#L53-L70
[ "def", "_serialize", "(", "proto", ")", ":", "# type: (Union[bytes, google.protobuf.message.Message]) -> bytes", "if", "isinstance", "(", "proto", ",", "bytes", ")", ":", "return", "proto", "elif", "hasattr", "(", "proto", ",", "'SerializeToString'", ")", "and", "ca...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
train
_deserialize
Parse bytes into an in-memory proto @params s is bytes containing a serialized proto; proto is an in-memory proto object @return The proto instance filled in by s
onnx/__init__.py
def _deserialize(s, proto): # type: (bytes, _Proto) -> _Proto ''' Parse bytes into a in-memory proto @params s is bytes containing serialized proto proto is a in-memory proto object @return The proto instance filled in by s ''' if not isinstance(s, bytes): raise ValueError('Parameter s must be bytes, but got type: {}'.format(type(s))) if not (hasattr(proto, 'ParseFromString') and callable(proto.ParseFromString)): raise ValueError('No ParseFromString method is detected. ' '\ntype is {}'.format(type(proto))) decoded = cast(Optional[int], proto.ParseFromString(s)) if decoded is not None and decoded != len(s): raise google.protobuf.message.DecodeError( "Protobuf decoding consumed too few bytes: {} out of {}".format( decoded, len(s))) return proto
def _deserialize(s, proto): # type: (bytes, _Proto) -> _Proto ''' Parse bytes into a in-memory proto @params s is bytes containing serialized proto proto is a in-memory proto object @return The proto instance filled in by s ''' if not isinstance(s, bytes): raise ValueError('Parameter s must be bytes, but got type: {}'.format(type(s))) if not (hasattr(proto, 'ParseFromString') and callable(proto.ParseFromString)): raise ValueError('No ParseFromString method is detected. ' '\ntype is {}'.format(type(proto))) decoded = cast(Optional[int], proto.ParseFromString(s)) if decoded is not None and decoded != len(s): raise google.protobuf.message.DecodeError( "Protobuf decoding consumed too few bytes: {} out of {}".format( decoded, len(s))) return proto
[ "Parse", "bytes", "into", "a", "in", "-", "memory", "proto" ]
onnx/onnx
python
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/__init__.py#L76-L99
[ "def", "_deserialize", "(", "s", ",", "proto", ")", ":", "# type: (bytes, _Proto) -> _Proto", "if", "not", "isinstance", "(", "s", ",", "bytes", ")", ":", "raise", "ValueError", "(", "'Parameter s must be bytes, but got type: {}'", ".", "format", "(", "type", "(",...
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
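A hedged sketch of the round trip these two private helpers implement; the public calls used here (SerializeToString on the proto and onnx.load_model_from_string) produce and consume the same bytes, and the empty graph exists only to keep the snippet self-contained.

import onnx
from onnx import helper

model = helper.make_model(helper.make_graph([], "empty_graph", [], []))
data = model.SerializeToString()              # the bytes _serialize would return
restored = onnx.load_model_from_string(data)  # _deserialize fills a fresh ModelProto
assert restored == model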