id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
23,300
awslabs/serverless-application-model
samtranslator/intrinsics/actions.py
Action._parse_resource_reference
def _parse_resource_reference(cls, ref_value):
    """
    Splits a resource reference of the form "LogicalId.Property" into its two parts.

    :param string ref_value: Input reference value which *may* contain the structure "LogicalId.Property"
    :return string, string: Tuple of (logical_id, property). If the input does not contain the
        structure, both values are None.
    """
    empty = (None, None)

    if not isinstance(ref_value, string_types):
        return empty

    parts = ref_value.split(cls._resource_ref_separator, 1)

    # Reject inputs with no separator, or where either side is empty (Ex: "LogicalId.")
    if len(parts) == 2 and all(parts):
        return parts[0], parts[1]

    return empty
python
def _parse_resource_reference(cls, ref_value): """ Splits a resource reference of structure "LogicalId.Property" and returns the "LogicalId" and "Property" separately. :param string ref_value: Input reference value which *may* contain the structure "LogicalId.Property" :return string, string: Returns two values - logical_id, property. If the input does not contain the structure, then both `logical_id` and property will be None """ no_result = (None, None) if not isinstance(ref_value, string_types): return no_result splits = ref_value.split(cls._resource_ref_separator, 1) # Either there is no 'dot' (or) one of the values is empty string (Ex: when you split "LogicalId.") if len(splits) != 2 or not all(splits): return no_result return splits[0], splits[1]
[ "def", "_parse_resource_reference", "(", "cls", ",", "ref_value", ")", ":", "no_result", "=", "(", "None", ",", "None", ")", "if", "not", "isinstance", "(", "ref_value", ",", "string_types", ")", ":", "return", "no_result", "splits", "=", "ref_value", ".", "split", "(", "cls", ".", "_resource_ref_separator", ",", "1", ")", "# Either there is no 'dot' (or) one of the values is empty string (Ex: when you split \"LogicalId.\")", "if", "len", "(", "splits", ")", "!=", "2", "or", "not", "all", "(", "splits", ")", ":", "return", "no_result", "return", "splits", "[", "0", "]", ",", "splits", "[", "1", "]" ]
Splits a resource reference of structure "LogicalId.Property" and returns the "LogicalId" and "Property" separately. :param string ref_value: Input reference value which *may* contain the structure "LogicalId.Property" :return string, string: Returns two values - logical_id, property. If the input does not contain the structure, then both `logical_id` and property will be None
[ "Splits", "a", "resource", "reference", "of", "structure", "LogicalId", ".", "Property", "and", "returns", "the", "LogicalId", "and", "Property", "separately", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/actions.py#L55-L76
23,301
awslabs/serverless-application-model
samtranslator/intrinsics/actions.py
RefAction.resolve_parameter_refs
def resolve_parameter_refs(self, input_dict, parameters):
    """
    Resolves references that are present in the parameters and returns the value. If it is not in parameters,
    this method simply returns the input unchanged.

    :param input_dict: Dictionary representing the Ref function. Must contain only one key and it
        should be "Ref". Ex: {Ref: "foo"}
    :param parameters: Dictionary of parameter values for resolution
    :return: Resolved parameter value, or the input unchanged when it cannot be resolved
    """
    if not self.can_handle(input_dict):
        return input_dict

    param_name = input_dict[self.intrinsic_name]

    # Only plain string names can be looked up in the parameters table
    if not isinstance(param_name, string_types):
        return input_dict

    # Fall back to the untouched input when the parameter is unknown
    return parameters.get(param_name, input_dict)
python
def resolve_parameter_refs(self, input_dict, parameters): """ Resolves references that are present in the parameters and returns the value. If it is not in parameters, this method simply returns the input unchanged. :param input_dict: Dictionary representing the Ref function. Must contain only one key and it should be "Ref". Ex: {Ref: "foo"} :param parameters: Dictionary of parameter values for resolution :return: """ if not self.can_handle(input_dict): return input_dict param_name = input_dict[self.intrinsic_name] if not isinstance(param_name, string_types): return input_dict if param_name in parameters: return parameters[param_name] else: return input_dict
[ "def", "resolve_parameter_refs", "(", "self", ",", "input_dict", ",", "parameters", ")", ":", "if", "not", "self", ".", "can_handle", "(", "input_dict", ")", ":", "return", "input_dict", "param_name", "=", "input_dict", "[", "self", ".", "intrinsic_name", "]", "if", "not", "isinstance", "(", "param_name", ",", "string_types", ")", ":", "return", "input_dict", "if", "param_name", "in", "parameters", ":", "return", "parameters", "[", "param_name", "]", "else", ":", "return", "input_dict" ]
Resolves references that are present in the parameters and returns the value. If it is not in parameters, this method simply returns the input unchanged. :param input_dict: Dictionary representing the Ref function. Must contain only one key and it should be "Ref". Ex: {Ref: "foo"} :param parameters: Dictionary of parameter values for resolution :return:
[ "Resolves", "references", "that", "are", "present", "in", "the", "parameters", "and", "returns", "the", "value", ".", "If", "it", "is", "not", "in", "parameters", "this", "method", "simply", "returns", "the", "input", "unchanged", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/actions.py#L82-L104
23,302
awslabs/serverless-application-model
samtranslator/intrinsics/actions.py
RefAction.resolve_resource_refs
def resolve_resource_refs(self, input_dict, supported_resource_refs):
    """
    Resolves references to some property of a resource. These are runtime properties which can't
    be converted to a value here. Instead we output another reference that will more actually
    resolve to the value when executed via CloudFormation

    Example: {"Ref": "LogicalId.Property"} => {"Ref": "SomeOtherLogicalId"}

    :param dict input_dict: Dictionary representing the Ref function to be resolved.
    :param samtranslator.intrinsics.resource_refs.SupportedResourceReferences supported_resource_refs:
        Instance of an `SupportedResourceReferences` object that contain value of the property.
    :return dict: Dictionary with resource references resolved.
    """
    if not self.can_handle(input_dict):
        return input_dict

    ref_value = input_dict[self.intrinsic_name]
    # `property_name` renamed from `property` to avoid shadowing the `property` builtin
    logical_id, property_name = self._parse_resource_reference(ref_value)

    # ref_value could not be parsed
    if not logical_id:
        return input_dict

    resolved_value = supported_resource_refs.get(logical_id, property_name)
    if not resolved_value:
        return input_dict

    return {
        self.intrinsic_name: resolved_value
    }
python
def resolve_resource_refs(self, input_dict, supported_resource_refs): """ Resolves references to some property of a resource. These are runtime properties which can't be converted to a value here. Instead we output another reference that will more actually resolve to the value when executed via CloudFormation Example: {"Ref": "LogicalId.Property"} => {"Ref": "SomeOtherLogicalId"} :param dict input_dict: Dictionary representing the Ref function to be resolved. :param samtranslator.intrinsics.resource_refs.SupportedResourceReferences supported_resource_refs: Instance of an `SupportedResourceReferences` object that contain value of the property. :return dict: Dictionary with resource references resolved. """ if not self.can_handle(input_dict): return input_dict ref_value = input_dict[self.intrinsic_name] logical_id, property = self._parse_resource_reference(ref_value) # ref_value could not be parsed if not logical_id: return input_dict resolved_value = supported_resource_refs.get(logical_id, property) if not resolved_value: return input_dict return { self.intrinsic_name: resolved_value }
[ "def", "resolve_resource_refs", "(", "self", ",", "input_dict", ",", "supported_resource_refs", ")", ":", "if", "not", "self", ".", "can_handle", "(", "input_dict", ")", ":", "return", "input_dict", "ref_value", "=", "input_dict", "[", "self", ".", "intrinsic_name", "]", "logical_id", ",", "property", "=", "self", ".", "_parse_resource_reference", "(", "ref_value", ")", "# ref_value could not be parsed", "if", "not", "logical_id", ":", "return", "input_dict", "resolved_value", "=", "supported_resource_refs", ".", "get", "(", "logical_id", ",", "property", ")", "if", "not", "resolved_value", ":", "return", "input_dict", "return", "{", "self", ".", "intrinsic_name", ":", "resolved_value", "}" ]
Resolves references to some property of a resource. These are runtime properties which can't be converted to a value here. Instead we output another reference that will more actually resolve to the value when executed via CloudFormation Example: {"Ref": "LogicalId.Property"} => {"Ref": "SomeOtherLogicalId"} :param dict input_dict: Dictionary representing the Ref function to be resolved. :param samtranslator.intrinsics.resource_refs.SupportedResourceReferences supported_resource_refs: Instance of an `SupportedResourceReferences` object that contain value of the property. :return dict: Dictionary with resource references resolved.
[ "Resolves", "references", "to", "some", "property", "of", "a", "resource", ".", "These", "are", "runtime", "properties", "which", "can", "t", "be", "converted", "to", "a", "value", "here", ".", "Instead", "we", "output", "another", "reference", "that", "will", "more", "actually", "resolve", "to", "the", "value", "when", "executed", "via", "CloudFormation" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/actions.py#L106-L137
23,303
awslabs/serverless-application-model
samtranslator/intrinsics/actions.py
SubAction._handle_sub_action
def _handle_sub_action(self, input_dict, handler):
    """
    Handles resolving replacements in the Sub action based on the handler that is passed as an input.

    :param input_dict: Dictionary to be resolved
    :param handler: handler that is specific to each implementation.
    :return: Resolved value of the Sub dictionary
    """
    if not self.can_handle(input_dict):
        return input_dict

    # Resolve the Sub value in place and hand back the same dictionary
    intrinsic_key = self.intrinsic_name
    input_dict[intrinsic_key] = self._handle_sub_value(input_dict[intrinsic_key], handler)

    return input_dict
python
def _handle_sub_action(self, input_dict, handler): """ Handles resolving replacements in the Sub action based on the handler that is passed as an input. :param input_dict: Dictionary to be resolved :param supported_values: One of several different objects that contain the supported values that need to be changed. See each method above for specifics on these objects. :param handler: handler that is specific to each implementation. :return: Resolved value of the Sub dictionary """ if not self.can_handle(input_dict): return input_dict key = self.intrinsic_name sub_value = input_dict[key] input_dict[key] = self._handle_sub_value(sub_value, handler) return input_dict
[ "def", "_handle_sub_action", "(", "self", ",", "input_dict", ",", "handler", ")", ":", "if", "not", "self", ".", "can_handle", "(", "input_dict", ")", ":", "return", "input_dict", "key", "=", "self", ".", "intrinsic_name", "sub_value", "=", "input_dict", "[", "key", "]", "input_dict", "[", "key", "]", "=", "self", ".", "_handle_sub_value", "(", "sub_value", ",", "handler", ")", "return", "input_dict" ]
Handles resolving replacements in the Sub action based on the handler that is passed as an input. :param input_dict: Dictionary to be resolved :param supported_values: One of several different objects that contain the supported values that need to be changed. See each method above for specifics on these objects. :param handler: handler that is specific to each implementation. :return: Resolved value of the Sub dictionary
[ "Handles", "resolving", "replacements", "in", "the", "Sub", "action", "based", "on", "the", "handler", "that", "is", "passed", "as", "an", "input", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/actions.py#L311-L329
23,304
awslabs/serverless-application-model
samtranslator/intrinsics/actions.py
GetAttAction._get_resolved_dictionary
def _get_resolved_dictionary(self, input_dict, key, resolved_value, remaining): """ Resolves the function and returns the updated dictionary :param input_dict: Dictionary to be resolved :param key: Name of this intrinsic. :param resolved_value: Resolved or updated value for this action. :param remaining: Remaining sections for the GetAtt action. """ if resolved_value: # We resolved to a new resource logicalId. Use this as the first element and keep remaining elements intact # This is the new value of Fn::GetAtt input_dict[key] = [resolved_value] + remaining return input_dict
python
def _get_resolved_dictionary(self, input_dict, key, resolved_value, remaining): """ Resolves the function and returns the updated dictionary :param input_dict: Dictionary to be resolved :param key: Name of this intrinsic. :param resolved_value: Resolved or updated value for this action. :param remaining: Remaining sections for the GetAtt action. """ if resolved_value: # We resolved to a new resource logicalId. Use this as the first element and keep remaining elements intact # This is the new value of Fn::GetAtt input_dict[key] = [resolved_value] + remaining return input_dict
[ "def", "_get_resolved_dictionary", "(", "self", ",", "input_dict", ",", "key", ",", "resolved_value", ",", "remaining", ")", ":", "if", "resolved_value", ":", "# We resolved to a new resource logicalId. Use this as the first element and keep remaining elements intact", "# This is the new value of Fn::GetAtt", "input_dict", "[", "key", "]", "=", "[", "resolved_value", "]", "+", "remaining", "return", "input_dict" ]
Resolves the function and returns the updated dictionary :param input_dict: Dictionary to be resolved :param key: Name of this intrinsic. :param resolved_value: Resolved or updated value for this action. :param remaining: Remaining sections for the GetAtt action.
[ "Resolves", "the", "function", "and", "returns", "the", "updated", "dictionary" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/actions.py#L497-L511
23,305
awslabs/serverless-application-model
examples/apps/datadog-process-rds-metrics/lambda_function.py
lambda_handler
def lambda_handler(event, context):
    '''
    Process a RDS enhanced monitoring DATA_MESSAGE, coming from CLOUDWATCH LOGS

    :param event: Lambda event; event['awslogs']['data'] holds a base64-encoded, gzipped
        JSON payload from CloudWatch Logs.
    :param context: Lambda context object; its invoked_function_arn is parsed for the region.
    :return: {'Status': 'OK'} when every log event was processed.
    '''
    # Local imports keep this drop-in compatible with the module's existing import block.
    import base64
    import zlib

    # str.decode('base64') is Python-2-only; base64 + zlib works on both Python 2 and 3.
    # wbits = 16 + MAX_WBITS tells zlib to expect a gzip header on the compressed payload.
    compressed_payload = base64.b64decode(event['awslogs']['data'])
    event = json.loads(zlib.decompress(compressed_payload, 16 + zlib.MAX_WBITS))

    account = event['owner']
    # ARN format is arn:aws:lambda:<region>:<account>:..., so index 3 is the region
    region = context.invoked_function_arn.split(':', 4)[3]

    log_events = event['logEvents']

    for log_event in log_events:
        message = json.loads(log_event['message'])
        ts = log_event['timestamp'] / 1000  # CloudWatch timestamps are in milliseconds
        _process_rds_enhanced_monitoring_message(ts, message, account, region)

    stats.flush()
    return {'Status': 'OK'}
python
def lambda_handler(event, context): ''' Process a RDS enhenced monitoring DATA_MESSAGE, coming from CLOUDWATCH LOGS ''' # event is a dict containing a base64 string gzipped event = json.loads(gzip.GzipFile(fileobj=StringIO(event['awslogs']['data'].decode('base64'))).read()) account = event['owner'] region = context.invoked_function_arn.split(':', 4)[3] log_events = event['logEvents'] for log_event in log_events: message = json.loads(log_event['message']) ts = log_event['timestamp'] / 1000 _process_rds_enhanced_monitoring_message(ts, message, account, region) stats.flush() return {'Status': 'OK'}
[ "def", "lambda_handler", "(", "event", ",", "context", ")", ":", "# event is a dict containing a base64 string gzipped", "event", "=", "json", ".", "loads", "(", "gzip", ".", "GzipFile", "(", "fileobj", "=", "StringIO", "(", "event", "[", "'awslogs'", "]", "[", "'data'", "]", ".", "decode", "(", "'base64'", ")", ")", ")", ".", "read", "(", ")", ")", "account", "=", "event", "[", "'owner'", "]", "region", "=", "context", ".", "invoked_function_arn", ".", "split", "(", "':'", ",", "4", ")", "[", "3", "]", "log_events", "=", "event", "[", "'logEvents'", "]", "for", "log_event", "in", "log_events", ":", "message", "=", "json", ".", "loads", "(", "log_event", "[", "'message'", "]", ")", "ts", "=", "log_event", "[", "'timestamp'", "]", "/", "1000", "_process_rds_enhanced_monitoring_message", "(", "ts", ",", "message", ",", "account", ",", "region", ")", "stats", ".", "flush", "(", ")", "return", "{", "'Status'", ":", "'OK'", "}" ]
Process a RDS enhanced monitoring DATA_MESSAGE, coming from CLOUDWATCH LOGS
[ "Process", "a", "RDS", "enhenced", "monitoring", "DATA_MESSAGE", "coming", "from", "CLOUDWATCH", "LOGS" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/examples/apps/datadog-process-rds-metrics/lambda_function.py#L104-L122
23,306
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator._construct_rest_api
def _construct_rest_api(self):
    """Constructs and returns the ApiGateway RestApi.

    :returns: the RestApi to which this SAM Api corresponds
    :rtype: model.apigateway.ApiGatewayRestApi
    """
    rest_api = ApiGatewayRestApi(self.logical_id,
                                 depends_on=self.depends_on,
                                 attributes=self.resource_attributes)
    rest_api.BinaryMediaTypes = self.binary_media
    rest_api.MinimumCompressionSize = self.minimum_compression_size

    if self.endpoint_configuration:
        self._set_endpoint_configuration(rest_api, self.endpoint_configuration)
    elif not RegionConfiguration.is_apigw_edge_configuration_supported():
        # EDGE configuration is unavailable in this region, so explicitly fall back to
        # Regional, the only supported config.
        self._set_endpoint_configuration(rest_api, "REGIONAL")

    # The two definition sources are mutually exclusive
    if self.definition_uri and self.definition_body:
        raise InvalidResourceException(
            self.logical_id,
            "Specify either 'DefinitionUri' or 'DefinitionBody' property and not both")

    self._add_cors()
    self._add_auth()
    self._add_gateway_responses()

    if self.definition_uri:
        rest_api.BodyS3Location = self._construct_body_s3_dict()
    elif self.definition_body:
        rest_api.Body = self.definition_body

    if self.name:
        rest_api.Name = self.name

    return rest_api
python
def _construct_rest_api(self): """Constructs and returns the ApiGateway RestApi. :returns: the RestApi to which this SAM Api corresponds :rtype: model.apigateway.ApiGatewayRestApi """ rest_api = ApiGatewayRestApi(self.logical_id, depends_on=self.depends_on, attributes=self.resource_attributes) rest_api.BinaryMediaTypes = self.binary_media rest_api.MinimumCompressionSize = self.minimum_compression_size if self.endpoint_configuration: self._set_endpoint_configuration(rest_api, self.endpoint_configuration) elif not RegionConfiguration.is_apigw_edge_configuration_supported(): # Since this region does not support EDGE configuration, we explicitly set the endpoint type # to Regional which is the only supported config. self._set_endpoint_configuration(rest_api, "REGIONAL") if self.definition_uri and self.definition_body: raise InvalidResourceException(self.logical_id, "Specify either 'DefinitionUri' or 'DefinitionBody' property and not both") self._add_cors() self._add_auth() self._add_gateway_responses() if self.definition_uri: rest_api.BodyS3Location = self._construct_body_s3_dict() elif self.definition_body: rest_api.Body = self.definition_body if self.name: rest_api.Name = self.name return rest_api
[ "def", "_construct_rest_api", "(", "self", ")", ":", "rest_api", "=", "ApiGatewayRestApi", "(", "self", ".", "logical_id", ",", "depends_on", "=", "self", ".", "depends_on", ",", "attributes", "=", "self", ".", "resource_attributes", ")", "rest_api", ".", "BinaryMediaTypes", "=", "self", ".", "binary_media", "rest_api", ".", "MinimumCompressionSize", "=", "self", ".", "minimum_compression_size", "if", "self", ".", "endpoint_configuration", ":", "self", ".", "_set_endpoint_configuration", "(", "rest_api", ",", "self", ".", "endpoint_configuration", ")", "elif", "not", "RegionConfiguration", ".", "is_apigw_edge_configuration_supported", "(", ")", ":", "# Since this region does not support EDGE configuration, we explicitly set the endpoint type", "# to Regional which is the only supported config.", "self", ".", "_set_endpoint_configuration", "(", "rest_api", ",", "\"REGIONAL\"", ")", "if", "self", ".", "definition_uri", "and", "self", ".", "definition_body", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Specify either 'DefinitionUri' or 'DefinitionBody' property and not both\"", ")", "self", ".", "_add_cors", "(", ")", "self", ".", "_add_auth", "(", ")", "self", ".", "_add_gateway_responses", "(", ")", "if", "self", ".", "definition_uri", ":", "rest_api", ".", "BodyS3Location", "=", "self", ".", "_construct_body_s3_dict", "(", ")", "elif", "self", ".", "definition_body", ":", "rest_api", ".", "Body", "=", "self", ".", "definition_body", "if", "self", ".", "name", ":", "rest_api", ".", "Name", "=", "self", ".", "name", "return", "rest_api" ]
Constructs and returns the ApiGateway RestApi. :returns: the RestApi to which this SAM Api corresponds :rtype: model.apigateway.ApiGatewayRestApi
[ "Constructs", "and", "returns", "the", "ApiGateway", "RestApi", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L74-L108
23,307
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator._construct_body_s3_dict
def _construct_body_s3_dict(self):
    """Constructs the RestApi's `BodyS3Location property`_, from the SAM Api's DefinitionUri property.

    :returns: a BodyS3Location dict, containing the S3 Bucket, Key, and Version of the Swagger definition
    :rtype: dict
    """
    if isinstance(self.definition_uri, dict):
        # DefinitionUri is already a dictionary; it must carry both Bucket and Key
        s3_pointer = self.definition_uri
        if not (s3_pointer.get("Bucket", None) and s3_pointer.get("Key", None)):
            raise InvalidResourceException(
                self.logical_id,
                "'DefinitionUri' requires Bucket and Key properties to be specified")
    else:
        # DefinitionUri is a string that must parse as an S3 URI
        s3_pointer = parse_s3_uri(self.definition_uri)
        if s3_pointer is None:
            raise InvalidResourceException(
                self.logical_id,
                '\'DefinitionUri\' is not a valid S3 Uri of the form '
                '"s3://bucket/key" with optional versionId query parameter.')

    body_s3 = {
        'Bucket': s3_pointer['Bucket'],
        'Key': s3_pointer['Key']
    }
    if 'Version' in s3_pointer:
        body_s3['Version'] = s3_pointer['Version']

    return body_s3
python
def _construct_body_s3_dict(self): """Constructs the RestApi's `BodyS3Location property`_, from the SAM Api's DefinitionUri property. :returns: a BodyS3Location dict, containing the S3 Bucket, Key, and Version of the Swagger definition :rtype: dict """ if isinstance(self.definition_uri, dict): if not self.definition_uri.get("Bucket", None) or not self.definition_uri.get("Key", None): # DefinitionUri is a dictionary but does not contain Bucket or Key property raise InvalidResourceException(self.logical_id, "'DefinitionUri' requires Bucket and Key properties to be specified") s3_pointer = self.definition_uri else: # DefinitionUri is a string s3_pointer = parse_s3_uri(self.definition_uri) if s3_pointer is None: raise InvalidResourceException(self.logical_id, '\'DefinitionUri\' is not a valid S3 Uri of the form ' '"s3://bucket/key" with optional versionId query parameter.') body_s3 = { 'Bucket': s3_pointer['Bucket'], 'Key': s3_pointer['Key'] } if 'Version' in s3_pointer: body_s3['Version'] = s3_pointer['Version'] return body_s3
[ "def", "_construct_body_s3_dict", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "definition_uri", ",", "dict", ")", ":", "if", "not", "self", ".", "definition_uri", ".", "get", "(", "\"Bucket\"", ",", "None", ")", "or", "not", "self", ".", "definition_uri", ".", "get", "(", "\"Key\"", ",", "None", ")", ":", "# DefinitionUri is a dictionary but does not contain Bucket or Key property", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"'DefinitionUri' requires Bucket and Key properties to be specified\"", ")", "s3_pointer", "=", "self", ".", "definition_uri", "else", ":", "# DefinitionUri is a string", "s3_pointer", "=", "parse_s3_uri", "(", "self", ".", "definition_uri", ")", "if", "s3_pointer", "is", "None", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "'\\'DefinitionUri\\' is not a valid S3 Uri of the form '", "'\"s3://bucket/key\" with optional versionId query parameter.'", ")", "body_s3", "=", "{", "'Bucket'", ":", "s3_pointer", "[", "'Bucket'", "]", ",", "'Key'", ":", "s3_pointer", "[", "'Key'", "]", "}", "if", "'Version'", "in", "s3_pointer", ":", "body_s3", "[", "'Version'", "]", "=", "s3_pointer", "[", "'Version'", "]", "return", "body_s3" ]
Constructs the RestApi's `BodyS3Location property`_, from the SAM Api's DefinitionUri property. :returns: a BodyS3Location dict, containing the S3 Bucket, Key, and Version of the Swagger definition :rtype: dict
[ "Constructs", "the", "RestApi", "s", "BodyS3Location", "property", "_", "from", "the", "SAM", "Api", "s", "DefinitionUri", "property", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L110-L138
23,308
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator._construct_deployment
def _construct_deployment(self, rest_api):
    """Constructs and returns the ApiGateway Deployment.

    :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi for this Deployment
    :returns: the Deployment to which this SAM Api corresponds
    :rtype: model.apigateway.ApiGatewayDeployment
    """
    deployment_logical_id = self.logical_id + 'Deployment'
    deployment = ApiGatewayDeployment(deployment_logical_id,
                                      attributes=self.passthrough_resource_attributes)
    deployment.RestApiId = rest_api.get_runtime_attr('rest_api_id')
    deployment.StageName = 'Stage'
    return deployment
python
def _construct_deployment(self, rest_api): """Constructs and returns the ApiGateway Deployment. :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi for this Deployment :returns: the Deployment to which this SAM Api corresponds :rtype: model.apigateway.ApiGatewayDeployment """ deployment = ApiGatewayDeployment(self.logical_id + 'Deployment', attributes=self.passthrough_resource_attributes) deployment.RestApiId = rest_api.get_runtime_attr('rest_api_id') deployment.StageName = 'Stage' return deployment
[ "def", "_construct_deployment", "(", "self", ",", "rest_api", ")", ":", "deployment", "=", "ApiGatewayDeployment", "(", "self", ".", "logical_id", "+", "'Deployment'", ",", "attributes", "=", "self", ".", "passthrough_resource_attributes", ")", "deployment", ".", "RestApiId", "=", "rest_api", ".", "get_runtime_attr", "(", "'rest_api_id'", ")", "deployment", ".", "StageName", "=", "'Stage'", "return", "deployment" ]
Constructs and returns the ApiGateway Deployment. :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi for this Deployment :returns: the Deployment to which this SAM Api corresponds :rtype: model.apigateway.ApiGatewayDeployment
[ "Constructs", "and", "returns", "the", "ApiGateway", "Deployment", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L140-L152
23,309
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator._construct_stage
def _construct_stage(self, deployment, swagger):
    """Constructs and returns the ApiGateway Stage.

    :param model.apigateway.ApiGatewayDeployment deployment: the Deployment for this Stage
    :param swagger: swagger definition used to make the deployment auto-deployable, or None
    :returns: the Stage to which this SAM Api corresponds
    :rtype: model.apigateway.ApiGatewayStage
    """
    # An intrinsic-function StageName can't be embedded in the logical ID, so only a
    # plain-string name prefixes it. This will NOT create duplicates because we allow
    # only ONE stage per API resource.
    if isinstance(self.stage_name, string_types):
        stage_logical_id = self.logical_id + self.stage_name + 'Stage'
    else:
        stage_logical_id = self.logical_id + 'Stage'

    stage = ApiGatewayStage(stage_logical_id,
                            attributes=self.passthrough_resource_attributes)
    stage.RestApiId = ref(self.logical_id)
    stage.update_deployment_ref(deployment.logical_id)
    stage.StageName = self.stage_name
    stage.CacheClusterEnabled = self.cache_cluster_enabled
    stage.CacheClusterSize = self.cache_cluster_size
    stage.Variables = self.variables
    stage.MethodSettings = self.method_settings
    stage.AccessLogSetting = self.access_log_setting
    stage.CanarySetting = self.canary_setting
    stage.TracingEnabled = self.tracing_enabled

    if swagger is not None:
        deployment.make_auto_deployable(stage, swagger)

    return stage
python
def _construct_stage(self, deployment, swagger): """Constructs and returns the ApiGateway Stage. :param model.apigateway.ApiGatewayDeployment deployment: the Deployment for this Stage :returns: the Stage to which this SAM Api corresponds :rtype: model.apigateway.ApiGatewayStage """ # If StageName is some intrinsic function, then don't prefix the Stage's logical ID # This will NOT create duplicates because we allow only ONE stage per API resource stage_name_prefix = self.stage_name if isinstance(self.stage_name, string_types) else "" stage = ApiGatewayStage(self.logical_id + stage_name_prefix + 'Stage', attributes=self.passthrough_resource_attributes) stage.RestApiId = ref(self.logical_id) stage.update_deployment_ref(deployment.logical_id) stage.StageName = self.stage_name stage.CacheClusterEnabled = self.cache_cluster_enabled stage.CacheClusterSize = self.cache_cluster_size stage.Variables = self.variables stage.MethodSettings = self.method_settings stage.AccessLogSetting = self.access_log_setting stage.CanarySetting = self.canary_setting stage.TracingEnabled = self.tracing_enabled if swagger is not None: deployment.make_auto_deployable(stage, swagger) return stage
[ "def", "_construct_stage", "(", "self", ",", "deployment", ",", "swagger", ")", ":", "# If StageName is some intrinsic function, then don't prefix the Stage's logical ID", "# This will NOT create duplicates because we allow only ONE stage per API resource", "stage_name_prefix", "=", "self", ".", "stage_name", "if", "isinstance", "(", "self", ".", "stage_name", ",", "string_types", ")", "else", "\"\"", "stage", "=", "ApiGatewayStage", "(", "self", ".", "logical_id", "+", "stage_name_prefix", "+", "'Stage'", ",", "attributes", "=", "self", ".", "passthrough_resource_attributes", ")", "stage", ".", "RestApiId", "=", "ref", "(", "self", ".", "logical_id", ")", "stage", ".", "update_deployment_ref", "(", "deployment", ".", "logical_id", ")", "stage", ".", "StageName", "=", "self", ".", "stage_name", "stage", ".", "CacheClusterEnabled", "=", "self", ".", "cache_cluster_enabled", "stage", ".", "CacheClusterSize", "=", "self", ".", "cache_cluster_size", "stage", ".", "Variables", "=", "self", ".", "variables", "stage", ".", "MethodSettings", "=", "self", ".", "method_settings", "stage", ".", "AccessLogSetting", "=", "self", ".", "access_log_setting", "stage", ".", "CanarySetting", "=", "self", ".", "canary_setting", "stage", ".", "TracingEnabled", "=", "self", ".", "tracing_enabled", "if", "swagger", "is", "not", "None", ":", "deployment", ".", "make_auto_deployable", "(", "stage", ",", "swagger", ")", "return", "stage" ]
Constructs and returns the ApiGateway Stage. :param model.apigateway.ApiGatewayDeployment deployment: the Deployment for this Stage :returns: the Stage to which this SAM Api corresponds :rtype: model.apigateway.ApiGatewayStage
[ "Constructs", "and", "returns", "the", "ApiGateway", "Stage", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L154-L182
23,310
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator.to_cloudformation
def to_cloudformation(self): """Generates CloudFormation resources from a SAM API resource :returns: a tuple containing the RestApi, Deployment, and Stage for an empty Api. :rtype: tuple """ rest_api = self._construct_rest_api() deployment = self._construct_deployment(rest_api) swagger = None if rest_api.Body is not None: swagger = rest_api.Body elif rest_api.BodyS3Location is not None: swagger = rest_api.BodyS3Location stage = self._construct_stage(deployment, swagger) permissions = self._construct_authorizer_lambda_permission() return rest_api, deployment, stage, permissions
python
def to_cloudformation(self): """Generates CloudFormation resources from a SAM API resource :returns: a tuple containing the RestApi, Deployment, and Stage for an empty Api. :rtype: tuple """ rest_api = self._construct_rest_api() deployment = self._construct_deployment(rest_api) swagger = None if rest_api.Body is not None: swagger = rest_api.Body elif rest_api.BodyS3Location is not None: swagger = rest_api.BodyS3Location stage = self._construct_stage(deployment, swagger) permissions = self._construct_authorizer_lambda_permission() return rest_api, deployment, stage, permissions
[ "def", "to_cloudformation", "(", "self", ")", ":", "rest_api", "=", "self", ".", "_construct_rest_api", "(", ")", "deployment", "=", "self", ".", "_construct_deployment", "(", "rest_api", ")", "swagger", "=", "None", "if", "rest_api", ".", "Body", "is", "not", "None", ":", "swagger", "=", "rest_api", ".", "Body", "elif", "rest_api", ".", "BodyS3Location", "is", "not", "None", ":", "swagger", "=", "rest_api", ".", "BodyS3Location", "stage", "=", "self", ".", "_construct_stage", "(", "deployment", ",", "swagger", ")", "permissions", "=", "self", ".", "_construct_authorizer_lambda_permission", "(", ")", "return", "rest_api", ",", "deployment", ",", "stage", ",", "permissions" ]
Generates CloudFormation resources from a SAM API resource :returns: a tuple containing the RestApi, Deployment, and Stage for an empty Api. :rtype: tuple
[ "Generates", "CloudFormation", "resources", "from", "a", "SAM", "API", "resource" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L184-L203
23,311
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator._add_cors
def _add_cors(self): """ Add CORS configuration to the Swagger file, if necessary """ INVALID_ERROR = "Invalid value for 'Cors' property" if not self.cors: return if self.cors and not self.definition_body: raise InvalidResourceException(self.logical_id, "Cors works only with inline Swagger specified in " "'DefinitionBody' property") if isinstance(self.cors, string_types) or is_instrinsic(self.cors): # Just set Origin property. Others will be defaults properties = CorsProperties(AllowOrigin=self.cors) elif isinstance(self.cors, dict): # Make sure keys in the dict are recognized if not all(key in CorsProperties._fields for key in self.cors.keys()): raise InvalidResourceException(self.logical_id, INVALID_ERROR) properties = CorsProperties(**self.cors) else: raise InvalidResourceException(self.logical_id, INVALID_ERROR) if not SwaggerEditor.is_valid(self.definition_body): raise InvalidResourceException(self.logical_id, "Unable to add Cors configuration because " "'DefinitionBody' does not contain a valid Swagger") if properties.AllowCredentials is True and properties.AllowOrigin == _CORS_WILDCARD: raise InvalidResourceException(self.logical_id, "Unable to add Cors configuration because " "'AllowCredentials' can not be true when " "'AllowOrigin' is \"'*'\" or not set") editor = SwaggerEditor(self.definition_body) for path in editor.iter_on_path(): editor.add_cors(path, properties.AllowOrigin, properties.AllowHeaders, properties.AllowMethods, max_age=properties.MaxAge, allow_credentials=properties.AllowCredentials) # Assign the Swagger back to template self.definition_body = editor.swagger
python
def _add_cors(self): """ Add CORS configuration to the Swagger file, if necessary """ INVALID_ERROR = "Invalid value for 'Cors' property" if not self.cors: return if self.cors and not self.definition_body: raise InvalidResourceException(self.logical_id, "Cors works only with inline Swagger specified in " "'DefinitionBody' property") if isinstance(self.cors, string_types) or is_instrinsic(self.cors): # Just set Origin property. Others will be defaults properties = CorsProperties(AllowOrigin=self.cors) elif isinstance(self.cors, dict): # Make sure keys in the dict are recognized if not all(key in CorsProperties._fields for key in self.cors.keys()): raise InvalidResourceException(self.logical_id, INVALID_ERROR) properties = CorsProperties(**self.cors) else: raise InvalidResourceException(self.logical_id, INVALID_ERROR) if not SwaggerEditor.is_valid(self.definition_body): raise InvalidResourceException(self.logical_id, "Unable to add Cors configuration because " "'DefinitionBody' does not contain a valid Swagger") if properties.AllowCredentials is True and properties.AllowOrigin == _CORS_WILDCARD: raise InvalidResourceException(self.logical_id, "Unable to add Cors configuration because " "'AllowCredentials' can not be true when " "'AllowOrigin' is \"'*'\" or not set") editor = SwaggerEditor(self.definition_body) for path in editor.iter_on_path(): editor.add_cors(path, properties.AllowOrigin, properties.AllowHeaders, properties.AllowMethods, max_age=properties.MaxAge, allow_credentials=properties.AllowCredentials) # Assign the Swagger back to template self.definition_body = editor.swagger
[ "def", "_add_cors", "(", "self", ")", ":", "INVALID_ERROR", "=", "\"Invalid value for 'Cors' property\"", "if", "not", "self", ".", "cors", ":", "return", "if", "self", ".", "cors", "and", "not", "self", ".", "definition_body", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Cors works only with inline Swagger specified in \"", "\"'DefinitionBody' property\"", ")", "if", "isinstance", "(", "self", ".", "cors", ",", "string_types", ")", "or", "is_instrinsic", "(", "self", ".", "cors", ")", ":", "# Just set Origin property. Others will be defaults", "properties", "=", "CorsProperties", "(", "AllowOrigin", "=", "self", ".", "cors", ")", "elif", "isinstance", "(", "self", ".", "cors", ",", "dict", ")", ":", "# Make sure keys in the dict are recognized", "if", "not", "all", "(", "key", "in", "CorsProperties", ".", "_fields", "for", "key", "in", "self", ".", "cors", ".", "keys", "(", ")", ")", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "INVALID_ERROR", ")", "properties", "=", "CorsProperties", "(", "*", "*", "self", ".", "cors", ")", "else", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "INVALID_ERROR", ")", "if", "not", "SwaggerEditor", ".", "is_valid", "(", "self", ".", "definition_body", ")", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Unable to add Cors configuration because \"", "\"'DefinitionBody' does not contain a valid Swagger\"", ")", "if", "properties", ".", "AllowCredentials", "is", "True", "and", "properties", ".", "AllowOrigin", "==", "_CORS_WILDCARD", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Unable to add Cors configuration because \"", "\"'AllowCredentials' can not be true when \"", "\"'AllowOrigin' is \\\"'*'\\\" or not set\"", ")", "editor", "=", "SwaggerEditor", "(", "self", ".", "definition_body", ")", "for", "path", "in", "editor", ".", "iter_on_path", "(", ")", ":", 
"editor", ".", "add_cors", "(", "path", ",", "properties", ".", "AllowOrigin", ",", "properties", ".", "AllowHeaders", ",", "properties", ".", "AllowMethods", ",", "max_age", "=", "properties", ".", "MaxAge", ",", "allow_credentials", "=", "properties", ".", "AllowCredentials", ")", "# Assign the Swagger back to template", "self", ".", "definition_body", "=", "editor", ".", "swagger" ]
Add CORS configuration to the Swagger file, if necessary
[ "Add", "CORS", "configuration", "to", "the", "Swagger", "file", "if", "necessary" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L205-L249
23,312
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator._add_auth
def _add_auth(self): """ Add Auth configuration to the Swagger file, if necessary """ if not self.auth: return if self.auth and not self.definition_body: raise InvalidResourceException(self.logical_id, "Auth works only with inline Swagger specified in " "'DefinitionBody' property") # Make sure keys in the dict are recognized if not all(key in AuthProperties._fields for key in self.auth.keys()): raise InvalidResourceException( self.logical_id, "Invalid value for 'Auth' property") if not SwaggerEditor.is_valid(self.definition_body): raise InvalidResourceException(self.logical_id, "Unable to add Auth configuration because " "'DefinitionBody' does not contain a valid Swagger") swagger_editor = SwaggerEditor(self.definition_body) auth_properties = AuthProperties(**self.auth) authorizers = self._get_authorizers(auth_properties.Authorizers, auth_properties.DefaultAuthorizer) if authorizers: swagger_editor.add_authorizers(authorizers) self._set_default_authorizer(swagger_editor, authorizers, auth_properties.DefaultAuthorizer) # Assign the Swagger back to template self.definition_body = swagger_editor.swagger
python
def _add_auth(self): """ Add Auth configuration to the Swagger file, if necessary """ if not self.auth: return if self.auth and not self.definition_body: raise InvalidResourceException(self.logical_id, "Auth works only with inline Swagger specified in " "'DefinitionBody' property") # Make sure keys in the dict are recognized if not all(key in AuthProperties._fields for key in self.auth.keys()): raise InvalidResourceException( self.logical_id, "Invalid value for 'Auth' property") if not SwaggerEditor.is_valid(self.definition_body): raise InvalidResourceException(self.logical_id, "Unable to add Auth configuration because " "'DefinitionBody' does not contain a valid Swagger") swagger_editor = SwaggerEditor(self.definition_body) auth_properties = AuthProperties(**self.auth) authorizers = self._get_authorizers(auth_properties.Authorizers, auth_properties.DefaultAuthorizer) if authorizers: swagger_editor.add_authorizers(authorizers) self._set_default_authorizer(swagger_editor, authorizers, auth_properties.DefaultAuthorizer) # Assign the Swagger back to template self.definition_body = swagger_editor.swagger
[ "def", "_add_auth", "(", "self", ")", ":", "if", "not", "self", ".", "auth", ":", "return", "if", "self", ".", "auth", "and", "not", "self", ".", "definition_body", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Auth works only with inline Swagger specified in \"", "\"'DefinitionBody' property\"", ")", "# Make sure keys in the dict are recognized", "if", "not", "all", "(", "key", "in", "AuthProperties", ".", "_fields", "for", "key", "in", "self", ".", "auth", ".", "keys", "(", ")", ")", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Invalid value for 'Auth' property\"", ")", "if", "not", "SwaggerEditor", ".", "is_valid", "(", "self", ".", "definition_body", ")", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Unable to add Auth configuration because \"", "\"'DefinitionBody' does not contain a valid Swagger\"", ")", "swagger_editor", "=", "SwaggerEditor", "(", "self", ".", "definition_body", ")", "auth_properties", "=", "AuthProperties", "(", "*", "*", "self", ".", "auth", ")", "authorizers", "=", "self", ".", "_get_authorizers", "(", "auth_properties", ".", "Authorizers", ",", "auth_properties", ".", "DefaultAuthorizer", ")", "if", "authorizers", ":", "swagger_editor", ".", "add_authorizers", "(", "authorizers", ")", "self", ".", "_set_default_authorizer", "(", "swagger_editor", ",", "authorizers", ",", "auth_properties", ".", "DefaultAuthorizer", ")", "# Assign the Swagger back to template", "self", ".", "definition_body", "=", "swagger_editor", ".", "swagger" ]
Add Auth configuration to the Swagger file, if necessary
[ "Add", "Auth", "configuration", "to", "the", "Swagger", "file", "if", "necessary" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L251-L281
23,313
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator._add_gateway_responses
def _add_gateway_responses(self): """ Add Gateway Response configuration to the Swagger file, if necessary """ if not self.gateway_responses: return if self.gateway_responses and not self.definition_body: raise InvalidResourceException( self.logical_id, "GatewayResponses works only with inline Swagger specified in " "'DefinitionBody' property") # Make sure keys in the dict are recognized for responses_key, responses_value in self.gateway_responses.items(): for response_key in responses_value.keys(): if response_key not in GatewayResponseProperties: raise InvalidResourceException( self.logical_id, "Invalid property '{}' in 'GatewayResponses' property '{}'".format(response_key, responses_key)) if not SwaggerEditor.is_valid(self.definition_body): raise InvalidResourceException( self.logical_id, "Unable to add Auth configuration because " "'DefinitionBody' does not contain a valid Swagger") swagger_editor = SwaggerEditor(self.definition_body) gateway_responses = {} for response_type, response in self.gateway_responses.items(): gateway_responses[response_type] = ApiGatewayResponse( api_logical_id=self.logical_id, response_parameters=response.get('ResponseParameters', {}), response_templates=response.get('ResponseTemplates', {}), status_code=response.get('StatusCode', None) ) if gateway_responses: swagger_editor.add_gateway_responses(gateway_responses) # Assign the Swagger back to template self.definition_body = swagger_editor.swagger
python
def _add_gateway_responses(self): """ Add Gateway Response configuration to the Swagger file, if necessary """ if not self.gateway_responses: return if self.gateway_responses and not self.definition_body: raise InvalidResourceException( self.logical_id, "GatewayResponses works only with inline Swagger specified in " "'DefinitionBody' property") # Make sure keys in the dict are recognized for responses_key, responses_value in self.gateway_responses.items(): for response_key in responses_value.keys(): if response_key not in GatewayResponseProperties: raise InvalidResourceException( self.logical_id, "Invalid property '{}' in 'GatewayResponses' property '{}'".format(response_key, responses_key)) if not SwaggerEditor.is_valid(self.definition_body): raise InvalidResourceException( self.logical_id, "Unable to add Auth configuration because " "'DefinitionBody' does not contain a valid Swagger") swagger_editor = SwaggerEditor(self.definition_body) gateway_responses = {} for response_type, response in self.gateway_responses.items(): gateway_responses[response_type] = ApiGatewayResponse( api_logical_id=self.logical_id, response_parameters=response.get('ResponseParameters', {}), response_templates=response.get('ResponseTemplates', {}), status_code=response.get('StatusCode', None) ) if gateway_responses: swagger_editor.add_gateway_responses(gateway_responses) # Assign the Swagger back to template self.definition_body = swagger_editor.swagger
[ "def", "_add_gateway_responses", "(", "self", ")", ":", "if", "not", "self", ".", "gateway_responses", ":", "return", "if", "self", ".", "gateway_responses", "and", "not", "self", ".", "definition_body", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"GatewayResponses works only with inline Swagger specified in \"", "\"'DefinitionBody' property\"", ")", "# Make sure keys in the dict are recognized", "for", "responses_key", ",", "responses_value", "in", "self", ".", "gateway_responses", ".", "items", "(", ")", ":", "for", "response_key", "in", "responses_value", ".", "keys", "(", ")", ":", "if", "response_key", "not", "in", "GatewayResponseProperties", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Invalid property '{}' in 'GatewayResponses' property '{}'\"", ".", "format", "(", "response_key", ",", "responses_key", ")", ")", "if", "not", "SwaggerEditor", ".", "is_valid", "(", "self", ".", "definition_body", ")", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Unable to add Auth configuration because \"", "\"'DefinitionBody' does not contain a valid Swagger\"", ")", "swagger_editor", "=", "SwaggerEditor", "(", "self", ".", "definition_body", ")", "gateway_responses", "=", "{", "}", "for", "response_type", ",", "response", "in", "self", ".", "gateway_responses", ".", "items", "(", ")", ":", "gateway_responses", "[", "response_type", "]", "=", "ApiGatewayResponse", "(", "api_logical_id", "=", "self", ".", "logical_id", ",", "response_parameters", "=", "response", ".", "get", "(", "'ResponseParameters'", ",", "{", "}", ")", ",", "response_templates", "=", "response", ".", "get", "(", "'ResponseTemplates'", ",", "{", "}", ")", ",", "status_code", "=", "response", ".", "get", "(", "'StatusCode'", ",", "None", ")", ")", "if", "gateway_responses", ":", "swagger_editor", ".", "add_gateway_responses", "(", "gateway_responses", ")", "# Assign the Swagger back to template", 
"self", ".", "definition_body", "=", "swagger_editor", ".", "swagger" ]
Add Gateway Response configuration to the Swagger file, if necessary
[ "Add", "Gateway", "Response", "configuration", "to", "the", "Swagger", "file", "if", "necessary" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L283-L324
23,314
awslabs/serverless-application-model
samtranslator/model/api/api_generator.py
ApiGenerator._get_permission
def _get_permission(self, authorizer_name, authorizer_lambda_function_arn): """Constructs and returns the Lambda Permission resource allowing the Authorizer to invoke the function. :returns: the permission resource :rtype: model.lambda_.LambdaPermission """ rest_api = ApiGatewayRestApi(self.logical_id, depends_on=self.depends_on, attributes=self.resource_attributes) api_id = rest_api.get_runtime_attr('rest_api_id') partition = ArnGenerator.get_partition_name() resource = '${__ApiId__}/authorizers/*' source_arn = fnSub(ArnGenerator.generate_arn(partition=partition, service='execute-api', resource=resource), {"__ApiId__": api_id}) lambda_permission = LambdaPermission(self.logical_id + authorizer_name + 'AuthorizerPermission', attributes=self.passthrough_resource_attributes) lambda_permission.Action = 'lambda:invokeFunction' lambda_permission.FunctionName = authorizer_lambda_function_arn lambda_permission.Principal = 'apigateway.amazonaws.com' lambda_permission.SourceArn = source_arn return lambda_permission
python
def _get_permission(self, authorizer_name, authorizer_lambda_function_arn): """Constructs and returns the Lambda Permission resource allowing the Authorizer to invoke the function. :returns: the permission resource :rtype: model.lambda_.LambdaPermission """ rest_api = ApiGatewayRestApi(self.logical_id, depends_on=self.depends_on, attributes=self.resource_attributes) api_id = rest_api.get_runtime_attr('rest_api_id') partition = ArnGenerator.get_partition_name() resource = '${__ApiId__}/authorizers/*' source_arn = fnSub(ArnGenerator.generate_arn(partition=partition, service='execute-api', resource=resource), {"__ApiId__": api_id}) lambda_permission = LambdaPermission(self.logical_id + authorizer_name + 'AuthorizerPermission', attributes=self.passthrough_resource_attributes) lambda_permission.Action = 'lambda:invokeFunction' lambda_permission.FunctionName = authorizer_lambda_function_arn lambda_permission.Principal = 'apigateway.amazonaws.com' lambda_permission.SourceArn = source_arn return lambda_permission
[ "def", "_get_permission", "(", "self", ",", "authorizer_name", ",", "authorizer_lambda_function_arn", ")", ":", "rest_api", "=", "ApiGatewayRestApi", "(", "self", ".", "logical_id", ",", "depends_on", "=", "self", ".", "depends_on", ",", "attributes", "=", "self", ".", "resource_attributes", ")", "api_id", "=", "rest_api", ".", "get_runtime_attr", "(", "'rest_api_id'", ")", "partition", "=", "ArnGenerator", ".", "get_partition_name", "(", ")", "resource", "=", "'${__ApiId__}/authorizers/*'", "source_arn", "=", "fnSub", "(", "ArnGenerator", ".", "generate_arn", "(", "partition", "=", "partition", ",", "service", "=", "'execute-api'", ",", "resource", "=", "resource", ")", ",", "{", "\"__ApiId__\"", ":", "api_id", "}", ")", "lambda_permission", "=", "LambdaPermission", "(", "self", ".", "logical_id", "+", "authorizer_name", "+", "'AuthorizerPermission'", ",", "attributes", "=", "self", ".", "passthrough_resource_attributes", ")", "lambda_permission", ".", "Action", "=", "'lambda:invokeFunction'", "lambda_permission", ".", "FunctionName", "=", "authorizer_lambda_function_arn", "lambda_permission", ".", "Principal", "=", "'apigateway.amazonaws.com'", "lambda_permission", ".", "SourceArn", "=", "source_arn", "return", "lambda_permission" ]
Constructs and returns the Lambda Permission resource allowing the Authorizer to invoke the function. :returns: the permission resource :rtype: model.lambda_.LambdaPermission
[ "Constructs", "and", "returns", "the", "Lambda", "Permission", "resource", "allowing", "the", "Authorizer", "to", "invoke", "the", "function", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/api/api_generator.py#L360-L381
23,315
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
to_cloudformation
def to_cloudformation(self, **kwargs): """Returns the Lambda function, role, and event resources to which this SAM Function corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] intrinsics_resolver = kwargs["intrinsics_resolver"] if self.DeadLetterQueue: self._validate_dlq() lambda_function = self._construct_lambda_function() resources.append(lambda_function) lambda_alias = None if self.AutoPublishAlias: alias_name = self._get_resolved_alias_name("AutoPublishAlias", self.AutoPublishAlias, intrinsics_resolver) lambda_version = self._construct_version(lambda_function, intrinsics_resolver=intrinsics_resolver) lambda_alias = self._construct_alias(alias_name, lambda_function, lambda_version) resources.append(lambda_version) resources.append(lambda_alias) if self.DeploymentPreference: self._validate_deployment_preference_and_add_update_policy(kwargs.get('deployment_preference_collection', None), lambda_alias, intrinsics_resolver) managed_policy_map = kwargs.get('managed_policy_map', {}) if not managed_policy_map: raise Exception('Managed policy map is empty, but should not be.') execution_role = None if lambda_function.Role is None: execution_role = self._construct_role(managed_policy_map) lambda_function.Role = execution_role.get_runtime_attr('arn') resources.append(execution_role) try: resources += self._generate_event_resources(lambda_function, execution_role, kwargs['event_resources'], lambda_alias=lambda_alias) except InvalidEventException as e: raise InvalidResourceException(self.logical_id, e.message) return resources
python
def to_cloudformation(self, **kwargs): """Returns the Lambda function, role, and event resources to which this SAM Function corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] intrinsics_resolver = kwargs["intrinsics_resolver"] if self.DeadLetterQueue: self._validate_dlq() lambda_function = self._construct_lambda_function() resources.append(lambda_function) lambda_alias = None if self.AutoPublishAlias: alias_name = self._get_resolved_alias_name("AutoPublishAlias", self.AutoPublishAlias, intrinsics_resolver) lambda_version = self._construct_version(lambda_function, intrinsics_resolver=intrinsics_resolver) lambda_alias = self._construct_alias(alias_name, lambda_function, lambda_version) resources.append(lambda_version) resources.append(lambda_alias) if self.DeploymentPreference: self._validate_deployment_preference_and_add_update_policy(kwargs.get('deployment_preference_collection', None), lambda_alias, intrinsics_resolver) managed_policy_map = kwargs.get('managed_policy_map', {}) if not managed_policy_map: raise Exception('Managed policy map is empty, but should not be.') execution_role = None if lambda_function.Role is None: execution_role = self._construct_role(managed_policy_map) lambda_function.Role = execution_role.get_runtime_attr('arn') resources.append(execution_role) try: resources += self._generate_event_resources(lambda_function, execution_role, kwargs['event_resources'], lambda_alias=lambda_alias) except InvalidEventException as e: raise InvalidResourceException(self.logical_id, e.message) return resources
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "resources", "=", "[", "]", "intrinsics_resolver", "=", "kwargs", "[", "\"intrinsics_resolver\"", "]", "if", "self", ".", "DeadLetterQueue", ":", "self", ".", "_validate_dlq", "(", ")", "lambda_function", "=", "self", ".", "_construct_lambda_function", "(", ")", "resources", ".", "append", "(", "lambda_function", ")", "lambda_alias", "=", "None", "if", "self", ".", "AutoPublishAlias", ":", "alias_name", "=", "self", ".", "_get_resolved_alias_name", "(", "\"AutoPublishAlias\"", ",", "self", ".", "AutoPublishAlias", ",", "intrinsics_resolver", ")", "lambda_version", "=", "self", ".", "_construct_version", "(", "lambda_function", ",", "intrinsics_resolver", "=", "intrinsics_resolver", ")", "lambda_alias", "=", "self", ".", "_construct_alias", "(", "alias_name", ",", "lambda_function", ",", "lambda_version", ")", "resources", ".", "append", "(", "lambda_version", ")", "resources", ".", "append", "(", "lambda_alias", ")", "if", "self", ".", "DeploymentPreference", ":", "self", ".", "_validate_deployment_preference_and_add_update_policy", "(", "kwargs", ".", "get", "(", "'deployment_preference_collection'", ",", "None", ")", ",", "lambda_alias", ",", "intrinsics_resolver", ")", "managed_policy_map", "=", "kwargs", ".", "get", "(", "'managed_policy_map'", ",", "{", "}", ")", "if", "not", "managed_policy_map", ":", "raise", "Exception", "(", "'Managed policy map is empty, but should not be.'", ")", "execution_role", "=", "None", "if", "lambda_function", ".", "Role", "is", "None", ":", "execution_role", "=", "self", ".", "_construct_role", "(", "managed_policy_map", ")", "lambda_function", ".", "Role", "=", "execution_role", ".", "get_runtime_attr", "(", "'arn'", ")", "resources", ".", "append", "(", "execution_role", ")", "try", ":", "resources", "+=", "self", ".", "_generate_event_resources", "(", "lambda_function", ",", "execution_role", ",", "kwargs", "[", "'event_resources'", "]", ",", 
"lambda_alias", "=", "lambda_alias", ")", "except", "InvalidEventException", "as", "e", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "e", ".", "message", ")", "return", "resources" ]
Returns the Lambda function, role, and event resources to which this SAM Function corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list
[ "Returns", "the", "Lambda", "function", "role", "and", "event", "resources", "to", "which", "this", "SAM", "Function", "corresponds", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L80-L126
23,316
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
_construct_role
def _construct_role(self, managed_policy_map): """Constructs a Lambda execution role based on this SAM function's Policies property. :returns: the generated IAM Role :rtype: model.iam.IAMRole """ execution_role = IAMRole(self.logical_id + 'Role', attributes=self.get_passthrough_resource_attributes()) execution_role.AssumeRolePolicyDocument = IAMRolePolicies.lambda_assume_role_policy() managed_policy_arns = [ArnGenerator.generate_aws_managed_policy_arn('service-role/AWSLambdaBasicExecutionRole')] if self.Tracing: managed_policy_arns.append(ArnGenerator.generate_aws_managed_policy_arn('AWSXrayWriteOnlyAccess')) function_policies = FunctionPolicies({"Policies": self.Policies}, # No support for policy templates in the "core" policy_template_processor=None) policy_documents = [] if self.DeadLetterQueue: policy_documents.append(IAMRolePolicies.dead_letter_queue_policy( self.dead_letter_queue_policy_actions[self.DeadLetterQueue['Type']], self.DeadLetterQueue['TargetArn'])) for index, policy_entry in enumerate(function_policies.get()): if policy_entry.type is PolicyTypes.POLICY_STATEMENT: policy_documents.append({ 'PolicyName': execution_role.logical_id + 'Policy' + str(index), 'PolicyDocument': policy_entry.data }) elif policy_entry.type is PolicyTypes.MANAGED_POLICY: # There are three options: # Managed Policy Name (string): Try to convert to Managed Policy ARN # Managed Policy Arn (string): Insert it directly into the list # Intrinsic Function (dict): Insert it directly into the list # # When you insert into managed_policy_arns list, de-dupe to prevent same ARN from showing up twice # policy_arn = policy_entry.data if isinstance(policy_entry.data, string_types) and policy_entry.data in managed_policy_map: policy_arn = managed_policy_map[policy_entry.data] # De-Duplicate managed policy arns before inserting. 
Mainly useful # when customer specifies a managed policy which is already inserted # by SAM, such as AWSLambdaBasicExecutionRole if policy_arn not in managed_policy_arns: managed_policy_arns.append(policy_arn) else: # Policy Templates are not supported here in the "core" raise InvalidResourceException( self.logical_id, "Policy at index {} in the 'Policies' property is not valid".format(index)) execution_role.ManagedPolicyArns = list(managed_policy_arns) execution_role.Policies = policy_documents or None execution_role.PermissionsBoundary = self.PermissionsBoundary return execution_role
python
def _construct_role(self, managed_policy_map): """Constructs a Lambda execution role based on this SAM function's Policies property. :returns: the generated IAM Role :rtype: model.iam.IAMRole """ execution_role = IAMRole(self.logical_id + 'Role', attributes=self.get_passthrough_resource_attributes()) execution_role.AssumeRolePolicyDocument = IAMRolePolicies.lambda_assume_role_policy() managed_policy_arns = [ArnGenerator.generate_aws_managed_policy_arn('service-role/AWSLambdaBasicExecutionRole')] if self.Tracing: managed_policy_arns.append(ArnGenerator.generate_aws_managed_policy_arn('AWSXrayWriteOnlyAccess')) function_policies = FunctionPolicies({"Policies": self.Policies}, # No support for policy templates in the "core" policy_template_processor=None) policy_documents = [] if self.DeadLetterQueue: policy_documents.append(IAMRolePolicies.dead_letter_queue_policy( self.dead_letter_queue_policy_actions[self.DeadLetterQueue['Type']], self.DeadLetterQueue['TargetArn'])) for index, policy_entry in enumerate(function_policies.get()): if policy_entry.type is PolicyTypes.POLICY_STATEMENT: policy_documents.append({ 'PolicyName': execution_role.logical_id + 'Policy' + str(index), 'PolicyDocument': policy_entry.data }) elif policy_entry.type is PolicyTypes.MANAGED_POLICY: # There are three options: # Managed Policy Name (string): Try to convert to Managed Policy ARN # Managed Policy Arn (string): Insert it directly into the list # Intrinsic Function (dict): Insert it directly into the list # # When you insert into managed_policy_arns list, de-dupe to prevent same ARN from showing up twice # policy_arn = policy_entry.data if isinstance(policy_entry.data, string_types) and policy_entry.data in managed_policy_map: policy_arn = managed_policy_map[policy_entry.data] # De-Duplicate managed policy arns before inserting. 
Mainly useful # when customer specifies a managed policy which is already inserted # by SAM, such as AWSLambdaBasicExecutionRole if policy_arn not in managed_policy_arns: managed_policy_arns.append(policy_arn) else: # Policy Templates are not supported here in the "core" raise InvalidResourceException( self.logical_id, "Policy at index {} in the 'Policies' property is not valid".format(index)) execution_role.ManagedPolicyArns = list(managed_policy_arns) execution_role.Policies = policy_documents or None execution_role.PermissionsBoundary = self.PermissionsBoundary return execution_role
[ "def", "_construct_role", "(", "self", ",", "managed_policy_map", ")", ":", "execution_role", "=", "IAMRole", "(", "self", ".", "logical_id", "+", "'Role'", ",", "attributes", "=", "self", ".", "get_passthrough_resource_attributes", "(", ")", ")", "execution_role", ".", "AssumeRolePolicyDocument", "=", "IAMRolePolicies", ".", "lambda_assume_role_policy", "(", ")", "managed_policy_arns", "=", "[", "ArnGenerator", ".", "generate_aws_managed_policy_arn", "(", "'service-role/AWSLambdaBasicExecutionRole'", ")", "]", "if", "self", ".", "Tracing", ":", "managed_policy_arns", ".", "append", "(", "ArnGenerator", ".", "generate_aws_managed_policy_arn", "(", "'AWSXrayWriteOnlyAccess'", ")", ")", "function_policies", "=", "FunctionPolicies", "(", "{", "\"Policies\"", ":", "self", ".", "Policies", "}", ",", "# No support for policy templates in the \"core\"", "policy_template_processor", "=", "None", ")", "policy_documents", "=", "[", "]", "if", "self", ".", "DeadLetterQueue", ":", "policy_documents", ".", "append", "(", "IAMRolePolicies", ".", "dead_letter_queue_policy", "(", "self", ".", "dead_letter_queue_policy_actions", "[", "self", ".", "DeadLetterQueue", "[", "'Type'", "]", "]", ",", "self", ".", "DeadLetterQueue", "[", "'TargetArn'", "]", ")", ")", "for", "index", ",", "policy_entry", "in", "enumerate", "(", "function_policies", ".", "get", "(", ")", ")", ":", "if", "policy_entry", ".", "type", "is", "PolicyTypes", ".", "POLICY_STATEMENT", ":", "policy_documents", ".", "append", "(", "{", "'PolicyName'", ":", "execution_role", ".", "logical_id", "+", "'Policy'", "+", "str", "(", "index", ")", ",", "'PolicyDocument'", ":", "policy_entry", ".", "data", "}", ")", "elif", "policy_entry", ".", "type", "is", "PolicyTypes", ".", "MANAGED_POLICY", ":", "# There are three options:", "# Managed Policy Name (string): Try to convert to Managed Policy ARN", "# Managed Policy Arn (string): Insert it directly into the list", "# Intrinsic Function (dict): Insert it directly into the 
list", "#", "# When you insert into managed_policy_arns list, de-dupe to prevent same ARN from showing up twice", "#", "policy_arn", "=", "policy_entry", ".", "data", "if", "isinstance", "(", "policy_entry", ".", "data", ",", "string_types", ")", "and", "policy_entry", ".", "data", "in", "managed_policy_map", ":", "policy_arn", "=", "managed_policy_map", "[", "policy_entry", ".", "data", "]", "# De-Duplicate managed policy arns before inserting. Mainly useful", "# when customer specifies a managed policy which is already inserted", "# by SAM, such as AWSLambdaBasicExecutionRole", "if", "policy_arn", "not", "in", "managed_policy_arns", ":", "managed_policy_arns", ".", "append", "(", "policy_arn", ")", "else", ":", "# Policy Templates are not supported here in the \"core\"", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Policy at index {} in the 'Policies' property is not valid\"", ".", "format", "(", "index", ")", ")", "execution_role", ".", "ManagedPolicyArns", "=", "list", "(", "managed_policy_arns", ")", "execution_role", ".", "Policies", "=", "policy_documents", "or", "None", "execution_role", ".", "PermissionsBoundary", "=", "self", ".", "PermissionsBoundary", "return", "execution_role" ]
Constructs a Lambda execution role based on this SAM function's Policies property. :returns: the generated IAM Role :rtype: model.iam.IAMRole
[ "Constructs", "a", "Lambda", "execution", "role", "based", "on", "this", "SAM", "function", "s", "Policies", "property", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L186-L246
23,317
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
_generate_event_resources
def _generate_event_resources(self, lambda_function, execution_role, event_resources, lambda_alias=None): """Generates and returns the resources associated with this function's events. :param model.lambda_.LambdaFunction lambda_function: generated Lambda function :param iam.IAMRole execution_role: generated Lambda execution role :param implicit_api: Global Implicit API resource where the implicit APIs get attached to, if necessary :param implicit_api_stage: Global implicit API stage resource where implicit APIs get attached to, if necessary :param event_resources: All the event sources associated with this Lambda function :param model.lambda_.LambdaAlias lambda_alias: Optional Lambda Alias resource if we want to connect the event sources to this alias :returns: a list containing the function's event resources :rtype: list """ resources = [] if self.Events: for logical_id, event_dict in self.Events.items(): try: eventsource = self.event_resolver.resolve_resource_type(event_dict).from_dict( lambda_function.logical_id + logical_id, event_dict, logical_id) except TypeError as e: raise InvalidEventException(logical_id, "{}".format(e)) kwargs = { # When Alias is provided, connect all event sources to the alias and *not* the function 'function': lambda_alias or lambda_function, 'role': execution_role, } for name, resource in event_resources[logical_id].items(): kwargs[name] = resource resources += eventsource.to_cloudformation(**kwargs) return resources
python
def _generate_event_resources(self, lambda_function, execution_role, event_resources, lambda_alias=None): """Generates and returns the resources associated with this function's events. :param model.lambda_.LambdaFunction lambda_function: generated Lambda function :param iam.IAMRole execution_role: generated Lambda execution role :param implicit_api: Global Implicit API resource where the implicit APIs get attached to, if necessary :param implicit_api_stage: Global implicit API stage resource where implicit APIs get attached to, if necessary :param event_resources: All the event sources associated with this Lambda function :param model.lambda_.LambdaAlias lambda_alias: Optional Lambda Alias resource if we want to connect the event sources to this alias :returns: a list containing the function's event resources :rtype: list """ resources = [] if self.Events: for logical_id, event_dict in self.Events.items(): try: eventsource = self.event_resolver.resolve_resource_type(event_dict).from_dict( lambda_function.logical_id + logical_id, event_dict, logical_id) except TypeError as e: raise InvalidEventException(logical_id, "{}".format(e)) kwargs = { # When Alias is provided, connect all event sources to the alias and *not* the function 'function': lambda_alias or lambda_function, 'role': execution_role, } for name, resource in event_resources[logical_id].items(): kwargs[name] = resource resources += eventsource.to_cloudformation(**kwargs) return resources
[ "def", "_generate_event_resources", "(", "self", ",", "lambda_function", ",", "execution_role", ",", "event_resources", ",", "lambda_alias", "=", "None", ")", ":", "resources", "=", "[", "]", "if", "self", ".", "Events", ":", "for", "logical_id", ",", "event_dict", "in", "self", ".", "Events", ".", "items", "(", ")", ":", "try", ":", "eventsource", "=", "self", ".", "event_resolver", ".", "resolve_resource_type", "(", "event_dict", ")", ".", "from_dict", "(", "lambda_function", ".", "logical_id", "+", "logical_id", ",", "event_dict", ",", "logical_id", ")", "except", "TypeError", "as", "e", ":", "raise", "InvalidEventException", "(", "logical_id", ",", "\"{}\"", ".", "format", "(", "e", ")", ")", "kwargs", "=", "{", "# When Alias is provided, connect all event sources to the alias and *not* the function", "'function'", ":", "lambda_alias", "or", "lambda_function", ",", "'role'", ":", "execution_role", ",", "}", "for", "name", ",", "resource", "in", "event_resources", "[", "logical_id", "]", ".", "items", "(", ")", ":", "kwargs", "[", "name", "]", "=", "resource", "resources", "+=", "eventsource", ".", "to_cloudformation", "(", "*", "*", "kwargs", ")", "return", "resources" ]
Generates and returns the resources associated with this function's events. :param model.lambda_.LambdaFunction lambda_function: generated Lambda function :param iam.IAMRole execution_role: generated Lambda execution role :param implicit_api: Global Implicit API resource where the implicit APIs get attached to, if necessary :param implicit_api_stage: Global implicit API stage resource where implicit APIs get attached to, if necessary :param event_resources: All the event sources associated with this Lambda function :param model.lambda_.LambdaAlias lambda_alias: Optional Lambda Alias resource if we want to connect the event sources to this alias :returns: a list containing the function's event resources :rtype: list
[ "Generates", "and", "returns", "the", "resources", "associated", "with", "this", "function", "s", "events", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L276-L309
23,318
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
_construct_version
def _construct_version(self, function, intrinsics_resolver): """Constructs a Lambda Version resource that will be auto-published when CodeUri of the function changes. Old versions will not be deleted without a direct reference from the CloudFormation template. :param model.lambda_.LambdaFunction function: Lambda function object that is being connected to a version :param model.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Class that can help resolve references to parameters present in CodeUri. It is a common usecase to set S3Key of Code to be a template parameter. Need to resolve the values otherwise we will never detect a change in Code dict :return: Lambda function Version resource """ code_dict = function.Code if not code_dict: raise ValueError("Lambda function code must be a valid non-empty dictionary") if not intrinsics_resolver: raise ValueError("intrinsics_resolver is required for versions creation") # Resolve references to template parameters before creating hash. This will *not* resolve all intrinsics # because we cannot resolve runtime values like Arn of a resource. For purposes of detecting changes, this # is good enough. Here is why: # # When using intrinsic functions there are two cases when has must change: # - Value of the template parameter changes # - (or) LogicalId of a referenced resource changes ie. !GetAtt NewResource.Arn # # Later case will already change the hash because some value in the Code dictionary changes. We handle the # first case by resolving references to template parameters. It is okay even if these references are # present inside another intrinsic such as !Join. The resolver will replace the reference with the parameter's # value and keep all other parts of !Join identical. This will still trigger a change in the hash. code_dict = intrinsics_resolver.resolve_parameter_refs(code_dict) # Construct the LogicalID of Lambda version by appending 10 characters of SHA of CodeUri. 
This is necessary # to trigger creation of a new version every time code location changes. Since logicalId changes, CloudFormation # will drop the old version and create a new one for us. We set a DeletionPolicy on the version resource to # prevent CloudFormation from actually deleting the underlying version resource # # SHA Collisions: For purposes of triggering a new update, we are concerned about just the difference previous # and next hashes. The chances that two subsequent hashes collide is fairly low. prefix = "{id}Version".format(id=self.logical_id) logical_id = logical_id_generator.LogicalIdGenerator(prefix, code_dict).gen() attributes = self.get_passthrough_resource_attributes() if attributes is None: attributes = {} attributes["DeletionPolicy"] = "Retain" lambda_version = LambdaVersion(logical_id=logical_id, attributes=attributes) lambda_version.FunctionName = function.get_runtime_attr('name') lambda_version.Description = self.VersionDescription return lambda_version
python
def _construct_version(self, function, intrinsics_resolver): """Constructs a Lambda Version resource that will be auto-published when CodeUri of the function changes. Old versions will not be deleted without a direct reference from the CloudFormation template. :param model.lambda_.LambdaFunction function: Lambda function object that is being connected to a version :param model.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Class that can help resolve references to parameters present in CodeUri. It is a common usecase to set S3Key of Code to be a template parameter. Need to resolve the values otherwise we will never detect a change in Code dict :return: Lambda function Version resource """ code_dict = function.Code if not code_dict: raise ValueError("Lambda function code must be a valid non-empty dictionary") if not intrinsics_resolver: raise ValueError("intrinsics_resolver is required for versions creation") # Resolve references to template parameters before creating hash. This will *not* resolve all intrinsics # because we cannot resolve runtime values like Arn of a resource. For purposes of detecting changes, this # is good enough. Here is why: # # When using intrinsic functions there are two cases when has must change: # - Value of the template parameter changes # - (or) LogicalId of a referenced resource changes ie. !GetAtt NewResource.Arn # # Later case will already change the hash because some value in the Code dictionary changes. We handle the # first case by resolving references to template parameters. It is okay even if these references are # present inside another intrinsic such as !Join. The resolver will replace the reference with the parameter's # value and keep all other parts of !Join identical. This will still trigger a change in the hash. code_dict = intrinsics_resolver.resolve_parameter_refs(code_dict) # Construct the LogicalID of Lambda version by appending 10 characters of SHA of CodeUri. 
This is necessary # to trigger creation of a new version every time code location changes. Since logicalId changes, CloudFormation # will drop the old version and create a new one for us. We set a DeletionPolicy on the version resource to # prevent CloudFormation from actually deleting the underlying version resource # # SHA Collisions: For purposes of triggering a new update, we are concerned about just the difference previous # and next hashes. The chances that two subsequent hashes collide is fairly low. prefix = "{id}Version".format(id=self.logical_id) logical_id = logical_id_generator.LogicalIdGenerator(prefix, code_dict).gen() attributes = self.get_passthrough_resource_attributes() if attributes is None: attributes = {} attributes["DeletionPolicy"] = "Retain" lambda_version = LambdaVersion(logical_id=logical_id, attributes=attributes) lambda_version.FunctionName = function.get_runtime_attr('name') lambda_version.Description = self.VersionDescription return lambda_version
[ "def", "_construct_version", "(", "self", ",", "function", ",", "intrinsics_resolver", ")", ":", "code_dict", "=", "function", ".", "Code", "if", "not", "code_dict", ":", "raise", "ValueError", "(", "\"Lambda function code must be a valid non-empty dictionary\"", ")", "if", "not", "intrinsics_resolver", ":", "raise", "ValueError", "(", "\"intrinsics_resolver is required for versions creation\"", ")", "# Resolve references to template parameters before creating hash. This will *not* resolve all intrinsics", "# because we cannot resolve runtime values like Arn of a resource. For purposes of detecting changes, this", "# is good enough. Here is why:", "#", "# When using intrinsic functions there are two cases when has must change:", "# - Value of the template parameter changes", "# - (or) LogicalId of a referenced resource changes ie. !GetAtt NewResource.Arn", "#", "# Later case will already change the hash because some value in the Code dictionary changes. We handle the", "# first case by resolving references to template parameters. It is okay even if these references are", "# present inside another intrinsic such as !Join. The resolver will replace the reference with the parameter's", "# value and keep all other parts of !Join identical. This will still trigger a change in the hash.", "code_dict", "=", "intrinsics_resolver", ".", "resolve_parameter_refs", "(", "code_dict", ")", "# Construct the LogicalID of Lambda version by appending 10 characters of SHA of CodeUri. This is necessary", "# to trigger creation of a new version every time code location changes. Since logicalId changes, CloudFormation", "# will drop the old version and create a new one for us. We set a DeletionPolicy on the version resource to", "# prevent CloudFormation from actually deleting the underlying version resource", "#", "# SHA Collisions: For purposes of triggering a new update, we are concerned about just the difference previous", "# and next hashes. 
The chances that two subsequent hashes collide is fairly low.", "prefix", "=", "\"{id}Version\"", ".", "format", "(", "id", "=", "self", ".", "logical_id", ")", "logical_id", "=", "logical_id_generator", ".", "LogicalIdGenerator", "(", "prefix", ",", "code_dict", ")", ".", "gen", "(", ")", "attributes", "=", "self", ".", "get_passthrough_resource_attributes", "(", ")", "if", "attributes", "is", "None", ":", "attributes", "=", "{", "}", "attributes", "[", "\"DeletionPolicy\"", "]", "=", "\"Retain\"", "lambda_version", "=", "LambdaVersion", "(", "logical_id", "=", "logical_id", ",", "attributes", "=", "attributes", ")", "lambda_version", ".", "FunctionName", "=", "function", ".", "get_runtime_attr", "(", "'name'", ")", "lambda_version", ".", "Description", "=", "self", ".", "VersionDescription", "return", "lambda_version" ]
Constructs a Lambda Version resource that will be auto-published when CodeUri of the function changes. Old versions will not be deleted without a direct reference from the CloudFormation template. :param model.lambda_.LambdaFunction function: Lambda function object that is being connected to a version :param model.intrinsics.resolver.IntrinsicsResolver intrinsics_resolver: Class that can help resolve references to parameters present in CodeUri. It is a common usecase to set S3Key of Code to be a template parameter. Need to resolve the values otherwise we will never detect a change in Code dict :return: Lambda function Version resource
[ "Constructs", "a", "Lambda", "Version", "resource", "that", "will", "be", "auto", "-", "published", "when", "CodeUri", "of", "the", "function", "changes", ".", "Old", "versions", "will", "not", "be", "deleted", "without", "a", "direct", "reference", "from", "the", "CloudFormation", "template", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L321-L371
23,319
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
_construct_alias
def _construct_alias(self, name, function, version): """Constructs a Lambda Alias for the given function and pointing to the given version :param string name: Name of the alias :param model.lambda_.LambdaFunction function: Lambda function object to associate the alias with :param model.lambda_.LambdaVersion version: Lambda version object to associate the alias with :return: Lambda alias object :rtype model.lambda_.LambdaAlias """ if not name: raise InvalidResourceException(self.logical_id, "Alias name is required to create an alias") logical_id = "{id}Alias{suffix}".format(id=function.logical_id, suffix=name) alias = LambdaAlias(logical_id=logical_id, attributes=self.get_passthrough_resource_attributes()) alias.Name = name alias.FunctionName = function.get_runtime_attr('name') alias.FunctionVersion = version.get_runtime_attr("version") return alias
python
def _construct_alias(self, name, function, version): """Constructs a Lambda Alias for the given function and pointing to the given version :param string name: Name of the alias :param model.lambda_.LambdaFunction function: Lambda function object to associate the alias with :param model.lambda_.LambdaVersion version: Lambda version object to associate the alias with :return: Lambda alias object :rtype model.lambda_.LambdaAlias """ if not name: raise InvalidResourceException(self.logical_id, "Alias name is required to create an alias") logical_id = "{id}Alias{suffix}".format(id=function.logical_id, suffix=name) alias = LambdaAlias(logical_id=logical_id, attributes=self.get_passthrough_resource_attributes()) alias.Name = name alias.FunctionName = function.get_runtime_attr('name') alias.FunctionVersion = version.get_runtime_attr("version") return alias
[ "def", "_construct_alias", "(", "self", ",", "name", ",", "function", ",", "version", ")", ":", "if", "not", "name", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"Alias name is required to create an alias\"", ")", "logical_id", "=", "\"{id}Alias{suffix}\"", ".", "format", "(", "id", "=", "function", ".", "logical_id", ",", "suffix", "=", "name", ")", "alias", "=", "LambdaAlias", "(", "logical_id", "=", "logical_id", ",", "attributes", "=", "self", ".", "get_passthrough_resource_attributes", "(", ")", ")", "alias", ".", "Name", "=", "name", "alias", ".", "FunctionName", "=", "function", ".", "get_runtime_attr", "(", "'name'", ")", "alias", ".", "FunctionVersion", "=", "version", ".", "get_runtime_attr", "(", "\"version\"", ")", "return", "alias" ]
Constructs a Lambda Alias for the given function and pointing to the given version :param string name: Name of the alias :param model.lambda_.LambdaFunction function: Lambda function object to associate the alias with :param model.lambda_.LambdaVersion version: Lambda version object to associate the alias with :return: Lambda alias object :rtype model.lambda_.LambdaAlias
[ "Constructs", "a", "Lambda", "Alias", "for", "the", "given", "function", "and", "pointing", "to", "the", "given", "version" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L373-L392
23,320
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
SamApi.to_cloudformation
def to_cloudformation(self, **kwargs): """Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] api_generator = ApiGenerator(self.logical_id, self.CacheClusterEnabled, self.CacheClusterSize, self.Variables, self.depends_on, self.DefinitionBody, self.DefinitionUri, self.Name, self.StageName, endpoint_configuration=self.EndpointConfiguration, method_settings=self.MethodSettings, binary_media=self.BinaryMediaTypes, minimum_compression_size=self.MinimumCompressionSize, cors=self.Cors, auth=self.Auth, gateway_responses=self.GatewayResponses, access_log_setting=self.AccessLogSetting, canary_setting=self.CanarySetting, tracing_enabled=self.TracingEnabled, resource_attributes=self.resource_attributes, passthrough_resource_attributes=self.get_passthrough_resource_attributes()) rest_api, deployment, stage, permissions = api_generator.to_cloudformation() resources.extend([rest_api, deployment, stage]) resources.extend(permissions) return resources
python
def to_cloudformation(self, **kwargs): """Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] api_generator = ApiGenerator(self.logical_id, self.CacheClusterEnabled, self.CacheClusterSize, self.Variables, self.depends_on, self.DefinitionBody, self.DefinitionUri, self.Name, self.StageName, endpoint_configuration=self.EndpointConfiguration, method_settings=self.MethodSettings, binary_media=self.BinaryMediaTypes, minimum_compression_size=self.MinimumCompressionSize, cors=self.Cors, auth=self.Auth, gateway_responses=self.GatewayResponses, access_log_setting=self.AccessLogSetting, canary_setting=self.CanarySetting, tracing_enabled=self.TracingEnabled, resource_attributes=self.resource_attributes, passthrough_resource_attributes=self.get_passthrough_resource_attributes()) rest_api, deployment, stage, permissions = api_generator.to_cloudformation() resources.extend([rest_api, deployment, stage]) resources.extend(permissions) return resources
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "resources", "=", "[", "]", "api_generator", "=", "ApiGenerator", "(", "self", ".", "logical_id", ",", "self", ".", "CacheClusterEnabled", ",", "self", ".", "CacheClusterSize", ",", "self", ".", "Variables", ",", "self", ".", "depends_on", ",", "self", ".", "DefinitionBody", ",", "self", ".", "DefinitionUri", ",", "self", ".", "Name", ",", "self", ".", "StageName", ",", "endpoint_configuration", "=", "self", ".", "EndpointConfiguration", ",", "method_settings", "=", "self", ".", "MethodSettings", ",", "binary_media", "=", "self", ".", "BinaryMediaTypes", ",", "minimum_compression_size", "=", "self", ".", "MinimumCompressionSize", ",", "cors", "=", "self", ".", "Cors", ",", "auth", "=", "self", ".", "Auth", ",", "gateway_responses", "=", "self", ".", "GatewayResponses", ",", "access_log_setting", "=", "self", ".", "AccessLogSetting", ",", "canary_setting", "=", "self", ".", "CanarySetting", ",", "tracing_enabled", "=", "self", ".", "TracingEnabled", ",", "resource_attributes", "=", "self", ".", "resource_attributes", ",", "passthrough_resource_attributes", "=", "self", ".", "get_passthrough_resource_attributes", "(", ")", ")", "rest_api", ",", "deployment", ",", "stage", ",", "permissions", "=", "api_generator", ".", "to_cloudformation", "(", ")", "resources", ".", "extend", "(", "[", "rest_api", ",", "deployment", ",", "stage", "]", ")", "resources", ".", "extend", "(", "permissions", ")", "return", "resources" ]
Returns the API Gateway RestApi, Deployment, and Stage to which this SAM Api corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list
[ "Returns", "the", "API", "Gateway", "RestApi", "Deployment", "and", "Stage", "to", "which", "this", "SAM", "Api", "corresponds", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L456-L493
23,321
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
SamApplication._get_application_tags
def _get_application_tags(self): """Adds tags to the stack if this resource is using the serverless app repo """ application_tags = {} if isinstance(self.Location, dict): if (self.APPLICATION_ID_KEY in self.Location.keys() and self.Location[self.APPLICATION_ID_KEY] is not None): application_tags[self._SAR_APP_KEY] = self.Location[self.APPLICATION_ID_KEY] if (self.SEMANTIC_VERSION_KEY in self.Location.keys() and self.Location[self.SEMANTIC_VERSION_KEY] is not None): application_tags[self._SAR_SEMVER_KEY] = self.Location[self.SEMANTIC_VERSION_KEY] return application_tags
python
def _get_application_tags(self): """Adds tags to the stack if this resource is using the serverless app repo """ application_tags = {} if isinstance(self.Location, dict): if (self.APPLICATION_ID_KEY in self.Location.keys() and self.Location[self.APPLICATION_ID_KEY] is not None): application_tags[self._SAR_APP_KEY] = self.Location[self.APPLICATION_ID_KEY] if (self.SEMANTIC_VERSION_KEY in self.Location.keys() and self.Location[self.SEMANTIC_VERSION_KEY] is not None): application_tags[self._SAR_SEMVER_KEY] = self.Location[self.SEMANTIC_VERSION_KEY] return application_tags
[ "def", "_get_application_tags", "(", "self", ")", ":", "application_tags", "=", "{", "}", "if", "isinstance", "(", "self", ".", "Location", ",", "dict", ")", ":", "if", "(", "self", ".", "APPLICATION_ID_KEY", "in", "self", ".", "Location", ".", "keys", "(", ")", "and", "self", ".", "Location", "[", "self", ".", "APPLICATION_ID_KEY", "]", "is", "not", "None", ")", ":", "application_tags", "[", "self", ".", "_SAR_APP_KEY", "]", "=", "self", ".", "Location", "[", "self", ".", "APPLICATION_ID_KEY", "]", "if", "(", "self", ".", "SEMANTIC_VERSION_KEY", "in", "self", ".", "Location", ".", "keys", "(", ")", "and", "self", ".", "Location", "[", "self", ".", "SEMANTIC_VERSION_KEY", "]", "is", "not", "None", ")", ":", "application_tags", "[", "self", ".", "_SAR_SEMVER_KEY", "]", "=", "self", ".", "Location", "[", "self", ".", "SEMANTIC_VERSION_KEY", "]", "return", "application_tags" ]
Adds tags to the stack if this resource is using the serverless app repo
[ "Adds", "tags", "to", "the", "stack", "if", "this", "resource", "is", "using", "the", "serverless", "app", "repo" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L597-L608
23,322
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
SamLayerVersion.to_cloudformation
def to_cloudformation(self, **kwargs): """Returns the Lambda layer to which this SAM Layer corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] # Append any CFN resources: intrinsics_resolver = kwargs["intrinsics_resolver"] resources.append(self._construct_lambda_layer(intrinsics_resolver)) return resources
python
def to_cloudformation(self, **kwargs): """Returns the Lambda layer to which this SAM Layer corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list """ resources = [] # Append any CFN resources: intrinsics_resolver = kwargs["intrinsics_resolver"] resources.append(self._construct_lambda_layer(intrinsics_resolver)) return resources
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "resources", "=", "[", "]", "# Append any CFN resources:", "intrinsics_resolver", "=", "kwargs", "[", "\"intrinsics_resolver\"", "]", "resources", ".", "append", "(", "self", ".", "_construct_lambda_layer", "(", "intrinsics_resolver", ")", ")", "return", "resources" ]
Returns the Lambda layer to which this SAM Layer corresponds. :param dict kwargs: already-converted resources that may need to be modified when converting this \ macro to pure CloudFormation :returns: a list of vanilla CloudFormation Resources, to which this Function expands :rtype: list
[ "Returns", "the", "Lambda", "layer", "to", "which", "this", "SAM", "Layer", "corresponds", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L628-L642
23,323
awslabs/serverless-application-model
samtranslator/model/sam_resources.py
SamLayerVersion._get_retention_policy_value
def _get_retention_policy_value(self): """ Sets the deletion policy on this resource. The default is 'Retain'. :return: value for the DeletionPolicy attribute. """ if self.RetentionPolicy is None or self.RetentionPolicy.lower() == self.RETAIN.lower(): return self.RETAIN elif self.RetentionPolicy.lower() == self.DELETE.lower(): return self.DELETE elif self.RetentionPolicy.lower() not in self.retention_policy_options: raise InvalidResourceException(self.logical_id, "'{}' must be one of the following options: {}." .format('RetentionPolicy', [self.RETAIN, self.DELETE]))
python
def _get_retention_policy_value(self): """ Sets the deletion policy on this resource. The default is 'Retain'. :return: value for the DeletionPolicy attribute. """ if self.RetentionPolicy is None or self.RetentionPolicy.lower() == self.RETAIN.lower(): return self.RETAIN elif self.RetentionPolicy.lower() == self.DELETE.lower(): return self.DELETE elif self.RetentionPolicy.lower() not in self.retention_policy_options: raise InvalidResourceException(self.logical_id, "'{}' must be one of the following options: {}." .format('RetentionPolicy', [self.RETAIN, self.DELETE]))
[ "def", "_get_retention_policy_value", "(", "self", ")", ":", "if", "self", ".", "RetentionPolicy", "is", "None", "or", "self", ".", "RetentionPolicy", ".", "lower", "(", ")", "==", "self", ".", "RETAIN", ".", "lower", "(", ")", ":", "return", "self", ".", "RETAIN", "elif", "self", ".", "RetentionPolicy", ".", "lower", "(", ")", "==", "self", ".", "DELETE", ".", "lower", "(", ")", ":", "return", "self", ".", "DELETE", "elif", "self", ".", "RetentionPolicy", ".", "lower", "(", ")", "not", "in", "self", ".", "retention_policy_options", ":", "raise", "InvalidResourceException", "(", "self", ".", "logical_id", ",", "\"'{}' must be one of the following options: {}.\"", ".", "format", "(", "'RetentionPolicy'", ",", "[", "self", ".", "RETAIN", ",", "self", ".", "DELETE", "]", ")", ")" ]
Sets the deletion policy on this resource. The default is 'Retain'. :return: value for the DeletionPolicy attribute.
[ "Sets", "the", "deletion", "policy", "on", "this", "resource", ".", "The", "default", "is", "Retain", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/sam_resources.py#L690-L704
23,324
awslabs/serverless-application-model
examples/apps/lex-order-flowers-python/lambda_function.py
order_flowers
def order_flowers(intent_request): """ Performs dialog management and fulfillment for ordering flowers. Beyond fulfillment, the implementation of this intent demonstrates the use of the elicitSlot dialog action in slot validation and re-prompting. """ flower_type = get_slots(intent_request)["FlowerType"] date = get_slots(intent_request)["PickupDate"] time = get_slots(intent_request)["PickupTime"] source = intent_request['invocationSource'] if source == 'DialogCodeHook': # Perform basic validation on the supplied input slots. # Use the elicitSlot dialog action to re-prompt for the first violation detected. slots = get_slots(intent_request) validation_result = validate_order_flowers(flower_type, date, time) if not validation_result['isValid']: slots[validation_result['violatedSlot']] = None return elicit_slot(intent_request['sessionAttributes'], intent_request['currentIntent']['name'], slots, validation_result['violatedSlot'], validation_result['message']) # Pass the price of the flowers back through session attributes to be used in various prompts defined # on the bot model. output_session_attributes = intent_request['sessionAttributes'] if flower_type is not None: output_session_attributes['Price'] = len(flower_type) * 5 # Elegant pricing model return delegate(output_session_attributes, get_slots(intent_request)) # Order the flowers, and rely on the goodbye message of the bot to define the message to the end user. # In a real bot, this would likely involve a call to a backend service. return close(intent_request['sessionAttributes'], 'Fulfilled', {'contentType': 'PlainText', 'content': 'Thanks, your order for {} has been placed and will be ready for pickup by {} on {}'.format(flower_type, time, date)})
python
def order_flowers(intent_request): """ Performs dialog management and fulfillment for ordering flowers. Beyond fulfillment, the implementation of this intent demonstrates the use of the elicitSlot dialog action in slot validation and re-prompting. """ flower_type = get_slots(intent_request)["FlowerType"] date = get_slots(intent_request)["PickupDate"] time = get_slots(intent_request)["PickupTime"] source = intent_request['invocationSource'] if source == 'DialogCodeHook': # Perform basic validation on the supplied input slots. # Use the elicitSlot dialog action to re-prompt for the first violation detected. slots = get_slots(intent_request) validation_result = validate_order_flowers(flower_type, date, time) if not validation_result['isValid']: slots[validation_result['violatedSlot']] = None return elicit_slot(intent_request['sessionAttributes'], intent_request['currentIntent']['name'], slots, validation_result['violatedSlot'], validation_result['message']) # Pass the price of the flowers back through session attributes to be used in various prompts defined # on the bot model. output_session_attributes = intent_request['sessionAttributes'] if flower_type is not None: output_session_attributes['Price'] = len(flower_type) * 5 # Elegant pricing model return delegate(output_session_attributes, get_slots(intent_request)) # Order the flowers, and rely on the goodbye message of the bot to define the message to the end user. # In a real bot, this would likely involve a call to a backend service. return close(intent_request['sessionAttributes'], 'Fulfilled', {'contentType': 'PlainText', 'content': 'Thanks, your order for {} has been placed and will be ready for pickup by {} on {}'.format(flower_type, time, date)})
[ "def", "order_flowers", "(", "intent_request", ")", ":", "flower_type", "=", "get_slots", "(", "intent_request", ")", "[", "\"FlowerType\"", "]", "date", "=", "get_slots", "(", "intent_request", ")", "[", "\"PickupDate\"", "]", "time", "=", "get_slots", "(", "intent_request", ")", "[", "\"PickupTime\"", "]", "source", "=", "intent_request", "[", "'invocationSource'", "]", "if", "source", "==", "'DialogCodeHook'", ":", "# Perform basic validation on the supplied input slots.", "# Use the elicitSlot dialog action to re-prompt for the first violation detected.", "slots", "=", "get_slots", "(", "intent_request", ")", "validation_result", "=", "validate_order_flowers", "(", "flower_type", ",", "date", ",", "time", ")", "if", "not", "validation_result", "[", "'isValid'", "]", ":", "slots", "[", "validation_result", "[", "'violatedSlot'", "]", "]", "=", "None", "return", "elicit_slot", "(", "intent_request", "[", "'sessionAttributes'", "]", ",", "intent_request", "[", "'currentIntent'", "]", "[", "'name'", "]", ",", "slots", ",", "validation_result", "[", "'violatedSlot'", "]", ",", "validation_result", "[", "'message'", "]", ")", "# Pass the price of the flowers back through session attributes to be used in various prompts defined", "# on the bot model.", "output_session_attributes", "=", "intent_request", "[", "'sessionAttributes'", "]", "if", "flower_type", "is", "not", "None", ":", "output_session_attributes", "[", "'Price'", "]", "=", "len", "(", "flower_type", ")", "*", "5", "# Elegant pricing model", "return", "delegate", "(", "output_session_attributes", ",", "get_slots", "(", "intent_request", ")", ")", "# Order the flowers, and rely on the goodbye message of the bot to define the message to the end user.", "# In a real bot, this would likely involve a call to a backend service.", "return", "close", "(", "intent_request", "[", "'sessionAttributes'", "]", ",", "'Fulfilled'", ",", "{", "'contentType'", ":", "'PlainText'", ",", "'content'", ":", "'Thanks, your 
order for {} has been placed and will be ready for pickup by {} on {}'", ".", "format", "(", "flower_type", ",", "time", ",", "date", ")", "}", ")" ]
Performs dialog management and fulfillment for ordering flowers. Beyond fulfillment, the implementation of this intent demonstrates the use of the elicitSlot dialog action in slot validation and re-prompting.
[ "Performs", "dialog", "management", "and", "fulfillment", "for", "ordering", "flowers", ".", "Beyond", "fulfillment", "the", "implementation", "of", "this", "intent", "demonstrates", "the", "use", "of", "the", "elicitSlot", "dialog", "action", "in", "slot", "validation", "and", "re", "-", "prompting", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/examples/apps/lex-order-flowers-python/lambda_function.py#L119-L158
23,325
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
PushEventSource._construct_permission
def _construct_permission(self, function, source_arn=None, source_account=None, suffix="", event_source_token=None): """Constructs the Lambda Permission resource allowing the source service to invoke the function this event source triggers. :returns: the permission resource :rtype: model.lambda_.LambdaPermission """ lambda_permission = LambdaPermission(self.logical_id + 'Permission' + suffix, attributes=function.get_passthrough_resource_attributes()) try: # Name will not be available for Alias resources function_name_or_arn = function.get_runtime_attr("name") except NotImplementedError: function_name_or_arn = function.get_runtime_attr("arn") lambda_permission.Action = 'lambda:invokeFunction' lambda_permission.FunctionName = function_name_or_arn lambda_permission.Principal = self.principal lambda_permission.SourceArn = source_arn lambda_permission.SourceAccount = source_account lambda_permission.EventSourceToken = event_source_token return lambda_permission
python
def _construct_permission(self, function, source_arn=None, source_account=None, suffix="", event_source_token=None): """Constructs the Lambda Permission resource allowing the source service to invoke the function this event source triggers. :returns: the permission resource :rtype: model.lambda_.LambdaPermission """ lambda_permission = LambdaPermission(self.logical_id + 'Permission' + suffix, attributes=function.get_passthrough_resource_attributes()) try: # Name will not be available for Alias resources function_name_or_arn = function.get_runtime_attr("name") except NotImplementedError: function_name_or_arn = function.get_runtime_attr("arn") lambda_permission.Action = 'lambda:invokeFunction' lambda_permission.FunctionName = function_name_or_arn lambda_permission.Principal = self.principal lambda_permission.SourceArn = source_arn lambda_permission.SourceAccount = source_account lambda_permission.EventSourceToken = event_source_token return lambda_permission
[ "def", "_construct_permission", "(", "self", ",", "function", ",", "source_arn", "=", "None", ",", "source_account", "=", "None", ",", "suffix", "=", "\"\"", ",", "event_source_token", "=", "None", ")", ":", "lambda_permission", "=", "LambdaPermission", "(", "self", ".", "logical_id", "+", "'Permission'", "+", "suffix", ",", "attributes", "=", "function", ".", "get_passthrough_resource_attributes", "(", ")", ")", "try", ":", "# Name will not be available for Alias resources", "function_name_or_arn", "=", "function", ".", "get_runtime_attr", "(", "\"name\"", ")", "except", "NotImplementedError", ":", "function_name_or_arn", "=", "function", ".", "get_runtime_attr", "(", "\"arn\"", ")", "lambda_permission", ".", "Action", "=", "'lambda:invokeFunction'", "lambda_permission", ".", "FunctionName", "=", "function_name_or_arn", "lambda_permission", ".", "Principal", "=", "self", ".", "principal", "lambda_permission", ".", "SourceArn", "=", "source_arn", "lambda_permission", ".", "SourceAccount", "=", "source_account", "lambda_permission", ".", "EventSourceToken", "=", "event_source_token", "return", "lambda_permission" ]
Constructs the Lambda Permission resource allowing the source service to invoke the function this event source triggers. :returns: the permission resource :rtype: model.lambda_.LambdaPermission
[ "Constructs", "the", "Lambda", "Permission", "resource", "allowing", "the", "source", "service", "to", "invoke", "the", "function", "this", "event", "source", "triggers", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L43-L66
23,326
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
Schedule.to_cloudformation
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Events Rule and Lambda Permission to which this Schedule event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this pull event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") resources = [] events_rule = EventsRule(self.logical_id) resources.append(events_rule) events_rule.ScheduleExpression = self.Schedule events_rule.Targets = [self._construct_target(function)] source_arn = events_rule.get_runtime_attr("arn") if CONDITION in function.resource_attributes: events_rule.set_resource_attribute(CONDITION, function.resource_attributes[CONDITION]) resources.append(self._construct_permission(function, source_arn=source_arn)) return resources
python
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Events Rule and Lambda Permission to which this Schedule event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this pull event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") resources = [] events_rule = EventsRule(self.logical_id) resources.append(events_rule) events_rule.ScheduleExpression = self.Schedule events_rule.Targets = [self._construct_target(function)] source_arn = events_rule.get_runtime_attr("arn") if CONDITION in function.resource_attributes: events_rule.set_resource_attribute(CONDITION, function.resource_attributes[CONDITION]) resources.append(self._construct_permission(function, source_arn=source_arn)) return resources
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "resources", "=", "[", "]", "events_rule", "=", "EventsRule", "(", "self", ".", "logical_id", ")", "resources", ".", "append", "(", "events_rule", ")", "events_rule", ".", "ScheduleExpression", "=", "self", ".", "Schedule", "events_rule", ".", "Targets", "=", "[", "self", ".", "_construct_target", "(", "function", ")", "]", "source_arn", "=", "events_rule", ".", "get_runtime_attr", "(", "\"arn\"", ")", "if", "CONDITION", "in", "function", ".", "resource_attributes", ":", "events_rule", ".", "set_resource_attribute", "(", "CONDITION", ",", "function", ".", "resource_attributes", "[", "CONDITION", "]", ")", "resources", ".", "append", "(", "self", ".", "_construct_permission", "(", "function", ",", "source_arn", "=", "source_arn", ")", ")", "return", "resources" ]
Returns the CloudWatch Events Rule and Lambda Permission to which this Schedule event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this pull event expands :rtype: list
[ "Returns", "the", "CloudWatch", "Events", "Rule", "and", "Lambda", "Permission", "to", "which", "this", "Schedule", "event", "source", "corresponds", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L78-L103
23,327
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
CloudWatchEvent._construct_target
def _construct_target(self, function): """Constructs the Target property for the CloudWatch Events Rule. :returns: the Target property :rtype: dict """ target = { 'Arn': function.get_runtime_attr("arn"), 'Id': self.logical_id + 'LambdaTarget' } if self.Input is not None: target['Input'] = self.Input if self.InputPath is not None: target['InputPath'] = self.InputPath return target
python
def _construct_target(self, function): """Constructs the Target property for the CloudWatch Events Rule. :returns: the Target property :rtype: dict """ target = { 'Arn': function.get_runtime_attr("arn"), 'Id': self.logical_id + 'LambdaTarget' } if self.Input is not None: target['Input'] = self.Input if self.InputPath is not None: target['InputPath'] = self.InputPath return target
[ "def", "_construct_target", "(", "self", ",", "function", ")", ":", "target", "=", "{", "'Arn'", ":", "function", ".", "get_runtime_attr", "(", "\"arn\"", ")", ",", "'Id'", ":", "self", ".", "logical_id", "+", "'LambdaTarget'", "}", "if", "self", ".", "Input", "is", "not", "None", ":", "target", "[", "'Input'", "]", "=", "self", ".", "Input", "if", "self", ".", "InputPath", "is", "not", "None", ":", "target", "[", "'InputPath'", "]", "=", "self", ".", "InputPath", "return", "target" ]
Constructs the Target property for the CloudWatch Events Rule. :returns: the Target property :rtype: dict
[ "Constructs", "the", "Target", "property", "for", "the", "CloudWatch", "Events", "Rule", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L159-L174
23,328
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
S3.to_cloudformation
def to_cloudformation(self, **kwargs): """Returns the Lambda Permission resource allowing S3 to invoke the function this event source triggers. :param dict kwargs: S3 bucket resource :returns: a list of vanilla CloudFormation Resources, to which this S3 event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") if 'bucket' not in kwargs or kwargs['bucket'] is None: raise TypeError("Missing required keyword argument: bucket") if 'bucket_id' not in kwargs or kwargs['bucket_id'] is None: raise TypeError("Missing required keyword argument: bucket_id") bucket = kwargs['bucket'] bucket_id = kwargs['bucket_id'] resources = [] source_account = ref('AWS::AccountId') permission = self._construct_permission(function, source_account=source_account) if CONDITION in permission.resource_attributes: self._depend_on_lambda_permissions_using_tag(bucket, permission) else: self._depend_on_lambda_permissions(bucket, permission) resources.append(permission) # NOTE: `bucket` here is a dictionary representing the S3 Bucket resource in your SAM template. If there are # multiple S3 Events attached to the same bucket, we will update the Bucket resource with notification # configuration for each event. This is the reason why we continue to use existing bucket dict and append onto # it. # # NOTE: There is some fragile logic here where we will append multiple resources to output # SAM template but de-dupe them when merging into output CFN template. This is scary because the order of # merging is literally "last one wins", which works fine because we linearly loop through the template once. # The de-dupe happens inside `samtranslator.translator.Translator.translate` method when merging results of # to_cloudformation() to output template. self._inject_notification_configuration(function, bucket) resources.append(S3Bucket.from_dict(bucket_id, bucket)) return resources
python
def to_cloudformation(self, **kwargs): """Returns the Lambda Permission resource allowing S3 to invoke the function this event source triggers. :param dict kwargs: S3 bucket resource :returns: a list of vanilla CloudFormation Resources, to which this S3 event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") if 'bucket' not in kwargs or kwargs['bucket'] is None: raise TypeError("Missing required keyword argument: bucket") if 'bucket_id' not in kwargs or kwargs['bucket_id'] is None: raise TypeError("Missing required keyword argument: bucket_id") bucket = kwargs['bucket'] bucket_id = kwargs['bucket_id'] resources = [] source_account = ref('AWS::AccountId') permission = self._construct_permission(function, source_account=source_account) if CONDITION in permission.resource_attributes: self._depend_on_lambda_permissions_using_tag(bucket, permission) else: self._depend_on_lambda_permissions(bucket, permission) resources.append(permission) # NOTE: `bucket` here is a dictionary representing the S3 Bucket resource in your SAM template. If there are # multiple S3 Events attached to the same bucket, we will update the Bucket resource with notification # configuration for each event. This is the reason why we continue to use existing bucket dict and append onto # it. # # NOTE: There is some fragile logic here where we will append multiple resources to output # SAM template but de-dupe them when merging into output CFN template. This is scary because the order of # merging is literally "last one wins", which works fine because we linearly loop through the template once. # The de-dupe happens inside `samtranslator.translator.Translator.translate` method when merging results of # to_cloudformation() to output template. self._inject_notification_configuration(function, bucket) resources.append(S3Bucket.from_dict(bucket_id, bucket)) return resources
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "if", "'bucket'", "not", "in", "kwargs", "or", "kwargs", "[", "'bucket'", "]", "is", "None", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: bucket\"", ")", "if", "'bucket_id'", "not", "in", "kwargs", "or", "kwargs", "[", "'bucket_id'", "]", "is", "None", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: bucket_id\"", ")", "bucket", "=", "kwargs", "[", "'bucket'", "]", "bucket_id", "=", "kwargs", "[", "'bucket_id'", "]", "resources", "=", "[", "]", "source_account", "=", "ref", "(", "'AWS::AccountId'", ")", "permission", "=", "self", ".", "_construct_permission", "(", "function", ",", "source_account", "=", "source_account", ")", "if", "CONDITION", "in", "permission", ".", "resource_attributes", ":", "self", ".", "_depend_on_lambda_permissions_using_tag", "(", "bucket", ",", "permission", ")", "else", ":", "self", ".", "_depend_on_lambda_permissions", "(", "bucket", ",", "permission", ")", "resources", ".", "append", "(", "permission", ")", "# NOTE: `bucket` here is a dictionary representing the S3 Bucket resource in your SAM template. If there are", "# multiple S3 Events attached to the same bucket, we will update the Bucket resource with notification", "# configuration for each event. This is the reason why we continue to use existing bucket dict and append onto", "# it.", "#", "# NOTE: There is some fragile logic here where we will append multiple resources to output", "# SAM template but de-dupe them when merging into output CFN template. 
This is scary because the order of", "# merging is literally \"last one wins\", which works fine because we linearly loop through the template once.", "# The de-dupe happens inside `samtranslator.translator.Translator.translate` method when merging results of", "# to_cloudformation() to output template.", "self", ".", "_inject_notification_configuration", "(", "function", ",", "bucket", ")", "resources", ".", "append", "(", "S3Bucket", ".", "from_dict", "(", "bucket_id", ",", "bucket", ")", ")", "return", "resources" ]
Returns the Lambda Permission resource allowing S3 to invoke the function this event source triggers. :param dict kwargs: S3 bucket resource :returns: a list of vanilla CloudFormation Resources, to which this S3 event expands :rtype: list
[ "Returns", "the", "Lambda", "Permission", "resource", "allowing", "S3", "to", "invoke", "the", "function", "this", "event", "source", "triggers", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L197-L241
23,329
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
S3._depend_on_lambda_permissions_using_tag
def _depend_on_lambda_permissions_using_tag(self, bucket, permission): """ Since conditional DependsOn is not supported this undocumented way of implicitely making dependency through tags is used. See https://stackoverflow.com/questions/34607476/cloudformation-apply-condition-on-dependson It is done by using Ref wrapped in a conditional Fn::If. Using Ref implies a dependency, so CloudFormation will automatically wait once it reaches that function, the same as if you were using a DependsOn. """ properties = bucket.get('Properties', None) if properties is None: properties = {} bucket['Properties'] = properties tags = properties.get('Tags', None) if tags is None: tags = [] properties['Tags'] = tags dep_tag = { 'sam:ConditionalDependsOn:' + permission.logical_id: { 'Fn::If': [ permission.resource_attributes[CONDITION], ref(permission.logical_id), 'no dependency' ] } } properties['Tags'] = tags + get_tag_list(dep_tag) return bucket
python
def _depend_on_lambda_permissions_using_tag(self, bucket, permission): """ Since conditional DependsOn is not supported this undocumented way of implicitely making dependency through tags is used. See https://stackoverflow.com/questions/34607476/cloudformation-apply-condition-on-dependson It is done by using Ref wrapped in a conditional Fn::If. Using Ref implies a dependency, so CloudFormation will automatically wait once it reaches that function, the same as if you were using a DependsOn. """ properties = bucket.get('Properties', None) if properties is None: properties = {} bucket['Properties'] = properties tags = properties.get('Tags', None) if tags is None: tags = [] properties['Tags'] = tags dep_tag = { 'sam:ConditionalDependsOn:' + permission.logical_id: { 'Fn::If': [ permission.resource_attributes[CONDITION], ref(permission.logical_id), 'no dependency' ] } } properties['Tags'] = tags + get_tag_list(dep_tag) return bucket
[ "def", "_depend_on_lambda_permissions_using_tag", "(", "self", ",", "bucket", ",", "permission", ")", ":", "properties", "=", "bucket", ".", "get", "(", "'Properties'", ",", "None", ")", "if", "properties", "is", "None", ":", "properties", "=", "{", "}", "bucket", "[", "'Properties'", "]", "=", "properties", "tags", "=", "properties", ".", "get", "(", "'Tags'", ",", "None", ")", "if", "tags", "is", "None", ":", "tags", "=", "[", "]", "properties", "[", "'Tags'", "]", "=", "tags", "dep_tag", "=", "{", "'sam:ConditionalDependsOn:'", "+", "permission", ".", "logical_id", ":", "{", "'Fn::If'", ":", "[", "permission", ".", "resource_attributes", "[", "CONDITION", "]", ",", "ref", "(", "permission", ".", "logical_id", ")", ",", "'no dependency'", "]", "}", "}", "properties", "[", "'Tags'", "]", "=", "tags", "+", "get_tag_list", "(", "dep_tag", ")", "return", "bucket" ]
Since conditional DependsOn is not supported this undocumented way of implicitely making dependency through tags is used. See https://stackoverflow.com/questions/34607476/cloudformation-apply-condition-on-dependson It is done by using Ref wrapped in a conditional Fn::If. Using Ref implies a dependency, so CloudFormation will automatically wait once it reaches that function, the same as if you were using a DependsOn.
[ "Since", "conditional", "DependsOn", "is", "not", "supported", "this", "undocumented", "way", "of", "implicitely", "making", "dependency", "through", "tags", "is", "used", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L268-L297
23,330
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
SNS.to_cloudformation
def to_cloudformation(self, **kwargs): """Returns the Lambda Permission resource allowing SNS to invoke the function this event source triggers. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this SNS event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") return [self._construct_permission(function, source_arn=self.Topic), self._inject_subscription(function, self.Topic, self.FilterPolicy)]
python
def to_cloudformation(self, **kwargs): """Returns the Lambda Permission resource allowing SNS to invoke the function this event source triggers. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this SNS event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") return [self._construct_permission(function, source_arn=self.Topic), self._inject_subscription(function, self.Topic, self.FilterPolicy)]
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "return", "[", "self", ".", "_construct_permission", "(", "function", ",", "source_arn", "=", "self", ".", "Topic", ")", ",", "self", ".", "_inject_subscription", "(", "function", ",", "self", ".", "Topic", ",", "self", ".", "FilterPolicy", ")", "]" ]
Returns the Lambda Permission resource allowing SNS to invoke the function this event source triggers. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this SNS event expands :rtype: list
[ "Returns", "the", "Lambda", "Permission", "resource", "allowing", "SNS", "to", "invoke", "the", "function", "this", "event", "source", "triggers", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L350-L363
23,331
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
Api.resources_to_link
def resources_to_link(self, resources): """ If this API Event Source refers to an explicit API resource, resolve the reference and grab necessary data from the explicit API """ rest_api_id = self.RestApiId if isinstance(rest_api_id, dict) and "Ref" in rest_api_id: rest_api_id = rest_api_id["Ref"] # If RestApiId is a resource in the same template, then we try find the StageName by following the reference # Otherwise we default to a wildcard. This stage name is solely used to construct the permission to # allow this stage to invoke the Lambda function. If we are unable to resolve the stage name, we will # simply permit all stages to invoke this Lambda function # This hack is necessary because customers could use !ImportValue, !Ref or other intrinsic functions which # can be sometimes impossible to resolve (ie. when it has cross-stack references) permitted_stage = "*" stage_suffix = "AllStages" explicit_api = None if isinstance(rest_api_id, string_types): if rest_api_id in resources \ and "Properties" in resources[rest_api_id] \ and "StageName" in resources[rest_api_id]["Properties"]: explicit_api = resources[rest_api_id]["Properties"] permitted_stage = explicit_api["StageName"] # Stage could be a intrinsic, in which case leave the suffix to default value if isinstance(permitted_stage, string_types): if not permitted_stage: raise InvalidResourceException(rest_api_id, 'StageName cannot be empty.') stage_suffix = permitted_stage else: stage_suffix = "Stage" else: # RestApiId is a string, not an intrinsic, but we did not find a valid API resource for this ID raise InvalidEventException(self.relative_id, "RestApiId property of Api event must reference a valid " "resource in the same template.") return { 'explicit_api': explicit_api, 'explicit_api_stage': { 'permitted_stage': permitted_stage, 'suffix': stage_suffix } }
python
def resources_to_link(self, resources): """ If this API Event Source refers to an explicit API resource, resolve the reference and grab necessary data from the explicit API """ rest_api_id = self.RestApiId if isinstance(rest_api_id, dict) and "Ref" in rest_api_id: rest_api_id = rest_api_id["Ref"] # If RestApiId is a resource in the same template, then we try find the StageName by following the reference # Otherwise we default to a wildcard. This stage name is solely used to construct the permission to # allow this stage to invoke the Lambda function. If we are unable to resolve the stage name, we will # simply permit all stages to invoke this Lambda function # This hack is necessary because customers could use !ImportValue, !Ref or other intrinsic functions which # can be sometimes impossible to resolve (ie. when it has cross-stack references) permitted_stage = "*" stage_suffix = "AllStages" explicit_api = None if isinstance(rest_api_id, string_types): if rest_api_id in resources \ and "Properties" in resources[rest_api_id] \ and "StageName" in resources[rest_api_id]["Properties"]: explicit_api = resources[rest_api_id]["Properties"] permitted_stage = explicit_api["StageName"] # Stage could be a intrinsic, in which case leave the suffix to default value if isinstance(permitted_stage, string_types): if not permitted_stage: raise InvalidResourceException(rest_api_id, 'StageName cannot be empty.') stage_suffix = permitted_stage else: stage_suffix = "Stage" else: # RestApiId is a string, not an intrinsic, but we did not find a valid API resource for this ID raise InvalidEventException(self.relative_id, "RestApiId property of Api event must reference a valid " "resource in the same template.") return { 'explicit_api': explicit_api, 'explicit_api_stage': { 'permitted_stage': permitted_stage, 'suffix': stage_suffix } }
[ "def", "resources_to_link", "(", "self", ",", "resources", ")", ":", "rest_api_id", "=", "self", ".", "RestApiId", "if", "isinstance", "(", "rest_api_id", ",", "dict", ")", "and", "\"Ref\"", "in", "rest_api_id", ":", "rest_api_id", "=", "rest_api_id", "[", "\"Ref\"", "]", "# If RestApiId is a resource in the same template, then we try find the StageName by following the reference", "# Otherwise we default to a wildcard. This stage name is solely used to construct the permission to", "# allow this stage to invoke the Lambda function. If we are unable to resolve the stage name, we will", "# simply permit all stages to invoke this Lambda function", "# This hack is necessary because customers could use !ImportValue, !Ref or other intrinsic functions which", "# can be sometimes impossible to resolve (ie. when it has cross-stack references)", "permitted_stage", "=", "\"*\"", "stage_suffix", "=", "\"AllStages\"", "explicit_api", "=", "None", "if", "isinstance", "(", "rest_api_id", ",", "string_types", ")", ":", "if", "rest_api_id", "in", "resources", "and", "\"Properties\"", "in", "resources", "[", "rest_api_id", "]", "and", "\"StageName\"", "in", "resources", "[", "rest_api_id", "]", "[", "\"Properties\"", "]", ":", "explicit_api", "=", "resources", "[", "rest_api_id", "]", "[", "\"Properties\"", "]", "permitted_stage", "=", "explicit_api", "[", "\"StageName\"", "]", "# Stage could be a intrinsic, in which case leave the suffix to default value", "if", "isinstance", "(", "permitted_stage", ",", "string_types", ")", ":", "if", "not", "permitted_stage", ":", "raise", "InvalidResourceException", "(", "rest_api_id", ",", "'StageName cannot be empty.'", ")", "stage_suffix", "=", "permitted_stage", "else", ":", "stage_suffix", "=", "\"Stage\"", "else", ":", "# RestApiId is a string, not an intrinsic, but we did not find a valid API resource for this ID", "raise", "InvalidEventException", "(", "self", ".", "relative_id", ",", "\"RestApiId property of Api event must reference 
a valid \"", "\"resource in the same template.\"", ")", "return", "{", "'explicit_api'", ":", "explicit_api", ",", "'explicit_api_stage'", ":", "{", "'permitted_stage'", ":", "permitted_stage", ",", "'suffix'", ":", "stage_suffix", "}", "}" ]
If this API Event Source refers to an explicit API resource, resolve the reference and grab necessary data from the explicit API
[ "If", "this", "API", "Event", "Source", "refers", "to", "an", "explicit", "API", "resource", "resolve", "the", "reference", "and", "grab", "necessary", "data", "from", "the", "explicit", "API" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L392-L439
23,332
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
Api.to_cloudformation
def to_cloudformation(self, **kwargs): """If the Api event source has a RestApi property, then simply return the Lambda Permission resource allowing API Gateway to call the function. If no RestApi is provided, then additionally inject the path, method, and the x-amazon-apigateway-integration into the Swagger body for a provided implicit API. :param dict kwargs: a dict containing the implicit RestApi to be modified, should no explicit RestApi \ be provided. :returns: a list of vanilla CloudFormation Resources, to which this Api event expands :rtype: list """ resources = [] function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") if self.Method is not None: # Convert to lower case so that user can specify either GET or get self.Method = self.Method.lower() resources.extend(self._get_permissions(kwargs)) explicit_api = kwargs['explicit_api'] if explicit_api.get("__MANAGE_SWAGGER"): self._add_swagger_integration(explicit_api, function) return resources
python
def to_cloudformation(self, **kwargs): """If the Api event source has a RestApi property, then simply return the Lambda Permission resource allowing API Gateway to call the function. If no RestApi is provided, then additionally inject the path, method, and the x-amazon-apigateway-integration into the Swagger body for a provided implicit API. :param dict kwargs: a dict containing the implicit RestApi to be modified, should no explicit RestApi \ be provided. :returns: a list of vanilla CloudFormation Resources, to which this Api event expands :rtype: list """ resources = [] function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") if self.Method is not None: # Convert to lower case so that user can specify either GET or get self.Method = self.Method.lower() resources.extend(self._get_permissions(kwargs)) explicit_api = kwargs['explicit_api'] if explicit_api.get("__MANAGE_SWAGGER"): self._add_swagger_integration(explicit_api, function) return resources
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "resources", "=", "[", "]", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "if", "self", ".", "Method", "is", "not", "None", ":", "# Convert to lower case so that user can specify either GET or get", "self", ".", "Method", "=", "self", ".", "Method", ".", "lower", "(", ")", "resources", ".", "extend", "(", "self", ".", "_get_permissions", "(", "kwargs", ")", ")", "explicit_api", "=", "kwargs", "[", "'explicit_api'", "]", "if", "explicit_api", ".", "get", "(", "\"__MANAGE_SWAGGER\"", ")", ":", "self", ".", "_add_swagger_integration", "(", "explicit_api", ",", "function", ")", "return", "resources" ]
If the Api event source has a RestApi property, then simply return the Lambda Permission resource allowing API Gateway to call the function. If no RestApi is provided, then additionally inject the path, method, and the x-amazon-apigateway-integration into the Swagger body for a provided implicit API. :param dict kwargs: a dict containing the implicit RestApi to be modified, should no explicit RestApi \ be provided. :returns: a list of vanilla CloudFormation Resources, to which this Api event expands :rtype: list
[ "If", "the", "Api", "event", "source", "has", "a", "RestApi", "property", "then", "simply", "return", "the", "Lambda", "Permission", "resource", "allowing", "API", "Gateway", "to", "call", "the", "function", ".", "If", "no", "RestApi", "is", "provided", "then", "additionally", "inject", "the", "path", "method", "and", "the", "x", "-", "amazon", "-", "apigateway", "-", "integration", "into", "the", "Swagger", "body", "for", "a", "provided", "implicit", "API", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L441-L468
23,333
awslabs/serverless-application-model
samtranslator/model/eventsources/push.py
Api._add_swagger_integration
def _add_swagger_integration(self, api, function): """Adds the path and method for this Api event source to the Swagger body for the provided RestApi. :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi to which the path and method should be added. """ swagger_body = api.get("DefinitionBody") if swagger_body is None: return function_arn = function.get_runtime_attr('arn') partition = ArnGenerator.get_partition_name() uri = fnSub('arn:' + partition + ':apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/' + make_shorthand(function_arn) + '/invocations') editor = SwaggerEditor(swagger_body) if editor.has_integration(self.Path, self.Method): # Cannot add the Lambda Integration, if it is already present raise InvalidEventException( self.relative_id, 'API method "{method}" defined multiple times for path "{path}".'.format( method=self.Method, path=self.Path)) condition = None if CONDITION in function.resource_attributes: condition = function.resource_attributes[CONDITION] editor.add_lambda_integration(self.Path, self.Method, uri, self.Auth, api.get('Auth'), condition=condition) if self.Auth: method_authorizer = self.Auth.get('Authorizer') if method_authorizer: api_auth = api.get('Auth') api_authorizers = api_auth and api_auth.get('Authorizers') if method_authorizer != 'AWS_IAM': if not api_authorizers: raise InvalidEventException( self.relative_id, 'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] ' 'because the related API does not define any Authorizers.'.format( authorizer=method_authorizer, method=self.Method, path=self.Path)) if method_authorizer != 'NONE' and not api_authorizers.get(method_authorizer): raise InvalidEventException( self.relative_id, 'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] ' 'because it wasn\'t defined in the API\'s Authorizers.'.format( authorizer=method_authorizer, method=self.Method, path=self.Path)) if method_authorizer == 'NONE' and not 
api_auth.get('DefaultAuthorizer'): raise InvalidEventException( self.relative_id, 'Unable to set Authorizer on API method [{method}] for path [{path}] because \'NONE\' ' 'is only a valid value when a DefaultAuthorizer on the API is specified.'.format( method=self.Method, path=self.Path)) editor.add_auth_to_method(api=api, path=self.Path, method_name=self.Method, auth=self.Auth) api["DefinitionBody"] = editor.swagger
python
def _add_swagger_integration(self, api, function): """Adds the path and method for this Api event source to the Swagger body for the provided RestApi. :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi to which the path and method should be added. """ swagger_body = api.get("DefinitionBody") if swagger_body is None: return function_arn = function.get_runtime_attr('arn') partition = ArnGenerator.get_partition_name() uri = fnSub('arn:' + partition + ':apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/' + make_shorthand(function_arn) + '/invocations') editor = SwaggerEditor(swagger_body) if editor.has_integration(self.Path, self.Method): # Cannot add the Lambda Integration, if it is already present raise InvalidEventException( self.relative_id, 'API method "{method}" defined multiple times for path "{path}".'.format( method=self.Method, path=self.Path)) condition = None if CONDITION in function.resource_attributes: condition = function.resource_attributes[CONDITION] editor.add_lambda_integration(self.Path, self.Method, uri, self.Auth, api.get('Auth'), condition=condition) if self.Auth: method_authorizer = self.Auth.get('Authorizer') if method_authorizer: api_auth = api.get('Auth') api_authorizers = api_auth and api_auth.get('Authorizers') if method_authorizer != 'AWS_IAM': if not api_authorizers: raise InvalidEventException( self.relative_id, 'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] ' 'because the related API does not define any Authorizers.'.format( authorizer=method_authorizer, method=self.Method, path=self.Path)) if method_authorizer != 'NONE' and not api_authorizers.get(method_authorizer): raise InvalidEventException( self.relative_id, 'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] ' 'because it wasn\'t defined in the API\'s Authorizers.'.format( authorizer=method_authorizer, method=self.Method, path=self.Path)) if method_authorizer == 'NONE' and not 
api_auth.get('DefaultAuthorizer'): raise InvalidEventException( self.relative_id, 'Unable to set Authorizer on API method [{method}] for path [{path}] because \'NONE\' ' 'is only a valid value when a DefaultAuthorizer on the API is specified.'.format( method=self.Method, path=self.Path)) editor.add_auth_to_method(api=api, path=self.Path, method_name=self.Method, auth=self.Auth) api["DefinitionBody"] = editor.swagger
[ "def", "_add_swagger_integration", "(", "self", ",", "api", ",", "function", ")", ":", "swagger_body", "=", "api", ".", "get", "(", "\"DefinitionBody\"", ")", "if", "swagger_body", "is", "None", ":", "return", "function_arn", "=", "function", ".", "get_runtime_attr", "(", "'arn'", ")", "partition", "=", "ArnGenerator", ".", "get_partition_name", "(", ")", "uri", "=", "fnSub", "(", "'arn:'", "+", "partition", "+", "':apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/'", "+", "make_shorthand", "(", "function_arn", ")", "+", "'/invocations'", ")", "editor", "=", "SwaggerEditor", "(", "swagger_body", ")", "if", "editor", ".", "has_integration", "(", "self", ".", "Path", ",", "self", ".", "Method", ")", ":", "# Cannot add the Lambda Integration, if it is already present", "raise", "InvalidEventException", "(", "self", ".", "relative_id", ",", "'API method \"{method}\" defined multiple times for path \"{path}\".'", ".", "format", "(", "method", "=", "self", ".", "Method", ",", "path", "=", "self", ".", "Path", ")", ")", "condition", "=", "None", "if", "CONDITION", "in", "function", ".", "resource_attributes", ":", "condition", "=", "function", ".", "resource_attributes", "[", "CONDITION", "]", "editor", ".", "add_lambda_integration", "(", "self", ".", "Path", ",", "self", ".", "Method", ",", "uri", ",", "self", ".", "Auth", ",", "api", ".", "get", "(", "'Auth'", ")", ",", "condition", "=", "condition", ")", "if", "self", ".", "Auth", ":", "method_authorizer", "=", "self", ".", "Auth", ".", "get", "(", "'Authorizer'", ")", "if", "method_authorizer", ":", "api_auth", "=", "api", ".", "get", "(", "'Auth'", ")", "api_authorizers", "=", "api_auth", "and", "api_auth", ".", "get", "(", "'Authorizers'", ")", "if", "method_authorizer", "!=", "'AWS_IAM'", ":", "if", "not", "api_authorizers", ":", "raise", "InvalidEventException", "(", "self", ".", "relative_id", ",", "'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] '", "'because the 
related API does not define any Authorizers.'", ".", "format", "(", "authorizer", "=", "method_authorizer", ",", "method", "=", "self", ".", "Method", ",", "path", "=", "self", ".", "Path", ")", ")", "if", "method_authorizer", "!=", "'NONE'", "and", "not", "api_authorizers", ".", "get", "(", "method_authorizer", ")", ":", "raise", "InvalidEventException", "(", "self", ".", "relative_id", ",", "'Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] '", "'because it wasn\\'t defined in the API\\'s Authorizers.'", ".", "format", "(", "authorizer", "=", "method_authorizer", ",", "method", "=", "self", ".", "Method", ",", "path", "=", "self", ".", "Path", ")", ")", "if", "method_authorizer", "==", "'NONE'", "and", "not", "api_auth", ".", "get", "(", "'DefaultAuthorizer'", ")", ":", "raise", "InvalidEventException", "(", "self", ".", "relative_id", ",", "'Unable to set Authorizer on API method [{method}] for path [{path}] because \\'NONE\\' '", "'is only a valid value when a DefaultAuthorizer on the API is specified.'", ".", "format", "(", "method", "=", "self", ".", "Method", ",", "path", "=", "self", ".", "Path", ")", ")", "editor", ".", "add_auth_to_method", "(", "api", "=", "api", ",", "path", "=", "self", ".", "Path", ",", "method_name", "=", "self", ".", "Method", ",", "auth", "=", "self", ".", "Auth", ")", "api", "[", "\"DefinitionBody\"", "]", "=", "editor", ".", "swagger" ]
Adds the path and method for this Api event source to the Swagger body for the provided RestApi. :param model.apigateway.ApiGatewayRestApi rest_api: the RestApi to which the path and method should be added.
[ "Adds", "the", "path", "and", "method", "for", "this", "Api", "event", "source", "to", "the", "Swagger", "body", "for", "the", "provided", "RestApi", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/push.py#L507-L567
23,334
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver.resolve_parameter_refs
def resolve_parameter_refs(self, input): """ Resolves references to parameters within the given dictionary recursively. Other intrinsic functions such as !GetAtt, !Sub or !Ref to non-parameters will be left untouched. Result is a dictionary where parameter values are inlined. Don't pass this dictionary directly into transform's output because it changes the template structure by inlining parameter values. :param input: Any primitive type (dict, array, string etc) whose values might contain intrinsic functions :return: A copy of a dictionary with parameter references replaced by actual value. """ return self._traverse(input, self.parameters, self._try_resolve_parameter_refs)
python
def resolve_parameter_refs(self, input): """ Resolves references to parameters within the given dictionary recursively. Other intrinsic functions such as !GetAtt, !Sub or !Ref to non-parameters will be left untouched. Result is a dictionary where parameter values are inlined. Don't pass this dictionary directly into transform's output because it changes the template structure by inlining parameter values. :param input: Any primitive type (dict, array, string etc) whose values might contain intrinsic functions :return: A copy of a dictionary with parameter references replaced by actual value. """ return self._traverse(input, self.parameters, self._try_resolve_parameter_refs)
[ "def", "resolve_parameter_refs", "(", "self", ",", "input", ")", ":", "return", "self", ".", "_traverse", "(", "input", ",", "self", ".", "parameters", ",", "self", ".", "_try_resolve_parameter_refs", ")" ]
Resolves references to parameters within the given dictionary recursively. Other intrinsic functions such as !GetAtt, !Sub or !Ref to non-parameters will be left untouched. Result is a dictionary where parameter values are inlined. Don't pass this dictionary directly into transform's output because it changes the template structure by inlining parameter values. :param input: Any primitive type (dict, array, string etc) whose values might contain intrinsic functions :return: A copy of a dictionary with parameter references replaced by actual value.
[ "Resolves", "references", "to", "parameters", "within", "the", "given", "dictionary", "recursively", ".", "Other", "intrinsic", "functions", "such", "as", "!GetAtt", "!Sub", "or", "!Ref", "to", "non", "-", "parameters", "will", "be", "left", "untouched", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L30-L41
23,335
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver.resolve_sam_resource_refs
def resolve_sam_resource_refs(self, input, supported_resource_refs): """ Customers can provide a reference to a "derived" SAM resource such as Alias of a Function or Stage of an API resource. This method recursively walks the tree, converting all derived references to the real resource name, if it is present. Example: {"Ref": "MyFunction.Alias"} -> {"Ref": "MyFunctionAliasLive"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param SupportedResourceReferences supported_resource_refs: Object that contains information about the resource references supported in this SAM template, along with the value they should resolve to. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise """ return self._traverse(input, supported_resource_refs, self._try_resolve_sam_resource_refs)
python
def resolve_sam_resource_refs(self, input, supported_resource_refs): """ Customers can provide a reference to a "derived" SAM resource such as Alias of a Function or Stage of an API resource. This method recursively walks the tree, converting all derived references to the real resource name, if it is present. Example: {"Ref": "MyFunction.Alias"} -> {"Ref": "MyFunctionAliasLive"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param SupportedResourceReferences supported_resource_refs: Object that contains information about the resource references supported in this SAM template, along with the value they should resolve to. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise """ return self._traverse(input, supported_resource_refs, self._try_resolve_sam_resource_refs)
[ "def", "resolve_sam_resource_refs", "(", "self", ",", "input", ",", "supported_resource_refs", ")", ":", "return", "self", ".", "_traverse", "(", "input", ",", "supported_resource_refs", ",", "self", ".", "_try_resolve_sam_resource_refs", ")" ]
Customers can provide a reference to a "derived" SAM resource such as Alias of a Function or Stage of an API resource. This method recursively walks the tree, converting all derived references to the real resource name, if it is present. Example: {"Ref": "MyFunction.Alias"} -> {"Ref": "MyFunctionAliasLive"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param SupportedResourceReferences supported_resource_refs: Object that contains information about the resource references supported in this SAM template, along with the value they should resolve to. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise
[ "Customers", "can", "provide", "a", "reference", "to", "a", "derived", "SAM", "resource", "such", "as", "Alias", "of", "a", "Function", "or", "Stage", "of", "an", "API", "resource", ".", "This", "method", "recursively", "walks", "the", "tree", "converting", "all", "derived", "references", "to", "the", "real", "resource", "name", "if", "it", "is", "present", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L43-L65
23,336
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver.resolve_sam_resource_id_refs
def resolve_sam_resource_id_refs(self, input, supported_resource_id_refs): """ Some SAM resources have their logical ids mutated from the original id that the customer writes in the template. This method recursively walks the tree and updates these logical ids from the old value to the new value that is generated by SAM. Example: {"Ref": "MyLayer"} -> {"Ref": "MyLayerABC123"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise """ return self._traverse(input, supported_resource_id_refs, self._try_resolve_sam_resource_id_refs)
python
def resolve_sam_resource_id_refs(self, input, supported_resource_id_refs): """ Some SAM resources have their logical ids mutated from the original id that the customer writes in the template. This method recursively walks the tree and updates these logical ids from the old value to the new value that is generated by SAM. Example: {"Ref": "MyLayer"} -> {"Ref": "MyLayerABC123"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise """ return self._traverse(input, supported_resource_id_refs, self._try_resolve_sam_resource_id_refs)
[ "def", "resolve_sam_resource_id_refs", "(", "self", ",", "input", ",", "supported_resource_id_refs", ")", ":", "return", "self", ".", "_traverse", "(", "input", ",", "supported_resource_id_refs", ",", "self", ".", "_try_resolve_sam_resource_id_refs", ")" ]
Some SAM resources have their logical ids mutated from the original id that the customer writes in the template. This method recursively walks the tree and updates these logical ids from the old value to the new value that is generated by SAM. Example: {"Ref": "MyLayer"} -> {"Ref": "MyLayerABC123"} This method does not attempt to validate a reference. If it is invalid or non-resolvable, it skips the occurrence and continues with the rest. It is recommended that you have an external process that detects and surfaces invalid references. For first call, it is recommended that `template` is the entire CFN template in order to handle references in Mapping or Output sections. :param dict input: CFN template that needs resolution. This method will modify the input directly resolving references. In subsequent recursions, this will be a fragment of the CFN template. :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return list errors: List of dictionary containing information about invalid reference. Empty list otherwise
[ "Some", "SAM", "resources", "have", "their", "logical", "ids", "mutated", "from", "the", "original", "id", "that", "the", "customer", "writes", "in", "the", "template", ".", "This", "method", "recursively", "walks", "the", "tree", "and", "updates", "these", "logical", "ids", "from", "the", "old", "value", "to", "the", "new", "value", "that", "is", "generated", "by", "SAM", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L67-L88
23,337
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver._traverse
def _traverse(self, input, resolution_data, resolver_method): """ Driver method that performs the actual traversal of input and calls the appropriate `resolver_method` when to perform the resolution. :param input: Any primitive type (dict, array, string etc) whose value might contain an intrinsic function :param resolution_data: Data that will help with resolution. For example, when resolving parameter references, this object will contain a dictionary of parameter names and their values. :param resolver_method: Method that will be called to actually resolve an intrinsic function. This method is called with the parameters `(input, resolution_data)`. :return: Modified `input` with intrinsics resolved """ # There is data to help with resolution. Skip the traversal altogether if len(resolution_data) == 0: return input # # Traversal Algorithm: # # Imagine the input dictionary/list as a tree. We are doing a Pre-Order tree traversal here where we first # process the root node before going to its children. Dict and Lists are the only two iterable nodes. # Everything else is a leaf node. # # We do a Pre-Order traversal to handle the case where `input` contains intrinsic function as its only child # ie. input = {"Ref": "foo}. # # We will try to resolve the intrinsics if we can, otherwise return the original input. In some cases, resolving # an intrinsic will result in a terminal state ie. {"Ref": "foo"} could resolve to a string "bar". In other # cases, resolving intrinsics is only partial and we might need to continue traversing the tree (ex: Fn::Sub) # to handle nested intrinsics. All of these cases lend well towards a Pre-Order traversal where we try and # process the intrinsic, which results in a modified sub-tree to traverse. 
# input = resolver_method(input, resolution_data) if isinstance(input, dict): return self._traverse_dict(input, resolution_data, resolver_method) elif isinstance(input, list): return self._traverse_list(input, resolution_data, resolver_method) else: # We can iterate only over dict or list types. Primitive types are terminals return input
python
def _traverse(self, input, resolution_data, resolver_method): """ Driver method that performs the actual traversal of input and calls the appropriate `resolver_method` when to perform the resolution. :param input: Any primitive type (dict, array, string etc) whose value might contain an intrinsic function :param resolution_data: Data that will help with resolution. For example, when resolving parameter references, this object will contain a dictionary of parameter names and their values. :param resolver_method: Method that will be called to actually resolve an intrinsic function. This method is called with the parameters `(input, resolution_data)`. :return: Modified `input` with intrinsics resolved """ # There is data to help with resolution. Skip the traversal altogether if len(resolution_data) == 0: return input # # Traversal Algorithm: # # Imagine the input dictionary/list as a tree. We are doing a Pre-Order tree traversal here where we first # process the root node before going to its children. Dict and Lists are the only two iterable nodes. # Everything else is a leaf node. # # We do a Pre-Order traversal to handle the case where `input` contains intrinsic function as its only child # ie. input = {"Ref": "foo}. # # We will try to resolve the intrinsics if we can, otherwise return the original input. In some cases, resolving # an intrinsic will result in a terminal state ie. {"Ref": "foo"} could resolve to a string "bar". In other # cases, resolving intrinsics is only partial and we might need to continue traversing the tree (ex: Fn::Sub) # to handle nested intrinsics. All of these cases lend well towards a Pre-Order traversal where we try and # process the intrinsic, which results in a modified sub-tree to traverse. 
# input = resolver_method(input, resolution_data) if isinstance(input, dict): return self._traverse_dict(input, resolution_data, resolver_method) elif isinstance(input, list): return self._traverse_list(input, resolution_data, resolver_method) else: # We can iterate only over dict or list types. Primitive types are terminals return input
[ "def", "_traverse", "(", "self", ",", "input", ",", "resolution_data", ",", "resolver_method", ")", ":", "# There is data to help with resolution. Skip the traversal altogether", "if", "len", "(", "resolution_data", ")", "==", "0", ":", "return", "input", "#", "# Traversal Algorithm:", "#", "# Imagine the input dictionary/list as a tree. We are doing a Pre-Order tree traversal here where we first", "# process the root node before going to its children. Dict and Lists are the only two iterable nodes.", "# Everything else is a leaf node.", "#", "# We do a Pre-Order traversal to handle the case where `input` contains intrinsic function as its only child", "# ie. input = {\"Ref\": \"foo}.", "#", "# We will try to resolve the intrinsics if we can, otherwise return the original input. In some cases, resolving", "# an intrinsic will result in a terminal state ie. {\"Ref\": \"foo\"} could resolve to a string \"bar\". In other", "# cases, resolving intrinsics is only partial and we might need to continue traversing the tree (ex: Fn::Sub)", "# to handle nested intrinsics. All of these cases lend well towards a Pre-Order traversal where we try and", "# process the intrinsic, which results in a modified sub-tree to traverse.", "#", "input", "=", "resolver_method", "(", "input", ",", "resolution_data", ")", "if", "isinstance", "(", "input", ",", "dict", ")", ":", "return", "self", ".", "_traverse_dict", "(", "input", ",", "resolution_data", ",", "resolver_method", ")", "elif", "isinstance", "(", "input", ",", "list", ")", ":", "return", "self", ".", "_traverse_list", "(", "input", ",", "resolution_data", ",", "resolver_method", ")", "else", ":", "# We can iterate only over dict or list types. Primitive types are terminals", "return", "input" ]
Driver method that performs the actual traversal of input and calls the appropriate `resolver_method` when to perform the resolution. :param input: Any primitive type (dict, array, string etc) whose value might contain an intrinsic function :param resolution_data: Data that will help with resolution. For example, when resolving parameter references, this object will contain a dictionary of parameter names and their values. :param resolver_method: Method that will be called to actually resolve an intrinsic function. This method is called with the parameters `(input, resolution_data)`. :return: Modified `input` with intrinsics resolved
[ "Driver", "method", "that", "performs", "the", "actual", "traversal", "of", "input", "and", "calls", "the", "appropriate", "resolver_method", "when", "to", "perform", "the", "resolution", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L90-L132
23,338
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver._traverse_dict
def _traverse_dict(self, input_dict, resolution_data, resolver_method): """ Traverse a dictionary to resolve intrinsic functions on every value :param input_dict: Input dictionary to traverse :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified dictionary with values resolved """ for key, value in input_dict.items(): input_dict[key] = self._traverse(value, resolution_data, resolver_method) return input_dict
python
def _traverse_dict(self, input_dict, resolution_data, resolver_method): """ Traverse a dictionary to resolve intrinsic functions on every value :param input_dict: Input dictionary to traverse :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified dictionary with values resolved """ for key, value in input_dict.items(): input_dict[key] = self._traverse(value, resolution_data, resolver_method) return input_dict
[ "def", "_traverse_dict", "(", "self", ",", "input_dict", ",", "resolution_data", ",", "resolver_method", ")", ":", "for", "key", ",", "value", "in", "input_dict", ".", "items", "(", ")", ":", "input_dict", "[", "key", "]", "=", "self", ".", "_traverse", "(", "value", ",", "resolution_data", ",", "resolver_method", ")", "return", "input_dict" ]
Traverse a dictionary to resolve intrinsic functions on every value :param input_dict: Input dictionary to traverse :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified dictionary with values resolved
[ "Traverse", "a", "dictionary", "to", "resolve", "intrinsic", "functions", "on", "every", "value" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L134-L146
23,339
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver._traverse_list
def _traverse_list(self, input_list, resolution_data, resolver_method): """ Traverse a list to resolve intrinsic functions on every element :param input_list: List of input :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified list with intrinsic functions resolved """ for index, value in enumerate(input_list): input_list[index] = self._traverse(value, resolution_data, resolver_method) return input_list
python
def _traverse_list(self, input_list, resolution_data, resolver_method): """ Traverse a list to resolve intrinsic functions on every element :param input_list: List of input :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified list with intrinsic functions resolved """ for index, value in enumerate(input_list): input_list[index] = self._traverse(value, resolution_data, resolver_method) return input_list
[ "def", "_traverse_list", "(", "self", ",", "input_list", ",", "resolution_data", ",", "resolver_method", ")", ":", "for", "index", ",", "value", "in", "enumerate", "(", "input_list", ")", ":", "input_list", "[", "index", "]", "=", "self", ".", "_traverse", "(", "value", ",", "resolution_data", ",", "resolver_method", ")", "return", "input_list" ]
Traverse a list to resolve intrinsic functions on every element :param input_list: List of input :param resolution_data: Data that the `resolver_method` needs to operate :param resolver_method: Method that can actually resolve an intrinsic function, if it detects one :return: Modified list with intrinsic functions resolved
[ "Traverse", "a", "list", "to", "resolve", "intrinsic", "functions", "on", "every", "element" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L148-L160
23,340
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver._try_resolve_sam_resource_refs
def _try_resolve_sam_resource_refs(self, input, supported_resource_refs): """ Try to resolve SAM resource references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param SupportedResourceReferences supported_resource_refs: Object containing information about available resource references and the values they resolve to. :return: Modified input dictionary with references resolved """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_resource_refs(input, supported_resource_refs)
python
def _try_resolve_sam_resource_refs(self, input, supported_resource_refs): """ Try to resolve SAM resource references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param SupportedResourceReferences supported_resource_refs: Object containing information about available resource references and the values they resolve to. :return: Modified input dictionary with references resolved """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_resource_refs(input, supported_resource_refs)
[ "def", "_try_resolve_sam_resource_refs", "(", "self", ",", "input", ",", "supported_resource_refs", ")", ":", "if", "not", "self", ".", "_is_intrinsic_dict", "(", "input", ")", ":", "return", "input", "function_type", "=", "list", "(", "input", ".", "keys", "(", ")", ")", "[", "0", "]", "return", "self", ".", "supported_intrinsics", "[", "function_type", "]", ".", "resolve_resource_refs", "(", "input", ",", "supported_resource_refs", ")" ]
Try to resolve SAM resource references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param SupportedResourceReferences supported_resource_refs: Object containing information about available resource references and the values they resolve to. :return: Modified input dictionary with references resolved
[ "Try", "to", "resolve", "SAM", "resource", "references", "on", "the", "given", "template", ".", "If", "the", "given", "object", "looks", "like", "one", "of", "the", "supported", "intrinsics", "it", "calls", "the", "appropriate", "resolution", "on", "it", ".", "If", "not", "this", "method", "returns", "the", "original", "input", "unmodified", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L179-L194
23,341
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver._try_resolve_sam_resource_id_refs
def _try_resolve_sam_resource_id_refs(self, input, supported_resource_id_refs): """ Try to resolve SAM resource id references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return: Modified input dictionary with id references resolved """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_resource_id_refs(input, supported_resource_id_refs)
python
def _try_resolve_sam_resource_id_refs(self, input, supported_resource_id_refs): """ Try to resolve SAM resource id references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return: Modified input dictionary with id references resolved """ if not self._is_intrinsic_dict(input): return input function_type = list(input.keys())[0] return self.supported_intrinsics[function_type].resolve_resource_id_refs(input, supported_resource_id_refs)
[ "def", "_try_resolve_sam_resource_id_refs", "(", "self", ",", "input", ",", "supported_resource_id_refs", ")", ":", "if", "not", "self", ".", "_is_intrinsic_dict", "(", "input", ")", ":", "return", "input", "function_type", "=", "list", "(", "input", ".", "keys", "(", ")", ")", "[", "0", "]", "return", "self", ".", "supported_intrinsics", "[", "function_type", "]", ".", "resolve_resource_id_refs", "(", "input", ",", "supported_resource_id_refs", ")" ]
Try to resolve SAM resource id references on the given template. If the given object looks like one of the supported intrinsics, it calls the appropriate resolution on it. If not, this method returns the original input unmodified. :param dict input: Dictionary that may represent an intrinsic function :param dict supported_resource_id_refs: Dictionary that maps old logical ids to new ones. :return: Modified input dictionary with id references resolved
[ "Try", "to", "resolve", "SAM", "resource", "id", "references", "on", "the", "given", "template", ".", "If", "the", "given", "object", "looks", "like", "one", "of", "the", "supported", "intrinsics", "it", "calls", "the", "appropriate", "resolution", "on", "it", ".", "If", "not", "this", "method", "returns", "the", "original", "input", "unmodified", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L196-L210
23,342
awslabs/serverless-application-model
samtranslator/intrinsics/resolver.py
IntrinsicsResolver._is_intrinsic_dict
def _is_intrinsic_dict(self, input): """ Can the input represent an intrinsic function in it? :param input: Object to be checked :return: True, if the input contains a supported intrinsic function. False otherwise """ # All intrinsic functions are dictionaries with just one key return isinstance(input, dict) \ and len(input) == 1 \ and list(input.keys())[0] in self.supported_intrinsics
python
def _is_intrinsic_dict(self, input): """ Can the input represent an intrinsic function in it? :param input: Object to be checked :return: True, if the input contains a supported intrinsic function. False otherwise """ # All intrinsic functions are dictionaries with just one key return isinstance(input, dict) \ and len(input) == 1 \ and list(input.keys())[0] in self.supported_intrinsics
[ "def", "_is_intrinsic_dict", "(", "self", ",", "input", ")", ":", "# All intrinsic functions are dictionaries with just one key", "return", "isinstance", "(", "input", ",", "dict", ")", "and", "len", "(", "input", ")", "==", "1", "and", "list", "(", "input", ".", "keys", "(", ")", ")", "[", "0", "]", "in", "self", ".", "supported_intrinsics" ]
Can the input represent an intrinsic function in it? :param input: Object to be checked :return: True, if the input contains a supported intrinsic function. False otherwise
[ "Can", "the", "input", "represent", "an", "intrinsic", "function", "in", "it?" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/intrinsics/resolver.py#L212-L222
23,343
awslabs/serverless-application-model
samtranslator/model/eventsources/cloudwatchlogs.py
CloudWatchLogs.to_cloudformation
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") source_arn = self.get_source_arn() permission = self._construct_permission(function, source_arn=source_arn) subscription_filter = self.get_subscription_filter(function, permission) resources = [permission, subscription_filter] return resources
python
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") source_arn = self.get_source_arn() permission = self._construct_permission(function, source_arn=source_arn) subscription_filter = self.get_subscription_filter(function, permission) resources = [permission, subscription_filter] return resources
[ "def", "to_cloudformation", "(", "self", ",", "*", "*", "kwargs", ")", ":", "function", "=", "kwargs", ".", "get", "(", "'function'", ")", "if", "not", "function", ":", "raise", "TypeError", "(", "\"Missing required keyword argument: function\"", ")", "source_arn", "=", "self", ".", "get_source_arn", "(", ")", "permission", "=", "self", ".", "_construct_permission", "(", "function", ",", "source_arn", "=", "source_arn", ")", "subscription_filter", "=", "self", ".", "get_subscription_filter", "(", "function", ",", "permission", ")", "resources", "=", "[", "permission", ",", "subscription_filter", "]", "return", "resources" ]
Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list
[ "Returns", "the", "CloudWatch", "Logs", "Subscription", "Filter", "and", "Lambda", "Permission", "to", "which", "this", "CloudWatch", "Logs", "event", "source", "corresponds", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/model/eventsources/cloudwatchlogs.py#L18-L36
23,344
awslabs/serverless-application-model
samtranslator/policy_template_processor/processor.py
PolicyTemplatesProcessor.convert
def convert(self, template_name, parameter_values): """ Converts the given template to IAM-ready policy statement by substituting template parameters with the given values. :param template_name: Name of the template :param parameter_values: Values for all parameters of the template :return dict: Dictionary containing policy statement :raises ValueError: If the given inputs don't represent valid template :raises InsufficientParameterValues: If the parameter values don't have values for all required parameters """ if not self.has(template_name): raise TemplateNotFoundException(template_name) template = self.get(template_name) return template.to_statement(parameter_values)
python
def convert(self, template_name, parameter_values): """ Converts the given template to IAM-ready policy statement by substituting template parameters with the given values. :param template_name: Name of the template :param parameter_values: Values for all parameters of the template :return dict: Dictionary containing policy statement :raises ValueError: If the given inputs don't represent valid template :raises InsufficientParameterValues: If the parameter values don't have values for all required parameters """ if not self.has(template_name): raise TemplateNotFoundException(template_name) template = self.get(template_name) return template.to_statement(parameter_values)
[ "def", "convert", "(", "self", ",", "template_name", ",", "parameter_values", ")", ":", "if", "not", "self", ".", "has", "(", "template_name", ")", ":", "raise", "TemplateNotFoundException", "(", "template_name", ")", "template", "=", "self", ".", "get", "(", "template_name", ")", "return", "template", ".", "to_statement", "(", "parameter_values", ")" ]
Converts the given template to IAM-ready policy statement by substituting template parameters with the given values. :param template_name: Name of the template :param parameter_values: Values for all parameters of the template :return dict: Dictionary containing policy statement :raises ValueError: If the given inputs don't represent valid template :raises InsufficientParameterValues: If the parameter values don't have values for all required parameters
[ "Converts", "the", "given", "template", "to", "IAM", "-", "ready", "policy", "statement", "by", "substituting", "template", "parameters", "with", "the", "given", "values", "." ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/policy_template_processor/processor.py#L84-L100
23,345
awslabs/serverless-application-model
samtranslator/policy_template_processor/processor.py
PolicyTemplatesProcessor._is_valid_templates_dict
def _is_valid_templates_dict(policy_templates_dict, schema=None): """ Is this a valid policy template dictionary :param dict policy_templates_dict: Data to be validated :param dict schema: Optional, dictionary containing JSON Schema representing policy template :return: True, if it is valid. :raises ValueError: If the template dictionary doesn't match up with the schema """ if not schema: schema = PolicyTemplatesProcessor._read_schema() try: jsonschema.validate(policy_templates_dict, schema) except ValidationError as ex: # Stringifying the exception will give us useful error message raise ValueError(str(ex)) return True
python
def _is_valid_templates_dict(policy_templates_dict, schema=None): """ Is this a valid policy template dictionary :param dict policy_templates_dict: Data to be validated :param dict schema: Optional, dictionary containing JSON Schema representing policy template :return: True, if it is valid. :raises ValueError: If the template dictionary doesn't match up with the schema """ if not schema: schema = PolicyTemplatesProcessor._read_schema() try: jsonschema.validate(policy_templates_dict, schema) except ValidationError as ex: # Stringifying the exception will give us useful error message raise ValueError(str(ex)) return True
[ "def", "_is_valid_templates_dict", "(", "policy_templates_dict", ",", "schema", "=", "None", ")", ":", "if", "not", "schema", ":", "schema", "=", "PolicyTemplatesProcessor", ".", "_read_schema", "(", ")", "try", ":", "jsonschema", ".", "validate", "(", "policy_templates_dict", ",", "schema", ")", "except", "ValidationError", "as", "ex", ":", "# Stringifying the exception will give us useful error message", "raise", "ValueError", "(", "str", "(", "ex", ")", ")", "return", "True" ]
Is this a valid policy template dictionary :param dict policy_templates_dict: Data to be validated :param dict schema: Optional, dictionary containing JSON Schema representing policy template :return: True, if it is valid. :raises ValueError: If the template dictionary doesn't match up with the schema
[ "Is", "this", "a", "valid", "policy", "template", "dictionary" ]
cccb0c96b5c91e53355ebc07e542467303a5eedd
https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/samtranslator/policy_template_processor/processor.py#L103-L122
23,346
pyecharts/pyecharts
pyecharts/render/engine.py
RenderEngine.render_chart_to_file
def render_chart_to_file(self, template_name: str, chart: Any, path: str): """ Render a chart or page to local html files. :param chart: A Chart or Page object :param path: The destination file which the html code write to :param template_name: The name of template file. """ tpl = self.env.get_template(template_name) html = tpl.render(chart=self.generate_js_link(chart)) write_utf8_html_file(path, self._reg_replace(html))
python
def render_chart_to_file(self, template_name: str, chart: Any, path: str): """ Render a chart or page to local html files. :param chart: A Chart or Page object :param path: The destination file which the html code write to :param template_name: The name of template file. """ tpl = self.env.get_template(template_name) html = tpl.render(chart=self.generate_js_link(chart)) write_utf8_html_file(path, self._reg_replace(html))
[ "def", "render_chart_to_file", "(", "self", ",", "template_name", ":", "str", ",", "chart", ":", "Any", ",", "path", ":", "str", ")", ":", "tpl", "=", "self", ".", "env", ".", "get_template", "(", "template_name", ")", "html", "=", "tpl", ".", "render", "(", "chart", "=", "self", ".", "generate_js_link", "(", "chart", ")", ")", "write_utf8_html_file", "(", "path", ",", "self", ".", "_reg_replace", "(", "html", ")", ")" ]
Render a chart or page to local html files. :param chart: A Chart or Page object :param path: The destination file which the html code write to :param template_name: The name of template file.
[ "Render", "a", "chart", "or", "page", "to", "local", "html", "files", "." ]
02050acb0e94bb9453b88a25028de7a0ce23f125
https://github.com/pyecharts/pyecharts/blob/02050acb0e94bb9453b88a25028de7a0ce23f125/pyecharts/render/engine.py#L36-L46
23,347
pyecharts/pyecharts
pyecharts/render/snapshot.py
decode_base64
def decode_base64(data: str) -> bytes: """Decode base64, padding being optional. :param data: Base64 data as an ASCII byte string :returns: The decoded byte string. """ missing_padding = len(data) % 4 if missing_padding != 0: data += "=" * (4 - missing_padding) return base64.decodebytes(data.encode("utf-8"))
python
def decode_base64(data: str) -> bytes: """Decode base64, padding being optional. :param data: Base64 data as an ASCII byte string :returns: The decoded byte string. """ missing_padding = len(data) % 4 if missing_padding != 0: data += "=" * (4 - missing_padding) return base64.decodebytes(data.encode("utf-8"))
[ "def", "decode_base64", "(", "data", ":", "str", ")", "->", "bytes", ":", "missing_padding", "=", "len", "(", "data", ")", "%", "4", "if", "missing_padding", "!=", "0", ":", "data", "+=", "\"=\"", "*", "(", "4", "-", "missing_padding", ")", "return", "base64", ".", "decodebytes", "(", "data", ".", "encode", "(", "\"utf-8\"", ")", ")" ]
Decode base64, padding being optional. :param data: Base64 data as an ASCII byte string :returns: The decoded byte string.
[ "Decode", "base64", "padding", "being", "optional", "." ]
02050acb0e94bb9453b88a25028de7a0ce23f125
https://github.com/pyecharts/pyecharts/blob/02050acb0e94bb9453b88a25028de7a0ce23f125/pyecharts/render/snapshot.py#L58-L67
23,348
micropython/micropython
ports/nrf/boards/make-pins.py
parse_pin
def parse_pin(name_str): """Parses a string and returns a pin-num.""" if len(name_str) < 1: raise ValueError("Expecting pin name to be at least 4 charcters.") if name_str[0] != 'P': raise ValueError("Expecting pin name to start with P") pin_str = name_str[1:].split('/')[0] if not pin_str.isdigit(): raise ValueError("Expecting numeric pin number.") return int(pin_str)
python
def parse_pin(name_str): """Parses a string and returns a pin-num.""" if len(name_str) < 1: raise ValueError("Expecting pin name to be at least 4 charcters.") if name_str[0] != 'P': raise ValueError("Expecting pin name to start with P") pin_str = name_str[1:].split('/')[0] if not pin_str.isdigit(): raise ValueError("Expecting numeric pin number.") return int(pin_str)
[ "def", "parse_pin", "(", "name_str", ")", ":", "if", "len", "(", "name_str", ")", "<", "1", ":", "raise", "ValueError", "(", "\"Expecting pin name to be at least 4 charcters.\"", ")", "if", "name_str", "[", "0", "]", "!=", "'P'", ":", "raise", "ValueError", "(", "\"Expecting pin name to start with P\"", ")", "pin_str", "=", "name_str", "[", "1", ":", "]", ".", "split", "(", "'/'", ")", "[", "0", "]", "if", "not", "pin_str", ".", "isdigit", "(", ")", ":", "raise", "ValueError", "(", "\"Expecting numeric pin number.\"", ")", "return", "int", "(", "pin_str", ")" ]
Parses a string and returns a pin-num.
[ "Parses", "a", "string", "and", "returns", "a", "pin", "-", "num", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/ports/nrf/boards/make-pins.py#L14-L23
23,349
micropython/micropython
examples/switch.py
run_loop
def run_loop(leds=all_leds): """ Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects """ print('Loop started.\nPress Ctrl+C to break out of the loop.') while 1: try: if switch(): [led.on() for led in leds] else: [led.off() for led in leds] except OSError: # VCPInterrupt # Ctrl+C in interpreter mode. break
python
def run_loop(leds=all_leds): """ Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects """ print('Loop started.\nPress Ctrl+C to break out of the loop.') while 1: try: if switch(): [led.on() for led in leds] else: [led.off() for led in leds] except OSError: # VCPInterrupt # Ctrl+C in interpreter mode. break
[ "def", "run_loop", "(", "leds", "=", "all_leds", ")", ":", "print", "(", "'Loop started.\\nPress Ctrl+C to break out of the loop.'", ")", "while", "1", ":", "try", ":", "if", "switch", "(", ")", ":", "[", "led", ".", "on", "(", ")", "for", "led", "in", "leds", "]", "else", ":", "[", "led", ".", "off", "(", ")", "for", "led", "in", "leds", "]", "except", "OSError", ":", "# VCPInterrupt # Ctrl+C in interpreter mode.", "break" ]
Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects
[ "Start", "the", "loop", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/examples/switch.py#L27-L42
23,350
micropython/micropython
py/makemoduledefs.py
find_c_file
def find_c_file(obj_file, vpath): """ Search vpaths for the c file that matches the provided object_file. :param str obj_file: object file to find the matching c file for :param List[str] vpath: List of base paths, similar to gcc vpath :return: str path to c file or None """ c_file = None relative_c_file = os.path.splitext(obj_file)[0] + ".c" relative_c_file = relative_c_file.lstrip('/\\') for p in vpath: possible_c_file = os.path.join(p, relative_c_file) if os.path.exists(possible_c_file): c_file = possible_c_file break return c_file
python
def find_c_file(obj_file, vpath): """ Search vpaths for the c file that matches the provided object_file. :param str obj_file: object file to find the matching c file for :param List[str] vpath: List of base paths, similar to gcc vpath :return: str path to c file or None """ c_file = None relative_c_file = os.path.splitext(obj_file)[0] + ".c" relative_c_file = relative_c_file.lstrip('/\\') for p in vpath: possible_c_file = os.path.join(p, relative_c_file) if os.path.exists(possible_c_file): c_file = possible_c_file break return c_file
[ "def", "find_c_file", "(", "obj_file", ",", "vpath", ")", ":", "c_file", "=", "None", "relative_c_file", "=", "os", ".", "path", ".", "splitext", "(", "obj_file", ")", "[", "0", "]", "+", "\".c\"", "relative_c_file", "=", "relative_c_file", ".", "lstrip", "(", "'/\\\\'", ")", "for", "p", "in", "vpath", ":", "possible_c_file", "=", "os", ".", "path", ".", "join", "(", "p", ",", "relative_c_file", ")", "if", "os", ".", "path", ".", "exists", "(", "possible_c_file", ")", ":", "c_file", "=", "possible_c_file", "break", "return", "c_file" ]
Search vpaths for the c file that matches the provided object_file. :param str obj_file: object file to find the matching c file for :param List[str] vpath: List of base paths, similar to gcc vpath :return: str path to c file or None
[ "Search", "vpaths", "for", "the", "c", "file", "that", "matches", "the", "provided", "object_file", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/py/makemoduledefs.py#L22-L38
23,351
micropython/micropython
py/makemoduledefs.py
find_module_registrations
def find_module_registrations(c_file): """ Find any MP_REGISTER_MODULE definitions in the provided c file. :param str c_file: path to c file to check :return: List[(module_name, obj_module, enabled_define)] """ global pattern if c_file is None: # No c file to match the object file, skip return set() with io.open(c_file, encoding='utf-8') as c_file_obj: return set(re.findall(pattern, c_file_obj.read()))
python
def find_module_registrations(c_file): """ Find any MP_REGISTER_MODULE definitions in the provided c file. :param str c_file: path to c file to check :return: List[(module_name, obj_module, enabled_define)] """ global pattern if c_file is None: # No c file to match the object file, skip return set() with io.open(c_file, encoding='utf-8') as c_file_obj: return set(re.findall(pattern, c_file_obj.read()))
[ "def", "find_module_registrations", "(", "c_file", ")", ":", "global", "pattern", "if", "c_file", "is", "None", ":", "# No c file to match the object file, skip", "return", "set", "(", ")", "with", "io", ".", "open", "(", "c_file", ",", "encoding", "=", "'utf-8'", ")", "as", "c_file_obj", ":", "return", "set", "(", "re", ".", "findall", "(", "pattern", ",", "c_file_obj", ".", "read", "(", ")", ")", ")" ]
Find any MP_REGISTER_MODULE definitions in the provided c file. :param str c_file: path to c file to check :return: List[(module_name, obj_module, enabled_define)]
[ "Find", "any", "MP_REGISTER_MODULE", "definitions", "in", "the", "provided", "c", "file", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/py/makemoduledefs.py#L41-L54
23,352
micropython/micropython
py/makemoduledefs.py
generate_module_table_header
def generate_module_table_header(modules): """ Generate header with module table entries for builtin modules. :param List[(module_name, obj_module, enabled_define)] modules: module defs :return: None """ # Print header file for all external modules. mod_defs = [] print("// Automatically generated by makemoduledefs.py.\n") for module_name, obj_module, enabled_define in modules: mod_def = "MODULE_DEF_{}".format(module_name.upper()) mod_defs.append(mod_def) print(( "#if ({enabled_define})\n" " extern const struct _mp_obj_module_t {obj_module};\n" " #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }},\n" "#else\n" " #define {mod_def}\n" "#endif\n" ).format(module_name=module_name, obj_module=obj_module, enabled_define=enabled_define, mod_def=mod_def) ) print("\n#define MICROPY_REGISTERED_MODULES \\") for mod_def in mod_defs: print(" {mod_def} \\".format(mod_def=mod_def)) print("// MICROPY_REGISTERED_MODULES")
python
def generate_module_table_header(modules): """ Generate header with module table entries for builtin modules. :param List[(module_name, obj_module, enabled_define)] modules: module defs :return: None """ # Print header file for all external modules. mod_defs = [] print("// Automatically generated by makemoduledefs.py.\n") for module_name, obj_module, enabled_define in modules: mod_def = "MODULE_DEF_{}".format(module_name.upper()) mod_defs.append(mod_def) print(( "#if ({enabled_define})\n" " extern const struct _mp_obj_module_t {obj_module};\n" " #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }},\n" "#else\n" " #define {mod_def}\n" "#endif\n" ).format(module_name=module_name, obj_module=obj_module, enabled_define=enabled_define, mod_def=mod_def) ) print("\n#define MICROPY_REGISTERED_MODULES \\") for mod_def in mod_defs: print(" {mod_def} \\".format(mod_def=mod_def)) print("// MICROPY_REGISTERED_MODULES")
[ "def", "generate_module_table_header", "(", "modules", ")", ":", "# Print header file for all external modules.", "mod_defs", "=", "[", "]", "print", "(", "\"// Automatically generated by makemoduledefs.py.\\n\"", ")", "for", "module_name", ",", "obj_module", ",", "enabled_define", "in", "modules", ":", "mod_def", "=", "\"MODULE_DEF_{}\"", ".", "format", "(", "module_name", ".", "upper", "(", ")", ")", "mod_defs", ".", "append", "(", "mod_def", ")", "print", "(", "(", "\"#if ({enabled_define})\\n\"", "\" extern const struct _mp_obj_module_t {obj_module};\\n\"", "\" #define {mod_def} {{ MP_ROM_QSTR({module_name}), MP_ROM_PTR(&{obj_module}) }},\\n\"", "\"#else\\n\"", "\" #define {mod_def}\\n\"", "\"#endif\\n\"", ")", ".", "format", "(", "module_name", "=", "module_name", ",", "obj_module", "=", "obj_module", ",", "enabled_define", "=", "enabled_define", ",", "mod_def", "=", "mod_def", ")", ")", "print", "(", "\"\\n#define MICROPY_REGISTERED_MODULES \\\\\"", ")", "for", "mod_def", "in", "mod_defs", ":", "print", "(", "\" {mod_def} \\\\\"", ".", "format", "(", "mod_def", "=", "mod_def", ")", ")", "print", "(", "\"// MICROPY_REGISTERED_MODULES\"", ")" ]
Generate header with module table entries for builtin modules. :param List[(module_name, obj_module, enabled_define)] modules: module defs :return: None
[ "Generate", "header", "with", "module", "table", "entries", "for", "builtin", "modules", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/py/makemoduledefs.py#L57-L86
23,353
micropython/micropython
tools/gen-cpydiff.py
readfiles
def readfiles(): """ Reads test files """ tests = list(filter(lambda x: x.endswith('.py'), os.listdir(TESTPATH))) tests.sort() files = [] for test in tests: text = open(TESTPATH + test, 'r').read() try: class_, desc, cause, workaround, code = [x.rstrip() for x in \ list(filter(None, re.split(SPLIT, text)))] output = Output(test, class_, desc, cause, workaround, code, '', '', '') files.append(output) except IndexError: print('Incorrect format in file ' + TESTPATH + test) return files
python
def readfiles(): """ Reads test files """ tests = list(filter(lambda x: x.endswith('.py'), os.listdir(TESTPATH))) tests.sort() files = [] for test in tests: text = open(TESTPATH + test, 'r').read() try: class_, desc, cause, workaround, code = [x.rstrip() for x in \ list(filter(None, re.split(SPLIT, text)))] output = Output(test, class_, desc, cause, workaround, code, '', '', '') files.append(output) except IndexError: print('Incorrect format in file ' + TESTPATH + test) return files
[ "def", "readfiles", "(", ")", ":", "tests", "=", "list", "(", "filter", "(", "lambda", "x", ":", "x", ".", "endswith", "(", "'.py'", ")", ",", "os", ".", "listdir", "(", "TESTPATH", ")", ")", ")", "tests", ".", "sort", "(", ")", "files", "=", "[", "]", "for", "test", "in", "tests", ":", "text", "=", "open", "(", "TESTPATH", "+", "test", ",", "'r'", ")", ".", "read", "(", ")", "try", ":", "class_", ",", "desc", ",", "cause", ",", "workaround", ",", "code", "=", "[", "x", ".", "rstrip", "(", ")", "for", "x", "in", "list", "(", "filter", "(", "None", ",", "re", ".", "split", "(", "SPLIT", ",", "text", ")", ")", ")", "]", "output", "=", "Output", "(", "test", ",", "class_", ",", "desc", ",", "cause", ",", "workaround", ",", "code", ",", "''", ",", "''", ",", "''", ")", "files", ".", "append", "(", "output", ")", "except", "IndexError", ":", "print", "(", "'Incorrect format in file '", "+", "TESTPATH", "+", "test", ")", "return", "files" ]
Reads test files
[ "Reads", "test", "files" ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L63-L80
23,354
micropython/micropython
tools/gen-cpydiff.py
uimports
def uimports(code): """ converts CPython module names into MicroPython equivalents """ for uimport in UIMPORTLIST: uimport = bytes(uimport, 'utf8') code = code.replace(uimport, b'u' + uimport) return code
python
def uimports(code): """ converts CPython module names into MicroPython equivalents """ for uimport in UIMPORTLIST: uimport = bytes(uimport, 'utf8') code = code.replace(uimport, b'u' + uimport) return code
[ "def", "uimports", "(", "code", ")", ":", "for", "uimport", "in", "UIMPORTLIST", ":", "uimport", "=", "bytes", "(", "uimport", ",", "'utf8'", ")", "code", "=", "code", ".", "replace", "(", "uimport", ",", "b'u'", "+", "uimport", ")", "return", "code" ]
converts CPython module names into MicroPython equivalents
[ "converts", "CPython", "module", "names", "into", "MicroPython", "equivalents" ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L82-L87
23,355
micropython/micropython
tools/gen-cpydiff.py
indent
def indent(block, spaces): """ indents paragraphs of text for rst formatting """ new_block = '' for line in block.split('\n'): new_block += spaces + line + '\n' return new_block
python
def indent(block, spaces): """ indents paragraphs of text for rst formatting """ new_block = '' for line in block.split('\n'): new_block += spaces + line + '\n' return new_block
[ "def", "indent", "(", "block", ",", "spaces", ")", ":", "new_block", "=", "''", "for", "line", "in", "block", ".", "split", "(", "'\\n'", ")", ":", "new_block", "+=", "spaces", "+", "line", "+", "'\\n'", "return", "new_block" ]
indents paragraphs of text for rst formatting
[ "indents", "paragraphs", "of", "text", "for", "rst", "formatting" ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L116-L121
23,356
micropython/micropython
tools/gen-cpydiff.py
gen_table
def gen_table(contents): """ creates a table given any set of columns """ xlengths = [] ylengths = [] for column in contents: col_len = 0 for entry in column: lines = entry.split('\n') for line in lines: col_len = max(len(line) + 2, col_len) xlengths.append(col_len) for i in range(len(contents[0])): ymax = 0 for j in range(len(contents)): ymax = max(ymax, len(contents[j][i].split('\n'))) ylengths.append(ymax) table_divider = '+' + ''.join(['-' * i + '+' for i in xlengths]) + '\n' table = table_divider for i in range(len(ylengths)): row = [column[i] for column in contents] row = [entry + '\n' * (ylengths[i]-len(entry.split('\n'))) for entry in row] row = [entry.split('\n') for entry in row] for j in range(ylengths[i]): k = 0 for entry in row: width = xlengths[k] table += ''.join(['| {:{}}'.format(entry[j], width - 1)]) k += 1 table += '|\n' table += table_divider return table + '\n'
python
def gen_table(contents): """ creates a table given any set of columns """ xlengths = [] ylengths = [] for column in contents: col_len = 0 for entry in column: lines = entry.split('\n') for line in lines: col_len = max(len(line) + 2, col_len) xlengths.append(col_len) for i in range(len(contents[0])): ymax = 0 for j in range(len(contents)): ymax = max(ymax, len(contents[j][i].split('\n'))) ylengths.append(ymax) table_divider = '+' + ''.join(['-' * i + '+' for i in xlengths]) + '\n' table = table_divider for i in range(len(ylengths)): row = [column[i] for column in contents] row = [entry + '\n' * (ylengths[i]-len(entry.split('\n'))) for entry in row] row = [entry.split('\n') for entry in row] for j in range(ylengths[i]): k = 0 for entry in row: width = xlengths[k] table += ''.join(['| {:{}}'.format(entry[j], width - 1)]) k += 1 table += '|\n' table += table_divider return table + '\n'
[ "def", "gen_table", "(", "contents", ")", ":", "xlengths", "=", "[", "]", "ylengths", "=", "[", "]", "for", "column", "in", "contents", ":", "col_len", "=", "0", "for", "entry", "in", "column", ":", "lines", "=", "entry", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "col_len", "=", "max", "(", "len", "(", "line", ")", "+", "2", ",", "col_len", ")", "xlengths", ".", "append", "(", "col_len", ")", "for", "i", "in", "range", "(", "len", "(", "contents", "[", "0", "]", ")", ")", ":", "ymax", "=", "0", "for", "j", "in", "range", "(", "len", "(", "contents", ")", ")", ":", "ymax", "=", "max", "(", "ymax", ",", "len", "(", "contents", "[", "j", "]", "[", "i", "]", ".", "split", "(", "'\\n'", ")", ")", ")", "ylengths", ".", "append", "(", "ymax", ")", "table_divider", "=", "'+'", "+", "''", ".", "join", "(", "[", "'-'", "*", "i", "+", "'+'", "for", "i", "in", "xlengths", "]", ")", "+", "'\\n'", "table", "=", "table_divider", "for", "i", "in", "range", "(", "len", "(", "ylengths", ")", ")", ":", "row", "=", "[", "column", "[", "i", "]", "for", "column", "in", "contents", "]", "row", "=", "[", "entry", "+", "'\\n'", "*", "(", "ylengths", "[", "i", "]", "-", "len", "(", "entry", ".", "split", "(", "'\\n'", ")", ")", ")", "for", "entry", "in", "row", "]", "row", "=", "[", "entry", ".", "split", "(", "'\\n'", ")", "for", "entry", "in", "row", "]", "for", "j", "in", "range", "(", "ylengths", "[", "i", "]", ")", ":", "k", "=", "0", "for", "entry", "in", "row", ":", "width", "=", "xlengths", "[", "k", "]", "table", "+=", "''", ".", "join", "(", "[", "'| {:{}}'", ".", "format", "(", "entry", "[", "j", "]", ",", "width", "-", "1", ")", "]", ")", "k", "+=", "1", "table", "+=", "'|\\n'", "table", "+=", "table_divider", "return", "table", "+", "'\\n'" ]
creates a table given any set of columns
[ "creates", "a", "table", "given", "any", "set", "of", "columns" ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/gen-cpydiff.py#L123-L154
23,357
micropython/micropython
tools/pydfu.py
init
def init(): """Initializes the found DFU device so that we can program it.""" global __dev, __cfg_descr devices = get_dfu_devices(idVendor=__VID, idProduct=__PID) if not devices: raise ValueError('No DFU device found') if len(devices) > 1: raise ValueError("Multiple DFU devices found") __dev = devices[0] __dev.set_configuration() # Claim DFU interface usb.util.claim_interface(__dev, __DFU_INTERFACE) # Find the DFU configuration descriptor, either in the device or interfaces __cfg_descr = None for cfg in __dev.configurations(): __cfg_descr = find_dfu_cfg_descr(cfg.extra_descriptors) if __cfg_descr: break for itf in cfg.interfaces(): __cfg_descr = find_dfu_cfg_descr(itf.extra_descriptors) if __cfg_descr: break # Get device into idle state for attempt in range(4): status = get_status() if status == __DFU_STATE_DFU_IDLE: break elif (status == __DFU_STATE_DFU_DOWNLOAD_IDLE or status == __DFU_STATE_DFU_UPLOAD_IDLE): abort_request() else: clr_status()
python
def init(): """Initializes the found DFU device so that we can program it.""" global __dev, __cfg_descr devices = get_dfu_devices(idVendor=__VID, idProduct=__PID) if not devices: raise ValueError('No DFU device found') if len(devices) > 1: raise ValueError("Multiple DFU devices found") __dev = devices[0] __dev.set_configuration() # Claim DFU interface usb.util.claim_interface(__dev, __DFU_INTERFACE) # Find the DFU configuration descriptor, either in the device or interfaces __cfg_descr = None for cfg in __dev.configurations(): __cfg_descr = find_dfu_cfg_descr(cfg.extra_descriptors) if __cfg_descr: break for itf in cfg.interfaces(): __cfg_descr = find_dfu_cfg_descr(itf.extra_descriptors) if __cfg_descr: break # Get device into idle state for attempt in range(4): status = get_status() if status == __DFU_STATE_DFU_IDLE: break elif (status == __DFU_STATE_DFU_DOWNLOAD_IDLE or status == __DFU_STATE_DFU_UPLOAD_IDLE): abort_request() else: clr_status()
[ "def", "init", "(", ")", ":", "global", "__dev", ",", "__cfg_descr", "devices", "=", "get_dfu_devices", "(", "idVendor", "=", "__VID", ",", "idProduct", "=", "__PID", ")", "if", "not", "devices", ":", "raise", "ValueError", "(", "'No DFU device found'", ")", "if", "len", "(", "devices", ")", ">", "1", ":", "raise", "ValueError", "(", "\"Multiple DFU devices found\"", ")", "__dev", "=", "devices", "[", "0", "]", "__dev", ".", "set_configuration", "(", ")", "# Claim DFU interface", "usb", ".", "util", ".", "claim_interface", "(", "__dev", ",", "__DFU_INTERFACE", ")", "# Find the DFU configuration descriptor, either in the device or interfaces", "__cfg_descr", "=", "None", "for", "cfg", "in", "__dev", ".", "configurations", "(", ")", ":", "__cfg_descr", "=", "find_dfu_cfg_descr", "(", "cfg", ".", "extra_descriptors", ")", "if", "__cfg_descr", ":", "break", "for", "itf", "in", "cfg", ".", "interfaces", "(", ")", ":", "__cfg_descr", "=", "find_dfu_cfg_descr", "(", "itf", ".", "extra_descriptors", ")", "if", "__cfg_descr", ":", "break", "# Get device into idle state", "for", "attempt", "in", "range", "(", "4", ")", ":", "status", "=", "get_status", "(", ")", "if", "status", "==", "__DFU_STATE_DFU_IDLE", ":", "break", "elif", "(", "status", "==", "__DFU_STATE_DFU_DOWNLOAD_IDLE", "or", "status", "==", "__DFU_STATE_DFU_UPLOAD_IDLE", ")", ":", "abort_request", "(", ")", "else", ":", "clr_status", "(", ")" ]
Initializes the found DFU device so that we can program it.
[ "Initializes", "the", "found", "DFU", "device", "so", "that", "we", "can", "program", "it", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L92-L126
23,358
micropython/micropython
tools/pydfu.py
mass_erase
def mass_erase(): """Performs a MASS erase (i.e. erases the entire device.""" # Send DNLOAD with first byte=0x41 __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, "\x41", __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
python
def mass_erase(): """Performs a MASS erase (i.e. erases the entire device.""" # Send DNLOAD with first byte=0x41 __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, "\x41", __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
[ "def", "mass_erase", "(", ")", ":", "# Send DNLOAD with first byte=0x41", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "0", ",", "__DFU_INTERFACE", ",", "\"\\x41\"", ",", "__TIMEOUT", ")", "# Execute last command", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_BUSY", ":", "raise", "Exception", "(", "\"DFU: erase failed\"", ")", "# Check command state", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_IDLE", ":", "raise", "Exception", "(", "\"DFU: erase failed\"", ")" ]
Performs a MASS erase (i.e. erases the entire device.
[ "Performs", "a", "MASS", "erase", "(", "i", ".", "e", ".", "erases", "the", "entire", "device", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L148-L160
23,359
micropython/micropython
tools/pydfu.py
page_erase
def page_erase(addr): """Erases a single page.""" if __verbose: print("Erasing page: 0x%x..." % (addr)) # Send DNLOAD with first byte=0x41 and page address buf = struct.pack("<BI", 0x41, addr) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
python
def page_erase(addr): """Erases a single page.""" if __verbose: print("Erasing page: 0x%x..." % (addr)) # Send DNLOAD with first byte=0x41 and page address buf = struct.pack("<BI", 0x41, addr) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
[ "def", "page_erase", "(", "addr", ")", ":", "if", "__verbose", ":", "print", "(", "\"Erasing page: 0x%x...\"", "%", "(", "addr", ")", ")", "# Send DNLOAD with first byte=0x41 and page address", "buf", "=", "struct", ".", "pack", "(", "\"<BI\"", ",", "0x41", ",", "addr", ")", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "0", ",", "__DFU_INTERFACE", ",", "buf", ",", "__TIMEOUT", ")", "# Execute last command", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_BUSY", ":", "raise", "Exception", "(", "\"DFU: erase failed\"", ")", "# Check command state", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_IDLE", ":", "raise", "Exception", "(", "\"DFU: erase failed\"", ")" ]
Erases a single page.
[ "Erases", "a", "single", "page", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L163-L179
23,360
micropython/micropython
tools/pydfu.py
set_address
def set_address(addr): """Sets the address for the next operation.""" # Send DNLOAD with first byte=0x21 and page address buf = struct.pack("<BI", 0x21, addr) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: set address failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: set address failed")
python
def set_address(addr): """Sets the address for the next operation.""" # Send DNLOAD with first byte=0x21 and page address buf = struct.pack("<BI", 0x21, addr) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: set address failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: set address failed")
[ "def", "set_address", "(", "addr", ")", ":", "# Send DNLOAD with first byte=0x21 and page address", "buf", "=", "struct", ".", "pack", "(", "\"<BI\"", ",", "0x21", ",", "addr", ")", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "0", ",", "__DFU_INTERFACE", ",", "buf", ",", "__TIMEOUT", ")", "# Execute last command", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_BUSY", ":", "raise", "Exception", "(", "\"DFU: set address failed\"", ")", "# Check command state", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_IDLE", ":", "raise", "Exception", "(", "\"DFU: set address failed\"", ")" ]
Sets the address for the next operation.
[ "Sets", "the", "address", "for", "the", "next", "operation", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L182-L194
23,361
micropython/micropython
tools/pydfu.py
write_memory
def write_memory(addr, buf, progress=None, progress_addr=0, progress_size=0): """Writes a buffer into memory. This routine assumes that memory has already been erased. """ xfer_count = 0 xfer_bytes = 0 xfer_total = len(buf) xfer_base = addr while xfer_bytes < xfer_total: if __verbose and xfer_count % 512 == 0: print ("Addr 0x%x %dKBs/%dKBs..." % (xfer_base + xfer_bytes, xfer_bytes // 1024, xfer_total // 1024)) if progress and xfer_count % 2 == 0: progress(progress_addr, xfer_base + xfer_bytes - progress_addr, progress_size) # Set mem write address set_address(xfer_base+xfer_bytes) # Send DNLOAD with fw data chunk = min(__cfg_descr.wTransferSize, xfer_total-xfer_bytes) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf[xfer_bytes:xfer_bytes + chunk], __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: write memory failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: write memory failed") xfer_count += 1 xfer_bytes += chunk
python
def write_memory(addr, buf, progress=None, progress_addr=0, progress_size=0): """Writes a buffer into memory. This routine assumes that memory has already been erased. """ xfer_count = 0 xfer_bytes = 0 xfer_total = len(buf) xfer_base = addr while xfer_bytes < xfer_total: if __verbose and xfer_count % 512 == 0: print ("Addr 0x%x %dKBs/%dKBs..." % (xfer_base + xfer_bytes, xfer_bytes // 1024, xfer_total // 1024)) if progress and xfer_count % 2 == 0: progress(progress_addr, xfer_base + xfer_bytes - progress_addr, progress_size) # Set mem write address set_address(xfer_base+xfer_bytes) # Send DNLOAD with fw data chunk = min(__cfg_descr.wTransferSize, xfer_total-xfer_bytes) __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf[xfer_bytes:xfer_bytes + chunk], __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: write memory failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: write memory failed") xfer_count += 1 xfer_bytes += chunk
[ "def", "write_memory", "(", "addr", ",", "buf", ",", "progress", "=", "None", ",", "progress_addr", "=", "0", ",", "progress_size", "=", "0", ")", ":", "xfer_count", "=", "0", "xfer_bytes", "=", "0", "xfer_total", "=", "len", "(", "buf", ")", "xfer_base", "=", "addr", "while", "xfer_bytes", "<", "xfer_total", ":", "if", "__verbose", "and", "xfer_count", "%", "512", "==", "0", ":", "print", "(", "\"Addr 0x%x %dKBs/%dKBs...\"", "%", "(", "xfer_base", "+", "xfer_bytes", ",", "xfer_bytes", "//", "1024", ",", "xfer_total", "//", "1024", ")", ")", "if", "progress", "and", "xfer_count", "%", "2", "==", "0", ":", "progress", "(", "progress_addr", ",", "xfer_base", "+", "xfer_bytes", "-", "progress_addr", ",", "progress_size", ")", "# Set mem write address", "set_address", "(", "xfer_base", "+", "xfer_bytes", ")", "# Send DNLOAD with fw data", "chunk", "=", "min", "(", "__cfg_descr", ".", "wTransferSize", ",", "xfer_total", "-", "xfer_bytes", ")", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "2", ",", "__DFU_INTERFACE", ",", "buf", "[", "xfer_bytes", ":", "xfer_bytes", "+", "chunk", "]", ",", "__TIMEOUT", ")", "# Execute last command", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_BUSY", ":", "raise", "Exception", "(", "\"DFU: write memory failed\"", ")", "# Check command state", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_IDLE", ":", "raise", "Exception", "(", "\"DFU: write memory failed\"", ")", "xfer_count", "+=", "1", "xfer_bytes", "+=", "chunk" ]
Writes a buffer into memory. This routine assumes that memory has already been erased.
[ "Writes", "a", "buffer", "into", "memory", ".", "This", "routine", "assumes", "that", "memory", "has", "already", "been", "erased", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L197-L233
23,362
micropython/micropython
tools/pydfu.py
write_page
def write_page(buf, xfer_offset): """Writes a single page. This routine assumes that memory has already been erased. """ xfer_base = 0x08000000 # Set mem write address set_address(xfer_base+xfer_offset) # Send DNLOAD with fw data __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: write memory failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: write memory failed") if __verbose: print ("Write: 0x%x " % (xfer_base + xfer_offset))
python
def write_page(buf, xfer_offset): """Writes a single page. This routine assumes that memory has already been erased. """ xfer_base = 0x08000000 # Set mem write address set_address(xfer_base+xfer_offset) # Send DNLOAD with fw data __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 2, __DFU_INTERFACE, buf, __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: write memory failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: write memory failed") if __verbose: print ("Write: 0x%x " % (xfer_base + xfer_offset))
[ "def", "write_page", "(", "buf", ",", "xfer_offset", ")", ":", "xfer_base", "=", "0x08000000", "# Set mem write address", "set_address", "(", "xfer_base", "+", "xfer_offset", ")", "# Send DNLOAD with fw data", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "2", ",", "__DFU_INTERFACE", ",", "buf", ",", "__TIMEOUT", ")", "# Execute last command", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_BUSY", ":", "raise", "Exception", "(", "\"DFU: write memory failed\"", ")", "# Check command state", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_DOWNLOAD_IDLE", ":", "raise", "Exception", "(", "\"DFU: write memory failed\"", ")", "if", "__verbose", ":", "print", "(", "\"Write: 0x%x \"", "%", "(", "xfer_base", "+", "xfer_offset", ")", ")" ]
Writes a single page. This routine assumes that memory has already been erased.
[ "Writes", "a", "single", "page", ".", "This", "routine", "assumes", "that", "memory", "has", "already", "been", "erased", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L236-L258
23,363
micropython/micropython
tools/pydfu.py
exit_dfu
def exit_dfu(): """Exit DFU mode, and start running the program.""" # set jump address set_address(0x08000000) # Send DNLOAD with 0 length to exit DFU __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, None, __TIMEOUT) try: # Execute last command if get_status() != __DFU_STATE_DFU_MANIFEST: print("Failed to reset device") # Release device usb.util.dispose_resources(__dev) except: pass
python
def exit_dfu(): """Exit DFU mode, and start running the program.""" # set jump address set_address(0x08000000) # Send DNLOAD with 0 length to exit DFU __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, None, __TIMEOUT) try: # Execute last command if get_status() != __DFU_STATE_DFU_MANIFEST: print("Failed to reset device") # Release device usb.util.dispose_resources(__dev) except: pass
[ "def", "exit_dfu", "(", ")", ":", "# set jump address", "set_address", "(", "0x08000000", ")", "# Send DNLOAD with 0 length to exit DFU", "__dev", ".", "ctrl_transfer", "(", "0x21", ",", "__DFU_DNLOAD", ",", "0", ",", "__DFU_INTERFACE", ",", "None", ",", "__TIMEOUT", ")", "try", ":", "# Execute last command", "if", "get_status", "(", ")", "!=", "__DFU_STATE_DFU_MANIFEST", ":", "print", "(", "\"Failed to reset device\"", ")", "# Release device", "usb", ".", "util", ".", "dispose_resources", "(", "__dev", ")", "except", ":", "pass" ]
Exit DFU mode, and start running the program.
[ "Exit", "DFU", "mode", "and", "start", "running", "the", "program", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L261-L279
23,364
micropython/micropython
tools/pydfu.py
consume
def consume(fmt, data, names): """Parses the struct defined by `fmt` from `data`, stores the parsed fields into a named tuple using `names`. Returns the named tuple, and the data with the struct stripped off.""" size = struct.calcsize(fmt) return named(struct.unpack(fmt, data[:size]), names), data[size:]
python
def consume(fmt, data, names): """Parses the struct defined by `fmt` from `data`, stores the parsed fields into a named tuple using `names`. Returns the named tuple, and the data with the struct stripped off.""" size = struct.calcsize(fmt) return named(struct.unpack(fmt, data[:size]), names), data[size:]
[ "def", "consume", "(", "fmt", ",", "data", ",", "names", ")", ":", "size", "=", "struct", ".", "calcsize", "(", "fmt", ")", "return", "named", "(", "struct", ".", "unpack", "(", "fmt", ",", "data", "[", ":", "size", "]", ")", ",", "names", ")", ",", "data", "[", "size", ":", "]" ]
Parses the struct defined by `fmt` from `data`, stores the parsed fields into a named tuple using `names`. Returns the named tuple, and the data with the struct stripped off.
[ "Parses", "the", "struct", "defined", "by", "fmt", "from", "data", "stores", "the", "parsed", "fields", "into", "a", "named", "tuple", "using", "names", ".", "Returns", "the", "named", "tuple", "and", "the", "data", "with", "the", "struct", "stripped", "off", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L287-L292
23,365
micropython/micropython
tools/pydfu.py
list_dfu_devices
def list_dfu_devices(*args, **kwargs): """Prints a lits of devices detected in DFU mode.""" devices = get_dfu_devices(*args, **kwargs) if not devices: print("No DFU capable devices found") return for device in devices: print("Bus {} Device {:03d}: ID {:04x}:{:04x}" .format(device.bus, device.address, device.idVendor, device.idProduct)) layout = get_memory_layout(device) print("Memory Layout") for entry in layout: print(" 0x{:x} {:2d} pages of {:3d}K bytes" .format(entry['addr'], entry['num_pages'], entry['page_size'] // 1024))
python
def list_dfu_devices(*args, **kwargs): """Prints a lits of devices detected in DFU mode.""" devices = get_dfu_devices(*args, **kwargs) if not devices: print("No DFU capable devices found") return for device in devices: print("Bus {} Device {:03d}: ID {:04x}:{:04x}" .format(device.bus, device.address, device.idVendor, device.idProduct)) layout = get_memory_layout(device) print("Memory Layout") for entry in layout: print(" 0x{:x} {:2d} pages of {:3d}K bytes" .format(entry['addr'], entry['num_pages'], entry['page_size'] // 1024))
[ "def", "list_dfu_devices", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "devices", "=", "get_dfu_devices", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "not", "devices", ":", "print", "(", "\"No DFU capable devices found\"", ")", "return", "for", "device", "in", "devices", ":", "print", "(", "\"Bus {} Device {:03d}: ID {:04x}:{:04x}\"", ".", "format", "(", "device", ".", "bus", ",", "device", ".", "address", ",", "device", ".", "idVendor", ",", "device", ".", "idProduct", ")", ")", "layout", "=", "get_memory_layout", "(", "device", ")", "print", "(", "\"Memory Layout\"", ")", "for", "entry", "in", "layout", ":", "print", "(", "\" 0x{:x} {:2d} pages of {:3d}K bytes\"", ".", "format", "(", "entry", "[", "'addr'", "]", ",", "entry", "[", "'num_pages'", "]", ",", "entry", "[", "'page_size'", "]", "//", "1024", ")", ")" ]
Prints a lits of devices detected in DFU mode.
[ "Prints", "a", "lits", "of", "devices", "detected", "in", "DFU", "mode", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L458-L473
23,366
micropython/micropython
tools/pydfu.py
write_elements
def write_elements(elements, mass_erase_used, progress=None): """Writes the indicated elements into the target memory, erasing as needed. """ mem_layout = get_memory_layout(__dev) for elem in elements: addr = elem['addr'] size = elem['size'] data = elem['data'] elem_size = size elem_addr = addr if progress: progress(elem_addr, 0, elem_size) while size > 0: write_size = size if not mass_erase_used: for segment in mem_layout: if addr >= segment['addr'] and \ addr <= segment['last_addr']: # We found the page containing the address we want to # write, erase it page_size = segment['page_size'] page_addr = addr & ~(page_size - 1) if addr + write_size > page_addr + page_size: write_size = page_addr + page_size - addr page_erase(page_addr) break write_memory(addr, data[:write_size], progress, elem_addr, elem_size) data = data[write_size:] addr += write_size size -= write_size if progress: progress(elem_addr, addr - elem_addr, elem_size)
python
def write_elements(elements, mass_erase_used, progress=None): """Writes the indicated elements into the target memory, erasing as needed. """ mem_layout = get_memory_layout(__dev) for elem in elements: addr = elem['addr'] size = elem['size'] data = elem['data'] elem_size = size elem_addr = addr if progress: progress(elem_addr, 0, elem_size) while size > 0: write_size = size if not mass_erase_used: for segment in mem_layout: if addr >= segment['addr'] and \ addr <= segment['last_addr']: # We found the page containing the address we want to # write, erase it page_size = segment['page_size'] page_addr = addr & ~(page_size - 1) if addr + write_size > page_addr + page_size: write_size = page_addr + page_size - addr page_erase(page_addr) break write_memory(addr, data[:write_size], progress, elem_addr, elem_size) data = data[write_size:] addr += write_size size -= write_size if progress: progress(elem_addr, addr - elem_addr, elem_size)
[ "def", "write_elements", "(", "elements", ",", "mass_erase_used", ",", "progress", "=", "None", ")", ":", "mem_layout", "=", "get_memory_layout", "(", "__dev", ")", "for", "elem", "in", "elements", ":", "addr", "=", "elem", "[", "'addr'", "]", "size", "=", "elem", "[", "'size'", "]", "data", "=", "elem", "[", "'data'", "]", "elem_size", "=", "size", "elem_addr", "=", "addr", "if", "progress", ":", "progress", "(", "elem_addr", ",", "0", ",", "elem_size", ")", "while", "size", ">", "0", ":", "write_size", "=", "size", "if", "not", "mass_erase_used", ":", "for", "segment", "in", "mem_layout", ":", "if", "addr", ">=", "segment", "[", "'addr'", "]", "and", "addr", "<=", "segment", "[", "'last_addr'", "]", ":", "# We found the page containing the address we want to", "# write, erase it", "page_size", "=", "segment", "[", "'page_size'", "]", "page_addr", "=", "addr", "&", "~", "(", "page_size", "-", "1", ")", "if", "addr", "+", "write_size", ">", "page_addr", "+", "page_size", ":", "write_size", "=", "page_addr", "+", "page_size", "-", "addr", "page_erase", "(", "page_addr", ")", "break", "write_memory", "(", "addr", ",", "data", "[", ":", "write_size", "]", ",", "progress", ",", "elem_addr", ",", "elem_size", ")", "data", "=", "data", "[", "write_size", ":", "]", "addr", "+=", "write_size", "size", "-=", "write_size", "if", "progress", ":", "progress", "(", "elem_addr", ",", "addr", "-", "elem_addr", ",", "elem_size", ")" ]
Writes the indicated elements into the target memory, erasing as needed.
[ "Writes", "the", "indicated", "elements", "into", "the", "target", "memory", "erasing", "as", "needed", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L476-L510
23,367
micropython/micropython
tools/pydfu.py
cli_progress
def cli_progress(addr, offset, size): """Prints a progress report suitable for use on the command line.""" width = 25 done = offset * width // size print("\r0x{:08x} {:7d} [{}{}] {:3d}% " .format(addr, size, '=' * done, ' ' * (width - done), offset * 100 // size), end="") try: sys.stdout.flush() except OSError: pass # Ignore Windows CLI "WinError 87" on Python 3.6 if offset == size: print("")
python
def cli_progress(addr, offset, size): """Prints a progress report suitable for use on the command line.""" width = 25 done = offset * width // size print("\r0x{:08x} {:7d} [{}{}] {:3d}% " .format(addr, size, '=' * done, ' ' * (width - done), offset * 100 // size), end="") try: sys.stdout.flush() except OSError: pass # Ignore Windows CLI "WinError 87" on Python 3.6 if offset == size: print("")
[ "def", "cli_progress", "(", "addr", ",", "offset", ",", "size", ")", ":", "width", "=", "25", "done", "=", "offset", "*", "width", "//", "size", "print", "(", "\"\\r0x{:08x} {:7d} [{}{}] {:3d}% \"", ".", "format", "(", "addr", ",", "size", ",", "'='", "*", "done", ",", "' '", "*", "(", "width", "-", "done", ")", ",", "offset", "*", "100", "//", "size", ")", ",", "end", "=", "\"\"", ")", "try", ":", "sys", ".", "stdout", ".", "flush", "(", ")", "except", "OSError", ":", "pass", "# Ignore Windows CLI \"WinError 87\" on Python 3.6", "if", "offset", "==", "size", ":", "print", "(", "\"\"", ")" ]
Prints a progress report suitable for use on the command line.
[ "Prints", "a", "progress", "report", "suitable", "for", "use", "on", "the", "command", "line", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L513-L525
23,368
micropython/micropython
tools/pydfu.py
main
def main(): """Test program for verifying this files functionality.""" global __verbose # Parse CMD args parser = argparse.ArgumentParser(description='DFU Python Util') #parser.add_argument("path", help="file path") parser.add_argument( "-l", "--list", help="list available DFU devices", action="store_true", default=False ) parser.add_argument( "-m", "--mass-erase", help="mass erase device", action="store_true", default=False ) parser.add_argument( "-u", "--upload", help="read file from DFU device", dest="path", default=False ) parser.add_argument( "-v", "--verbose", help="increase output verbosity", action="store_true", default=False ) args = parser.parse_args() __verbose = args.verbose if args.list: list_dfu_devices(idVendor=__VID, idProduct=__PID) return init() if args.mass_erase: print ("Mass erase...") mass_erase() if args.path: elements = read_dfu_file(args.path) if not elements: return print("Writing memory...") write_elements(elements, args.mass_erase, progress=cli_progress) print("Exiting DFU...") exit_dfu() return print("No command specified")
python
def main(): """Test program for verifying this files functionality.""" global __verbose # Parse CMD args parser = argparse.ArgumentParser(description='DFU Python Util') #parser.add_argument("path", help="file path") parser.add_argument( "-l", "--list", help="list available DFU devices", action="store_true", default=False ) parser.add_argument( "-m", "--mass-erase", help="mass erase device", action="store_true", default=False ) parser.add_argument( "-u", "--upload", help="read file from DFU device", dest="path", default=False ) parser.add_argument( "-v", "--verbose", help="increase output verbosity", action="store_true", default=False ) args = parser.parse_args() __verbose = args.verbose if args.list: list_dfu_devices(idVendor=__VID, idProduct=__PID) return init() if args.mass_erase: print ("Mass erase...") mass_erase() if args.path: elements = read_dfu_file(args.path) if not elements: return print("Writing memory...") write_elements(elements, args.mass_erase, progress=cli_progress) print("Exiting DFU...") exit_dfu() return print("No command specified")
[ "def", "main", "(", ")", ":", "global", "__verbose", "# Parse CMD args", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'DFU Python Util'", ")", "#parser.add_argument(\"path\", help=\"file path\")", "parser", ".", "add_argument", "(", "\"-l\"", ",", "\"--list\"", ",", "help", "=", "\"list available DFU devices\"", ",", "action", "=", "\"store_true\"", ",", "default", "=", "False", ")", "parser", ".", "add_argument", "(", "\"-m\"", ",", "\"--mass-erase\"", ",", "help", "=", "\"mass erase device\"", ",", "action", "=", "\"store_true\"", ",", "default", "=", "False", ")", "parser", ".", "add_argument", "(", "\"-u\"", ",", "\"--upload\"", ",", "help", "=", "\"read file from DFU device\"", ",", "dest", "=", "\"path\"", ",", "default", "=", "False", ")", "parser", ".", "add_argument", "(", "\"-v\"", ",", "\"--verbose\"", ",", "help", "=", "\"increase output verbosity\"", ",", "action", "=", "\"store_true\"", ",", "default", "=", "False", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "__verbose", "=", "args", ".", "verbose", "if", "args", ".", "list", ":", "list_dfu_devices", "(", "idVendor", "=", "__VID", ",", "idProduct", "=", "__PID", ")", "return", "init", "(", ")", "if", "args", ".", "mass_erase", ":", "print", "(", "\"Mass erase...\"", ")", "mass_erase", "(", ")", "if", "args", ".", "path", ":", "elements", "=", "read_dfu_file", "(", "args", ".", "path", ")", "if", "not", "elements", ":", "return", "print", "(", "\"Writing memory...\"", ")", "write_elements", "(", "elements", ",", "args", ".", "mass_erase", ",", "progress", "=", "cli_progress", ")", "print", "(", "\"Exiting DFU...\"", ")", "exit_dfu", "(", ")", "return", "print", "(", "\"No command specified\"", ")" ]
Test program for verifying this files functionality.
[ "Test", "program", "for", "verifying", "this", "files", "functionality", "." ]
8031b7a25c21fb864fe9dd1fa40740030be66c11
https://github.com/micropython/micropython/blob/8031b7a25c21fb864fe9dd1fa40740030be66c11/tools/pydfu.py#L528-L583
23,369
onnx/onnx
onnx/external_data_helper.py
load_external_data_for_tensor
def load_external_data_for_tensor(tensor, base_dir): # type: (TensorProto, Text) -> None """ Load data from an external file for tensor. @params tensor: a TensorProto object. base_dir: directory that contains the external data. """ if tensor.HasField("raw_data"): # already loaded return info = ExternalDataInfo(tensor) file_location = _sanitize_path(info.location) external_data_file_path = os.path.join(base_dir, file_location) with open(external_data_file_path, 'rb') as data_file: if info.offset: data_file.seek(info.offset) if info.length: tensor.raw_data = data_file.read(info.length) else: tensor.raw_data = data_file.read()
python
def load_external_data_for_tensor(tensor, base_dir): # type: (TensorProto, Text) -> None """ Load data from an external file for tensor. @params tensor: a TensorProto object. base_dir: directory that contains the external data. """ if tensor.HasField("raw_data"): # already loaded return info = ExternalDataInfo(tensor) file_location = _sanitize_path(info.location) external_data_file_path = os.path.join(base_dir, file_location) with open(external_data_file_path, 'rb') as data_file: if info.offset: data_file.seek(info.offset) if info.length: tensor.raw_data = data_file.read(info.length) else: tensor.raw_data = data_file.read()
[ "def", "load_external_data_for_tensor", "(", "tensor", ",", "base_dir", ")", ":", "# type: (TensorProto, Text) -> None", "if", "tensor", ".", "HasField", "(", "\"raw_data\"", ")", ":", "# already loaded", "return", "info", "=", "ExternalDataInfo", "(", "tensor", ")", "file_location", "=", "_sanitize_path", "(", "info", ".", "location", ")", "external_data_file_path", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "file_location", ")", "with", "open", "(", "external_data_file_path", ",", "'rb'", ")", "as", "data_file", ":", "if", "info", ".", "offset", ":", "data_file", ".", "seek", "(", "info", ".", "offset", ")", "if", "info", ".", "length", ":", "tensor", ".", "raw_data", "=", "data_file", ".", "read", "(", "info", ".", "length", ")", "else", ":", "tensor", ".", "raw_data", "=", "data_file", ".", "read", "(", ")" ]
Load data from an external file for tensor. @params tensor: a TensorProto object. base_dir: directory that contains the external data.
[ "Load", "data", "from", "an", "external", "file", "for", "tensor", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L32-L54
23,370
onnx/onnx
onnx/external_data_helper.py
load_external_data_for_model
def load_external_data_for_model(model, base_dir): # type: (ModelProto, Text) -> None """ Loads external tensors into model @params model: ModelProto to load external data to base_dir: directory that contains external data """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): load_external_data_for_tensor(tensor, base_dir)
python
def load_external_data_for_model(model, base_dir): # type: (ModelProto, Text) -> None """ Loads external tensors into model @params model: ModelProto to load external data to base_dir: directory that contains external data """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): load_external_data_for_tensor(tensor, base_dir)
[ "def", "load_external_data_for_model", "(", "model", ",", "base_dir", ")", ":", "# type: (ModelProto, Text) -> None", "for", "tensor", "in", "_get_all_tensors", "(", "model", ")", ":", "if", "uses_external_data", "(", "tensor", ")", ":", "load_external_data_for_tensor", "(", "tensor", ",", "base_dir", ")" ]
Loads external tensors into model @params model: ModelProto to load external data to base_dir: directory that contains external data
[ "Loads", "external", "tensors", "into", "model" ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L57-L67
23,371
onnx/onnx
onnx/external_data_helper.py
convert_model_to_external_data
def convert_model_to_external_data(model, all_tensors_to_one_file=True, location=None): # type: (ModelProto, bool, Optional[Text]) -> None """ call to set all tensors as external data. save_model saves all the tensors data as external data after calling this function. @params model: ModelProto to be converted. all_tensors_to_one_file: If true, save all tensors to one external file specified by location. If false, save each tensor to a file named with the tensor name. location: specify the external file that all tensors to save to. If not specified, will use the model name. """ if all_tensors_to_one_file: file_name = Text(uuid.uuid1()) if location: file_name = location for tensor in _get_all_tensors(model): set_external_data(tensor, file_name) else: for tensor in _get_all_tensors(model): set_external_data(tensor, tensor.name)
python
def convert_model_to_external_data(model, all_tensors_to_one_file=True, location=None): # type: (ModelProto, bool, Optional[Text]) -> None """ call to set all tensors as external data. save_model saves all the tensors data as external data after calling this function. @params model: ModelProto to be converted. all_tensors_to_one_file: If true, save all tensors to one external file specified by location. If false, save each tensor to a file named with the tensor name. location: specify the external file that all tensors to save to. If not specified, will use the model name. """ if all_tensors_to_one_file: file_name = Text(uuid.uuid1()) if location: file_name = location for tensor in _get_all_tensors(model): set_external_data(tensor, file_name) else: for tensor in _get_all_tensors(model): set_external_data(tensor, tensor.name)
[ "def", "convert_model_to_external_data", "(", "model", ",", "all_tensors_to_one_file", "=", "True", ",", "location", "=", "None", ")", ":", "# type: (ModelProto, bool, Optional[Text]) -> None", "if", "all_tensors_to_one_file", ":", "file_name", "=", "Text", "(", "uuid", ".", "uuid1", "(", ")", ")", "if", "location", ":", "file_name", "=", "location", "for", "tensor", "in", "_get_all_tensors", "(", "model", ")", ":", "set_external_data", "(", "tensor", ",", "file_name", ")", "else", ":", "for", "tensor", "in", "_get_all_tensors", "(", "model", ")", ":", "set_external_data", "(", "tensor", ",", "tensor", ".", "name", ")" ]
call to set all tensors as external data. save_model saves all the tensors data as external data after calling this function. @params model: ModelProto to be converted. all_tensors_to_one_file: If true, save all tensors to one external file specified by location. If false, save each tensor to a file named with the tensor name. location: specify the external file that all tensors to save to. If not specified, will use the model name.
[ "call", "to", "set", "all", "tensors", "as", "external", "data", ".", "save_model", "saves", "all", "the", "tensors", "data", "as", "external", "data", "after", "calling", "this", "function", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L92-L111
23,372
onnx/onnx
onnx/external_data_helper.py
convert_model_from_external_data
def convert_model_from_external_data(model): # type: (ModelProto) -> None """ call to set all tensors data as embedded data. save_model saves all the tensors data as embedded data after calling this function. @params model: ModelProto to be converted. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): if not tensor.HasField("raw_data"): raise ValueError("raw_data field doesn't exist.") del tensor.external_data[:] tensor.data_location = TensorProto.DEFAULT
python
def convert_model_from_external_data(model): # type: (ModelProto) -> None """ call to set all tensors data as embedded data. save_model saves all the tensors data as embedded data after calling this function. @params model: ModelProto to be converted. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): if not tensor.HasField("raw_data"): raise ValueError("raw_data field doesn't exist.") del tensor.external_data[:] tensor.data_location = TensorProto.DEFAULT
[ "def", "convert_model_from_external_data", "(", "model", ")", ":", "# type: (ModelProto) -> None", "for", "tensor", "in", "_get_all_tensors", "(", "model", ")", ":", "if", "uses_external_data", "(", "tensor", ")", ":", "if", "not", "tensor", ".", "HasField", "(", "\"raw_data\"", ")", ":", "raise", "ValueError", "(", "\"raw_data field doesn't exist.\"", ")", "del", "tensor", ".", "external_data", "[", ":", "]", "tensor", ".", "data_location", "=", "TensorProto", ".", "DEFAULT" ]
call to set all tensors data as embedded data. save_model saves all the tensors data as embedded data after calling this function. @params model: ModelProto to be converted.
[ "call", "to", "set", "all", "tensors", "data", "as", "embedded", "data", ".", "save_model", "saves", "all", "the", "tensors", "data", "as", "embedded", "data", "after", "calling", "this", "function", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L114-L125
23,373
onnx/onnx
onnx/external_data_helper.py
save_external_data
def save_external_data(tensor, base_path): # type: (TensorProto, Text) -> None """ Write tensor data to an external file according to information in the `external_data` field. @params tensor: Tensor object to be serialized base_path: System path of a folder where tensor data is to be stored """ info = ExternalDataInfo(tensor) external_data_file_path = os.path.join(base_path, info.location) # Retrieve the tensor's data from raw_data or load external file if not tensor.HasField("raw_data"): raise ValueError("raw_data field doesn't exist.") # Create file if it doesn't exist if not os.path.isfile(external_data_file_path): open(external_data_file_path, 'ab').close() # Open file for reading and writing at random locations ('r+b') with open(external_data_file_path, 'r+b') as data_file: data_file.seek(0, 2) if info.offset is not None: # Pad file to required offset if needed file_size = data_file.tell() if info.offset > file_size: data_file.write(b"\0" * (info.offset - file_size)) data_file.seek(info.offset) offset = data_file.tell() data_file.write(tensor.raw_data) set_external_data(tensor, info.location, offset, data_file.tell() - offset)
python
def save_external_data(tensor, base_path): # type: (TensorProto, Text) -> None """ Write tensor data to an external file according to information in the `external_data` field. @params tensor: Tensor object to be serialized base_path: System path of a folder where tensor data is to be stored """ info = ExternalDataInfo(tensor) external_data_file_path = os.path.join(base_path, info.location) # Retrieve the tensor's data from raw_data or load external file if not tensor.HasField("raw_data"): raise ValueError("raw_data field doesn't exist.") # Create file if it doesn't exist if not os.path.isfile(external_data_file_path): open(external_data_file_path, 'ab').close() # Open file for reading and writing at random locations ('r+b') with open(external_data_file_path, 'r+b') as data_file: data_file.seek(0, 2) if info.offset is not None: # Pad file to required offset if needed file_size = data_file.tell() if info.offset > file_size: data_file.write(b"\0" * (info.offset - file_size)) data_file.seek(info.offset) offset = data_file.tell() data_file.write(tensor.raw_data) set_external_data(tensor, info.location, offset, data_file.tell() - offset)
[ "def", "save_external_data", "(", "tensor", ",", "base_path", ")", ":", "# type: (TensorProto, Text) -> None", "info", "=", "ExternalDataInfo", "(", "tensor", ")", "external_data_file_path", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "info", ".", "location", ")", "# Retrieve the tensor's data from raw_data or load external file", "if", "not", "tensor", ".", "HasField", "(", "\"raw_data\"", ")", ":", "raise", "ValueError", "(", "\"raw_data field doesn't exist.\"", ")", "# Create file if it doesn't exist", "if", "not", "os", ".", "path", ".", "isfile", "(", "external_data_file_path", ")", ":", "open", "(", "external_data_file_path", ",", "'ab'", ")", ".", "close", "(", ")", "# Open file for reading and writing at random locations ('r+b')", "with", "open", "(", "external_data_file_path", ",", "'r+b'", ")", "as", "data_file", ":", "data_file", ".", "seek", "(", "0", ",", "2", ")", "if", "info", ".", "offset", "is", "not", "None", ":", "# Pad file to required offset if needed", "file_size", "=", "data_file", ".", "tell", "(", ")", "if", "info", ".", "offset", ">", "file_size", ":", "data_file", ".", "write", "(", "b\"\\0\"", "*", "(", "info", ".", "offset", "-", "file_size", ")", ")", "data_file", ".", "seek", "(", "info", ".", "offset", ")", "offset", "=", "data_file", ".", "tell", "(", ")", "data_file", ".", "write", "(", "tensor", ".", "raw_data", ")", "set_external_data", "(", "tensor", ",", "info", ".", "location", ",", "offset", ",", "data_file", ".", "tell", "(", ")", "-", "offset", ")" ]
Write tensor data to an external file according to information in the `external_data` field. @params tensor: Tensor object to be serialized base_path: System path of a folder where tensor data is to be stored
[ "Write", "tensor", "data", "to", "an", "external", "file", "according", "to", "information", "in", "the", "external_data", "field", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L128-L159
23,374
onnx/onnx
onnx/external_data_helper.py
_get_attribute_tensors
def _get_attribute_tensors(onnx_model_proto): # type: (ModelProto) -> Iterable[TensorProto] """Create an iterator of tensors from node attributes of an ONNX model.""" for node in onnx_model_proto.graph.node: for attribute in node.attribute: if attribute.HasField("t"): yield attribute.t for tensor in attribute.tensors: yield tensor
python
def _get_attribute_tensors(onnx_model_proto): # type: (ModelProto) -> Iterable[TensorProto] """Create an iterator of tensors from node attributes of an ONNX model.""" for node in onnx_model_proto.graph.node: for attribute in node.attribute: if attribute.HasField("t"): yield attribute.t for tensor in attribute.tensors: yield tensor
[ "def", "_get_attribute_tensors", "(", "onnx_model_proto", ")", ":", "# type: (ModelProto) -> Iterable[TensorProto]", "for", "node", "in", "onnx_model_proto", ".", "graph", ".", "node", ":", "for", "attribute", "in", "node", ".", "attribute", ":", "if", "attribute", ".", "HasField", "(", "\"t\"", ")", ":", "yield", "attribute", ".", "t", "for", "tensor", "in", "attribute", ".", "tensors", ":", "yield", "tensor" ]
Create an iterator of tensors from node attributes of an ONNX model.
[ "Create", "an", "iterator", "of", "tensors", "from", "node", "attributes", "of", "an", "ONNX", "model", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L174-L181
23,375
onnx/onnx
onnx/external_data_helper.py
remove_external_data_field
def remove_external_data_field(tensor, field_key): # type: (TensorProto, Text) -> None """ Remove a field from a Tensor's external_data key-value store. Modifies tensor object in place. @params tensor: Tensor object from which value will be removed field_key: The key of the field to be removed """ for (i, field) in enumerate(tensor.external_data): if field.key == field_key: del tensor.external_data[i]
python
def remove_external_data_field(tensor, field_key): # type: (TensorProto, Text) -> None """ Remove a field from a Tensor's external_data key-value store. Modifies tensor object in place. @params tensor: Tensor object from which value will be removed field_key: The key of the field to be removed """ for (i, field) in enumerate(tensor.external_data): if field.key == field_key: del tensor.external_data[i]
[ "def", "remove_external_data_field", "(", "tensor", ",", "field_key", ")", ":", "# type: (TensorProto, Text) -> None", "for", "(", "i", ",", "field", ")", "in", "enumerate", "(", "tensor", ".", "external_data", ")", ":", "if", "field", ".", "key", "==", "field_key", ":", "del", "tensor", ".", "external_data", "[", "i", "]" ]
Remove a field from a Tensor's external_data key-value store. Modifies tensor object in place. @params tensor: Tensor object from which value will be removed field_key: The key of the field to be removed
[ "Remove", "a", "field", "from", "a", "Tensor", "s", "external_data", "key", "-", "value", "store", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L197-L209
23,376
onnx/onnx
onnx/external_data_helper.py
write_external_data_tensors
def write_external_data_tensors(model, filepath): # type: (ModelProto, Text) -> ModelProto """ Write external data of all tensors to files on disk. Note: This function also strips basepath information from all tensors' external_data fields. @params model: Model object which is the source of tensors to serialize. filepath: System path to the directory which should be treated as base path for external data. @return The modified model object. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): save_external_data(tensor, filepath) tensor.ClearField(str('raw_data')) return model
python
def write_external_data_tensors(model, filepath): # type: (ModelProto, Text) -> ModelProto """ Write external data of all tensors to files on disk. Note: This function also strips basepath information from all tensors' external_data fields. @params model: Model object which is the source of tensors to serialize. filepath: System path to the directory which should be treated as base path for external data. @return The modified model object. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): save_external_data(tensor, filepath) tensor.ClearField(str('raw_data')) return model
[ "def", "write_external_data_tensors", "(", "model", ",", "filepath", ")", ":", "# type: (ModelProto, Text) -> ModelProto", "for", "tensor", "in", "_get_all_tensors", "(", "model", ")", ":", "if", "uses_external_data", "(", "tensor", ")", ":", "save_external_data", "(", "tensor", ",", "filepath", ")", "tensor", ".", "ClearField", "(", "str", "(", "'raw_data'", ")", ")", "return", "model" ]
Write external data of all tensors to files on disk. Note: This function also strips basepath information from all tensors' external_data fields. @params model: Model object which is the source of tensors to serialize. filepath: System path to the directory which should be treated as base path for external data. @return The modified model object.
[ "Write", "external", "data", "of", "all", "tensors", "to", "files", "on", "disk", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/external_data_helper.py#L212-L230
23,377
onnx/onnx
tools/protoc-gen-mypy.py
PkgWriter._import_message
def _import_message(self, type_name): # type: (d.FieldDescriptorProto) -> Text """Import a referenced message and return a handle""" name = cast(Text, type_name) if name[0] == '.' and name[1].isupper() and name[2].islower(): # Message defined in this file return name[1:] message_fd = self.descriptors.message_to_fd[name] if message_fd.name == self.fd.name: # message defined in this package split = name.split('.') for i, segment in enumerate(split): if segment and segment[0].isupper() and segment[1].islower(): return ".".join(split[i:]) # Not in package. Must import split = name.split(".") for i, segment in enumerate(split): if segment and segment[0].isupper() and segment[1].islower(): assert message_fd.name.endswith('.proto') import_name = self._import(message_fd.name[:-6].replace('-', '_') + "_pb2", segment) remains = ".".join(split[i + 1:]) if not remains: return import_name raise AssertionError("Don't support nested imports yet") # return new_nested_import(import_name, remains) raise AssertionError("Could not parse local name " + name)
python
def _import_message(self, type_name): # type: (d.FieldDescriptorProto) -> Text """Import a referenced message and return a handle""" name = cast(Text, type_name) if name[0] == '.' and name[1].isupper() and name[2].islower(): # Message defined in this file return name[1:] message_fd = self.descriptors.message_to_fd[name] if message_fd.name == self.fd.name: # message defined in this package split = name.split('.') for i, segment in enumerate(split): if segment and segment[0].isupper() and segment[1].islower(): return ".".join(split[i:]) # Not in package. Must import split = name.split(".") for i, segment in enumerate(split): if segment and segment[0].isupper() and segment[1].islower(): assert message_fd.name.endswith('.proto') import_name = self._import(message_fd.name[:-6].replace('-', '_') + "_pb2", segment) remains = ".".join(split[i + 1:]) if not remains: return import_name raise AssertionError("Don't support nested imports yet") # return new_nested_import(import_name, remains) raise AssertionError("Could not parse local name " + name)
[ "def", "_import_message", "(", "self", ",", "type_name", ")", ":", "# type: (d.FieldDescriptorProto) -> Text", "name", "=", "cast", "(", "Text", ",", "type_name", ")", "if", "name", "[", "0", "]", "==", "'.'", "and", "name", "[", "1", "]", ".", "isupper", "(", ")", "and", "name", "[", "2", "]", ".", "islower", "(", ")", ":", "# Message defined in this file", "return", "name", "[", "1", ":", "]", "message_fd", "=", "self", ".", "descriptors", ".", "message_to_fd", "[", "name", "]", "if", "message_fd", ".", "name", "==", "self", ".", "fd", ".", "name", ":", "# message defined in this package", "split", "=", "name", ".", "split", "(", "'.'", ")", "for", "i", ",", "segment", "in", "enumerate", "(", "split", ")", ":", "if", "segment", "and", "segment", "[", "0", "]", ".", "isupper", "(", ")", "and", "segment", "[", "1", "]", ".", "islower", "(", ")", ":", "return", "\".\"", ".", "join", "(", "split", "[", "i", ":", "]", ")", "# Not in package. Must import", "split", "=", "name", ".", "split", "(", "\".\"", ")", "for", "i", ",", "segment", "in", "enumerate", "(", "split", ")", ":", "if", "segment", "and", "segment", "[", "0", "]", ".", "isupper", "(", ")", "and", "segment", "[", "1", "]", ".", "islower", "(", ")", ":", "assert", "message_fd", ".", "name", ".", "endswith", "(", "'.proto'", ")", "import_name", "=", "self", ".", "_import", "(", "message_fd", ".", "name", "[", ":", "-", "6", "]", ".", "replace", "(", "'-'", ",", "'_'", ")", "+", "\"_pb2\"", ",", "segment", ")", "remains", "=", "\".\"", ".", "join", "(", "split", "[", "i", "+", "1", ":", "]", ")", "if", "not", "remains", ":", "return", "import_name", "raise", "AssertionError", "(", "\"Don't support nested imports yet\"", ")", "# return new_nested_import(import_name, remains)", "raise", "AssertionError", "(", "\"Could not parse local name \"", "+", "name", ")" ]
Import a referenced message and return a handle
[ "Import", "a", "referenced", "message", "and", "return", "a", "handle" ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/tools/protoc-gen-mypy.py#L83-L112
23,378
onnx/onnx
onnx/helper.py
make_node
def make_node( op_type, # type: Text inputs, # type: Sequence[Text] outputs, # type: Sequence[Text] name=None, # type: Optional[Text] doc_string=None, # type: Optional[Text] domain=None, # type: Optional[Text] **kwargs # type: Any ): # type: (...) -> NodeProto """Construct a NodeProto. Arguments: op_type (string): The name of the operator to construct inputs (list of string): list of input names outputs (list of string): list of output names name (string, default None): optional unique identifier for NodeProto doc_string (string, default None): optional documentation string for NodeProto domain (string, default None): optional domain for NodeProto. If it's None, we will just use default domain (which is empty) **kwargs (dict): the attributes of the node. The acceptable values are documented in :func:`make_attribute`. """ node = NodeProto() node.op_type = op_type node.input.extend(inputs) node.output.extend(outputs) if name: node.name = name if doc_string: node.doc_string = doc_string if domain is not None: node.domain = domain if kwargs: node.attribute.extend( make_attribute(key, value) for key, value in sorted(kwargs.items())) return node
python
def make_node( op_type, # type: Text inputs, # type: Sequence[Text] outputs, # type: Sequence[Text] name=None, # type: Optional[Text] doc_string=None, # type: Optional[Text] domain=None, # type: Optional[Text] **kwargs # type: Any ): # type: (...) -> NodeProto """Construct a NodeProto. Arguments: op_type (string): The name of the operator to construct inputs (list of string): list of input names outputs (list of string): list of output names name (string, default None): optional unique identifier for NodeProto doc_string (string, default None): optional documentation string for NodeProto domain (string, default None): optional domain for NodeProto. If it's None, we will just use default domain (which is empty) **kwargs (dict): the attributes of the node. The acceptable values are documented in :func:`make_attribute`. """ node = NodeProto() node.op_type = op_type node.input.extend(inputs) node.output.extend(outputs) if name: node.name = name if doc_string: node.doc_string = doc_string if domain is not None: node.domain = domain if kwargs: node.attribute.extend( make_attribute(key, value) for key, value in sorted(kwargs.items())) return node
[ "def", "make_node", "(", "op_type", ",", "# type: Text", "inputs", ",", "# type: Sequence[Text]", "outputs", ",", "# type: Sequence[Text]", "name", "=", "None", ",", "# type: Optional[Text]", "doc_string", "=", "None", ",", "# type: Optional[Text]", "domain", "=", "None", ",", "# type: Optional[Text]", "*", "*", "kwargs", "# type: Any", ")", ":", "# type: (...) -> NodeProto", "node", "=", "NodeProto", "(", ")", "node", ".", "op_type", "=", "op_type", "node", ".", "input", ".", "extend", "(", "inputs", ")", "node", ".", "output", ".", "extend", "(", "outputs", ")", "if", "name", ":", "node", ".", "name", "=", "name", "if", "doc_string", ":", "node", ".", "doc_string", "=", "doc_string", "if", "domain", "is", "not", "None", ":", "node", ".", "domain", "=", "domain", "if", "kwargs", ":", "node", ".", "attribute", ".", "extend", "(", "make_attribute", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "sorted", "(", "kwargs", ".", "items", "(", ")", ")", ")", "return", "node" ]
Construct a NodeProto. Arguments: op_type (string): The name of the operator to construct inputs (list of string): list of input names outputs (list of string): list of output names name (string, default None): optional unique identifier for NodeProto doc_string (string, default None): optional documentation string for NodeProto domain (string, default None): optional domain for NodeProto. If it's None, we will just use default domain (which is empty) **kwargs (dict): the attributes of the node. The acceptable values are documented in :func:`make_attribute`.
[ "Construct", "a", "NodeProto", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L20-L57
23,379
onnx/onnx
onnx/helper.py
make_operatorsetid
def make_operatorsetid( domain, # type: Text version, # type: int ): # type: (...) -> OperatorSetIdProto """Construct an OperatorSetIdProto. Arguments: domain (string): The domain of the operator set id version (integer): Version of operator set id """ operatorsetid = OperatorSetIdProto() operatorsetid.domain = domain operatorsetid.version = version return operatorsetid
python
def make_operatorsetid( domain, # type: Text version, # type: int ): # type: (...) -> OperatorSetIdProto """Construct an OperatorSetIdProto. Arguments: domain (string): The domain of the operator set id version (integer): Version of operator set id """ operatorsetid = OperatorSetIdProto() operatorsetid.domain = domain operatorsetid.version = version return operatorsetid
[ "def", "make_operatorsetid", "(", "domain", ",", "# type: Text", "version", ",", "# type: int", ")", ":", "# type: (...) -> OperatorSetIdProto", "operatorsetid", "=", "OperatorSetIdProto", "(", ")", "operatorsetid", ".", "domain", "=", "domain", "operatorsetid", ".", "version", "=", "version", "return", "operatorsetid" ]
Construct an OperatorSetIdProto. Arguments: domain (string): The domain of the operator set id version (integer): Version of operator set id
[ "Construct", "an", "OperatorSetIdProto", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L60-L73
23,380
onnx/onnx
onnx/helper.py
_to_bytes_or_false
def _to_bytes_or_false(val): # type: (Union[Text, bytes]) -> Union[bytes, bool] """An internal graph to convert the input to a bytes or to False. The criteria for conversion is as follows and should be python 2 and 3 compatible: - If val is py2 str or py3 bytes: return bytes - If val is py2 unicode or py3 str: return val.decode('utf-8') - Otherwise, return False """ if isinstance(val, bytes): return val else: try: return val.encode('utf-8') except AttributeError: return False
python
def _to_bytes_or_false(val): # type: (Union[Text, bytes]) -> Union[bytes, bool] """An internal graph to convert the input to a bytes or to False. The criteria for conversion is as follows and should be python 2 and 3 compatible: - If val is py2 str or py3 bytes: return bytes - If val is py2 unicode or py3 str: return val.decode('utf-8') - Otherwise, return False """ if isinstance(val, bytes): return val else: try: return val.encode('utf-8') except AttributeError: return False
[ "def", "_to_bytes_or_false", "(", "val", ")", ":", "# type: (Union[Text, bytes]) -> Union[bytes, bool]", "if", "isinstance", "(", "val", ",", "bytes", ")", ":", "return", "val", "else", ":", "try", ":", "return", "val", ".", "encode", "(", "'utf-8'", ")", "except", "AttributeError", ":", "return", "False" ]
An internal graph to convert the input to a bytes or to False. The criteria for conversion is as follows and should be python 2 and 3 compatible: - If val is py2 str or py3 bytes: return bytes - If val is py2 unicode or py3 str: return val.decode('utf-8') - Otherwise, return False
[ "An", "internal", "graph", "to", "convert", "the", "input", "to", "a", "bytes", "or", "to", "False", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L179-L194
23,381
onnx/onnx
onnx/helper.py
make_attribute
def make_attribute( key, # type: Text value, # type: Any doc_string=None # type: Optional[Text] ): # type: (...) -> AttributeProto """Makes an AttributeProto based on the value type.""" attr = AttributeProto() attr.name = key if doc_string: attr.doc_string = doc_string is_iterable = isinstance(value, collections.Iterable) bytes_or_false = _to_bytes_or_false(value) # First, singular cases # float if isinstance(value, float): attr.f = value attr.type = AttributeProto.FLOAT # integer elif isinstance(value, numbers.Integral): attr.i = cast(int, value) attr.type = AttributeProto.INT # string elif bytes_or_false: assert isinstance(bytes_or_false, bytes) attr.s = bytes_or_false attr.type = AttributeProto.STRING elif isinstance(value, TensorProto): attr.t.CopyFrom(value) attr.type = AttributeProto.TENSOR elif isinstance(value, GraphProto): attr.g.CopyFrom(value) attr.type = AttributeProto.GRAPH # third, iterable cases elif is_iterable: byte_array = [_to_bytes_or_false(v) for v in value] if all(isinstance(v, float) for v in value): attr.floats.extend(value) attr.type = AttributeProto.FLOATS elif all(isinstance(v, numbers.Integral) for v in value): # Turn np.int32/64 into Python built-in int. attr.ints.extend(int(v) for v in value) attr.type = AttributeProto.INTS elif all(byte_array): attr.strings.extend(cast(List[bytes], byte_array)) attr.type = AttributeProto.STRINGS elif all(isinstance(v, TensorProto) for v in value): attr.tensors.extend(value) attr.type = AttributeProto.TENSORS elif all(isinstance(v, GraphProto) for v in value): attr.graphs.extend(value) attr.type = AttributeProto.GRAPHS else: raise ValueError( "You passed in an iterable attribute but I cannot figure out " "its applicable type.") else: raise ValueError( 'Value "{}" is not valid attribute data type.'.format(value)) return attr
python
def make_attribute( key, # type: Text value, # type: Any doc_string=None # type: Optional[Text] ): # type: (...) -> AttributeProto """Makes an AttributeProto based on the value type.""" attr = AttributeProto() attr.name = key if doc_string: attr.doc_string = doc_string is_iterable = isinstance(value, collections.Iterable) bytes_or_false = _to_bytes_or_false(value) # First, singular cases # float if isinstance(value, float): attr.f = value attr.type = AttributeProto.FLOAT # integer elif isinstance(value, numbers.Integral): attr.i = cast(int, value) attr.type = AttributeProto.INT # string elif bytes_or_false: assert isinstance(bytes_or_false, bytes) attr.s = bytes_or_false attr.type = AttributeProto.STRING elif isinstance(value, TensorProto): attr.t.CopyFrom(value) attr.type = AttributeProto.TENSOR elif isinstance(value, GraphProto): attr.g.CopyFrom(value) attr.type = AttributeProto.GRAPH # third, iterable cases elif is_iterable: byte_array = [_to_bytes_or_false(v) for v in value] if all(isinstance(v, float) for v in value): attr.floats.extend(value) attr.type = AttributeProto.FLOATS elif all(isinstance(v, numbers.Integral) for v in value): # Turn np.int32/64 into Python built-in int. attr.ints.extend(int(v) for v in value) attr.type = AttributeProto.INTS elif all(byte_array): attr.strings.extend(cast(List[bytes], byte_array)) attr.type = AttributeProto.STRINGS elif all(isinstance(v, TensorProto) for v in value): attr.tensors.extend(value) attr.type = AttributeProto.TENSORS elif all(isinstance(v, GraphProto) for v in value): attr.graphs.extend(value) attr.type = AttributeProto.GRAPHS else: raise ValueError( "You passed in an iterable attribute but I cannot figure out " "its applicable type.") else: raise ValueError( 'Value "{}" is not valid attribute data type.'.format(value)) return attr
[ "def", "make_attribute", "(", "key", ",", "# type: Text", "value", ",", "# type: Any", "doc_string", "=", "None", "# type: Optional[Text]", ")", ":", "# type: (...) -> AttributeProto", "attr", "=", "AttributeProto", "(", ")", "attr", ".", "name", "=", "key", "if", "doc_string", ":", "attr", ".", "doc_string", "=", "doc_string", "is_iterable", "=", "isinstance", "(", "value", ",", "collections", ".", "Iterable", ")", "bytes_or_false", "=", "_to_bytes_or_false", "(", "value", ")", "# First, singular cases", "# float", "if", "isinstance", "(", "value", ",", "float", ")", ":", "attr", ".", "f", "=", "value", "attr", ".", "type", "=", "AttributeProto", ".", "FLOAT", "# integer", "elif", "isinstance", "(", "value", ",", "numbers", ".", "Integral", ")", ":", "attr", ".", "i", "=", "cast", "(", "int", ",", "value", ")", "attr", ".", "type", "=", "AttributeProto", ".", "INT", "# string", "elif", "bytes_or_false", ":", "assert", "isinstance", "(", "bytes_or_false", ",", "bytes", ")", "attr", ".", "s", "=", "bytes_or_false", "attr", ".", "type", "=", "AttributeProto", ".", "STRING", "elif", "isinstance", "(", "value", ",", "TensorProto", ")", ":", "attr", ".", "t", ".", "CopyFrom", "(", "value", ")", "attr", ".", "type", "=", "AttributeProto", ".", "TENSOR", "elif", "isinstance", "(", "value", ",", "GraphProto", ")", ":", "attr", ".", "g", ".", "CopyFrom", "(", "value", ")", "attr", ".", "type", "=", "AttributeProto", ".", "GRAPH", "# third, iterable cases", "elif", "is_iterable", ":", "byte_array", "=", "[", "_to_bytes_or_false", "(", "v", ")", "for", "v", "in", "value", "]", "if", "all", "(", "isinstance", "(", "v", ",", "float", ")", "for", "v", "in", "value", ")", ":", "attr", ".", "floats", ".", "extend", "(", "value", ")", "attr", ".", "type", "=", "AttributeProto", ".", "FLOATS", "elif", "all", "(", "isinstance", "(", "v", ",", "numbers", ".", "Integral", ")", "for", "v", "in", "value", ")", ":", "# Turn np.int32/64 into Python built-in int.", "attr", ".", "ints", 
".", "extend", "(", "int", "(", "v", ")", "for", "v", "in", "value", ")", "attr", ".", "type", "=", "AttributeProto", ".", "INTS", "elif", "all", "(", "byte_array", ")", ":", "attr", ".", "strings", ".", "extend", "(", "cast", "(", "List", "[", "bytes", "]", ",", "byte_array", ")", ")", "attr", ".", "type", "=", "AttributeProto", ".", "STRINGS", "elif", "all", "(", "isinstance", "(", "v", ",", "TensorProto", ")", "for", "v", "in", "value", ")", ":", "attr", ".", "tensors", ".", "extend", "(", "value", ")", "attr", ".", "type", "=", "AttributeProto", ".", "TENSORS", "elif", "all", "(", "isinstance", "(", "v", ",", "GraphProto", ")", "for", "v", "in", "value", ")", ":", "attr", ".", "graphs", ".", "extend", "(", "value", ")", "attr", ".", "type", "=", "AttributeProto", ".", "GRAPHS", "else", ":", "raise", "ValueError", "(", "\"You passed in an iterable attribute but I cannot figure out \"", "\"its applicable type.\"", ")", "else", ":", "raise", "ValueError", "(", "'Value \"{}\" is not valid attribute data type.'", ".", "format", "(", "value", ")", ")", "return", "attr" ]
Makes an AttributeProto based on the value type.
[ "Makes", "an", "AttributeProto", "based", "on", "the", "value", "type", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L197-L256
23,382
onnx/onnx
onnx/helper.py
make_tensor_value_info
def make_tensor_value_info( name, # type: Text elem_type, # type: int shape, # type: Optional[Sequence[Union[Text, int]]] doc_string="", # type: Text shape_denotation=None, # type: Optional[List[Text]] ): # type: (...) -> ValueInfoProto """Makes a ValueInfoProto based on the data type and shape.""" value_info_proto = ValueInfoProto() value_info_proto.name = name if doc_string: value_info_proto.doc_string = doc_string tensor_type_proto = value_info_proto.type.tensor_type tensor_type_proto.elem_type = elem_type tensor_shape_proto = tensor_type_proto.shape if shape is not None: # You might think this is a no-op (extending a normal Python # list by [] certainly is), but protobuf lists work a little # differently; if a field is never set, it is omitted from the # resulting protobuf; a list that is explicitly set to be # empty will get an (empty) entry in the protobuf. This # difference is visible to our consumers, so make sure we emit # an empty shape! tensor_shape_proto.dim.extend([]) if shape_denotation: if len(shape_denotation) != len(shape): raise ValueError( 'Invalid shape_denotation. ' 'Must be of the same length as shape.') for i, d in enumerate(shape): dim = tensor_shape_proto.dim.add() if d is None: pass elif isinstance(d, integer_types): dim.dim_value = d elif isinstance(d, text_type): dim.dim_param = d else: raise ValueError( 'Invalid item in shape: {}. ' 'Needs to of integer_types or text_type.'.format(d)) if shape_denotation: dim.denotation = shape_denotation[i] return value_info_proto
python
def make_tensor_value_info( name, # type: Text elem_type, # type: int shape, # type: Optional[Sequence[Union[Text, int]]] doc_string="", # type: Text shape_denotation=None, # type: Optional[List[Text]] ): # type: (...) -> ValueInfoProto """Makes a ValueInfoProto based on the data type and shape.""" value_info_proto = ValueInfoProto() value_info_proto.name = name if doc_string: value_info_proto.doc_string = doc_string tensor_type_proto = value_info_proto.type.tensor_type tensor_type_proto.elem_type = elem_type tensor_shape_proto = tensor_type_proto.shape if shape is not None: # You might think this is a no-op (extending a normal Python # list by [] certainly is), but protobuf lists work a little # differently; if a field is never set, it is omitted from the # resulting protobuf; a list that is explicitly set to be # empty will get an (empty) entry in the protobuf. This # difference is visible to our consumers, so make sure we emit # an empty shape! tensor_shape_proto.dim.extend([]) if shape_denotation: if len(shape_denotation) != len(shape): raise ValueError( 'Invalid shape_denotation. ' 'Must be of the same length as shape.') for i, d in enumerate(shape): dim = tensor_shape_proto.dim.add() if d is None: pass elif isinstance(d, integer_types): dim.dim_value = d elif isinstance(d, text_type): dim.dim_param = d else: raise ValueError( 'Invalid item in shape: {}. ' 'Needs to of integer_types or text_type.'.format(d)) if shape_denotation: dim.denotation = shape_denotation[i] return value_info_proto
[ "def", "make_tensor_value_info", "(", "name", ",", "# type: Text", "elem_type", ",", "# type: int", "shape", ",", "# type: Optional[Sequence[Union[Text, int]]]", "doc_string", "=", "\"\"", ",", "# type: Text", "shape_denotation", "=", "None", ",", "# type: Optional[List[Text]]", ")", ":", "# type: (...) -> ValueInfoProto", "value_info_proto", "=", "ValueInfoProto", "(", ")", "value_info_proto", ".", "name", "=", "name", "if", "doc_string", ":", "value_info_proto", ".", "doc_string", "=", "doc_string", "tensor_type_proto", "=", "value_info_proto", ".", "type", ".", "tensor_type", "tensor_type_proto", ".", "elem_type", "=", "elem_type", "tensor_shape_proto", "=", "tensor_type_proto", ".", "shape", "if", "shape", "is", "not", "None", ":", "# You might think this is a no-op (extending a normal Python", "# list by [] certainly is), but protobuf lists work a little", "# differently; if a field is never set, it is omitted from the", "# resulting protobuf; a list that is explicitly set to be", "# empty will get an (empty) entry in the protobuf. This", "# difference is visible to our consumers, so make sure we emit", "# an empty shape!", "tensor_shape_proto", ".", "dim", ".", "extend", "(", "[", "]", ")", "if", "shape_denotation", ":", "if", "len", "(", "shape_denotation", ")", "!=", "len", "(", "shape", ")", ":", "raise", "ValueError", "(", "'Invalid shape_denotation. '", "'Must be of the same length as shape.'", ")", "for", "i", ",", "d", "in", "enumerate", "(", "shape", ")", ":", "dim", "=", "tensor_shape_proto", ".", "dim", ".", "add", "(", ")", "if", "d", "is", "None", ":", "pass", "elif", "isinstance", "(", "d", ",", "integer_types", ")", ":", "dim", ".", "dim_value", "=", "d", "elif", "isinstance", "(", "d", ",", "text_type", ")", ":", "dim", ".", "dim_param", "=", "d", "else", ":", "raise", "ValueError", "(", "'Invalid item in shape: {}. 
'", "'Needs to of integer_types or text_type.'", ".", "format", "(", "d", ")", ")", "if", "shape_denotation", ":", "dim", ".", "denotation", "=", "shape_denotation", "[", "i", "]", "return", "value_info_proto" ]
Makes a ValueInfoProto based on the data type and shape.
[ "Makes", "a", "ValueInfoProto", "based", "on", "the", "data", "type", "and", "shape", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L290-L340
23,383
onnx/onnx
onnx/helper.py
strip_doc_string
def strip_doc_string(proto): # type: (google.protobuf.message.Message) -> None """ Empties `doc_string` field on any nested protobuf messages """ assert isinstance(proto, google.protobuf.message.Message) for descriptor in proto.DESCRIPTOR.fields: if descriptor.name == 'doc_string': proto.ClearField(descriptor.name) elif descriptor.type == descriptor.TYPE_MESSAGE: if descriptor.label == descriptor.LABEL_REPEATED: for x in getattr(proto, descriptor.name): strip_doc_string(x) elif proto.HasField(descriptor.name): strip_doc_string(getattr(proto, descriptor.name))
python
def strip_doc_string(proto): # type: (google.protobuf.message.Message) -> None """ Empties `doc_string` field on any nested protobuf messages """ assert isinstance(proto, google.protobuf.message.Message) for descriptor in proto.DESCRIPTOR.fields: if descriptor.name == 'doc_string': proto.ClearField(descriptor.name) elif descriptor.type == descriptor.TYPE_MESSAGE: if descriptor.label == descriptor.LABEL_REPEATED: for x in getattr(proto, descriptor.name): strip_doc_string(x) elif proto.HasField(descriptor.name): strip_doc_string(getattr(proto, descriptor.name))
[ "def", "strip_doc_string", "(", "proto", ")", ":", "# type: (google.protobuf.message.Message) -> None", "assert", "isinstance", "(", "proto", ",", "google", ".", "protobuf", ".", "message", ".", "Message", ")", "for", "descriptor", "in", "proto", ".", "DESCRIPTOR", ".", "fields", ":", "if", "descriptor", ".", "name", "==", "'doc_string'", ":", "proto", ".", "ClearField", "(", "descriptor", ".", "name", ")", "elif", "descriptor", ".", "type", "==", "descriptor", ".", "TYPE_MESSAGE", ":", "if", "descriptor", ".", "label", "==", "descriptor", ".", "LABEL_REPEATED", ":", "for", "x", "in", "getattr", "(", "proto", ",", "descriptor", ".", "name", ")", ":", "strip_doc_string", "(", "x", ")", "elif", "proto", ".", "HasField", "(", "descriptor", ".", "name", ")", ":", "strip_doc_string", "(", "getattr", "(", "proto", ",", "descriptor", ".", "name", ")", ")" ]
Empties `doc_string` field on any nested protobuf messages
[ "Empties", "doc_string", "field", "on", "any", "nested", "protobuf", "messages" ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/helper.py#L538-L551
23,384
onnx/onnx
onnx/numpy_helper.py
to_array
def to_array(tensor): # type: (TensorProto) -> np.ndarray[Any] """Converts a tensor def object to a numpy array. Inputs: tensor: a TensorProto object. Returns: arr: the converted array. """ if tensor.HasField("segment"): raise ValueError( "Currently not supporting loading segments.") if tensor.data_type == TensorProto.UNDEFINED: raise ValueError("The data type is not defined.") tensor_dtype = tensor.data_type np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[tensor_dtype] storage_type = mapping.TENSOR_TYPE_TO_STORAGE_TENSOR_TYPE[tensor_dtype] storage_np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[storage_type] storage_field = mapping.STORAGE_TENSOR_TYPE_TO_FIELD[storage_type] dims = tensor.dims if tensor.data_type == TensorProto.STRING: utf8_strings = getattr(tensor, storage_field) ss = list(s.decode('utf-8') for s in utf8_strings) return np.asarray(ss).astype(np_dtype).reshape(dims) if tensor.HasField("raw_data"): # Raw_bytes support: using frombuffer. return np.frombuffer( tensor.raw_data, dtype=np_dtype).reshape(dims) else: data = getattr(tensor, storage_field), # type: Sequence[np.complex64] if (tensor_dtype == TensorProto.COMPLEX64 or tensor_dtype == TensorProto.COMPLEX128): data = combine_pairs_to_complex(data) return ( np.asarray( data, dtype=storage_np_dtype) .astype(np_dtype) .reshape(dims) )
python
def to_array(tensor): # type: (TensorProto) -> np.ndarray[Any] """Converts a tensor def object to a numpy array. Inputs: tensor: a TensorProto object. Returns: arr: the converted array. """ if tensor.HasField("segment"): raise ValueError( "Currently not supporting loading segments.") if tensor.data_type == TensorProto.UNDEFINED: raise ValueError("The data type is not defined.") tensor_dtype = tensor.data_type np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[tensor_dtype] storage_type = mapping.TENSOR_TYPE_TO_STORAGE_TENSOR_TYPE[tensor_dtype] storage_np_dtype = mapping.TENSOR_TYPE_TO_NP_TYPE[storage_type] storage_field = mapping.STORAGE_TENSOR_TYPE_TO_FIELD[storage_type] dims = tensor.dims if tensor.data_type == TensorProto.STRING: utf8_strings = getattr(tensor, storage_field) ss = list(s.decode('utf-8') for s in utf8_strings) return np.asarray(ss).astype(np_dtype).reshape(dims) if tensor.HasField("raw_data"): # Raw_bytes support: using frombuffer. return np.frombuffer( tensor.raw_data, dtype=np_dtype).reshape(dims) else: data = getattr(tensor, storage_field), # type: Sequence[np.complex64] if (tensor_dtype == TensorProto.COMPLEX64 or tensor_dtype == TensorProto.COMPLEX128): data = combine_pairs_to_complex(data) return ( np.asarray( data, dtype=storage_np_dtype) .astype(np_dtype) .reshape(dims) )
[ "def", "to_array", "(", "tensor", ")", ":", "# type: (TensorProto) -> np.ndarray[Any]", "if", "tensor", ".", "HasField", "(", "\"segment\"", ")", ":", "raise", "ValueError", "(", "\"Currently not supporting loading segments.\"", ")", "if", "tensor", ".", "data_type", "==", "TensorProto", ".", "UNDEFINED", ":", "raise", "ValueError", "(", "\"The data type is not defined.\"", ")", "tensor_dtype", "=", "tensor", ".", "data_type", "np_dtype", "=", "mapping", ".", "TENSOR_TYPE_TO_NP_TYPE", "[", "tensor_dtype", "]", "storage_type", "=", "mapping", ".", "TENSOR_TYPE_TO_STORAGE_TENSOR_TYPE", "[", "tensor_dtype", "]", "storage_np_dtype", "=", "mapping", ".", "TENSOR_TYPE_TO_NP_TYPE", "[", "storage_type", "]", "storage_field", "=", "mapping", ".", "STORAGE_TENSOR_TYPE_TO_FIELD", "[", "storage_type", "]", "dims", "=", "tensor", ".", "dims", "if", "tensor", ".", "data_type", "==", "TensorProto", ".", "STRING", ":", "utf8_strings", "=", "getattr", "(", "tensor", ",", "storage_field", ")", "ss", "=", "list", "(", "s", ".", "decode", "(", "'utf-8'", ")", "for", "s", "in", "utf8_strings", ")", "return", "np", ".", "asarray", "(", "ss", ")", ".", "astype", "(", "np_dtype", ")", ".", "reshape", "(", "dims", ")", "if", "tensor", ".", "HasField", "(", "\"raw_data\"", ")", ":", "# Raw_bytes support: using frombuffer.", "return", "np", ".", "frombuffer", "(", "tensor", ".", "raw_data", ",", "dtype", "=", "np_dtype", ")", ".", "reshape", "(", "dims", ")", "else", ":", "data", "=", "getattr", "(", "tensor", ",", "storage_field", ")", ",", "# type: Sequence[np.complex64]", "if", "(", "tensor_dtype", "==", "TensorProto", ".", "COMPLEX64", "or", "tensor_dtype", "==", "TensorProto", ".", "COMPLEX128", ")", ":", "data", "=", "combine_pairs_to_complex", "(", "data", ")", "return", "(", "np", ".", "asarray", "(", "data", ",", "dtype", "=", "storage_np_dtype", ")", ".", "astype", "(", "np_dtype", ")", ".", "reshape", "(", "dims", ")", ")" ]
Converts a tensor def object to a numpy array. Inputs: tensor: a TensorProto object. Returns: arr: the converted array.
[ "Converts", "a", "tensor", "def", "object", "to", "a", "numpy", "array", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/numpy_helper.py#L24-L66
23,385
onnx/onnx
onnx/numpy_helper.py
from_array
def from_array(arr, name=None): # type: (np.ndarray[Any], Optional[Text]) -> TensorProto """Converts a numpy array to a tensor def. Inputs: arr: a numpy array. name: (optional) the name of the tensor. Returns: tensor_def: the converted tensor def. """ tensor = TensorProto() tensor.dims.extend(arr.shape) if name: tensor.name = name if arr.dtype == np.object: # Special care for strings. tensor.data_type = mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype] # TODO: Introduce full string support. # We flatten the array in case there are 2-D arrays are specified # We throw the error below if we have a 3-D array or some kind of other # object. If you want more complex shapes then follow the below instructions. # Unlike other types where the shape is automatically inferred from # nested arrays of values, the only reliable way now to feed strings # is to put them into a flat array then specify type astype(np.object) # (otherwise all strings may have different types depending on their length) # and then specify shape .reshape([x, y, z]) flat_array = arr.flatten() for e in flat_array: if isinstance(e, text_type): tensor.string_data.append(e.encode('utf-8')) elif isinstance(e, np.ndarray): for s in e: if isinstance(s, text_type): tensor.string_data.append(s.encode('utf-8')) else: raise NotImplementedError( "Unrecognized object in the object array, expect a string, or array of bytes: ", str(type(e))) return tensor # For numerical types, directly use numpy raw bytes. try: dtype = mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype] except KeyError: raise RuntimeError( "Numpy data type not understood yet: {}".format(str(arr.dtype))) tensor.data_type = dtype tensor.raw_data = arr.tobytes() # note: tobytes() is only after 1.9. return tensor
python
def from_array(arr, name=None): # type: (np.ndarray[Any], Optional[Text]) -> TensorProto """Converts a numpy array to a tensor def. Inputs: arr: a numpy array. name: (optional) the name of the tensor. Returns: tensor_def: the converted tensor def. """ tensor = TensorProto() tensor.dims.extend(arr.shape) if name: tensor.name = name if arr.dtype == np.object: # Special care for strings. tensor.data_type = mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype] # TODO: Introduce full string support. # We flatten the array in case there are 2-D arrays are specified # We throw the error below if we have a 3-D array or some kind of other # object. If you want more complex shapes then follow the below instructions. # Unlike other types where the shape is automatically inferred from # nested arrays of values, the only reliable way now to feed strings # is to put them into a flat array then specify type astype(np.object) # (otherwise all strings may have different types depending on their length) # and then specify shape .reshape([x, y, z]) flat_array = arr.flatten() for e in flat_array: if isinstance(e, text_type): tensor.string_data.append(e.encode('utf-8')) elif isinstance(e, np.ndarray): for s in e: if isinstance(s, text_type): tensor.string_data.append(s.encode('utf-8')) else: raise NotImplementedError( "Unrecognized object in the object array, expect a string, or array of bytes: ", str(type(e))) return tensor # For numerical types, directly use numpy raw bytes. try: dtype = mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype] except KeyError: raise RuntimeError( "Numpy data type not understood yet: {}".format(str(arr.dtype))) tensor.data_type = dtype tensor.raw_data = arr.tobytes() # note: tobytes() is only after 1.9. return tensor
[ "def", "from_array", "(", "arr", ",", "name", "=", "None", ")", ":", "# type: (np.ndarray[Any], Optional[Text]) -> TensorProto", "tensor", "=", "TensorProto", "(", ")", "tensor", ".", "dims", ".", "extend", "(", "arr", ".", "shape", ")", "if", "name", ":", "tensor", ".", "name", "=", "name", "if", "arr", ".", "dtype", "==", "np", ".", "object", ":", "# Special care for strings.", "tensor", ".", "data_type", "=", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "arr", ".", "dtype", "]", "# TODO: Introduce full string support.", "# We flatten the array in case there are 2-D arrays are specified", "# We throw the error below if we have a 3-D array or some kind of other", "# object. If you want more complex shapes then follow the below instructions.", "# Unlike other types where the shape is automatically inferred from", "# nested arrays of values, the only reliable way now to feed strings", "# is to put them into a flat array then specify type astype(np.object)", "# (otherwise all strings may have different types depending on their length)", "# and then specify shape .reshape([x, y, z])", "flat_array", "=", "arr", ".", "flatten", "(", ")", "for", "e", "in", "flat_array", ":", "if", "isinstance", "(", "e", ",", "text_type", ")", ":", "tensor", ".", "string_data", ".", "append", "(", "e", ".", "encode", "(", "'utf-8'", ")", ")", "elif", "isinstance", "(", "e", ",", "np", ".", "ndarray", ")", ":", "for", "s", "in", "e", ":", "if", "isinstance", "(", "s", ",", "text_type", ")", ":", "tensor", ".", "string_data", ".", "append", "(", "s", ".", "encode", "(", "'utf-8'", ")", ")", "else", ":", "raise", "NotImplementedError", "(", "\"Unrecognized object in the object array, expect a string, or array of bytes: \"", ",", "str", "(", "type", "(", "e", ")", ")", ")", "return", "tensor", "# For numerical types, directly use numpy raw bytes.", "try", ":", "dtype", "=", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "arr", ".", "dtype", "]", "except", "KeyError", ":", "raise", 
"RuntimeError", "(", "\"Numpy data type not understood yet: {}\"", ".", "format", "(", "str", "(", "arr", ".", "dtype", ")", ")", ")", "tensor", ".", "data_type", "=", "dtype", "tensor", ".", "raw_data", "=", "arr", ".", "tobytes", "(", ")", "# note: tobytes() is only after 1.9.", "return", "tensor" ]
Converts a numpy array to a tensor def. Inputs: arr: a numpy array. name: (optional) the name of the tensor. Returns: tensor_def: the converted tensor def.
[ "Converts", "a", "numpy", "array", "to", "a", "tensor", "def", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/numpy_helper.py#L69-L117
23,386
onnx/onnx
onnx/__init__.py
_serialize
def _serialize(proto): # type: (Union[bytes, google.protobuf.message.Message]) -> bytes ''' Serialize a in-memory proto to bytes @params proto is a in-memory proto, such as a ModelProto, TensorProto, etc @return Serialized proto in bytes ''' if isinstance(proto, bytes): return proto elif hasattr(proto, 'SerializeToString') and callable(proto.SerializeToString): result = proto.SerializeToString() return result else: raise ValueError('No SerializeToString method is detected. ' 'neither proto is a str.\ntype is {}'.format(type(proto)))
python
def _serialize(proto): # type: (Union[bytes, google.protobuf.message.Message]) -> bytes ''' Serialize a in-memory proto to bytes @params proto is a in-memory proto, such as a ModelProto, TensorProto, etc @return Serialized proto in bytes ''' if isinstance(proto, bytes): return proto elif hasattr(proto, 'SerializeToString') and callable(proto.SerializeToString): result = proto.SerializeToString() return result else: raise ValueError('No SerializeToString method is detected. ' 'neither proto is a str.\ntype is {}'.format(type(proto)))
[ "def", "_serialize", "(", "proto", ")", ":", "# type: (Union[bytes, google.protobuf.message.Message]) -> bytes", "if", "isinstance", "(", "proto", ",", "bytes", ")", ":", "return", "proto", "elif", "hasattr", "(", "proto", ",", "'SerializeToString'", ")", "and", "callable", "(", "proto", ".", "SerializeToString", ")", ":", "result", "=", "proto", ".", "SerializeToString", "(", ")", "return", "result", "else", ":", "raise", "ValueError", "(", "'No SerializeToString method is detected. '", "'neither proto is a str.\\ntype is {}'", ".", "format", "(", "type", "(", "proto", ")", ")", ")" ]
Serialize a in-memory proto to bytes @params proto is a in-memory proto, such as a ModelProto, TensorProto, etc @return Serialized proto in bytes
[ "Serialize", "a", "in", "-", "memory", "proto", "to", "bytes" ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/__init__.py#L53-L70
23,387
onnx/onnx
onnx/__init__.py
_deserialize
def _deserialize(s, proto): # type: (bytes, _Proto) -> _Proto ''' Parse bytes into a in-memory proto @params s is bytes containing serialized proto proto is a in-memory proto object @return The proto instance filled in by s ''' if not isinstance(s, bytes): raise ValueError('Parameter s must be bytes, but got type: {}'.format(type(s))) if not (hasattr(proto, 'ParseFromString') and callable(proto.ParseFromString)): raise ValueError('No ParseFromString method is detected. ' '\ntype is {}'.format(type(proto))) decoded = cast(Optional[int], proto.ParseFromString(s)) if decoded is not None and decoded != len(s): raise google.protobuf.message.DecodeError( "Protobuf decoding consumed too few bytes: {} out of {}".format( decoded, len(s))) return proto
python
def _deserialize(s, proto): # type: (bytes, _Proto) -> _Proto ''' Parse bytes into a in-memory proto @params s is bytes containing serialized proto proto is a in-memory proto object @return The proto instance filled in by s ''' if not isinstance(s, bytes): raise ValueError('Parameter s must be bytes, but got type: {}'.format(type(s))) if not (hasattr(proto, 'ParseFromString') and callable(proto.ParseFromString)): raise ValueError('No ParseFromString method is detected. ' '\ntype is {}'.format(type(proto))) decoded = cast(Optional[int], proto.ParseFromString(s)) if decoded is not None and decoded != len(s): raise google.protobuf.message.DecodeError( "Protobuf decoding consumed too few bytes: {} out of {}".format( decoded, len(s))) return proto
[ "def", "_deserialize", "(", "s", ",", "proto", ")", ":", "# type: (bytes, _Proto) -> _Proto", "if", "not", "isinstance", "(", "s", ",", "bytes", ")", ":", "raise", "ValueError", "(", "'Parameter s must be bytes, but got type: {}'", ".", "format", "(", "type", "(", "s", ")", ")", ")", "if", "not", "(", "hasattr", "(", "proto", ",", "'ParseFromString'", ")", "and", "callable", "(", "proto", ".", "ParseFromString", ")", ")", ":", "raise", "ValueError", "(", "'No ParseFromString method is detected. '", "'\\ntype is {}'", ".", "format", "(", "type", "(", "proto", ")", ")", ")", "decoded", "=", "cast", "(", "Optional", "[", "int", "]", ",", "proto", ".", "ParseFromString", "(", "s", ")", ")", "if", "decoded", "is", "not", "None", "and", "decoded", "!=", "len", "(", "s", ")", ":", "raise", "google", ".", "protobuf", ".", "message", ".", "DecodeError", "(", "\"Protobuf decoding consumed too few bytes: {} out of {}\"", ".", "format", "(", "decoded", ",", "len", "(", "s", ")", ")", ")", "return", "proto" ]
Parse bytes into a in-memory proto @params s is bytes containing serialized proto proto is a in-memory proto object @return The proto instance filled in by s
[ "Parse", "bytes", "into", "a", "in", "-", "memory", "proto" ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/__init__.py#L76-L99
23,388
onnx/onnx
onnx/__init__.py
load_model
def load_model(f, format=None, load_external_data=True): # type: (Union[IO[bytes], Text], Optional[Any], bool) -> ModelProto ''' Loads a serialized ModelProto into memory @params f can be a file-like object (has "read" function) or a string containing a file name format is for future use @return Loaded in-memory ModelProto ''' s = _load_bytes(f) model = load_model_from_string(s, format=format) if load_external_data: model_filepath = _get_file_path(f) if model_filepath: base_dir = os.path.dirname(model_filepath) load_external_data_for_model(model, base_dir) return model
python
def load_model(f, format=None, load_external_data=True): # type: (Union[IO[bytes], Text], Optional[Any], bool) -> ModelProto ''' Loads a serialized ModelProto into memory @params f can be a file-like object (has "read" function) or a string containing a file name format is for future use @return Loaded in-memory ModelProto ''' s = _load_bytes(f) model = load_model_from_string(s, format=format) if load_external_data: model_filepath = _get_file_path(f) if model_filepath: base_dir = os.path.dirname(model_filepath) load_external_data_for_model(model, base_dir) return model
[ "def", "load_model", "(", "f", ",", "format", "=", "None", ",", "load_external_data", "=", "True", ")", ":", "# type: (Union[IO[bytes], Text], Optional[Any], bool) -> ModelProto", "s", "=", "_load_bytes", "(", "f", ")", "model", "=", "load_model_from_string", "(", "s", ",", "format", "=", "format", ")", "if", "load_external_data", ":", "model_filepath", "=", "_get_file_path", "(", "f", ")", "if", "model_filepath", ":", "base_dir", "=", "os", ".", "path", ".", "dirname", "(", "model_filepath", ")", "load_external_data_for_model", "(", "model", ",", "base_dir", ")", "return", "model" ]
Loads a serialized ModelProto into memory @params f can be a file-like object (has "read" function) or a string containing a file name format is for future use @return Loaded in-memory ModelProto
[ "Loads", "a", "serialized", "ModelProto", "into", "memory" ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/__init__.py#L102-L122
23,389
onnx/onnx
onnx/__init__.py
load_tensor
def load_tensor(f, format=None): # type: (Union[IO[bytes], Text], Optional[Any]) -> TensorProto ''' Loads a serialized TensorProto into memory @params f can be a file-like object (has "read" function) or a string containing a file name format is for future use @return Loaded in-memory TensorProto ''' s = _load_bytes(f) return load_tensor_from_string(s, format=format)
python
def load_tensor(f, format=None): # type: (Union[IO[bytes], Text], Optional[Any]) -> TensorProto ''' Loads a serialized TensorProto into memory @params f can be a file-like object (has "read" function) or a string containing a file name format is for future use @return Loaded in-memory TensorProto ''' s = _load_bytes(f) return load_tensor_from_string(s, format=format)
[ "def", "load_tensor", "(", "f", ",", "format", "=", "None", ")", ":", "# type: (Union[IO[bytes], Text], Optional[Any]) -> TensorProto", "s", "=", "_load_bytes", "(", "f", ")", "return", "load_tensor_from_string", "(", "s", ",", "format", "=", "format", ")" ]
Loads a serialized TensorProto into memory @params f can be a file-like object (has "read" function) or a string containing a file name format is for future use @return Loaded in-memory TensorProto
[ "Loads", "a", "serialized", "TensorProto", "into", "memory" ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/__init__.py#L125-L137
23,390
onnx/onnx
onnx/__init__.py
save_model
def save_model(proto, f, format=None): # type: (Union[ModelProto, bytes], Union[IO[bytes], Text], Optional[Any]) -> None ''' Saves the ModelProto to the specified path. @params proto should be a in-memory ModelProto f can be a file-like object (has "write" function) or a string containing a file name format is for future use ''' if isinstance(proto, bytes): proto = _deserialize(proto, ModelProto()) model_filepath = _get_file_path(f) if model_filepath: basepath = os.path.dirname(model_filepath) proto = write_external_data_tensors(proto, basepath) s = _serialize(proto) _save_bytes(s, f)
python
def save_model(proto, f, format=None): # type: (Union[ModelProto, bytes], Union[IO[bytes], Text], Optional[Any]) -> None ''' Saves the ModelProto to the specified path. @params proto should be a in-memory ModelProto f can be a file-like object (has "write" function) or a string containing a file name format is for future use ''' if isinstance(proto, bytes): proto = _deserialize(proto, ModelProto()) model_filepath = _get_file_path(f) if model_filepath: basepath = os.path.dirname(model_filepath) proto = write_external_data_tensors(proto, basepath) s = _serialize(proto) _save_bytes(s, f)
[ "def", "save_model", "(", "proto", ",", "f", ",", "format", "=", "None", ")", ":", "# type: (Union[ModelProto, bytes], Union[IO[bytes], Text], Optional[Any]) -> None", "if", "isinstance", "(", "proto", ",", "bytes", ")", ":", "proto", "=", "_deserialize", "(", "proto", ",", "ModelProto", "(", ")", ")", "model_filepath", "=", "_get_file_path", "(", "f", ")", "if", "model_filepath", ":", "basepath", "=", "os", ".", "path", ".", "dirname", "(", "model_filepath", ")", "proto", "=", "write_external_data_tensors", "(", "proto", ",", "basepath", ")", "s", "=", "_serialize", "(", "proto", ")", "_save_bytes", "(", "s", ",", "f", ")" ]
Saves the ModelProto to the specified path. @params proto should be a in-memory ModelProto f can be a file-like object (has "write" function) or a string containing a file name format is for future use
[ "Saves", "the", "ModelProto", "to", "the", "specified", "path", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/__init__.py#L168-L186
23,391
onnx/onnx
onnx/utils.py
polish_model
def polish_model(model): # type: (ModelProto) -> ModelProto ''' This function combines several useful utility functions together. ''' onnx.checker.check_model(model) onnx.helper.strip_doc_string(model) model = onnx.shape_inference.infer_shapes(model) model = onnx.optimizer.optimize(model) onnx.checker.check_model(model) return model
python
def polish_model(model): # type: (ModelProto) -> ModelProto ''' This function combines several useful utility functions together. ''' onnx.checker.check_model(model) onnx.helper.strip_doc_string(model) model = onnx.shape_inference.infer_shapes(model) model = onnx.optimizer.optimize(model) onnx.checker.check_model(model) return model
[ "def", "polish_model", "(", "model", ")", ":", "# type: (ModelProto) -> ModelProto", "onnx", ".", "checker", ".", "check_model", "(", "model", ")", "onnx", ".", "helper", ".", "strip_doc_string", "(", "model", ")", "model", "=", "onnx", ".", "shape_inference", ".", "infer_shapes", "(", "model", ")", "model", "=", "onnx", ".", "optimizer", ".", "optimize", "(", "model", ")", "onnx", ".", "checker", ".", "check_model", "(", "model", ")", "return", "model" ]
This function combines several useful utility functions together.
[ "This", "function", "combines", "several", "useful", "utility", "functions", "together", "." ]
2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4
https://github.com/onnx/onnx/blob/2f7dc10f03a072526d94b6820cedbf2a1ec5a2c4/onnx/utils.py#L14-L23
23,392
apache/incubator-mxnet
python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py
get_input_shape
def get_input_shape(sym, proto_obj): """Helper function to obtain the shape of an array""" arg_params = proto_obj.arg_dict aux_params = proto_obj.aux_dict model_input_shape = [data[1] for data in proto_obj.model_metadata.get('input_tensor_data')] data_names = [data[0] for data in proto_obj.model_metadata.get('input_tensor_data')] # creating dummy inputs inputs = [] for in_shape in model_input_shape: inputs.append(nd.ones(shape=in_shape)) data_shapes = [] for idx, input_name in enumerate(data_names): data_shapes.append((input_name, inputs[idx].shape)) ctx = context.cpu() # create a module mod = module.Module(symbol=sym, data_names=data_names, context=ctx, label_names=None) mod.bind(for_training=False, data_shapes=data_shapes, label_shapes=None) mod.set_params(arg_params=arg_params, aux_params=aux_params) data_forward = [] for idx, input_name in enumerate(data_names): val = inputs[idx] data_forward.append(val) mod.forward(io.DataBatch(data_forward)) result = mod.get_outputs()[0].asnumpy() return result.shape
python
def get_input_shape(sym, proto_obj): """Helper function to obtain the shape of an array""" arg_params = proto_obj.arg_dict aux_params = proto_obj.aux_dict model_input_shape = [data[1] for data in proto_obj.model_metadata.get('input_tensor_data')] data_names = [data[0] for data in proto_obj.model_metadata.get('input_tensor_data')] # creating dummy inputs inputs = [] for in_shape in model_input_shape: inputs.append(nd.ones(shape=in_shape)) data_shapes = [] for idx, input_name in enumerate(data_names): data_shapes.append((input_name, inputs[idx].shape)) ctx = context.cpu() # create a module mod = module.Module(symbol=sym, data_names=data_names, context=ctx, label_names=None) mod.bind(for_training=False, data_shapes=data_shapes, label_shapes=None) mod.set_params(arg_params=arg_params, aux_params=aux_params) data_forward = [] for idx, input_name in enumerate(data_names): val = inputs[idx] data_forward.append(val) mod.forward(io.DataBatch(data_forward)) result = mod.get_outputs()[0].asnumpy() return result.shape
[ "def", "get_input_shape", "(", "sym", ",", "proto_obj", ")", ":", "arg_params", "=", "proto_obj", ".", "arg_dict", "aux_params", "=", "proto_obj", ".", "aux_dict", "model_input_shape", "=", "[", "data", "[", "1", "]", "for", "data", "in", "proto_obj", ".", "model_metadata", ".", "get", "(", "'input_tensor_data'", ")", "]", "data_names", "=", "[", "data", "[", "0", "]", "for", "data", "in", "proto_obj", ".", "model_metadata", ".", "get", "(", "'input_tensor_data'", ")", "]", "# creating dummy inputs", "inputs", "=", "[", "]", "for", "in_shape", "in", "model_input_shape", ":", "inputs", ".", "append", "(", "nd", ".", "ones", "(", "shape", "=", "in_shape", ")", ")", "data_shapes", "=", "[", "]", "for", "idx", ",", "input_name", "in", "enumerate", "(", "data_names", ")", ":", "data_shapes", ".", "append", "(", "(", "input_name", ",", "inputs", "[", "idx", "]", ".", "shape", ")", ")", "ctx", "=", "context", ".", "cpu", "(", ")", "# create a module", "mod", "=", "module", ".", "Module", "(", "symbol", "=", "sym", ",", "data_names", "=", "data_names", ",", "context", "=", "ctx", ",", "label_names", "=", "None", ")", "mod", ".", "bind", "(", "for_training", "=", "False", ",", "data_shapes", "=", "data_shapes", ",", "label_shapes", "=", "None", ")", "mod", ".", "set_params", "(", "arg_params", "=", "arg_params", ",", "aux_params", "=", "aux_params", ")", "data_forward", "=", "[", "]", "for", "idx", ",", "input_name", "in", "enumerate", "(", "data_names", ")", ":", "val", "=", "inputs", "[", "idx", "]", "data_forward", ".", "append", "(", "val", ")", "mod", ".", "forward", "(", "io", ".", "DataBatch", "(", "data_forward", ")", ")", "result", "=", "mod", ".", "get_outputs", "(", ")", "[", "0", "]", ".", "asnumpy", "(", ")", "return", "result", ".", "shape" ]
Helper function to obtain the shape of an array
[ "Helper", "function", "to", "obtain", "the", "shape", "of", "an", "array" ]
1af29e9c060a4c7d60eeaacba32afdb9a7775ba7
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py#L216-L247
23,393
apache/incubator-mxnet
python/mxnet/image/image.py
imresize
def imresize(src, w, h, *args, **kwargs): r"""Resize image with OpenCV. .. note:: `imresize` uses OpenCV (not the CV2 Python library). MXNet must have been built with USE_OPENCV=1 for `imresize` to work. Parameters ---------- src : NDArray source image w : int, required Width of resized image. h : int, required Height of resized image. interp : int, optional, default=1 Interpolation method (default=cv2.INTER_LINEAR). Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. out : NDArray, optional The output NDArray to hold the result. Returns ------- out : NDArray or list of NDArrays The output of this function. Example ------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> new_image = mx.img.resize(image, 240, 360) >>> new_image <NDArray 240x360x3 @cpu(0)> """ return _internal._cvimresize(src, w, h, *args, **kwargs)
python
def imresize(src, w, h, *args, **kwargs): r"""Resize image with OpenCV. .. note:: `imresize` uses OpenCV (not the CV2 Python library). MXNet must have been built with USE_OPENCV=1 for `imresize` to work. Parameters ---------- src : NDArray source image w : int, required Width of resized image. h : int, required Height of resized image. interp : int, optional, default=1 Interpolation method (default=cv2.INTER_LINEAR). Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. out : NDArray, optional The output NDArray to hold the result. Returns ------- out : NDArray or list of NDArrays The output of this function. Example ------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> new_image = mx.img.resize(image, 240, 360) >>> new_image <NDArray 240x360x3 @cpu(0)> """ return _internal._cvimresize(src, w, h, *args, **kwargs)
[ "def", "imresize", "(", "src", ",", "w", ",", "h", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_internal", ".", "_cvimresize", "(", "src", ",", "w", ",", "h", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
r"""Resize image with OpenCV. .. note:: `imresize` uses OpenCV (not the CV2 Python library). MXNet must have been built with USE_OPENCV=1 for `imresize` to work. Parameters ---------- src : NDArray source image w : int, required Width of resized image. h : int, required Height of resized image. interp : int, optional, default=1 Interpolation method (default=cv2.INTER_LINEAR). Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. out : NDArray, optional The output NDArray to hold the result. Returns ------- out : NDArray or list of NDArrays The output of this function. Example ------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> new_image = mx.img.resize(image, 240, 360) >>> new_image <NDArray 240x360x3 @cpu(0)>
[ "r", "Resize", "image", "with", "OpenCV", "." ]
1af29e9c060a4c7d60eeaacba32afdb9a7775ba7
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/image/image.py#L86-L140
23,394
apache/incubator-mxnet
python/mxnet/image/image.py
imdecode
def imdecode(buf, *args, **kwargs): """Decode an image to an NDArray. .. note:: `imdecode` uses OpenCV (not the CV2 Python library). MXNet must have been built with USE_OPENCV=1 for `imdecode` to work. Parameters ---------- buf : str/bytes/bytearray or numpy.ndarray Binary image data as string or numpy ndarray. flag : int, optional, default=1 1 for three channel color output. 0 for grayscale output. to_rgb : int, optional, default=1 1 for RGB formatted output (MXNet default). 0 for BGR formatted output (OpenCV default). out : NDArray, optional Output buffer. Use `None` for automatic allocation. Returns ------- NDArray An `NDArray` containing the image. Example ------- >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 224x224x3 @cpu(0)> Set `flag` parameter to 0 to get grayscale output >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image, flag=0) >>> image <NDArray 224x224x1 @cpu(0)> Set `to_rgb` parameter to 0 to get output in OpenCV format (BGR) >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image, to_rgb=0) >>> image <NDArray 224x224x3 @cpu(0)> """ if not isinstance(buf, nd.NDArray): if sys.version_info[0] == 3 and not isinstance(buf, (bytes, bytearray, np.ndarray)): raise ValueError('buf must be of type bytes, bytearray or numpy.ndarray,' 'if you would like to input type str, please convert to bytes') buf = nd.array(np.frombuffer(buf, dtype=np.uint8), dtype=np.uint8) return _internal._cvimdecode(buf, *args, **kwargs)
python
def imdecode(buf, *args, **kwargs): """Decode an image to an NDArray. .. note:: `imdecode` uses OpenCV (not the CV2 Python library). MXNet must have been built with USE_OPENCV=1 for `imdecode` to work. Parameters ---------- buf : str/bytes/bytearray or numpy.ndarray Binary image data as string or numpy ndarray. flag : int, optional, default=1 1 for three channel color output. 0 for grayscale output. to_rgb : int, optional, default=1 1 for RGB formatted output (MXNet default). 0 for BGR formatted output (OpenCV default). out : NDArray, optional Output buffer. Use `None` for automatic allocation. Returns ------- NDArray An `NDArray` containing the image. Example ------- >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 224x224x3 @cpu(0)> Set `flag` parameter to 0 to get grayscale output >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image, flag=0) >>> image <NDArray 224x224x1 @cpu(0)> Set `to_rgb` parameter to 0 to get output in OpenCV format (BGR) >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image, to_rgb=0) >>> image <NDArray 224x224x3 @cpu(0)> """ if not isinstance(buf, nd.NDArray): if sys.version_info[0] == 3 and not isinstance(buf, (bytes, bytearray, np.ndarray)): raise ValueError('buf must be of type bytes, bytearray or numpy.ndarray,' 'if you would like to input type str, please convert to bytes') buf = nd.array(np.frombuffer(buf, dtype=np.uint8), dtype=np.uint8) return _internal._cvimdecode(buf, *args, **kwargs)
[ "def", "imdecode", "(", "buf", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "isinstance", "(", "buf", ",", "nd", ".", "NDArray", ")", ":", "if", "sys", ".", "version_info", "[", "0", "]", "==", "3", "and", "not", "isinstance", "(", "buf", ",", "(", "bytes", ",", "bytearray", ",", "np", ".", "ndarray", ")", ")", ":", "raise", "ValueError", "(", "'buf must be of type bytes, bytearray or numpy.ndarray,'", "'if you would like to input type str, please convert to bytes'", ")", "buf", "=", "nd", ".", "array", "(", "np", ".", "frombuffer", "(", "buf", ",", "dtype", "=", "np", ".", "uint8", ")", ",", "dtype", "=", "np", ".", "uint8", ")", "return", "_internal", ".", "_cvimdecode", "(", "buf", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Decode an image to an NDArray. .. note:: `imdecode` uses OpenCV (not the CV2 Python library). MXNet must have been built with USE_OPENCV=1 for `imdecode` to work. Parameters ---------- buf : str/bytes/bytearray or numpy.ndarray Binary image data as string or numpy ndarray. flag : int, optional, default=1 1 for three channel color output. 0 for grayscale output. to_rgb : int, optional, default=1 1 for RGB formatted output (MXNet default). 0 for BGR formatted output (OpenCV default). out : NDArray, optional Output buffer. Use `None` for automatic allocation. Returns ------- NDArray An `NDArray` containing the image. Example ------- >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 224x224x3 @cpu(0)> Set `flag` parameter to 0 to get grayscale output >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image, flag=0) >>> image <NDArray 224x224x1 @cpu(0)> Set `to_rgb` parameter to 0 to get output in OpenCV format (BGR) >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image, to_rgb=0) >>> image <NDArray 224x224x3 @cpu(0)>
[ "Decode", "an", "image", "to", "an", "NDArray", "." ]
1af29e9c060a4c7d60eeaacba32afdb9a7775ba7
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/image/image.py#L143-L198
23,395
apache/incubator-mxnet
python/mxnet/image/image.py
scale_down
def scale_down(src_size, size): """Scales down crop size if it's larger than image size. If width/height of the crop is larger than the width/height of the image, sets the width/height to the width/height of the image. Parameters ---------- src_size : tuple of int Size of the image in (width, height) format. size : tuple of int Size of the crop in (width, height) format. Returns ------- tuple of int A tuple containing the scaled crop size in (width, height) format. Example -------- >>> src_size = (640,480) >>> size = (720,120) >>> new_size = mx.img.scale_down(src_size, size) >>> new_size (640,106) """ w, h = size sw, sh = src_size if sh < h: w, h = float(w * sh) / h, sh if sw < w: w, h = sw, float(h * sw) / w return int(w), int(h)
python
def scale_down(src_size, size): """Scales down crop size if it's larger than image size. If width/height of the crop is larger than the width/height of the image, sets the width/height to the width/height of the image. Parameters ---------- src_size : tuple of int Size of the image in (width, height) format. size : tuple of int Size of the crop in (width, height) format. Returns ------- tuple of int A tuple containing the scaled crop size in (width, height) format. Example -------- >>> src_size = (640,480) >>> size = (720,120) >>> new_size = mx.img.scale_down(src_size, size) >>> new_size (640,106) """ w, h = size sw, sh = src_size if sh < h: w, h = float(w * sh) / h, sh if sw < w: w, h = sw, float(h * sw) / w return int(w), int(h)
[ "def", "scale_down", "(", "src_size", ",", "size", ")", ":", "w", ",", "h", "=", "size", "sw", ",", "sh", "=", "src_size", "if", "sh", "<", "h", ":", "w", ",", "h", "=", "float", "(", "w", "*", "sh", ")", "/", "h", ",", "sh", "if", "sw", "<", "w", ":", "w", ",", "h", "=", "sw", ",", "float", "(", "h", "*", "sw", ")", "/", "w", "return", "int", "(", "w", ")", ",", "int", "(", "h", ")" ]
Scales down crop size if it's larger than image size. If width/height of the crop is larger than the width/height of the image, sets the width/height to the width/height of the image. Parameters ---------- src_size : tuple of int Size of the image in (width, height) format. size : tuple of int Size of the crop in (width, height) format. Returns ------- tuple of int A tuple containing the scaled crop size in (width, height) format. Example -------- >>> src_size = (640,480) >>> size = (720,120) >>> new_size = mx.img.scale_down(src_size, size) >>> new_size (640,106)
[ "Scales", "down", "crop", "size", "if", "it", "s", "larger", "than", "image", "size", "." ]
1af29e9c060a4c7d60eeaacba32afdb9a7775ba7
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/image/image.py#L201-L233
23,396
apache/incubator-mxnet
python/mxnet/image/image.py
copyMakeBorder
def copyMakeBorder(src, top, bot, left, right, *args, **kwargs): """Pad image border with OpenCV. Parameters ---------- src : NDArray source image top : int, required Top margin. bot : int, required Bottom margin. left : int, required Left margin. right : int, required Right margin. type : int, optional, default='0' Filling type (default=cv2.BORDER_CONSTANT). 0 - cv2.BORDER_CONSTANT - Adds a constant colored border. 1 - cv2.BORDER_REFLECT - Border will be mirror reflection of the border elements, like this : fedcba|abcdefgh|hgfedcb 2 - cv2.BORDER_REFLECT_101 or cv.BORDER_DEFAULT - Same as above, but with a slight change, like this : gfedcb|abcdefgh|gfedcba 3 - cv2.BORDER_REPLICATE - Last element is replicated throughout, like this: aaaaaa|abcdefgh|hhhhhhh 4 - cv2.BORDER_WRAP - it will look like this : cdefgh|abcdefgh|abcdefg value : double, optional, default=0 (Deprecated! Use ``values`` instead.) Fill with single value. values : tuple of <double>, optional, default=[] Fill with value(RGB[A] or gray), up to 4 channels. out : NDArray, optional The output NDArray to hold the result. Returns ------- out : NDArray or list of NDArrays The output of this function. Example -------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> new_image = mx_border = mx.image.copyMakeBorder(mx_img, 1, 2, 3, 4, type=0) >>> new_image <NDArray 2324x3489x3 @cpu(0)> """ return _internal._cvcopyMakeBorder(src, top, bot, left, right, *args, **kwargs)
python
def copyMakeBorder(src, top, bot, left, right, *args, **kwargs): """Pad image border with OpenCV. Parameters ---------- src : NDArray source image top : int, required Top margin. bot : int, required Bottom margin. left : int, required Left margin. right : int, required Right margin. type : int, optional, default='0' Filling type (default=cv2.BORDER_CONSTANT). 0 - cv2.BORDER_CONSTANT - Adds a constant colored border. 1 - cv2.BORDER_REFLECT - Border will be mirror reflection of the border elements, like this : fedcba|abcdefgh|hgfedcb 2 - cv2.BORDER_REFLECT_101 or cv.BORDER_DEFAULT - Same as above, but with a slight change, like this : gfedcb|abcdefgh|gfedcba 3 - cv2.BORDER_REPLICATE - Last element is replicated throughout, like this: aaaaaa|abcdefgh|hhhhhhh 4 - cv2.BORDER_WRAP - it will look like this : cdefgh|abcdefgh|abcdefg value : double, optional, default=0 (Deprecated! Use ``values`` instead.) Fill with single value. values : tuple of <double>, optional, default=[] Fill with value(RGB[A] or gray), up to 4 channels. out : NDArray, optional The output NDArray to hold the result. Returns ------- out : NDArray or list of NDArrays The output of this function. Example -------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> new_image = mx_border = mx.image.copyMakeBorder(mx_img, 1, 2, 3, 4, type=0) >>> new_image <NDArray 2324x3489x3 @cpu(0)> """ return _internal._cvcopyMakeBorder(src, top, bot, left, right, *args, **kwargs)
[ "def", "copyMakeBorder", "(", "src", ",", "top", ",", "bot", ",", "left", ",", "right", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_internal", ".", "_cvcopyMakeBorder", "(", "src", ",", "top", ",", "bot", ",", "left", ",", "right", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Pad image border with OpenCV. Parameters ---------- src : NDArray source image top : int, required Top margin. bot : int, required Bottom margin. left : int, required Left margin. right : int, required Right margin. type : int, optional, default='0' Filling type (default=cv2.BORDER_CONSTANT). 0 - cv2.BORDER_CONSTANT - Adds a constant colored border. 1 - cv2.BORDER_REFLECT - Border will be mirror reflection of the border elements, like this : fedcba|abcdefgh|hgfedcb 2 - cv2.BORDER_REFLECT_101 or cv.BORDER_DEFAULT - Same as above, but with a slight change, like this : gfedcb|abcdefgh|gfedcba 3 - cv2.BORDER_REPLICATE - Last element is replicated throughout, like this: aaaaaa|abcdefgh|hhhhhhh 4 - cv2.BORDER_WRAP - it will look like this : cdefgh|abcdefgh|abcdefg value : double, optional, default=0 (Deprecated! Use ``values`` instead.) Fill with single value. values : tuple of <double>, optional, default=[] Fill with value(RGB[A] or gray), up to 4 channels. out : NDArray, optional The output NDArray to hold the result. Returns ------- out : NDArray or list of NDArrays The output of this function. Example -------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> new_image = mx_border = mx.image.copyMakeBorder(mx_img, 1, 2, 3, 4, type=0) >>> new_image <NDArray 2324x3489x3 @cpu(0)>
[ "Pad", "image", "border", "with", "OpenCV", "." ]
1af29e9c060a4c7d60eeaacba32afdb9a7775ba7
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/image/image.py#L236-L286
23,397
apache/incubator-mxnet
python/mxnet/image/image.py
_get_interp_method
def _get_interp_method(interp, sizes=()): """Get the interpolation method for resize functions. The major purpose of this function is to wrap a random interp method selection and a auto-estimation method. Parameters ---------- interp : int interpolation method for all resizing operations Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. sizes : tuple of int (old_height, old_width, new_height, new_width), if None provided, auto(9) will return Area(2) anyway. Returns ------- int interp method from 0 to 4 """ if interp == 9: if sizes: assert len(sizes) == 4 oh, ow, nh, nw = sizes if nh > oh and nw > ow: return 2 elif nh < oh and nw < ow: return 3 else: return 1 else: return 2 if interp == 10: return random.randint(0, 4) if interp not in (0, 1, 2, 3, 4): raise ValueError('Unknown interp method %d' % interp) return interp
python
def _get_interp_method(interp, sizes=()): """Get the interpolation method for resize functions. The major purpose of this function is to wrap a random interp method selection and a auto-estimation method. Parameters ---------- interp : int interpolation method for all resizing operations Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. sizes : tuple of int (old_height, old_width, new_height, new_width), if None provided, auto(9) will return Area(2) anyway. Returns ------- int interp method from 0 to 4 """ if interp == 9: if sizes: assert len(sizes) == 4 oh, ow, nh, nw = sizes if nh > oh and nw > ow: return 2 elif nh < oh and nw < ow: return 3 else: return 1 else: return 2 if interp == 10: return random.randint(0, 4) if interp not in (0, 1, 2, 3, 4): raise ValueError('Unknown interp method %d' % interp) return interp
[ "def", "_get_interp_method", "(", "interp", ",", "sizes", "=", "(", ")", ")", ":", "if", "interp", "==", "9", ":", "if", "sizes", ":", "assert", "len", "(", "sizes", ")", "==", "4", "oh", ",", "ow", ",", "nh", ",", "nw", "=", "sizes", "if", "nh", ">", "oh", "and", "nw", ">", "ow", ":", "return", "2", "elif", "nh", "<", "oh", "and", "nw", "<", "ow", ":", "return", "3", "else", ":", "return", "1", "else", ":", "return", "2", "if", "interp", "==", "10", ":", "return", "random", ".", "randint", "(", "0", ",", "4", ")", "if", "interp", "not", "in", "(", "0", ",", "1", ",", "2", ",", "3", ",", "4", ")", ":", "raise", "ValueError", "(", "'Unknown interp method %d'", "%", "interp", ")", "return", "interp" ]
Get the interpolation method for resize functions. The major purpose of this function is to wrap a random interp method selection and a auto-estimation method. Parameters ---------- interp : int interpolation method for all resizing operations Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. sizes : tuple of int (old_height, old_width, new_height, new_width), if None provided, auto(9) will return Area(2) anyway. Returns ------- int interp method from 0 to 4
[ "Get", "the", "interpolation", "method", "for", "resize", "functions", ".", "The", "major", "purpose", "of", "this", "function", "is", "to", "wrap", "a", "random", "interp", "method", "selection", "and", "a", "auto", "-", "estimation", "method", "." ]
1af29e9c060a4c7d60eeaacba32afdb9a7775ba7
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/image/image.py#L289-L341
23,398
apache/incubator-mxnet
python/mxnet/image/image.py
resize_short
def resize_short(src, size, interp=2): """Resizes shorter edge to size. .. note:: `resize_short` uses OpenCV (not the CV2 Python library). MXNet must have been built with OpenCV for `resize_short` to work. Resizes the original image by setting the shorter edge to size and setting the longer edge accordingly. Resizing function is called from OpenCV. Parameters ---------- src : NDArray The original image. size : int The length to be set for the shorter edge. interp : int, optional, default=2 Interpolation method used for resizing the image. Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. Returns ------- NDArray An 'NDArray' containing the resized image. Example ------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> size = 640 >>> new_image = mx.img.resize_short(image, size) >>> new_image <NDArray 2321x3482x3 @cpu(0)> """ h, w, _ = src.shape if h > w: new_h, new_w = size * h // w, size else: new_h, new_w = size, size * w // h return imresize(src, new_w, new_h, interp=_get_interp_method(interp, (h, w, new_h, new_w)))
python
def resize_short(src, size, interp=2): """Resizes shorter edge to size. .. note:: `resize_short` uses OpenCV (not the CV2 Python library). MXNet must have been built with OpenCV for `resize_short` to work. Resizes the original image by setting the shorter edge to size and setting the longer edge accordingly. Resizing function is called from OpenCV. Parameters ---------- src : NDArray The original image. size : int The length to be set for the shorter edge. interp : int, optional, default=2 Interpolation method used for resizing the image. Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. Returns ------- NDArray An 'NDArray' containing the resized image. Example ------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> size = 640 >>> new_image = mx.img.resize_short(image, size) >>> new_image <NDArray 2321x3482x3 @cpu(0)> """ h, w, _ = src.shape if h > w: new_h, new_w = size * h // w, size else: new_h, new_w = size, size * w // h return imresize(src, new_w, new_h, interp=_get_interp_method(interp, (h, w, new_h, new_w)))
[ "def", "resize_short", "(", "src", ",", "size", ",", "interp", "=", "2", ")", ":", "h", ",", "w", ",", "_", "=", "src", ".", "shape", "if", "h", ">", "w", ":", "new_h", ",", "new_w", "=", "size", "*", "h", "//", "w", ",", "size", "else", ":", "new_h", ",", "new_w", "=", "size", ",", "size", "*", "w", "//", "h", "return", "imresize", "(", "src", ",", "new_w", ",", "new_h", ",", "interp", "=", "_get_interp_method", "(", "interp", ",", "(", "h", ",", "w", ",", "new_h", ",", "new_w", ")", ")", ")" ]
Resizes shorter edge to size. .. note:: `resize_short` uses OpenCV (not the CV2 Python library). MXNet must have been built with OpenCV for `resize_short` to work. Resizes the original image by setting the shorter edge to size and setting the longer edge accordingly. Resizing function is called from OpenCV. Parameters ---------- src : NDArray The original image. size : int The length to be set for the shorter edge. interp : int, optional, default=2 Interpolation method used for resizing the image. Possible values: 0: Nearest Neighbors Interpolation. 1: Bilinear interpolation. 2: Area-based (resampling using pixel area relation). It may be a preferred method for image decimation, as it gives moire-free results. But when the image is zoomed, it is similar to the Nearest Neighbors method. (used by default). 3: Bicubic interpolation over 4x4 pixel neighborhood. 4: Lanczos interpolation over 8x8 pixel neighborhood. 9: Cubic for enlarge, area for shrink, bilinear for others 10: Random select from interpolation method metioned above. Note: When shrinking an image, it will generally look best with AREA-based interpolation, whereas, when enlarging an image, it will generally look best with Bicubic (slow) or Bilinear (faster but still looks OK). More details can be found in the documentation of OpenCV, please refer to http://docs.opencv.org/master/da/d54/group__imgproc__transform.html. Returns ------- NDArray An 'NDArray' containing the resized image. Example ------- >>> with open("flower.jpeg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.img.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> size = 640 >>> new_image = mx.img.resize_short(image, size) >>> new_image <NDArray 2321x3482x3 @cpu(0)>
[ "Resizes", "shorter", "edge", "to", "size", "." ]
1af29e9c060a4c7d60eeaacba32afdb9a7775ba7
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/image/image.py#L344-L403
23,399
apache/incubator-mxnet
python/mxnet/image/image.py
center_crop
def center_crop(src, size, interp=2): """Crops the image `src` to the given `size` by trimming on all four sides and preserving the center of the image. Upsamples if `src` is smaller than `size`. .. note:: This requires MXNet to be compiled with USE_OPENCV. Parameters ---------- src : NDArray Binary source image data. size : list or tuple of int The desired output image size. interp : int, optional, default=2 Interpolation method. See resize_short for details. Returns ------- NDArray The cropped image. Tuple (x, y, width, height) where x, y are the positions of the crop in the original image and width, height the dimensions of the crop. Example ------- >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.image.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> cropped_image, (x, y, width, height) = mx.image.center_crop(image, (1000, 500)) >>> cropped_image <NDArray 500x1000x3 @cpu(0)> >>> x, y, width, height (1241, 910, 1000, 500) """ h, w, _ = src.shape new_w, new_h = scale_down((w, h), size) x0 = int((w - new_w) / 2) y0 = int((h - new_h) / 2) out = fixed_crop(src, x0, y0, new_w, new_h, size, interp) return out, (x0, y0, new_w, new_h)
python
def center_crop(src, size, interp=2): """Crops the image `src` to the given `size` by trimming on all four sides and preserving the center of the image. Upsamples if `src` is smaller than `size`. .. note:: This requires MXNet to be compiled with USE_OPENCV. Parameters ---------- src : NDArray Binary source image data. size : list or tuple of int The desired output image size. interp : int, optional, default=2 Interpolation method. See resize_short for details. Returns ------- NDArray The cropped image. Tuple (x, y, width, height) where x, y are the positions of the crop in the original image and width, height the dimensions of the crop. Example ------- >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.image.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> cropped_image, (x, y, width, height) = mx.image.center_crop(image, (1000, 500)) >>> cropped_image <NDArray 500x1000x3 @cpu(0)> >>> x, y, width, height (1241, 910, 1000, 500) """ h, w, _ = src.shape new_w, new_h = scale_down((w, h), size) x0 = int((w - new_w) / 2) y0 = int((h - new_h) / 2) out = fixed_crop(src, x0, y0, new_w, new_h, size, interp) return out, (x0, y0, new_w, new_h)
[ "def", "center_crop", "(", "src", ",", "size", ",", "interp", "=", "2", ")", ":", "h", ",", "w", ",", "_", "=", "src", ".", "shape", "new_w", ",", "new_h", "=", "scale_down", "(", "(", "w", ",", "h", ")", ",", "size", ")", "x0", "=", "int", "(", "(", "w", "-", "new_w", ")", "/", "2", ")", "y0", "=", "int", "(", "(", "h", "-", "new_h", ")", "/", "2", ")", "out", "=", "fixed_crop", "(", "src", ",", "x0", ",", "y0", ",", "new_w", ",", "new_h", ",", "size", ",", "interp", ")", "return", "out", ",", "(", "x0", ",", "y0", ",", "new_w", ",", "new_h", ")" ]
Crops the image `src` to the given `size` by trimming on all four sides and preserving the center of the image. Upsamples if `src` is smaller than `size`. .. note:: This requires MXNet to be compiled with USE_OPENCV. Parameters ---------- src : NDArray Binary source image data. size : list or tuple of int The desired output image size. interp : int, optional, default=2 Interpolation method. See resize_short for details. Returns ------- NDArray The cropped image. Tuple (x, y, width, height) where x, y are the positions of the crop in the original image and width, height the dimensions of the crop. Example ------- >>> with open("flower.jpg", 'rb') as fp: ... str_image = fp.read() ... >>> image = mx.image.imdecode(str_image) >>> image <NDArray 2321x3482x3 @cpu(0)> >>> cropped_image, (x, y, width, height) = mx.image.center_crop(image, (1000, 500)) >>> cropped_image <NDArray 500x1000x3 @cpu(0)> >>> x, y, width, height (1241, 910, 1000, 500)
[ "Crops", "the", "image", "src", "to", "the", "given", "size", "by", "trimming", "on", "all", "four", "sides", "and", "preserving", "the", "center", "of", "the", "image", ".", "Upsamples", "if", "src", "is", "smaller", "than", "size", "." ]
1af29e9c060a4c7d60eeaacba32afdb9a7775ba7
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/image/image.py#L477-L523