id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
29,900
awslabs/aws-sam-cli
samcli/commands/init/__init__.py
cli
def cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input): """ \b Initialize a serverless application with a SAM template, folder structure for your Lambda functions, connected to an event source such as APIs, S3 Buckets or DynamoDB Tables. This application includes everything you need to get started with serverless and eventually grow into a production scale application. \b This command can initialize a boilerplate serverless app. If you want to create your own template as well as use a custom location please take a look at our official documentation. \b Common usage: \b Initializes a new SAM project using Python 3.6 default template runtime \b $ sam init --runtime python3.6 \b Initializes a new SAM project using Java 8 and Gradle dependency manager \b $ sam init --runtime java8 --dependency-manager gradle \b Initializes a new SAM project using custom template in a Git/Mercurial repository \b # gh being expanded to github url $ sam init --location gh:aws-samples/cookiecutter-aws-sam-python \b $ sam init --location git+ssh://git@github.com/aws-samples/cookiecutter-aws-sam-python.git \b $ sam init --location hg+ssh://hg@bitbucket.org/repo/template-name \b Initializes a new SAM project using custom template in a Zipfile \b $ sam init --location /path/to/template.zip \b $ sam init --location https://example.com/path/to/template.zip \b Initializes a new SAM project using custom template in a local path \b $ sam init --location /path/to/template/folder """ # All logic must be implemented in the `do_cli` method. This helps ease unit tests do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input)
python
def cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input): """ \b Initialize a serverless application with a SAM template, folder structure for your Lambda functions, connected to an event source such as APIs, S3 Buckets or DynamoDB Tables. This application includes everything you need to get started with serverless and eventually grow into a production scale application. \b This command can initialize a boilerplate serverless app. If you want to create your own template as well as use a custom location please take a look at our official documentation. \b Common usage: \b Initializes a new SAM project using Python 3.6 default template runtime \b $ sam init --runtime python3.6 \b Initializes a new SAM project using Java 8 and Gradle dependency manager \b $ sam init --runtime java8 --dependency-manager gradle \b Initializes a new SAM project using custom template in a Git/Mercurial repository \b # gh being expanded to github url $ sam init --location gh:aws-samples/cookiecutter-aws-sam-python \b $ sam init --location git+ssh://git@github.com/aws-samples/cookiecutter-aws-sam-python.git \b $ sam init --location hg+ssh://hg@bitbucket.org/repo/template-name \b Initializes a new SAM project using custom template in a Zipfile \b $ sam init --location /path/to/template.zip \b $ sam init --location https://example.com/path/to/template.zip \b Initializes a new SAM project using custom template in a local path \b $ sam init --location /path/to/template/folder """ # All logic must be implemented in the `do_cli` method. This helps ease unit tests do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input)
[ "def", "cli", "(", "ctx", ",", "location", ",", "runtime", ",", "dependency_manager", ",", "output_dir", ",", "name", ",", "no_input", ")", ":", "# All logic must be implemented in the `do_cli` method. This helps ease unit tests", "do_cli", "(", "ctx", ",", "location", ...
\b Initialize a serverless application with a SAM template, folder structure for your Lambda functions, connected to an event source such as APIs, S3 Buckets or DynamoDB Tables. This application includes everything you need to get started with serverless and eventually grow into a production scale application. \b This command can initialize a boilerplate serverless app. If you want to create your own template as well as use a custom location please take a look at our official documentation. \b Common usage: \b Initializes a new SAM project using Python 3.6 default template runtime \b $ sam init --runtime python3.6 \b Initializes a new SAM project using Java 8 and Gradle dependency manager \b $ sam init --runtime java8 --dependency-manager gradle \b Initializes a new SAM project using custom template in a Git/Mercurial repository \b # gh being expanded to github url $ sam init --location gh:aws-samples/cookiecutter-aws-sam-python \b $ sam init --location git+ssh://git@github.com/aws-samples/cookiecutter-aws-sam-python.git \b $ sam init --location hg+ssh://hg@bitbucket.org/repo/template-name \b Initializes a new SAM project using custom template in a Zipfile \b $ sam init --location /path/to/template.zip \b $ sam init --location https://example.com/path/to/template.zip \b Initializes a new SAM project using custom template in a local path \b $ sam init --location /path/to/template/folder
[ "\\", "b", "Initialize", "a", "serverless", "application", "with", "a", "SAM", "template", "folder", "structure", "for", "your", "Lambda", "functions", "connected", "to", "an", "event", "source", "such", "as", "APIs", "S3", "Buckets", "or", "DynamoDB", "Tables...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/init/__init__.py#L30-L76
29,901
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/parser.py
SwaggerParser.get_apis
def get_apis(self): """ Parses a swagger document and returns a list of APIs configured in the document. Swagger documents have the following structure { "/path1": { # path "get": { # method "x-amazon-apigateway-integration": { # integration "type": "aws_proxy", # URI contains the Lambda function ARN that needs to be parsed to get Function Name "uri": { "Fn::Sub": "arn:aws:apigateway:aws:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/..." } } }, "post": { }, }, "/path2": { ... } } Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs that are configured in the Swagger document """ result = [] paths_dict = self.swagger.get("paths", {}) binary_media_types = self.get_binary_media_types() for full_path, path_config in paths_dict.items(): for method, method_config in path_config.items(): function_name = self._get_integration_function_name(method_config) if not function_name: LOG.debug("Lambda function integration not found in Swagger document at path='%s' method='%s'", full_path, method) continue if method.lower() == self._ANY_METHOD_EXTENSION_KEY: # Convert to a more commonly used method notation method = self._ANY_METHOD api = Api(path=full_path, method=method, function_name=function_name, cors=None, binary_media_types=binary_media_types) result.append(api) return result
python
def get_apis(self): """ Parses a swagger document and returns a list of APIs configured in the document. Swagger documents have the following structure { "/path1": { # path "get": { # method "x-amazon-apigateway-integration": { # integration "type": "aws_proxy", # URI contains the Lambda function ARN that needs to be parsed to get Function Name "uri": { "Fn::Sub": "arn:aws:apigateway:aws:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/..." } } }, "post": { }, }, "/path2": { ... } } Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs that are configured in the Swagger document """ result = [] paths_dict = self.swagger.get("paths", {}) binary_media_types = self.get_binary_media_types() for full_path, path_config in paths_dict.items(): for method, method_config in path_config.items(): function_name = self._get_integration_function_name(method_config) if not function_name: LOG.debug("Lambda function integration not found in Swagger document at path='%s' method='%s'", full_path, method) continue if method.lower() == self._ANY_METHOD_EXTENSION_KEY: # Convert to a more commonly used method notation method = self._ANY_METHOD api = Api(path=full_path, method=method, function_name=function_name, cors=None, binary_media_types=binary_media_types) result.append(api) return result
[ "def", "get_apis", "(", "self", ")", ":", "result", "=", "[", "]", "paths_dict", "=", "self", ".", "swagger", ".", "get", "(", "\"paths\"", ",", "{", "}", ")", "binary_media_types", "=", "self", ".", "get_binary_media_types", "(", ")", "for", "full_path"...
Parses a swagger document and returns a list of APIs configured in the document. Swagger documents have the following structure { "/path1": { # path "get": { # method "x-amazon-apigateway-integration": { # integration "type": "aws_proxy", # URI contains the Lambda function ARN that needs to be parsed to get Function Name "uri": { "Fn::Sub": "arn:aws:apigateway:aws:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/..." } } }, "post": { }, }, "/path2": { ... } } Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs that are configured in the Swagger document
[ "Parses", "a", "swagger", "document", "and", "returns", "a", "list", "of", "APIs", "configured", "in", "the", "document", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/parser.py#L38-L92
29,902
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/parser.py
SwaggerParser._get_integration_function_name
def _get_integration_function_name(self, method_config): """ Tries to parse the Lambda Function name from the Integration defined in the method configuration. Integration configuration is defined under the special "x-amazon-apigateway-integration" key. We care only about Lambda integrations, which are of type aws_proxy, and ignore the rest. Integration URI is complex and hard to parse. Hence we do our best to extract function name out of integration URI. If not possible, we return None. Parameters ---------- method_config : dict Dictionary containing the method configuration which might contain integration settings Returns ------- string or None Lambda function name, if possible. None, if not. """ if not isinstance(method_config, dict) or self._INTEGRATION_KEY not in method_config: return None integration = method_config[self._INTEGRATION_KEY] if integration \ and isinstance(integration, dict) \ and integration.get("type") == IntegrationType.aws_proxy.value: # Integration must be "aws_proxy" otherwise we don't care about it return LambdaUri.get_function_name(integration.get("uri"))
python
def _get_integration_function_name(self, method_config): """ Tries to parse the Lambda Function name from the Integration defined in the method configuration. Integration configuration is defined under the special "x-amazon-apigateway-integration" key. We care only about Lambda integrations, which are of type aws_proxy, and ignore the rest. Integration URI is complex and hard to parse. Hence we do our best to extract function name out of integration URI. If not possible, we return None. Parameters ---------- method_config : dict Dictionary containing the method configuration which might contain integration settings Returns ------- string or None Lambda function name, if possible. None, if not. """ if not isinstance(method_config, dict) or self._INTEGRATION_KEY not in method_config: return None integration = method_config[self._INTEGRATION_KEY] if integration \ and isinstance(integration, dict) \ and integration.get("type") == IntegrationType.aws_proxy.value: # Integration must be "aws_proxy" otherwise we don't care about it return LambdaUri.get_function_name(integration.get("uri"))
[ "def", "_get_integration_function_name", "(", "self", ",", "method_config", ")", ":", "if", "not", "isinstance", "(", "method_config", ",", "dict", ")", "or", "self", ".", "_INTEGRATION_KEY", "not", "in", "method_config", ":", "return", "None", "integration", "=...
Tries to parse the Lambda Function name from the Integration defined in the method configuration. Integration configuration is defined under the special "x-amazon-apigateway-integration" key. We care only about Lambda integrations, which are of type aws_proxy, and ignore the rest. Integration URI is complex and hard to parse. Hence we do our best to extract function name out of integration URI. If not possible, we return None. Parameters ---------- method_config : dict Dictionary containing the method configuration which might contain integration settings Returns ------- string or None Lambda function name, if possible. None, if not.
[ "Tries", "to", "parse", "the", "Lambda", "Function", "name", "from", "the", "Integration", "defined", "in", "the", "method", "configuration", ".", "Integration", "configuration", "is", "defined", "under", "the", "special", "x", "-", "amazon", "-", "apigateway", ...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/parser.py#L94-L121
29,903
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
LogsFormatter.do_format
def do_format(self, event_iterable): """ Formats the given CloudWatch Logs Event dictionary as necessary and returns an iterable that will return the formatted string. This can be used to parse and format the events based on context ie. In Lambda Function logs, a formatter may wish to color the "ERROR" keywords red, or highlight a filter keyword separately etc. This method takes an iterable as input and returns an iterable. It does not immediately format the event. Instead, it sets up the formatter chain appropriately and returns the iterable. Actual formatting happens only when the iterable is used by the caller. Parameters ---------- event_iterable : iterable of samcli.lib.logs.event.LogEvent Iterable that returns an object containing information about each log event. Returns ------- iterable of string Iterable that returns a formatted event as a string. """ for operation in self.formatter_chain: # Make sure the operation has access to certain basic objects like colored partial_op = functools.partial(operation, colored=self.colored) event_iterable = imap(partial_op, event_iterable) return event_iterable
python
def do_format(self, event_iterable): """ Formats the given CloudWatch Logs Event dictionary as necessary and returns an iterable that will return the formatted string. This can be used to parse and format the events based on context ie. In Lambda Function logs, a formatter may wish to color the "ERROR" keywords red, or highlight a filter keyword separately etc. This method takes an iterable as input and returns an iterable. It does not immediately format the event. Instead, it sets up the formatter chain appropriately and returns the iterable. Actual formatting happens only when the iterable is used by the caller. Parameters ---------- event_iterable : iterable of samcli.lib.logs.event.LogEvent Iterable that returns an object containing information about each log event. Returns ------- iterable of string Iterable that returns a formatted event as a string. """ for operation in self.formatter_chain: # Make sure the operation has access to certain basic objects like colored partial_op = functools.partial(operation, colored=self.colored) event_iterable = imap(partial_op, event_iterable) return event_iterable
[ "def", "do_format", "(", "self", ",", "event_iterable", ")", ":", "for", "operation", "in", "self", ".", "formatter_chain", ":", "# Make sure the operation has access to certain basic objects like colored", "partial_op", "=", "functools", ".", "partial", "(", "operation",...
Formats the given CloudWatch Logs Event dictionary as necessary and returns an iterable that will return the formatted string. This can be used to parse and format the events based on context ie. In Lambda Function logs, a formatter may wish to color the "ERROR" keywords red, or highlight a filter keyword separately etc. This method takes an iterable as input and returns an iterable. It does not immediately format the event. Instead, it sets up the formatter chain appropriately and returns the iterable. Actual formatting happens only when the iterable is used by the caller. Parameters ---------- event_iterable : iterable of samcli.lib.logs.event.LogEvent Iterable that returns an object containing information about each log event. Returns ------- iterable of string Iterable that returns a formatted event as a string.
[ "Formats", "the", "given", "CloudWatch", "Logs", "Event", "dictionary", "as", "necessary", "and", "returns", "an", "iterable", "that", "will", "return", "the", "formatted", "string", ".", "This", "can", "be", "used", "to", "parse", "and", "format", "the", "e...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L83-L111
29,904
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
LogsFormatter._pretty_print_event
def _pretty_print_event(event, colored): """ Basic formatter to convert an event object to string """ event.timestamp = colored.yellow(event.timestamp) event.log_stream_name = colored.cyan(event.log_stream_name) return ' '.join([event.log_stream_name, event.timestamp, event.message])
python
def _pretty_print_event(event, colored): """ Basic formatter to convert an event object to string """ event.timestamp = colored.yellow(event.timestamp) event.log_stream_name = colored.cyan(event.log_stream_name) return ' '.join([event.log_stream_name, event.timestamp, event.message])
[ "def", "_pretty_print_event", "(", "event", ",", "colored", ")", ":", "event", ".", "timestamp", "=", "colored", ".", "yellow", "(", "event", ".", "timestamp", ")", "event", ".", "log_stream_name", "=", "colored", ".", "cyan", "(", "event", ".", "log_strea...
Basic formatter to convert an event object to string
[ "Basic", "formatter", "to", "convert", "an", "event", "object", "to", "string" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L114-L121
29,905
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
KeywordHighlighter.highlight_keywords
def highlight_keywords(self, event, colored): """ Highlight the keyword in the log statement by drawing an underline """ if self.keyword: highlight = colored.underline(self.keyword) event.message = event.message.replace(self.keyword, highlight) return event
python
def highlight_keywords(self, event, colored): """ Highlight the keyword in the log statement by drawing an underline """ if self.keyword: highlight = colored.underline(self.keyword) event.message = event.message.replace(self.keyword, highlight) return event
[ "def", "highlight_keywords", "(", "self", ",", "event", ",", "colored", ")", ":", "if", "self", ".", "keyword", ":", "highlight", "=", "colored", ".", "underline", "(", "self", ".", "keyword", ")", "event", ".", "message", "=", "event", ".", "message", ...
Highlight the keyword in the log statement by drawing an underline
[ "Highlight", "the", "keyword", "in", "the", "log", "statement", "by", "drawing", "an", "underline" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L157-L165
29,906
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
JSONMsgFormatter.format_json
def format_json(event, colored): """ If the event message is a JSON string, then pretty print the JSON with 2 indents and sort the keys. This makes it very easy to visually parse and search JSON data """ try: if event.message.startswith("{"): msg_dict = json.loads(event.message) event.message = json.dumps(msg_dict, indent=2) except Exception: # Skip if the event message was not JSON pass return event
python
def format_json(event, colored): """ If the event message is a JSON string, then pretty print the JSON with 2 indents and sort the keys. This makes it very easy to visually parse and search JSON data """ try: if event.message.startswith("{"): msg_dict = json.loads(event.message) event.message = json.dumps(msg_dict, indent=2) except Exception: # Skip if the event message was not JSON pass return event
[ "def", "format_json", "(", "event", ",", "colored", ")", ":", "try", ":", "if", "event", ".", "message", ".", "startswith", "(", "\"{\"", ")", ":", "msg_dict", "=", "json", ".", "loads", "(", "event", ".", "message", ")", "event", ".", "message", "="...
If the event message is a JSON string, then pretty print the JSON with 2 indents and sort the keys. This makes it very easy to visually parse and search JSON data
[ "If", "the", "event", "message", "is", "a", "JSON", "string", "then", "pretty", "print", "the", "JSON", "with", "2", "indents", "and", "sort", "the", "keys", ".", "This", "makes", "it", "very", "easy", "to", "visually", "parse", "and", "search", "JSON", ...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L174-L188
29,907
awslabs/aws-sam-cli
samcli/commands/_utils/template.py
_resolve_relative_to
def _resolve_relative_to(path, original_root, new_root): """ If the given ``path`` is a relative path, then assume it is relative to ``original_root``. This method will update the path to be resolve it relative to ``new_root`` and return. Examples ------- # Assume a file called template.txt at location /tmp/original/root/template.txt expressed as relative path # We are trying to update it to be relative to /tmp/new/root instead of the /tmp/original/root >>> result = _resolve_relative_to("template.txt", \ "/tmp/original/root", \ "/tmp/new/root") >>> result ../../original/root/template.txt Returns ------- Updated path if the given path is a relative path. None, if the path is not a relative path. """ if not isinstance(path, six.string_types) \ or path.startswith("s3://") \ or os.path.isabs(path): # Value is definitely NOT a relative path. It is either a S3 URi or Absolute path or not a string at all return None # Value is definitely a relative path. Change it relative to the destination directory return os.path.relpath( os.path.normpath(os.path.join(original_root, path)), # Absolute original path w.r.t ``original_root`` new_root)
python
def _resolve_relative_to(path, original_root, new_root): """ If the given ``path`` is a relative path, then assume it is relative to ``original_root``. This method will update the path to be resolve it relative to ``new_root`` and return. Examples ------- # Assume a file called template.txt at location /tmp/original/root/template.txt expressed as relative path # We are trying to update it to be relative to /tmp/new/root instead of the /tmp/original/root >>> result = _resolve_relative_to("template.txt", \ "/tmp/original/root", \ "/tmp/new/root") >>> result ../../original/root/template.txt Returns ------- Updated path if the given path is a relative path. None, if the path is not a relative path. """ if not isinstance(path, six.string_types) \ or path.startswith("s3://") \ or os.path.isabs(path): # Value is definitely NOT a relative path. It is either a S3 URi or Absolute path or not a string at all return None # Value is definitely a relative path. Change it relative to the destination directory return os.path.relpath( os.path.normpath(os.path.join(original_root, path)), # Absolute original path w.r.t ``original_root`` new_root)
[ "def", "_resolve_relative_to", "(", "path", ",", "original_root", ",", "new_root", ")", ":", "if", "not", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", "or", "path", ".", "startswith", "(", "\"s3://\"", ")", "or", "os", ".", "path", "....
If the given ``path`` is a relative path, then assume it is relative to ``original_root``. This method will update the path to be resolve it relative to ``new_root`` and return. Examples ------- # Assume a file called template.txt at location /tmp/original/root/template.txt expressed as relative path # We are trying to update it to be relative to /tmp/new/root instead of the /tmp/original/root >>> result = _resolve_relative_to("template.txt", \ "/tmp/original/root", \ "/tmp/new/root") >>> result ../../original/root/template.txt Returns ------- Updated path if the given path is a relative path. None, if the path is not a relative path.
[ "If", "the", "given", "path", "is", "a", "relative", "path", "then", "assume", "it", "is", "relative", "to", "original_root", ".", "This", "method", "will", "update", "the", "path", "to", "be", "resolve", "it", "relative", "to", "new_root", "and", "return"...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/_utils/template.py#L208-L237
29,908
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
SamSwaggerReader.read
def read(self): """ Gets the Swagger document from either of the given locations. If we fail to retrieve or parse the Swagger file, this method will return None. Returns ------- dict: Swagger document. None, if we cannot retrieve the document """ swagger = None # First check if there is inline swagger if self.definition_body: swagger = self._read_from_definition_body() if not swagger and self.definition_uri: # If not, then try to download it from the given URI swagger = self._download_swagger(self.definition_uri) return swagger
python
def read(self): """ Gets the Swagger document from either of the given locations. If we fail to retrieve or parse the Swagger file, this method will return None. Returns ------- dict: Swagger document. None, if we cannot retrieve the document """ swagger = None # First check if there is inline swagger if self.definition_body: swagger = self._read_from_definition_body() if not swagger and self.definition_uri: # If not, then try to download it from the given URI swagger = self._download_swagger(self.definition_uri) return swagger
[ "def", "read", "(", "self", ")", ":", "swagger", "=", "None", "# First check if there is inline swagger", "if", "self", ".", "definition_body", ":", "swagger", "=", "self", ".", "_read_from_definition_body", "(", ")", "if", "not", "swagger", "and", "self", ".", ...
Gets the Swagger document from either of the given locations. If we fail to retrieve or parse the Swagger file, this method will return None. Returns ------- dict: Swagger document. None, if we cannot retrieve the document
[ "Gets", "the", "Swagger", "document", "from", "either", "of", "the", "given", "locations", ".", "If", "we", "fail", "to", "retrieve", "or", "parse", "the", "Swagger", "file", "this", "method", "will", "return", "None", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L92-L113
29,909
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
SamSwaggerReader._download_swagger
def _download_swagger(self, location): """ Download the file from given local or remote location and return it Parameters ---------- location : str or dict Local path or S3 path to Swagger file to download. Consult the ``__init__.py`` documentation for specifics on structure of this property. Returns ------- dict or None Downloaded and parsed Swagger document. None, if unable to download """ if not location: return bucket, key, version = self._parse_s3_location(location) if bucket and key: LOG.debug("Downloading Swagger document from Bucket=%s, Key=%s, Version=%s", bucket, key, version) swagger_str = self._download_from_s3(bucket, key, version) return yaml_parse(swagger_str) if not isinstance(location, string_types): # This is not a string and not a S3 Location dictionary. Probably something invalid LOG.debug("Unable to download Swagger file. Invalid location: %s", location) return # ``location`` is a string and not a S3 path. It is probably a local path. Let's resolve relative path if any filepath = location if self.working_dir: # Resolve relative paths, if any, with respect to working directory filepath = os.path.join(self.working_dir, location) if not os.path.exists(filepath): LOG.debug("Unable to download Swagger file. File not found at location %s", filepath) return LOG.debug("Reading Swagger document from local file at %s", filepath) with open(filepath, "r") as fp: return yaml_parse(fp.read())
python
def _download_swagger(self, location): """ Download the file from given local or remote location and return it Parameters ---------- location : str or dict Local path or S3 path to Swagger file to download. Consult the ``__init__.py`` documentation for specifics on structure of this property. Returns ------- dict or None Downloaded and parsed Swagger document. None, if unable to download """ if not location: return bucket, key, version = self._parse_s3_location(location) if bucket and key: LOG.debug("Downloading Swagger document from Bucket=%s, Key=%s, Version=%s", bucket, key, version) swagger_str = self._download_from_s3(bucket, key, version) return yaml_parse(swagger_str) if not isinstance(location, string_types): # This is not a string and not a S3 Location dictionary. Probably something invalid LOG.debug("Unable to download Swagger file. Invalid location: %s", location) return # ``location`` is a string and not a S3 path. It is probably a local path. Let's resolve relative path if any filepath = location if self.working_dir: # Resolve relative paths, if any, with respect to working directory filepath = os.path.join(self.working_dir, location) if not os.path.exists(filepath): LOG.debug("Unable to download Swagger file. File not found at location %s", filepath) return LOG.debug("Reading Swagger document from local file at %s", filepath) with open(filepath, "r") as fp: return yaml_parse(fp.read())
[ "def", "_download_swagger", "(", "self", ",", "location", ")", ":", "if", "not", "location", ":", "return", "bucket", ",", "key", ",", "version", "=", "self", ".", "_parse_s3_location", "(", "location", ")", "if", "bucket", "and", "key", ":", "LOG", ".",...
Download the file from given local or remote location and return it Parameters ---------- location : str or dict Local path or S3 path to Swagger file to download. Consult the ``__init__.py`` documentation for specifics on structure of this property. Returns ------- dict or None Downloaded and parsed Swagger document. None, if unable to download
[ "Download", "the", "file", "from", "given", "local", "or", "remote", "location", "and", "return", "it" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L138-L180
29,910
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
SamSwaggerReader._download_from_s3
def _download_from_s3(bucket, key, version=None): """ Download a file from given S3 location, if available. Parameters ---------- bucket : str S3 Bucket name key : str S3 Bucket Key aka file path version : str Optional Version ID of the file Returns ------- str Contents of the file that was downloaded Raises ------ botocore.exceptions.ClientError if we were unable to download the file from S3 """ s3 = boto3.client('s3') extra_args = {} if version: extra_args["VersionId"] = version with tempfile.TemporaryFile() as fp: try: s3.download_fileobj( bucket, key, fp, ExtraArgs=extra_args) # go to start of file fp.seek(0) # Read and return all the contents return fp.read() except botocore.exceptions.ClientError: LOG.error("Unable to download Swagger document from S3 Bucket=%s Key=%s Version=%s", bucket, key, version) raise
python
def _download_from_s3(bucket, key, version=None): """ Download a file from given S3 location, if available. Parameters ---------- bucket : str S3 Bucket name key : str S3 Bucket Key aka file path version : str Optional Version ID of the file Returns ------- str Contents of the file that was downloaded Raises ------ botocore.exceptions.ClientError if we were unable to download the file from S3 """ s3 = boto3.client('s3') extra_args = {} if version: extra_args["VersionId"] = version with tempfile.TemporaryFile() as fp: try: s3.download_fileobj( bucket, key, fp, ExtraArgs=extra_args) # go to start of file fp.seek(0) # Read and return all the contents return fp.read() except botocore.exceptions.ClientError: LOG.error("Unable to download Swagger document from S3 Bucket=%s Key=%s Version=%s", bucket, key, version) raise
[ "def", "_download_from_s3", "(", "bucket", ",", "key", ",", "version", "=", "None", ")", ":", "s3", "=", "boto3", ".", "client", "(", "'s3'", ")", "extra_args", "=", "{", "}", "if", "version", ":", "extra_args", "[", "\"VersionId\"", "]", "=", "version...
Download a file from given S3 location, if available. Parameters ---------- bucket : str S3 Bucket name key : str S3 Bucket Key aka file path version : str Optional Version ID of the file Returns ------- str Contents of the file that was downloaded Raises ------ botocore.exceptions.ClientError if we were unable to download the file from S3
[ "Download", "a", "file", "from", "given", "S3", "location", "if", "available", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L183-L229
29,911
awslabs/aws-sam-cli
samcli/cli/context.py
Context.debug
def debug(self, value): """ Turn on debug logging if necessary. :param value: Value of debug flag """ self._debug = value if self._debug: # Turn on debug logging logging.getLogger().setLevel(logging.DEBUG)
python
def debug(self, value): """ Turn on debug logging if necessary. :param value: Value of debug flag """ self._debug = value if self._debug: # Turn on debug logging logging.getLogger().setLevel(logging.DEBUG)
[ "def", "debug", "(", "self", ",", "value", ")", ":", "self", ".", "_debug", "=", "value", "if", "self", ".", "_debug", ":", "# Turn on debug logging", "logging", ".", "getLogger", "(", ")", ".", "setLevel", "(", "logging", ".", "DEBUG", ")" ]
Turn on debug logging if necessary. :param value: Value of debug flag
[ "Turn", "on", "debug", "logging", "if", "necessary", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/context.py#L35-L45
29,912
awslabs/aws-sam-cli
samcli/local/init/__init__.py
generate_project
def generate_project( location=None, runtime="nodejs", dependency_manager=None, output_dir=".", name='sam-sample-app', no_input=False): """Generates project using cookiecutter and options given Generate project scaffolds a project using default templates if user doesn't provide one via location parameter. Default templates are automatically chosen depending on runtime given by the user. Parameters ---------- location: Path, optional Git, HTTP, Local path or Zip containing cookiecutter template (the default is None, which means no custom template) runtime: str, optional Lambda Runtime (the default is "nodejs", which creates a nodejs project) dependency_manager: str, optional Dependency Manager for the Lambda Runtime Project(the default is "npm" for a "nodejs" Lambda runtime) output_dir: str, optional Output directory where project should be generated (the default is ".", which implies current folder) name: str, optional Name of the project (the default is "sam-sample-app", which implies a project named sam-sample-app will be created) no_input : bool, optional Whether to prompt for input or to accept default values (the default is False, which prompts the user for values it doesn't know for baking) Raises ------ GenerateProjectFailedError If the process of baking a project fails """ template = None for mapping in list(itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values()))): if runtime in mapping['runtimes'] or any([r.startswith(runtime) for r in mapping['runtimes']]): if not dependency_manager: template = mapping['init_location'] break elif dependency_manager == mapping['dependency_manager']: template = mapping['init_location'] if not template: msg = "Lambda Runtime {} does not support dependency manager: {}".format(runtime, dependency_manager) raise GenerateProjectFailedError(project=name, provider_error=msg) params = { "template": location if location else template, "output_dir": output_dir, "no_input": no_input } LOG.debug("Parameters dict created with input 
given") LOG.debug("%s", params) if not location and name is not None: params['extra_context'] = {'project_name': name, 'runtime': runtime} params['no_input'] = True LOG.debug("Parameters dict updated with project name as extra_context") LOG.debug("%s", params) try: LOG.debug("Baking a new template with cookiecutter with all parameters") cookiecutter(**params) except CookiecutterException as e: raise GenerateProjectFailedError(project=name, provider_error=e)
python
def generate_project( location=None, runtime="nodejs", dependency_manager=None, output_dir=".", name='sam-sample-app', no_input=False): """Generates project using cookiecutter and options given Generate project scaffolds a project using default templates if user doesn't provide one via location parameter. Default templates are automatically chosen depending on runtime given by the user. Parameters ---------- location: Path, optional Git, HTTP, Local path or Zip containing cookiecutter template (the default is None, which means no custom template) runtime: str, optional Lambda Runtime (the default is "nodejs", which creates a nodejs project) dependency_manager: str, optional Dependency Manager for the Lambda Runtime Project(the default is "npm" for a "nodejs" Lambda runtime) output_dir: str, optional Output directory where project should be generated (the default is ".", which implies current folder) name: str, optional Name of the project (the default is "sam-sample-app", which implies a project named sam-sample-app will be created) no_input : bool, optional Whether to prompt for input or to accept default values (the default is False, which prompts the user for values it doesn't know for baking) Raises ------ GenerateProjectFailedError If the process of baking a project fails """ template = None for mapping in list(itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values()))): if runtime in mapping['runtimes'] or any([r.startswith(runtime) for r in mapping['runtimes']]): if not dependency_manager: template = mapping['init_location'] break elif dependency_manager == mapping['dependency_manager']: template = mapping['init_location'] if not template: msg = "Lambda Runtime {} does not support dependency manager: {}".format(runtime, dependency_manager) raise GenerateProjectFailedError(project=name, provider_error=msg) params = { "template": location if location else template, "output_dir": output_dir, "no_input": no_input } LOG.debug("Parameters dict created with input 
given") LOG.debug("%s", params) if not location and name is not None: params['extra_context'] = {'project_name': name, 'runtime': runtime} params['no_input'] = True LOG.debug("Parameters dict updated with project name as extra_context") LOG.debug("%s", params) try: LOG.debug("Baking a new template with cookiecutter with all parameters") cookiecutter(**params) except CookiecutterException as e: raise GenerateProjectFailedError(project=name, provider_error=e)
[ "def", "generate_project", "(", "location", "=", "None", ",", "runtime", "=", "\"nodejs\"", ",", "dependency_manager", "=", "None", ",", "output_dir", "=", "\".\"", ",", "name", "=", "'sam-sample-app'", ",", "no_input", "=", "False", ")", ":", "template", "=...
Generates project using cookiecutter and options given Generate project scaffolds a project using default templates if user doesn't provide one via location parameter. Default templates are automatically chosen depending on runtime given by the user. Parameters ---------- location: Path, optional Git, HTTP, Local path or Zip containing cookiecutter template (the default is None, which means no custom template) runtime: str, optional Lambda Runtime (the default is "nodejs", which creates a nodejs project) dependency_manager: str, optional Dependency Manager for the Lambda Runtime Project(the default is "npm" for a "nodejs" Lambda runtime) output_dir: str, optional Output directory where project should be generated (the default is ".", which implies current folder) name: str, optional Name of the project (the default is "sam-sample-app", which implies a project named sam-sample-app will be created) no_input : bool, optional Whether to prompt for input or to accept default values (the default is False, which prompts the user for values it doesn't know for baking) Raises ------ GenerateProjectFailedError If the process of baking a project fails
[ "Generates", "project", "using", "cookiecutter", "and", "options", "given" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/init/__init__.py#L16-L83
29,913
awslabs/aws-sam-cli
samcli/lib/utils/time.py
to_utc
def to_utc(some_time): """ Convert the given date to UTC, if the date contains a timezone. Parameters ---------- some_time : datetime.datetime datetime object to convert to UTC Returns ------- datetime.datetime Converted datetime object """ # Convert timezone aware objects to UTC if some_time.tzinfo and some_time.utcoffset(): some_time = some_time.astimezone(tzutc()) # Now that time is UTC, simply remove the timezone component. return some_time.replace(tzinfo=None)
python
def to_utc(some_time): """ Convert the given date to UTC, if the date contains a timezone. Parameters ---------- some_time : datetime.datetime datetime object to convert to UTC Returns ------- datetime.datetime Converted datetime object """ # Convert timezone aware objects to UTC if some_time.tzinfo and some_time.utcoffset(): some_time = some_time.astimezone(tzutc()) # Now that time is UTC, simply remove the timezone component. return some_time.replace(tzinfo=None)
[ "def", "to_utc", "(", "some_time", ")", ":", "# Convert timezone aware objects to UTC", "if", "some_time", ".", "tzinfo", "and", "some_time", ".", "utcoffset", "(", ")", ":", "some_time", "=", "some_time", ".", "astimezone", "(", "tzutc", "(", ")", ")", "# Now...
Convert the given date to UTC, if the date contains a timezone. Parameters ---------- some_time : datetime.datetime datetime object to convert to UTC Returns ------- datetime.datetime Converted datetime object
[ "Convert", "the", "given", "date", "to", "UTC", "if", "the", "date", "contains", "a", "timezone", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/time.py#L68-L88
29,914
awslabs/aws-sam-cli
samcli/lib/utils/time.py
parse_date
def parse_date(date_string): """ Parse the given string as datetime object. This parser supports in almost any string formats. For relative times, like `10min ago`, this parser computes the actual time relative to current UTC time. This allows time to always be in UTC if an explicit time zone is not provided. Parameters ---------- date_string : str String representing the date Returns ------- datetime.datetime Parsed datetime object. None, if the string cannot be parsed. """ parser_settings = { # Relative times like '10m ago' must subtract from the current UTC time. Without this setting, dateparser # will use current local time as the base for subtraction, but falsely assume it is a UTC time. Therefore # the time that dateparser returns will be a `datetime` object that did not have any timezone information. # So be explicit to set the time to UTC. "RELATIVE_BASE": datetime.datetime.utcnow() } return dateparser.parse(date_string, settings=parser_settings)
python
def parse_date(date_string): """ Parse the given string as datetime object. This parser supports in almost any string formats. For relative times, like `10min ago`, this parser computes the actual time relative to current UTC time. This allows time to always be in UTC if an explicit time zone is not provided. Parameters ---------- date_string : str String representing the date Returns ------- datetime.datetime Parsed datetime object. None, if the string cannot be parsed. """ parser_settings = { # Relative times like '10m ago' must subtract from the current UTC time. Without this setting, dateparser # will use current local time as the base for subtraction, but falsely assume it is a UTC time. Therefore # the time that dateparser returns will be a `datetime` object that did not have any timezone information. # So be explicit to set the time to UTC. "RELATIVE_BASE": datetime.datetime.utcnow() } return dateparser.parse(date_string, settings=parser_settings)
[ "def", "parse_date", "(", "date_string", ")", ":", "parser_settings", "=", "{", "# Relative times like '10m ago' must subtract from the current UTC time. Without this setting, dateparser", "# will use current local time as the base for subtraction, but falsely assume it is a UTC time. Therefore"...
Parse the given string as datetime object. This parser supports in almost any string formats. For relative times, like `10min ago`, this parser computes the actual time relative to current UTC time. This allows time to always be in UTC if an explicit time zone is not provided. Parameters ---------- date_string : str String representing the date Returns ------- datetime.datetime Parsed datetime object. None, if the string cannot be parsed.
[ "Parse", "the", "given", "string", "as", "datetime", "object", ".", "This", "parser", "supports", "in", "almost", "any", "string", "formats", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/time.py#L91-L117
29,915
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.function_name
def function_name(self): """ Returns name of the function to invoke. If no function identifier is provided, this method will return name of the only function from the template :return string: Name of the function :raises InvokeContextException: If function identifier is not provided """ if self._function_identifier: return self._function_identifier # Function Identifier is *not* provided. If there is only one function in the template, # default to it. all_functions = [f for f in self._function_provider.get_all()] if len(all_functions) == 1: return all_functions[0].name # Get all the available function names to print helpful exception message all_function_names = [f.name for f in all_functions] # There are more functions in the template, and function identifier is not provided, hence raise. raise InvokeContextException("You must provide a function identifier (function's Logical ID in the template). " "Possible options in your template: {}".format(all_function_names))
python
def function_name(self): """ Returns name of the function to invoke. If no function identifier is provided, this method will return name of the only function from the template :return string: Name of the function :raises InvokeContextException: If function identifier is not provided """ if self._function_identifier: return self._function_identifier # Function Identifier is *not* provided. If there is only one function in the template, # default to it. all_functions = [f for f in self._function_provider.get_all()] if len(all_functions) == 1: return all_functions[0].name # Get all the available function names to print helpful exception message all_function_names = [f.name for f in all_functions] # There are more functions in the template, and function identifier is not provided, hence raise. raise InvokeContextException("You must provide a function identifier (function's Logical ID in the template). " "Possible options in your template: {}".format(all_function_names))
[ "def", "function_name", "(", "self", ")", ":", "if", "self", ".", "_function_identifier", ":", "return", "self", ".", "_function_identifier", "# Function Identifier is *not* provided. If there is only one function in the template,", "# default to it.", "all_functions", "=", "["...
Returns name of the function to invoke. If no function identifier is provided, this method will return name of the only function from the template :return string: Name of the function :raises InvokeContextException: If function identifier is not provided
[ "Returns", "name", "of", "the", "function", "to", "invoke", ".", "If", "no", "function", "identifier", "is", "provided", "this", "method", "will", "return", "name", "of", "the", "only", "function", "from", "the", "template" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L157-L180
29,916
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.local_lambda_runner
def local_lambda_runner(self): """ Returns an instance of the runner capable of running Lambda functions locally :return samcli.commands.local.lib.local_lambda.LocalLambdaRunner: Runner configured to run Lambda functions locally """ layer_downloader = LayerDownloader(self._layer_cache_basedir, self.get_cwd()) image_builder = LambdaImage(layer_downloader, self._skip_pull_image, self._force_image_build) lambda_runtime = LambdaRuntime(self._container_manager, image_builder) return LocalLambdaRunner(local_runtime=lambda_runtime, function_provider=self._function_provider, cwd=self.get_cwd(), env_vars_values=self._env_vars_value, debug_context=self._debug_context)
python
def local_lambda_runner(self): """ Returns an instance of the runner capable of running Lambda functions locally :return samcli.commands.local.lib.local_lambda.LocalLambdaRunner: Runner configured to run Lambda functions locally """ layer_downloader = LayerDownloader(self._layer_cache_basedir, self.get_cwd()) image_builder = LambdaImage(layer_downloader, self._skip_pull_image, self._force_image_build) lambda_runtime = LambdaRuntime(self._container_manager, image_builder) return LocalLambdaRunner(local_runtime=lambda_runtime, function_provider=self._function_provider, cwd=self.get_cwd(), env_vars_values=self._env_vars_value, debug_context=self._debug_context)
[ "def", "local_lambda_runner", "(", "self", ")", ":", "layer_downloader", "=", "LayerDownloader", "(", "self", ".", "_layer_cache_basedir", ",", "self", ".", "get_cwd", "(", ")", ")", "image_builder", "=", "LambdaImage", "(", "layer_downloader", ",", "self", ".",...
Returns an instance of the runner capable of running Lambda functions locally :return samcli.commands.local.lib.local_lambda.LocalLambdaRunner: Runner configured to run Lambda functions locally
[ "Returns", "an", "instance", "of", "the", "runner", "capable", "of", "running", "Lambda", "functions", "locally" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L183-L201
29,917
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.stdout
def stdout(self): """ Returns stream writer for stdout to output Lambda function logs to Returns ------- samcli.lib.utils.stream_writer.StreamWriter Stream writer for stdout """ stream = self._log_file_handle if self._log_file_handle else osutils.stdout() return StreamWriter(stream, self._is_debugging)
python
def stdout(self): """ Returns stream writer for stdout to output Lambda function logs to Returns ------- samcli.lib.utils.stream_writer.StreamWriter Stream writer for stdout """ stream = self._log_file_handle if self._log_file_handle else osutils.stdout() return StreamWriter(stream, self._is_debugging)
[ "def", "stdout", "(", "self", ")", ":", "stream", "=", "self", ".", "_log_file_handle", "if", "self", ".", "_log_file_handle", "else", "osutils", ".", "stdout", "(", ")", "return", "StreamWriter", "(", "stream", ",", "self", ".", "_is_debugging", ")" ]
Returns stream writer for stdout to output Lambda function logs to Returns ------- samcli.lib.utils.stream_writer.StreamWriter Stream writer for stdout
[ "Returns", "stream", "writer", "for", "stdout", "to", "output", "Lambda", "function", "logs", "to" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L204-L214
29,918
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.stderr
def stderr(self): """ Returns stream writer for stderr to output Lambda function errors to Returns ------- samcli.lib.utils.stream_writer.StreamWriter Stream writer for stderr """ stream = self._log_file_handle if self._log_file_handle else osutils.stderr() return StreamWriter(stream, self._is_debugging)
python
def stderr(self): """ Returns stream writer for stderr to output Lambda function errors to Returns ------- samcli.lib.utils.stream_writer.StreamWriter Stream writer for stderr """ stream = self._log_file_handle if self._log_file_handle else osutils.stderr() return StreamWriter(stream, self._is_debugging)
[ "def", "stderr", "(", "self", ")", ":", "stream", "=", "self", ".", "_log_file_handle", "if", "self", ".", "_log_file_handle", "else", "osutils", ".", "stderr", "(", ")", "return", "StreamWriter", "(", "stream", ",", "self", ".", "_is_debugging", ")" ]
Returns stream writer for stderr to output Lambda function errors to Returns ------- samcli.lib.utils.stream_writer.StreamWriter Stream writer for stderr
[ "Returns", "stream", "writer", "for", "stderr", "to", "output", "Lambda", "function", "errors", "to" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L217-L227
29,919
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.get_cwd
def get_cwd(self): """ Get the working directory. This is usually relative to the directory that contains the template. If a Docker volume location is specified, it takes preference All Lambda function code paths are resolved relative to this working directory :return string: Working directory """ cwd = os.path.dirname(os.path.abspath(self._template_file)) if self._docker_volume_basedir: cwd = self._docker_volume_basedir return cwd
python
def get_cwd(self): """ Get the working directory. This is usually relative to the directory that contains the template. If a Docker volume location is specified, it takes preference All Lambda function code paths are resolved relative to this working directory :return string: Working directory """ cwd = os.path.dirname(os.path.abspath(self._template_file)) if self._docker_volume_basedir: cwd = self._docker_volume_basedir return cwd
[ "def", "get_cwd", "(", "self", ")", ":", "cwd", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "self", ".", "_template_file", ")", ")", "if", "self", ".", "_docker_volume_basedir", ":", "cwd", "=", "self", ".", ...
Get the working directory. This is usually relative to the directory that contains the template. If a Docker volume location is specified, it takes preference All Lambda function code paths are resolved relative to this working directory :return string: Working directory
[ "Get", "the", "working", "directory", ".", "This", "is", "usually", "relative", "to", "the", "directory", "that", "contains", "the", "template", ".", "If", "a", "Docker", "volume", "location", "is", "specified", "it", "takes", "preference" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L238-L252
29,920
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext._get_env_vars_value
def _get_env_vars_value(filename): """ If the user provided a file containing values of environment variables, this method will read the file and return its value :param string filename: Path to file containing environment variable values :return dict: Value of environment variables, if provided. None otherwise :raises InvokeContextException: If the file was not found or not a valid JSON """ if not filename: return None # Try to read the file and parse it as JSON try: with open(filename, 'r') as fp: return json.load(fp) except Exception as ex: raise InvokeContextException("Could not read environment variables overrides from file {}: {}".format( filename, str(ex)))
python
def _get_env_vars_value(filename): """ If the user provided a file containing values of environment variables, this method will read the file and return its value :param string filename: Path to file containing environment variable values :return dict: Value of environment variables, if provided. None otherwise :raises InvokeContextException: If the file was not found or not a valid JSON """ if not filename: return None # Try to read the file and parse it as JSON try: with open(filename, 'r') as fp: return json.load(fp) except Exception as ex: raise InvokeContextException("Could not read environment variables overrides from file {}: {}".format( filename, str(ex)))
[ "def", "_get_env_vars_value", "(", "filename", ")", ":", "if", "not", "filename", ":", "return", "None", "# Try to read the file and parse it as JSON", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "fp", ":", "return", "json", ".", "load"...
If the user provided a file containing values of environment variables, this method will read the file and return its value :param string filename: Path to file containing environment variable values :return dict: Value of environment variables, if provided. None otherwise :raises InvokeContextException: If the file was not found or not a valid JSON
[ "If", "the", "user", "provided", "a", "file", "containing", "values", "of", "environment", "variables", "this", "method", "will", "read", "the", "file", "and", "return", "its", "value" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L282-L303
29,921
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext._get_debug_context
def _get_debug_context(debug_port, debug_args, debugger_path): """ Creates a DebugContext if the InvokeContext is in a debugging mode Parameters ---------- debug_port int Port to bind the debugger to debug_args str Additional arguments passed to the debugger debugger_path str Path to the directory of the debugger to mount on Docker Returns ------- samcli.commands.local.lib.debug_context.DebugContext Object representing the DebugContext Raises ------ samcli.commands.local.cli_common.user_exceptions.DebugContext When the debugger_path is not valid """ if debug_port and debugger_path: try: debugger = Path(debugger_path).resolve(strict=True) except OSError as error: if error.errno == errno.ENOENT: raise DebugContextException("'{}' could not be found.".format(debugger_path)) else: raise error # We turn off pylint here due to https://github.com/PyCQA/pylint/issues/1660 if not debugger.is_dir(): # pylint: disable=no-member raise DebugContextException("'{}' should be a directory with the debugger in it.".format(debugger_path)) debugger_path = str(debugger) return DebugContext(debug_port=debug_port, debug_args=debug_args, debugger_path=debugger_path)
python
def _get_debug_context(debug_port, debug_args, debugger_path): """ Creates a DebugContext if the InvokeContext is in a debugging mode Parameters ---------- debug_port int Port to bind the debugger to debug_args str Additional arguments passed to the debugger debugger_path str Path to the directory of the debugger to mount on Docker Returns ------- samcli.commands.local.lib.debug_context.DebugContext Object representing the DebugContext Raises ------ samcli.commands.local.cli_common.user_exceptions.DebugContext When the debugger_path is not valid """ if debug_port and debugger_path: try: debugger = Path(debugger_path).resolve(strict=True) except OSError as error: if error.errno == errno.ENOENT: raise DebugContextException("'{}' could not be found.".format(debugger_path)) else: raise error # We turn off pylint here due to https://github.com/PyCQA/pylint/issues/1660 if not debugger.is_dir(): # pylint: disable=no-member raise DebugContextException("'{}' should be a directory with the debugger in it.".format(debugger_path)) debugger_path = str(debugger) return DebugContext(debug_port=debug_port, debug_args=debug_args, debugger_path=debugger_path)
[ "def", "_get_debug_context", "(", "debug_port", ",", "debug_args", ",", "debugger_path", ")", ":", "if", "debug_port", "and", "debugger_path", ":", "try", ":", "debugger", "=", "Path", "(", "debugger_path", ")", ".", "resolve", "(", "strict", "=", "True", ")...
Creates a DebugContext if the InvokeContext is in a debugging mode Parameters ---------- debug_port int Port to bind the debugger to debug_args str Additional arguments passed to the debugger debugger_path str Path to the directory of the debugger to mount on Docker Returns ------- samcli.commands.local.lib.debug_context.DebugContext Object representing the DebugContext Raises ------ samcli.commands.local.cli_common.user_exceptions.DebugContext When the debugger_path is not valid
[ "Creates", "a", "DebugContext", "if", "the", "InvokeContext", "is", "in", "a", "debugging", "mode" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L319-L356
29,922
awslabs/aws-sam-cli
samcli/local/docker/attach_api.py
_read_payload
def _read_payload(socket, payload_size): """ From the given socket, reads and yields payload of the given size. With sockets, we don't receive all data at once. Therefore this method will yield each time we read some data from the socket until the payload_size has reached or socket has no more data. Parameters ---------- socket Socket to read from payload_size : int Size of the payload to read. Exactly these many bytes are read from the socket before stopping the yield. Yields ------- int Type of the stream (1 => stdout, 2 => stderr) str Data in the stream """ remaining = payload_size while remaining > 0: # Try and read as much as possible data = read(socket, remaining) if data is None: # ``read`` will terminate with an empty string. This is just a transient state where we didn't get any data continue if len(data) == 0: # pylint: disable=C1801 # Empty string. Socket does not have any more data. We are done here even if we haven't read full payload break remaining -= len(data) yield data
python
def _read_payload(socket, payload_size): """ From the given socket, reads and yields payload of the given size. With sockets, we don't receive all data at once. Therefore this method will yield each time we read some data from the socket until the payload_size has reached or socket has no more data. Parameters ---------- socket Socket to read from payload_size : int Size of the payload to read. Exactly these many bytes are read from the socket before stopping the yield. Yields ------- int Type of the stream (1 => stdout, 2 => stderr) str Data in the stream """ remaining = payload_size while remaining > 0: # Try and read as much as possible data = read(socket, remaining) if data is None: # ``read`` will terminate with an empty string. This is just a transient state where we didn't get any data continue if len(data) == 0: # pylint: disable=C1801 # Empty string. Socket does not have any more data. We are done here even if we haven't read full payload break remaining -= len(data) yield data
[ "def", "_read_payload", "(", "socket", ",", "payload_size", ")", ":", "remaining", "=", "payload_size", "while", "remaining", ">", "0", ":", "# Try and read as much as possible", "data", "=", "read", "(", "socket", ",", "remaining", ")", "if", "data", "is", "N...
From the given socket, reads and yields payload of the given size. With sockets, we don't receive all data at once. Therefore this method will yield each time we read some data from the socket until the payload_size has reached or socket has no more data. Parameters ---------- socket Socket to read from payload_size : int Size of the payload to read. Exactly these many bytes are read from the socket before stopping the yield. Yields ------- int Type of the stream (1 => stdout, 2 => stderr) str Data in the stream
[ "From", "the", "given", "socket", "reads", "and", "yields", "payload", "of", "the", "given", "size", ".", "With", "sockets", "we", "don", "t", "receive", "all", "data", "at", "once", ".", "Therefore", "this", "method", "will", "yield", "each", "time", "w...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/attach_api.py#L119-L155
29,923
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.resource_not_found
def resource_not_found(function_name): """ Creates a Lambda Service ResourceNotFound Response Parameters ---------- function_name str Name of the function that was requested to invoke Returns ------- Flask.Response A response object representing the ResourceNotFound Error """ exception_tuple = LambdaErrorResponses.ResourceNotFoundException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.USER_ERROR, "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format(function_name) ), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def resource_not_found(function_name): """ Creates a Lambda Service ResourceNotFound Response Parameters ---------- function_name str Name of the function that was requested to invoke Returns ------- Flask.Response A response object representing the ResourceNotFound Error """ exception_tuple = LambdaErrorResponses.ResourceNotFoundException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.USER_ERROR, "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format(function_name) ), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "resource_not_found", "(", "function_name", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "ResourceNotFoundException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "Lamb...
Creates a Lambda Service ResourceNotFound Response Parameters ---------- function_name str Name of the function that was requested to invoke Returns ------- Flask.Response A response object representing the ResourceNotFound Error
[ "Creates", "a", "Lambda", "Service", "ResourceNotFound", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L39-L62
29,924
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.invalid_request_content
def invalid_request_content(message): """ Creates a Lambda Service InvalidRequestContent Response Parameters ---------- message str Message to be added to the body of the response Returns ------- Flask.Response A response object representing the InvalidRequestContent Error """ exception_tuple = LambdaErrorResponses.InvalidRequestContentException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, message), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def invalid_request_content(message): """ Creates a Lambda Service InvalidRequestContent Response Parameters ---------- message str Message to be added to the body of the response Returns ------- Flask.Response A response object representing the InvalidRequestContent Error """ exception_tuple = LambdaErrorResponses.InvalidRequestContentException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, message), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "invalid_request_content", "(", "message", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "InvalidRequestContentException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "...
Creates a Lambda Service InvalidRequestContent Response Parameters ---------- message str Message to be added to the body of the response Returns ------- Flask.Response A response object representing the InvalidRequestContent Error
[ "Creates", "a", "Lambda", "Service", "InvalidRequestContent", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L65-L85
29,925
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.unsupported_media_type
def unsupported_media_type(content_type): """ Creates a Lambda Service UnsupportedMediaType Response Parameters ---------- content_type str Content Type of the request that was made Returns ------- Flask.Response A response object representing the UnsupportedMediaType Error """ exception_tuple = LambdaErrorResponses.UnsupportedMediaTypeException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, "Unsupported content type: {}".format(content_type)), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def unsupported_media_type(content_type): """ Creates a Lambda Service UnsupportedMediaType Response Parameters ---------- content_type str Content Type of the request that was made Returns ------- Flask.Response A response object representing the UnsupportedMediaType Error """ exception_tuple = LambdaErrorResponses.UnsupportedMediaTypeException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, "Unsupported content type: {}".format(content_type)), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "unsupported_media_type", "(", "content_type", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "UnsupportedMediaTypeException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", ...
Creates a Lambda Service UnsupportedMediaType Response Parameters ---------- content_type str Content Type of the request that was made Returns ------- Flask.Response A response object representing the UnsupportedMediaType Error
[ "Creates", "a", "Lambda", "Service", "UnsupportedMediaType", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L88-L109
29,926
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.generic_service_exception
def generic_service_exception(*args): """ Creates a Lambda Service Generic ServiceException Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericServiceException Error """ exception_tuple = LambdaErrorResponses.ServiceException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.SERVICE_ERROR, "ServiceException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def generic_service_exception(*args): """ Creates a Lambda Service Generic ServiceException Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericServiceException Error """ exception_tuple = LambdaErrorResponses.ServiceException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.SERVICE_ERROR, "ServiceException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "generic_service_exception", "(", "*", "args", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "ServiceException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "LambdaEr...
Creates a Lambda Service Generic ServiceException Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericServiceException Error
[ "Creates", "a", "Lambda", "Service", "Generic", "ServiceException", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L112-L132
29,927
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.generic_path_not_found
def generic_path_not_found(*args): """ Creates a Lambda Service Generic PathNotFound Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericPathNotFound Error """ exception_tuple = LambdaErrorResponses.PathNotFoundException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.LOCAL_SERVICE_ERROR, "PathNotFoundException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def generic_path_not_found(*args): """ Creates a Lambda Service Generic PathNotFound Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericPathNotFound Error """ exception_tuple = LambdaErrorResponses.PathNotFoundException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.LOCAL_SERVICE_ERROR, "PathNotFoundException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "generic_path_not_found", "(", "*", "args", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "PathNotFoundException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "Lambda...
Creates a Lambda Service Generic PathNotFound Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericPathNotFound Error
[ "Creates", "a", "Lambda", "Service", "Generic", "PathNotFound", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L158-L179
29,928
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.generic_method_not_allowed
def generic_method_not_allowed(*args): """ Creates a Lambda Service Generic MethodNotAllowed Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericMethodNotAllowed Error """ exception_tuple = LambdaErrorResponses.MethodNotAllowedException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.LOCAL_SERVICE_ERROR, "MethodNotAllowedException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def generic_method_not_allowed(*args): """ Creates a Lambda Service Generic MethodNotAllowed Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericMethodNotAllowed Error """ exception_tuple = LambdaErrorResponses.MethodNotAllowedException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.LOCAL_SERVICE_ERROR, "MethodNotAllowedException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "generic_method_not_allowed", "(", "*", "args", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "MethodNotAllowedException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", ...
Creates a Lambda Service Generic MethodNotAllowed Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericMethodNotAllowed Error
[ "Creates", "a", "Lambda", "Service", "Generic", "MethodNotAllowed", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L182-L203
29,929
awslabs/aws-sam-cli
samcli/lib/utils/codeuri.py
resolve_code_path
def resolve_code_path(cwd, codeuri): """ Returns path to the function code resolved based on current working directory. Parameters ---------- cwd str Current working directory codeuri CodeURI of the function. This should contain the path to the function code Returns ------- str Absolute path to the function code """ LOG.debug("Resolving code path. Cwd=%s, CodeUri=%s", cwd, codeuri) # First, let us figure out the current working directory. # If current working directory is not provided, then default to the directory where the CLI is running from if not cwd or cwd == PRESENT_DIR: cwd = os.getcwd() # Make sure cwd is an absolute path cwd = os.path.abspath(cwd) # Next, let us get absolute path of function code. # Codepath is always relative to current working directory # If the path is relative, then construct the absolute version if not os.path.isabs(codeuri): codeuri = os.path.normpath(os.path.join(cwd, codeuri)) return codeuri
python
def resolve_code_path(cwd, codeuri): """ Returns path to the function code resolved based on current working directory. Parameters ---------- cwd str Current working directory codeuri CodeURI of the function. This should contain the path to the function code Returns ------- str Absolute path to the function code """ LOG.debug("Resolving code path. Cwd=%s, CodeUri=%s", cwd, codeuri) # First, let us figure out the current working directory. # If current working directory is not provided, then default to the directory where the CLI is running from if not cwd or cwd == PRESENT_DIR: cwd = os.getcwd() # Make sure cwd is an absolute path cwd = os.path.abspath(cwd) # Next, let us get absolute path of function code. # Codepath is always relative to current working directory # If the path is relative, then construct the absolute version if not os.path.isabs(codeuri): codeuri = os.path.normpath(os.path.join(cwd, codeuri)) return codeuri
[ "def", "resolve_code_path", "(", "cwd", ",", "codeuri", ")", ":", "LOG", ".", "debug", "(", "\"Resolving code path. Cwd=%s, CodeUri=%s\"", ",", "cwd", ",", "codeuri", ")", "# First, let us figure out the current working directory.", "# If current working directory is not provid...
Returns path to the function code resolved based on current working directory. Parameters ---------- cwd str Current working directory codeuri CodeURI of the function. This should contain the path to the function code Returns ------- str Absolute path to the function code
[ "Returns", "path", "to", "the", "function", "code", "resolved", "based", "on", "current", "working", "directory", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/codeuri.py#L13-L46
29,930
awslabs/aws-sam-cli
samcli/local/apigw/path_converter.py
PathConverter.convert_path_to_flask
def convert_path_to_flask(path): """ Converts a Path from an Api Gateway defined path to one that is accepted by Flask Examples: '/id/{id}' => '/id/<id>' '/{proxy+}' => '/<path:proxy>' :param str path: Path to convert to Flask defined path :return str: Path representing a Flask path """ proxy_sub_path = APIGW_TO_FLASK_REGEX.sub(FLASK_CAPTURE_ALL_PATH, path) # Replace the '{' and '}' with '<' and '>' respectively return proxy_sub_path.replace(LEFT_BRACKET, LEFT_ANGLE_BRACKET).replace(RIGHT_BRACKET, RIGHT_ANGLE_BRACKET)
python
def convert_path_to_flask(path): """ Converts a Path from an Api Gateway defined path to one that is accepted by Flask Examples: '/id/{id}' => '/id/<id>' '/{proxy+}' => '/<path:proxy>' :param str path: Path to convert to Flask defined path :return str: Path representing a Flask path """ proxy_sub_path = APIGW_TO_FLASK_REGEX.sub(FLASK_CAPTURE_ALL_PATH, path) # Replace the '{' and '}' with '<' and '>' respectively return proxy_sub_path.replace(LEFT_BRACKET, LEFT_ANGLE_BRACKET).replace(RIGHT_BRACKET, RIGHT_ANGLE_BRACKET)
[ "def", "convert_path_to_flask", "(", "path", ")", ":", "proxy_sub_path", "=", "APIGW_TO_FLASK_REGEX", ".", "sub", "(", "FLASK_CAPTURE_ALL_PATH", ",", "path", ")", "# Replace the '{' and '}' with '<' and '>' respectively", "return", "proxy_sub_path", ".", "replace", "(", "...
Converts a Path from an Api Gateway defined path to one that is accepted by Flask Examples: '/id/{id}' => '/id/<id>' '/{proxy+}' => '/<path:proxy>' :param str path: Path to convert to Flask defined path :return str: Path representing a Flask path
[ "Converts", "a", "Path", "from", "an", "Api", "Gateway", "defined", "path", "to", "one", "that", "is", "accepted", "by", "Flask" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/path_converter.py#L37-L52
29,931
awslabs/aws-sam-cli
samcli/local/apigw/path_converter.py
PathConverter.convert_path_to_api_gateway
def convert_path_to_api_gateway(path): """ Converts a Path from a Flask defined path to one that is accepted by Api Gateway Examples: '/id/<id>' => '/id/{id}' '/<path:proxy>' => '/{proxy+}' :param str path: Path to convert to Api Gateway defined path :return str: Path representing an Api Gateway path """ proxy_sub_path = FLASK_TO_APIGW_REGEX.sub(PROXY_PATH_PARAMS, path) # Replace the '<' and '>' with '{' and '}' respectively return proxy_sub_path.replace(LEFT_ANGLE_BRACKET, LEFT_BRACKET).replace(RIGHT_ANGLE_BRACKET, RIGHT_BRACKET)
python
def convert_path_to_api_gateway(path): """ Converts a Path from a Flask defined path to one that is accepted by Api Gateway Examples: '/id/<id>' => '/id/{id}' '/<path:proxy>' => '/{proxy+}' :param str path: Path to convert to Api Gateway defined path :return str: Path representing an Api Gateway path """ proxy_sub_path = FLASK_TO_APIGW_REGEX.sub(PROXY_PATH_PARAMS, path) # Replace the '<' and '>' with '{' and '}' respectively return proxy_sub_path.replace(LEFT_ANGLE_BRACKET, LEFT_BRACKET).replace(RIGHT_ANGLE_BRACKET, RIGHT_BRACKET)
[ "def", "convert_path_to_api_gateway", "(", "path", ")", ":", "proxy_sub_path", "=", "FLASK_TO_APIGW_REGEX", ".", "sub", "(", "PROXY_PATH_PARAMS", ",", "path", ")", "# Replace the '<' and '>' with '{' and '}' respectively", "return", "proxy_sub_path", ".", "replace", "(", ...
Converts a Path from a Flask defined path to one that is accepted by Api Gateway Examples: '/id/<id>' => '/id/{id}' '/<path:proxy>' => '/{proxy+}' :param str path: Path to convert to Api Gateway defined path :return str: Path representing an Api Gateway path
[ "Converts", "a", "Path", "from", "a", "Flask", "defined", "path", "to", "one", "that", "is", "accepted", "by", "Api", "Gateway" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/path_converter.py#L55-L70
29,932
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/integration_uri.py
LambdaUri.get_function_name
def get_function_name(integration_uri): """ Gets the name of the function from the Integration URI ARN. This is a best effort service which returns None if function name could not be parsed. This can happen when the ARN is an intrinsic function which is too complex or the ARN is not a Lambda integration. Parameters ---------- integration_uri : basestring or dict Integration URI data extracted from Swagger dictionary. This could be a string of the ARN or an intrinsic function that will resolve to the ARN Returns ------- basestring or None If the function name could be parsed out of the Integration URI ARN. None, otherwise """ arn = LambdaUri._get_function_arn(integration_uri) LOG.debug("Extracted Function ARN: %s", arn) return LambdaUri._get_function_name_from_arn(arn)
python
def get_function_name(integration_uri): """ Gets the name of the function from the Integration URI ARN. This is a best effort service which returns None if function name could not be parsed. This can happen when the ARN is an intrinsic function which is too complex or the ARN is not a Lambda integration. Parameters ---------- integration_uri : basestring or dict Integration URI data extracted from Swagger dictionary. This could be a string of the ARN or an intrinsic function that will resolve to the ARN Returns ------- basestring or None If the function name could be parsed out of the Integration URI ARN. None, otherwise """ arn = LambdaUri._get_function_arn(integration_uri) LOG.debug("Extracted Function ARN: %s", arn) return LambdaUri._get_function_name_from_arn(arn)
[ "def", "get_function_name", "(", "integration_uri", ")", ":", "arn", "=", "LambdaUri", ".", "_get_function_arn", "(", "integration_uri", ")", "LOG", ".", "debug", "(", "\"Extracted Function ARN: %s\"", ",", "arn", ")", "return", "LambdaUri", ".", "_get_function_name...
Gets the name of the function from the Integration URI ARN. This is a best effort service which returns None if function name could not be parsed. This can happen when the ARN is an intrinsic function which is too complex or the ARN is not a Lambda integration. Parameters ---------- integration_uri : basestring or dict Integration URI data extracted from Swagger dictionary. This could be a string of the ARN or an intrinsic function that will resolve to the ARN Returns ------- basestring or None If the function name could be parsed out of the Integration URI ARN. None, otherwise
[ "Gets", "the", "name", "of", "the", "function", "from", "the", "Integration", "URI", "ARN", ".", "This", "is", "a", "best", "effort", "service", "which", "returns", "None", "if", "function", "name", "could", "not", "be", "parsed", ".", "This", "can", "ha...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/integration_uri.py#L42-L64
29,933
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/integration_uri.py
LambdaUri._get_function_name_from_arn
def _get_function_name_from_arn(function_arn): """ Given the integration ARN, extract the Lambda function name from the ARN. If there are stage variables, or other unsupported formats, this function will return None. Parameters ---------- function_arn : basestring or None Function ARN from the swagger document Returns ------- basestring or None Function name of this integration. None if the ARN is not parsable """ if not function_arn: return None matches = re.match(LambdaUri._REGEX_GET_FUNCTION_NAME, function_arn) if not matches or not matches.groups(): LOG.debug("No Lambda function ARN defined for integration containing ARN %s", function_arn) return None groups = matches.groups() maybe_function_name = groups[0] # This regex has only one group match # Function name could be a real name or a stage variable or some unknown format if re.match(LambdaUri._REGEX_STAGE_VARIABLE, maybe_function_name): # yes, this is a stage variable LOG.debug("Stage variables are not supported. Ignoring integration with function ARN %s", function_arn) return None elif re.match(LambdaUri._REGEX_VALID_FUNCTION_NAME, maybe_function_name): # Yes, this is a real function name return maybe_function_name # Some unknown format LOG.debug("Ignoring integration ARN. Unable to parse Function Name from function arn %s", function_arn)
python
def _get_function_name_from_arn(function_arn): """ Given the integration ARN, extract the Lambda function name from the ARN. If there are stage variables, or other unsupported formats, this function will return None. Parameters ---------- function_arn : basestring or None Function ARN from the swagger document Returns ------- basestring or None Function name of this integration. None if the ARN is not parsable """ if not function_arn: return None matches = re.match(LambdaUri._REGEX_GET_FUNCTION_NAME, function_arn) if not matches or not matches.groups(): LOG.debug("No Lambda function ARN defined for integration containing ARN %s", function_arn) return None groups = matches.groups() maybe_function_name = groups[0] # This regex has only one group match # Function name could be a real name or a stage variable or some unknown format if re.match(LambdaUri._REGEX_STAGE_VARIABLE, maybe_function_name): # yes, this is a stage variable LOG.debug("Stage variables are not supported. Ignoring integration with function ARN %s", function_arn) return None elif re.match(LambdaUri._REGEX_VALID_FUNCTION_NAME, maybe_function_name): # Yes, this is a real function name return maybe_function_name # Some unknown format LOG.debug("Ignoring integration ARN. Unable to parse Function Name from function arn %s", function_arn)
[ "def", "_get_function_name_from_arn", "(", "function_arn", ")", ":", "if", "not", "function_arn", ":", "return", "None", "matches", "=", "re", ".", "match", "(", "LambdaUri", ".", "_REGEX_GET_FUNCTION_NAME", ",", "function_arn", ")", "if", "not", "matches", "or"...
Given the integration ARN, extract the Lambda function name from the ARN. If there are stage variables, or other unsupported formats, this function will return None. Parameters ---------- function_arn : basestring or None Function ARN from the swagger document Returns ------- basestring or None Function name of this integration. None if the ARN is not parsable
[ "Given", "the", "integration", "ARN", "extract", "the", "Lambda", "function", "name", "from", "the", "ARN", ".", "If", "there", "are", "stage", "variables", "or", "other", "unsupported", "formats", "this", "function", "will", "return", "None", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/integration_uri.py#L131-L170
29,934
awslabs/aws-sam-cli
samcli/commands/local/invoke/cli.py
_get_event
def _get_event(event_file_name): """ Read the event JSON data from the given file. If no file is provided, read the event from stdin. :param string event_file_name: Path to event file, or '-' for stdin :return string: Contents of the event file or stdin """ if event_file_name == STDIN_FILE_NAME: # If event is empty, listen to stdin for event data until EOF LOG.info("Reading invoke payload from stdin (you can also pass it from file with --event)") # click.open_file knows to open stdin when filename is '-'. This is safer than manually opening streams, and # accidentally closing a standard stream with click.open_file(event_file_name, 'r') as fp: return fp.read()
python
def _get_event(event_file_name): """ Read the event JSON data from the given file. If no file is provided, read the event from stdin. :param string event_file_name: Path to event file, or '-' for stdin :return string: Contents of the event file or stdin """ if event_file_name == STDIN_FILE_NAME: # If event is empty, listen to stdin for event data until EOF LOG.info("Reading invoke payload from stdin (you can also pass it from file with --event)") # click.open_file knows to open stdin when filename is '-'. This is safer than manually opening streams, and # accidentally closing a standard stream with click.open_file(event_file_name, 'r') as fp: return fp.read()
[ "def", "_get_event", "(", "event_file_name", ")", ":", "if", "event_file_name", "==", "STDIN_FILE_NAME", ":", "# If event is empty, listen to stdin for event data until EOF", "LOG", ".", "info", "(", "\"Reading invoke payload from stdin (you can also pass it from file with --event)\"...
Read the event JSON data from the given file. If no file is provided, read the event from stdin. :param string event_file_name: Path to event file, or '-' for stdin :return string: Contents of the event file or stdin
[ "Read", "the", "event", "JSON", "data", "from", "the", "given", "file", ".", "If", "no", "file", "is", "provided", "read", "the", "event", "from", "stdin", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/invoke/cli.py#L112-L127
29,935
awslabs/aws-sam-cli
samcli/lib/samlib/resource_metadata_normalizer.py
ResourceMetadataNormalizer.normalize
def normalize(template_dict): """ Normalize all Resources in the template with the Metadata Key on the resource. This method will mutate the template Parameters ---------- template_dict dict Dictionary representing the template """ resources = template_dict.get(RESOURCES_KEY, {}) for logical_id, resource in resources.items(): resource_metadata = resource.get(METADATA_KEY, {}) asset_path = resource_metadata.get(ASSET_PATH_METADATA_KEY) asset_property = resource_metadata.get(ASSET_PROPERTY_METADATA_KEY) ResourceMetadataNormalizer._replace_property(asset_property, asset_path, resource, logical_id)
python
def normalize(template_dict): """ Normalize all Resources in the template with the Metadata Key on the resource. This method will mutate the template Parameters ---------- template_dict dict Dictionary representing the template """ resources = template_dict.get(RESOURCES_KEY, {}) for logical_id, resource in resources.items(): resource_metadata = resource.get(METADATA_KEY, {}) asset_path = resource_metadata.get(ASSET_PATH_METADATA_KEY) asset_property = resource_metadata.get(ASSET_PROPERTY_METADATA_KEY) ResourceMetadataNormalizer._replace_property(asset_property, asset_path, resource, logical_id)
[ "def", "normalize", "(", "template_dict", ")", ":", "resources", "=", "template_dict", ".", "get", "(", "RESOURCES_KEY", ",", "{", "}", ")", "for", "logical_id", ",", "resource", "in", "resources", ".", "items", "(", ")", ":", "resource_metadata", "=", "re...
Normalize all Resources in the template with the Metadata Key on the resource. This method will mutate the template Parameters ---------- template_dict dict Dictionary representing the template
[ "Normalize", "all", "Resources", "in", "the", "template", "with", "the", "Metadata", "Key", "on", "the", "resource", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/samlib/resource_metadata_normalizer.py#L19-L38
29,936
awslabs/aws-sam-cli
samcli/lib/samlib/resource_metadata_normalizer.py
ResourceMetadataNormalizer._replace_property
def _replace_property(property_key, property_value, resource, logical_id): """ Replace a property with an asset on a given resource This method will mutate the template Parameters ---------- property str The property to replace on the resource property_value str The new value of the property resource dict Dictionary representing the Resource to change logical_id str LogicalId of the Resource """ if property_key and property_value: resource.get(PROPERTIES_KEY, {})[property_key] = property_value elif property_key or property_value: LOG.info("WARNING: Ignoring Metadata for Resource %s. Metadata contains only aws:asset:path or " "aws:assert:property but not both", logical_id)
python
def _replace_property(property_key, property_value, resource, logical_id): """ Replace a property with an asset on a given resource This method will mutate the template Parameters ---------- property str The property to replace on the resource property_value str The new value of the property resource dict Dictionary representing the Resource to change logical_id str LogicalId of the Resource """ if property_key and property_value: resource.get(PROPERTIES_KEY, {})[property_key] = property_value elif property_key or property_value: LOG.info("WARNING: Ignoring Metadata for Resource %s. Metadata contains only aws:asset:path or " "aws:assert:property but not both", logical_id)
[ "def", "_replace_property", "(", "property_key", ",", "property_value", ",", "resource", ",", "logical_id", ")", ":", "if", "property_key", "and", "property_value", ":", "resource", ".", "get", "(", "PROPERTIES_KEY", ",", "{", "}", ")", "[", "property_key", "]...
Replace a property with an asset on a given resource This method will mutate the template Parameters ---------- property str The property to replace on the resource property_value str The new value of the property resource dict Dictionary representing the Resource to change logical_id str LogicalId of the Resource
[ "Replace", "a", "property", "with", "an", "asset", "on", "a", "given", "resource" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/samlib/resource_metadata_normalizer.py#L41-L63
29,937
awslabs/aws-sam-cli
samcli/cli/command.py
BaseCommand._set_commands
def _set_commands(package_names): """ Extract the command name from package name. Last part of the module path is the command ie. if path is foo.bar.baz, then "baz" is the command name. :param package_names: List of package names :return: Dictionary with command name as key and the package name as value. """ commands = {} for pkg_name in package_names: cmd_name = pkg_name.split('.')[-1] commands[cmd_name] = pkg_name return commands
python
def _set_commands(package_names): """ Extract the command name from package name. Last part of the module path is the command ie. if path is foo.bar.baz, then "baz" is the command name. :param package_names: List of package names :return: Dictionary with command name as key and the package name as value. """ commands = {} for pkg_name in package_names: cmd_name = pkg_name.split('.')[-1] commands[cmd_name] = pkg_name return commands
[ "def", "_set_commands", "(", "package_names", ")", ":", "commands", "=", "{", "}", "for", "pkg_name", "in", "package_names", ":", "cmd_name", "=", "pkg_name", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "commands", "[", "cmd_name", "]", "=", "pk...
Extract the command name from package name. Last part of the module path is the command ie. if path is foo.bar.baz, then "baz" is the command name. :param package_names: List of package names :return: Dictionary with command name as key and the package name as value.
[ "Extract", "the", "command", "name", "from", "package", "name", ".", "Last", "part", "of", "the", "module", "path", "is", "the", "command", "ie", ".", "if", "path", "is", "foo", ".", "bar", ".", "baz", "then", "baz", "is", "the", "command", "name", "...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/command.py#L62-L77
29,938
awslabs/aws-sam-cli
samcli/cli/command.py
BaseCommand.get_command
def get_command(self, ctx, cmd_name): """ Overrides method from ``click.MultiCommand`` that returns Click CLI object for given command name, if found. :param ctx: Click context :param cmd_name: Top-level command name :return: Click object representing the command """ if cmd_name not in self._commands: logger.error("Command %s not available", cmd_name) return pkg_name = self._commands[cmd_name] try: mod = importlib.import_module(pkg_name) except ImportError: logger.exception("Command '%s' is not configured correctly. Unable to import '%s'", cmd_name, pkg_name) return if not hasattr(mod, "cli"): logger.error("Command %s is not configured correctly. It must expose an function called 'cli'", cmd_name) return return mod.cli
python
def get_command(self, ctx, cmd_name): """ Overrides method from ``click.MultiCommand`` that returns Click CLI object for given command name, if found. :param ctx: Click context :param cmd_name: Top-level command name :return: Click object representing the command """ if cmd_name not in self._commands: logger.error("Command %s not available", cmd_name) return pkg_name = self._commands[cmd_name] try: mod = importlib.import_module(pkg_name) except ImportError: logger.exception("Command '%s' is not configured correctly. Unable to import '%s'", cmd_name, pkg_name) return if not hasattr(mod, "cli"): logger.error("Command %s is not configured correctly. It must expose an function called 'cli'", cmd_name) return return mod.cli
[ "def", "get_command", "(", "self", ",", "ctx", ",", "cmd_name", ")", ":", "if", "cmd_name", "not", "in", "self", ".", "_commands", ":", "logger", ".", "error", "(", "\"Command %s not available\"", ",", "cmd_name", ")", "return", "pkg_name", "=", "self", "....
Overrides method from ``click.MultiCommand`` that returns Click CLI object for given command name, if found. :param ctx: Click context :param cmd_name: Top-level command name :return: Click object representing the command
[ "Overrides", "method", "from", "click", ".", "MultiCommand", "that", "returns", "Click", "CLI", "object", "for", "given", "command", "name", "if", "found", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/command.py#L88-L112
29,939
awslabs/aws-sam-cli
samcli/lib/utils/stream_writer.py
StreamWriter.write
def write(self, output): """ Writes specified text to the underlying stream Parameters ---------- output bytes-like object Bytes to write """ self._stream.write(output) if self._auto_flush: self._stream.flush()
python
def write(self, output): """ Writes specified text to the underlying stream Parameters ---------- output bytes-like object Bytes to write """ self._stream.write(output) if self._auto_flush: self._stream.flush()
[ "def", "write", "(", "self", ",", "output", ")", ":", "self", ".", "_stream", ".", "write", "(", "output", ")", "if", "self", ".", "_auto_flush", ":", "self", ".", "_stream", ".", "flush", "(", ")" ]
Writes specified text to the underlying stream Parameters ---------- output bytes-like object Bytes to write
[ "Writes", "specified", "text", "to", "the", "underlying", "stream" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/stream_writer.py#L22-L34
29,940
awslabs/aws-sam-cli
samcli/lib/build/workflow_config.py
get_workflow_config
def get_workflow_config(runtime, code_dir, project_dir): """ Get a workflow config that corresponds to the runtime provided. This method examines contents of the project and code directories to determine the most appropriate workflow for the given runtime. Currently the decision is based on the presence of a supported manifest file. For runtimes that have more than one workflow, we choose a workflow by examining ``code_dir`` followed by ``project_dir`` for presence of a supported manifest. Parameters ---------- runtime str The runtime of the config code_dir str Directory where Lambda function code is present project_dir str Root of the Serverless application project. Returns ------- namedtuple(Capability) namedtuple that represents the Builder Workflow Config """ selectors_by_runtime = { "python2.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.6": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "nodejs4.3": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs6.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs8.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "ruby2.5": BasicWorkflowSelector(RUBY_BUNDLER_CONFIG), "dotnetcore2.0": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), "dotnetcore2.1": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), # When Maven builder exists, add to this list so we can automatically choose a builder based on the supported # manifest "java8": ManifestWorkflowSelector([ # Gradle builder needs custom executable paths to find `gradlew` binary JAVA_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir]), JAVA_KOTLIN_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir]), JAVA_MAVEN_CONFIG ]), } if runtime not in selectors_by_runtime: raise UnsupportedRuntimeException("'{}' runtime is not supported".format(runtime)) selector = selectors_by_runtime[runtime] try: config = selector.get_config(code_dir, project_dir) return config except ValueError as ex: raise 
UnsupportedRuntimeException("Unable to find a supported build workflow for runtime '{}'. Reason: {}" .format(runtime, str(ex)))
python
def get_workflow_config(runtime, code_dir, project_dir): """ Get a workflow config that corresponds to the runtime provided. This method examines contents of the project and code directories to determine the most appropriate workflow for the given runtime. Currently the decision is based on the presence of a supported manifest file. For runtimes that have more than one workflow, we choose a workflow by examining ``code_dir`` followed by ``project_dir`` for presence of a supported manifest. Parameters ---------- runtime str The runtime of the config code_dir str Directory where Lambda function code is present project_dir str Root of the Serverless application project. Returns ------- namedtuple(Capability) namedtuple that represents the Builder Workflow Config """ selectors_by_runtime = { "python2.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.6": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "nodejs4.3": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs6.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs8.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "ruby2.5": BasicWorkflowSelector(RUBY_BUNDLER_CONFIG), "dotnetcore2.0": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), "dotnetcore2.1": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), # When Maven builder exists, add to this list so we can automatically choose a builder based on the supported # manifest "java8": ManifestWorkflowSelector([ # Gradle builder needs custom executable paths to find `gradlew` binary JAVA_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir]), JAVA_KOTLIN_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir]), JAVA_MAVEN_CONFIG ]), } if runtime not in selectors_by_runtime: raise UnsupportedRuntimeException("'{}' runtime is not supported".format(runtime)) selector = selectors_by_runtime[runtime] try: config = selector.get_config(code_dir, project_dir) return config except ValueError as ex: raise 
UnsupportedRuntimeException("Unable to find a supported build workflow for runtime '{}'. Reason: {}" .format(runtime, str(ex)))
[ "def", "get_workflow_config", "(", "runtime", ",", "code_dir", ",", "project_dir", ")", ":", "selectors_by_runtime", "=", "{", "\"python2.7\"", ":", "BasicWorkflowSelector", "(", "PYTHON_PIP_CONFIG", ")", ",", "\"python3.6\"", ":", "BasicWorkflowSelector", "(", "PYTHO...
Get a workflow config that corresponds to the runtime provided. This method examines contents of the project and code directories to determine the most appropriate workflow for the given runtime. Currently the decision is based on the presence of a supported manifest file. For runtimes that have more than one workflow, we choose a workflow by examining ``code_dir`` followed by ``project_dir`` for presence of a supported manifest. Parameters ---------- runtime str The runtime of the config code_dir str Directory where Lambda function code is present project_dir str Root of the Serverless application project. Returns ------- namedtuple(Capability) namedtuple that represents the Builder Workflow Config
[ "Get", "a", "workflow", "config", "that", "corresponds", "to", "the", "runtime", "provided", ".", "This", "method", "examines", "contents", "of", "the", "project", "and", "code", "directories", "to", "determine", "the", "most", "appropriate", "workflow", "for", ...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/build/workflow_config.py#L70-L125
29,941
awslabs/aws-sam-cli
samcli/lib/build/workflow_config.py
supports_build_in_container
def supports_build_in_container(config): """ Given a workflow config, this method provides a boolean on whether the workflow can run within a container or not. Parameters ---------- config namedtuple(Capability) Config specifying the particular build workflow Returns ------- tuple(bool, str) True, if this workflow can be built inside a container. False, along with a reason message if it cannot be. """ def _key(c): return str(c.language) + str(c.dependency_manager) + str(c.application_framework) # This information could have beeen bundled inside the Workflow Config object. But we this way because # ultimately the workflow's implementation dictates whether it can run within a container or not. # A "workflow config" is like a primary key to identify the workflow. So we use the config as a key in the # map to identify which workflows can support building within a container. unsupported = { _key(DOTNET_CLIPACKAGE_CONFIG): "We do not support building .NET Core Lambda functions within a container. " "Try building without the container. Most .NET Core functions will build " "successfully.", } thiskey = _key(config) if thiskey in unsupported: return False, unsupported[thiskey] return True, None
python
def supports_build_in_container(config): """ Given a workflow config, this method provides a boolean on whether the workflow can run within a container or not. Parameters ---------- config namedtuple(Capability) Config specifying the particular build workflow Returns ------- tuple(bool, str) True, if this workflow can be built inside a container. False, along with a reason message if it cannot be. """ def _key(c): return str(c.language) + str(c.dependency_manager) + str(c.application_framework) # This information could have beeen bundled inside the Workflow Config object. But we this way because # ultimately the workflow's implementation dictates whether it can run within a container or not. # A "workflow config" is like a primary key to identify the workflow. So we use the config as a key in the # map to identify which workflows can support building within a container. unsupported = { _key(DOTNET_CLIPACKAGE_CONFIG): "We do not support building .NET Core Lambda functions within a container. " "Try building without the container. Most .NET Core functions will build " "successfully.", } thiskey = _key(config) if thiskey in unsupported: return False, unsupported[thiskey] return True, None
[ "def", "supports_build_in_container", "(", "config", ")", ":", "def", "_key", "(", "c", ")", ":", "return", "str", "(", "c", ".", "language", ")", "+", "str", "(", "c", ".", "dependency_manager", ")", "+", "str", "(", "c", ".", "application_framework", ...
Given a workflow config, this method provides a boolean on whether the workflow can run within a container or not. Parameters ---------- config namedtuple(Capability) Config specifying the particular build workflow Returns ------- tuple(bool, str) True, if this workflow can be built inside a container. False, along with a reason message if it cannot be.
[ "Given", "a", "workflow", "config", "this", "method", "provides", "a", "boolean", "on", "whether", "the", "workflow", "can", "run", "within", "a", "container", "or", "not", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/build/workflow_config.py#L128-L161
29,942
awslabs/aws-sam-cli
samcli/lib/build/workflow_config.py
ManifestWorkflowSelector.get_config
def get_config(self, code_dir, project_dir): """ Finds a configuration by looking for a manifest in the given directories. Returns ------- samcli.lib.build.workflow_config.CONFIG A supported configuration if one is found Raises ------ ValueError If none of the supported manifests files are found """ # Search for manifest first in code directory and then in the project directory. # Search order is important here because we want to prefer the manifest present within the code directory over # a manifest present in project directory. search_dirs = [code_dir, project_dir] LOG.debug("Looking for a supported build workflow in following directories: %s", search_dirs) for config in self.configs: if any([self._has_manifest(config, directory) for directory in search_dirs]): return config raise ValueError("None of the supported manifests '{}' were found in the following paths '{}'".format( [config.manifest_name for config in self.configs], search_dirs))
python
def get_config(self, code_dir, project_dir): """ Finds a configuration by looking for a manifest in the given directories. Returns ------- samcli.lib.build.workflow_config.CONFIG A supported configuration if one is found Raises ------ ValueError If none of the supported manifests files are found """ # Search for manifest first in code directory and then in the project directory. # Search order is important here because we want to prefer the manifest present within the code directory over # a manifest present in project directory. search_dirs = [code_dir, project_dir] LOG.debug("Looking for a supported build workflow in following directories: %s", search_dirs) for config in self.configs: if any([self._has_manifest(config, directory) for directory in search_dirs]): return config raise ValueError("None of the supported manifests '{}' were found in the following paths '{}'".format( [config.manifest_name for config in self.configs], search_dirs))
[ "def", "get_config", "(", "self", ",", "code_dir", ",", "project_dir", ")", ":", "# Search for manifest first in code directory and then in the project directory.", "# Search order is important here because we want to prefer the manifest present within the code directory over", "# a manifest...
Finds a configuration by looking for a manifest in the given directories. Returns ------- samcli.lib.build.workflow_config.CONFIG A supported configuration if one is found Raises ------ ValueError If none of the supported manifests files are found
[ "Finds", "a", "configuration", "by", "looking", "for", "a", "manifest", "in", "the", "given", "directories", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/build/workflow_config.py#L188-L216
29,943
awslabs/aws-sam-cli
samcli/yamlhelper.py
intrinsics_multi_constructor
def intrinsics_multi_constructor(loader, tag_prefix, node): """ YAML constructor to parse CloudFormation intrinsics. This will return a dictionary with key being the instrinsic name """ # Get the actual tag name excluding the first exclamation tag = node.tag[1:] # Some intrinsic functions doesn't support prefix "Fn::" prefix = "Fn::" if tag in ["Ref", "Condition"]: prefix = "" cfntag = prefix + tag if tag == "GetAtt" and isinstance(node.value, six.string_types): # ShortHand notation for !GetAtt accepts Resource.Attribute format # while the standard notation is to use an array # [Resource, Attribute]. Convert shorthand to standard format value = node.value.split(".", 1) elif isinstance(node, ScalarNode): # Value of this node is scalar value = loader.construct_scalar(node) elif isinstance(node, SequenceNode): # Value of this node is an array (Ex: [1,2]) value = loader.construct_sequence(node) else: # Value of this node is an mapping (ex: {foo: bar}) value = loader.construct_mapping(node) return {cfntag: value}
python
def intrinsics_multi_constructor(loader, tag_prefix, node): """ YAML constructor to parse CloudFormation intrinsics. This will return a dictionary with key being the instrinsic name """ # Get the actual tag name excluding the first exclamation tag = node.tag[1:] # Some intrinsic functions doesn't support prefix "Fn::" prefix = "Fn::" if tag in ["Ref", "Condition"]: prefix = "" cfntag = prefix + tag if tag == "GetAtt" and isinstance(node.value, six.string_types): # ShortHand notation for !GetAtt accepts Resource.Attribute format # while the standard notation is to use an array # [Resource, Attribute]. Convert shorthand to standard format value = node.value.split(".", 1) elif isinstance(node, ScalarNode): # Value of this node is scalar value = loader.construct_scalar(node) elif isinstance(node, SequenceNode): # Value of this node is an array (Ex: [1,2]) value = loader.construct_sequence(node) else: # Value of this node is an mapping (ex: {foo: bar}) value = loader.construct_mapping(node) return {cfntag: value}
[ "def", "intrinsics_multi_constructor", "(", "loader", ",", "tag_prefix", ",", "node", ")", ":", "# Get the actual tag name excluding the first exclamation", "tag", "=", "node", ".", "tag", "[", "1", ":", "]", "# Some intrinsic functions doesn't support prefix \"Fn::\"", "pr...
YAML constructor to parse CloudFormation intrinsics. This will return a dictionary with key being the instrinsic name
[ "YAML", "constructor", "to", "parse", "CloudFormation", "intrinsics", ".", "This", "will", "return", "a", "dictionary", "with", "key", "being", "the", "instrinsic", "name" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/yamlhelper.py#L12-L46
29,944
awslabs/aws-sam-cli
samcli/yamlhelper.py
yaml_parse
def yaml_parse(yamlstr): """Parse a yaml string""" try: # PyYAML doesn't support json as well as it should, so if the input # is actually just json it is better to parse it with the standard # json parser. return json.loads(yamlstr) except ValueError: yaml.SafeLoader.add_multi_constructor("!", intrinsics_multi_constructor) return yaml.safe_load(yamlstr)
python
def yaml_parse(yamlstr): """Parse a yaml string""" try: # PyYAML doesn't support json as well as it should, so if the input # is actually just json it is better to parse it with the standard # json parser. return json.loads(yamlstr) except ValueError: yaml.SafeLoader.add_multi_constructor("!", intrinsics_multi_constructor) return yaml.safe_load(yamlstr)
[ "def", "yaml_parse", "(", "yamlstr", ")", ":", "try", ":", "# PyYAML doesn't support json as well as it should, so if the input", "# is actually just json it is better to parse it with the standard", "# json parser.", "return", "json", ".", "loads", "(", "yamlstr", ")", "except",...
Parse a yaml string
[ "Parse", "a", "yaml", "string" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/yamlhelper.py#L58-L67
29,945
awslabs/aws-sam-cli
samcli/commands/local/lib/generated_sample_events/events.py
Events.encode
def encode(self, tags, encoding, values_to_sub): """ reads the encoding type from the event-mapping.json and determines whether a value needs encoding Parameters ---------- tags: dict the values of a particular event that can be substituted within the event json encoding: string string that helps navigate to the encoding field of the json values_to_sub: dict key/value pairs that will be substituted into the json Returns ------- values_to_sub: dict the encoded (if need be) values to substitute into the json. """ for tag in tags: if tags[tag].get(encoding) != "None": if tags[tag].get(encoding) == "url": values_to_sub[tag] = self.url_encode(values_to_sub[tag]) if tags[tag].get(encoding) == "base64": values_to_sub[tag] = self.base64_utf_encode(values_to_sub[tag]) return values_to_sub
python
def encode(self, tags, encoding, values_to_sub): """ reads the encoding type from the event-mapping.json and determines whether a value needs encoding Parameters ---------- tags: dict the values of a particular event that can be substituted within the event json encoding: string string that helps navigate to the encoding field of the json values_to_sub: dict key/value pairs that will be substituted into the json Returns ------- values_to_sub: dict the encoded (if need be) values to substitute into the json. """ for tag in tags: if tags[tag].get(encoding) != "None": if tags[tag].get(encoding) == "url": values_to_sub[tag] = self.url_encode(values_to_sub[tag]) if tags[tag].get(encoding) == "base64": values_to_sub[tag] = self.base64_utf_encode(values_to_sub[tag]) return values_to_sub
[ "def", "encode", "(", "self", ",", "tags", ",", "encoding", ",", "values_to_sub", ")", ":", "for", "tag", "in", "tags", ":", "if", "tags", "[", "tag", "]", ".", "get", "(", "encoding", ")", "!=", "\"None\"", ":", "if", "tags", "[", "tag", "]", "....
reads the encoding type from the event-mapping.json and determines whether a value needs encoding Parameters ---------- tags: dict the values of a particular event that can be substituted within the event json encoding: string string that helps navigate to the encoding field of the json values_to_sub: dict key/value pairs that will be substituted into the json Returns ------- values_to_sub: dict the encoded (if need be) values to substitute into the json.
[ "reads", "the", "encoding", "type", "from", "the", "event", "-", "mapping", ".", "json", "and", "determines", "whether", "a", "value", "needs", "encoding" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/generated_sample_events/events.py#L36-L62
29,946
awslabs/aws-sam-cli
samcli/commands/local/lib/generated_sample_events/events.py
Events.generate_event
def generate_event(self, service_name, event_type, values_to_sub): """ opens the event json, substitutes the values in, and returns the customized event json Parameters ---------- service_name: string name of the top level service (S3, apigateway, etc) event_type: string name of the event underneath the service values_to_sub: dict key/value pairs to substitute into the json Returns ------- renderer.render(): string string version of the custom event json """ # set variables for easy calling tags = self.event_mapping[service_name][event_type]['tags'] values_to_sub = self.encode(tags, 'encoding', values_to_sub) # construct the path to the Events json file this_folder = os.path.dirname(os.path.abspath(__file__)) file_name = self.event_mapping[service_name][event_type]['filename'] + ".json" file_path = os.path.join(this_folder, "events", service_name, file_name) # open the file with open(file_path) as f: data = json.load(f) data = json.dumps(data, indent=2) # return the substituted file return renderer.render(data, values_to_sub)
python
def generate_event(self, service_name, event_type, values_to_sub): """ opens the event json, substitutes the values in, and returns the customized event json Parameters ---------- service_name: string name of the top level service (S3, apigateway, etc) event_type: string name of the event underneath the service values_to_sub: dict key/value pairs to substitute into the json Returns ------- renderer.render(): string string version of the custom event json """ # set variables for easy calling tags = self.event_mapping[service_name][event_type]['tags'] values_to_sub = self.encode(tags, 'encoding', values_to_sub) # construct the path to the Events json file this_folder = os.path.dirname(os.path.abspath(__file__)) file_name = self.event_mapping[service_name][event_type]['filename'] + ".json" file_path = os.path.join(this_folder, "events", service_name, file_name) # open the file with open(file_path) as f: data = json.load(f) data = json.dumps(data, indent=2) # return the substituted file return renderer.render(data, values_to_sub)
[ "def", "generate_event", "(", "self", ",", "service_name", ",", "event_type", ",", "values_to_sub", ")", ":", "# set variables for easy calling", "tags", "=", "self", ".", "event_mapping", "[", "service_name", "]", "[", "event_type", "]", "[", "'tags'", "]", "va...
opens the event json, substitutes the values in, and returns the customized event json Parameters ---------- service_name: string name of the top level service (S3, apigateway, etc) event_type: string name of the event underneath the service values_to_sub: dict key/value pairs to substitute into the json Returns ------- renderer.render(): string string version of the custom event json
[ "opens", "the", "event", "json", "substitutes", "the", "values", "in", "and", "returns", "the", "customized", "event", "json" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/generated_sample_events/events.py#L94-L129
29,947
awslabs/aws-sam-cli
samcli/lib/utils/colors.py
Colored.underline
def underline(self, msg): """Underline the input""" return click.style(msg, underline=True) if self.colorize else msg
python
def underline(self, msg): """Underline the input""" return click.style(msg, underline=True) if self.colorize else msg
[ "def", "underline", "(", "self", ",", "msg", ")", ":", "return", "click", ".", "style", "(", "msg", ",", "underline", "=", "True", ")", "if", "self", ".", "colorize", "else", "msg" ]
Underline the input
[ "Underline", "the", "input" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/colors.py#L57-L59
29,948
awslabs/aws-sam-cli
samcli/lib/utils/colors.py
Colored._color
def _color(self, msg, color): """Internal helper method to add colors to input""" kwargs = {'fg': color} return click.style(msg, **kwargs) if self.colorize else msg
python
def _color(self, msg, color): """Internal helper method to add colors to input""" kwargs = {'fg': color} return click.style(msg, **kwargs) if self.colorize else msg
[ "def", "_color", "(", "self", ",", "msg", ",", "color", ")", ":", "kwargs", "=", "{", "'fg'", ":", "color", "}", "return", "click", ".", "style", "(", "msg", ",", "*", "*", "kwargs", ")", "if", "self", ".", "colorize", "else", "msg" ]
Internal helper method to add colors to input
[ "Internal", "helper", "method", "to", "add", "colors", "to", "input" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/colors.py#L61-L64
29,949
awslabs/aws-sam-cli
samcli/commands/local/lib/provider.py
LayerVersion._compute_layer_version
def _compute_layer_version(is_defined_within_template, arn): """ Parses out the Layer version from the arn Parameters ---------- is_defined_within_template bool True if the resource is a Ref to a resource otherwise False arn str ARN of the Resource Returns ------- int The Version of the LayerVersion """ if is_defined_within_template: return None try: _, layer_version = arn.rsplit(':', 1) layer_version = int(layer_version) except ValueError: raise InvalidLayerVersionArn(arn + " is an Invalid Layer Arn.") return layer_version
python
def _compute_layer_version(is_defined_within_template, arn): """ Parses out the Layer version from the arn Parameters ---------- is_defined_within_template bool True if the resource is a Ref to a resource otherwise False arn str ARN of the Resource Returns ------- int The Version of the LayerVersion """ if is_defined_within_template: return None try: _, layer_version = arn.rsplit(':', 1) layer_version = int(layer_version) except ValueError: raise InvalidLayerVersionArn(arn + " is an Invalid Layer Arn.") return layer_version
[ "def", "_compute_layer_version", "(", "is_defined_within_template", ",", "arn", ")", ":", "if", "is_defined_within_template", ":", "return", "None", "try", ":", "_", ",", "layer_version", "=", "arn", ".", "rsplit", "(", "':'", ",", "1", ")", "layer_version", "...
Parses out the Layer version from the arn Parameters ---------- is_defined_within_template bool True if the resource is a Ref to a resource otherwise False arn str ARN of the Resource Returns ------- int The Version of the LayerVersion
[ "Parses", "out", "the", "Layer", "version", "from", "the", "arn" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/provider.py#L72-L99
29,950
awslabs/aws-sam-cli
samcli/commands/local/lib/provider.py
LayerVersion._compute_layer_name
def _compute_layer_name(is_defined_within_template, arn): """ Computes a unique name based on the LayerVersion Arn Format: <Name of the LayerVersion>-<Version of the LayerVersion>-<sha256 of the arn> Parameters ---------- is_defined_within_template bool True if the resource is a Ref to a resource otherwise False arn str ARN of the Resource Returns ------- str A unique name that represents the LayerVersion """ # If the Layer is defined in the template, the arn will represent the LogicalId of the LayerVersion Resource, # which does not require creating a name based on the arn. if is_defined_within_template: return arn try: _, layer_name, layer_version = arn.rsplit(':', 2) except ValueError: raise InvalidLayerVersionArn(arn + " is an Invalid Layer Arn.") return LayerVersion.LAYER_NAME_DELIMETER.join([layer_name, layer_version, hashlib.sha256(arn.encode('utf-8')).hexdigest()[0:10]])
python
def _compute_layer_name(is_defined_within_template, arn): """ Computes a unique name based on the LayerVersion Arn Format: <Name of the LayerVersion>-<Version of the LayerVersion>-<sha256 of the arn> Parameters ---------- is_defined_within_template bool True if the resource is a Ref to a resource otherwise False arn str ARN of the Resource Returns ------- str A unique name that represents the LayerVersion """ # If the Layer is defined in the template, the arn will represent the LogicalId of the LayerVersion Resource, # which does not require creating a name based on the arn. if is_defined_within_template: return arn try: _, layer_name, layer_version = arn.rsplit(':', 2) except ValueError: raise InvalidLayerVersionArn(arn + " is an Invalid Layer Arn.") return LayerVersion.LAYER_NAME_DELIMETER.join([layer_name, layer_version, hashlib.sha256(arn.encode('utf-8')).hexdigest()[0:10]])
[ "def", "_compute_layer_name", "(", "is_defined_within_template", ",", "arn", ")", ":", "# If the Layer is defined in the template, the arn will represent the LogicalId of the LayerVersion Resource,", "# which does not require creating a name based on the arn.", "if", "is_defined_within_templat...
Computes a unique name based on the LayerVersion Arn Format: <Name of the LayerVersion>-<Version of the LayerVersion>-<sha256 of the arn> Parameters ---------- is_defined_within_template bool True if the resource is a Ref to a resource otherwise False arn str ARN of the Resource Returns ------- str A unique name that represents the LayerVersion
[ "Computes", "a", "unique", "name", "based", "on", "the", "LayerVersion", "Arn" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/provider.py#L102-L134
29,951
awslabs/aws-sam-cli
samcli/lib/utils/osutils.py
mkdir_temp
def mkdir_temp(mode=0o755): """ Context manager that makes a temporary directory and yields it name. Directory is deleted after the context exits Parameters ---------- mode : octal Permissions to apply to the directory. Defaults to '755' because don't want directories world writable Returns ------- str Path to the directory """ temp_dir = None try: temp_dir = tempfile.mkdtemp() os.chmod(temp_dir, mode) yield temp_dir finally: if temp_dir: shutil.rmtree(temp_dir)
python
def mkdir_temp(mode=0o755): """ Context manager that makes a temporary directory and yields it name. Directory is deleted after the context exits Parameters ---------- mode : octal Permissions to apply to the directory. Defaults to '755' because don't want directories world writable Returns ------- str Path to the directory """ temp_dir = None try: temp_dir = tempfile.mkdtemp() os.chmod(temp_dir, mode) yield temp_dir finally: if temp_dir: shutil.rmtree(temp_dir)
[ "def", "mkdir_temp", "(", "mode", "=", "0o755", ")", ":", "temp_dir", "=", "None", "try", ":", "temp_dir", "=", "tempfile", ".", "mkdtemp", "(", ")", "os", ".", "chmod", "(", "temp_dir", ",", "mode", ")", "yield", "temp_dir", "finally", ":", "if", "t...
Context manager that makes a temporary directory and yields it name. Directory is deleted after the context exits Parameters ---------- mode : octal Permissions to apply to the directory. Defaults to '755' because don't want directories world writable Returns ------- str Path to the directory
[ "Context", "manager", "that", "makes", "a", "temporary", "directory", "and", "yields", "it", "name", ".", "Directory", "is", "deleted", "after", "the", "context", "exits" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/osutils.py#L14-L40
29,952
awslabs/aws-sam-cli
samcli/local/layers/layer_downloader.py
LayerDownloader.download_all
def download_all(self, layers, force=False): """ Download a list of layers to the cache Parameters ---------- layers list(samcli.commands.local.lib.provider.Layer) List of Layers representing the layer to be downloaded force bool True to download the layer even if it exists already on the system Returns ------- List(Path) List of Paths to where the layer was cached """ layer_dirs = [] for layer in layers: layer_dirs.append(self.download(layer, force)) return layer_dirs
python
def download_all(self, layers, force=False): """ Download a list of layers to the cache Parameters ---------- layers list(samcli.commands.local.lib.provider.Layer) List of Layers representing the layer to be downloaded force bool True to download the layer even if it exists already on the system Returns ------- List(Path) List of Paths to where the layer was cached """ layer_dirs = [] for layer in layers: layer_dirs.append(self.download(layer, force)) return layer_dirs
[ "def", "download_all", "(", "self", ",", "layers", ",", "force", "=", "False", ")", ":", "layer_dirs", "=", "[", "]", "for", "layer", "in", "layers", ":", "layer_dirs", ".", "append", "(", "self", ".", "download", "(", "layer", ",", "force", ")", ")"...
Download a list of layers to the cache Parameters ---------- layers list(samcli.commands.local.lib.provider.Layer) List of Layers representing the layer to be downloaded force bool True to download the layer even if it exists already on the system Returns ------- List(Path) List of Paths to where the layer was cached
[ "Download", "a", "list", "of", "layers", "to", "the", "cache" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/layers/layer_downloader.py#L54-L74
29,953
awslabs/aws-sam-cli
samcli/local/layers/layer_downloader.py
LayerDownloader.download
def download(self, layer, force=False): """ Download a given layer to the local cache. Parameters ---------- layer samcli.commands.local.lib.provider.Layer Layer representing the layer to be downloaded. force bool True to download the layer even if it exists already on the system Returns ------- Path Path object that represents where the layer is download to """ if layer.is_defined_within_template: LOG.info("%s is a local Layer in the template", layer.name) layer.codeuri = resolve_code_path(self.cwd, layer.codeuri) return layer # disabling no-member due to https://github.com/PyCQA/pylint/issues/1660 layer_path = Path(self.layer_cache).joinpath(layer.name).resolve() # pylint: disable=no-member is_layer_downloaded = self._is_layer_cached(layer_path) layer.codeuri = str(layer_path) if is_layer_downloaded and not force: LOG.info("%s is already cached. Skipping download", layer.arn) return layer layer_zip_path = layer.codeuri + '.zip' layer_zip_uri = self._fetch_layer_uri(layer) unzip_from_uri(layer_zip_uri, layer_zip_path, unzip_output_dir=layer.codeuri, progressbar_label='Downloading {}'.format(layer.layer_arn)) return layer
python
def download(self, layer, force=False): """ Download a given layer to the local cache. Parameters ---------- layer samcli.commands.local.lib.provider.Layer Layer representing the layer to be downloaded. force bool True to download the layer even if it exists already on the system Returns ------- Path Path object that represents where the layer is download to """ if layer.is_defined_within_template: LOG.info("%s is a local Layer in the template", layer.name) layer.codeuri = resolve_code_path(self.cwd, layer.codeuri) return layer # disabling no-member due to https://github.com/PyCQA/pylint/issues/1660 layer_path = Path(self.layer_cache).joinpath(layer.name).resolve() # pylint: disable=no-member is_layer_downloaded = self._is_layer_cached(layer_path) layer.codeuri = str(layer_path) if is_layer_downloaded and not force: LOG.info("%s is already cached. Skipping download", layer.arn) return layer layer_zip_path = layer.codeuri + '.zip' layer_zip_uri = self._fetch_layer_uri(layer) unzip_from_uri(layer_zip_uri, layer_zip_path, unzip_output_dir=layer.codeuri, progressbar_label='Downloading {}'.format(layer.layer_arn)) return layer
[ "def", "download", "(", "self", ",", "layer", ",", "force", "=", "False", ")", ":", "if", "layer", ".", "is_defined_within_template", ":", "LOG", ".", "info", "(", "\"%s is a local Layer in the template\"", ",", "layer", ".", "name", ")", "layer", ".", "code...
Download a given layer to the local cache. Parameters ---------- layer samcli.commands.local.lib.provider.Layer Layer representing the layer to be downloaded. force bool True to download the layer even if it exists already on the system Returns ------- Path Path object that represents where the layer is download to
[ "Download", "a", "given", "layer", "to", "the", "local", "cache", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/layers/layer_downloader.py#L76-L113
29,954
awslabs/aws-sam-cli
samcli/local/layers/layer_downloader.py
LayerDownloader._fetch_layer_uri
def _fetch_layer_uri(self, layer): """ Fetch the Layer Uri based on the LayerVersion Arn Parameters ---------- layer samcli.commands.local.lib.provider.LayerVersion LayerVersion to fetch Returns ------- str The Uri to download the LayerVersion Content from Raises ------ samcli.commands.local.cli_common.user_exceptions.NoCredentialsError When the Credentials given are not sufficient to call AWS Lambda """ try: layer_version_response = self.lambda_client.get_layer_version(LayerName=layer.layer_arn, VersionNumber=layer.version) except NoCredentialsError: raise CredentialsRequired("Layers require credentials to download the layers locally.") except ClientError as e: error_code = e.response.get('Error').get('Code') error_exc = { 'AccessDeniedException': CredentialsRequired( "Credentials provided are missing lambda:Getlayerversion policy that is needed to download the " "layer or you do not have permission to download the layer"), 'ResourceNotFoundException': ResourceNotFound("{} was not found.".format(layer.arn)) } if error_code in error_exc: raise error_exc[error_code] # If it was not 'AccessDeniedException' or 'ResourceNotFoundException' re-raise raise e return layer_version_response.get("Content").get("Location")
python
def _fetch_layer_uri(self, layer): """ Fetch the Layer Uri based on the LayerVersion Arn Parameters ---------- layer samcli.commands.local.lib.provider.LayerVersion LayerVersion to fetch Returns ------- str The Uri to download the LayerVersion Content from Raises ------ samcli.commands.local.cli_common.user_exceptions.NoCredentialsError When the Credentials given are not sufficient to call AWS Lambda """ try: layer_version_response = self.lambda_client.get_layer_version(LayerName=layer.layer_arn, VersionNumber=layer.version) except NoCredentialsError: raise CredentialsRequired("Layers require credentials to download the layers locally.") except ClientError as e: error_code = e.response.get('Error').get('Code') error_exc = { 'AccessDeniedException': CredentialsRequired( "Credentials provided are missing lambda:Getlayerversion policy that is needed to download the " "layer or you do not have permission to download the layer"), 'ResourceNotFoundException': ResourceNotFound("{} was not found.".format(layer.arn)) } if error_code in error_exc: raise error_exc[error_code] # If it was not 'AccessDeniedException' or 'ResourceNotFoundException' re-raise raise e return layer_version_response.get("Content").get("Location")
[ "def", "_fetch_layer_uri", "(", "self", ",", "layer", ")", ":", "try", ":", "layer_version_response", "=", "self", ".", "lambda_client", ".", "get_layer_version", "(", "LayerName", "=", "layer", ".", "layer_arn", ",", "VersionNumber", "=", "layer", ".", "versi...
Fetch the Layer Uri based on the LayerVersion Arn Parameters ---------- layer samcli.commands.local.lib.provider.LayerVersion LayerVersion to fetch Returns ------- str The Uri to download the LayerVersion Content from Raises ------ samcli.commands.local.cli_common.user_exceptions.NoCredentialsError When the Credentials given are not sufficient to call AWS Lambda
[ "Fetch", "the", "Layer", "Uri", "based", "on", "the", "LayerVersion", "Arn" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/layers/layer_downloader.py#L115-L154
29,955
awslabs/aws-sam-cli
samcli/local/layers/layer_downloader.py
LayerDownloader._create_cache
def _create_cache(layer_cache): """ Create the Cache directory if it does not exist. Parameters ---------- layer_cache Directory to where the layers should be cached Returns ------- None """ Path(layer_cache).mkdir(mode=0o700, parents=True, exist_ok=True)
python
def _create_cache(layer_cache): """ Create the Cache directory if it does not exist. Parameters ---------- layer_cache Directory to where the layers should be cached Returns ------- None """ Path(layer_cache).mkdir(mode=0o700, parents=True, exist_ok=True)
[ "def", "_create_cache", "(", "layer_cache", ")", ":", "Path", "(", "layer_cache", ")", ".", "mkdir", "(", "mode", "=", "0o700", ",", "parents", "=", "True", ",", "exist_ok", "=", "True", ")" ]
Create the Cache directory if it does not exist. Parameters ---------- layer_cache Directory to where the layers should be cached Returns ------- None
[ "Create", "the", "Cache", "directory", "if", "it", "does", "not", "exist", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/layers/layer_downloader.py#L174-L188
29,956
awslabs/aws-sam-cli
samcli/commands/validate/lib/sam_template_validator.py
SamTemplateValidator.is_valid
def is_valid(self): """ Runs the SAM Translator to determine if the template provided is valid. This is similar to running a ChangeSet in CloudFormation for a SAM Template Raises ------- InvalidSamDocumentException If the template is not valid, an InvalidSamDocumentException is raised """ managed_policy_map = self.managed_policy_loader.load() sam_translator = Translator(managed_policy_map=managed_policy_map, sam_parser=self.sam_parser, plugins=[]) self._replace_local_codeuri() try: template = sam_translator.translate(sam_template=self.sam_template, parameter_values={}) LOG.debug("Translated template is:\n%s", yaml_dump(template)) except InvalidDocumentException as e: raise InvalidSamDocumentException( functools.reduce(lambda message, error: message + ' ' + str(error), e.causes, str(e)))
python
def is_valid(self): """ Runs the SAM Translator to determine if the template provided is valid. This is similar to running a ChangeSet in CloudFormation for a SAM Template Raises ------- InvalidSamDocumentException If the template is not valid, an InvalidSamDocumentException is raised """ managed_policy_map = self.managed_policy_loader.load() sam_translator = Translator(managed_policy_map=managed_policy_map, sam_parser=self.sam_parser, plugins=[]) self._replace_local_codeuri() try: template = sam_translator.translate(sam_template=self.sam_template, parameter_values={}) LOG.debug("Translated template is:\n%s", yaml_dump(template)) except InvalidDocumentException as e: raise InvalidSamDocumentException( functools.reduce(lambda message, error: message + ' ' + str(error), e.causes, str(e)))
[ "def", "is_valid", "(", "self", ")", ":", "managed_policy_map", "=", "self", ".", "managed_policy_loader", ".", "load", "(", ")", "sam_translator", "=", "Translator", "(", "managed_policy_map", "=", "managed_policy_map", ",", "sam_parser", "=", "self", ".", "sam...
Runs the SAM Translator to determine if the template provided is valid. This is similar to running a ChangeSet in CloudFormation for a SAM Template Raises ------- InvalidSamDocumentException If the template is not valid, an InvalidSamDocumentException is raised
[ "Runs", "the", "SAM", "Translator", "to", "determine", "if", "the", "template", "provided", "is", "valid", ".", "This", "is", "similar", "to", "running", "a", "ChangeSet", "in", "CloudFormation", "for", "a", "SAM", "Template" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/validate/lib/sam_template_validator.py#L44-L68
29,957
awslabs/aws-sam-cli
samcli/commands/validate/lib/sam_template_validator.py
SamTemplateValidator._update_to_s3_uri
def _update_to_s3_uri(property_key, resource_property_dict, s3_uri_value="s3://bucket/value"): """ Updates the 'property_key' in the 'resource_property_dict' to the value of 's3_uri_value' Note: The function will mutate the resource_property_dict that is pass in Parameters ---------- property_key str, required Key in the resource_property_dict resource_property_dict dict, required Property dictionary of a Resource in the template to replace s3_uri_value str, optional Value to update the value of the property_key to """ uri_property = resource_property_dict.get(property_key, ".") # ignore if dict or already an S3 Uri if isinstance(uri_property, dict) or SamTemplateValidator.is_s3_uri(uri_property): return resource_property_dict[property_key] = s3_uri_value
python
def _update_to_s3_uri(property_key, resource_property_dict, s3_uri_value="s3://bucket/value"): """ Updates the 'property_key' in the 'resource_property_dict' to the value of 's3_uri_value' Note: The function will mutate the resource_property_dict that is pass in Parameters ---------- property_key str, required Key in the resource_property_dict resource_property_dict dict, required Property dictionary of a Resource in the template to replace s3_uri_value str, optional Value to update the value of the property_key to """ uri_property = resource_property_dict.get(property_key, ".") # ignore if dict or already an S3 Uri if isinstance(uri_property, dict) or SamTemplateValidator.is_s3_uri(uri_property): return resource_property_dict[property_key] = s3_uri_value
[ "def", "_update_to_s3_uri", "(", "property_key", ",", "resource_property_dict", ",", "s3_uri_value", "=", "\"s3://bucket/value\"", ")", ":", "uri_property", "=", "resource_property_dict", ".", "get", "(", "property_key", ",", "\".\"", ")", "# ignore if dict or already an ...
Updates the 'property_key' in the 'resource_property_dict' to the value of 's3_uri_value' Note: The function will mutate the resource_property_dict that is pass in Parameters ---------- property_key str, required Key in the resource_property_dict resource_property_dict dict, required Property dictionary of a Resource in the template to replace s3_uri_value str, optional Value to update the value of the property_key to
[ "Updates", "the", "property_key", "in", "the", "resource_property_dict", "to", "the", "value", "of", "s3_uri_value" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/validate/lib/sam_template_validator.py#L115-L136
29,958
awslabs/aws-sam-cli
samcli/commands/logs/logs_context.py
LogsCommandContext.formatter
def formatter(self): """ Creates and returns a Formatter capable of nicely formatting Lambda function logs Returns ------- LogsFormatter """ formatter_chain = [ LambdaLogMsgFormatters.colorize_errors, # Format JSON "before" highlighting the keywords. Otherwise, JSON will be invalid from all the # ANSI color codes and fail to pretty print JSONMsgFormatter.format_json, KeywordHighlighter(self._filter_pattern).highlight_keywords, ] return LogsFormatter(self.colored, formatter_chain)
python
def formatter(self): """ Creates and returns a Formatter capable of nicely formatting Lambda function logs Returns ------- LogsFormatter """ formatter_chain = [ LambdaLogMsgFormatters.colorize_errors, # Format JSON "before" highlighting the keywords. Otherwise, JSON will be invalid from all the # ANSI color codes and fail to pretty print JSONMsgFormatter.format_json, KeywordHighlighter(self._filter_pattern).highlight_keywords, ] return LogsFormatter(self.colored, formatter_chain)
[ "def", "formatter", "(", "self", ")", ":", "formatter_chain", "=", "[", "LambdaLogMsgFormatters", ".", "colorize_errors", ",", "# Format JSON \"before\" highlighting the keywords. Otherwise, JSON will be invalid from all the", "# ANSI color codes and fail to pretty print", "JSONMsgForm...
Creates and returns a Formatter capable of nicely formatting Lambda function logs Returns ------- LogsFormatter
[ "Creates", "and", "returns", "a", "Formatter", "capable", "of", "nicely", "formatting", "Lambda", "function", "logs" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/logs/logs_context.py#L103-L121
29,959
awslabs/aws-sam-cli
samcli/commands/logs/logs_context.py
LogsCommandContext.log_group_name
def log_group_name(self): """ Name of the AWS CloudWatch Log Group that we will be querying. It generates the name based on the Lambda Function name and stack name provided. Returns ------- str Name of the CloudWatch Log Group """ function_id = self._function_name if self._stack_name: function_id = self._get_resource_id_from_stack(self._cfn_client, self._stack_name, self._function_name) LOG.debug("Function with LogicalId '%s' in stack '%s' resolves to actual physical ID '%s'", self._function_name, self._stack_name, function_id) return LogGroupProvider.for_lambda_function(function_id)
python
def log_group_name(self): """ Name of the AWS CloudWatch Log Group that we will be querying. It generates the name based on the Lambda Function name and stack name provided. Returns ------- str Name of the CloudWatch Log Group """ function_id = self._function_name if self._stack_name: function_id = self._get_resource_id_from_stack(self._cfn_client, self._stack_name, self._function_name) LOG.debug("Function with LogicalId '%s' in stack '%s' resolves to actual physical ID '%s'", self._function_name, self._stack_name, function_id) return LogGroupProvider.for_lambda_function(function_id)
[ "def", "log_group_name", "(", "self", ")", ":", "function_id", "=", "self", ".", "_function_name", "if", "self", ".", "_stack_name", ":", "function_id", "=", "self", ".", "_get_resource_id_from_stack", "(", "self", ".", "_cfn_client", ",", "self", ".", "_stack...
Name of the AWS CloudWatch Log Group that we will be querying. It generates the name based on the Lambda Function name and stack name provided. Returns ------- str Name of the CloudWatch Log Group
[ "Name", "of", "the", "AWS", "CloudWatch", "Log", "Group", "that", "we", "will", "be", "querying", ".", "It", "generates", "the", "name", "based", "on", "the", "Lambda", "Function", "name", "and", "stack", "name", "provided", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/logs/logs_context.py#L132-L149
29,960
awslabs/aws-sam-cli
samcli/commands/logs/logs_context.py
LogsCommandContext._parse_time
def _parse_time(time_str, property_name): """ Parse the time from the given string, convert to UTC, and return the datetime object Parameters ---------- time_str : str The time to parse property_name : str Name of the property where this time came from. Used in the exception raised if time is not parseable Returns ------- datetime.datetime Parsed datetime object Raises ------ samcli.commands.exceptions.UserException If the string cannot be parsed as a timestamp """ if not time_str: return parsed = parse_date(time_str) if not parsed: raise UserException("Unable to parse the time provided by '{}'".format(property_name)) return to_utc(parsed)
python
def _parse_time(time_str, property_name): """ Parse the time from the given string, convert to UTC, and return the datetime object Parameters ---------- time_str : str The time to parse property_name : str Name of the property where this time came from. Used in the exception raised if time is not parseable Returns ------- datetime.datetime Parsed datetime object Raises ------ samcli.commands.exceptions.UserException If the string cannot be parsed as a timestamp """ if not time_str: return parsed = parse_date(time_str) if not parsed: raise UserException("Unable to parse the time provided by '{}'".format(property_name)) return to_utc(parsed)
[ "def", "_parse_time", "(", "time_str", ",", "property_name", ")", ":", "if", "not", "time_str", ":", "return", "parsed", "=", "parse_date", "(", "time_str", ")", "if", "not", "parsed", ":", "raise", "UserException", "(", "\"Unable to parse the time provided by '{}...
Parse the time from the given string, convert to UTC, and return the datetime object Parameters ---------- time_str : str The time to parse property_name : str Name of the property where this time came from. Used in the exception raised if time is not parseable Returns ------- datetime.datetime Parsed datetime object Raises ------ samcli.commands.exceptions.UserException If the string cannot be parsed as a timestamp
[ "Parse", "the", "time", "from", "the", "given", "string", "convert", "to", "UTC", "and", "return", "the", "datetime", "object" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/logs/logs_context.py#L191-L220
29,961
awslabs/aws-sam-cli
samcli/commands/logs/logs_context.py
LogsCommandContext._get_resource_id_from_stack
def _get_resource_id_from_stack(cfn_client, stack_name, logical_id): """ Given the LogicalID of a resource, call AWS CloudFormation to get physical ID of the resource within the specified stack. Parameters ---------- cfn_client CloudFormation client provided by AWS SDK stack_name : str Name of the stack to query logical_id : str LogicalId of the resource Returns ------- str Physical ID of the resource Raises ------ samcli.commands.exceptions.UserException If the stack or resource does not exist """ LOG.debug("Getting resource's PhysicalId from AWS CloudFormation stack. StackName=%s, LogicalId=%s", stack_name, logical_id) try: response = cfn_client.describe_stack_resource(StackName=stack_name, LogicalResourceId=logical_id) LOG.debug("Response from AWS CloudFormation %s", response) return response["StackResourceDetail"]["PhysicalResourceId"] except botocore.exceptions.ClientError as ex: LOG.debug("Unable to fetch resource name from CloudFormation Stack: " "StackName=%s, ResourceLogicalId=%s, Response=%s", stack_name, logical_id, ex.response) # The exception message already has a well formatted error message that we can surface to user raise UserException(str(ex))
python
def _get_resource_id_from_stack(cfn_client, stack_name, logical_id): """ Given the LogicalID of a resource, call AWS CloudFormation to get physical ID of the resource within the specified stack. Parameters ---------- cfn_client CloudFormation client provided by AWS SDK stack_name : str Name of the stack to query logical_id : str LogicalId of the resource Returns ------- str Physical ID of the resource Raises ------ samcli.commands.exceptions.UserException If the stack or resource does not exist """ LOG.debug("Getting resource's PhysicalId from AWS CloudFormation stack. StackName=%s, LogicalId=%s", stack_name, logical_id) try: response = cfn_client.describe_stack_resource(StackName=stack_name, LogicalResourceId=logical_id) LOG.debug("Response from AWS CloudFormation %s", response) return response["StackResourceDetail"]["PhysicalResourceId"] except botocore.exceptions.ClientError as ex: LOG.debug("Unable to fetch resource name from CloudFormation Stack: " "StackName=%s, ResourceLogicalId=%s, Response=%s", stack_name, logical_id, ex.response) # The exception message already has a well formatted error message that we can surface to user raise UserException(str(ex))
[ "def", "_get_resource_id_from_stack", "(", "cfn_client", ",", "stack_name", ",", "logical_id", ")", ":", "LOG", ".", "debug", "(", "\"Getting resource's PhysicalId from AWS CloudFormation stack. StackName=%s, LogicalId=%s\"", ",", "stack_name", ",", "logical_id", ")", "try", ...
Given the LogicalID of a resource, call AWS CloudFormation to get physical ID of the resource within the specified stack. Parameters ---------- cfn_client CloudFormation client provided by AWS SDK stack_name : str Name of the stack to query logical_id : str LogicalId of the resource Returns ------- str Physical ID of the resource Raises ------ samcli.commands.exceptions.UserException If the stack or resource does not exist
[ "Given", "the", "LogicalID", "of", "a", "resource", "call", "AWS", "CloudFormation", "to", "get", "physical", "ID", "of", "the", "resource", "within", "the", "specified", "stack", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/logs/logs_context.py#L223-L264
29,962
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_base_provider.py
SamBaseProvider.get_template
def get_template(template_dict, parameter_overrides=None): """ Given a SAM template dictionary, return a cleaned copy of the template where SAM plugins have been run and parameter values have been substituted. Parameters ---------- template_dict : dict unprocessed SAM template dictionary parameter_overrides: dict Optional dictionary of values for template parameters Returns ------- dict Processed SAM template """ template_dict = template_dict or {} if template_dict: template_dict = SamTranslatorWrapper(template_dict).run_plugins() template_dict = SamBaseProvider._resolve_parameters(template_dict, parameter_overrides) ResourceMetadataNormalizer.normalize(template_dict) return template_dict
python
def get_template(template_dict, parameter_overrides=None): """ Given a SAM template dictionary, return a cleaned copy of the template where SAM plugins have been run and parameter values have been substituted. Parameters ---------- template_dict : dict unprocessed SAM template dictionary parameter_overrides: dict Optional dictionary of values for template parameters Returns ------- dict Processed SAM template """ template_dict = template_dict or {} if template_dict: template_dict = SamTranslatorWrapper(template_dict).run_plugins() template_dict = SamBaseProvider._resolve_parameters(template_dict, parameter_overrides) ResourceMetadataNormalizer.normalize(template_dict) return template_dict
[ "def", "get_template", "(", "template_dict", ",", "parameter_overrides", "=", "None", ")", ":", "template_dict", "=", "template_dict", "or", "{", "}", "if", "template_dict", ":", "template_dict", "=", "SamTranslatorWrapper", "(", "template_dict", ")", ".", "run_pl...
Given a SAM template dictionary, return a cleaned copy of the template where SAM plugins have been run and parameter values have been substituted. Parameters ---------- template_dict : dict unprocessed SAM template dictionary parameter_overrides: dict Optional dictionary of values for template parameters Returns ------- dict Processed SAM template
[ "Given", "a", "SAM", "template", "dictionary", "return", "a", "cleaned", "copy", "of", "the", "template", "where", "SAM", "plugins", "have", "been", "run", "and", "parameter", "values", "have", "been", "substituted", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_base_provider.py#L41-L66
29,963
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_base_provider.py
SamBaseProvider._resolve_parameters
def _resolve_parameters(template_dict, parameter_overrides): """ In the given template, apply parameter values to resolve intrinsic functions Parameters ---------- template_dict : dict SAM Template parameter_overrides : dict Values for template parameters provided by user Returns ------- dict Resolved SAM template """ parameter_values = SamBaseProvider._get_parameter_values(template_dict, parameter_overrides) supported_intrinsics = {action.intrinsic_name: action() for action in SamBaseProvider._SUPPORTED_INTRINSICS} # Intrinsics resolver will mutate the original template return IntrinsicsResolver(parameters=parameter_values, supported_intrinsics=supported_intrinsics)\ .resolve_parameter_refs(template_dict)
python
def _resolve_parameters(template_dict, parameter_overrides): """ In the given template, apply parameter values to resolve intrinsic functions Parameters ---------- template_dict : dict SAM Template parameter_overrides : dict Values for template parameters provided by user Returns ------- dict Resolved SAM template """ parameter_values = SamBaseProvider._get_parameter_values(template_dict, parameter_overrides) supported_intrinsics = {action.intrinsic_name: action() for action in SamBaseProvider._SUPPORTED_INTRINSICS} # Intrinsics resolver will mutate the original template return IntrinsicsResolver(parameters=parameter_values, supported_intrinsics=supported_intrinsics)\ .resolve_parameter_refs(template_dict)
[ "def", "_resolve_parameters", "(", "template_dict", ",", "parameter_overrides", ")", ":", "parameter_values", "=", "SamBaseProvider", ".", "_get_parameter_values", "(", "template_dict", ",", "parameter_overrides", ")", "supported_intrinsics", "=", "{", "action", ".", "i...
In the given template, apply parameter values to resolve intrinsic functions Parameters ---------- template_dict : dict SAM Template parameter_overrides : dict Values for template parameters provided by user Returns ------- dict Resolved SAM template
[ "In", "the", "given", "template", "apply", "parameter", "values", "to", "resolve", "intrinsic", "functions" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_base_provider.py#L69-L93
29,964
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_base_provider.py
SamBaseProvider._get_parameter_values
def _get_parameter_values(template_dict, parameter_overrides): """ Construct a final list of values for CloudFormation template parameters based on user-supplied values, default values provided in template, and sane defaults for pseudo-parameters. Parameters ---------- template_dict : dict SAM template dictionary parameter_overrides : dict User-supplied values for CloudFormation template parameters Returns ------- dict Values for template parameters to substitute in template with """ default_values = SamBaseProvider._get_default_parameter_values(template_dict) # NOTE: Ordering of following statements is important. It makes sure that any user-supplied values # override the defaults parameter_values = {} parameter_values.update(SamBaseProvider._DEFAULT_PSEUDO_PARAM_VALUES) parameter_values.update(default_values) parameter_values.update(parameter_overrides or {}) return parameter_values
python
def _get_parameter_values(template_dict, parameter_overrides): """ Construct a final list of values for CloudFormation template parameters based on user-supplied values, default values provided in template, and sane defaults for pseudo-parameters. Parameters ---------- template_dict : dict SAM template dictionary parameter_overrides : dict User-supplied values for CloudFormation template parameters Returns ------- dict Values for template parameters to substitute in template with """ default_values = SamBaseProvider._get_default_parameter_values(template_dict) # NOTE: Ordering of following statements is important. It makes sure that any user-supplied values # override the defaults parameter_values = {} parameter_values.update(SamBaseProvider._DEFAULT_PSEUDO_PARAM_VALUES) parameter_values.update(default_values) parameter_values.update(parameter_overrides or {}) return parameter_values
[ "def", "_get_parameter_values", "(", "template_dict", ",", "parameter_overrides", ")", ":", "default_values", "=", "SamBaseProvider", ".", "_get_default_parameter_values", "(", "template_dict", ")", "# NOTE: Ordering of following statements is important. It makes sure that any user-s...
Construct a final list of values for CloudFormation template parameters based on user-supplied values, default values provided in template, and sane defaults for pseudo-parameters. Parameters ---------- template_dict : dict SAM template dictionary parameter_overrides : dict User-supplied values for CloudFormation template parameters Returns ------- dict Values for template parameters to substitute in template with
[ "Construct", "a", "final", "list", "of", "values", "for", "CloudFormation", "template", "parameters", "based", "on", "user", "-", "supplied", "values", "default", "values", "provided", "in", "template", "and", "sane", "defaults", "for", "pseudo", "-", "parameter...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_base_provider.py#L96-L124
29,965
awslabs/aws-sam-cli
samcli/local/docker/utils.py
to_posix_path
def to_posix_path(code_path): """ Change the code_path to be of unix-style if running on windows when supplied with an absolute windows path. Parameters ---------- code_path : str Directory in the host operating system that should be mounted within the container. Returns ------- str Posix equivalent of absolute windows style path. Examples -------- >>> to_posix_path('/Users/UserName/sam-app') /Users/UserName/sam-app >>> to_posix_path('C:\\\\Users\\\\UserName\\\\AppData\\\\Local\\\\Temp\\\\mydir') /c/Users/UserName/AppData/Local/Temp/mydir """ return re.sub("^([A-Za-z])+:", lambda match: posixpath.sep + match.group().replace(":", "").lower(), pathlib.PureWindowsPath(code_path).as_posix()) if os.name == "nt" else code_path
python
def to_posix_path(code_path): """ Change the code_path to be of unix-style if running on windows when supplied with an absolute windows path. Parameters ---------- code_path : str Directory in the host operating system that should be mounted within the container. Returns ------- str Posix equivalent of absolute windows style path. Examples -------- >>> to_posix_path('/Users/UserName/sam-app') /Users/UserName/sam-app >>> to_posix_path('C:\\\\Users\\\\UserName\\\\AppData\\\\Local\\\\Temp\\\\mydir') /c/Users/UserName/AppData/Local/Temp/mydir """ return re.sub("^([A-Za-z])+:", lambda match: posixpath.sep + match.group().replace(":", "").lower(), pathlib.PureWindowsPath(code_path).as_posix()) if os.name == "nt" else code_path
[ "def", "to_posix_path", "(", "code_path", ")", ":", "return", "re", ".", "sub", "(", "\"^([A-Za-z])+:\"", ",", "lambda", "match", ":", "posixpath", ".", "sep", "+", "match", ".", "group", "(", ")", ".", "replace", "(", "\":\"", ",", "\"\"", ")", ".", ...
Change the code_path to be of unix-style if running on windows when supplied with an absolute windows path. Parameters ---------- code_path : str Directory in the host operating system that should be mounted within the container. Returns ------- str Posix equivalent of absolute windows style path. Examples -------- >>> to_posix_path('/Users/UserName/sam-app') /Users/UserName/sam-app >>> to_posix_path('C:\\\\Users\\\\UserName\\\\AppData\\\\Local\\\\Temp\\\\mydir') /c/Users/UserName/AppData/Local/Temp/mydir
[ "Change", "the", "code_path", "to", "be", "of", "unix", "-", "style", "if", "running", "on", "windows", "when", "supplied", "with", "an", "absolute", "windows", "path", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/utils.py#L14-L36
29,966
awslabs/aws-sam-cli
samcli/local/docker/lambda_container.py
LambdaContainer._get_entry_point
def _get_entry_point(runtime, debug_options=None): # pylint: disable=too-many-branches """ Returns the entry point for the container. The default value for the entry point is already configured in the Dockerfile. We override this default specifically when enabling debugging. The overridden entry point includes a few extra flags to start the runtime in debug mode. :param string runtime: Lambda function runtime name :param int debug_port: Optional, port for debugger :param string debug_args: Optional additional arguments passed to the entry point. :return list: List containing the new entry points. Each element in the list is one portion of the command. ie. if command is ``node index.js arg1 arg2``, then this list will be ["node", "index.js", "arg1", "arg2"] """ if not debug_options: return None if runtime not in LambdaContainer._supported_runtimes(): raise DebuggingNotSupported( "Debugging is not currently supported for {}".format(runtime)) debug_port = debug_options.debug_port debug_args_list = [] if debug_options.debug_args: debug_args_list = debug_options.debug_args.split(" ") # configs from: https://github.com/lambci/docker-lambda # to which we add the extra debug mode options entrypoint = None if runtime == Runtime.java8.value: entrypoint = ["/usr/bin/java"] \ + debug_args_list \ + [ "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,quiet=y,address=" + str(debug_port), "-XX:MaxHeapSize=2834432k", "-XX:MaxMetaspaceSize=163840k", "-XX:ReservedCodeCacheSize=81920k", "-XX:+UseSerialGC", # "-Xshare:on", doesn't work in conjunction with the debug options "-XX:-TieredCompilation", "-Djava.net.preferIPv4Stack=true", "-jar", "/var/runtime/lib/LambdaJavaRTEntry-1.0.jar", ] elif runtime in (Runtime.dotnetcore20.value, Runtime.dotnetcore21.value): entrypoint = ["/var/lang/bin/dotnet"] \ + debug_args_list \ + [ "/var/runtime/MockBootstraps.dll", "--debugger-spin-wait" ] elif runtime == Runtime.go1x.value: entrypoint = ["/var/runtime/aws-lambda-go"] \ + debug_args_list 
\ + [ "-debug=true", "-delvePort=" + str(debug_port), "-delvePath=" + LambdaContainer._DEFAULT_CONTAINER_DBG_GO_PATH, ] elif runtime == Runtime.nodejs.value: entrypoint = ["/usr/bin/node"] \ + debug_args_list \ + [ "--debug-brk=" + str(debug_port), "--nolazy", "--max-old-space-size=1229", "--max-new-space-size=153", "--max-executable-size=153", "--expose-gc", "/var/runtime/node_modules/awslambda/bin/awslambda", ] elif runtime == Runtime.nodejs43.value: entrypoint = ["/usr/local/lib64/node-v4.3.x/bin/node"] \ + debug_args_list \ + [ "--debug-brk=" + str(debug_port), "--nolazy", "--max-old-space-size=2547", "--max-semi-space-size=150", "--max-executable-size=160", "--expose-gc", "/var/runtime/node_modules/awslambda/index.js", ] elif runtime == Runtime.nodejs610.value: entrypoint = ["/var/lang/bin/node"] \ + debug_args_list \ + [ "--debug-brk=" + str(debug_port), "--nolazy", "--max-old-space-size=2547", "--max-semi-space-size=150", "--max-executable-size=160", "--expose-gc", "/var/runtime/node_modules/awslambda/index.js", ] elif runtime == Runtime.nodejs810.value: entrypoint = ["/var/lang/bin/node"] \ + debug_args_list \ + [ # Node8 requires the host to be explicitly set in order to bind to localhost # instead of 127.0.0.1. https://github.com/nodejs/node/issues/11591#issuecomment-283110138 "--inspect-brk=0.0.0.0:" + str(debug_port), "--nolazy", "--expose-gc", "--max-semi-space-size=150", "--max-old-space-size=2707", "/var/runtime/node_modules/awslambda/index.js", ] elif runtime == Runtime.python27.value: entrypoint = ["/usr/bin/python2.7"] \ + debug_args_list \ + [ "/var/runtime/awslambda/bootstrap.py" ] elif runtime == Runtime.python36.value: entrypoint = ["/var/lang/bin/python3.6"] \ + debug_args_list \ + [ "/var/runtime/awslambda/bootstrap.py" ] elif runtime == Runtime.python37.value: entrypoint = ["/var/rapid/init", "--bootstrap", "/var/lang/bin/python3.7", "--bootstrap-args", json.dumps(debug_args_list + ["/var/runtime/bootstrap"]) ] return entrypoint
python
def _get_entry_point(runtime, debug_options=None): # pylint: disable=too-many-branches """ Returns the entry point for the container. The default value for the entry point is already configured in the Dockerfile. We override this default specifically when enabling debugging. The overridden entry point includes a few extra flags to start the runtime in debug mode. :param string runtime: Lambda function runtime name :param int debug_port: Optional, port for debugger :param string debug_args: Optional additional arguments passed to the entry point. :return list: List containing the new entry points. Each element in the list is one portion of the command. ie. if command is ``node index.js arg1 arg2``, then this list will be ["node", "index.js", "arg1", "arg2"] """ if not debug_options: return None if runtime not in LambdaContainer._supported_runtimes(): raise DebuggingNotSupported( "Debugging is not currently supported for {}".format(runtime)) debug_port = debug_options.debug_port debug_args_list = [] if debug_options.debug_args: debug_args_list = debug_options.debug_args.split(" ") # configs from: https://github.com/lambci/docker-lambda # to which we add the extra debug mode options entrypoint = None if runtime == Runtime.java8.value: entrypoint = ["/usr/bin/java"] \ + debug_args_list \ + [ "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,quiet=y,address=" + str(debug_port), "-XX:MaxHeapSize=2834432k", "-XX:MaxMetaspaceSize=163840k", "-XX:ReservedCodeCacheSize=81920k", "-XX:+UseSerialGC", # "-Xshare:on", doesn't work in conjunction with the debug options "-XX:-TieredCompilation", "-Djava.net.preferIPv4Stack=true", "-jar", "/var/runtime/lib/LambdaJavaRTEntry-1.0.jar", ] elif runtime in (Runtime.dotnetcore20.value, Runtime.dotnetcore21.value): entrypoint = ["/var/lang/bin/dotnet"] \ + debug_args_list \ + [ "/var/runtime/MockBootstraps.dll", "--debugger-spin-wait" ] elif runtime == Runtime.go1x.value: entrypoint = ["/var/runtime/aws-lambda-go"] \ + debug_args_list 
\ + [ "-debug=true", "-delvePort=" + str(debug_port), "-delvePath=" + LambdaContainer._DEFAULT_CONTAINER_DBG_GO_PATH, ] elif runtime == Runtime.nodejs.value: entrypoint = ["/usr/bin/node"] \ + debug_args_list \ + [ "--debug-brk=" + str(debug_port), "--nolazy", "--max-old-space-size=1229", "--max-new-space-size=153", "--max-executable-size=153", "--expose-gc", "/var/runtime/node_modules/awslambda/bin/awslambda", ] elif runtime == Runtime.nodejs43.value: entrypoint = ["/usr/local/lib64/node-v4.3.x/bin/node"] \ + debug_args_list \ + [ "--debug-brk=" + str(debug_port), "--nolazy", "--max-old-space-size=2547", "--max-semi-space-size=150", "--max-executable-size=160", "--expose-gc", "/var/runtime/node_modules/awslambda/index.js", ] elif runtime == Runtime.nodejs610.value: entrypoint = ["/var/lang/bin/node"] \ + debug_args_list \ + [ "--debug-brk=" + str(debug_port), "--nolazy", "--max-old-space-size=2547", "--max-semi-space-size=150", "--max-executable-size=160", "--expose-gc", "/var/runtime/node_modules/awslambda/index.js", ] elif runtime == Runtime.nodejs810.value: entrypoint = ["/var/lang/bin/node"] \ + debug_args_list \ + [ # Node8 requires the host to be explicitly set in order to bind to localhost # instead of 127.0.0.1. https://github.com/nodejs/node/issues/11591#issuecomment-283110138 "--inspect-brk=0.0.0.0:" + str(debug_port), "--nolazy", "--expose-gc", "--max-semi-space-size=150", "--max-old-space-size=2707", "/var/runtime/node_modules/awslambda/index.js", ] elif runtime == Runtime.python27.value: entrypoint = ["/usr/bin/python2.7"] \ + debug_args_list \ + [ "/var/runtime/awslambda/bootstrap.py" ] elif runtime == Runtime.python36.value: entrypoint = ["/var/lang/bin/python3.6"] \ + debug_args_list \ + [ "/var/runtime/awslambda/bootstrap.py" ] elif runtime == Runtime.python37.value: entrypoint = ["/var/rapid/init", "--bootstrap", "/var/lang/bin/python3.7", "--bootstrap-args", json.dumps(debug_args_list + ["/var/runtime/bootstrap"]) ] return entrypoint
[ "def", "_get_entry_point", "(", "runtime", ",", "debug_options", "=", "None", ")", ":", "# pylint: disable=too-many-branches", "if", "not", "debug_options", ":", "return", "None", "if", "runtime", "not", "in", "LambdaContainer", ".", "_supported_runtimes", "(", ")",...
Returns the entry point for the container. The default value for the entry point is already configured in the Dockerfile. We override this default specifically when enabling debugging. The overridden entry point includes a few extra flags to start the runtime in debug mode. :param string runtime: Lambda function runtime name :param int debug_port: Optional, port for debugger :param string debug_args: Optional additional arguments passed to the entry point. :return list: List containing the new entry points. Each element in the list is one portion of the command. ie. if command is ``node index.js arg1 arg2``, then this list will be ["node", "index.js", "arg1", "arg2"]
[ "Returns", "the", "entry", "point", "for", "the", "container", ".", "The", "default", "value", "for", "the", "entry", "point", "is", "already", "configured", "in", "the", "Dockerfile", ".", "We", "override", "this", "default", "specifically", "when", "enabling...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/lambda_container.py#L161-L305
29,967
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
SamApiProvider._extract_apis
def _extract_apis(self, resources): """ Extract all Implicit Apis (Apis defined through Serverless Function with an Api Event :param dict resources: Dictionary of SAM/CloudFormation resources :return: List of nametuple Api """ # Some properties like BinaryMediaTypes, Cors are set once on the resource but need to be applied to each API. # For Implicit APIs, which are defined on the Function resource, these properties # are defined on a AWS::Serverless::Api resource with logical ID "ServerlessRestApi". Therefore, no matter # if it is an implicit API or an explicit API, there is a corresponding resource of type AWS::Serverless::Api # that contains these additional configurations. # # We use this assumption in the following loop to collect information from resources of type # AWS::Serverless::Api. We also extract API from Serverless::Function resource and add them to the # corresponding Serverless::Api resource. This is all done using the ``collector``. collector = ApiCollector() for logical_id, resource in resources.items(): resource_type = resource.get(SamApiProvider._TYPE) if resource_type == SamApiProvider._SERVERLESS_FUNCTION: self._extract_apis_from_function(logical_id, resource, collector) if resource_type == SamApiProvider._SERVERLESS_API: self._extract_from_serverless_api(logical_id, resource, collector) apis = SamApiProvider._merge_apis(collector) return self._normalize_apis(apis)
python
def _extract_apis(self, resources): """ Extract all Implicit Apis (Apis defined through Serverless Function with an Api Event :param dict resources: Dictionary of SAM/CloudFormation resources :return: List of nametuple Api """ # Some properties like BinaryMediaTypes, Cors are set once on the resource but need to be applied to each API. # For Implicit APIs, which are defined on the Function resource, these properties # are defined on a AWS::Serverless::Api resource with logical ID "ServerlessRestApi". Therefore, no matter # if it is an implicit API or an explicit API, there is a corresponding resource of type AWS::Serverless::Api # that contains these additional configurations. # # We use this assumption in the following loop to collect information from resources of type # AWS::Serverless::Api. We also extract API from Serverless::Function resource and add them to the # corresponding Serverless::Api resource. This is all done using the ``collector``. collector = ApiCollector() for logical_id, resource in resources.items(): resource_type = resource.get(SamApiProvider._TYPE) if resource_type == SamApiProvider._SERVERLESS_FUNCTION: self._extract_apis_from_function(logical_id, resource, collector) if resource_type == SamApiProvider._SERVERLESS_API: self._extract_from_serverless_api(logical_id, resource, collector) apis = SamApiProvider._merge_apis(collector) return self._normalize_apis(apis)
[ "def", "_extract_apis", "(", "self", ",", "resources", ")", ":", "# Some properties like BinaryMediaTypes, Cors are set once on the resource but need to be applied to each API.", "# For Implicit APIs, which are defined on the Function resource, these properties", "# are defined on a AWS::Serverl...
Extract all Implicit Apis (Apis defined through Serverless Function with an Api Event :param dict resources: Dictionary of SAM/CloudFormation resources :return: List of nametuple Api
[ "Extract", "all", "Implicit", "Apis", "(", "Apis", "defined", "through", "Serverless", "Function", "with", "an", "Api", "Event" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L76-L107
29,968
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
SamApiProvider._merge_apis
def _merge_apis(collector): """ Quite often, an API is defined both in Implicit and Explicit API definitions. In such cases, Implicit API definition wins because that conveys clear intent that the API is backed by a function. This method will merge two such list of Apis with the right order of precedence. If a Path+Method combination is defined in both the places, only one wins. Parameters ---------- collector : ApiCollector Collector object that holds all the APIs specified in the template Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs obtained by combining both the input lists. """ implicit_apis = [] explicit_apis = [] # Store implicit and explicit APIs separately in order to merge them later in the correct order # Implicit APIs are defined on a resource with logicalID ServerlessRestApi for logical_id, apis in collector: if logical_id == SamApiProvider._IMPLICIT_API_RESOURCE_ID: implicit_apis.extend(apis) else: explicit_apis.extend(apis) # We will use "path+method" combination as key to this dictionary and store the Api config for this combination. # If an path+method combo already exists, then overwrite it if and only if this is an implicit API all_apis = {} # By adding implicit APIs to the end of the list, they will be iterated last. If a configuration was already # written by explicit API, it will be overriden by implicit API, just by virtue of order of iteration. all_configs = explicit_apis + implicit_apis for config in all_configs: # Normalize the methods before de-duping to allow an ANY method in implicit API to override a regular HTTP # method on explicit API. 
for normalized_method in SamApiProvider._normalize_http_methods(config.method): key = config.path + normalized_method all_apis[key] = config result = set(all_apis.values()) # Assign to a set() to de-dupe LOG.debug("Removed duplicates from '%d' Explicit APIs and '%d' Implicit APIs to produce '%d' APIs", len(explicit_apis), len(implicit_apis), len(result)) return list(result)
python
def _merge_apis(collector): """ Quite often, an API is defined both in Implicit and Explicit API definitions. In such cases, Implicit API definition wins because that conveys clear intent that the API is backed by a function. This method will merge two such list of Apis with the right order of precedence. If a Path+Method combination is defined in both the places, only one wins. Parameters ---------- collector : ApiCollector Collector object that holds all the APIs specified in the template Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs obtained by combining both the input lists. """ implicit_apis = [] explicit_apis = [] # Store implicit and explicit APIs separately in order to merge them later in the correct order # Implicit APIs are defined on a resource with logicalID ServerlessRestApi for logical_id, apis in collector: if logical_id == SamApiProvider._IMPLICIT_API_RESOURCE_ID: implicit_apis.extend(apis) else: explicit_apis.extend(apis) # We will use "path+method" combination as key to this dictionary and store the Api config for this combination. # If an path+method combo already exists, then overwrite it if and only if this is an implicit API all_apis = {} # By adding implicit APIs to the end of the list, they will be iterated last. If a configuration was already # written by explicit API, it will be overriden by implicit API, just by virtue of order of iteration. all_configs = explicit_apis + implicit_apis for config in all_configs: # Normalize the methods before de-duping to allow an ANY method in implicit API to override a regular HTTP # method on explicit API. 
for normalized_method in SamApiProvider._normalize_http_methods(config.method): key = config.path + normalized_method all_apis[key] = config result = set(all_apis.values()) # Assign to a set() to de-dupe LOG.debug("Removed duplicates from '%d' Explicit APIs and '%d' Implicit APIs to produce '%d' APIs", len(explicit_apis), len(implicit_apis), len(result)) return list(result)
[ "def", "_merge_apis", "(", "collector", ")", ":", "implicit_apis", "=", "[", "]", "explicit_apis", "=", "[", "]", "# Store implicit and explicit APIs separately in order to merge them later in the correct order", "# Implicit APIs are defined on a resource with logicalID ServerlessRestA...
Quite often, an API is defined both in Implicit and Explicit API definitions. In such cases, Implicit API definition wins because that conveys clear intent that the API is backed by a function. This method will merge two such list of Apis with the right order of precedence. If a Path+Method combination is defined in both the places, only one wins. Parameters ---------- collector : ApiCollector Collector object that holds all the APIs specified in the template Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs obtained by combining both the input lists.
[ "Quite", "often", "an", "API", "is", "defined", "both", "in", "Implicit", "and", "Explicit", "API", "definitions", ".", "In", "such", "cases", "Implicit", "API", "definition", "wins", "because", "that", "conveys", "clear", "intent", "that", "the", "API", "is...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L150-L198
29,969
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
SamApiProvider._normalize_apis
def _normalize_apis(apis): """ Normalize the APIs to use standard method name Parameters ---------- apis : list of samcli.commands.local.lib.provider.Api List of APIs to replace normalize Returns ------- list of samcli.commands.local.lib.provider.Api List of normalized APIs """ result = list() for api in apis: for normalized_method in SamApiProvider._normalize_http_methods(api.method): # _replace returns a copy of the namedtuple. This is the official way of creating copies of namedtuple result.append(api._replace(method=normalized_method)) return result
python
def _normalize_apis(apis): """ Normalize the APIs to use standard method name Parameters ---------- apis : list of samcli.commands.local.lib.provider.Api List of APIs to replace normalize Returns ------- list of samcli.commands.local.lib.provider.Api List of normalized APIs """ result = list() for api in apis: for normalized_method in SamApiProvider._normalize_http_methods(api.method): # _replace returns a copy of the namedtuple. This is the official way of creating copies of namedtuple result.append(api._replace(method=normalized_method)) return result
[ "def", "_normalize_apis", "(", "apis", ")", ":", "result", "=", "list", "(", ")", "for", "api", "in", "apis", ":", "for", "normalized_method", "in", "SamApiProvider", ".", "_normalize_http_methods", "(", "api", ".", "method", ")", ":", "# _replace returns a co...
Normalize the APIs to use standard method name Parameters ---------- apis : list of samcli.commands.local.lib.provider.Api List of APIs to replace normalize Returns ------- list of samcli.commands.local.lib.provider.Api List of normalized APIs
[ "Normalize", "the", "APIs", "to", "use", "standard", "method", "name" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L201-L222
29,970
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
SamApiProvider._extract_apis_from_function
def _extract_apis_from_function(logical_id, function_resource, collector): """ Fetches a list of APIs configured for this SAM Function resource. Parameters ---------- logical_id : str Logical ID of the resource function_resource : dict Contents of the function resource including its properties collector : ApiCollector Instance of the API collector that where we will save the API information """ resource_properties = function_resource.get("Properties", {}) serverless_function_events = resource_properties.get(SamApiProvider._FUNCTION_EVENT, {}) SamApiProvider._extract_apis_from_events(logical_id, serverless_function_events, collector)
python
def _extract_apis_from_function(logical_id, function_resource, collector): """ Fetches a list of APIs configured for this SAM Function resource. Parameters ---------- logical_id : str Logical ID of the resource function_resource : dict Contents of the function resource including its properties collector : ApiCollector Instance of the API collector that where we will save the API information """ resource_properties = function_resource.get("Properties", {}) serverless_function_events = resource_properties.get(SamApiProvider._FUNCTION_EVENT, {}) SamApiProvider._extract_apis_from_events(logical_id, serverless_function_events, collector)
[ "def", "_extract_apis_from_function", "(", "logical_id", ",", "function_resource", ",", "collector", ")", ":", "resource_properties", "=", "function_resource", ".", "get", "(", "\"Properties\"", ",", "{", "}", ")", "serverless_function_events", "=", "resource_properties...
Fetches a list of APIs configured for this SAM Function resource. Parameters ---------- logical_id : str Logical ID of the resource function_resource : dict Contents of the function resource including its properties collector : ApiCollector Instance of the API collector that where we will save the API information
[ "Fetches", "a", "list", "of", "APIs", "configured", "for", "this", "SAM", "Function", "resource", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L225-L243
29,971
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
SamApiProvider._normalize_http_methods
def _normalize_http_methods(http_method): """ Normalizes Http Methods. Api Gateway allows a Http Methods of ANY. This is a special verb to denote all supported Http Methods on Api Gateway. :param str http_method: Http method :yield str: Either the input http_method or one of the _ANY_HTTP_METHODS (normalized Http Methods) """ if http_method.upper() == 'ANY': for method in SamApiProvider._ANY_HTTP_METHODS: yield method.upper() else: yield http_method.upper()
python
def _normalize_http_methods(http_method): """ Normalizes Http Methods. Api Gateway allows a Http Methods of ANY. This is a special verb to denote all supported Http Methods on Api Gateway. :param str http_method: Http method :yield str: Either the input http_method or one of the _ANY_HTTP_METHODS (normalized Http Methods) """ if http_method.upper() == 'ANY': for method in SamApiProvider._ANY_HTTP_METHODS: yield method.upper() else: yield http_method.upper()
[ "def", "_normalize_http_methods", "(", "http_method", ")", ":", "if", "http_method", ".", "upper", "(", ")", "==", "'ANY'", ":", "for", "method", "in", "SamApiProvider", ".", "_ANY_HTTP_METHODS", ":", "yield", "method", ".", "upper", "(", ")", "else", ":", ...
Normalizes Http Methods. Api Gateway allows a Http Methods of ANY. This is a special verb to denote all supported Http Methods on Api Gateway. :param str http_method: Http method :yield str: Either the input http_method or one of the _ANY_HTTP_METHODS (normalized Http Methods)
[ "Normalizes", "Http", "Methods", ".", "Api", "Gateway", "allows", "a", "Http", "Methods", "of", "ANY", ".", "This", "is", "a", "special", "verb", "to", "denote", "all", "supported", "Http", "Methods", "on", "Api", "Gateway", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L301-L314
29,972
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
ApiCollector.add_apis
def add_apis(self, logical_id, apis): """ Stores the given APIs tagged under the given logicalId Parameters ---------- logical_id : str LogicalId of the AWS::Serverless::Api resource apis : list of samcli.commands.local.lib.provider.Api List of APIs available in this resource """ properties = self._get_properties(logical_id) properties.apis.extend(apis)
python
def add_apis(self, logical_id, apis): """ Stores the given APIs tagged under the given logicalId Parameters ---------- logical_id : str LogicalId of the AWS::Serverless::Api resource apis : list of samcli.commands.local.lib.provider.Api List of APIs available in this resource """ properties = self._get_properties(logical_id) properties.apis.extend(apis)
[ "def", "add_apis", "(", "self", ",", "logical_id", ",", "apis", ")", ":", "properties", "=", "self", ".", "_get_properties", "(", "logical_id", ")", "properties", ".", "apis", ".", "extend", "(", "apis", ")" ]
Stores the given APIs tagged under the given logicalId Parameters ---------- logical_id : str LogicalId of the AWS::Serverless::Api resource apis : list of samcli.commands.local.lib.provider.Api List of APIs available in this resource
[ "Stores", "the", "given", "APIs", "tagged", "under", "the", "given", "logicalId" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L350-L363
29,973
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
ApiCollector.add_binary_media_types
def add_binary_media_types(self, logical_id, binary_media_types): """ Stores the binary media type configuration for the API with given logical ID Parameters ---------- logical_id : str LogicalId of the AWS::Serverless::Api resource binary_media_types : list of str List of binary media types supported by this resource """ properties = self._get_properties(logical_id) binary_media_types = binary_media_types or [] for value in binary_media_types: normalized_value = self._normalize_binary_media_type(value) # If the value is not supported, then just skip it. if normalized_value: properties.binary_media_types.add(normalized_value) else: LOG.debug("Unsupported data type of binary media type value of resource '%s'", logical_id)
python
def add_binary_media_types(self, logical_id, binary_media_types): """ Stores the binary media type configuration for the API with given logical ID Parameters ---------- logical_id : str LogicalId of the AWS::Serverless::Api resource binary_media_types : list of str List of binary media types supported by this resource """ properties = self._get_properties(logical_id) binary_media_types = binary_media_types or [] for value in binary_media_types: normalized_value = self._normalize_binary_media_type(value) # If the value is not supported, then just skip it. if normalized_value: properties.binary_media_types.add(normalized_value) else: LOG.debug("Unsupported data type of binary media type value of resource '%s'", logical_id)
[ "def", "add_binary_media_types", "(", "self", ",", "logical_id", ",", "binary_media_types", ")", ":", "properties", "=", "self", ".", "_get_properties", "(", "logical_id", ")", "binary_media_types", "=", "binary_media_types", "or", "[", "]", "for", "value", "in", ...
Stores the binary media type configuration for the API with given logical ID Parameters ---------- logical_id : str LogicalId of the AWS::Serverless::Api resource binary_media_types : list of str List of binary media types supported by this resource
[ "Stores", "the", "binary", "media", "type", "configuration", "for", "the", "API", "with", "given", "logical", "ID" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L365-L388
29,974
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
ApiCollector._get_apis_with_config
def _get_apis_with_config(self, logical_id): """ Returns the list of APIs in this resource along with other extra configuration such as binary media types, cors etc. Additional configuration is merged directly into the API data because these properties, although defined globally, actually apply to each API. Parameters ---------- logical_id : str Logical ID of the resource to fetch data for Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs with additional configurations for the resource with given logicalId. If there are no APIs, then it returns an empty list """ properties = self._get_properties(logical_id) # These configs need to be applied to each API binary_media = sorted(list(properties.binary_media_types)) # Also sort the list to keep the ordering stable cors = properties.cors result = [] for api in properties.apis: # Create a copy of the API with updated configuration updated_api = api._replace(binary_media_types=binary_media, cors=cors) result.append(updated_api) return result
python
def _get_apis_with_config(self, logical_id): """ Returns the list of APIs in this resource along with other extra configuration such as binary media types, cors etc. Additional configuration is merged directly into the API data because these properties, although defined globally, actually apply to each API. Parameters ---------- logical_id : str Logical ID of the resource to fetch data for Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs with additional configurations for the resource with given logicalId. If there are no APIs, then it returns an empty list """ properties = self._get_properties(logical_id) # These configs need to be applied to each API binary_media = sorted(list(properties.binary_media_types)) # Also sort the list to keep the ordering stable cors = properties.cors result = [] for api in properties.apis: # Create a copy of the API with updated configuration updated_api = api._replace(binary_media_types=binary_media, cors=cors) result.append(updated_api) return result
[ "def", "_get_apis_with_config", "(", "self", ",", "logical_id", ")", ":", "properties", "=", "self", ".", "_get_properties", "(", "logical_id", ")", "# These configs need to be applied to each API", "binary_media", "=", "sorted", "(", "list", "(", "properties", ".", ...
Returns the list of APIs in this resource along with other extra configuration such as binary media types, cors etc. Additional configuration is merged directly into the API data because these properties, although defined globally, actually apply to each API. Parameters ---------- logical_id : str Logical ID of the resource to fetch data for Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs with additional configurations for the resource with given logicalId. If there are no APIs, then it returns an empty list
[ "Returns", "the", "list", "of", "APIs", "in", "this", "resource", "along", "with", "other", "extra", "configuration", "such", "as", "binary", "media", "types", "cors", "etc", ".", "Additional", "configuration", "is", "merged", "directly", "into", "the", "API",...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L390-L421
29,975
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_api_provider.py
ApiCollector._get_properties
def _get_properties(self, logical_id): """ Returns the properties of resource with given logical ID. If a resource is not found, then it returns an empty data. Parameters ---------- logical_id : str Logical ID of the resource Returns ------- samcli.commands.local.lib.sam_api_provider.ApiCollector.Properties Properties object for this resource. """ if logical_id not in self.by_resource: self.by_resource[logical_id] = self.Properties(apis=[], # Use a set() to be able to easily de-dupe binary_media_types=set(), cors=None) return self.by_resource[logical_id]
python
def _get_properties(self, logical_id): """ Returns the properties of resource with given logical ID. If a resource is not found, then it returns an empty data. Parameters ---------- logical_id : str Logical ID of the resource Returns ------- samcli.commands.local.lib.sam_api_provider.ApiCollector.Properties Properties object for this resource. """ if logical_id not in self.by_resource: self.by_resource[logical_id] = self.Properties(apis=[], # Use a set() to be able to easily de-dupe binary_media_types=set(), cors=None) return self.by_resource[logical_id]
[ "def", "_get_properties", "(", "self", ",", "logical_id", ")", ":", "if", "logical_id", "not", "in", "self", ".", "by_resource", ":", "self", ".", "by_resource", "[", "logical_id", "]", "=", "self", ".", "Properties", "(", "apis", "=", "[", "]", ",", "...
Returns the properties of resource with given logical ID. If a resource is not found, then it returns an empty data. Parameters ---------- logical_id : str Logical ID of the resource Returns ------- samcli.commands.local.lib.sam_api_provider.ApiCollector.Properties Properties object for this resource.
[ "Returns", "the", "properties", "of", "resource", "with", "given", "logical", "ID", ".", "If", "a", "resource", "is", "not", "found", "then", "it", "returns", "an", "empty", "data", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_api_provider.py#L423-L445
29,976
awslabs/aws-sam-cli
samcli/local/lambdafn/runtime.py
_unzip_file
def _unzip_file(filepath): """ Helper method to unzip a file to a temporary directory :param string filepath: Absolute path to this file :return string: Path to the temporary directory where it was unzipped """ temp_dir = tempfile.mkdtemp() if os.name == 'posix': os.chmod(temp_dir, 0o755) LOG.info("Decompressing %s", filepath) unzip(filepath, temp_dir) # The directory that Python returns might have symlinks. The Docker File sharing settings will not resolve # symlinks. Hence get the real path before passing to Docker. # Especially useful in Mac OSX which returns /var/folders which is a symlink to /private/var/folders that is a # part of Docker's Shared Files directories return os.path.realpath(temp_dir)
python
def _unzip_file(filepath): """ Helper method to unzip a file to a temporary directory :param string filepath: Absolute path to this file :return string: Path to the temporary directory where it was unzipped """ temp_dir = tempfile.mkdtemp() if os.name == 'posix': os.chmod(temp_dir, 0o755) LOG.info("Decompressing %s", filepath) unzip(filepath, temp_dir) # The directory that Python returns might have symlinks. The Docker File sharing settings will not resolve # symlinks. Hence get the real path before passing to Docker. # Especially useful in Mac OSX which returns /var/folders which is a symlink to /private/var/folders that is a # part of Docker's Shared Files directories return os.path.realpath(temp_dir)
[ "def", "_unzip_file", "(", "filepath", ")", ":", "temp_dir", "=", "tempfile", ".", "mkdtemp", "(", ")", "if", "os", ".", "name", "==", "'posix'", ":", "os", ".", "chmod", "(", "temp_dir", ",", "0o755", ")", "LOG", ".", "info", "(", "\"Decompressing %s\...
Helper method to unzip a file to a temporary directory :param string filepath: Absolute path to this file :return string: Path to the temporary directory where it was unzipped
[ "Helper", "method", "to", "unzip", "a", "file", "to", "a", "temporary", "directory" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/runtime.py#L184-L205
29,977
awslabs/aws-sam-cli
samcli/local/lambdafn/runtime.py
LambdaRuntime.invoke
def invoke(self, function_config, event, debug_context=None, stdout=None, stderr=None): """ Invoke the given Lambda function locally. ##### NOTE: THIS IS A LONG BLOCKING CALL ##### This method will block until either the Lambda function completes or timed out, which could be seconds. A blocking call will block the thread preventing any other operations from happening. If you are using this method in a web-server or in contexts where your application needs to be responsive when function is running, take care to invoke the function in a separate thread. Co-Routines or micro-threads might not perform well because the underlying implementation essentially blocks on a socket, which is synchronous. :param FunctionConfig function_config: Configuration of the function to invoke :param event: String input event passed to Lambda function :param DebugContext debug_context: Debugging context for the function (includes port, args, and path) :param io.IOBase stdout: Optional. IO Stream to that receives stdout text from container. :param io.IOBase stderr: Optional. IO Stream that receives stderr text from container :raises Keyboard """ timer = None # Update with event input environ = function_config.env_vars environ.add_lambda_event_body(event) # Generate a dictionary of environment variable key:values env_vars = environ.resolve() with self._get_code_dir(function_config.code_abs_path) as code_dir: container = LambdaContainer(function_config.runtime, function_config.handler, code_dir, function_config.layers, self._image_builder, memory_mb=function_config.memory, env_vars=env_vars, debug_options=debug_context) try: # Start the container. This call returns immediately after the container starts self._container_manager.run(container) # Setup appropriate interrupt - timeout or Ctrl+C - before function starts executing. # # Start the timer **after** container starts. Container startup takes several seconds, only after which, # our Lambda function code will run. 
Starting the timer is a reasonable approximation that function has # started running. timer = self._configure_interrupt(function_config.name, function_config.timeout, container, bool(debug_context)) # NOTE: BLOCKING METHOD # Block the thread waiting to fetch logs from the container. This method will return after container # terminates, either successfully or killed by one of the interrupt handlers above. container.wait_for_logs(stdout=stdout, stderr=stderr) except KeyboardInterrupt: # When user presses Ctrl+C, we receive a Keyboard Interrupt. This is especially very common when # container is in debugging mode. We have special handling of Ctrl+C. So handle KeyboardInterrupt # and swallow the exception. The ``finally`` block will also take care of cleaning it up. LOG.debug("Ctrl+C was pressed. Aborting Lambda execution") finally: # We will be done with execution, if either the execution completed or an interrupt was fired # Any case, cleanup the timer and container. # # If we are in debugging mode, timer would not be created. So skip cleanup of the timer if timer: timer.cancel() self._container_manager.stop(container)
python
def invoke(self, function_config, event, debug_context=None, stdout=None, stderr=None): """ Invoke the given Lambda function locally. ##### NOTE: THIS IS A LONG BLOCKING CALL ##### This method will block until either the Lambda function completes or timed out, which could be seconds. A blocking call will block the thread preventing any other operations from happening. If you are using this method in a web-server or in contexts where your application needs to be responsive when function is running, take care to invoke the function in a separate thread. Co-Routines or micro-threads might not perform well because the underlying implementation essentially blocks on a socket, which is synchronous. :param FunctionConfig function_config: Configuration of the function to invoke :param event: String input event passed to Lambda function :param DebugContext debug_context: Debugging context for the function (includes port, args, and path) :param io.IOBase stdout: Optional. IO Stream to that receives stdout text from container. :param io.IOBase stderr: Optional. IO Stream that receives stderr text from container :raises Keyboard """ timer = None # Update with event input environ = function_config.env_vars environ.add_lambda_event_body(event) # Generate a dictionary of environment variable key:values env_vars = environ.resolve() with self._get_code_dir(function_config.code_abs_path) as code_dir: container = LambdaContainer(function_config.runtime, function_config.handler, code_dir, function_config.layers, self._image_builder, memory_mb=function_config.memory, env_vars=env_vars, debug_options=debug_context) try: # Start the container. This call returns immediately after the container starts self._container_manager.run(container) # Setup appropriate interrupt - timeout or Ctrl+C - before function starts executing. # # Start the timer **after** container starts. Container startup takes several seconds, only after which, # our Lambda function code will run. 
Starting the timer is a reasonable approximation that function has # started running. timer = self._configure_interrupt(function_config.name, function_config.timeout, container, bool(debug_context)) # NOTE: BLOCKING METHOD # Block the thread waiting to fetch logs from the container. This method will return after container # terminates, either successfully or killed by one of the interrupt handlers above. container.wait_for_logs(stdout=stdout, stderr=stderr) except KeyboardInterrupt: # When user presses Ctrl+C, we receive a Keyboard Interrupt. This is especially very common when # container is in debugging mode. We have special handling of Ctrl+C. So handle KeyboardInterrupt # and swallow the exception. The ``finally`` block will also take care of cleaning it up. LOG.debug("Ctrl+C was pressed. Aborting Lambda execution") finally: # We will be done with execution, if either the execution completed or an interrupt was fired # Any case, cleanup the timer and container. # # If we are in debugging mode, timer would not be created. So skip cleanup of the timer if timer: timer.cancel() self._container_manager.stop(container)
[ "def", "invoke", "(", "self", ",", "function_config", ",", "event", ",", "debug_context", "=", "None", ",", "stdout", "=", "None", ",", "stderr", "=", "None", ")", ":", "timer", "=", "None", "# Update with event input", "environ", "=", "function_config", "."...
Invoke the given Lambda function locally. ##### NOTE: THIS IS A LONG BLOCKING CALL ##### This method will block until either the Lambda function completes or timed out, which could be seconds. A blocking call will block the thread preventing any other operations from happening. If you are using this method in a web-server or in contexts where your application needs to be responsive when function is running, take care to invoke the function in a separate thread. Co-Routines or micro-threads might not perform well because the underlying implementation essentially blocks on a socket, which is synchronous. :param FunctionConfig function_config: Configuration of the function to invoke :param event: String input event passed to Lambda function :param DebugContext debug_context: Debugging context for the function (includes port, args, and path) :param io.IOBase stdout: Optional. IO Stream to that receives stdout text from container. :param io.IOBase stderr: Optional. IO Stream that receives stderr text from container :raises Keyboard
[ "Invoke", "the", "given", "Lambda", "function", "locally", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/runtime.py#L42-L116
29,978
awslabs/aws-sam-cli
samcli/local/lambdafn/runtime.py
LambdaRuntime._configure_interrupt
def _configure_interrupt(self, function_name, timeout, container, is_debugging): """ When a Lambda function is executing, we setup certain interrupt handlers to stop the execution. Usually, we setup a function timeout interrupt to kill the container after timeout expires. If debugging though, we don't enforce a timeout. But we setup a SIGINT interrupt to catch Ctrl+C and terminate the container. :param string function_name: Name of the function we are running :param integer timeout: Timeout in seconds :param samcli.local.docker.container.Container container: Instance of a container to terminate :param bool is_debugging: Are we debugging? :return threading.Timer: Timer object, if we setup a timer. None otherwise """ def timer_handler(): # NOTE: This handler runs in a separate thread. So don't try to mutate any non-thread-safe data structures LOG.info("Function '%s' timed out after %d seconds", function_name, timeout) self._container_manager.stop(container) def signal_handler(sig, frame): # NOTE: This handler runs in a separate thread. So don't try to mutate any non-thread-safe data structures LOG.info("Execution of function %s was interrupted", function_name) self._container_manager.stop(container) if is_debugging: LOG.debug("Setting up SIGTERM interrupt handler") signal.signal(signal.SIGTERM, signal_handler) else: # Start a timer, we'll use this to abort the function if it runs beyond the specified timeout LOG.debug("Starting a timer for %s seconds for function '%s'", timeout, function_name) timer = threading.Timer(timeout, timer_handler, ()) timer.start() return timer
python
def _configure_interrupt(self, function_name, timeout, container, is_debugging): """ When a Lambda function is executing, we setup certain interrupt handlers to stop the execution. Usually, we setup a function timeout interrupt to kill the container after timeout expires. If debugging though, we don't enforce a timeout. But we setup a SIGINT interrupt to catch Ctrl+C and terminate the container. :param string function_name: Name of the function we are running :param integer timeout: Timeout in seconds :param samcli.local.docker.container.Container container: Instance of a container to terminate :param bool is_debugging: Are we debugging? :return threading.Timer: Timer object, if we setup a timer. None otherwise """ def timer_handler(): # NOTE: This handler runs in a separate thread. So don't try to mutate any non-thread-safe data structures LOG.info("Function '%s' timed out after %d seconds", function_name, timeout) self._container_manager.stop(container) def signal_handler(sig, frame): # NOTE: This handler runs in a separate thread. So don't try to mutate any non-thread-safe data structures LOG.info("Execution of function %s was interrupted", function_name) self._container_manager.stop(container) if is_debugging: LOG.debug("Setting up SIGTERM interrupt handler") signal.signal(signal.SIGTERM, signal_handler) else: # Start a timer, we'll use this to abort the function if it runs beyond the specified timeout LOG.debug("Starting a timer for %s seconds for function '%s'", timeout, function_name) timer = threading.Timer(timeout, timer_handler, ()) timer.start() return timer
[ "def", "_configure_interrupt", "(", "self", ",", "function_name", ",", "timeout", ",", "container", ",", "is_debugging", ")", ":", "def", "timer_handler", "(", ")", ":", "# NOTE: This handler runs in a separate thread. So don't try to mutate any non-thread-safe data structures"...
When a Lambda function is executing, we setup certain interrupt handlers to stop the execution. Usually, we setup a function timeout interrupt to kill the container after timeout expires. If debugging though, we don't enforce a timeout. But we setup a SIGINT interrupt to catch Ctrl+C and terminate the container. :param string function_name: Name of the function we are running :param integer timeout: Timeout in seconds :param samcli.local.docker.container.Container container: Instance of a container to terminate :param bool is_debugging: Are we debugging? :return threading.Timer: Timer object, if we setup a timer. None otherwise
[ "When", "a", "Lambda", "function", "is", "executing", "we", "setup", "certain", "interrupt", "handlers", "to", "stop", "the", "execution", ".", "Usually", "we", "setup", "a", "function", "timeout", "interrupt", "to", "kill", "the", "container", "after", "timeo...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/runtime.py#L118-L149
29,979
awslabs/aws-sam-cli
samcli/local/lambdafn/runtime.py
LambdaRuntime._get_code_dir
def _get_code_dir(self, code_path): """ Method to get a path to a directory where the Lambda function code is available. This directory will be mounted directly inside the Docker container. This method handles a few different cases for ``code_path``: - ``code_path``is a existent zip/jar file: Unzip in a temp directory and return the temp directory - ``code_path`` is a existent directory: Return this immediately - ``code_path`` is a file/dir that does not exist: Return it as is. May be this method is not clever to detect the existence of the path :param string code_path: Path to the code. This could be pointing at a file or folder either on a local disk or in some network file system :return string: Directory containing Lambda function code. It can be mounted directly in container """ decompressed_dir = None try: if os.path.isfile(code_path) and code_path.endswith(self.SUPPORTED_ARCHIVE_EXTENSIONS): decompressed_dir = _unzip_file(code_path) yield decompressed_dir else: LOG.debug("Code %s is not a zip/jar file", code_path) yield code_path finally: if decompressed_dir: shutil.rmtree(decompressed_dir)
python
def _get_code_dir(self, code_path): """ Method to get a path to a directory where the Lambda function code is available. This directory will be mounted directly inside the Docker container. This method handles a few different cases for ``code_path``: - ``code_path``is a existent zip/jar file: Unzip in a temp directory and return the temp directory - ``code_path`` is a existent directory: Return this immediately - ``code_path`` is a file/dir that does not exist: Return it as is. May be this method is not clever to detect the existence of the path :param string code_path: Path to the code. This could be pointing at a file or folder either on a local disk or in some network file system :return string: Directory containing Lambda function code. It can be mounted directly in container """ decompressed_dir = None try: if os.path.isfile(code_path) and code_path.endswith(self.SUPPORTED_ARCHIVE_EXTENSIONS): decompressed_dir = _unzip_file(code_path) yield decompressed_dir else: LOG.debug("Code %s is not a zip/jar file", code_path) yield code_path finally: if decompressed_dir: shutil.rmtree(decompressed_dir)
[ "def", "_get_code_dir", "(", "self", ",", "code_path", ")", ":", "decompressed_dir", "=", "None", "try", ":", "if", "os", ".", "path", ".", "isfile", "(", "code_path", ")", "and", "code_path", ".", "endswith", "(", "self", ".", "SUPPORTED_ARCHIVE_EXTENSIONS"...
Method to get a path to a directory where the Lambda function code is available. This directory will be mounted directly inside the Docker container. This method handles a few different cases for ``code_path``: - ``code_path`` is an existing zip/jar file: Unzip in a temp directory and return the temp directory - ``code_path`` is an existing directory: Return this immediately - ``code_path`` is a file/dir that does not exist: Return it as is. Maybe this method is not clever enough to detect the existence of the path :param string code_path: Path to the code. This could be pointing at a file or folder either on a local disk or in some network file system :return string: Directory containing Lambda function code. It can be mounted directly in container
[ "Method", "to", "get", "a", "path", "to", "a", "directory", "where", "the", "Lambda", "function", "code", "is", "available", ".", "This", "directory", "will", "be", "mounted", "directly", "inside", "the", "Docker", "container", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/runtime.py#L152-L181
29,980
awslabs/aws-sam-cli
samcli/local/docker/lambda_image.py
LambdaImage.build
def build(self, runtime, layers):
    """
    Build the image if one is not already on the system that matches the runtime and layers

    Parameters
    ----------
    runtime str
        Name of the Lambda runtime
    layers list(samcli.commands.local.lib.provider.Layer)
        List of layers

    Returns
    -------
    str
        The image to be used (REPOSITORY:TAG)
    """
    runtime_image = "{}:{}".format(self._DOCKER_LAMBDA_REPO_NAME, runtime)

    # With no layers there is nothing to bake on top of the base runtime image.
    if not layers:
        LOG.debug("Skipping building an image since no layers were defined")
        return runtime_image

    resolved_layers = self.layer_downloader.download_all(layers, self.force_image_build)
    version_tag = self._generate_docker_image_version(resolved_layers, runtime)
    image_tag = "{}:{}".format(self._SAM_CLI_REPO_NAME, version_tag)

    try:
        self.docker_client.images.get(image_tag)
        missing = False
    except docker.errors.ImageNotFound:
        LOG.info("Image was not found.")
        missing = True

    # Rebuild when forced, when the cached image is absent, or when any layer
    # is defined inline in the template (its content can change between runs).
    template_defined = any(layer.is_defined_within_template for layer in resolved_layers)
    if self.force_image_build or missing or template_defined:
        LOG.info("Building image...")
        self._build_image(runtime_image, image_tag, resolved_layers)

    return image_tag
python
def build(self, runtime, layers): """ Build the image if one is not already on the system that matches the runtime and layers Parameters ---------- runtime str Name of the Lambda runtime layers list(samcli.commands.local.lib.provider.Layer) List of layers Returns ------- str The image to be used (REPOSITORY:TAG) """ base_image = "{}:{}".format(self._DOCKER_LAMBDA_REPO_NAME, runtime) # Don't build the image if there are no layers. if not layers: LOG.debug("Skipping building an image since no layers were defined") return base_image downloaded_layers = self.layer_downloader.download_all(layers, self.force_image_build) docker_image_version = self._generate_docker_image_version(downloaded_layers, runtime) image_tag = "{}:{}".format(self._SAM_CLI_REPO_NAME, docker_image_version) image_not_found = False try: self.docker_client.images.get(image_tag) except docker.errors.ImageNotFound: LOG.info("Image was not found.") image_not_found = True if self.force_image_build or \ image_not_found or \ any(layer.is_defined_within_template for layer in downloaded_layers): LOG.info("Building image...") self._build_image(base_image, image_tag, downloaded_layers) return image_tag
[ "def", "build", "(", "self", ",", "runtime", ",", "layers", ")", ":", "base_image", "=", "\"{}:{}\"", ".", "format", "(", "self", ".", "_DOCKER_LAMBDA_REPO_NAME", ",", "runtime", ")", "# Don't build the image if there are no layers.", "if", "not", "layers", ":", ...
Build the image if one is not already on the system that matches the runtime and layers Parameters ---------- runtime str Name of the Lambda runtime layers list(samcli.commands.local.lib.provider.Layer) List of layers Returns ------- str The image to be used (REPOSITORY:TAG)
[ "Build", "the", "image", "if", "one", "is", "not", "already", "on", "the", "system", "that", "matches", "the", "runtime", "and", "layers" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/lambda_image.py#L73-L115
29,981
awslabs/aws-sam-cli
samcli/local/docker/lambda_image.py
LambdaImage._generate_docker_image_version
def _generate_docker_image_version(layers, runtime): """ Generate the Docker TAG that will be used to create the image Parameters ---------- layers list(samcli.commands.local.lib.provider.Layer) List of the layers runtime str Runtime of the image to create Returns ------- str String representing the TAG to be attached to the image """ # Docker has a concept of a TAG on an image. This is plus the REPOSITORY is a way to determine # a version of the image. We will produced a TAG for a combination of the runtime with the layers # specified in the template. This will allow reuse of the runtime and layers across different # functions that are defined. If two functions use the same runtime with the same layers (in the # same order), SAM CLI will only produce one image and use this image across both functions for invoke. return runtime + '-' + hashlib.sha256( "-".join([layer.name for layer in layers]).encode('utf-8')).hexdigest()[0:25]
python
def _generate_docker_image_version(layers, runtime): """ Generate the Docker TAG that will be used to create the image Parameters ---------- layers list(samcli.commands.local.lib.provider.Layer) List of the layers runtime str Runtime of the image to create Returns ------- str String representing the TAG to be attached to the image """ # Docker has a concept of a TAG on an image. This is plus the REPOSITORY is a way to determine # a version of the image. We will produced a TAG for a combination of the runtime with the layers # specified in the template. This will allow reuse of the runtime and layers across different # functions that are defined. If two functions use the same runtime with the same layers (in the # same order), SAM CLI will only produce one image and use this image across both functions for invoke. return runtime + '-' + hashlib.sha256( "-".join([layer.name for layer in layers]).encode('utf-8')).hexdigest()[0:25]
[ "def", "_generate_docker_image_version", "(", "layers", ",", "runtime", ")", ":", "# Docker has a concept of a TAG on an image. This is plus the REPOSITORY is a way to determine", "# a version of the image. We will produced a TAG for a combination of the runtime with the layers", "# specified in...
Generate the Docker TAG that will be used to create the image Parameters ---------- layers list(samcli.commands.local.lib.provider.Layer) List of the layers runtime str Runtime of the image to create Returns ------- str String representing the TAG to be attached to the image
[ "Generate", "the", "Docker", "TAG", "that", "will", "be", "used", "to", "create", "the", "image" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/lambda_image.py#L118-L142
29,982
awslabs/aws-sam-cli
samcli/local/docker/lambda_image.py
LambdaImage._build_image
def _build_image(self, base_image, docker_tag, layers):
    """
    Builds the image

    Parameters
    ----------
    base_image str
        Base Image to use for the new image
    docker_tag
        Docker tag (REPOSITORY:TAG) to use when building the image
    layers list(samcli.commands.local.lib.provider.Layer)
        List of Layers to be use to mount in the image

    Returns
    -------
    None

    Raises
    ------
    samcli.commands.local.cli_common.user_exceptions.ImageBuildException
        When docker fails to build the image
    """
    contents = self._generate_dockerfile(base_image, layers)

    # The Dockerfile is written next to the layer cache so it lives on the same
    # filesystem; a uuid suffix avoids collisions between concurrent builds.
    dockerfile_path = Path(self.layer_downloader.layer_cache,
                           "dockerfile_" + str(uuid.uuid4()))
    try:
        with open(str(dockerfile_path), "w") as handle:
            handle.write(contents)

        # Build context: the Dockerfile plus each layer directory mounted at /<name>
        context_paths = {str(dockerfile_path): "Dockerfile"}
        context_paths.update({layer.codeuri: '/' + layer.name for layer in layers})

        with create_tarball(context_paths) as build_context:
            try:
                self.docker_client.images.build(fileobj=build_context,
                                                custom_context=True,
                                                rm=True,
                                                tag=docker_tag,
                                                pull=not self.skip_pull_image)
            except (docker.errors.BuildError, docker.errors.APIError):
                LOG.exception("Failed to build Docker Image")
                raise ImageBuildException("Building Image failed.")
    finally:
        # Always remove the temporary Dockerfile, even when the build fails
        if dockerfile_path.exists():
            dockerfile_path.unlink()
python
def _build_image(self, base_image, docker_tag, layers): """ Builds the image Parameters ---------- base_image str Base Image to use for the new image docker_tag Docker tag (REPOSITORY:TAG) to use when building the image layers list(samcli.commands.local.lib.provider.Layer) List of Layers to be use to mount in the image Returns ------- None Raises ------ samcli.commands.local.cli_common.user_exceptions.ImageBuildException When docker fails to build the image """ dockerfile_content = self._generate_dockerfile(base_image, layers) # Create dockerfile in the same directory of the layer cache dockerfile_name = "dockerfile_" + str(uuid.uuid4()) full_dockerfile_path = Path(self.layer_downloader.layer_cache, dockerfile_name) try: with open(str(full_dockerfile_path), "w") as dockerfile: dockerfile.write(dockerfile_content) tar_paths = {str(full_dockerfile_path): "Dockerfile"} for layer in layers: tar_paths[layer.codeuri] = '/' + layer.name with create_tarball(tar_paths) as tarballfile: try: self.docker_client.images.build(fileobj=tarballfile, custom_context=True, rm=True, tag=docker_tag, pull=not self.skip_pull_image) except (docker.errors.BuildError, docker.errors.APIError): LOG.exception("Failed to build Docker Image") raise ImageBuildException("Building Image failed.") finally: if full_dockerfile_path.exists(): full_dockerfile_path.unlink()
[ "def", "_build_image", "(", "self", ",", "base_image", ",", "docker_tag", ",", "layers", ")", ":", "dockerfile_content", "=", "self", ".", "_generate_dockerfile", "(", "base_image", ",", "layers", ")", "# Create dockerfile in the same directory of the layer cache", "doc...
Builds the image Parameters ---------- base_image str Base Image to use for the new image docker_tag Docker tag (REPOSITORY:TAG) to use when building the image layers list(samcli.commands.local.lib.provider.Layer) List of Layers to be use to mount in the image Returns ------- None Raises ------ samcli.commands.local.cli_common.user_exceptions.ImageBuildException When docker fails to build the image
[ "Builds", "the", "image" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/lambda_image.py#L144-L192
29,983
awslabs/aws-sam-cli
samcli/local/docker/lambda_image.py
LambdaImage._generate_dockerfile
def _generate_dockerfile(base_image, layers):
    """
    Generate the Dockerfile contents

    A generated Dockerfile will look like the following:

    ```
    FROM lambci/lambda:python3.6

    ADD --chown=sbx_user1051:495 layer1 /opt
    ADD --chown=sbx_user1051:495 layer2 /opt
    ```

    Parameters
    ----------
    base_image str
        Base Image to use for the new image
    layers list(samcli.commands.local.lib.provider.Layer)
        List of Layers to be use to mount in the image

    Returns
    -------
    str
        String representing the Dockerfile contents for the image
    """
    # One ADD instruction per layer; chown matches the sandbox user inside the
    # lambci base images so the layer files are readable at runtime.
    lines = ["FROM {}\n".format(base_image)]
    for layer in layers:
        lines.append("ADD --chown=sbx_user1051:495 {} {}\n".format(layer.name,
                                                                   LambdaImage._LAYERS_DIR))
    return "".join(lines)
python
def _generate_dockerfile(base_image, layers): """ Generate the Dockerfile contents A generated Dockerfile will look like the following: ``` FROM lambci/lambda:python3.6 ADD --chown=sbx_user1051:495 layer1 /opt ADD --chown=sbx_user1051:495 layer2 /opt ``` Parameters ---------- base_image str Base Image to use for the new image layers list(samcli.commands.local.lib.provider.Layer) List of Layers to be use to mount in the image Returns ------- str String representing the Dockerfile contents for the image """ dockerfile_content = "FROM {}\n".format(base_image) for layer in layers: dockerfile_content = dockerfile_content + \ "ADD --chown=sbx_user1051:495 {} {}\n".format(layer.name, LambdaImage._LAYERS_DIR) return dockerfile_content
[ "def", "_generate_dockerfile", "(", "base_image", ",", "layers", ")", ":", "dockerfile_content", "=", "\"FROM {}\\n\"", ".", "format", "(", "base_image", ")", "for", "layer", "in", "layers", ":", "dockerfile_content", "=", "dockerfile_content", "+", "\"ADD --chown=s...
Generate the Dockerfile contents A generated Dockerfile will look like the following: ``` FROM lambci/lambda:python3.6 ADD --chown=sbx_user1051:495 layer1 /opt ADD --chown=sbx_user1051:495 layer2 /opt ``` Parameters ---------- base_image str Base Image to use for the new image layers list(samcli.commands.local.lib.provider.Layer) List of Layers to be use to mount in the image Returns ------- str String representing the Dockerfile contents for the image
[ "Generate", "the", "Dockerfile", "contents" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/lambda_image.py#L195-L225
29,984
awslabs/aws-sam-cli
samcli/commands/local/lib/local_api_service.py
LocalApiService.start
def start(self):
    """
    Creates and starts the local API Gateway service. This method will block until the service is stopped
    manually using an interrupt. After the service is started, callers can make HTTP requests to the endpoint
    to invoke the Lambda function and receive a response.

    NOTE: This is a blocking call that will not return until the thread is interrupted with SIGINT/SIGTERM
    """
    routes = self._make_routing_list(self.api_provider)
    if not routes:
        raise NoApisDefined("No APIs available in SAM template")

    static_path = self._make_static_dir_path(self.cwd, self.static_dir)

    # Only stderr is handed to the service: the container's stdout carries the
    # HTTP response payload, while stderr carries runtime logs and the output
    # of print statements from the Lambda function.
    service = LocalApigwService(routing_list=routes,
                                lambda_runner=self.lambda_runner,
                                static_dir=static_path,
                                port=self.port,
                                host=self.host,
                                stderr=self.stderr_stream)

    service.create()

    # Show the user every route that was mounted before blocking
    self._print_routes(self.api_provider, self.host, self.port)
    LOG.info("You can now browse to the above endpoints to invoke your functions. "
             "You do not need to restart/reload SAM CLI while working on your functions, "
             "changes will be reflected instantly/automatically. You only need to restart "
             "SAM CLI if you update your AWS SAM template")

    service.run()
python
def start(self): """ Creates and starts the local API Gateway service. This method will block until the service is stopped manually using an interrupt. After the service is started, callers can make HTTP requests to the endpoint to invoke the Lambda function and receive a response. NOTE: This is a blocking call that will not return until the thread is interrupted with SIGINT/SIGTERM """ routing_list = self._make_routing_list(self.api_provider) if not routing_list: raise NoApisDefined("No APIs available in SAM template") static_dir_path = self._make_static_dir_path(self.cwd, self.static_dir) # We care about passing only stderr to the Service and not stdout because stdout from Docker container # contains the response to the API which is sent out as HTTP response. Only stderr needs to be printed # to the console or a log file. stderr from Docker container contains runtime logs and output of print # statements from the Lambda function service = LocalApigwService(routing_list=routing_list, lambda_runner=self.lambda_runner, static_dir=static_dir_path, port=self.port, host=self.host, stderr=self.stderr_stream) service.create() # Print out the list of routes that will be mounted self._print_routes(self.api_provider, self.host, self.port) LOG.info("You can now browse to the above endpoints to invoke your functions. " "You do not need to restart/reload SAM CLI while working on your functions, " "changes will be reflected instantly/automatically. You only need to restart " "SAM CLI if you update your AWS SAM template") service.run()
[ "def", "start", "(", "self", ")", ":", "routing_list", "=", "self", ".", "_make_routing_list", "(", "self", ".", "api_provider", ")", "if", "not", "routing_list", ":", "raise", "NoApisDefined", "(", "\"No APIs available in SAM template\"", ")", "static_dir_path", ...
Creates and starts the local API Gateway service. This method will block until the service is stopped manually using an interrupt. After the service is started, callers can make HTTP requests to the endpoint to invoke the Lambda function and receive a response. NOTE: This is a blocking call that will not return until the thread is interrupted with SIGINT/SIGTERM
[ "Creates", "and", "starts", "the", "local", "API", "Gateway", "service", ".", "This", "method", "will", "block", "until", "the", "service", "is", "stopped", "manually", "using", "an", "interrupt", ".", "After", "the", "service", "is", "started", "callers", "...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/local_api_service.py#L47-L83
29,985
awslabs/aws-sam-cli
samcli/commands/local/lib/local_api_service.py
LocalApiService._make_routing_list
def _make_routing_list(api_provider):
    """
    Returns a list of routes to configure the Local API Service based on the APIs configured in the template.

    Parameters
    ----------
    api_provider : samcli.commands.local.lib.sam_api_provider.SamApiProvider

    Returns
    -------
    list(samcli.local.apigw.service.Route)
        List of Routes to pass to the service
    """
    # One Route per API entry; each Route carries a single-method list
    return [
        Route(methods=[api.method],
              function_name=api.function_name,
              path=api.path,
              binary_types=api.binary_media_types)
        for api in api_provider.get_all()
    ]
python
def _make_routing_list(api_provider): """ Returns a list of routes to configure the Local API Service based on the APIs configured in the template. Parameters ---------- api_provider : samcli.commands.local.lib.sam_api_provider.SamApiProvider Returns ------- list(samcli.local.apigw.service.Route) List of Routes to pass to the service """ routes = [] for api in api_provider.get_all(): route = Route(methods=[api.method], function_name=api.function_name, path=api.path, binary_types=api.binary_media_types) routes.append(route) return routes
[ "def", "_make_routing_list", "(", "api_provider", ")", ":", "routes", "=", "[", "]", "for", "api", "in", "api_provider", ".", "get_all", "(", ")", ":", "route", "=", "Route", "(", "methods", "=", "[", "api", ".", "method", "]", ",", "function_name", "=...
Returns a list of routes to configure the Local API Service based on the APIs configured in the template. Parameters ---------- api_provider : samcli.commands.local.lib.sam_api_provider.SamApiProvider Returns ------- list(samcli.local.apigw.service.Route) List of Routes to pass to the service
[ "Returns", "a", "list", "of", "routes", "to", "configure", "the", "Local", "API", "Service", "based", "on", "the", "APIs", "configured", "in", "the", "template", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/local_api_service.py#L86-L106
29,986
awslabs/aws-sam-cli
samcli/commands/local/lib/local_api_service.py
LocalApiService._print_routes
def _print_routes(api_provider, host, port):
    """
    Helper method to print the APIs that will be mounted. This method is purely for printing purposes.
    This method takes in a list of Route Configurations and prints out the Routes grouped by path.
    Grouping routes by Function Name + Path is the bulk of the logic.

    Example output:
        Mounting Product at http://127.0.0.1:3000/path1/bar [GET, POST, DELETE]
        Mounting Product at http://127.0.0.1:3000/path2/bar [HEAD]

    :param samcli.commands.local.lib.provider.ApiProvider api_provider: API Provider that can return a list of APIs
    :param string host: Host name where the service is running
    :param int port: Port number where the service is running
    :returns list(string): List of lines that were printed to the console. Helps with testing
    """
    # Group methods by (function_name, path) so each mount prints a single line
    grouped = {}
    for api in api_provider.get_all():
        key = "{}-{}".format(api.function_name, api.path)
        entry = grouped.setdefault(key, {"function_name": api.function_name,
                                         "path": api.path,
                                         "methods": []})
        entry["methods"].append(api.method)

    printed = []
    for entry in grouped.values():
        output = "Mounting {} at http://{}:{}{} {}".format(
            entry["function_name"],
            host,
            port,
            entry["path"],
            "[{}]".format(', '.join(entry["methods"])))
        printed.append(output)
        LOG.info(output)

    return printed
python
def _print_routes(api_provider, host, port): """ Helper method to print the APIs that will be mounted. This method is purely for printing purposes. This method takes in a list of Route Configurations and prints out the Routes grouped by path. Grouping routes by Function Name + Path is the bulk of the logic. Example output: Mounting Product at http://127.0.0.1:3000/path1/bar [GET, POST, DELETE] Mounting Product at http://127.0.0.1:3000/path2/bar [HEAD] :param samcli.commands.local.lib.provider.ApiProvider api_provider: API Provider that can return a list of APIs :param string host: Host name where the service is running :param int port: Port number where the service is running :returns list(string): List of lines that were printed to the console. Helps with testing """ grouped_api_configs = {} for api in api_provider.get_all(): key = "{}-{}".format(api.function_name, api.path) config = grouped_api_configs.get(key, {}) config.setdefault("methods", []) config["function_name"] = api.function_name config["path"] = api.path config["methods"].append(api.method) grouped_api_configs[key] = config print_lines = [] for _, config in grouped_api_configs.items(): methods_str = "[{}]".format(', '.join(config["methods"])) output = "Mounting {} at http://{}:{}{} {}".format( config["function_name"], host, port, config["path"], methods_str) print_lines.append(output) LOG.info(output) return print_lines
[ "def", "_print_routes", "(", "api_provider", ",", "host", ",", "port", ")", ":", "grouped_api_configs", "=", "{", "}", "for", "api", "in", "api_provider", ".", "get_all", "(", ")", ":", "key", "=", "\"{}-{}\"", ".", "format", "(", "api", ".", "function_n...
Helper method to print the APIs that will be mounted. This method is purely for printing purposes. This method takes in a list of Route Configurations and prints out the Routes grouped by path. Grouping routes by Function Name + Path is the bulk of the logic. Example output: Mounting Product at http://127.0.0.1:3000/path1/bar [GET, POST, DELETE] Mounting Product at http://127.0.0.1:3000/path2/bar [HEAD] :param samcli.commands.local.lib.provider.ApiProvider api_provider: API Provider that can return a list of APIs :param string host: Host name where the service is running :param int port: Port number where the service is running :returns list(string): List of lines that were printed to the console. Helps with testing
[ "Helper", "method", "to", "print", "the", "APIs", "that", "will", "be", "mounted", ".", "This", "method", "is", "purely", "for", "printing", "purposes", ".", "This", "method", "takes", "in", "a", "list", "of", "Route", "Configurations", "and", "prints", "o...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/local_api_service.py#L109-L151
29,987
awslabs/aws-sam-cli
samcli/commands/local/lib/local_api_service.py
LocalApiService._make_static_dir_path
def _make_static_dir_path(cwd, static_dir): """ This method returns the path to the directory where static files are to be served from. If static_dir is a relative path, then it is resolved to be relative to the current working directory. If no static directory is provided, or if the resolved directory does not exist, this method will return None :param string cwd: Current working directory relative to which we will resolve the static directory :param string static_dir: Path to the static directory :return string: Path to the static directory, if it exists. None, otherwise """ if not static_dir: return None static_dir_path = os.path.join(cwd, static_dir) if os.path.exists(static_dir_path): LOG.info("Mounting static files from %s at /", static_dir_path) return static_dir_path
python
def _make_static_dir_path(cwd, static_dir): """ This method returns the path to the directory where static files are to be served from. If static_dir is a relative path, then it is resolved to be relative to the current working directory. If no static directory is provided, or if the resolved directory does not exist, this method will return None :param string cwd: Current working directory relative to which we will resolve the static directory :param string static_dir: Path to the static directory :return string: Path to the static directory, if it exists. None, otherwise """ if not static_dir: return None static_dir_path = os.path.join(cwd, static_dir) if os.path.exists(static_dir_path): LOG.info("Mounting static files from %s at /", static_dir_path) return static_dir_path
[ "def", "_make_static_dir_path", "(", "cwd", ",", "static_dir", ")", ":", "if", "not", "static_dir", ":", "return", "None", "static_dir_path", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "static_dir", ")", "if", "os", ".", "path", ".", "exists",...
This method returns the path to the directory where static files are to be served from. If static_dir is a relative path, then it is resolved to be relative to the current working directory. If no static directory is provided, or if the resolved directory does not exist, this method will return None :param string cwd: Current working directory relative to which we will resolve the static directory :param string static_dir: Path to the static directory :return string: Path to the static directory, if it exists. None, otherwise
[ "This", "method", "returns", "the", "path", "to", "the", "directory", "where", "static", "files", "are", "to", "be", "served", "from", ".", "If", "static_dir", "is", "a", "relative", "path", "then", "it", "is", "resolved", "to", "be", "relative", "to", "...
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/local_api_service.py#L154-L170
29,988
awslabs/aws-sam-cli
samcli/local/lambda_service/local_lambda_invoke_service.py
LocalLambdaInvokeService.validate_request
def validate_request():
    """
    Validates the incoming request

    The following are invalid
    1. The Request data is not json serializable
    2. Query Parameters are sent to the endpoint
    3. 'X-Amz-Log-Type' header is not 'None'
    4. 'X-Amz-Invocation-Type' header is not 'RequestResponse'

    Returns
    -------
    flask.Response
        If the request is not valid a flask Response is returned
    None:
        If the request passes all validation
    """
    incoming = request

    # An empty body is treated as an empty JSON object, matching the service
    payload = incoming.get_data() or b'{}'
    payload = payload.decode('utf-8')

    try:
        json.loads(payload)
    except ValueError as json_error:
        LOG.debug("Request body was not json. Exception: %s", str(json_error))
        return LambdaErrorResponses.invalid_request_content(
            "Could not parse request body into json: No JSON object could be decoded")

    if incoming.args:
        LOG.debug("Query parameters are in the request but not supported")
        return LambdaErrorResponses.invalid_request_content("Query Parameters are not supported")

    headers = CaseInsensitiveDict(incoming.headers)

    log_type = headers.get('X-Amz-Log-Type', 'None')
    if log_type != 'None':
        LOG.debug("log-type: %s is not supported. None is only supported.", log_type)
        return LambdaErrorResponses.not_implemented_locally(
            "log-type: {} is not supported. None is only supported.".format(log_type))

    invocation_type = headers.get('X-Amz-Invocation-Type', 'RequestResponse')
    if invocation_type != 'RequestResponse':
        LOG.warning("invocation-type: %s is not supported. RequestResponse is only supported.",
                    invocation_type)
        return LambdaErrorResponses.not_implemented_locally(
            "invocation-type: {} is not supported. RequestResponse is only supported.".format(invocation_type))
python
def validate_request(): """ Validates the incoming request The following are invalid 1. The Request data is not json serializable 2. Query Parameters are sent to the endpoint 3. The Request Content-Type is not application/json 4. 'X-Amz-Log-Type' header is not 'None' 5. 'X-Amz-Invocation-Type' header is not 'RequestResponse' Returns ------- flask.Response If the request is not valid a flask Response is returned None: If the request passes all validation """ flask_request = request request_data = flask_request.get_data() if not request_data: request_data = b'{}' request_data = request_data.decode('utf-8') try: json.loads(request_data) except ValueError as json_error: LOG.debug("Request body was not json. Exception: %s", str(json_error)) return LambdaErrorResponses.invalid_request_content( "Could not parse request body into json: No JSON object could be decoded") if flask_request.args: LOG.debug("Query parameters are in the request but not supported") return LambdaErrorResponses.invalid_request_content("Query Parameters are not supported") request_headers = CaseInsensitiveDict(flask_request.headers) log_type = request_headers.get('X-Amz-Log-Type', 'None') if log_type != 'None': LOG.debug("log-type: %s is not supported. None is only supported.", log_type) return LambdaErrorResponses.not_implemented_locally( "log-type: {} is not supported. None is only supported.".format(log_type)) invocation_type = request_headers.get('X-Amz-Invocation-Type', 'RequestResponse') if invocation_type != 'RequestResponse': LOG.warning("invocation-type: %s is not supported. RequestResponse is only supported.", invocation_type) return LambdaErrorResponses.not_implemented_locally( "invocation-type: {} is not supported. RequestResponse is only supported.".format(invocation_type))
[ "def", "validate_request", "(", ")", ":", "flask_request", "=", "request", "request_data", "=", "flask_request", ".", "get_data", "(", ")", "if", "not", "request_data", ":", "request_data", "=", "b'{}'", "request_data", "=", "request_data", ".", "decode", "(", ...
Validates the incoming request The following are invalid 1. The Request data is not json serializable 2. Query Parameters are sent to the endpoint 3. The Request Content-Type is not application/json 4. 'X-Amz-Log-Type' header is not 'None' 5. 'X-Amz-Invocation-Type' header is not 'RequestResponse' Returns ------- flask.Response If the request is not valid a flask Response is returned None: If the request passes all validation
[ "Validates", "the", "incoming", "request" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/local_lambda_invoke_service.py#L57-L107
29,989
awslabs/aws-sam-cli
samcli/local/lambda_service/local_lambda_invoke_service.py
LocalLambdaInvokeService._invoke_request_handler
def _invoke_request_handler(self, function_name): """ Request Handler for the Local Lambda Invoke path. This method is responsible for understanding the incoming request and invoking the Local Lambda Function Parameters ---------- function_name str Name of the function to invoke Returns ------- A Flask Response response object as if it was returned from Lambda """ flask_request = request request_data = flask_request.get_data() if not request_data: request_data = b'{}' request_data = request_data.decode('utf-8') stdout_stream = io.BytesIO() stdout_stream_writer = StreamWriter(stdout_stream, self.is_debugging) try: self.lambda_runner.invoke(function_name, request_data, stdout=stdout_stream_writer, stderr=self.stderr) except FunctionNotFound: LOG.debug('%s was not found to invoke.', function_name) return LambdaErrorResponses.resource_not_found(function_name) lambda_response, lambda_logs, is_lambda_user_error_response = \ LambdaOutputParser.get_lambda_output(stdout_stream) if self.stderr and lambda_logs: # Write the logs to stderr if available. self.stderr.write(lambda_logs) if is_lambda_user_error_response: return self.service_response(lambda_response, {'Content-Type': 'application/json', 'x-amz-function-error': 'Unhandled'}, 200) return self.service_response(lambda_response, {'Content-Type': 'application/json'}, 200)
python
def _invoke_request_handler(self, function_name): """ Request Handler for the Local Lambda Invoke path. This method is responsible for understanding the incoming request and invoking the Local Lambda Function Parameters ---------- function_name str Name of the function to invoke Returns ------- A Flask Response response object as if it was returned from Lambda """ flask_request = request request_data = flask_request.get_data() if not request_data: request_data = b'{}' request_data = request_data.decode('utf-8') stdout_stream = io.BytesIO() stdout_stream_writer = StreamWriter(stdout_stream, self.is_debugging) try: self.lambda_runner.invoke(function_name, request_data, stdout=stdout_stream_writer, stderr=self.stderr) except FunctionNotFound: LOG.debug('%s was not found to invoke.', function_name) return LambdaErrorResponses.resource_not_found(function_name) lambda_response, lambda_logs, is_lambda_user_error_response = \ LambdaOutputParser.get_lambda_output(stdout_stream) if self.stderr and lambda_logs: # Write the logs to stderr if available. self.stderr.write(lambda_logs) if is_lambda_user_error_response: return self.service_response(lambda_response, {'Content-Type': 'application/json', 'x-amz-function-error': 'Unhandled'}, 200) return self.service_response(lambda_response, {'Content-Type': 'application/json'}, 200)
[ "def", "_invoke_request_handler", "(", "self", ",", "function_name", ")", ":", "flask_request", "=", "request", "request_data", "=", "flask_request", ".", "get_data", "(", ")", "if", "not", "request_data", ":", "request_data", "=", "b'{}'", "request_data", "=", ...
Request Handler for the Local Lambda Invoke path. This method is responsible for understanding the incoming request and invoking the Local Lambda Function Parameters ---------- function_name str Name of the function to invoke Returns ------- A Flask Response response object as if it was returned from Lambda
[ "Request", "Handler", "for", "the", "Local", "Lambda", "Invoke", "path", ".", "This", "method", "is", "responsible", "for", "understanding", "the", "incoming", "request", "and", "invoking", "the", "Local", "Lambda", "Function" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/local_lambda_invoke_service.py#L118-L162
29,990
awslabs/aws-sam-cli
samcli/local/lambdafn/zip.py
unzip
def unzip(zip_file_path, output_dir, permission=None): """ Unzip the given file into the given directory while preserving file permissions in the process. Parameters ---------- zip_file_path : str Path to the zip file output_dir : str Path to the directory where the it should be unzipped to permission : octal int Permission to set """ with zipfile.ZipFile(zip_file_path, 'r') as zip_ref: # For each item in the zip file, extract the file and set permissions if available for file_info in zip_ref.infolist(): name = file_info.filename extracted_path = os.path.join(output_dir, name) zip_ref.extract(name, output_dir) _set_permissions(file_info, extracted_path) _override_permissions(extracted_path, permission) _override_permissions(output_dir, permission)
python
def unzip(zip_file_path, output_dir, permission=None): """ Unzip the given file into the given directory while preserving file permissions in the process. Parameters ---------- zip_file_path : str Path to the zip file output_dir : str Path to the directory where the it should be unzipped to permission : octal int Permission to set """ with zipfile.ZipFile(zip_file_path, 'r') as zip_ref: # For each item in the zip file, extract the file and set permissions if available for file_info in zip_ref.infolist(): name = file_info.filename extracted_path = os.path.join(output_dir, name) zip_ref.extract(name, output_dir) _set_permissions(file_info, extracted_path) _override_permissions(extracted_path, permission) _override_permissions(output_dir, permission)
[ "def", "unzip", "(", "zip_file_path", ",", "output_dir", ",", "permission", "=", "None", ")", ":", "with", "zipfile", ".", "ZipFile", "(", "zip_file_path", ",", "'r'", ")", "as", "zip_ref", ":", "# For each item in the zip file, extract the file and set permissions if...
Unzip the given file into the given directory while preserving file permissions in the process. Parameters ---------- zip_file_path : str Path to the zip file output_dir : str Path to the directory where the it should be unzipped to permission : octal int Permission to set
[ "Unzip", "the", "given", "file", "into", "the", "given", "directory", "while", "preserving", "file", "permissions", "in", "the", "process", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/zip.py#L23-L51
29,991
awslabs/aws-sam-cli
samcli/local/lambdafn/zip.py
_set_permissions
def _set_permissions(zip_file_info, extracted_path): """ Sets permissions on the extracted file by reading the ``external_attr`` property of given file info. Parameters ---------- zip_file_info : zipfile.ZipInfo Object containing information about a file within a zip archive extracted_path : str Path where the file has been extracted to """ # Permission information is stored in first two bytes. permission = zip_file_info.external_attr >> 16 if not permission: # Zips created on certain Windows machines, however, might not have any permission information on them. # Skip setting a permission on these files. LOG.debug("File %s in zipfile does not have permission information", zip_file_info.filename) return os.chmod(extracted_path, permission)
python
def _set_permissions(zip_file_info, extracted_path): """ Sets permissions on the extracted file by reading the ``external_attr`` property of given file info. Parameters ---------- zip_file_info : zipfile.ZipInfo Object containing information about a file within a zip archive extracted_path : str Path where the file has been extracted to """ # Permission information is stored in first two bytes. permission = zip_file_info.external_attr >> 16 if not permission: # Zips created on certain Windows machines, however, might not have any permission information on them. # Skip setting a permission on these files. LOG.debug("File %s in zipfile does not have permission information", zip_file_info.filename) return os.chmod(extracted_path, permission)
[ "def", "_set_permissions", "(", "zip_file_info", ",", "extracted_path", ")", ":", "# Permission information is stored in first two bytes.", "permission", "=", "zip_file_info", ".", "external_attr", ">>", "16", "if", "not", "permission", ":", "# Zips created on certain Windows...
Sets permissions on the extracted file by reading the ``external_attr`` property of given file info. Parameters ---------- zip_file_info : zipfile.ZipInfo Object containing information about a file within a zip archive extracted_path : str Path where the file has been extracted to
[ "Sets", "permissions", "on", "the", "extracted", "file", "by", "reading", "the", "external_attr", "property", "of", "given", "file", "info", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/zip.py#L70-L91
29,992
awslabs/aws-sam-cli
samcli/local/lambdafn/zip.py
unzip_from_uri
def unzip_from_uri(uri, layer_zip_path, unzip_output_dir, progressbar_label): """ Download the LayerVersion Zip to the Layer Pkg Cache Parameters ---------- uri str Uri to download from layer_zip_path str Path to where the content from the uri should be downloaded to unzip_output_dir str Path to unzip the zip to progressbar_label str Label to use in the Progressbar """ try: get_request = requests.get(uri, stream=True, verify=os.environ.get('AWS_CA_BUNDLE', True)) with open(layer_zip_path, 'wb') as local_layer_file: file_length = int(get_request.headers['Content-length']) with progressbar(file_length, progressbar_label) as p_bar: # Set the chunk size to None. Since we are streaming the request, None will allow the data to be # read as it arrives in whatever size the chunks are received. for data in get_request.iter_content(chunk_size=None): local_layer_file.write(data) p_bar.update(len(data)) # Forcefully set the permissions to 700 on files and directories. This is to ensure the owner # of the files is the only one that can read, write, or execute the files. unzip(layer_zip_path, unzip_output_dir, permission=0o700) finally: # Remove the downloaded zip file path_to_layer = Path(layer_zip_path) if path_to_layer.exists(): path_to_layer.unlink()
python
def unzip_from_uri(uri, layer_zip_path, unzip_output_dir, progressbar_label): """ Download the LayerVersion Zip to the Layer Pkg Cache Parameters ---------- uri str Uri to download from layer_zip_path str Path to where the content from the uri should be downloaded to unzip_output_dir str Path to unzip the zip to progressbar_label str Label to use in the Progressbar """ try: get_request = requests.get(uri, stream=True, verify=os.environ.get('AWS_CA_BUNDLE', True)) with open(layer_zip_path, 'wb') as local_layer_file: file_length = int(get_request.headers['Content-length']) with progressbar(file_length, progressbar_label) as p_bar: # Set the chunk size to None. Since we are streaming the request, None will allow the data to be # read as it arrives in whatever size the chunks are received. for data in get_request.iter_content(chunk_size=None): local_layer_file.write(data) p_bar.update(len(data)) # Forcefully set the permissions to 700 on files and directories. This is to ensure the owner # of the files is the only one that can read, write, or execute the files. unzip(layer_zip_path, unzip_output_dir, permission=0o700) finally: # Remove the downloaded zip file path_to_layer = Path(layer_zip_path) if path_to_layer.exists(): path_to_layer.unlink()
[ "def", "unzip_from_uri", "(", "uri", ",", "layer_zip_path", ",", "unzip_output_dir", ",", "progressbar_label", ")", ":", "try", ":", "get_request", "=", "requests", ".", "get", "(", "uri", ",", "stream", "=", "True", ",", "verify", "=", "os", ".", "environ...
Download the LayerVersion Zip to the Layer Pkg Cache Parameters ---------- uri str Uri to download from layer_zip_path str Path to where the content from the uri should be downloaded to unzip_output_dir str Path to unzip the zip to progressbar_label str Label to use in the Progressbar
[ "Download", "the", "LayerVersion", "Zip", "to", "the", "Layer", "Pkg", "Cache" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/zip.py#L94-L130
29,993
awslabs/aws-sam-cli
samcli/local/apigw/local_apigw_service.py
LocalApigwService._generate_route_keys
def _generate_route_keys(self, methods, path): """ Generates the key to the _dict_of_routes based on the list of methods and path supplied :param list(str) methods: List of HTTP Methods :param str path: Path off the base url :return: str of Path:Method """ for method in methods: yield self._route_key(method, path)
python
def _generate_route_keys(self, methods, path): """ Generates the key to the _dict_of_routes based on the list of methods and path supplied :param list(str) methods: List of HTTP Methods :param str path: Path off the base url :return: str of Path:Method """ for method in methods: yield self._route_key(method, path)
[ "def", "_generate_route_keys", "(", "self", ",", "methods", ",", "path", ")", ":", "for", "method", "in", "methods", ":", "yield", "self", ".", "_route_key", "(", "method", ",", "path", ")" ]
Generates the key to the _dict_of_routes based on the list of methods and path supplied :param list(str) methods: List of HTTP Methods :param str path: Path off the base url :return: str of Path:Method
[ "Generates", "the", "key", "to", "the", "_dict_of_routes", "based", "on", "the", "list", "of", "methods", "and", "path", "supplied" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/local_apigw_service.py#L92-L102
29,994
awslabs/aws-sam-cli
samcli/local/apigw/local_apigw_service.py
LocalApigwService._parse_lambda_output
def _parse_lambda_output(lambda_output, binary_types, flask_request): """ Parses the output from the Lambda Container :param str lambda_output: Output from Lambda Invoke :return: Tuple(int, dict, str, bool) """ json_output = json.loads(lambda_output) if not isinstance(json_output, dict): raise TypeError("Lambda returned %{s} instead of dict", type(json_output)) status_code = json_output.get("statusCode") or 200 headers = CaseInsensitiveDict(json_output.get("headers") or {}) body = json_output.get("body") or "no data" is_base_64_encoded = json_output.get("isBase64Encoded") or False try: status_code = int(status_code) if status_code <= 0: raise ValueError except ValueError: message = "statusCode must be a positive int" LOG.error(message) raise TypeError(message) # If the customer doesn't define Content-Type default to application/json if "Content-Type" not in headers: LOG.info("No Content-Type given. Defaulting to 'application/json'.") headers["Content-Type"] = "application/json" if LocalApigwService._should_base64_decode_body(binary_types, flask_request, headers, is_base_64_encoded): body = base64.b64decode(body) return status_code, headers, body
python
def _parse_lambda_output(lambda_output, binary_types, flask_request): """ Parses the output from the Lambda Container :param str lambda_output: Output from Lambda Invoke :return: Tuple(int, dict, str, bool) """ json_output = json.loads(lambda_output) if not isinstance(json_output, dict): raise TypeError("Lambda returned %{s} instead of dict", type(json_output)) status_code = json_output.get("statusCode") or 200 headers = CaseInsensitiveDict(json_output.get("headers") or {}) body = json_output.get("body") or "no data" is_base_64_encoded = json_output.get("isBase64Encoded") or False try: status_code = int(status_code) if status_code <= 0: raise ValueError except ValueError: message = "statusCode must be a positive int" LOG.error(message) raise TypeError(message) # If the customer doesn't define Content-Type default to application/json if "Content-Type" not in headers: LOG.info("No Content-Type given. Defaulting to 'application/json'.") headers["Content-Type"] = "application/json" if LocalApigwService._should_base64_decode_body(binary_types, flask_request, headers, is_base_64_encoded): body = base64.b64decode(body) return status_code, headers, body
[ "def", "_parse_lambda_output", "(", "lambda_output", ",", "binary_types", ",", "flask_request", ")", ":", "json_output", "=", "json", ".", "loads", "(", "lambda_output", ")", "if", "not", "isinstance", "(", "json_output", ",", "dict", ")", ":", "raise", "TypeE...
Parses the output from the Lambda Container :param str lambda_output: Output from Lambda Invoke :return: Tuple(int, dict, str, bool)
[ "Parses", "the", "output", "from", "the", "Lambda", "Container" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/local_apigw_service.py#L197-L231
29,995
awslabs/aws-sam-cli
samcli/local/apigw/local_apigw_service.py
LocalApigwService._should_base64_decode_body
def _should_base64_decode_body(binary_types, flask_request, lamba_response_headers, is_base_64_encoded): """ Whether or not the body should be decoded from Base64 to Binary Parameters ---------- binary_types list(basestring) Corresponds to self.binary_types (aka. what is parsed from SAM Template flask_request flask.request Flask request lamba_response_headers dict Headers Lambda returns is_base_64_encoded bool True if the body is Base64 encoded Returns ------- True if the body from the request should be converted to binary, otherwise false """ best_match_mimetype = flask_request.accept_mimetypes.best_match([lamba_response_headers["Content-Type"]]) is_best_match_in_binary_types = best_match_mimetype in binary_types or '*/*' in binary_types return best_match_mimetype and is_best_match_in_binary_types and is_base_64_encoded
python
def _should_base64_decode_body(binary_types, flask_request, lamba_response_headers, is_base_64_encoded): """ Whether or not the body should be decoded from Base64 to Binary Parameters ---------- binary_types list(basestring) Corresponds to self.binary_types (aka. what is parsed from SAM Template flask_request flask.request Flask request lamba_response_headers dict Headers Lambda returns is_base_64_encoded bool True if the body is Base64 encoded Returns ------- True if the body from the request should be converted to binary, otherwise false """ best_match_mimetype = flask_request.accept_mimetypes.best_match([lamba_response_headers["Content-Type"]]) is_best_match_in_binary_types = best_match_mimetype in binary_types or '*/*' in binary_types return best_match_mimetype and is_best_match_in_binary_types and is_base_64_encoded
[ "def", "_should_base64_decode_body", "(", "binary_types", ",", "flask_request", ",", "lamba_response_headers", ",", "is_base_64_encoded", ")", ":", "best_match_mimetype", "=", "flask_request", ".", "accept_mimetypes", ".", "best_match", "(", "[", "lamba_response_headers", ...
Whether or not the body should be decoded from Base64 to Binary Parameters ---------- binary_types list(basestring) Corresponds to self.binary_types (aka. what is parsed from SAM Template flask_request flask.request Flask request lamba_response_headers dict Headers Lambda returns is_base_64_encoded bool True if the body is Base64 encoded Returns ------- True if the body from the request should be converted to binary, otherwise false
[ "Whether", "or", "not", "the", "body", "should", "be", "decoded", "from", "Base64", "to", "Binary" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/local_apigw_service.py#L234-L257
29,996
awslabs/aws-sam-cli
samcli/local/apigw/local_apigw_service.py
LocalApigwService._construct_event
def _construct_event(flask_request, port, binary_types): """ Helper method that constructs the Event to be passed to Lambda :param request flask_request: Flask Request :return: String representing the event """ identity = ContextIdentity(source_ip=flask_request.remote_addr) endpoint = PathConverter.convert_path_to_api_gateway(flask_request.endpoint) method = flask_request.method request_data = flask_request.get_data() request_mimetype = flask_request.mimetype is_base_64 = LocalApigwService._should_base64_encode(binary_types, request_mimetype) if is_base_64: LOG.debug("Incoming Request seems to be binary. Base64 encoding the request data before sending to Lambda.") request_data = base64.b64encode(request_data) if request_data: # Flask does not parse/decode the request data. We should do it ourselves request_data = request_data.decode('utf-8') context = RequestContext(resource_path=endpoint, http_method=method, stage="prod", identity=identity, path=endpoint) event_headers = dict(flask_request.headers) event_headers["X-Forwarded-Proto"] = flask_request.scheme event_headers["X-Forwarded-Port"] = str(port) # APIGW does not support duplicate query parameters. Flask gives query params as a list so # we need to convert only grab the first item unless many were given, were we grab the last to be consistent # with APIGW query_string_dict = LocalApigwService._query_string_params(flask_request) event = ApiGatewayLambdaEvent(http_method=method, body=request_data, resource=endpoint, request_context=context, query_string_params=query_string_dict, headers=event_headers, path_parameters=flask_request.view_args, path=flask_request.path, is_base_64_encoded=is_base_64) event_str = json.dumps(event.to_dict()) LOG.debug("Constructed String representation of Event to invoke Lambda. Event: %s", event_str) return event_str
python
def _construct_event(flask_request, port, binary_types): """ Helper method that constructs the Event to be passed to Lambda :param request flask_request: Flask Request :return: String representing the event """ identity = ContextIdentity(source_ip=flask_request.remote_addr) endpoint = PathConverter.convert_path_to_api_gateway(flask_request.endpoint) method = flask_request.method request_data = flask_request.get_data() request_mimetype = flask_request.mimetype is_base_64 = LocalApigwService._should_base64_encode(binary_types, request_mimetype) if is_base_64: LOG.debug("Incoming Request seems to be binary. Base64 encoding the request data before sending to Lambda.") request_data = base64.b64encode(request_data) if request_data: # Flask does not parse/decode the request data. We should do it ourselves request_data = request_data.decode('utf-8') context = RequestContext(resource_path=endpoint, http_method=method, stage="prod", identity=identity, path=endpoint) event_headers = dict(flask_request.headers) event_headers["X-Forwarded-Proto"] = flask_request.scheme event_headers["X-Forwarded-Port"] = str(port) # APIGW does not support duplicate query parameters. Flask gives query params as a list so # we need to convert only grab the first item unless many were given, were we grab the last to be consistent # with APIGW query_string_dict = LocalApigwService._query_string_params(flask_request) event = ApiGatewayLambdaEvent(http_method=method, body=request_data, resource=endpoint, request_context=context, query_string_params=query_string_dict, headers=event_headers, path_parameters=flask_request.view_args, path=flask_request.path, is_base_64_encoded=is_base_64) event_str = json.dumps(event.to_dict()) LOG.debug("Constructed String representation of Event to invoke Lambda. Event: %s", event_str) return event_str
[ "def", "_construct_event", "(", "flask_request", ",", "port", ",", "binary_types", ")", ":", "identity", "=", "ContextIdentity", "(", "source_ip", "=", "flask_request", ".", "remote_addr", ")", "endpoint", "=", "PathConverter", ".", "convert_path_to_api_gateway", "(...
Helper method that constructs the Event to be passed to Lambda :param request flask_request: Flask Request :return: String representing the event
[ "Helper", "method", "that", "constructs", "the", "Event", "to", "be", "passed", "to", "Lambda" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/local_apigw_service.py#L260-L314
29,997
awslabs/aws-sam-cli
samcli/local/apigw/local_apigw_service.py
LocalApigwService._query_string_params
def _query_string_params(flask_request): """ Constructs an APIGW equivalent query string dictionary Parameters ---------- flask_request request Request from Flask Returns dict (str: str) ------- Empty dict if no query params where in the request otherwise returns a dictionary of key to value """ query_string_dict = {} # Flask returns an ImmutableMultiDict so convert to a dictionary that becomes # a dict(str: list) then iterate over for query_string_key, query_string_list in flask_request.args.lists(): query_string_value_length = len(query_string_list) # if the list is empty, default to empty string if not query_string_value_length: query_string_dict[query_string_key] = "" else: # APIGW doesn't handle duplicate query string keys, picking the last one in the list query_string_dict[query_string_key] = query_string_list[-1] return query_string_dict
python
def _query_string_params(flask_request): """ Constructs an APIGW equivalent query string dictionary Parameters ---------- flask_request request Request from Flask Returns dict (str: str) ------- Empty dict if no query params where in the request otherwise returns a dictionary of key to value """ query_string_dict = {} # Flask returns an ImmutableMultiDict so convert to a dictionary that becomes # a dict(str: list) then iterate over for query_string_key, query_string_list in flask_request.args.lists(): query_string_value_length = len(query_string_list) # if the list is empty, default to empty string if not query_string_value_length: query_string_dict[query_string_key] = "" else: # APIGW doesn't handle duplicate query string keys, picking the last one in the list query_string_dict[query_string_key] = query_string_list[-1] return query_string_dict
[ "def", "_query_string_params", "(", "flask_request", ")", ":", "query_string_dict", "=", "{", "}", "# Flask returns an ImmutableMultiDict so convert to a dictionary that becomes", "# a dict(str: list) then iterate over", "for", "query_string_key", ",", "query_string_list", "in", "f...
Constructs an APIGW equivalent query string dictionary Parameters ---------- flask_request request Request from Flask Returns dict (str: str) ------- Empty dict if no query params where in the request otherwise returns a dictionary of key to value
[ "Constructs", "an", "APIGW", "equivalent", "query", "string", "dictionary" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/local_apigw_service.py#L317-L345
29,998
awslabs/aws-sam-cli
samcli/commands/local/lib/local_lambda.py
LocalLambdaRunner.invoke
def invoke(self, function_name, event, stdout=None, stderr=None): """ Find the Lambda function with given name and invoke it. Pass the given event to the function and return response through the given streams. This function will block until either the function completes or times out. Parameters ---------- function_name str Name of the Lambda function to invoke event str Event data passed to the function. Must be a valid JSON String. stdout samcli.lib.utils.stream_writer.StreamWriter Stream writer to write the output of the Lambda function to. stderr samcli.lib.utils.stream_writer.StreamWriter Stream writer to write the Lambda runtime logs to. Raises ------ FunctionNotfound When we cannot find a function with the given name """ # Generate the correct configuration based on given inputs function = self.provider.get(function_name) if not function: all_functions = [f.name for f in self.provider.get_all()] available_function_message = "{} not found. Possible options in your template: {}"\ .format(function_name, all_functions) LOG.info(available_function_message) raise FunctionNotFound("Unable to find a Function with name '%s'", function_name) LOG.debug("Found one Lambda function with name '%s'", function_name) LOG.info("Invoking %s (%s)", function.handler, function.runtime) config = self._get_invoke_config(function) # Invoke the function self.local_runtime.invoke(config, event, debug_context=self.debug_context, stdout=stdout, stderr=stderr)
python
def invoke(self, function_name, event, stdout=None, stderr=None): """ Find the Lambda function with given name and invoke it. Pass the given event to the function and return response through the given streams. This function will block until either the function completes or times out. Parameters ---------- function_name str Name of the Lambda function to invoke event str Event data passed to the function. Must be a valid JSON String. stdout samcli.lib.utils.stream_writer.StreamWriter Stream writer to write the output of the Lambda function to. stderr samcli.lib.utils.stream_writer.StreamWriter Stream writer to write the Lambda runtime logs to. Raises ------ FunctionNotfound When we cannot find a function with the given name """ # Generate the correct configuration based on given inputs function = self.provider.get(function_name) if not function: all_functions = [f.name for f in self.provider.get_all()] available_function_message = "{} not found. Possible options in your template: {}"\ .format(function_name, all_functions) LOG.info(available_function_message) raise FunctionNotFound("Unable to find a Function with name '%s'", function_name) LOG.debug("Found one Lambda function with name '%s'", function_name) LOG.info("Invoking %s (%s)", function.handler, function.runtime) config = self._get_invoke_config(function) # Invoke the function self.local_runtime.invoke(config, event, debug_context=self.debug_context, stdout=stdout, stderr=stderr)
[ "def", "invoke", "(", "self", ",", "function_name", ",", "event", ",", "stdout", "=", "None", ",", "stderr", "=", "None", ")", ":", "# Generate the correct configuration based on given inputs", "function", "=", "self", ".", "provider", ".", "get", "(", "function...
Find the Lambda function with given name and invoke it. Pass the given event to the function and return response through the given streams. This function will block until either the function completes or times out. Parameters ---------- function_name str Name of the Lambda function to invoke event str Event data passed to the function. Must be a valid JSON String. stdout samcli.lib.utils.stream_writer.StreamWriter Stream writer to write the output of the Lambda function to. stderr samcli.lib.utils.stream_writer.StreamWriter Stream writer to write the Lambda runtime logs to. Raises ------ FunctionNotfound When we cannot find a function with the given name
[ "Find", "the", "Lambda", "function", "with", "given", "name", "and", "invoke", "it", ".", "Pass", "the", "given", "event", "to", "the", "function", "and", "return", "response", "through", "the", "given", "streams", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/local_lambda.py#L49-L89
29,999
awslabs/aws-sam-cli
samcli/commands/local/lib/local_lambda.py
LocalLambdaRunner._get_invoke_config
def _get_invoke_config(self, function): """ Returns invoke configuration to pass to Lambda Runtime to invoke the given function :param samcli.commands.local.lib.provider.Function function: Lambda function to generate the configuration for :return samcli.local.lambdafn.config.FunctionConfig: Function configuration to pass to Lambda runtime """ env_vars = self._make_env_vars(function) code_abs_path = resolve_code_path(self.cwd, function.codeuri) LOG.debug("Resolved absolute path to code is %s", code_abs_path) function_timeout = function.timeout # The Runtime container handles timeout inside the container. When debugging with short timeouts, this can # cause the container execution to stop. When in debug mode, we set the timeout in the container to a max 10 # hours. This will ensure the container doesn't unexpectedly stop while debugging function code if self.is_debugging(): function_timeout = self.MAX_DEBUG_TIMEOUT return FunctionConfig(name=function.name, runtime=function.runtime, handler=function.handler, code_abs_path=code_abs_path, layers=function.layers, memory=function.memory, timeout=function_timeout, env_vars=env_vars)
python
def _get_invoke_config(self, function): """ Returns invoke configuration to pass to Lambda Runtime to invoke the given function :param samcli.commands.local.lib.provider.Function function: Lambda function to generate the configuration for :return samcli.local.lambdafn.config.FunctionConfig: Function configuration to pass to Lambda runtime """ env_vars = self._make_env_vars(function) code_abs_path = resolve_code_path(self.cwd, function.codeuri) LOG.debug("Resolved absolute path to code is %s", code_abs_path) function_timeout = function.timeout # The Runtime container handles timeout inside the container. When debugging with short timeouts, this can # cause the container execution to stop. When in debug mode, we set the timeout in the container to a max 10 # hours. This will ensure the container doesn't unexpectedly stop while debugging function code if self.is_debugging(): function_timeout = self.MAX_DEBUG_TIMEOUT return FunctionConfig(name=function.name, runtime=function.runtime, handler=function.handler, code_abs_path=code_abs_path, layers=function.layers, memory=function.memory, timeout=function_timeout, env_vars=env_vars)
[ "def", "_get_invoke_config", "(", "self", ",", "function", ")", ":", "env_vars", "=", "self", ".", "_make_env_vars", "(", "function", ")", "code_abs_path", "=", "resolve_code_path", "(", "self", ".", "cwd", ",", "function", ".", "codeuri", ")", "LOG", ".", ...
Returns invoke configuration to pass to Lambda Runtime to invoke the given function :param samcli.commands.local.lib.provider.Function function: Lambda function to generate the configuration for :return samcli.local.lambdafn.config.FunctionConfig: Function configuration to pass to Lambda runtime
[ "Returns", "invoke", "configuration", "to", "pass", "to", "Lambda", "Runtime", "to", "invoke", "the", "given", "function" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/local_lambda.py#L103-L131