_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q32300
crypto_core_ed25519_add
train
def crypto_core_ed25519_add(p, q):
    """
    Add two points on the edwards25519 curve.

    :param p: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence
              representing a point on the edwards25519 curve
    :type p: bytes
    :param q: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence
              representing a point on the edwards25519 curve
    :type q: bytes
    :return: a point on the edwards25519 curve represented as a
             :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence
    :rtype: bytes
    """
    # Validate both operands up front; a single combined check keeps the
    # error message identical for either bad argument.
    well_formed = (isinstance(p, bytes) and
                   isinstance(q, bytes) and
                   len(p) == crypto_core_ed25519_BYTES and
                   len(q) == crypto_core_ed25519_BYTES)
    ensure(well_formed,
           'Each point must be a {} long bytes sequence'.format(
               'crypto_core_ed25519_BYTES'),
           raising=exc.TypeError)

    result = ffi.new("unsigned char[]", crypto_core_ed25519_BYTES)
    status = lib.crypto_core_ed25519_add(result, p, q)
    ensure(status == 0, 'Unexpected library error', raising=exc.RuntimeError)

    return ffi.buffer(result, crypto_core_ed25519_BYTES)[:]
python
{ "resource": "" }
q32301
crypto_box_keypair
train
def crypto_box_keypair():
    """
    Return a randomly generated public and secret key.

    :rtype: (bytes(public_key), bytes(secret_key))
    """
    public_key = ffi.new("unsigned char[]", crypto_box_PUBLICKEYBYTES)
    secret_key = ffi.new("unsigned char[]", crypto_box_SECRETKEYBYTES)

    status = lib.crypto_box_keypair(public_key, secret_key)
    ensure(status == 0, 'Unexpected library error', raising=exc.RuntimeError)

    return (ffi.buffer(public_key, crypto_box_PUBLICKEYBYTES)[:],
            ffi.buffer(secret_key, crypto_box_SECRETKEYBYTES)[:])
python
{ "resource": "" }
q32302
crypto_box
train
def crypto_box(message, nonce, pk, sk):
    """
    Encrypt and return the message ``message`` using the secret key ``sk``,
    the public key ``pk``, and the nonce ``nonce``.

    :param message: bytes
    :param nonce: bytes
    :param pk: bytes
    :param sk: bytes
    :rtype: bytes
    """
    if len(nonce) != crypto_box_NONCEBYTES:
        raise exc.ValueError("Invalid nonce size")
    if len(pk) != crypto_box_PUBLICKEYBYTES:
        raise exc.ValueError("Invalid public key")
    if len(sk) != crypto_box_SECRETKEYBYTES:
        raise exc.ValueError("Invalid secret key")

    # The classic NaCl API wants the plaintext prefixed with ZEROBYTES zeros.
    zero_padded = b"\x00" * crypto_box_ZEROBYTES + message
    out = ffi.new("unsigned char[]", len(zero_padded))

    status = lib.crypto_box(out, zero_padded, len(zero_padded), nonce, pk, sk)
    ensure(status == 0, 'Unexpected library error', raising=exc.RuntimeError)

    # The C API leaves BOXZEROBYTES of zeros at the front; strip them.
    return ffi.buffer(out, len(zero_padded))[crypto_box_BOXZEROBYTES:]
python
{ "resource": "" }
q32303
crypto_box_open
train
def crypto_box_open(ciphertext, nonce, pk, sk):
    """
    Decrypt and return the encrypted message ``ciphertext``, using the
    secret key ``sk``, the public key ``pk``, and the nonce ``nonce``.

    :param ciphertext: bytes
    :param nonce: bytes
    :param pk: bytes
    :param sk: bytes
    :rtype: bytes
    """
    if len(nonce) != crypto_box_NONCEBYTES:
        raise exc.ValueError("Invalid nonce size")
    if len(pk) != crypto_box_PUBLICKEYBYTES:
        raise exc.ValueError("Invalid public key")
    if len(sk) != crypto_box_SECRETKEYBYTES:
        raise exc.ValueError("Invalid secret key")

    # The classic NaCl API wants the ciphertext prefixed with BOXZEROBYTES
    # zeros.
    zero_padded = b"\x00" * crypto_box_BOXZEROBYTES + ciphertext
    out = ffi.new("unsigned char[]", len(zero_padded))

    status = lib.crypto_box_open(out, zero_padded, len(zero_padded), nonce,
                                 pk, sk)
    ensure(status == 0,
           "An error occurred trying to decrypt the message",
           raising=exc.CryptoError)

    # The C API leaves ZEROBYTES of zeros at the front; strip them.
    return ffi.buffer(out, len(zero_padded))[crypto_box_ZEROBYTES:]
python
{ "resource": "" }
q32304
crypto_box_beforenm
train
def crypto_box_beforenm(pk, sk):
    """
    Compute and return the shared key for the public key ``pk`` and the
    secret key ``sk``.  This can be used to speed up operations where the
    same set of keys is going to be used multiple times.

    :param pk: bytes
    :param sk: bytes
    :rtype: bytes
    """
    if len(pk) != crypto_box_PUBLICKEYBYTES:
        raise exc.ValueError("Invalid public key")
    if len(sk) != crypto_box_SECRETKEYBYTES:
        raise exc.ValueError("Invalid secret key")

    shared = ffi.new("unsigned char[]", crypto_box_BEFORENMBYTES)

    status = lib.crypto_box_beforenm(shared, pk, sk)
    ensure(status == 0, 'Unexpected library error', raising=exc.RuntimeError)

    return ffi.buffer(shared, crypto_box_BEFORENMBYTES)[:]
python
{ "resource": "" }
q32305
crypto_box_afternm
train
def crypto_box_afternm(message, nonce, k):
    """
    Encrypt and return the message ``message`` using the shared key ``k``
    and the nonce ``nonce``.

    :param message: bytes
    :param nonce: bytes
    :param k: bytes
    :rtype: bytes
    """
    if len(nonce) != crypto_box_NONCEBYTES:
        raise exc.ValueError("Invalid nonce")
    if len(k) != crypto_box_BEFORENMBYTES:
        raise exc.ValueError("Invalid shared key")

    # The classic NaCl API wants the plaintext prefixed with ZEROBYTES zeros.
    zero_padded = b"\x00" * crypto_box_ZEROBYTES + message
    out = ffi.new("unsigned char[]", len(zero_padded))

    status = lib.crypto_box_afternm(out, zero_padded, len(zero_padded),
                                    nonce, k)
    ensure(status == 0, 'Unexpected library error', raising=exc.RuntimeError)

    # The C API leaves BOXZEROBYTES of zeros at the front; strip them.
    return ffi.buffer(out, len(zero_padded))[crypto_box_BOXZEROBYTES:]
python
{ "resource": "" }
q32306
crypto_box_open_afternm
train
def crypto_box_open_afternm(ciphertext, nonce, k):
    """
    Decrypt and return the encrypted message ``ciphertext``, using the
    shared key ``k`` and the nonce ``nonce``.

    :param ciphertext: bytes
    :param nonce: bytes
    :param k: bytes
    :rtype: bytes
    """
    if len(nonce) != crypto_box_NONCEBYTES:
        raise exc.ValueError("Invalid nonce")
    if len(k) != crypto_box_BEFORENMBYTES:
        raise exc.ValueError("Invalid shared key")

    # The classic NaCl API wants the ciphertext prefixed with BOXZEROBYTES
    # zeros.
    zero_padded = b"\x00" * crypto_box_BOXZEROBYTES + ciphertext
    out = ffi.new("unsigned char[]", len(zero_padded))

    status = lib.crypto_box_open_afternm(out, zero_padded, len(zero_padded),
                                         nonce, k)
    ensure(status == 0,
           "An error occurred trying to decrypt the message",
           raising=exc.CryptoError)

    # The C API leaves ZEROBYTES of zeros at the front; strip them.
    return ffi.buffer(out, len(zero_padded))[crypto_box_ZEROBYTES:]
python
{ "resource": "" }
q32307
crypto_box_seal
train
def crypto_box_seal(message, pk):
    """
    Encrypts and returns a message ``message`` using an ephemeral secret key
    and the public key ``pk``.
    The ephemeral public key, which is embedded in the sealed box, is also
    used, in combination with ``pk``, to derive the nonce needed for the
    underlying box construct.

    :param message: bytes
    :param pk: bytes
    :rtype: bytes

    .. versionadded:: 1.2
    """
    # Raise exc.TypeError (a subclass of the builtin TypeError) for
    # consistency with the other bindings in this module; callers catching
    # the builtin TypeError still work.
    ensure(isinstance(message, bytes),
           "input message must be bytes",
           raising=exc.TypeError)
    ensure(isinstance(pk, bytes),
           "public key must be bytes",
           raising=exc.TypeError)

    if len(pk) != crypto_box_PUBLICKEYBYTES:
        raise exc.ValueError("Invalid public key")

    _mlen = len(message)
    # The sealed box prepends SEALBYTES of overhead (ephemeral public key
    # plus MAC) to the ciphertext.
    _clen = crypto_box_SEALBYTES + _mlen

    ciphertext = ffi.new("unsigned char[]", _clen)

    rc = lib.crypto_box_seal(ciphertext, message, _mlen, pk)
    ensure(rc == 0, 'Unexpected library error', raising=exc.RuntimeError)

    return ffi.buffer(ciphertext, _clen)[:]
python
{ "resource": "" }
q32308
crypto_box_seal_open
train
def crypto_box_seal_open(ciphertext, pk, sk):
    """
    Decrypts and returns an encrypted message ``ciphertext``, using the
    recipient's secret key ``sk`` and the sender's ephemeral public key
    embedded in the sealed box. The box construct nonce is derived from
    the recipient's public key ``pk`` and the sender's public key.

    :param ciphertext: bytes
    :param pk: bytes
    :param sk: bytes
    :rtype: bytes

    .. versionadded:: 1.2
    """
    # Raise exc.TypeError (a subclass of the builtin TypeError) for
    # consistency with the other bindings in this module.
    ensure(isinstance(ciphertext, bytes),
           "input ciphertext must be bytes",
           raising=exc.TypeError)
    ensure(isinstance(pk, bytes),
           "public key must be bytes",
           raising=exc.TypeError)
    ensure(isinstance(sk, bytes),
           "secret key must be bytes",
           raising=exc.TypeError)

    if len(pk) != crypto_box_PUBLICKEYBYTES:
        raise exc.ValueError("Invalid public key")
    if len(sk) != crypto_box_SECRETKEYBYTES:
        raise exc.ValueError("Invalid secret key")

    _clen = len(ciphertext)
    ensure(_clen >= crypto_box_SEALBYTES,
           ("Input cyphertext must be "
            "at least {} long").format(crypto_box_SEALBYTES),
           raising=exc.TypeError)

    _mlen = _clen - crypto_box_SEALBYTES

    # zero-length malloc results are implementation-dependent, so always
    # allocate at least one byte even for an empty plaintext.
    plaintext = ffi.new("unsigned char[]", max(1, _mlen))

    res = lib.crypto_box_seal_open(plaintext, ciphertext, _clen, pk, sk)
    ensure(res == 0, "An error occurred trying to decrypt the message",
           raising=exc.CryptoError)

    return ffi.buffer(plaintext, _mlen)[:]
python
{ "resource": "" }
q32309
sodium_memcmp
train
def sodium_memcmp(inp1, inp2):
    """
    Compare contents of two memory regions in constant time.

    :param inp1: bytes
    :param inp2: bytes
    :return: True iff the two sequences have equal length and contents
    :rtype: bool
    """
    ensure(isinstance(inp1, bytes), raising=exc.TypeError)
    ensure(isinstance(inp2, bytes), raising=exc.TypeError)

    ln = max(len(inp1), len(inp2))

    # Copy both inputs into zero-padded buffers of identical length so the
    # C comparison always scans `ln` bytes, whichever input is shorter.
    buf1 = ffi.new("char []", ln)
    buf2 = ffi.new("char []", ln)

    ffi.memmove(buf1, inp1, len(inp1))
    ffi.memmove(buf2, inp2, len(inp2))

    # Both the length check and the full comparison are computed before
    # being combined, rather than short-circuiting on the length alone.
    eqL = len(inp1) == len(inp2)
    eqC = lib.sodium_memcmp(buf1, buf2, ln) == 0

    return eqL and eqC
python
{ "resource": "" }
q32310
sodium_increment
train
def sodium_increment(inp):
    """
    Increment the value of a byte-sequence interpreted as the little-endian
    representation of a unsigned big integer.

    :param inp: input bytes buffer
    :type inp: bytes
    :return: a byte-sequence representing, as a little-endian unsigned big
             integer, the value ``to_int(inp)`` incremented by one.
    :rtype: bytes
    """
    ensure(isinstance(inp, bytes), raising=exc.TypeError)

    length = len(inp)
    # Work on a mutable scratch copy; the input bytes object is immutable.
    scratch = ffi.new("unsigned char []", length)
    ffi.memmove(scratch, inp, length)

    lib.sodium_increment(scratch, length)

    return ffi.buffer(scratch, length)[:]
python
{ "resource": "" }
q32311
Definition.error_lineno
train
def error_lineno(self):
    """Get the line number with which to report violations."""
    # A parsed docstring reports at its own first line; otherwise fall
    # back to the definition's first line.
    docstring = self.docstring
    return docstring.start if isinstance(docstring, Docstring) else self.start
python
{ "resource": "" }
q32312
Definition.source
train
def source(self):
    """Return the source code for the definition."""
    full_src = self._source[self._slice]

    def _skippable(line):
        # Blank lines and comment-only lines carry no definition code.
        stripped = line.strip()
        return not stripped or stripped.startswith('#')

    # Trim trailing blank/comment lines by dropping them from the reversed
    # sequence, then restore the original order.
    trimmed = reversed(list(dropwhile(_skippable, reversed(full_src))))
    return ''.join(trimmed)
python
{ "resource": "" }
q32313
Function.is_public
train
def is_public(self):
    """Return True iff this function should be considered public."""
    if self.dunder_all is None:
        # No __all__ declared: fall back to the naming convention.
        return not self.name.startswith('_')
    return self.name in self.dunder_all
python
{ "resource": "" }
q32314
Parser.parse
train
def parse(self, filelike, filename):
    """Parse the given file-like object and return its Module object."""
    self.log = log
    self.source = filelike.readlines()
    src = ''.join(self.source)

    # Reject files that are not valid Python before tokenizing them.
    try:
        compile(src, filename, 'exec')
    except SyntaxError as error:
        raise ParseError() from error

    self.stream = TokenStream(StringIO(src))
    self.filename = filename

    # Reset all per-file parsing state.
    self.dunder_all = self.dunder_all_error = None
    self.future_imports = set()
    self._accumulated_decorators = []

    return self.parse_module()
python
{ "resource": "" }
q32315
Parser.consume
train
def consume(self, kind):
    """Consume one token and verify it is of the expected kind."""
    token = self.stream.move()
    if token.kind == kind:
        return
    raise UnexpectedTokenError(token=token, expected_kind=kind)
python
{ "resource": "" }
q32316
Parser.parse_definition
train
def parse_definition(self, class_):
    """Parse a definition and return its value in a `class_` object."""
    start = self.line
    # After consuming one NAME token, `current` holds the definition name.
    self.consume(tk.NAME)
    name = self.current.value
    self.log.debug("parsing %s '%s'", class_.__name__, name)
    self.stream.move()
    if self.current.kind == tk.OP and self.current.value == '(':
        # Skip the (possibly nested) parameter list up to the matching ')'.
        parenthesis_level = 0
        while True:
            if self.current.kind == tk.OP:
                if self.current.value == '(':
                    parenthesis_level += 1
                elif self.current.value == ')':
                    parenthesis_level -= 1
                    if parenthesis_level == 0:
                        break
            self.stream.move()
    if self.current.kind != tk.OP or self.current.value != ':':
        # Tokens may follow the ')' (e.g. a return annotation); jump to ':'.
        self.leapfrog(tk.OP, value=":")
    else:
        self.consume(tk.OP)
    if self.current.kind in (tk.NEWLINE, tk.COMMENT):
        # Multi-line definition: the body starts on the next indented line.
        skipped_error_codes = self.parse_skip_comment()
        self.leapfrog(tk.INDENT)
        assert self.current.kind != tk.INDENT
        docstring = self.parse_docstring()
        # Decorators were collected before this definition was reached;
        # claim them and reset the accumulator for the next definition.
        decorators = self._accumulated_decorators
        self.log.debug("current accumulated decorators: %s", decorators)
        self._accumulated_decorators = []
        self.log.debug("parsing nested definitions.")
        children = list(self.parse_definitions(class_))
        self.log.debug("finished parsing nested definitions for '%s'", name)
        end = self.line - 1
    else:  # one-liner definition
        skipped_error_codes = ''
        docstring = self.parse_docstring()
        decorators = []  # TODO
        children = []
        end = self.line
        self.leapfrog(tk.NEWLINE)
    definition = class_(name, self.source, start, end,
                        decorators, docstring, children, None,
                        skipped_error_codes)
    # Wire up back-references so children can reach their parent scope.
    for child in definition.children:
        child.parent = definition
    self.log.debug("finished parsing %s '%s'. Next token is %r",
                   class_.__name__, name, self.current)
    return definition
python
{ "resource": "" }
q32317
Parser.parse_skip_comment
train
def parse_skip_comment(self):
    """Parse a definition comment for noqa skips."""
    skipped_error_codes = ''
    if self.current.kind != tk.COMMENT:
        return skipped_error_codes

    comment = self.current.value
    if 'noqa: ' in comment:
        # Explicit code list: everything after the first 'noqa: '.
        skipped_error_codes = ''.join(comment.split('noqa: ')[1:])
    elif comment.startswith('# noqa'):
        # A bare noqa skips every check.
        skipped_error_codes = 'all'
    return skipped_error_codes
python
{ "resource": "" }
q32318
Parser._parse_from_import_source
train
def _parse_from_import_source(self):
    """Parse the 'from x import' part in a 'from x import y' statement.

    Return true iff `x` is __future__.
    """
    assert self.current.value == 'from', self.current.value
    self.stream.move()
    # The token right after 'from' is the first component of the module path.
    is_future_import = self.current.value == '__future__'
    self.stream.move()
    # Skip the rest of the (possibly dotted) module path until the 'import'
    # keyword or a token that cannot be part of the path.
    while (self.current is not None and
           self.current.kind in (tk.DOT, tk.NAME, tk.OP) and
           self.current.value != 'import'):
        self.stream.move()
    if self.current is None or self.current.value != 'import':
        # Not a well-formed from-import; report it as not-__future__.
        return False
    self.check_current(value='import')
    assert self.current.value == 'import', self.current.value
    self.stream.move()
    return is_future_import
python
{ "resource": "" }
q32319
setup_stream_handlers
train
def setup_stream_handlers(conf):
    """Setup logging stream handlers according to the options."""
    class StdoutFilter(logging.Filter):
        # Only DEBUG/INFO go to stdout; warnings and errors go to stderr.
        def filter(self, record):
            return record.levelno in (logging.DEBUG, logging.INFO)

    log.handlers = []

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.addFilter(StdoutFilter())
    # Every branch assigns a level, so no initial setLevel is needed
    # (the previous unconditional WARNING assignment was dead code).
    if conf.debug:
        stdout_handler.setLevel(logging.DEBUG)
    elif conf.verbose:
        stdout_handler.setLevel(logging.INFO)
    else:
        # WARNING combined with StdoutFilter (DEBUG/INFO only) keeps stdout
        # silent unless --verbose/--debug was given.
        stdout_handler.setLevel(logging.WARNING)
    log.addHandler(stdout_handler)

    stderr_handler = logging.StreamHandler(sys.stderr)
    msg_format = "%(levelname)s: %(message)s"
    stderr_handler.setFormatter(logging.Formatter(fmt=msg_format))
    stderr_handler.setLevel(logging.WARNING)
    log.addHandler(stderr_handler)
python
{ "resource": "" }
q32320
check
train
def check(filenames, select=None, ignore=None, ignore_decorators=None):
    """Generate docstring errors that exist in `filenames` iterable.

    By default, the PEP-257 convention is checked. To specifically define the
    set of error codes to check for, supply either `select` or `ignore` (but
    not both). In either case, the parameter should be a collection of error
    code strings, e.g., {'D100', 'D404'}.

    When supplying `select`, only specified error codes will be reported.
    When supplying `ignore`, all error codes which were not specified will be
    reported.

    Note that ignored error code refer to the entire set of possible
    error codes, which is larger than just the PEP-257 convention. To your
    convenience, you may use `pydocstyle.violations.conventions.pep257` as
    a base set to add or remove errors from.

    Examples
    --------
    >>> check(['pydocstyle.py'])
    <generator object check at 0x...>

    >>> check(['pydocstyle.py'], select=['D100'])
    <generator object check at 0x...>

    >>> check(['pydocstyle.py'], ignore=conventions.pep257 - {'D100'})
    <generator object check at 0x...>

    """
    if select is not None and ignore is not None:
        raise IllegalConfiguration('Cannot pass both select and ignore. '
                                  'They are mutually exclusive.')
    elif select is not None:
        checked_codes = select
    elif ignore is not None:
        # Everything registered, minus the explicitly ignored codes.
        checked_codes = list(set(violations.ErrorRegistry.get_error_codes()) -
                             set(ignore))
    else:
        checked_codes = violations.conventions.pep257

    for filename in filenames:
        log.info('Checking file %s.', filename)
        try:
            # tk.open honors the source file's encoding declaration.
            with tk.open(filename) as file:
                source = file.read()
            for error in ConventionChecker().check_source(source, filename,
                                                          ignore_decorators):
                code = getattr(error, 'code', None)
                if code in checked_codes:
                    yield error
        except (EnvironmentError, AllError, ParseError) as error:
            # File-level failures are surfaced as a single yielded error so
            # the caller can keep iterating over the remaining files.
            log.warning('Error in file %s: %s', filename, error)
            yield error
        except tk.TokenError:
            yield SyntaxError('invalid syntax in file %s' % filename)
python
{ "resource": "" }
q32321
ConventionChecker._get_docstring_indent
train
def _get_docstring_indent(definition, docstring): """Return the indentation of the docstring's opening quotes.""" before_docstring, _, _ = definition.source.partition(docstring) _, _, indent = before_docstring.rpartition('\n') return indent
python
{ "resource": "" }
q32322
ConventionChecker._get_leading_words
train
def _get_leading_words(line): """Return any leading set of words from `line`. For example, if `line` is " Hello world!!!", returns "Hello world". """ result = re("[\w ]+").match(line.strip()) if result is not None: return result.group()
python
{ "resource": "" }
q32323
ConventionChecker._is_a_docstring_section
train
def _is_a_docstring_section(context):
    """Check if the suspected context is really a section header.

    Lets have a look at the following example docstring:
        '''Title.

        Some part of the docstring that specifies what the function
        returns. <----- Not a real section name. It has a suffix and the
                        previous line is not empty and does not end with
                        a punctuation sign.

        This is another line in the docstring. It describes stuff,
        but we forgot to add a blank line between it and the section name.
        Parameters  <-- A real section name. The previous line ends with
        ----------      a period, therefore it is in a new
                        grammatical context.
        param : int
        examples : list  <------- Not a section - previous line doesn't end
            A list of examples.   with punctuation.
        notes : list  <---------- Not a section - there's text after the
            A list of notes.      colon.

        Notes:  <--- Suspected as a context because there's a suffix to the
        -----        section, but it's a colon so it's probably a mistake.
        Bla.

        '''

    To make sure this is really a section we check these conditions:
      * There's no suffix to the section name or it's just a colon AND
      * The previous line is empty OR it ends with punctuation.

    If one of the conditions is true, we will consider the line as
    a section name.
    """
    # NOTE(review): str.lstrip treats its argument as a *set of characters*,
    # not a prefix, so a line sharing letters with the section name may be
    # stripped further than intended — confirm before changing this.
    section_name_suffix = \
        context.line.strip().lstrip(context.section_name.strip()).strip()

    section_suffix_is_only_colon = section_name_suffix == ':'

    punctuation = [',', ';', '.', '-', '\\', '/', ']', '}', ')']
    prev_line_ends_with_punctuation = \
        any(context.previous_line.strip().endswith(x) for x in punctuation)

    this_line_looks_like_a_section_name = \
        is_blank(section_name_suffix) or section_suffix_is_only_colon

    prev_line_looks_like_end_of_paragraph = \
        prev_line_ends_with_punctuation or is_blank(context.previous_line)

    return (this_line_looks_like_a_section_name and
            prev_line_looks_like_end_of_paragraph)
python
{ "resource": "" }
q32324
Error.set_context
train
def set_context(self, definition: Definition, explanation: str) -> None:
    """Set the source code context for this error."""
    self.definition, self.explanation = definition, explanation
python
{ "resource": "" }
q32325
Error.message
train
def message(self) -> str:
    """Return the message to print to the user."""
    text = '{}: {}'.format(self.code, self.short_desc)
    if self.context is not None:
        # Append the context-specific detail, e.g. the offending name.
        text += ' ({})'.format(self.context.format(*self.parameters))
    return text
python
{ "resource": "" }
q32326
Error.lines
train
def lines(self) -> str:
    """Return the source code lines for this error."""
    if self.definition is None:
        return ''

    # Drop trailing blank lines while keeping the original order.
    src_lines = self.definition.source
    shown = list(reversed(list(dropwhile(is_blank, reversed(src_lines)))))

    first = self.definition.start  # type: ignore
    width = len(str(first + len(shown)))
    fmt = '{{:{}}}:{{}}'.format(width)

    parts = []
    for idx, text in enumerate(shown):
        if text:
            text = ' ' + text
        parts.append(fmt.format(idx + first, text))
        if idx > 5:
            # Truncate long definitions after a handful of lines.
            parts.append(' ...\n')
            break
    return ''.join(parts)
python
{ "resource": "" }
q32327
ErrorRegistry.create_group
train
def create_group(cls, prefix: str, name: str) -> ErrorGroup:
    """Create a new error group and return it."""
    new_group = cls.ErrorGroup(prefix, name)
    cls.groups.append(new_group)
    return new_group
python
{ "resource": "" }
q32328
ErrorRegistry.get_error_codes
train
def get_error_codes(cls) -> Iterable[str]:
    """Yield all registered codes."""
    for group in cls.groups:
        # Flatten every group's errors into a single stream of codes.
        yield from (error.code for error in group.errors)
python
{ "resource": "" }
q32329
ErrorRegistry.to_rst
train
def to_rst(cls) -> str:
    """Output the registry as reStructuredText, for documentation."""
    sep_line = '+' + 6 * '-' + '+' + '-' * 71 + '+\n'
    blank_line = '|' + 78 * ' ' + '|\n'

    parts = []
    for group in cls.groups:
        # Group header cell.
        parts.append(sep_line)
        parts.append(blank_line)
        parts.append('|' + '**{}**'.format(group.name).center(78) + '|\n')
        parts.append(blank_line)
        # One row per registered error.
        for error in group.errors:
            parts.append(sep_line)
            parts.append('|' + error.code.center(6) + '| ' +
                         error.short_desc.ljust(70) + '|\n')
        parts.append(sep_line)
    return ''.join(parts)
python
{ "resource": "" }
q32330
check_initialized
train
def check_initialized(method):
    """Check that the configuration object was initialized."""
    # Local import keeps the decorator self-contained; functools.wraps
    # preserves the wrapped method's name/docstring, which the original
    # decorator clobbered.
    from functools import wraps

    @wraps(method)
    def _decorator(self, *args, **kwargs):
        # Both fields are populated by parse(); either missing means the
        # configuration was never parsed.
        if self._arguments is None or self._options is None:
            raise RuntimeError('using an uninitialized configuration')
        return method(self, *args, **kwargs)

    return _decorator
python
{ "resource": "" }
q32331
ConfigurationParser.parse
train
def parse(self):
    """Parse the configuration.

    If one of `BASE_ERROR_SELECTION_OPTIONS` was selected, overrides all
    error codes to check and disregards any error code related
    configurations from the configuration files.
    """
    self._options, self._arguments = self._parse_args()
    # Default to checking the current directory when no paths were given.
    if not self._arguments:
        self._arguments = ['.']

    if not self._validate_options(self._options):
        raise IllegalConfiguration()

    self._run_conf = self._create_run_config(self._options)

    # Remember the raw CLI configuration so it can later win over values
    # found in configuration files.
    config = self._create_check_config(self._options, use_defaults=False)
    self._override_by_cli = config
python
{ "resource": "" }
q32332
ConfigurationParser.get_files_to_check
train
def get_files_to_check(self):
    """Generate files and error codes to check on each one.

    Walk dir trees under `self._arguments` and yield file names
    that `match` under each directory that `match_dir`.
    The method locates the configuration for each file name and yields a
    tuple of (filename, [error_codes]).

    With every discovery of a new configuration file `IllegalConfiguration`
    might be raised.
    """
    def _get_matches(conf):
        """Return the `match` and `match_dir` functions for `config`."""
        # Anchor both patterns so they must match the whole name.
        match_func = re(conf.match + '$').match
        match_dir_func = re(conf.match_dir + '$').match
        return match_func, match_dir_func

    def _get_ignore_decorators(conf):
        """Return the `ignore_decorators` as None or regex."""
        return (re(conf.ignore_decorators) if conf.ignore_decorators
                else None)

    for name in self._arguments:
        if os.path.isdir(name):
            for root, dirs, filenames in os.walk(name):
                # Each directory may carry its own configuration file, so
                # the config is re-resolved per directory.
                config = self._get_config(os.path.abspath(root))
                match, match_dir = _get_matches(config)
                ignore_decorators = _get_ignore_decorators(config)

                # Skip any dirs that do not match match_dir
                # (mutating dirs in place prunes os.walk's recursion).
                dirs[:] = [d for d in dirs if match_dir(d)]

                for filename in filenames:
                    if match(filename):
                        full_path = os.path.join(root, filename)
                        yield (full_path, list(config.checked_codes),
                               ignore_decorators)
        else:
            config = self._get_config(os.path.abspath(name))
            match, _ = _get_matches(config)
            ignore_decorators = _get_ignore_decorators(config)
            if match(name):
                yield (name, list(config.checked_codes), ignore_decorators)
python
{ "resource": "" }
q32333
ConfigurationParser._get_config_by_discovery
train
def _get_config_by_discovery(self, node):
    """Get a configuration for checking `node` by config discovery.

    Config discovery happens when no explicit config file is specified. The
    file system is searched for config files starting from the directory
    containing the file being checked, and up until the root directory of
    the project.

    See `_get_config` for further details.
    """
    path = self._get_node_dir(node)

    # Each directory's resolved configuration is cached by _get_config.
    if path in self._cache:
        return self._cache[path]

    config_file = self._get_config_file_in_folder(path)

    if config_file is None:
        parent_dir, tail = os.path.split(path)
        if tail:
            # No configuration file, simply take the parent's.
            config = self._get_config(parent_dir)
        else:
            # There's no configuration file and no parent directory.
            # Use the default configuration or the one given in the CLI.
            config = self._create_check_config(self._options)
    else:
        # There's a config file! Read it and merge if necessary.
        options, inherit = self._read_configuration_file(config_file)

        parent_dir, tail = os.path.split(path)
        if tail and inherit:
            # There is a parent dir and we should try to merge.
            parent_config = self._get_config(parent_dir)
            config = self._merge_configuration(parent_config, options)
        else:
            # No need to merge or parent dir does not exist.
            config = self._create_check_config(options)

    return config
python
{ "resource": "" }
q32334
ConfigurationParser._get_config
train
def _get_config(self, node):
    """Get and cache the run configuration for `node`.

    If no configuration exists (not local and not for the parent node),
    returns and caches a default configuration.

    The algorithm:
    -------------
    * If the current directory's configuration exists in
       `self._cache` - return it.
    * If a configuration file does not exist in this directory:
      * If the directory is not a root directory:
        * Cache its configuration as this directory's and return it.
      * Else:
        * Cache a default configuration and return it.
    * Else:
      * Read the configuration file.
      * If a parent directory exists AND the configuration file allows
        inheritance:
        * Read the parent configuration by calling this function with the
          parent directory as `node`.
        * Merge the parent configuration with the current one and
          cache it.
    * If the user has specified one of `BASE_ERROR_SELECTION_OPTIONS` in
      the CLI - return the CLI configuration with the configuration match
      clauses
    * Set the `--add-select` and `--add-ignore` CLI configurations.
    """
    if self._run_conf.config is None:
        log.debug('No config file specified, discovering.')
        config = self._get_config_by_discovery(node)
    else:
        log.debug('Using config file %r', self._run_conf.config)
        if not os.path.exists(self._run_conf.config):
            raise IllegalConfiguration('Configuration file {!r} specified '
                                       'via --config was not found.'
                                       .format(self._run_conf.config))

        # With an explicit config file the configuration is node-independent,
        # so a single cache slot (keyed None) is used for every node.
        if None in self._cache:
            return self._cache[None]
        options, _ = self._read_configuration_file(self._run_conf.config)

        if options is None:
            log.warning('Configuration file does not contain a '
                        'pydocstyle section. Using default configuration.')
            config = self._create_check_config(self._options)
        else:
            config = self._create_check_config(options)

    # Make the CLI always win
    final_config = {}
    for attr in CheckConfiguration._fields:
        cli_val = getattr(self._override_by_cli, attr)
        conf_val = getattr(config, attr)
        final_config[attr] = cli_val if cli_val is not None else conf_val

    config = CheckConfiguration(**final_config)

    self._set_add_options(config.checked_codes, self._options)

    # Handle caching
    if self._run_conf.config is not None:
        self._cache[None] = config
    else:
        self._cache[self._get_node_dir(node)] = config
    return config
python
{ "resource": "" }
q32335
ConfigurationParser._get_node_dir
train
def _get_node_dir(node): """Return the absolute path of the directory of a filesystem node.""" path = os.path.abspath(node) return path if os.path.isdir(path) else os.path.dirname(path)
python
{ "resource": "" }
q32336
ConfigurationParser._read_configuration_file
train
def _read_configuration_file(self, path):
    """Try to read and parse `path` as a configuration file.

    If the configurations were illegal (checked with
    `self._validate_options`), raises `IllegalConfiguration`.

    Returns (options, should_inherit).
    """
    parser = RawConfigParser(inline_comment_prefixes=('#', ';'))
    # `options` stays None when the file is missing or has no relevant
    # section, which callers treat as "no configuration found here".
    options = None
    should_inherit = True

    if parser.read(path) and self._get_section_name(parser):
        all_options = self._parser.option_list[:]
        for group in self._parser.option_groups:
            all_options.extend(group.option_list)

        # Map each option's destination name to its optparse type (or
        # action, for flags) so values can be coerced correctly below.
        option_list = {o.dest: o.type or o.action
                       for o in all_options}

        # First, read the default values
        new_options, _ = self._parse_args([])

        # Second, parse the configuration
        section_name = self._get_section_name(parser)
        for opt in parser.options(section_name):
            if opt == 'inherit':
                should_inherit = parser.getboolean(section_name, opt)
                continue

            if opt.replace('_', '-') not in self.CONFIG_FILE_OPTIONS:
                log.warning("Unknown option '{}' ignored".format(opt))
                continue

            normalized_opt = opt.replace('-', '_')
            opt_type = option_list[normalized_opt]
            if opt_type in ('int', 'count'):
                value = parser.getint(section_name, opt)
            elif opt_type == 'string':
                value = parser.get(section_name, opt)
            else:
                # Anything else must be a boolean flag.
                assert opt_type in ('store_true', 'store_false')
                value = parser.getboolean(section_name, opt)
            setattr(new_options, normalized_opt, value)

        # Third, fix the set-options
        options = self._fix_set_options(new_options)

    if options is not None:
        if not self._validate_options(options):
            raise IllegalConfiguration('in file: {}'.format(path))

    return options, should_inherit
python
{ "resource": "" }
q32337
ConfigurationParser._merge_configuration
train
def _merge_configuration(self, parent_config, child_options):
    """Merge parent config into the child options.

    The migration process requires an `options` object for the child in
    order to distinguish between mutually exclusive codes, add-select and
    add-ignore error codes.
    """
    # Deep-copy so the parent's code set is never mutated through ours.
    error_codes = copy.deepcopy(parent_config.checked_codes)

    # An exclusive option (select/ignore/convention) on the child replaces
    # the inherited set entirely.
    if self._has_exclusive_option(child_options):
        error_codes = self._get_exclusive_error_codes(child_options)

    self._set_add_options(error_codes, child_options)

    kwargs = {'checked_codes': error_codes}
    for key in ('match', 'match_dir', 'ignore_decorators'):
        child_value = getattr(child_options, key)
        kwargs[key] = child_value if child_value else getattr(parent_config,
                                                              key)
    return CheckConfiguration(**kwargs)
python
{ "resource": "" }
q32338
ConfigurationParser._parse_args
train
def _parse_args(self, args=None, values=None): """Parse the options using `self._parser` and reformat the options.""" options, arguments = self._parser.parse_args(args, values) return self._fix_set_options(options), arguments
python
{ "resource": "" }
q32339
ConfigurationParser._create_run_config
train
def _create_run_config(options):
    """Create a `RunConfiguration` object from `options`."""
    # Pull exactly the fields RunConfiguration declares off the options.
    values = {field: getattr(options, field)
              for field in RunConfiguration._fields}
    return RunConfiguration(**values)
python
{ "resource": "" }
q32340
ConfigurationParser._create_check_config
train
def _create_check_config(cls, options, use_defaults=True):
    """Create a `CheckConfiguration` object from `options`.

    If `use_defaults`, any of the match options that are `None` will be
    replaced with their default value and the default convention will be
    set for the checked codes.
    """
    checked_codes = None
    if cls._has_exclusive_option(options) or use_defaults:
        checked_codes = cls._get_checked_errors(options)

    kwargs = {'checked_codes': checked_codes}
    for key in ('match', 'match_dir', 'ignore_decorators'):
        value = getattr(options, key)
        if value is None and use_defaults:
            # Fall back to the class-level default regex for this option.
            value = getattr(cls, 'DEFAULT_{}_RE'.format(key.upper()))
        kwargs[key] = value
    return CheckConfiguration(**kwargs)
python
{ "resource": "" }
q32341
ConfigurationParser._get_section_name
train
def _get_section_name(cls, parser): """Parse options from relevant section.""" for section_name in cls.POSSIBLE_SECTION_NAMES: if parser.has_section(section_name): return section_name return None
python
{ "resource": "" }
q32342
ConfigurationParser._get_config_file_in_folder
train
def _get_config_file_in_folder(cls, path):
    """Look for a configuration file in `path`.

    If exists return its full path, otherwise None.

    """
    if os.path.isfile(path):
        path = os.path.dirname(path)

    for fn in cls.PROJECT_CONFIG_FILES:
        # `config.read` returns the list of successfully parsed files,
        # so missing or unreadable candidates are skipped silently; a
        # file only counts when it also has a recognized section.
        config = RawConfigParser()
        full_path = os.path.join(path, fn)
        if config.read(full_path) and cls._get_section_name(config):
            return full_path
python
{ "resource": "" }
q32343
ConfigurationParser._get_exclusive_error_codes
train
def _get_exclusive_error_codes(cls, options):
    """Extract the error codes from the selected exclusive option.

    Returns `None` when no exclusive option (ignore/select/convention)
    was given.
    """
    codes = set(ErrorRegistry.get_error_codes())
    checked_codes = None

    if options.ignore is not None:
        # `ignore` is subtractive: start from all codes and remove.
        ignored = cls._expand_error_codes(options.ignore)
        checked_codes = codes - ignored
    elif options.select is not None:
        checked_codes = cls._expand_error_codes(options.select)
    elif options.convention is not None:
        checked_codes = getattr(conventions, options.convention)

    # To not override the conventions nor the options - copy them.
    return copy.deepcopy(checked_codes)
python
{ "resource": "" }
q32344
ConfigurationParser._set_add_options
train
def _set_add_options(cls, checked_codes, options):
    """Set `checked_codes` by the `add_ignore` or `add_select` options.

    Mutates `checked_codes` in place: added selections are applied
    first, then added ignores are removed (so add-ignore wins on
    overlap).
    """
    checked_codes |= cls._expand_error_codes(options.add_select)
    checked_codes -= cls._expand_error_codes(options.add_ignore)
python
{ "resource": "" }
q32345
ConfigurationParser._expand_error_codes
train
def _expand_error_codes(code_parts):
    """Return an expanded set of error codes to ignore."""
    codes = set(ErrorRegistry.get_error_codes())
    expanded_codes = set()

    try:
        for part in code_parts:
            # Dealing with split-lined configurations; The part might begin
            # with a whitespace due to the newline character.
            part = part.strip()
            if not part:
                continue

            # A prefix such as "D1" expands to every registered code
            # that starts with it (D100, D101, ...).
            codes_to_add = {code for code in codes
                            if code.startswith(part)}
            if not codes_to_add:
                log.warning('Error code passed is not a prefix of any '
                            'known errors: %s', part)
            expanded_codes.update(codes_to_add)
    except TypeError as e:
        # e.g. `code_parts` is not iterable - surface it as a
        # configuration error rather than a raw TypeError.
        raise IllegalConfiguration(e)

    return expanded_codes
python
{ "resource": "" }
q32346
ConfigurationParser._get_checked_errors
train
def _get_checked_errors(cls, options):
    """Extract the codes needed to be checked from `options`."""
    checked_codes = cls._get_exclusive_error_codes(options)
    if checked_codes is None:
        # No exclusive option was given - start from the default
        # convention before applying the additive options.
        checked_codes = cls.DEFAULT_CONVENTION

    cls._set_add_options(checked_codes, options)

    return checked_codes
python
{ "resource": "" }
q32347
ConfigurationParser._validate_options
train
def _validate_options(cls, options):
    """Validate the mutually exclusive options.

    Return `True` iff only zero or one of `BASE_ERROR_SELECTION_OPTIONS`
    was selected, and any given convention is a known one.

    """
    for opt1, opt2 in \
            itertools.permutations(cls.BASE_ERROR_SELECTION_OPTIONS, 2):
        if getattr(options, opt1) and getattr(options, opt2):
            log.error('Cannot pass both {} and {}. They are '
                      'mutually exclusive.'.format(opt1, opt2))
            return False

    if options.convention and options.convention not in conventions:
        log.error("Illegal convention '{}'. Possible conventions: {}"
                  .format(options.convention,
                          ', '.join(conventions.keys())))
        return False
    return True
python
{ "resource": "" }
q32348
ConfigurationParser._has_exclusive_option
train
def _has_exclusive_option(cls, options): """Return `True` iff one or more exclusive options were selected.""" return any([getattr(options, opt) is not None for opt in cls.BASE_ERROR_SELECTION_OPTIONS])
python
{ "resource": "" }
q32349
pairwise
train
def pairwise(
    iterable: Iterable,
    default_value: Any,
) -> Iterable[Tuple[Any, Any]]:
    """Yield successive overlapping pairs from `iterable`.

    The final pair combines the last item with `default_value`:

    pairwise([1, 2, 3], default_value=None) -> (1, 2) (2, 3), (3, None)
    """
    first_iter, second_iter = tee(iterable)
    # Advance the second iterator one step; if it is already exhausted
    # the `default_value` fallback keeps `next` from raising.
    next(second_iter, default_value)
    return zip_longest(first_iter, second_iter, fillvalue=default_value)
python
{ "resource": "" }
q32350
load_wordlist
train
def load_wordlist(name: str) -> Iterator[str]:
    """Iterate over lines of a wordlist data file.

    `name` should be the name of a package data file within the data/
    directory.

    Whitespace and #-prefixed comments are stripped from each line.

    """
    data = pkgutil.get_data('pydocstyle', 'data/' + name)
    if data is not None:
        text = data.decode('utf8')
        for line in text.splitlines():
            # Drop trailing comments first, then surrounding whitespace;
            # lines that end up empty are skipped entirely.
            line = COMMENT_RE.sub('', line).strip()
            if line:
                yield line
python
{ "resource": "" }
q32351
EvolutionaryAlgorithmSearchCV.possible_params
train
def possible_params(self):
    """Return `self.params` normalized to a list.

    A non-list `params` value is wrapped in a one-element list so that
    callers can always iterate over the result.
    """
    params = self.params
    if isinstance(params, list):
        return params
    return [params]
python
{ "resource": "" }
q32352
AuthorizedHttp.urlopen
train
def urlopen(self, method, url, body=None, headers=None, **kwargs): """Implementation of urllib3's urlopen.""" # pylint: disable=arguments-differ # We use kwargs to collect additional args that we don't need to # introspect here. However, we do explicitly collect the two # positional arguments. # Use a kwarg for this instead of an attribute to maintain # thread-safety. _credential_refresh_attempt = kwargs.pop( '_credential_refresh_attempt', 0) if headers is None: headers = self.headers # Make a copy of the headers. They will be modified by the credentials # and we want to pass the original headers if we recurse. request_headers = headers.copy() self.credentials.before_request( self._request, method, url, request_headers) response = self.http.urlopen( method, url, body=body, headers=request_headers, **kwargs) # If the response indicated that the credentials needed to be # refreshed, then refresh the credentials and re-attempt the # request. # A stored token may expire between the time it is retrieved and # the time the request is made, so we may need to try twice. # The reason urllib3's retries aren't used is because they # don't allow you to modify the request headers. :/ if (response.status in self._refresh_status_codes and _credential_refresh_attempt < self._max_refresh_attempts): _LOGGER.info( 'Refreshing credentials due to a %s response. Attempt %s/%s.', response.status, _credential_refresh_attempt + 1, self._max_refresh_attempts) self.credentials.refresh(self._request) # Recurse. Pass in the original headers, not our modified set. return self.urlopen( method, url, body=body, headers=headers, _credential_refresh_attempt=_credential_refresh_attempt + 1, **kwargs) return response
python
{ "resource": "" }
q32353
Credentials.service_account_email
train
def service_account_email(self):
    """The service account email.

    Fetched lazily from the App Engine App Identity service on first
    access and cached on the instance afterwards.
    """
    if self._service_account_id is None:
        self._service_account_id = app_identity.get_service_account_name()
    return self._service_account_id
python
{ "resource": "" }
q32354
convert
train
def convert(credentials):
    """Convert oauth2client credentials to google-auth credentials.

    This class converts:

    - :class:`oauth2client.client.OAuth2Credentials` to
      :class:`google.oauth2.credentials.Credentials`.
    - :class:`oauth2client.client.GoogleCredentials` to
      :class:`google.oauth2.credentials.Credentials`.
    - :class:`oauth2client.service_account.ServiceAccountCredentials` to
      :class:`google.oauth2.service_account.Credentials`.
    - :class:`oauth2client.service_account._JWTAccessCredentials` to
      :class:`google.oauth2.service_account.Credentials`.
    - :class:`oauth2client.contrib.gce.AppAssertionCredentials` to
      :class:`google.auth.compute_engine.Credentials`.
    - :class:`oauth2client.contrib.appengine.AppAssertionCredentials` to
      :class:`google.auth.app_engine.Credentials`.

    Returns:
        google.auth.credentials.Credentials: The converted credentials.

    Raises:
        ValueError: If the credentials could not be converted.
    """
    credentials_class = type(credentials)

    try:
        # Exact-type lookup: unknown classes fall through to the
        # ValueError below with the original KeyError chained as cause.
        return _CLASS_CONVERSION_MAP[credentials_class](credentials)
    except KeyError as caught_exc:
        new_exc = ValueError(_CONVERT_ERROR_TMPL.format(credentials_class))
        six.raise_from(new_exc, caught_exc)
python
{ "resource": "" }
q32355
Credentials._update_token
train
def _update_token(self, request): """Updates credentials with a new access_token representing the impersonated account. Args: request (google.auth.transport.requests.Request): Request object to use for refreshing credentials. """ # Refresh our source credentials. self._source_credentials.refresh(request) body = { "delegates": self._delegates, "scope": self._target_scopes, "lifetime": str(self._lifetime) + "s" } headers = { 'Content-Type': 'application/json', } # Apply the source credentials authentication info. self._source_credentials.apply(headers) self.token, self.expiry = _make_iam_token_request( request=request, principal=self._target_principal, headers=headers, body=body)
python
{ "resource": "" }
q32356
verify_signature
train
def verify_signature(message, signature, certs):
    """Verify an RSA cryptographic signature.

    Checks that the provided ``signature`` was generated from ``bytes``
    using the private key associated with the ``cert``.

    Args:
        message (Union[str, bytes]): The plaintext message.
        signature (Union[str, bytes]): The cryptographic signature to
            check.
        certs (Union[Sequence, str, bytes]): The certificate or certificates
            to use to check the signature.

    Returns:
        bool: True if the signature is valid, otherwise False.
    """
    # Normalize a single certificate into a one-element sequence.
    if isinstance(certs, (six.text_type, six.binary_type)):
        certs = [certs]

    # The signature is accepted if any of the certificates verifies it.
    for cert in certs:
        verifier = rsa.RSAVerifier.from_string(cert)
        if verifier.verify(message, signature):
            return True
    return False
python
{ "resource": "" }
q32357
Signer._make_signing_request
train
def _make_signing_request(self, message):
    """Makes a request to the API signBlob API.

    Returns the parsed JSON response from the IAM signBlob endpoint.

    Raises:
        google.auth.exceptions.TransportError: If the API returns a
            non-200 status.
    """
    message = _helpers.to_bytes(message)

    method = 'POST'
    url = _SIGN_BLOB_URI.format(self._service_account_email)
    headers = {}
    # The API expects the payload base64-encoded.
    body = json.dumps({
        'bytesToSign': base64.b64encode(message).decode('utf-8'),
    })

    # Let the credentials attach their authorization headers.
    self._credentials.before_request(self._request, method, url, headers)
    response = self._request(
        url=url, method=method, body=body, headers=headers)

    if response.status != http_client.OK:
        raise exceptions.TransportError(
            'Error calling the IAM signBytes API: {}'.format(
                response.data))

    return json.loads(response.data.decode('utf-8'))
python
{ "resource": "" }
q32358
_fetch_certs
train
def _fetch_certs(request, certs_url):
    """Fetches certificates.

    Google-style certificate endpoints return JSON in the format of
    ``{'key id': 'x509 certificate'}``.

    Args:
        request (google.auth.transport.Request): The object used to make
            HTTP requests.
        certs_url (str): The certificate endpoint URL.

    Returns:
        Mapping[str, str]: A mapping of public key ID to x.509 certificate
            data.

    Raises:
        google.auth.exceptions.TransportError: If the endpoint returns a
            non-200 status.
    """
    response = request(certs_url, method='GET')

    if response.status != http_client.OK:
        raise exceptions.TransportError(
            'Could not fetch certificates at {}'.format(certs_url))

    return json.loads(response.data.decode('utf-8'))
python
{ "resource": "" }
q32359
verify_token
train
def verify_token(id_token, request, audience=None,
                 certs_url=_GOOGLE_OAUTH2_CERTS_URL):
    """Verifies an ID token and returns the decoded token.

    Args:
        id_token (Union[str, bytes]): The encoded token.
        request (google.auth.transport.Request): The object used to make
            HTTP requests.
        audience (str): The audience that this token is intended for. If
            None then the audience is not verified.
        certs_url (str): The URL that specifies the certificates to use to
            verify the token. This URL should return JSON in the format of
            ``{'key id': 'x509 certificate'}``.

    Returns:
        Mapping[str, Any]: The decoded token.
    """
    # Certificates are fetched fresh on every call; `jwt.decode` performs
    # the actual signature and claim verification.
    certs = _fetch_certs(request, certs_url)

    return jwt.decode(id_token, certs=certs, audience=audience)
python
{ "resource": "" }
q32360
verify_oauth2_token
train
def verify_oauth2_token(id_token, request, audience=None):
    """Verifies an ID Token issued by Google's OAuth 2.0 authorization
    server.

    Args:
        id_token (Union[str, bytes]): The encoded token.
        request (google.auth.transport.Request): The object used to make
            HTTP requests.
        audience (str): The audience that this token is intended for. This
            is typically your application's OAuth 2.0 client ID. If None
            then the audience is not verified.

    Returns:
        Mapping[str, Any]: The decoded token.
    """
    # Thin wrapper around verify_token fixed to Google's OAuth2 certs.
    return verify_token(
        id_token, request, audience=audience,
        certs_url=_GOOGLE_OAUTH2_CERTS_URL)
python
{ "resource": "" }
q32361
verify_firebase_token
train
def verify_firebase_token(id_token, request, audience=None):
    """Verifies an ID Token issued by Firebase Authentication.

    Args:
        id_token (Union[str, bytes]): The encoded token.
        request (google.auth.transport.Request): The object used to make
            HTTP requests.
        audience (str): The audience that this token is intended for. This
            is typically your Firebase application ID. If None then the
            audience is not verified.

    Returns:
        Mapping[str, Any]: The decoded token.
    """
    # Thin wrapper around verify_token fixed to the Google APIs certs
    # endpoint used by Firebase Authentication.
    return verify_token(
        id_token, request, audience=audience,
        certs_url=_GOOGLE_APIS_CERTS_URL)
python
{ "resource": "" }
q32362
_decode_jwt_segment
train
def _decode_jwt_segment(encoded_section):
    """Decodes a single JWT segment.

    The segment is base64url-decoded (with padding tolerated) and parsed
    as JSON.

    Raises:
        ValueError: If the decoded segment is not valid JSON.
    """
    section_bytes = _helpers.padded_urlsafe_b64decode(encoded_section)
    try:
        return json.loads(section_bytes.decode('utf-8'))
    except ValueError as caught_exc:
        new_exc = ValueError('Can\'t parse segment: {0}'.format(
            section_bytes))
        six.raise_from(new_exc, caught_exc)
python
{ "resource": "" }
q32363
_unverified_decode
train
def _unverified_decode(token):
    """Decodes a token and does no verification.

    Args:
        token (Union[str, bytes]): The encoded JWT.

    Returns:
        Tuple[str, str, str, str]: header, payload, signed_section, and
            signature.

    Raises:
        ValueError: if there are an incorrect amount of segments in the
            token.
    """
    token = _helpers.to_bytes(token)

    if token.count(b'.') != 2:
        raise ValueError(
            'Wrong number of segments in token: {0}'.format(token))

    encoded_header, encoded_payload, signature = token.split(b'.')
    # The signature covers "header.payload" exactly as transmitted.
    signed_section = encoded_header + b'.' + encoded_payload
    signature = _helpers.padded_urlsafe_b64decode(signature)

    # Parse segments
    header = _decode_jwt_segment(encoded_header)
    payload = _decode_jwt_segment(encoded_payload)

    return header, payload, signed_section, signature
python
{ "resource": "" }
q32364
decode
train
def decode(token, certs=None, verify=True, audience=None):
    """Decode and verify a JWT.

    Args:
        token (str): The encoded JWT.
        certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
            certificate used to validate the JWT signature. If bytes or
            string, it must the the public key certificate in PEM format.
            If a mapping, it must be a mapping of key IDs to public key
            certificates in PEM format. The mapping must contain the same
            key ID that's specified in the token's header.
        verify (bool): Whether to perform signature and claim validation.
            Verification is done by default.
        audience (str): The audience claim, 'aud', that this JWT should
            contain. If None then the JWT's 'aud' parameter is not
            verified.

    Returns:
        Mapping[str, str]: The deserialized JSON payload in the JWT.

    Raises:
        ValueError: if any verification checks failed.
    """
    # `collections.Mapping` was removed in Python 3.10; import from
    # `collections.abc` with a fallback for older interpreters.
    try:
        from collections.abc import Mapping
    except ImportError:  # pragma: no cover - legacy Python fallback
        from collections import Mapping

    header, payload, signed_section, signature = _unverified_decode(token)

    if not verify:
        return payload

    # If certs is specified as a dictionary of key IDs to certificates, then
    # use the certificate identified by the key ID in the token header.
    if isinstance(certs, Mapping):
        key_id = header.get('kid')

        if key_id:
            if key_id not in certs:
                raise ValueError(
                    'Certificate for key id {} not found.'.format(key_id))
            certs_to_check = [certs[key_id]]
        # If there's no key id in the header, check against all of the certs.
        else:
            certs_to_check = certs.values()
    else:
        certs_to_check = certs

    # Verify that the signature matches the message.
    if not crypt.verify_signature(signed_section, signature, certs_to_check):
        raise ValueError('Could not verify token signature.')

    # Verify the issued at and created times in the payload.
    _verify_iat_and_exp(payload)

    # Check audience.
    if audience is not None:
        claim_audience = payload.get('aud')
        if audience != claim_audience:
            raise ValueError(
                'Token has wrong audience {}, expected {}'.format(
                    claim_audience, audience))

    return payload
python
{ "resource": "" }
q32365
OnDemandCredentials._get_jwt_for_audience
train
def _get_jwt_for_audience(self, audience):
    """Get a JWT For a given audience.

    If there is already an existing, non-expired token in the cache for
    the audience, that token is used. Otherwise, a new token will be
    created.

    Args:
        audience (str): The intended audience.

    Returns:
        bytes: The encoded JWT.
    """
    token, expiry = self._cache.get(audience, (None, None))

    if token is None or expiry < _helpers.utcnow():
        # Cache miss or expired token: mint a fresh one and store it.
        token, expiry = self._make_jwt_for_audience(audience)
        self._cache[audience] = token, expiry

    return token
python
{ "resource": "" }
q32366
OnDemandCredentials.before_request
train
def before_request(self, request, method, url, headers): """Performs credential-specific before request logic. Args: request (Any): Unused. JWT credentials do not need to make an HTTP request to refresh. method (str): The request's HTTP method. url (str): The request's URI. This is used as the audience claim when generating the JWT. headers (Mapping): The request's headers. """ # pylint: disable=unused-argument # (pylint doesn't correctly recognize overridden methods.) parts = urllib.parse.urlsplit(url) # Strip query string and fragment audience = urllib.parse.urlunsplit( (parts.scheme, parts.netloc, parts.path, "", "")) token = self._get_jwt_for_audience(audience) self.apply(headers, token=token)
python
{ "resource": "" }
q32367
RSAVerifier.from_string
train
def from_string(cls, public_key):
    """Construct an Verifier instance from a public key or public
    certificate string.

    Args:
        public_key (Union[str, bytes]): The public key in PEM format or the
            x509 public key certificate.

    Returns:
        Verifier: The constructed verifier.

    Raises:
        ValueError: If the public key can't be parsed.
    """
    public_key_data = _helpers.to_bytes(public_key)

    if _CERTIFICATE_MARKER in public_key_data:
        # x509 certificate: parse it and extract the embedded public key.
        cert = cryptography.x509.load_pem_x509_certificate(
            public_key_data, _BACKEND)
        pubkey = cert.public_key()
    else:
        pubkey = serialization.load_pem_public_key(
            public_key_data, _BACKEND)

    return cls(pubkey)
python
{ "resource": "" }
q32368
RSASigner.from_string
train
def from_string(cls, key, key_id=None):
    """Construct a RSASigner from a private key in PEM format.

    Args:
        key (Union[bytes, str]): Private key in PEM format.
        key_id (str): An optional key id used to identify the private key.

    Returns:
        google.auth.crypt._cryptography_rsa.RSASigner: The
        constructed signer.

    Raises:
        ValueError: If ``key`` is not ``bytes`` or ``str`` (unicode).
        UnicodeDecodeError: If ``key`` is ``bytes`` but cannot be decoded
            into a UTF-8 ``str``.
        ValueError: If ``cryptography`` "Could not deserialize key data."
    """
    key = _helpers.to_bytes(key)
    # Unencrypted PEM keys only: no password is supplied here.
    private_key = serialization.load_pem_private_key(
        key, password=None, backend=_BACKEND)
    return cls(private_key, key_id=key_id)
python
{ "resource": "" }
q32369
_warn_about_problematic_credentials
train
def _warn_about_problematic_credentials(credentials):
    """Determines if the credentials are problematic.

    Credentials from the Cloud SDK that are associated with Cloud SDK's
    project are problematic because they may not have APIs enabled and have
    limited quota. If this is the case, warn about it.
    """
    from google.auth import _cloud_sdk
    # Credentials created by the gcloud CLI itself carry the Cloud SDK
    # client ID; only those trigger the warning.
    if credentials.client_id == _cloud_sdk.CLOUD_SDK_CLIENT_ID:
        warnings.warn(_CLOUD_SDK_CREDENTIALS_WARNING)
python
{ "resource": "" }
q32370
_load_credentials_from_file
train
def _load_credentials_from_file(filename):
    """Loads credentials from a file.

    The credentials file must be a service account key or stored authorized
    user credentials.

    Args:
        filename (str): The full path to the credentials file.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
            credentials and the project ID. Authorized user credentials do
            not have the project ID information.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the file is in
            the wrong format or is missing.
    """
    if not os.path.exists(filename):
        raise exceptions.DefaultCredentialsError(
            'File {} was not found.'.format(filename))

    with io.open(filename, 'r') as file_obj:
        try:
            info = json.load(file_obj)
        except ValueError as caught_exc:
            new_exc = exceptions.DefaultCredentialsError(
                'File {} is not a valid json file.'.format(filename),
                caught_exc)
            six.raise_from(new_exc, caught_exc)

    # The type key should indicate that the file is either a service account
    # credentials file or an authorized user credentials file.
    credential_type = info.get('type')

    if credential_type == _AUTHORIZED_USER_TYPE:
        # Imported lazily; only needed for this credential type.
        from google.auth import _cloud_sdk

        try:
            credentials = _cloud_sdk.load_authorized_user_credentials(info)
        except ValueError as caught_exc:
            msg = 'Failed to load authorized user credentials from {}'.format(
                filename)
            new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
            six.raise_from(new_exc, caught_exc)
        # Authorized user credentials do not contain the project ID.
        _warn_about_problematic_credentials(credentials)
        return credentials, None

    elif credential_type == _SERVICE_ACCOUNT_TYPE:
        # Imported lazily; only needed for this credential type.
        from google.oauth2 import service_account

        try:
            credentials = (
                service_account.Credentials.from_service_account_info(info))
        except ValueError as caught_exc:
            msg = 'Failed to load service account credentials from {}'.format(
                filename)
            new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
            six.raise_from(new_exc, caught_exc)
        return credentials, info.get('project_id')

    else:
        raise exceptions.DefaultCredentialsError(
            'The file {file} does not have a valid type. '
            'Type is {type}, expected one of {valid_types}.'.format(
                file=filename, type=credential_type,
                valid_types=_VALID_TYPES))
python
{ "resource": "" }
q32371
_get_gcloud_sdk_credentials
train
def _get_gcloud_sdk_credentials():
    """Gets the credentials and project ID from the Cloud SDK.

    Returns ``(None, None)`` when no application default credentials file
    exists.
    """
    from google.auth import _cloud_sdk

    # Check if application default credentials exist.
    credentials_filename = (
        _cloud_sdk.get_application_default_credentials_path())

    if not os.path.isfile(credentials_filename):
        return None, None

    credentials, project_id = _load_credentials_from_file(
        credentials_filename)

    if not project_id:
        # Fall back to the gcloud CLI's active project, if any.
        project_id = _cloud_sdk.get_project_id()

    return credentials, project_id
python
{ "resource": "" }
q32372
_get_explicit_environ_credentials
train
def _get_explicit_environ_credentials():
    """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
    variable.

    Returns:
        Tuple[Optional[google.auth.credentials.Credentials], Optional[str]]:
            The loaded credentials and project ID, or ``(None, None)`` when
            the environment variable is not set.
    """
    explicit_file = os.environ.get(environment_vars.CREDENTIALS)

    if explicit_file is not None:
        # Reuse the value read above instead of re-reading the environment,
        # so a concurrent change to the variable can't cause a KeyError.
        credentials, project_id = _load_credentials_from_file(explicit_file)

        return credentials, project_id

    else:
        return None, None
python
{ "resource": "" }
q32373
_get_gae_credentials
train
def _get_gae_credentials(): """Gets Google App Engine App Identity credentials and project ID.""" # While this library is normally bundled with app_engine, there are # some cases where it's not available, so we tolerate ImportError. try: import google.auth.app_engine as app_engine except ImportError: return None, None try: credentials = app_engine.Credentials() project_id = app_engine.get_project_id() return credentials, project_id except EnvironmentError: return None, None
python
{ "resource": "" }
q32374
_get_gce_credentials
train
def _get_gce_credentials(request=None): """Gets credentials and project ID from the GCE Metadata Service.""" # Ping requires a transport, but we want application default credentials # to require no arguments. So, we'll use the _http_client transport which # uses http.client. This is only acceptable because the metadata server # doesn't do SSL and never requires proxies. # While this library is normally bundled with compute_engine, there are # some cases where it's not available, so we tolerate ImportError. try: from google.auth import compute_engine from google.auth.compute_engine import _metadata except ImportError: return None, None if request is None: request = google.auth.transport._http_client.Request() if _metadata.ping(request=request): # Get the project ID. try: project_id = _metadata.get_project_id(request=request) except exceptions.TransportError: project_id = None return compute_engine.Credentials(), project_id else: return None, None
python
{ "resource": "" }
q32375
default
train
def default(scopes=None, request=None):
    """Gets the default credentials for the current environment.

    `Application Default Credentials`_ provides an easy way to obtain
    credentials to call Google APIs for server-to-server or local
    applications. This function acquires credentials from the environment in
    the following order:

    1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
       to the path of a valid service account JSON private key file, then it
       is loaded and returned. The project ID returned is the project ID
       defined in the service account file if available (some older files do
       not contain project ID information).
    2. If the `Google Cloud SDK`_ is installed and has application default
       credentials set they are loaded and returned.

       To enable application default credentials with the Cloud SDK run::

            gcloud auth application-default login

       If the Cloud SDK has an active project, the project ID is returned.
       The active project can be set using::

            gcloud config set project

    3. If the application is running in the `App Engine standard
       environment`_ then the credentials and project ID from the `App
       Identity Service`_ are used.
    4. If the application is running in `Compute Engine`_ or the `App
       Engine flexible environment`_ then the credentials and project ID
       are obtained from the `Metadata Service`_.
    5. If no credentials are found,
       :class:`~google.auth.exceptions.DefaultCredentialsError` will be
       raised.

    .. _Application Default Credentials: https://developers.google.com\
            /identity/protocols/application-default-credentials
    .. _Google Cloud SDK: https://cloud.google.com/sdk
    .. _App Engine standard environment: https://cloud.google.com/appengine
    .. _App Identity Service: https://cloud.google.com/appengine/docs/python\
            /appidentity/
    .. _Compute Engine: https://cloud.google.com/compute
    .. _App Engine flexible environment: https://cloud.google.com\
            /appengine/flexible
    .. _Metadata Service: https://cloud.google.com/compute/docs\
            /storing-retrieving-metadata

    Example::

        import google.auth

        credentials, project_id = google.auth.default()

    Args:
        scopes (Sequence[str]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary.
        request (google.auth.transport.Request): An object used to make
            HTTP requests. This is used to detect whether the application
            is running on Compute Engine. If not specified, then it will
            use the standard library http client to make requests.

    Returns:
        Tuple[~google.auth.credentials.Credentials, Optional[str]]:
            the current environment's credentials and project ID. Project
            ID may be None, which indicates that the Project ID could not
            be ascertained from the environment.

    Raises:
        ~google.auth.exceptions.DefaultCredentialsError:
            If no credentials were found, or if the credentials found were
            invalid.
    """
    from google.auth.credentials import with_scopes_if_required

    # An explicitly-set project always wins over any inferred project ID.
    explicit_project_id = os.environ.get(
        environment_vars.PROJECT,
        os.environ.get(environment_vars.LEGACY_PROJECT))

    # Checkers are tried in the documented priority order; the first one
    # that yields credentials wins.
    checkers = (
        _get_explicit_environ_credentials,
        _get_gcloud_sdk_credentials,
        _get_gae_credentials,
        lambda: _get_gce_credentials(request))

    for checker in checkers:
        credentials, project_id = checker()
        if credentials is not None:
            credentials = with_scopes_if_required(credentials, scopes)
            effective_project_id = explicit_project_id or project_id
            if not effective_project_id:
                _LOGGER.warning(
                    'No project ID could be determined. Consider running '
                    '`gcloud config set project` or setting the %s '
                    'environment variable',
                    environment_vars.PROJECT)
            return credentials, effective_project_id

    raise exceptions.DefaultCredentialsError(_HELP_MESSAGE)
python
{ "resource": "" }
q32376
AuthMetadataPlugin._get_authorization_headers
train
def _get_authorization_headers(self, context):
    """Gets the authorization headers for a request.

    Returns:
        Sequence[Tuple[str, str]]: A list of request headers (key, value)
            to add to the request.
    """
    headers = {}
    # The credentials populate `headers` in place (e.g. Authorization).
    self._credentials.before_request(
        self._request,
        context.method_name,
        context.service_url,
        headers)

    return list(six.iteritems(headers))
python
{ "resource": "" }
q32377
Credentials.from_service_account_info
train
def from_service_account_info(cls, info, **kwargs):
    """Creates a Credentials instance from parsed service account info.

    Args:
        info (Mapping[str, str]): The service account info in Google
            format.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        google.auth.service_account.Credentials: The constructed
            credentials.

    Raises:
        ValueError: If the info is not in the expected format.
    """
    # `from_dict` validates the required keys and builds the signer.
    signer = _service_account_info.from_dict(
        info, require=['client_email', 'token_uri'])
    return cls._from_signer_and_info(signer, info, **kwargs)
python
{ "resource": "" }
q32378
Credentials.from_service_account_file
train
def from_service_account_file(cls, filename, **kwargs):
    """Creates a Credentials instance from a service account json file.

    Args:
        filename (str): The path to the service account json file.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        google.auth.service_account.Credentials: The constructed
            credentials.
    """
    # `from_filename` reads, parses and validates the key file.
    info, signer = _service_account_info.from_filename(
        filename, require=['client_email', 'token_uri'])
    return cls._from_signer_and_info(signer, info, **kwargs)
python
{ "resource": "" }
q32379
Credentials.with_subject
train
def with_subject(self, subject):
    """Create a copy of these credentials with the specified subject.

    Args:
        subject (str): The subject claim.

    Returns:
        google.auth.service_account.Credentials: A new credentials
            instance.
    """
    # Every field is carried over except `subject`, which is replaced.
    return self.__class__(
        self._signer,
        service_account_email=self._service_account_email,
        scopes=self._scopes,
        token_uri=self._token_uri,
        subject=subject,
        project_id=self._project_id,
        additional_claims=self._additional_claims.copy())
python
{ "resource": "" }
q32380
IDTokenCredentials.with_target_audience
train
def with_target_audience(self, target_audience):
    """Create a copy of these credentials with the specified target
    audience.

    Args:
        target_audience (str): The intended audience for these credentials,
            used when requesting the ID Token.

    Returns:
        google.auth.service_account.IDTokenCredentials: A new credentials
            instance.
    """
    # Every field is carried over except `target_audience`, which is
    # replaced.
    return self.__class__(
        self._signer,
        service_account_email=self._service_account_email,
        token_uri=self._token_uri,
        target_audience=target_audience,
        additional_claims=self._additional_claims.copy())
python
{ "resource": "" }
q32381
AuthorizedSession.request
train
def request(self, method, url, data=None, headers=None, **kwargs):
    """Implementation of Requests' request."""
    # pylint: disable=arguments-differ
    # Requests' ``request`` accepts many parameters, but only
    # ``method`` and ``url`` are required; everything else is forwarded
    # to the superclass untouched.

    # The retry counter travels as a kwarg rather than an attribute so
    # that concurrent calls on the same session stay thread-safe.
    refresh_attempt = kwargs.pop('_credential_refresh_attempt', 0)

    # Work on a copy of the headers: the credentials mutate them, and
    # the caller's originals are needed if the request is retried.
    auth_headers = {} if headers is None else headers.copy()

    self.credentials.before_request(
        self._auth_request, method, url, auth_headers)

    response = super(AuthorizedSession, self).request(
        method, url, data=data, headers=auth_headers, **kwargs)

    # A stored token may expire between retrieval and use, so a
    # refresh-worthy status triggers a credential refresh and one or
    # more re-attempts (bounded by ``_max_refresh_attempts``).
    should_refresh = (
        response.status_code in self._refresh_status_codes and
        refresh_attempt < self._max_refresh_attempts)
    if not should_refresh:
        return response

    _LOGGER.info(
        'Refreshing credentials due to a %s response. Attempt %s/%s.',
        response.status_code, refresh_attempt + 1,
        self._max_refresh_attempts)

    timed_auth_request = functools.partial(
        self._auth_request, timeout=self._refresh_timeout)
    self.credentials.refresh(timed_auth_request)

    # Recurse with the caller's original (unmodified) headers.
    return self.request(
        method, url, data=data, headers=headers,
        _credential_refresh_attempt=refresh_attempt + 1,
        **kwargs)
python
{ "resource": "" }
q32382
with_scopes_if_required
train
def with_scopes_if_required(credentials, scopes):
    """Creates a copy of the credentials with scopes if scoping is
    required.

    This helper function is useful when you do not know (or care to
    know) the specific type of credentials you are using (such as when
    you use :func:`google.auth.default`). This function will call
    :meth:`Scoped.with_scopes` if the credentials are scoped credentials
    and if the credentials require scoping. Otherwise, it will return
    the credentials as-is.

    Args:
        credentials (google.auth.credentials.Credentials): The
            credentials to scope if necessary.
        scopes (Sequence[str]): The list of scopes to use.

    Returns:
        google.auth.credentials.Credentials: Either a new set of scoped
            credentials, or the passed in credentials instance if no
            scoping was required.
    """
    needs_scoping = (
        isinstance(credentials, Scoped) and credentials.requires_scopes)
    if not needs_scoping:
        return credentials
    return credentials.with_scopes(scopes)
python
{ "resource": "" }
q32383
_bit_list_to_bytes
train
def _bit_list_to_bytes(bit_list): """Converts an iterable of 1s and 0s to bytes. Combines the list 8 at a time, treating each group of 8 bits as a single byte. Args: bit_list (Sequence): Sequence of 1s and 0s. Returns: bytes: The decoded bytes. """ num_bits = len(bit_list) byte_vals = bytearray() for start in six.moves.xrange(0, num_bits, 8): curr_bits = bit_list[start:start + 8] char_val = sum( val * digit for val, digit in six.moves.zip(_POW2, curr_bits)) byte_vals.append(char_val) return bytes(byte_vals)
python
{ "resource": "" }
q32384
RSASigner.from_string
train
def from_string(cls, key, key_id=None):
    """Construct a Signer instance from a private key in PEM format.

    Args:
        key (str): Private key in PEM format.
        key_id (str): An optional key id used to identify the private
            key.

    Returns:
        google.auth.crypt.Signer: The constructed signer.

    Raises:
        ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in
            PEM format.
    """
    pem_text = _helpers.from_bytes(key)  # PEM expects str in Python 3.
    marker_id, der_bytes = pem.readPemBlocksFromFile(
        six.StringIO(pem_text), _PKCS1_MARKER, _PKCS8_MARKER)

    if marker_id == 0:
        # PKCS#1: the DER blob is the bare RSA key.
        private_key = rsa.key.PrivateKey.load_pkcs1(
            der_bytes, format='DER')
    elif marker_id == 1:
        # PKCS#8: unwrap the PrivateKeyInfo structure, then load the
        # inner RSA key it carries.
        key_info, remaining = decoder.decode(
            der_bytes, asn1Spec=_PKCS8_SPEC)
        if remaining != b'':
            raise ValueError('Unused bytes', remaining)
        private_key_info = key_info.getComponentByName('privateKey')
        private_key = rsa.key.PrivateKey.load_pkcs1(
            private_key_info.asOctets(), format='DER')
    else:
        raise ValueError('No key could be detected.')

    return cls(private_key, key_id=key_id)
python
{ "resource": "" }
q32385
_handle_error_response
train
def _handle_error_response(response_body):
    """Translates an error response into an exception.

    Args:
        response_body (str): The decoded response data.

    Raises:
        google.auth.exceptions.RefreshError
    """
    # Prefer the structured OAuth error fields; fall back to the raw
    # response body when it is not the expected JSON shape.
    try:
        error_data = json.loads(response_body)
        error_details = '{}: {}'.format(
            error_data['error'], error_data.get('error_description'))
    except (KeyError, ValueError):
        error_details = response_body

    raise exceptions.RefreshError(error_details, response_body)
python
{ "resource": "" }
q32386
_parse_expiry
train
def _parse_expiry(response_data):
    """Parses the expiry field from a response into a datetime.

    Args:
        response_data (Mapping): The JSON-parsed response data.

    Returns:
        Optional[datetime]: The expiration or ``None`` if no expiration
            was specified.
    """
    expires_in = response_data.get('expires_in', None)
    if expires_in is None:
        return None
    # ``expires_in`` is a lifetime in seconds relative to now.
    return _helpers.utcnow() + datetime.timedelta(seconds=expires_in)
python
{ "resource": "" }
q32387
_token_endpoint_request
train
def _token_endpoint_request(request, token_uri, body):
    """Makes a request to the OAuth 2.0 authorization server's token
    endpoint.

    Args:
        request (google.auth.transport.Request): A callable used to
            make HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token
            endpoint URI.
        body (Mapping[str, str]): The parameters to send in the request
            body.

    Returns:
        Mapping[str, str]: The JSON-decoded response data.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint
            returned an error.
    """
    encoded_body = urllib.parse.urlencode(body)
    headers = {'content-type': _URLENCODED_CONTENT_TYPE}

    response = request(
        method='POST', url=token_uri, headers=headers, body=encoded_body)

    response_body = response.data.decode('utf-8')

    # Any non-200 response is surfaced as a RefreshError.
    if response.status != http_client.OK:
        _handle_error_response(response_body)

    return json.loads(response_body)
python
{ "resource": "" }
q32388
jwt_grant
train
def jwt_grant(request, token_uri, assertion):
    """Implements the JWT Profile for OAuth 2.0 Authorization Grants.

    For more details, see `rfc7523 section 4`_.

    Args:
        request (google.auth.transport.Request): A callable used to
            make HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token
            endpoint URI.
        assertion (str): The OAuth 2.0 assertion.

    Returns:
        Tuple[str, Optional[datetime], Mapping[str, str]]: The access
            token, expiration, and additional data returned by the
            token endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint
            returned an error.

    .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4
    """
    response_data = _token_endpoint_request(
        request, token_uri,
        {'assertion': assertion, 'grant_type': _JWT_GRANT_TYPE})

    try:
        access_token = response_data['access_token']
    except KeyError as caught_exc:
        new_exc = exceptions.RefreshError(
            'No access token in response.', response_data)
        six.raise_from(new_exc, caught_exc)

    return access_token, _parse_expiry(response_data), response_data
python
{ "resource": "" }
q32389
id_token_jwt_grant
train
def id_token_jwt_grant(request, token_uri, assertion):
    """Implements the JWT Profile for OAuth 2.0 Authorization Grants,
    but requests an OpenID Connect ID Token instead of an access token.

    This is a variant on the standard JWT Profile that is currently
    unique to Google. This was added for the benefit of authenticating
    to services that require ID Tokens instead of access tokens or JWT
    bearer tokens.

    Args:
        request (google.auth.transport.Request): A callable used to
            make HTTP requests.
        token_uri (str): The OAuth 2.0 authorization server's token
            endpoint URI.
        assertion (str): JWT token signed by a service account. The
            token's payload must include a ``target_audience`` claim.

    Returns:
        Tuple[str, Optional[datetime], Mapping[str, str]]: The
            (encoded) Open ID Connect ID Token, expiration, and
            additional data returned by the endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint
            returned an error.
    """
    response_data = _token_endpoint_request(
        request, token_uri,
        {'assertion': assertion, 'grant_type': _JWT_GRANT_TYPE})

    try:
        id_token = response_data['id_token']
    except KeyError as caught_exc:
        new_exc = exceptions.RefreshError(
            'No ID token in response.', response_data)
        six.raise_from(new_exc, caught_exc)

    # The ID token's own ``exp`` claim is authoritative for expiry.
    payload = jwt.decode(id_token, verify=False)
    expiry = datetime.datetime.utcfromtimestamp(payload['exp'])

    return id_token, expiry, response_data
python
{ "resource": "" }
q32390
refresh_grant
train
def refresh_grant(request, token_uri, refresh_token, client_id, client_secret):
    """Implements the OAuth 2.0 refresh token grant.

    For more details, see `rfc6749 section 6`_.

    Args:
        request (google.auth.transport.Request): A callable used to
            make HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token
            endpoint URI.
        refresh_token (str): The refresh token to use to get a new
            access token.
        client_id (str): The OAuth 2.0 application's client ID.
        client_secret (str): The OAuth 2.0 application's client secret.

    Returns:
        Tuple[str, Optional[str], Optional[datetime], Mapping[str, str]]:
            The access token, new refresh token, expiration, and
            additional data returned by the token endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint
            returned an error.

    .. _rfc6749 section 6: https://tools.ietf.org/html/rfc6749#section-6
    """
    response_data = _token_endpoint_request(request, token_uri, {
        'grant_type': _REFRESH_GRANT_TYPE,
        'client_id': client_id,
        'client_secret': client_secret,
        'refresh_token': refresh_token,
    })

    try:
        access_token = response_data['access_token']
    except KeyError as caught_exc:
        new_exc = exceptions.RefreshError(
            'No access token in response.', response_data)
        six.raise_from(new_exc, caught_exc)

    # A new refresh token is optional; keep the old one otherwise.
    new_refresh_token = response_data.get('refresh_token', refresh_token)

    return (access_token, new_refresh_token, _parse_expiry(response_data),
            response_data)
python
{ "resource": "" }
q32391
get_config_path
train
def get_config_path():
    """Returns the absolute path to the Cloud SDK's configuration
    directory.

    Returns:
        str: The Cloud SDK config path.
    """
    # An explicit environment override always wins.
    explicit_path = os.environ.get(environment_vars.CLOUD_SDK_CONFIG_DIR)
    if explicit_path is not None:
        return explicit_path

    # Non-windows systems store this at ~/.config/gcloud
    if os.name != 'nt':
        return os.path.join(
            os.path.expanduser('~'), '.config', _CONFIG_DIRECTORY)

    # Windows systems store config at %APPDATA%\gcloud
    try:
        return os.path.join(
            os.environ[_WINDOWS_CONFIG_ROOT_ENV_VAR], _CONFIG_DIRECTORY)
    except KeyError:
        # This should never happen unless someone is really messing
        # with things, but we'll cover the case anyway.
        drive = os.environ.get('SystemDrive', 'C:')
        return os.path.join(drive, '\\', _CONFIG_DIRECTORY)
python
{ "resource": "" }
q32392
get_project_id
train
def get_project_id():
    """Gets the project ID from the Cloud SDK.

    Returns:
        Optional[str]: The project ID.
    """
    command = (_CLOUD_SDK_WINDOWS_COMMAND if os.name == 'nt'
               else _CLOUD_SDK_POSIX_COMMAND)

    try:
        output = subprocess.check_output(
            (command,) + _CLOUD_SDK_CONFIG_COMMAND,
            stderr=subprocess.STDOUT)
    except (subprocess.CalledProcessError, OSError, IOError):
        # gcloud is absent, not executable, or exited non-zero.
        return None

    try:
        configuration = json.loads(output.decode('utf-8'))
        return configuration[
            'configuration']['properties']['core']['project']
    except (ValueError, KeyError):
        # Unparseable output or a config without a core project set.
        return None
python
{ "resource": "" }
q32393
Credentials._retrieve_info
train
def _retrieve_info(self, request):
    """Retrieve information about the service account.

    Updates the scopes and retrieves the full service account email.

    Args:
        request (google.auth.transport.Request): The object used to
            make HTTP requests.
    """
    account_info = _metadata.get_service_account_info(
        request, service_account=self._service_account_email)

    # The metadata server reports the canonical email and the scopes
    # actually granted to the account.
    self._service_account_email = account_info['email']
    self._scopes = account_info['scopes']
python
{ "resource": "" }
q32394
Credentials.refresh
train
def refresh(self, request):
    """Refresh the access token and scopes.

    Args:
        request (google.auth.transport.Request): The object used to
            make HTTP requests.

    Raises:
        google.auth.exceptions.RefreshError: If the Compute Engine
            metadata service can't be reached or if the instance has
            no credentials.
    """
    try:
        # Refresh the account email/scopes first, then fetch a token
        # for that (possibly canonicalized) account.
        self._retrieve_info(request)
        self.token, self.expiry = _metadata.get_service_account_token(
            request, service_account=self._service_account_email)
    except exceptions.TransportError as caught_exc:
        # Surface transport problems as RefreshError, keeping the
        # original exception as the cause.
        new_exc = exceptions.RefreshError(caught_exc)
        six.raise_from(new_exc, caught_exc)
python
{ "resource": "" }
q32395
Credentials.from_authorized_user_info
train
def from_authorized_user_info(cls, info, scopes=None):
    """Creates a Credentials instance from parsed authorized user info.

    Args:
        info (Mapping[str, str]): The authorized user info in Google
            format.
        scopes (Sequence[str]): Optional list of scopes to include in
            the credentials.

    Returns:
        google.oauth2.credentials.Credentials: The constructed
            credentials.

    Raises:
        ValueError: If the info is not in the expected format.
    """
    keys_needed = set(('refresh_token', 'client_id', 'client_secret'))
    # ``difference`` against the mapping iterates its keys.
    missing = keys_needed.difference(info)

    if missing:
        raise ValueError(
            'Authorized user info was not in the expected format, missing '
            'fields {}.'.format(', '.join(missing)))

    return Credentials(
        None,  # No access token, must be refreshed.
        refresh_token=info['refresh_token'],
        token_uri=_GOOGLE_OAUTH2_TOKEN_ENDPOINT,
        scopes=scopes,
        client_id=info['client_id'],
        client_secret=info['client_secret'])
python
{ "resource": "" }
q32396
Credentials.from_authorized_user_file
train
def from_authorized_user_file(cls, filename, scopes=None):
    """Creates a Credentials instance from an authorized user json
    file.

    Args:
        filename (str): The path to the authorized user json file.
        scopes (Sequence[str]): Optional list of scopes to include in
            the credentials.

    Returns:
        google.oauth2.credentials.Credentials: The constructed
            credentials.

    Raises:
        ValueError: If the file is not in the expected format.
    """
    with io.open(filename, 'r', encoding='utf-8') as json_file:
        info = json.load(json_file)
    return cls.from_authorized_user_info(info, scopes)
python
{ "resource": "" }
q32397
ping
train
def ping(request, timeout=_METADATA_DEFAULT_TIMEOUT, retry_count=3):
    """Checks to see if the metadata server is available.

    Args:
        request (google.auth.transport.Request): A callable used to
            make HTTP requests.
        timeout (int): How long to wait for the metadata server to
            respond.
        retry_count (int): How many times to attempt connecting to
            metadata server using above timeout.

    Returns:
        bool: True if the metadata server is reachable, False
            otherwise.
    """
    # NOTE: The explicit ``timeout`` is a workaround. The underlying
    # issue is that resolving an unknown host on some networks will take
    # 20-30 seconds; making this timeout short fixes the issue, but
    # could lead to false negatives in the event that we are on GCE, but
    # the metadata resolution was particularly slow. The latter case is
    # "unlikely".
    for attempt in range(retry_count):
        try:
            response = request(
                url=_METADATA_IP_ROOT, method='GET',
                headers=_METADATA_HEADERS, timeout=timeout)

            # A successful request ends the loop immediately, whether
            # or not the responder looks like the metadata server.
            metadata_flavor = response.headers.get(_METADATA_FLAVOR_HEADER)
            return (response.status == http_client.OK and
                    metadata_flavor == _METADATA_FLAVOR_VALUE)
        except exceptions.TransportError:
            # Bug fix: the original implicitly concatenated the two
            # string literals without a separating space, producing
            # "...unavailable onattempt 1 of 3" in the logs.
            _LOGGER.info(
                'Compute Engine Metadata server unavailable on '
                'attempt %s of %s', attempt + 1, retry_count)

    return False
python
{ "resource": "" }
q32398
get
train
def get(request, path, root=_METADATA_ROOT, recursive=False):
    """Fetch a resource from the metadata server.

    Args:
        request (google.auth.transport.Request): A callable used to
            make HTTP requests.
        path (str): The resource to retrieve. For example,
            ``'instance/service-accounts/default'``.
        root (str): The full path to the metadata server root.
        recursive (bool): Whether to do a recursive query of metadata.
            See https://cloud.google.com/compute/docs/metadata#aggcontents
            for more details.

    Returns:
        Union[Mapping, str]: If the metadata server returns JSON, a
            mapping of the decoded JSON is return. Otherwise, the
            response content is returned as a string.

    Raises:
        google.auth.exceptions.TransportError: if an error occurred
            while retrieving metadata.
    """
    base_url = urlparse.urljoin(root, path)
    query_params = {}

    if recursive:
        query_params['recursive'] = 'true'

    url = _helpers.update_query(base_url, query_params)

    response = request(url=url, method='GET', headers=_METADATA_HEADERS)

    if response.status != http_client.OK:
        # Bug fix: the original's implicitly concatenated literals were
        # missing a separating space ("...Enginemetadata service...").
        raise exceptions.TransportError(
            'Failed to retrieve {} from the Google Compute Engine '
            'metadata service. Status: {} Response:\n{}'.format(
                url, response.status, response.data),
            response)

    content = _helpers.from_bytes(response.data)

    if response.headers['content-type'] == 'application/json':
        try:
            return json.loads(content)
        except ValueError as caught_exc:
            # Bug fix: same missing-space concatenation defect as above.
            new_exc = exceptions.TransportError(
                'Received invalid JSON from the Google Compute Engine '
                'metadata service: {:.20}'.format(content))
            six.raise_from(new_exc, caught_exc)

    return content
python
{ "resource": "" }
q32399
get_service_account_token
train
def get_service_account_token(request, service_account='default'):
    """Get the OAuth 2.0 access token for a service account.

    Args:
        request (google.auth.transport.Request): A callable used to
            make HTTP requests.
        service_account (str): The string 'default' or a service
            account email address. The determines which service account
            for which to acquire an access token.

    Returns:
        Union[str, datetime]: The access token and its expiration.

    Raises:
        google.auth.exceptions.TransportError: if an error occurred
            while retrieving metadata.
    """
    token_path = 'instance/service-accounts/{0}/token'.format(
        service_account)
    token_json = get(request, token_path)

    # The server reports a relative lifetime; convert to an absolute
    # expiration timestamp.
    token_expiry = _helpers.utcnow() + datetime.timedelta(
        seconds=token_json['expires_in'])

    return token_json['access_token'], token_expiry
python
{ "resource": "" }