Dataset schema (column, dtype, range / distinct values):

| column | dtype | range / values |
|---|---|---|
| id | int64 | 0 – 458k |
| file_name | stringlengths | 4 – 119 |
| file_path | stringlengths | 14 – 227 |
| content | stringlengths | 24 – 9.96M |
| size | int64 | 24 – 9.96M |
| language | stringclasses | 1 value |
| extension | stringclasses | 14 values |
| total_lines | int64 | 1 – 219k |
| avg_line_length | float64 | 2.52 – 4.63M |
| max_line_length | int64 | 5 – 9.91M |
| alphanum_fraction | float64 | 0 – 1 |
| repo_name | stringlengths | 7 – 101 |
| repo_stars | int64 | 100 – 139k |
| repo_forks | int64 | 0 – 26.4k |
| repo_open_issues | int64 | 0 – 2.27k |
| repo_license | stringclasses | 12 values |
| repo_extraction_date | stringclasses | 433 values |
id: 8,700 | file_name: __init__.py | file_path: rembo10_headphones/lib/mako/__init__.py
content:
# mako/__init__.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__version__ = "1.1.6"
size: 242 | language: Python | extension: .py | total_lines: 6 | avg_line_length: 39 | max_line_length: 74 | alphanum_fraction: 0.74359
repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,701 | file_name: codegen.py | file_path: rembo10_headphones/lib/mako/codegen.py
content:
# mako/codegen.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""provides functionality for rendering a parsetree constructing into module
source code."""
import json
import re
import time
from mako import ast
from mako import compat
from mako import exceptions
from mako import filters
from mako import parsetree
from mako import util
from mako.pygen import PythonPrinter
MAGIC_NUMBER = 10
# names which are hardwired into the
# template and are not accessed via the
# context itself
TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"])
RESERVED_NAMES = set(["context", "loop"]).union(TOPLEVEL_DECLARED)
def compile( # noqa
node,
uri,
filename=None,
default_filters=None,
buffer_filters=None,
imports=None,
future_imports=None,
source_encoding=None,
generate_magic_comment=True,
disable_unicode=False,
strict_undefined=False,
enable_loop=True,
reserved_names=frozenset(),
):
"""Generate module source code given a parsetree node,
uri, and optional source filename"""
# if on Py2K, push the "source_encoding" string to be
# a bytestring itself, as we will be embedding it into
# the generated source and we don't want to coerce the
# result into a unicode object, in "disable_unicode" mode
if not compat.py3k and isinstance(source_encoding, compat.text_type):
source_encoding = source_encoding.encode(source_encoding)
buf = util.FastEncodingBuffer()
printer = PythonPrinter(buf)
_GenerateRenderMethod(
printer,
_CompileContext(
uri,
filename,
default_filters,
buffer_filters,
imports,
future_imports,
source_encoding,
generate_magic_comment,
disable_unicode,
strict_undefined,
enable_loop,
reserved_names,
),
node,
)
return buf.getvalue()
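# Usage sketch (illustrative only, not part of the vendored module). The
# parsetree node consumed by compile() above is normally produced by the
# Lexer in the sibling mako.lexer module (assumed present in this package):
def _example_compile(text="hello ${name}", uri="memory:example"):
    from mako.lexer import Lexer
    node = Lexer(text, uri).parse()
    return compile(node, uri)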
class _CompileContext(object):
def __init__(
self,
uri,
filename,
default_filters,
buffer_filters,
imports,
future_imports,
source_encoding,
generate_magic_comment,
disable_unicode,
strict_undefined,
enable_loop,
reserved_names,
):
self.uri = uri
self.filename = filename
self.default_filters = default_filters
self.buffer_filters = buffer_filters
self.imports = imports
self.future_imports = future_imports
self.source_encoding = source_encoding
self.generate_magic_comment = generate_magic_comment
self.disable_unicode = disable_unicode
self.strict_undefined = strict_undefined
self.enable_loop = enable_loop
self.reserved_names = reserved_names
class _GenerateRenderMethod(object):
"""A template visitor object which generates the
full module source for a template.
"""
def __init__(self, printer, compiler, node):
self.printer = printer
self.compiler = compiler
self.node = node
self.identifier_stack = [None]
self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag))
if self.in_def:
name = "render_%s" % node.funcname
args = node.get_argument_expressions()
filtered = len(node.filter_args.args) > 0
buffered = eval(node.attributes.get("buffered", "False"))
cached = eval(node.attributes.get("cached", "False"))
defs = None
pagetag = None
if node.is_block and not node.is_anonymous:
args += ["**pageargs"]
else:
defs = self.write_toplevel()
pagetag = self.compiler.pagetag
name = "render_body"
if pagetag is not None:
args = pagetag.body_decl.get_argument_expressions()
if not pagetag.body_decl.kwargs:
args += ["**pageargs"]
cached = eval(pagetag.attributes.get("cached", "False"))
self.compiler.enable_loop = self.compiler.enable_loop or eval(
pagetag.attributes.get("enable_loop", "False")
)
else:
args = ["**pageargs"]
cached = False
buffered = filtered = False
if args is None:
args = ["context"]
else:
args = [a for a in ["context"] + args]
self.write_render_callable(
pagetag or node, name, args, buffered, filtered, cached
)
if defs is not None:
for node in defs:
_GenerateRenderMethod(printer, compiler, node)
if not self.in_def:
self.write_metadata_struct()
def write_metadata_struct(self):
self.printer.source_map[self.printer.lineno] = max(
self.printer.source_map
)
struct = {
"filename": self.compiler.filename,
"uri": self.compiler.uri,
"source_encoding": self.compiler.source_encoding,
"line_map": self.printer.source_map,
}
self.printer.writelines(
'"""',
"__M_BEGIN_METADATA",
json.dumps(struct),
"__M_END_METADATA\n" '"""',
)
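    # The emitted tail of every generated module looks like (illustrative):
    #
    #     """
    #     __M_BEGIN_METADATA
    #     {"filename": ..., "uri": ..., "source_encoding": ..., "line_map": {...}}
    #     __M_END_METADATA
    #     """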
@property
def identifiers(self):
return self.identifier_stack[-1]
def write_toplevel(self):
"""Traverse a template structure for module-level directives and
generate the start of module-level code.
"""
inherit = []
namespaces = {}
module_code = []
self.compiler.pagetag = None
class FindTopLevel(object):
def visitInheritTag(s, node):
inherit.append(node)
def visitNamespaceTag(s, node):
namespaces[node.name] = node
def visitPageTag(s, node):
self.compiler.pagetag = node
def visitCode(s, node):
if node.ismodule:
module_code.append(node)
f = FindTopLevel()
for n in self.node.nodes:
n.accept_visitor(f)
self.compiler.namespaces = namespaces
module_ident = set()
for n in module_code:
module_ident = module_ident.union(n.declared_identifiers())
module_identifiers = _Identifiers(self.compiler)
module_identifiers.declared = module_ident
# module-level names, python code
if (
self.compiler.generate_magic_comment
and self.compiler.source_encoding
):
self.printer.writeline(
"# -*- coding:%s -*-" % self.compiler.source_encoding
)
if self.compiler.future_imports:
self.printer.writeline(
"from __future__ import %s"
% (", ".join(self.compiler.future_imports),)
)
self.printer.writeline("from mako import runtime, filters, cache")
self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING")
self.printer.writeline("__M_dict_builtin = dict")
self.printer.writeline("__M_locals_builtin = locals")
self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER)
self.printer.writeline("_modified_time = %r" % time.time())
self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop)
self.printer.writeline(
"_template_filename = %r" % self.compiler.filename
)
self.printer.writeline("_template_uri = %r" % self.compiler.uri)
self.printer.writeline(
"_source_encoding = %r" % self.compiler.source_encoding
)
if self.compiler.imports:
buf = ""
for imp in self.compiler.imports:
buf += imp + "\n"
self.printer.writeline(imp)
impcode = ast.PythonCode(
buf,
source="",
lineno=0,
pos=0,
filename="template defined imports",
)
else:
impcode = None
main_identifiers = module_identifiers.branch(self.node)
mit = module_identifiers.topleveldefs
module_identifiers.topleveldefs = mit.union(
main_identifiers.topleveldefs
)
module_identifiers.declared.update(TOPLEVEL_DECLARED)
if impcode:
module_identifiers.declared.update(impcode.declared_identifiers)
self.compiler.identifiers = module_identifiers
self.printer.writeline(
"_exports = %r"
% [n.name for n in main_identifiers.topleveldefs.values()]
)
self.printer.write_blanks(2)
if len(module_code):
self.write_module_code(module_code)
if len(inherit):
self.write_namespaces(namespaces)
self.write_inherit(inherit[-1])
elif len(namespaces):
self.write_namespaces(namespaces)
return list(main_identifiers.topleveldefs.values())
def write_render_callable(
self, node, name, args, buffered, filtered, cached
):
"""write a top-level render callable.
this could be the main render() method or that of a top-level def."""
if self.in_def:
decorator = node.decorator
if decorator:
self.printer.writeline(
"@runtime._decorate_toplevel(%s)" % decorator
)
self.printer.start_source(node.lineno)
self.printer.writelines(
"def %s(%s):" % (name, ",".join(args)),
# push new frame, assign current frame to __M_caller
"__M_caller = context.caller_stack._push_frame()",
"try:",
)
if buffered or filtered or cached:
self.printer.writeline("context._push_buffer()")
self.identifier_stack.append(
self.compiler.identifiers.branch(self.node)
)
if (not self.in_def or self.node.is_block) and "**pageargs" in args:
self.identifier_stack[-1].argument_declared.add("pageargs")
if not self.in_def and (
len(self.identifiers.locally_assigned) > 0
or len(self.identifiers.argument_declared) > 0
):
self.printer.writeline(
"__M_locals = __M_dict_builtin(%s)"
% ",".join(
[
"%s=%s" % (x, x)
for x in self.identifiers.argument_declared
]
)
)
self.write_variable_declares(self.identifiers, toplevel=True)
for n in self.node.nodes:
n.accept_visitor(self)
self.write_def_finish(self.node, buffered, filtered, cached)
self.printer.writeline(None)
self.printer.write_blanks(2)
if cached:
self.write_cache_decorator(
node, name, args, buffered, self.identifiers, toplevel=True
)
def write_module_code(self, module_code):
"""write module-level template code, i.e. that which
is enclosed in <%! %> tags in the template."""
for n in module_code:
self.printer.write_indented_block(n.text, starting_lineno=n.lineno)
def write_inherit(self, node):
"""write the module-level inheritance-determination callable."""
self.printer.writelines(
"def _mako_inherit(template, context):",
"_mako_generate_namespaces(context)",
"return runtime._inherit_from(context, %s, _template_uri)"
% (node.parsed_attributes["file"]),
None,
)
def write_namespaces(self, namespaces):
"""write the module-level namespace-generating callable."""
self.printer.writelines(
"def _mako_get_namespace(context, name):",
"try:",
"return context.namespaces[(__name__, name)]",
"except KeyError:",
"_mako_generate_namespaces(context)",
"return context.namespaces[(__name__, name)]",
None,
None,
)
self.printer.writeline("def _mako_generate_namespaces(context):")
for node in namespaces.values():
if "import" in node.attributes:
self.compiler.has_ns_imports = True
self.printer.start_source(node.lineno)
if len(node.nodes):
self.printer.writeline("def make_namespace():")
export = []
identifiers = self.compiler.identifiers.branch(node)
self.in_def = True
class NSDefVisitor(object):
def visitDefTag(s, node):
s.visitDefOrBase(node)
def visitBlockTag(s, node):
s.visitDefOrBase(node)
def visitDefOrBase(s, node):
if node.is_anonymous:
raise exceptions.CompileException(
"Can't put anonymous blocks inside "
"<%namespace>",
**node.exception_kwargs
)
self.write_inline_def(node, identifiers, nested=False)
export.append(node.funcname)
vis = NSDefVisitor()
for n in node.nodes:
n.accept_visitor(vis)
self.printer.writeline("return [%s]" % (",".join(export)))
self.printer.writeline(None)
self.in_def = False
callable_name = "make_namespace()"
else:
callable_name = "None"
if "file" in node.parsed_attributes:
self.printer.writeline(
"ns = runtime.TemplateNamespace(%r,"
" context._clean_inheritance_tokens(),"
" templateuri=%s, callables=%s, "
" calling_uri=_template_uri)"
% (
node.name,
node.parsed_attributes.get("file", "None"),
callable_name,
)
)
elif "module" in node.parsed_attributes:
self.printer.writeline(
"ns = runtime.ModuleNamespace(%r,"
" context._clean_inheritance_tokens(),"
" callables=%s, calling_uri=_template_uri,"
" module=%s)"
% (
node.name,
callable_name,
node.parsed_attributes.get("module", "None"),
)
)
else:
self.printer.writeline(
"ns = runtime.Namespace(%r,"
" context._clean_inheritance_tokens(),"
" callables=%s, calling_uri=_template_uri)"
% (node.name, callable_name)
)
if eval(node.attributes.get("inheritable", "False")):
self.printer.writeline("context['self'].%s = ns" % (node.name))
self.printer.writeline(
"context.namespaces[(__name__, %s)] = ns" % repr(node.name)
)
self.printer.write_blanks(1)
if not len(namespaces):
self.printer.writeline("pass")
self.printer.writeline(None)
def write_variable_declares(self, identifiers, toplevel=False, limit=None):
"""write variable declarations at the top of a function.
the variable declarations are in the form of callable
definitions for defs and/or name lookup within the
function's context argument. the names declared are based
on the names that are referenced in the function body,
which don't otherwise have any explicit assignment
operation. names that are assigned within the body are
assumed to be locally-scoped variables and are not
separately declared.
for def callable definitions, if the def is a top-level
callable then a 'stub' callable is generated which wraps
the current Context into a closure. if the def is not
top-level, it is fully rendered as a local closure.
"""
# collection of all defs available to us in this scope
comp_idents = dict([(c.funcname, c) for c in identifiers.defs])
to_write = set()
# write "context.get()" for all variables we are going to
        # need that aren't in the namespace yet
to_write = to_write.union(identifiers.undeclared)
# write closure functions for closures that we define
# right here
to_write = to_write.union(
[c.funcname for c in identifiers.closuredefs.values()]
)
# remove identifiers that are declared in the argument
# signature of the callable
to_write = to_write.difference(identifiers.argument_declared)
# remove identifiers that we are going to assign to.
# in this way we mimic Python's behavior,
# i.e. assignment to a variable within a block
# means that variable is now a "locally declared" var,
# which cannot be referenced beforehand.
to_write = to_write.difference(identifiers.locally_declared)
if self.compiler.enable_loop:
has_loop = "loop" in to_write
to_write.discard("loop")
else:
has_loop = False
        # if a limiting set was sent, constrain to those items in that list
# (this is used for the caching decorator)
if limit is not None:
to_write = to_write.intersection(limit)
if toplevel and getattr(self.compiler, "has_ns_imports", False):
self.printer.writeline("_import_ns = {}")
self.compiler.has_imports = True
for ident, ns in self.compiler.namespaces.items():
if "import" in ns.attributes:
self.printer.writeline(
"_mako_get_namespace(context, %r)."
"_populate(_import_ns, %r)"
% (
ident,
re.split(r"\s*,\s*", ns.attributes["import"]),
)
)
if has_loop:
self.printer.writeline("loop = __M_loop = runtime.LoopStack()")
for ident in to_write:
if ident in comp_idents:
comp = comp_idents[ident]
if comp.is_block:
if not comp.is_anonymous:
self.write_def_decl(comp, identifiers)
else:
self.write_inline_def(comp, identifiers, nested=True)
else:
if comp.is_root():
self.write_def_decl(comp, identifiers)
else:
self.write_inline_def(comp, identifiers, nested=True)
elif ident in self.compiler.namespaces:
self.printer.writeline(
"%s = _mako_get_namespace(context, %r)" % (ident, ident)
)
else:
if getattr(self.compiler, "has_ns_imports", False):
if self.compiler.strict_undefined:
self.printer.writelines(
"%s = _import_ns.get(%r, UNDEFINED)"
% (ident, ident),
"if %s is UNDEFINED:" % ident,
"try:",
"%s = context[%r]" % (ident, ident),
"except KeyError:",
"raise NameError(\"'%s' is not defined\")" % ident,
None,
None,
)
else:
self.printer.writeline(
"%s = _import_ns.get"
"(%r, context.get(%r, UNDEFINED))"
% (ident, ident, ident)
)
else:
if self.compiler.strict_undefined:
self.printer.writelines(
"try:",
"%s = context[%r]" % (ident, ident),
"except KeyError:",
"raise NameError(\"'%s' is not defined\")" % ident,
None,
)
else:
self.printer.writeline(
"%s = context.get(%r, UNDEFINED)" % (ident, ident)
)
self.printer.writeline("__M_writer = context.writer()")
def write_def_decl(self, node, identifiers):
"""write a locally-available callable referencing a top-level def"""
funcname = node.funcname
namedecls = node.get_argument_expressions()
nameargs = node.get_argument_expressions(as_call=True)
if not self.in_def and (
len(self.identifiers.locally_assigned) > 0
or len(self.identifiers.argument_declared) > 0
):
nameargs.insert(0, "context._locals(__M_locals)")
else:
nameargs.insert(0, "context")
self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
self.printer.writeline(
"return render_%s(%s)" % (funcname, ",".join(nameargs))
)
self.printer.writeline(None)
def write_inline_def(self, node, identifiers, nested):
"""write a locally-available def callable inside an enclosing def."""
namedecls = node.get_argument_expressions()
decorator = node.decorator
if decorator:
self.printer.writeline(
"@runtime._decorate_inline(context, %s)" % decorator
)
self.printer.writeline(
"def %s(%s):" % (node.funcname, ",".join(namedecls))
)
filtered = len(node.filter_args.args) > 0
buffered = eval(node.attributes.get("buffered", "False"))
cached = eval(node.attributes.get("cached", "False"))
self.printer.writelines(
# push new frame, assign current frame to __M_caller
"__M_caller = context.caller_stack._push_frame()",
"try:",
)
if buffered or filtered or cached:
self.printer.writelines("context._push_buffer()")
identifiers = identifiers.branch(node, nested=nested)
self.write_variable_declares(identifiers)
self.identifier_stack.append(identifiers)
for n in node.nodes:
n.accept_visitor(self)
self.identifier_stack.pop()
self.write_def_finish(node, buffered, filtered, cached)
self.printer.writeline(None)
if cached:
self.write_cache_decorator(
node,
node.funcname,
namedecls,
False,
identifiers,
inline=True,
toplevel=False,
)
def write_def_finish(
self, node, buffered, filtered, cached, callstack=True
):
"""write the end section of a rendering function, either outermost or
inline.
this takes into account if the rendering function was filtered,
buffered, etc. and closes the corresponding try: block if any, and
writes code to retrieve captured content, apply filters, send proper
return value."""
if not buffered and not cached and not filtered:
self.printer.writeline("return ''")
if callstack:
self.printer.writelines(
"finally:", "context.caller_stack._pop_frame()", None
)
if buffered or filtered or cached:
if buffered or cached:
# in a caching scenario, don't try to get a writer
# from the context after popping; assume the caching
                # implementation might be using a context with no
# extra buffers
self.printer.writelines(
"finally:", "__M_buf = context._pop_buffer()"
)
else:
self.printer.writelines(
"finally:",
"__M_buf, __M_writer = context._pop_buffer_and_writer()",
)
if callstack:
self.printer.writeline("context.caller_stack._pop_frame()")
s = "__M_buf.getvalue()"
if filtered:
s = self.create_filter_callable(
node.filter_args.args, s, False
)
self.printer.writeline(None)
if buffered and not cached:
s = self.create_filter_callable(
self.compiler.buffer_filters, s, False
)
if buffered or cached:
self.printer.writeline("return %s" % s)
else:
self.printer.writelines("__M_writer(%s)" % s, "return ''")
def write_cache_decorator(
self,
node_or_pagetag,
name,
args,
buffered,
identifiers,
inline=False,
toplevel=False,
):
"""write a post-function decorator to replace a rendering
callable with a cached version of itself."""
self.printer.writeline("__M_%s = %s" % (name, name))
cachekey = node_or_pagetag.parsed_attributes.get(
"cache_key", repr(name)
)
cache_args = {}
if self.compiler.pagetag is not None:
cache_args.update(
(pa[6:], self.compiler.pagetag.parsed_attributes[pa])
for pa in self.compiler.pagetag.parsed_attributes
if pa.startswith("cache_") and pa != "cache_key"
)
cache_args.update(
(pa[6:], node_or_pagetag.parsed_attributes[pa])
for pa in node_or_pagetag.parsed_attributes
if pa.startswith("cache_") and pa != "cache_key"
)
if "timeout" in cache_args:
cache_args["timeout"] = int(eval(cache_args["timeout"]))
self.printer.writeline("def %s(%s):" % (name, ",".join(args)))
# form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
pass_args = [
"%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args
]
self.write_variable_declares(
identifiers,
toplevel=toplevel,
limit=node_or_pagetag.undeclared_identifiers(),
)
if buffered:
s = (
"context.get('local')."
"cache._ctx_get_or_create("
"%s, lambda:__M_%s(%s), context, %s__M_defname=%r)"
% (
cachekey,
name,
",".join(pass_args),
"".join(
["%s=%s, " % (k, v) for k, v in cache_args.items()]
),
name,
)
)
# apply buffer_filters
s = self.create_filter_callable(
self.compiler.buffer_filters, s, False
)
self.printer.writelines("return " + s, None)
else:
self.printer.writelines(
"__M_writer(context.get('local')."
"cache._ctx_get_or_create("
"%s, lambda:__M_%s(%s), context, %s__M_defname=%r))"
% (
cachekey,
name,
",".join(pass_args),
"".join(
["%s=%s, " % (k, v) for k, v in cache_args.items()]
),
name,
),
"return ''",
None,
)
def create_filter_callable(self, args, target, is_expression):
"""write a filter-applying expression based on the filters
present in the given filter names, adjusting for the global
'default' filter aliases as needed."""
def locate_encode(name):
if re.match(r"decode\..+", name):
return "filters." + name
elif self.compiler.disable_unicode:
return filters.NON_UNICODE_ESCAPES.get(name, name)
else:
return filters.DEFAULT_ESCAPES.get(name, name)
if "n" not in args:
if is_expression:
if self.compiler.pagetag:
args = self.compiler.pagetag.filter_args.args + args
if self.compiler.default_filters and "n" not in args:
args = self.compiler.default_filters + args
for e in args:
# if filter given as a function, get just the identifier portion
if e == "n":
continue
m = re.match(r"(.+?)(\(.*\))", e)
if m:
ident, fargs = m.group(1, 2)
f = locate_encode(ident)
e = f + fargs
else:
e = locate_encode(e)
assert e is not None
target = "%s(%s)" % (e, target)
return target
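    # Illustrative: create_filter_callable(['u', 'h'], 'name', False)
    # returns "filters.html_escape(filters.url_escape(name))"; each filter
    # in turn wraps the previous expression, and the 'u'/'h' aliases resolve
    # through filters.DEFAULT_ESCAPES (when unicode is enabled).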
def visitExpression(self, node):
self.printer.start_source(node.lineno)
if (
len(node.escapes)
or (
self.compiler.pagetag is not None
and len(self.compiler.pagetag.filter_args.args)
)
or len(self.compiler.default_filters)
):
s = self.create_filter_callable(
node.escapes_code.args, "%s" % node.text, True
)
self.printer.writeline("__M_writer(%s)" % s)
else:
self.printer.writeline("__M_writer(%s)" % node.text)
def visitControlLine(self, node):
if node.isend:
self.printer.writeline(None)
if node.has_loop_context:
self.printer.writeline("finally:")
self.printer.writeline("loop = __M_loop._exit()")
self.printer.writeline(None)
else:
self.printer.start_source(node.lineno)
if self.compiler.enable_loop and node.keyword == "for":
text = mangle_mako_loop(node, self.printer)
else:
text = node.text
self.printer.writeline(text)
children = node.get_children()
# this covers the three situations where we want to insert a pass:
# 1) a ternary control line with no children,
# 2) a primary control line with nothing but its own ternary
# and end control lines, and
# 3) any control line with no content other than comments
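        # e.g. (illustrative) a template such as
        #     % if x:
        #     % elif y:
        #         ...
        #     % endif
        # emits "if x:" immediately followed by "elif y:", so a "pass"
        # must be written under the otherwise-empty "if x:" suite.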
if not children or (
compat.all(
isinstance(c, (parsetree.Comment, parsetree.ControlLine))
for c in children
)
and compat.all(
(node.is_ternary(c.keyword) or c.isend)
for c in children
if isinstance(c, parsetree.ControlLine)
)
):
self.printer.writeline("pass")
def visitText(self, node):
self.printer.start_source(node.lineno)
self.printer.writeline("__M_writer(%s)" % repr(node.content))
def visitTextTag(self, node):
filtered = len(node.filter_args.args) > 0
if filtered:
self.printer.writelines(
"__M_writer = context._push_writer()", "try:"
)
for n in node.nodes:
n.accept_visitor(self)
if filtered:
self.printer.writelines(
"finally:",
"__M_buf, __M_writer = context._pop_buffer_and_writer()",
"__M_writer(%s)"
% self.create_filter_callable(
node.filter_args.args, "__M_buf.getvalue()", False
),
None,
)
def visitCode(self, node):
if not node.ismodule:
self.printer.write_indented_block(
node.text, starting_lineno=node.lineno
)
if not self.in_def and len(self.identifiers.locally_assigned) > 0:
# if we are the "template" def, fudge locally
# declared/modified variables into the "__M_locals" dictionary,
# which is used for def calls within the same template,
# to simulate "enclosing scope"
self.printer.writeline(
"__M_locals_builtin_stored = __M_locals_builtin()"
)
self.printer.writeline(
"__M_locals.update(__M_dict_builtin([(__M_key,"
" __M_locals_builtin_stored[__M_key]) for __M_key in"
" [%s] if __M_key in __M_locals_builtin_stored]))"
% ",".join([repr(x) for x in node.declared_identifiers()])
)
def visitIncludeTag(self, node):
self.printer.start_source(node.lineno)
args = node.attributes.get("args")
if args:
self.printer.writeline(
"runtime._include_file(context, %s, _template_uri, %s)"
% (node.parsed_attributes["file"], args)
)
else:
self.printer.writeline(
"runtime._include_file(context, %s, _template_uri)"
% (node.parsed_attributes["file"])
)
def visitNamespaceTag(self, node):
pass
def visitDefTag(self, node):
pass
def visitBlockTag(self, node):
if node.is_anonymous:
self.printer.writeline("%s()" % node.funcname)
else:
nameargs = node.get_argument_expressions(as_call=True)
nameargs += ["**pageargs"]
self.printer.writeline(
"if 'parent' not in context._data or "
"not hasattr(context._data['parent'], '%s'):" % node.funcname
)
self.printer.writeline(
"context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))
)
self.printer.writeline("\n")
def visitCallNamespaceTag(self, node):
# TODO: we can put namespace-specific checks here, such
# as ensure the given namespace will be imported,
# pre-import the namespace, etc.
self.visitCallTag(node)
def visitCallTag(self, node):
self.printer.writeline("def ccall(caller):")
export = ["body"]
callable_identifiers = self.identifiers.branch(node, nested=True)
body_identifiers = callable_identifiers.branch(node, nested=False)
# we want the 'caller' passed to ccall to be used
# for the body() function, but for other non-body()
# <%def>s within <%call> we want the current caller
# off the call stack (if any)
body_identifiers.add_declared("caller")
self.identifier_stack.append(body_identifiers)
class DefVisitor(object):
def visitDefTag(s, node):
s.visitDefOrBase(node)
def visitBlockTag(s, node):
s.visitDefOrBase(node)
def visitDefOrBase(s, node):
self.write_inline_def(node, callable_identifiers, nested=False)
if not node.is_anonymous:
export.append(node.funcname)
# remove defs that are within the <%call> from the
# "closuredefs" defined in the body, so they dont render twice
if node.funcname in body_identifiers.closuredefs:
del body_identifiers.closuredefs[node.funcname]
vis = DefVisitor()
for n in node.nodes:
n.accept_visitor(vis)
self.identifier_stack.pop()
bodyargs = node.body_decl.get_argument_expressions()
self.printer.writeline("def body(%s):" % ",".join(bodyargs))
# TODO: figure out best way to specify
# buffering/nonbuffering (at call time would be better)
buffered = False
if buffered:
self.printer.writelines("context._push_buffer()", "try:")
self.write_variable_declares(body_identifiers)
self.identifier_stack.append(body_identifiers)
for n in node.nodes:
n.accept_visitor(self)
self.identifier_stack.pop()
self.write_def_finish(node, buffered, False, False, callstack=False)
self.printer.writelines(None, "return [%s]" % (",".join(export)), None)
self.printer.writelines(
# push on caller for nested call
"context.caller_stack.nextcaller = "
"runtime.Namespace('caller', context, "
"callables=ccall(__M_caller))",
"try:",
)
self.printer.start_source(node.lineno)
self.printer.writelines(
"__M_writer(%s)"
% self.create_filter_callable([], node.expression, True),
"finally:",
"context.caller_stack.nextcaller = None",
None,
)
class _Identifiers(object):
"""tracks the status of identifier names as template code is rendered."""
def __init__(self, compiler, node=None, parent=None, nested=False):
if parent is not None:
# if we are the branch created in write_namespaces(),
# we don't share any context from the main body().
if isinstance(node, parsetree.NamespaceTag):
self.declared = set()
self.topleveldefs = util.SetLikeDict()
else:
# things that have already been declared
# in an enclosing namespace (i.e. names we can just use)
self.declared = (
set(parent.declared)
.union([c.name for c in parent.closuredefs.values()])
.union(parent.locally_declared)
.union(parent.argument_declared)
)
# if these identifiers correspond to a "nested"
# scope, it means whatever the parent identifiers
# had as undeclared will have been declared by that parent,
# and therefore we have them in our scope.
if nested:
self.declared = self.declared.union(parent.undeclared)
# top level defs that are available
self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
else:
self.declared = set()
self.topleveldefs = util.SetLikeDict()
self.compiler = compiler
# things within this level that are referenced before they
# are declared (e.g. assigned to)
self.undeclared = set()
# things that are declared locally. some of these things
# could be in the "undeclared" list as well if they are
# referenced before declared
self.locally_declared = set()
# assignments made in explicit python blocks.
# these will be propagated to
# the context of local def calls.
self.locally_assigned = set()
# things that are declared in the argument
# signature of the def callable
self.argument_declared = set()
# closure defs that are defined in this level
self.closuredefs = util.SetLikeDict()
self.node = node
if node is not None:
node.accept_visitor(self)
illegal_names = self.compiler.reserved_names.intersection(
self.locally_declared
)
if illegal_names:
raise exceptions.NameConflictError(
"Reserved words declared in template: %s"
% ", ".join(illegal_names)
)
def branch(self, node, **kwargs):
"""create a new Identifiers for a new Node, with
this Identifiers as the parent."""
return _Identifiers(self.compiler, node, self, **kwargs)
@property
def defs(self):
return set(self.topleveldefs.union(self.closuredefs).values())
def __repr__(self):
return (
"Identifiers(declared=%r, locally_declared=%r, "
"undeclared=%r, topleveldefs=%r, closuredefs=%r, "
"argumentdeclared=%r)"
% (
list(self.declared),
list(self.locally_declared),
list(self.undeclared),
[c.name for c in self.topleveldefs.values()],
[c.name for c in self.closuredefs.values()],
self.argument_declared,
)
)
def check_declared(self, node):
"""update the state of this Identifiers with the undeclared
and declared identifiers of the given node."""
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
for ident in node.declared_identifiers():
self.locally_declared.add(ident)
def add_declared(self, ident):
self.declared.add(ident)
if ident in self.undeclared:
self.undeclared.remove(ident)
def visitExpression(self, node):
self.check_declared(node)
def visitControlLine(self, node):
self.check_declared(node)
def visitCode(self, node):
if not node.ismodule:
self.check_declared(node)
self.locally_assigned = self.locally_assigned.union(
node.declared_identifiers()
)
def visitNamespaceTag(self, node):
# only traverse into the sub-elements of a
# <%namespace> tag if we are the branch created in
# write_namespaces()
if self.node is node:
for n in node.nodes:
n.accept_visitor(self)
def _check_name_exists(self, collection, node):
existing = collection.get(node.funcname)
collection[node.funcname] = node
if (
existing is not None
and existing is not node
and (node.is_block or existing.is_block)
):
raise exceptions.CompileException(
"%%def or %%block named '%s' already "
"exists in this template." % node.funcname,
**node.exception_kwargs
)
def visitDefTag(self, node):
if node.is_root() and not node.is_anonymous:
self._check_name_exists(self.topleveldefs, node)
elif node is not self.node:
self._check_name_exists(self.closuredefs, node)
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
# visit defs only one level deep
if node is self.node:
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
for n in node.nodes:
n.accept_visitor(self)
def visitBlockTag(self, node):
if node is not self.node and not node.is_anonymous:
if isinstance(self.node, parsetree.DefTag):
raise exceptions.CompileException(
"Named block '%s' not allowed inside of def '%s'"
% (node.name, self.node.name),
**node.exception_kwargs
)
elif isinstance(
self.node, (parsetree.CallTag, parsetree.CallNamespaceTag)
):
raise exceptions.CompileException(
"Named block '%s' not allowed inside of <%%call> tag"
% (node.name,),
**node.exception_kwargs
)
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
if not node.is_anonymous:
self._check_name_exists(self.topleveldefs, node)
self.undeclared.add(node.funcname)
elif node is not self.node:
self._check_name_exists(self.closuredefs, node)
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
for n in node.nodes:
n.accept_visitor(self)
def visitTextTag(self, node):
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
def visitIncludeTag(self, node):
self.check_declared(node)
def visitPageTag(self, node):
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
self.check_declared(node)
def visitCallNamespaceTag(self, node):
self.visitCallTag(node)
def visitCallTag(self, node):
if node is self.node:
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
for n in node.nodes:
n.accept_visitor(self)
else:
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
_FOR_LOOP = re.compile(
r"^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*"
r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):"
)
def mangle_mako_loop(node, printer):
"""converts a for loop into a context manager wrapped around a for loop
when access to the `loop` variable has been detected in the for loop body
"""
loop_variable = LoopVariable()
node.accept_visitor(loop_variable)
if loop_variable.detected:
node.nodes[-1].has_loop_context = True
match = _FOR_LOOP.match(node.text)
if match:
printer.writelines(
"loop = __M_loop._enter(%s)" % match.group(2),
"try:"
# 'with __M_loop(%s) as loop:' % match.group(2)
)
text = "for %s in loop:" % match.group(1)
else:
raise SyntaxError("Couldn't apply loop context: %s" % node.text)
else:
text = node.text
return text
class LoopVariable(object):
"""A node visitor which looks for the name 'loop' within undeclared
identifiers."""
def __init__(self):
self.detected = False
def _loop_reference_detected(self, node):
if "loop" in node.undeclared_identifiers():
self.detected = True
else:
for n in node.get_children():
n.accept_visitor(self)
def visitControlLine(self, node):
self._loop_reference_detected(node)
def visitCode(self, node):
self._loop_reference_detected(node)
def visitExpression(self, node):
self._loop_reference_detected(node)
size: 47,892 | language: Python | extension: .py | total_lines: 1,141 | avg_line_length: 29.138475 | max_line_length: 79 | alphanum_fraction: 0.547709
repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,702 | file_name: _ast_util.py | file_path: rembo10_headphones/lib/mako/_ast_util.py
content:
# mako/_ast_util.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
ast
~~~
This is a stripped down version of Armin Ronacher's ast module.
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
from _ast import Add
from _ast import And
from _ast import AST
from _ast import BitAnd
from _ast import BitOr
from _ast import BitXor
from _ast import Div
from _ast import Eq
from _ast import FloorDiv
from _ast import Gt
from _ast import GtE
from _ast import If
from _ast import In
from _ast import Invert
from _ast import Is
from _ast import IsNot
from _ast import LShift
from _ast import Lt
from _ast import LtE
from _ast import Mod
from _ast import Mult
from _ast import Name
from _ast import Not
from _ast import NotEq
from _ast import NotIn
from _ast import Or
from _ast import PyCF_ONLY_AST
from _ast import RShift
from _ast import Sub
from _ast import UAdd
from _ast import USub
from mako.compat import arg_stringname
BOOLOP_SYMBOLS = {And: "and", Or: "or"}
BINOP_SYMBOLS = {
Add: "+",
Sub: "-",
Mult: "*",
Div: "/",
FloorDiv: "//",
Mod: "%",
LShift: "<<",
RShift: ">>",
BitOr: "|",
BitAnd: "&",
BitXor: "^",
}
CMPOP_SYMBOLS = {
Eq: "==",
Gt: ">",
GtE: ">=",
In: "in",
Is: "is",
IsNot: "is not",
Lt: "<",
LtE: "<=",
NotEq: "!=",
NotIn: "not in",
}
UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"}
ALL_SYMBOLS = {}
ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
ALL_SYMBOLS.update(BINOP_SYMBOLS)
ALL_SYMBOLS.update(CMPOP_SYMBOLS)
ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
def parse(expr, filename="<unknown>", mode="exec"):
"""Parse an expression into an AST node."""
return compile(expr, filename, mode, PyCF_ONLY_AST)
def iter_fields(node):
"""Iterate over all fields of a node, only yielding existing fields."""
# CPython 2.5 compat
if not hasattr(node, "_fields") or not node._fields:
return
for field in node._fields:
try:
yield field, getattr(node, field)
except AttributeError:
pass
class NodeVisitor(object):
"""
Walks the abstract syntax tree and call visitor functions for every node
found. The visitor functions may return values which will be forwarded
by the `visit` method.
Per default the visitor functions for the nodes are ``'visit_'`` +
class name of the node. So a `TryFinally` node visit function would
be `visit_TryFinally`. This behavior can be changed by overriding
the `get_visitor` function. If no visitor function exists for a node
(return value `None`) the `generic_visit` visitor is used instead.
Don't use the `NodeVisitor` if you want to apply changes to nodes during
traversing. For this a special visitor exists (`NodeTransformer`) that
allows modifications.
"""
def get_visitor(self, node):
"""
Return the visitor function for this node or `None` if no visitor
exists for this node. In that case the generic visit function is
used instead.
"""
method = "visit_" + node.__class__.__name__
return getattr(self, method, None)
def visit(self, node):
"""Visit a node."""
f = self.get_visitor(node)
if f is not None:
return f(node)
return self.generic_visit(node)
def generic_visit(self, node):
"""Called if no explicit visitor function exists for a node."""
for field, value in iter_fields(node):
if isinstance(value, list):
for item in value:
if isinstance(item, AST):
self.visit(item)
elif isinstance(value, AST):
self.visit(value)
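# Example subclass (illustrative, not part of the vendored module): counts
# every Name node via the visit_<ClassName> dispatch described above.
class _NameCounter(NodeVisitor):
    def __init__(self):
        self.count = 0

    def visit_Name(self, node):
        self.count += 1
        self.generic_visit(node)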
class NodeTransformer(NodeVisitor):
"""
Walks the abstract syntax tree and allows modifications of nodes.
The `NodeTransformer` will walk the AST and use the return value of the
    visitor functions to replace or remove the old node. If the return
    value of the visitor function is `None` the node will be removed
    from its previous location; otherwise it is replaced with the return
    value. The return value may be the original node, in which case no
replacement takes place.
    Here is an example transformer that rewrites all `foo` to `data['foo']`::
class RewriteName(NodeTransformer):
def visit_Name(self, node):
return copy_location(Subscript(
value=Name(id='data', ctx=Load()),
slice=Index(value=Str(s=node.id)),
ctx=node.ctx
), node)
Keep in mind that if the node you're operating on has child nodes
you must either transform the child nodes yourself or call the generic
visit function for the node first.
Nodes that were part of a collection of statements (that applies to
all statement nodes) may also return a list of nodes rather than just
a single node.
Usually you use the transformer like this::
node = YourTransformer().visit(node)
"""
def generic_visit(self, node):
for field, old_value in iter_fields(node):
old_value = getattr(node, field, None)
if isinstance(old_value, list):
new_values = []
for value in old_value:
if isinstance(value, AST):
value = self.visit(value)
if value is None:
continue
elif not isinstance(value, AST):
new_values.extend(value)
continue
new_values.append(value)
old_value[:] = new_values
elif isinstance(old_value, AST):
new_node = self.visit(old_value)
if new_node is None:
delattr(node, field)
else:
setattr(node, field, new_node)
return node
class SourceGenerator(NodeVisitor):
"""
    This visitor is able to transform a well-formed syntax tree into Python
    source code. For more details have a look at the docstring of the
`node_to_source` function.
"""
def __init__(self, indent_with):
self.result = []
self.indent_with = indent_with
self.indentation = 0
self.new_lines = 0
def write(self, x):
if self.new_lines:
if self.result:
self.result.append("\n" * self.new_lines)
self.result.append(self.indent_with * self.indentation)
self.new_lines = 0
self.result.append(x)
def newline(self, n=1):
self.new_lines = max(self.new_lines, n)
def body(self, statements):
        # force a line break before the indented body ("new_lines" is the
        # attribute that write()/newline() actually consult)
        self.new_lines = max(self.new_lines, 1)
self.indentation += 1
for stmt in statements:
self.visit(stmt)
self.indentation -= 1
def body_or_else(self, node):
self.body(node.body)
if node.orelse:
self.newline()
self.write("else:")
self.body(node.orelse)
def signature(self, node):
want_comma = []
def write_comma():
if want_comma:
self.write(", ")
else:
want_comma.append(True)
padding = [None] * (len(node.args) - len(node.defaults))
for arg, default in zip(node.args, padding + node.defaults):
write_comma()
self.visit(arg)
if default is not None:
self.write("=")
self.visit(default)
if node.vararg is not None:
write_comma()
self.write("*" + arg_stringname(node.vararg))
if node.kwarg is not None:
write_comma()
self.write("**" + arg_stringname(node.kwarg))
def decorators(self, node):
for decorator in node.decorator_list:
self.newline()
self.write("@")
self.visit(decorator)
# Statements
def visit_Assign(self, node):
self.newline()
for idx, target in enumerate(node.targets):
if idx:
self.write(", ")
self.visit(target)
self.write(" = ")
self.visit(node.value)
def visit_AugAssign(self, node):
self.newline()
self.visit(node.target)
self.write(BINOP_SYMBOLS[type(node.op)] + "=")
self.visit(node.value)
def visit_ImportFrom(self, node):
self.newline()
self.write("from %s%s import " % ("." * node.level, node.module))
for idx, item in enumerate(node.names):
if idx:
self.write(", ")
            # dispatch to visit_alias; item is an alias node, not a string
            self.visit(item)
def visit_Import(self, node):
self.newline()
for item in node.names:
self.write("import ")
self.visit(item)
def visit_Expr(self, node):
self.newline()
self.generic_visit(node)
def visit_FunctionDef(self, node):
self.newline(n=2)
self.decorators(node)
self.newline()
self.write("def %s(" % node.name)
self.signature(node.args)
self.write("):")
self.body(node.body)
def visit_ClassDef(self, node):
have_args = []
def paren_or_comma():
if have_args:
self.write(", ")
else:
have_args.append(True)
self.write("(")
self.newline(n=3)
self.decorators(node)
self.newline()
self.write("class %s" % node.name)
for base in node.bases:
paren_or_comma()
self.visit(base)
# XXX: the if here is used to keep this module compatible
# with python 2.6.
if hasattr(node, "keywords"):
for keyword in node.keywords:
paren_or_comma()
self.write(keyword.arg + "=")
self.visit(keyword.value)
if getattr(node, "starargs", None):
paren_or_comma()
self.write("*")
self.visit(node.starargs)
if getattr(node, "kwargs", None):
paren_or_comma()
self.write("**")
self.visit(node.kwargs)
self.write(have_args and "):" or ":")
self.body(node.body)
def visit_If(self, node):
self.newline()
self.write("if ")
self.visit(node.test)
self.write(":")
self.body(node.body)
while True:
else_ = node.orelse
if len(else_) == 1 and isinstance(else_[0], If):
node = else_[0]
self.newline()
self.write("elif ")
self.visit(node.test)
self.write(":")
self.body(node.body)
else:
self.newline()
self.write("else:")
self.body(else_)
break
def visit_For(self, node):
self.newline()
self.write("for ")
self.visit(node.target)
self.write(" in ")
self.visit(node.iter)
self.write(":")
self.body_or_else(node)
def visit_While(self, node):
self.newline()
self.write("while ")
self.visit(node.test)
self.write(":")
self.body_or_else(node)
def visit_With(self, node):
self.newline()
self.write("with ")
self.visit(node.context_expr)
if node.optional_vars is not None:
self.write(" as ")
self.visit(node.optional_vars)
self.write(":")
self.body(node.body)
def visit_Pass(self, node):
self.newline()
self.write("pass")
def visit_Print(self, node):
# XXX: python 2.6 only
self.newline()
self.write("print ")
want_comma = False
if node.dest is not None:
self.write(" >> ")
self.visit(node.dest)
want_comma = True
for value in node.values:
if want_comma:
self.write(", ")
self.visit(value)
want_comma = True
if not node.nl:
self.write(",")
def visit_Delete(self, node):
self.newline()
self.write("del ")
        # iterate the delete targets (node.targets), not the node itself
        for idx, target in enumerate(node.targets):
if idx:
self.write(", ")
self.visit(target)
def visit_TryExcept(self, node):
self.newline()
self.write("try:")
self.body(node.body)
for handler in node.handlers:
self.visit(handler)
def visit_TryFinally(self, node):
self.newline()
self.write("try:")
self.body(node.body)
self.newline()
self.write("finally:")
self.body(node.finalbody)
def visit_Global(self, node):
self.newline()
self.write("global " + ", ".join(node.names))
def visit_Nonlocal(self, node):
self.newline()
self.write("nonlocal " + ", ".join(node.names))
def visit_Return(self, node):
self.newline()
self.write("return ")
self.visit(node.value)
def visit_Break(self, node):
self.newline()
self.write("break")
def visit_Continue(self, node):
self.newline()
self.write("continue")
def visit_Raise(self, node):
# XXX: Python 2.6 / 3.0 compatibility
self.newline()
self.write("raise")
if hasattr(node, "exc") and node.exc is not None:
self.write(" ")
self.visit(node.exc)
if node.cause is not None:
self.write(" from ")
self.visit(node.cause)
elif hasattr(node, "type") and node.type is not None:
self.visit(node.type)
if node.inst is not None:
self.write(", ")
self.visit(node.inst)
if node.tback is not None:
self.write(", ")
self.visit(node.tback)
# Expressions
def visit_Attribute(self, node):
self.visit(node.value)
self.write("." + node.attr)
def visit_Call(self, node):
want_comma = []
def write_comma():
if want_comma:
self.write(", ")
else:
want_comma.append(True)
self.visit(node.func)
self.write("(")
for arg in node.args:
write_comma()
self.visit(arg)
for keyword in node.keywords:
write_comma()
self.write(keyword.arg + "=")
self.visit(keyword.value)
if getattr(node, "starargs", None):
write_comma()
self.write("*")
self.visit(node.starargs)
if getattr(node, "kwargs", None):
write_comma()
self.write("**")
self.visit(node.kwargs)
self.write(")")
def visit_Name(self, node):
self.write(node.id)
def visit_NameConstant(self, node):
self.write(str(node.value))
def visit_arg(self, node):
self.write(node.arg)
def visit_Str(self, node):
self.write(repr(node.s))
def visit_Bytes(self, node):
self.write(repr(node.s))
def visit_Num(self, node):
self.write(repr(node.n))
# newly needed in Python 3.8
def visit_Constant(self, node):
self.write(repr(node.value))
def visit_Tuple(self, node):
self.write("(")
idx = -1
for idx, item in enumerate(node.elts):
if idx:
self.write(", ")
self.visit(item)
self.write(idx and ")" or ",)")
def sequence_visit(left, right):
def visit(self, node):
self.write(left)
for idx, item in enumerate(node.elts):
if idx:
self.write(", ")
self.visit(item)
self.write(right)
return visit
visit_List = sequence_visit("[", "]")
visit_Set = sequence_visit("{", "}")
del sequence_visit
def visit_Dict(self, node):
self.write("{")
for idx, (key, value) in enumerate(zip(node.keys, node.values)):
if idx:
self.write(", ")
self.visit(key)
self.write(": ")
self.visit(value)
self.write("}")
def visit_BinOp(self, node):
self.write("(")
self.visit(node.left)
self.write(" %s " % BINOP_SYMBOLS[type(node.op)])
self.visit(node.right)
self.write(")")
def visit_BoolOp(self, node):
self.write("(")
for idx, value in enumerate(node.values):
if idx:
self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)])
self.visit(value)
self.write(")")
def visit_Compare(self, node):
self.write("(")
self.visit(node.left)
for op, right in zip(node.ops, node.comparators):
self.write(" %s " % CMPOP_SYMBOLS[type(op)])
self.visit(right)
self.write(")")
def visit_UnaryOp(self, node):
self.write("(")
op = UNARYOP_SYMBOLS[type(node.op)]
self.write(op)
if op == "not":
self.write(" ")
self.visit(node.operand)
self.write(")")
def visit_Subscript(self, node):
self.visit(node.value)
self.write("[")
self.visit(node.slice)
self.write("]")
def visit_Slice(self, node):
if node.lower is not None:
self.visit(node.lower)
self.write(":")
if node.upper is not None:
self.visit(node.upper)
if node.step is not None:
self.write(":")
if not (isinstance(node.step, Name) and node.step.id == "None"):
self.visit(node.step)
def visit_ExtSlice(self, node):
        for idx, item in enumerate(node.dims):
if idx:
self.write(", ")
self.visit(item)
def visit_Yield(self, node):
self.write("yield ")
self.visit(node.value)
def visit_Lambda(self, node):
self.write("lambda ")
self.signature(node.args)
self.write(": ")
self.visit(node.body)
def visit_Ellipsis(self, node):
self.write("Ellipsis")
def generator_visit(left, right):
def visit(self, node):
self.write(left)
self.visit(node.elt)
for comprehension in node.generators:
self.visit(comprehension)
self.write(right)
return visit
visit_ListComp = generator_visit("[", "]")
visit_GeneratorExp = generator_visit("(", ")")
visit_SetComp = generator_visit("{", "}")
del generator_visit
def visit_DictComp(self, node):
self.write("{")
self.visit(node.key)
self.write(": ")
self.visit(node.value)
for comprehension in node.generators:
self.visit(comprehension)
self.write("}")
def visit_IfExp(self, node):
self.visit(node.body)
self.write(" if ")
self.visit(node.test)
self.write(" else ")
self.visit(node.orelse)
def visit_Starred(self, node):
self.write("*")
self.visit(node.value)
def visit_Repr(self, node):
# XXX: python 2.6 only
self.write("`")
self.visit(node.value)
self.write("`")
# Helper Nodes
def visit_alias(self, node):
self.write(node.name)
if node.asname is not None:
self.write(" as " + node.asname)
def visit_comprehension(self, node):
self.write(" for ")
self.visit(node.target)
self.write(" in ")
self.visit(node.iter)
if node.ifs:
for if_ in node.ifs:
self.write(" if ")
self.visit(if_)
def visit_excepthandler(self, node):
self.newline()
self.write("except")
if node.type is not None:
self.write(" ")
self.visit(node.type)
if node.name is not None:
self.write(" as ")
self.visit(node.name)
self.write(":")
self.body(node.body)
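# Usage sketch (illustrative; upstream drives SourceGenerator through a
# to_source()-style helper that this stripped copy omits):
def _example_to_source(source="x = 1 + 2", indent_with="    "):
    gen = SourceGenerator(indent_with)
    gen.visit(parse(source))  # parse() is defined near the top of this module
    return "".join(gen.result)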
size: 20,414 | language: Python | extension: .py | total_lines: 606 | avg_line_length: 24.280528 | max_line_length: 76 | alphanum_fraction: 0.558331
repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,703 | file_name: compat.py | file_path: rembo10_headphones/lib/mako/compat.py
content:
# mako/compat.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import collections
import inspect
import sys
py3k = sys.version_info >= (3, 0)
py2k = sys.version_info < (3,)
py27 = sys.version_info >= (2, 7)
jython = sys.platform.startswith("java")
win32 = sys.platform.startswith("win")
pypy = hasattr(sys, "pypy_version_info")
ArgSpec = collections.namedtuple(
"ArgSpec", ["args", "varargs", "keywords", "defaults"]
)
def inspect_getargspec(func):
"""getargspec based on fully vendored getfullargspec from Python 3.3."""
if inspect.ismethod(func):
func = func.__func__
if not inspect.isfunction(func):
raise TypeError("{!r} is not a Python function".format(func))
co = func.__code__
if not inspect.iscode(co):
raise TypeError("{!r} is not a code object".format(co))
nargs = co.co_argcount
names = co.co_varnames
nkwargs = co.co_kwonlyargcount if py3k else 0
args = list(names[:nargs])
nargs += nkwargs
varargs = None
if co.co_flags & inspect.CO_VARARGS:
varargs = co.co_varnames[nargs]
nargs = nargs + 1
varkw = None
if co.co_flags & inspect.CO_VARKEYWORDS:
varkw = co.co_varnames[nargs]
return ArgSpec(args, varargs, varkw, func.__defaults__)
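# Illustrative: for `def f(a, b=1, *args, **kw): pass` the function above
# returns ArgSpec(args=['a', 'b'], varargs='args', keywords='kw', defaults=(1,))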
if py3k:
from io import StringIO
import builtins as compat_builtins
from urllib.parse import quote_plus, unquote_plus
from html.entities import codepoint2name, name2codepoint
string_types = (str,)
binary_type = bytes
text_type = str
from io import BytesIO as byte_buffer
def u(s):
return s
def b(s):
return s.encode("latin-1")
def octal(lit):
return eval("0o" + lit)
else:
import __builtin__ as compat_builtins # noqa
try:
from cStringIO import StringIO
except:
from StringIO import StringIO
byte_buffer = StringIO
from urllib import quote_plus, unquote_plus # noqa
from htmlentitydefs import codepoint2name, name2codepoint # noqa
string_types = (basestring,) # noqa
binary_type = str
text_type = unicode # noqa
def u(s):
return unicode(s, "utf-8") # noqa
def b(s):
return s
def octal(lit):
return eval("0" + lit)
if py3k:
from importlib import machinery, util
if hasattr(util, 'module_from_spec'):
# Python 3.5+
def load_module(module_id, path):
spec = util.spec_from_file_location(module_id, path)
module = util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
else:
def load_module(module_id, path):
module = machinery.SourceFileLoader(module_id, path).load_module()
del sys.modules[module_id]
return module
else:
import imp
def load_module(module_id, path):
fp = open(path, "rb")
try:
module = imp.load_source(module_id, path, fp)
del sys.modules[module_id]
return module
finally:
fp.close()
if py3k:
def reraise(tp, value, tb=None, cause=None):
if cause is not None:
value.__cause__ = cause
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
exec(
"def reraise(tp, value, tb=None, cause=None):\n"
" raise tp, value, tb\n"
)
def exception_as():
return sys.exc_info()[1]
all = all # noqa
def exception_name(exc):
return exc.__class__.__name__
################################################
# cross-compatible metaclass implementation
# Copyright (c) 2010-2012 Benjamin Peterson
def with_metaclass(meta, base=object):
"""Create a base class with a metaclass."""
return meta("%sBase" % meta.__name__, (base,), {})
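# Illustrative usage (a sketch): the same class statement works under both
# Python 2 and Python 3 metaclass syntaxes:
#
#     class MyMeta(type):
#         pass
#
#     class MyClass(with_metaclass(MyMeta)):
#         pass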
################################################
def arg_stringname(func_arg):
"""Gets the string name of a kwarg or vararg
In Python3.4 a function's args are
of _ast.arg type not _ast.name
"""
if hasattr(func_arg, "arg"):
return func_arg.arg
else:
return str(func_arg)
size: 4,295 | language: Python | extension: .py | total_lines: 129 | avg_line_length: 27.271318 | max_line_length: 78 | alphanum_fraction: 0.628946
repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,704 | file_name: exceptions.py | file_path: rembo10_headphones/lib/mako/exceptions.py
content:
# mako/exceptions.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""exception classes"""
import sys
import traceback
from mako import compat
from mako import util
class MakoException(Exception):
pass
class RuntimeException(MakoException):
pass
def _format_filepos(lineno, pos, filename):
if filename is None:
return " at line: %d char: %d" % (lineno, pos)
else:
return " in file '%s' at line: %d char: %d" % (filename, lineno, pos)
class CompileException(MakoException):
def __init__(self, message, source, lineno, pos, filename):
MakoException.__init__(
self, message + _format_filepos(lineno, pos, filename)
)
self.lineno = lineno
self.pos = pos
self.filename = filename
self.source = source
class SyntaxException(MakoException):
def __init__(self, message, source, lineno, pos, filename):
MakoException.__init__(
self, message + _format_filepos(lineno, pos, filename)
)
self.lineno = lineno
self.pos = pos
self.filename = filename
self.source = source
class UnsupportedError(MakoException):
"""raised when a retired feature is used."""
class NameConflictError(MakoException):
"""raised when a reserved word is used inappropriately"""
class TemplateLookupException(MakoException):
pass
class TopLevelLookupException(TemplateLookupException):
pass
class RichTraceback(object):
"""Pull the current exception from the ``sys`` traceback and extracts
Mako-specific template information.
See the usage examples in :ref:`handling_exceptions`.
"""
def __init__(self, error=None, traceback=None):
self.source, self.lineno = "", 0
if error is None or traceback is None:
t, value, tback = sys.exc_info()
if error is None:
error = value or t
if traceback is None:
traceback = tback
self.error = error
self.records = self._init(traceback)
if isinstance(self.error, (CompileException, SyntaxException)):
self.source = self.error.source
self.lineno = self.error.lineno
self._has_source = True
self._init_message()
@property
def errorname(self):
return compat.exception_name(self.error)
def _init_message(self):
"""Find a unicode representation of self.error"""
try:
self.message = compat.text_type(self.error)
except UnicodeError:
try:
self.message = str(self.error)
except UnicodeEncodeError:
# Fallback to args as neither unicode nor
# str(Exception(u'\xe6')) work in Python < 2.6
self.message = self.error.args[0]
if not isinstance(self.message, compat.text_type):
self.message = compat.text_type(self.message, "ascii", "replace")
def _get_reformatted_records(self, records):
for rec in records:
if rec[6] is not None:
yield (rec[4], rec[5], rec[2], rec[6])
else:
yield tuple(rec[0:4])
@property
def traceback(self):
"""Return a list of 4-tuple traceback records (i.e. normal python
format) with template-corresponding lines remapped to the originating
template.
"""
return list(self._get_reformatted_records(self.records))
@property
def reverse_records(self):
return reversed(self.records)
@property
def reverse_traceback(self):
"""Return the same data as traceback, except in reverse order.
"""
return list(self._get_reformatted_records(self.reverse_records))
def _init(self, trcback):
"""format a traceback from sys.exc_info() into 7-item tuples,
containing the regular four traceback tuple items, plus the original
template filename, the line number adjusted relative to the template
source, and code line from that line number of the template."""
import mako.template
mods = {}
rawrecords = traceback.extract_tb(trcback)
new_trcback = []
for filename, lineno, function, line in rawrecords:
if not line:
line = ""
try:
(line_map, template_lines, template_filename) = mods[filename]
except KeyError:
try:
info = mako.template._get_module_info(filename)
module_source = info.code
template_source = info.source
template_filename = (
info.template_filename or info.template_uri or filename
)
except KeyError:
# A normal .py file (not a Template)
if not compat.py3k:
try:
fp = open(filename, "rb")
encoding = util.parse_encoding(fp)
fp.close()
except IOError:
encoding = None
if encoding:
line = line.decode(encoding)
else:
line = line.decode("ascii", "replace")
new_trcback.append(
(
filename,
lineno,
function,
line,
None,
None,
None,
None,
)
)
continue
template_ln = 1
mtm = mako.template.ModuleInfo
source_map = mtm.get_module_source_metadata(
module_source, full_line_map=True
)
line_map = source_map["full_line_map"]
template_lines = [
line_ for line_ in template_source.split("\n")
]
mods[filename] = (line_map, template_lines, template_filename)
template_ln = line_map[lineno - 1]
if template_ln <= len(template_lines):
template_line = template_lines[template_ln - 1]
else:
template_line = None
new_trcback.append(
(
filename,
lineno,
function,
line,
template_filename,
template_ln,
template_line,
template_source,
)
)
if not self.source:
for l in range(len(new_trcback) - 1, 0, -1):
if new_trcback[l][5]:
self.source = new_trcback[l][7]
self.lineno = new_trcback[l][5]
break
else:
if new_trcback:
try:
# A normal .py file (not a Template)
fp = open(new_trcback[-1][0], "rb")
encoding = util.parse_encoding(fp)
if compat.py3k and not encoding:
encoding = "utf-8"
fp.seek(0)
self.source = fp.read()
fp.close()
if encoding:
self.source = self.source.decode(encoding)
except IOError:
self.source = ""
self.lineno = new_trcback[-1][1]
return new_trcback
def text_error_template(lookup=None):
"""Provides a template that renders a stack trace in a similar format to
the Python interpreter, substituting source template filenames, line
numbers and code for that of the originating source template, as
applicable.
"""
import mako.template
return mako.template.Template(
r"""
<%page args="error=None, traceback=None"/>
<%!
from mako.exceptions import RichTraceback
%>\
<%
tback = RichTraceback(error=error, traceback=traceback)
%>\
Traceback (most recent call last):
% for (filename, lineno, function, line) in tback.traceback:
File "${filename}", line ${lineno}, in ${function or '?'}
${line | trim}
% endfor
${tback.errorname}: ${tback.message}
"""
)
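
# Usage sketch (illustrative; the failing expression is hypothetical):
#
#     from mako.template import Template
#     from mako import exceptions
#
#     try:
#         Template("${undefined_name + 1}").render()
#     except Exception:
#         print(exceptions.text_error_template().render())
#
# The rendered report mirrors a normal Python traceback, with template
# filenames and line numbers substituted where applicable.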
def _install_pygments():
global syntax_highlight, pygments_html_formatter
from mako.ext.pygmentplugin import syntax_highlight # noqa
from mako.ext.pygmentplugin import pygments_html_formatter # noqa
def _install_fallback():
global syntax_highlight, pygments_html_formatter
from mako.filters import html_escape
pygments_html_formatter = None
def syntax_highlight(filename="", language=None):
return html_escape
def _install_highlighting():
try:
_install_pygments()
except ImportError:
_install_fallback()
_install_highlighting()
def html_error_template():
"""Provides a template that renders a stack trace in an HTML format,
providing an excerpt of code as well as substituting source template
filenames, line numbers and code for that of the originating source
template, as applicable.
The template's default ``encoding_errors`` value is
``'htmlentityreplace'``. The template has two options. With the
``full`` option disabled, only a section of an HTML document is
returned. With the ``css`` option disabled, the default stylesheet
won't be included.
"""
import mako.template
return mako.template.Template(
r"""
<%!
from mako.exceptions import RichTraceback, syntax_highlight,\
pygments_html_formatter
%>
<%page args="full=True, css=True, error=None, traceback=None"/>
% if full:
<html>
<head>
<title>Mako Runtime Error</title>
% endif
% if css:
<style>
body { font-family:verdana; margin:10px 30px 10px 30px;}
.stacktrace { margin:5px 5px 5px 5px; }
.highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
.nonhighlight { padding:0px; background-color:#DFDFDF; }
.sample { padding:10px; margin:10px 10px 10px 10px;
font-family:monospace; }
.sampleline { padding:0px 10px 0px 10px; }
.sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
.location { font-size:80%; }
.highlight { white-space:pre; }
.sampleline { white-space:pre; }
% if pygments_html_formatter:
${pygments_html_formatter.get_style_defs()}
.linenos { min-width: 2.5em; text-align: right; }
pre { margin: 0; }
.syntax-highlighted { padding: 0 10px; }
.syntax-highlightedtable { border-spacing: 1px; }
.nonhighlight { border-top: 1px solid #DFDFDF;
border-bottom: 1px solid #DFDFDF; }
.stacktrace .nonhighlight { margin: 5px 15px 10px; }
.sourceline { margin: 0 0; font-family:monospace; }
.code { background-color: #F8F8F8; width: 100%; }
.error .code { background-color: #FFBDBD; }
.error .syntax-highlighted { background-color: #FFBDBD; }
% endif
</style>
% endif
% if full:
</head>
<body>
% endif
<h2>Error !</h2>
<%
tback = RichTraceback(error=error, traceback=traceback)
src = tback.source
line = tback.lineno
if src:
lines = src.split('\n')
else:
lines = None
%>
<h3>${tback.errorname}: ${tback.message|h}</h3>
% if lines:
<div class="sample">
<div class="nonhighlight">
% for index in range(max(0, line-4),min(len(lines), line+5)):
<%
if pygments_html_formatter:
pygments_html_formatter.linenostart = index + 1
%>
% if index + 1 == line:
<%
if pygments_html_formatter:
old_cssclass = pygments_html_formatter.cssclass
pygments_html_formatter.cssclass = 'error ' + old_cssclass
%>
${lines[index] | syntax_highlight(language='mako')}
<%
if pygments_html_formatter:
pygments_html_formatter.cssclass = old_cssclass
%>
% else:
${lines[index] | syntax_highlight(language='mako')}
% endif
% endfor
</div>
</div>
% endif
<div class="stacktrace">
% for (filename, lineno, function, line) in tback.reverse_traceback:
<div class="location">${filename}, line ${lineno}:</div>
<div class="nonhighlight">
<%
if pygments_html_formatter:
pygments_html_formatter.linenostart = lineno
%>
<div class="sourceline">${line | syntax_highlight(filename)}</div>
</div>
% endfor
</div>
% if full:
</body>
</html>
% endif
""",
output_encoding=sys.getdefaultencoding(),
encoding_errors="htmlentityreplace",
)
size: 13,110 | language: Python | extension: .py | total_lines: 354 | avg_line_length: 26.889831 | max_line_length: 79 | alphanum_fraction: 0.577287 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,705 | file_name: lookup.py | file_path: rembo10_headphones/lib/mako/lookup.py
# mako/lookup.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import os
import posixpath
import re
import stat
from mako import exceptions
from mako import util
from mako.template import Template
try:
import threading
except:
import dummy_threading as threading
class TemplateCollection(object):
"""Represent a collection of :class:`.Template` objects,
identifiable via URI.
A :class:`.TemplateCollection` is linked to the usage of
all template tags that address other templates, such
as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
The ``file`` attribute of each of those tags refers
to a string URI that is passed to that :class:`.Template`
object's :class:`.TemplateCollection` for resolution.
:class:`.TemplateCollection` is an abstract class,
with the usual default implementation being :class:`.TemplateLookup`.
"""
def has_template(self, uri):
"""Return ``True`` if this :class:`.TemplateLookup` is
capable of returning a :class:`.Template` object for the
given ``uri``.
:param uri: String URI of the template to be resolved.
"""
try:
self.get_template(uri)
return True
except exceptions.TemplateLookupException:
return False
def get_template(self, uri, relativeto=None):
"""Return a :class:`.Template` object corresponding to the given
``uri``.
The default implementation raises
:class:`.NotImplementedError`. Implementations should
raise :class:`.TemplateLookupException` if the given ``uri``
cannot be resolved.
:param uri: String URI of the template to be resolved.
:param relativeto: if present, the given ``uri`` is assumed to
be relative to this URI.
"""
raise NotImplementedError()
def filename_to_uri(self, uri, filename):
"""Convert the given ``filename`` to a URI relative to
this :class:`.TemplateCollection`."""
return uri
def adjust_uri(self, uri, filename):
"""Adjust the given ``uri`` based on the calling ``filename``.
When this method is called from the runtime, the
``filename`` parameter is taken directly to the ``filename``
attribute of the calling template. Therefore a custom
:class:`.TemplateCollection` subclass can place any string
identifier desired in the ``filename`` parameter of the
:class:`.Template` objects it constructs and have them come back
here.
"""
return uri
class TemplateLookup(TemplateCollection):
"""Represent a collection of templates that locates template source files
from the local filesystem.
The primary argument is the ``directories`` argument, the list of
directories to search:
.. sourcecode:: python
lookup = TemplateLookup(["/path/to/templates"])
some_template = lookup.get_template("/index.html")
The :class:`.TemplateLookup` can also be given :class:`.Template` objects
    programmatically using :meth:`.put_string` or :meth:`.put_template`:
.. sourcecode:: python
lookup = TemplateLookup()
lookup.put_string("base.html", '''
<html><body>${self.next()}</body></html>
''')
lookup.put_string("hello.html", '''
<%include file='base.html'/>
Hello, world !
''')
:param directories: A list of directory names which will be
searched for a particular template URI. The URI is appended
to each directory and the filesystem checked.
:param collection_size: Approximate size of the collection used
to store templates. If left at its default of ``-1``, the size
is unbounded, and a plain Python dictionary is used to
relate URI strings to :class:`.Template` instances.
Otherwise, a least-recently-used cache object is used which
will maintain the size of the collection approximately to
the number given.
:param filesystem_checks: When at its default value of ``True``,
each call to :meth:`.TemplateLookup.get_template()` will
compare the filesystem last modified time to the time in
which an existing :class:`.Template` object was created.
This allows the :class:`.TemplateLookup` to regenerate a
new :class:`.Template` whenever the original source has
been updated. Set this to ``False`` for a very minor
performance increase.
:param modulename_callable: A callable which, when present,
is passed the path of the source file as well as the
requested URI, and then returns the full path of the
generated Python module file. This is used to inject
alternate schemes for Python module location. If left at
its default of ``None``, the built in system of generation
based on ``module_directory`` plus ``uri`` is used.
All other keyword parameters available for
:class:`.Template` are mirrored here. When new
:class:`.Template` objects are created, the keywords
established with this :class:`.TemplateLookup` are passed on
to each new :class:`.Template`.
"""
def __init__(
self,
directories=None,
module_directory=None,
filesystem_checks=True,
collection_size=-1,
format_exceptions=False,
error_handler=None,
disable_unicode=False,
bytestring_passthrough=False,
output_encoding=None,
encoding_errors="strict",
cache_args=None,
cache_impl="beaker",
cache_enabled=True,
cache_type=None,
cache_dir=None,
cache_url=None,
modulename_callable=None,
module_writer=None,
default_filters=None,
buffer_filters=(),
strict_undefined=False,
imports=None,
future_imports=None,
enable_loop=True,
input_encoding=None,
preprocessor=None,
lexer_cls=None,
include_error_handler=None,
):
self.directories = [
posixpath.normpath(d) for d in util.to_list(directories, ())
]
self.module_directory = module_directory
self.modulename_callable = modulename_callable
self.filesystem_checks = filesystem_checks
self.collection_size = collection_size
if cache_args is None:
cache_args = {}
# transfer deprecated cache_* args
if cache_dir:
cache_args.setdefault("dir", cache_dir)
if cache_url:
cache_args.setdefault("url", cache_url)
if cache_type:
cache_args.setdefault("type", cache_type)
self.template_args = {
"format_exceptions": format_exceptions,
"error_handler": error_handler,
"include_error_handler": include_error_handler,
"disable_unicode": disable_unicode,
"bytestring_passthrough": bytestring_passthrough,
"output_encoding": output_encoding,
"cache_impl": cache_impl,
"encoding_errors": encoding_errors,
"input_encoding": input_encoding,
"module_directory": module_directory,
"module_writer": module_writer,
"cache_args": cache_args,
"cache_enabled": cache_enabled,
"default_filters": default_filters,
"buffer_filters": buffer_filters,
"strict_undefined": strict_undefined,
"imports": imports,
"future_imports": future_imports,
"enable_loop": enable_loop,
"preprocessor": preprocessor,
"lexer_cls": lexer_cls,
}
if collection_size == -1:
self._collection = {}
self._uri_cache = {}
else:
self._collection = util.LRUCache(collection_size)
self._uri_cache = util.LRUCache(collection_size)
self._mutex = threading.Lock()
def get_template(self, uri):
"""Return a :class:`.Template` object corresponding to the given
``uri``.
.. note:: The ``relativeto`` argument is not supported here at
the moment.
"""
try:
if self.filesystem_checks:
return self._check(uri, self._collection[uri])
else:
return self._collection[uri]
except KeyError:
u = re.sub(r"^\/+", "", uri)
for dir_ in self.directories:
                # make sure the path separators are posix - os.altsep is empty
# on POSIX and cannot be used.
dir_ = dir_.replace(os.path.sep, posixpath.sep)
srcfile = posixpath.normpath(posixpath.join(dir_, u))
if os.path.isfile(srcfile):
return self._load(srcfile, uri)
else:
raise exceptions.TopLevelLookupException(
"Cant locate template for uri %r" % uri
)
def adjust_uri(self, uri, relativeto):
"""Adjust the given ``uri`` based on the given relative URI."""
key = (uri, relativeto)
if key in self._uri_cache:
return self._uri_cache[key]
if uri[0] != "/":
if relativeto is not None:
v = self._uri_cache[key] = posixpath.join(
posixpath.dirname(relativeto), uri
)
else:
v = self._uri_cache[key] = "/" + uri
else:
v = self._uri_cache[key] = uri
return v
def filename_to_uri(self, filename):
"""Convert the given ``filename`` to a URI relative to
this :class:`.TemplateCollection`."""
try:
return self._uri_cache[filename]
except KeyError:
value = self._relativeize(filename)
self._uri_cache[filename] = value
return value
def _relativeize(self, filename):
"""Return the portion of a filename that is 'relative'
to the directories in this lookup.
"""
filename = posixpath.normpath(filename)
for dir_ in self.directories:
if filename[0 : len(dir_)] == dir_:
return filename[len(dir_) :]
else:
return None
def _load(self, filename, uri):
self._mutex.acquire()
try:
try:
# try returning from collection one
# more time in case concurrent thread already loaded
return self._collection[uri]
except KeyError:
pass
try:
if self.modulename_callable is not None:
module_filename = self.modulename_callable(filename, uri)
else:
module_filename = None
self._collection[uri] = template = Template(
uri=uri,
filename=posixpath.normpath(filename),
lookup=self,
module_filename=module_filename,
**self.template_args
)
return template
except:
# if compilation fails etc, ensure
# template is removed from collection,
# re-raise
self._collection.pop(uri, None)
raise
finally:
self._mutex.release()
def _check(self, uri, template):
if template.filename is None:
return template
try:
template_stat = os.stat(template.filename)
if template.module._modified_time < template_stat[stat.ST_MTIME]:
self._collection.pop(uri, None)
return self._load(template.filename, uri)
else:
return template
except OSError:
self._collection.pop(uri, None)
raise exceptions.TemplateLookupException(
"Cant locate template for uri %r" % uri
)
def put_string(self, uri, text):
"""Place a new :class:`.Template` object into this
:class:`.TemplateLookup`, based on the given string of
``text``.
"""
self._collection[uri] = Template(
text, lookup=self, uri=uri, **self.template_args
)
def put_template(self, uri, template):
"""Place a new :class:`.Template` object into this
:class:`.TemplateLookup`, based on the given
:class:`.Template` object.
"""
self._collection[uri] = template
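
# Usage sketch (illustrative; paths are hypothetical):
#
#     from mako.lookup import TemplateLookup
#
#     lookup = TemplateLookup(
#         directories=["/path/to/templates"],
#         module_directory="/tmp/mako_modules",  # where compiled modules go
#         filesystem_checks=True,                # recompile on file change
#     )
#     template = lookup.get_template("/index.html")
#     print(template.render(name="world"))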
size: 12,718 | language: Python | extension: .py | total_lines: 310 | avg_line_length: 30.964516 | max_line_length: 78 | alphanum_fraction: 0.606593 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,706 | file_name: cmd.py | file_path: rembo10_headphones/lib/mako/cmd.py
# mako/cmd.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from argparse import ArgumentParser
import io
from os.path import dirname
from os.path import isfile
import sys
from mako import exceptions
from mako.lookup import TemplateLookup
from mako.template import Template
def varsplit(var):
if "=" not in var:
return (var, "")
return var.split("=", 1)
def _exit():
sys.stderr.write(exceptions.text_error_template().render())
sys.exit(1)
def cmdline(argv=None):
parser = ArgumentParser()
parser.add_argument(
"--var",
default=[],
action="append",
help="variable (can be used multiple times, use name=value)",
)
parser.add_argument(
"--template-dir",
default=[],
action="append",
help="Directory to use for template lookup (multiple "
"directories may be provided). If not given then if the "
"template is read from stdin, the value defaults to be "
"the current directory, otherwise it defaults to be the "
"parent directory of the file provided.",
)
parser.add_argument(
"--output-encoding", default=None, help="force output encoding"
)
parser.add_argument(
"--output-file",
default=None,
help="Write to file upon successful render instead of stdout",
)
parser.add_argument("input", nargs="?", default="-")
options = parser.parse_args(argv)
output_encoding = options.output_encoding
output_file = options.output_file
if options.input == "-":
lookup_dirs = options.template_dir or ["."]
lookup = TemplateLookup(lookup_dirs)
try:
template = Template(
sys.stdin.read(),
lookup=lookup,
output_encoding=output_encoding,
)
except:
_exit()
else:
filename = options.input
if not isfile(filename):
raise SystemExit("error: can't find %s" % filename)
lookup_dirs = options.template_dir or [dirname(filename)]
lookup = TemplateLookup(lookup_dirs)
try:
template = Template(
filename=filename,
lookup=lookup,
output_encoding=output_encoding,
)
except:
_exit()
kw = dict([varsplit(var) for var in options.var])
try:
rendered = template.render(**kw)
except:
_exit()
else:
if output_file:
io.open(output_file, "wt", encoding=output_encoding).write(
rendered
)
else:
sys.stdout.write(rendered)
if __name__ == "__main__":
cmdline()
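
# Usage sketch (illustrative; filenames are hypothetical):
#
#     $ python -m mako.cmd --var name=world hello.mak
#     $ echo 'hello ${name}' | python -m mako.cmd --var name=world -
#
# or, programmatically:
#
#     from mako.cmd import cmdline
#     cmdline(["--var", "name=world", "hello.mak"])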
size: 2,859 | language: Python | extension: .py | total_lines: 89 | avg_line_length: 24.494382 | max_line_length: 74 | alphanum_fraction: 0.611393 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,707 | file_name: turbogears.py | file_path: rembo10_headphones/lib/mako/ext/turbogears.py
# ext/turbogears.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from mako import compat
from mako.lookup import TemplateLookup
from mako.template import Template
class TGPlugin(object):
"""TurboGears compatible Template Plugin."""
def __init__(self, extra_vars_func=None, options=None, extension="mak"):
self.extra_vars_func = extra_vars_func
self.extension = extension
if not options:
options = {}
# Pull the options out and initialize the lookup
lookup_options = {}
for k, v in options.items():
if k.startswith("mako."):
lookup_options[k[5:]] = v
elif k in ["directories", "filesystem_checks", "module_directory"]:
lookup_options[k] = v
self.lookup = TemplateLookup(**lookup_options)
self.tmpl_options = {}
# transfer lookup args to template args, based on those available
# in getargspec
for kw in compat.inspect_getargspec(Template.__init__)[0]:
if kw in lookup_options:
self.tmpl_options[kw] = lookup_options[kw]
def load_template(self, templatename, template_string=None):
"""Loads a template from a file or a string"""
if template_string is not None:
return Template(template_string, **self.tmpl_options)
# Translate TG dot notation to normal / template path
if "/" not in templatename:
templatename = (
"/" + templatename.replace(".", "/") + "." + self.extension
)
# Lookup template
return self.lookup.get_template(templatename)
def render(
self, info, format="html", fragment=False, template=None # noqa
):
if isinstance(template, compat.string_types):
template = self.load_template(template)
# Load extra vars func if provided
if self.extra_vars_func:
info.update(self.extra_vars_func())
return template.render(**info)
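
# Usage sketch (illustrative; directory and template names are
# hypothetical). "mako."-prefixed options are routed to the lookup:
#
#     plugin = TGPlugin(options={
#         "mako.directories": ["/path/to/templates"],
#         "mako.input_encoding": "utf-8",
#     })
#     tmpl = plugin.load_template("site.index")  # -> /site/index.mak
#     print(plugin.render({"title": "Hi"}, template=tmpl))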
size: 2,165 | language: Python | extension: .py | total_lines: 49 | avg_line_length: 35.428571 | max_line_length: 79 | alphanum_fraction: 0.634981 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,708 | file_name: pygmentplugin.py | file_path: rembo10_headphones/lib/mako/ext/pygmentplugin.py
# ext/pygmentplugin.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from pygments import highlight
from pygments.formatters.html import HtmlFormatter
from pygments.lexer import bygroups
from pygments.lexer import DelegatingLexer
from pygments.lexer import include
from pygments.lexer import RegexLexer
from pygments.lexer import using
from pygments.lexers.agile import Python3Lexer
from pygments.lexers.agile import PythonLexer
from pygments.lexers.web import CssLexer
from pygments.lexers.web import HtmlLexer
from pygments.lexers.web import JavascriptLexer
from pygments.lexers.web import XmlLexer
from pygments.token import Comment
from pygments.token import Keyword
from pygments.token import Name
from pygments.token import Operator
from pygments.token import Other
from pygments.token import String
from pygments.token import Text
from mako import compat
class MakoLexer(RegexLexer):
name = "Mako"
aliases = ["mako"]
filenames = ["*.mao"]
tokens = {
"root": [
(
r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
bygroups(Text, Comment.Preproc, Keyword, Other),
),
(
r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
),
(
r"(\s*)(##[^\n]*)(\n|\Z)",
bygroups(Text, Comment.Preproc, Other),
),
(r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
(
r"(<%)([\w\.\:]+)",
bygroups(Comment.Preproc, Name.Builtin),
"tag",
),
(
r"(</%)([\w\.\:]+)(>)",
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
),
(r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
(
r"(?s)(<%(?:!?))(.*?)(%>)",
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
),
(
r"(\$\{)(.*?)(\})",
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
),
(
r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line
(?=\#\*) | # multiline comment
(?=</?%) | # a python block
# call start or end
(?=\$\{) | # a substitution
(?<=\n)(?=\s*%) |
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)
""",
bygroups(Other, Operator),
),
(r"\s+", Text),
],
"ondeftags": [
(r"<%", Comment.Preproc),
(r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
include("tag"),
],
"tag": [
(r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)),
(r"/?\s*>", Comment.Preproc, "#pop"),
(r"\s+", Text),
],
"attr": [
('".*?"', String, "#pop"),
("'.*?'", String, "#pop"),
(r"[^\s>]+", String, "#pop"),
],
}
class MakoHtmlLexer(DelegatingLexer):
name = "HTML+Mako"
aliases = ["html+mako"]
def __init__(self, **options):
super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options)
class MakoXmlLexer(DelegatingLexer):
name = "XML+Mako"
aliases = ["xml+mako"]
def __init__(self, **options):
super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options)
class MakoJavascriptLexer(DelegatingLexer):
name = "JavaScript+Mako"
aliases = ["js+mako", "javascript+mako"]
def __init__(self, **options):
super(MakoJavascriptLexer, self).__init__(
JavascriptLexer, MakoLexer, **options
)
class MakoCssLexer(DelegatingLexer):
name = "CSS+Mako"
aliases = ["css+mako"]
def __init__(self, **options):
super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options)
pygments_html_formatter = HtmlFormatter(
cssclass="syntax-highlighted", linenos=True
)
def syntax_highlight(filename="", language=None):
mako_lexer = MakoLexer()
if compat.py3k:
python_lexer = Python3Lexer()
else:
python_lexer = PythonLexer()
if filename.startswith("memory:") or language == "mako":
return lambda string: highlight(
string, mako_lexer, pygments_html_formatter
)
return lambda string: highlight(
string, python_lexer, pygments_html_formatter
)
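
# Usage sketch (illustrative):
#
#     highlighter = syntax_highlight(language="mako")
#     html = highlighter("<%page args='x'/>\n${x}")
#
# ``html`` is an HTML fragment styled via the "syntax-highlighted"
# css class configured on the formatter above.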
size: 4,951 | language: Python | extension: .py | total_lines: 136 | avg_line_length: 26.992647 | max_line_length: 79 | alphanum_fraction: 0.526283 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,709 | file_name: linguaplugin.py | file_path: rembo10_headphones/lib/mako/ext/linguaplugin.py
# ext/linguaplugin.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import io
from lingua.extractors import Extractor
from lingua.extractors import get_extractor
from lingua.extractors import Message
from mako import compat
from mako.ext.extract import MessageExtractor
class LinguaMakoExtractor(Extractor, MessageExtractor):
"""Mako templates"""
extensions = [".mako"]
default_config = {"encoding": "utf-8", "comment-tags": ""}
def __call__(self, filename, options, fileobj=None):
self.options = options
self.filename = filename
self.python_extractor = get_extractor("x.py")
if fileobj is None:
fileobj = open(filename, "rb")
must_close = True
else:
must_close = False
try:
for message in self.process_file(fileobj):
yield message
finally:
if must_close:
fileobj.close()
def process_python(self, code, code_lineno, translator_strings):
source = code.getvalue().strip()
if source.endswith(compat.b(":")):
if source in (
compat.b("try:"),
compat.b("else:"),
) or source.startswith(compat.b("except")):
source = compat.b("") # Ignore try/except and else
elif source.startswith(compat.b("elif")):
source = source[2:] # Replace "elif" with "if"
source += compat.b("pass")
code = io.BytesIO(source)
for msg in self.python_extractor(
self.filename, self.options, code, code_lineno - 1
):
if translator_strings:
msg = Message(
msg.msgctxt,
msg.msgid,
msg.msgid_plural,
msg.flags,
compat.u(" ").join(translator_strings + [msg.comment]),
msg.tcomment,
msg.location,
)
yield msg
size: 2,161 | language: Python | extension: .py | total_lines: 56 | avg_line_length: 28 | max_line_length: 75 | alphanum_fraction: 0.574905 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,710 | file_name: autohandler.py | file_path: rembo10_headphones/lib/mako/ext/autohandler.py
# ext/autohandler.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""adds autohandler functionality to Mako templates.
requires that the TemplateLookup class is used with templates.
usage::
<%!
from mako.ext.autohandler import autohandler
%>
<%inherit file="${autohandler(template, context)}"/>
or with custom autohandler filename::
<%!
from mako.ext.autohandler import autohandler
%>
<%inherit file="${autohandler(template, context, name='somefilename')}"/>
"""
import os
import posixpath
import re
def autohandler(template, context, name="autohandler"):
lookup = context.lookup
_template_uri = template.module._template_uri
if not lookup.filesystem_checks:
try:
return lookup._uri_cache[(autohandler, _template_uri, name)]
except KeyError:
pass
tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name]
while len(tokens):
path = "/" + "/".join(tokens)
if path != _template_uri and _file_exists(lookup, path):
if not lookup.filesystem_checks:
return lookup._uri_cache.setdefault(
(autohandler, _template_uri, name), path
)
else:
return path
if len(tokens) == 1:
break
tokens[-2:] = [name]
if not lookup.filesystem_checks:
return lookup._uri_cache.setdefault(
(autohandler, _template_uri, name), None
)
else:
return None
def _file_exists(lookup, path):
psub = re.sub(r"^/", "", path)
for d in lookup.directories:
if os.path.exists(d + "/" + psub):
return True
else:
return False
size: 1,885 | language: Python | extension: .py | total_lines: 55 | avg_line_length: 27.181818 | max_line_length: 78 | alphanum_fraction: 0.629752 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,711 | file_name: babelplugin.py | file_path: rembo10_headphones/lib/mako/ext/babelplugin.py
# ext/babelplugin.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from babel.messages.extract import extract_python
from mako.ext.extract import MessageExtractor
class BabelMakoExtractor(MessageExtractor):
def __init__(self, keywords, comment_tags, options):
self.keywords = keywords
self.options = options
self.config = {
"comment-tags": u" ".join(comment_tags),
"encoding": options.get(
"input_encoding", options.get("encoding", None)
),
}
super(BabelMakoExtractor, self).__init__()
def __call__(self, fileobj):
return self.process_file(fileobj)
def process_python(self, code, code_lineno, translator_strings):
comment_tags = self.config["comment-tags"]
for (
lineno,
funcname,
messages,
python_translator_comments,
) in extract_python(code, self.keywords, comment_tags, self.options):
yield (
code_lineno + (lineno - 1),
funcname,
messages,
translator_strings + python_translator_comments,
)
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Mako templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
extractor = BabelMakoExtractor(keywords, comment_tags, options)
for message in extractor(fileobj):
yield message
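
# Usage sketch (illustrative; the path glob is hypothetical). Mako
# registers this extractor with Babel under the name "mako", so a
# mapping file entry is enough to route templates through it:
#
#     # babel.cfg
#     [mako: templates/**.html]
#     input_encoding = utf-8
#
# then run: pybabel extract -F babel.cfg -o messages.pot .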
size: 2,138 | language: Python | extension: .py | total_lines: 49 | avg_line_length: 34.979592 | max_line_length: 78 | alphanum_fraction: 0.650962 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,712 | file_name: preprocessors.py | file_path: rembo10_headphones/lib/mako/ext/preprocessors.py
# ext/preprocessors.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""preprocessing functions, used with the 'preprocessor'
argument on Template, TemplateLookup"""
import re
def convert_comments(text):
"""preprocess old style comments.
example:
from mako.ext.preprocessors import convert_comments
t = Template(..., preprocessor=convert_comments)"""
return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
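
# Usage sketch (illustrative):
#
#     from mako.template import Template
#     from mako.ext.preprocessors import convert_comments
#
#     t = Template("hello\n# old-style comment\n",
#                  preprocessor=convert_comments)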
size: 576 | language: Python | extension: .py | total_lines: 14 | avg_line_length: 38.285714 | max_line_length: 74 | alphanum_fraction: 0.739209 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,713 | file_name: beaker_cache.py | file_path: rembo10_headphones/lib/mako/ext/beaker_cache.py
# ext/beaker_cache.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provide a :class:`.CacheImpl` for the Beaker caching system."""
from mako import exceptions
from mako.cache import CacheImpl
try:
from beaker import cache as beaker_cache
except:
has_beaker = False
else:
has_beaker = True
_beaker_cache = None
class BeakerCacheImpl(CacheImpl):
"""A :class:`.CacheImpl` provided for the Beaker caching system.
This plugin is used by default, based on the default
value of ``'beaker'`` for the ``cache_impl`` parameter of the
:class:`.Template` or :class:`.TemplateLookup` classes.
"""
def __init__(self, cache):
if not has_beaker:
raise exceptions.RuntimeException(
"Can't initialize Beaker plugin; Beaker is not installed."
)
global _beaker_cache
if _beaker_cache is None:
if "manager" in cache.template.cache_args:
_beaker_cache = cache.template.cache_args["manager"]
else:
_beaker_cache = beaker_cache.CacheManager()
super(BeakerCacheImpl, self).__init__(cache)
def _get_cache(self, **kw):
expiretime = kw.pop("timeout", None)
if "dir" in kw:
kw["data_dir"] = kw.pop("dir")
elif self.cache.template.module_directory:
kw["data_dir"] = self.cache.template.module_directory
if "manager" in kw:
kw.pop("manager")
if kw.get("type") == "memcached":
kw["type"] = "ext:memcached"
if "region" in kw:
region = kw.pop("region")
cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
else:
cache = _beaker_cache.get_cache(self.cache.id, **kw)
cache_args = {"starttime": self.cache.starttime}
if expiretime:
cache_args["expiretime"] = expiretime
return cache, cache_args
def get_or_create(self, key, creation_function, **kw):
cache, kw = self._get_cache(**kw)
return cache.get(key, createfunc=creation_function, **kw)
def put(self, key, value, **kw):
cache, kw = self._get_cache(**kw)
cache.put(key, value, **kw)
def get(self, key, **kw):
cache, kw = self._get_cache(**kw)
return cache.get(key, **kw)
def invalidate(self, key, **kw):
cache, kw = self._get_cache(**kw)
cache.remove_value(key, **kw)
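
# Usage sketch (illustrative; assumes Beaker is installed, and the
# cache arguments shown are hypothetical). Since ``cache_impl``
# defaults to "beaker", tagging a block as cached exercises this
# plugin:
#
#     from mako.template import Template
#
#     t = Template(
#         "<%block name='slow' cached='True' cache_timeout='60'>"
#         "${compute()}</%block>",
#         cache_args={"type": "memory"},
#     )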
size: 2,599 | language: Python | extension: .py | total_lines: 64 | avg_line_length: 32.703125 | max_line_length: 79 | alphanum_fraction: 0.617799 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,714 | file_name: extract.py | file_path: rembo10_headphones/lib/mako/ext/extract.py
# ext/extract.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import re
from mako import compat
from mako import lexer
from mako import parsetree
class MessageExtractor(object):
def process_file(self, fileobj):
template_node = lexer.Lexer(
fileobj.read(), input_encoding=self.config["encoding"]
).parse()
for extracted in self.extract_nodes(template_node.get_children()):
yield extracted
def extract_nodes(self, nodes):
translator_comments = []
in_translator_comments = False
input_encoding = self.config["encoding"] or "ascii"
comment_tags = list(
filter(None, re.split(r"\s+", self.config["comment-tags"]))
)
for node in nodes:
child_nodes = None
if (
in_translator_comments
and isinstance(node, parsetree.Text)
and not node.content.strip()
):
# Ignore whitespace within translator comments
continue
if isinstance(node, parsetree.Comment):
value = node.text.strip()
if in_translator_comments:
translator_comments.extend(
self._split_comment(node.lineno, value)
)
continue
for comment_tag in comment_tags:
if value.startswith(comment_tag):
in_translator_comments = True
translator_comments.extend(
self._split_comment(node.lineno, value)
)
continue
if isinstance(node, parsetree.DefTag):
code = node.function_decl.code
child_nodes = node.nodes
elif isinstance(node, parsetree.BlockTag):
code = node.body_decl.code
child_nodes = node.nodes
elif isinstance(node, parsetree.CallTag):
code = node.code.code
child_nodes = node.nodes
elif isinstance(node, parsetree.PageTag):
code = node.body_decl.code
elif isinstance(node, parsetree.CallNamespaceTag):
code = node.expression
child_nodes = node.nodes
elif isinstance(node, parsetree.ControlLine):
if node.isend:
in_translator_comments = False
continue
code = node.text
elif isinstance(node, parsetree.Code):
in_translator_comments = False
code = node.code.code
elif isinstance(node, parsetree.Expression):
code = node.code.code
else:
continue
# Comments don't apply unless they immediately precede the message
if (
translator_comments
and translator_comments[-1][0] < node.lineno - 1
):
translator_comments = []
translator_strings = [
comment[1] for comment in translator_comments
]
if isinstance(code, compat.text_type):
code = code.encode(input_encoding, "backslashreplace")
used_translator_comments = False
# We add extra newline to work around a pybabel bug
# (see python-babel/babel#274, parse_encoding dies if the first
# input string of the input is non-ascii)
# Also, because we added it, we have to subtract one from
# node.lineno
code = compat.byte_buffer(compat.b("\n") + code)
for message in self.process_python(
code, node.lineno - 1, translator_strings
):
yield message
used_translator_comments = True
if used_translator_comments:
translator_comments = []
in_translator_comments = False
if child_nodes:
for extracted in self.extract_nodes(child_nodes):
yield extracted
@staticmethod
def _split_comment(lineno, comment):
"""Return the multiline comment at lineno split into a list of
comment line numbers and the accompanying comment line"""
return [
(lineno + index, line)
for index, line in enumerate(comment.splitlines())
]
size: 4,616 | language: Python | extension: .py | total_lines: 109 | avg_line_length: 28.724771 | max_line_length: 78 | alphanum_fraction: 0.558228 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,715 | file_name: _collections.py | file_path: rembo10_headphones/lib/urllib3/_collections.py
from __future__ import absolute_import
try:
from collections.abc import Mapping, MutableMapping
except ImportError:
from collections import Mapping, MutableMapping
try:
from threading import RLock
except ImportError: # Platform-specific: No threads available
class RLock:
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
from collections import OrderedDict
from .exceptions import InvalidHeader
from .packages import six
from .packages.six import iterkeys, itervalues
__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
_Null = object()
class RecentlyUsedContainer(MutableMapping):
"""
Provides a thread-safe dict-like container which maintains up to
``maxsize`` keys while throwing away the least-recently-used keys beyond
``maxsize``.
:param maxsize:
Maximum number of recent elements to retain.
:param dispose_func:
Every time an item is evicted from the container,
        ``dispose_func(value)`` is called as a disposal callback.
"""
ContainerCls = OrderedDict
def __init__(self, maxsize=10, dispose_func=None):
self._maxsize = maxsize
self.dispose_func = dispose_func
self._container = self.ContainerCls()
self.lock = RLock()
def __getitem__(self, key):
# Re-insert the item, moving it to the end of the eviction line.
with self.lock:
item = self._container.pop(key)
self._container[key] = item
return item
def __setitem__(self, key, value):
evicted_value = _Null
with self.lock:
# Possibly evict the existing value of 'key'
evicted_value = self._container.get(key, _Null)
self._container[key] = value
# If we didn't evict an existing value, we might have to evict the
# least recently used item from the beginning of the container.
if len(self._container) > self._maxsize:
_key, evicted_value = self._container.popitem(last=False)
if self.dispose_func and evicted_value is not _Null:
self.dispose_func(evicted_value)
def __delitem__(self, key):
with self.lock:
value = self._container.pop(key)
if self.dispose_func:
self.dispose_func(value)
def __len__(self):
with self.lock:
return len(self._container)
def __iter__(self):
raise NotImplementedError(
"Iteration over this class is unlikely to be threadsafe."
)
def clear(self):
with self.lock:
# Copy pointers to all values, then wipe the mapping
values = list(itervalues(self._container))
self._container.clear()
if self.dispose_func:
for value in values:
self.dispose_func(value)
def keys(self):
with self.lock:
return list(iterkeys(self._container))
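
# Usage sketch (illustrative): once ``maxsize`` is exceeded, the
# least-recently-used entry is handed to ``dispose_func``:
#
#     def _dispose(value):
#         print("evicting", value)
#
#     cache = RecentlyUsedContainer(maxsize=2, dispose_func=_dispose)
#     cache["a"], cache["b"], cache["c"] = 1, 2, 3  # disposes value 1
#     assert "a" not in cache and cache.keys() == ["b", "c"]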
class HTTPHeaderDict(MutableMapping):
"""
:param headers:
An iterable of field-value pairs. Must not contain multiple field names
when compared case-insensitively.
:param kwargs:
Additional field-value pairs to pass in to ``dict.update``.
A ``dict`` like container for storing HTTP Headers.
Field names are stored and compared case-insensitively in compliance with
RFC 7230. Iteration provides the first case-sensitive key seen for each
case-insensitive pair.
Using ``__setitem__`` syntax overwrites fields that compare equal
case-insensitively in order to maintain ``dict``'s api. For fields that
compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
in a loop.
If multiple fields that are equal case-insensitively are passed to the
constructor or ``.update``, the behavior is undefined and some will be
lost.
>>> headers = HTTPHeaderDict()
>>> headers.add('Set-Cookie', 'foo=bar')
>>> headers.add('set-cookie', 'baz=quxx')
>>> headers['content-length'] = '7'
>>> headers['SET-cookie']
'foo=bar, baz=quxx'
>>> headers['Content-Length']
'7'
"""
def __init__(self, headers=None, **kwargs):
super(HTTPHeaderDict, self).__init__()
self._container = OrderedDict()
if headers is not None:
if isinstance(headers, HTTPHeaderDict):
self._copy_from(headers)
else:
self.extend(headers)
if kwargs:
self.extend(kwargs)
def __setitem__(self, key, val):
self._container[key.lower()] = [key, val]
return self._container[key.lower()]
def __getitem__(self, key):
val = self._container[key.lower()]
return ", ".join(val[1:])
def __delitem__(self, key):
del self._container[key.lower()]
def __contains__(self, key):
return key.lower() in self._container
def __eq__(self, other):
if not isinstance(other, Mapping) and not hasattr(other, "keys"):
return False
if not isinstance(other, type(self)):
other = type(self)(other)
return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
(k.lower(), v) for k, v in other.itermerged()
)
def __ne__(self, other):
return not self.__eq__(other)
if six.PY2: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
__marker = object()
def __len__(self):
return len(self._container)
def __iter__(self):
# Only provide the originally cased names
for vals in self._container.values():
yield vals[0]
def pop(self, key, default=__marker):
"""D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
"""
# Using the MutableMapping function directly fails due to the private marker.
# Using ordinary dict.pop would expose the internal structures.
# So let's reinvent the wheel.
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def discard(self, key):
try:
del self[key]
except KeyError:
pass
def add(self, key, val):
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
>>> headers = HTTPHeaderDict(foo='bar')
>>> headers.add('Foo', 'baz')
>>> headers['foo']
'bar, baz'
"""
key_lower = key.lower()
new_vals = [key, val]
# Keep the common case aka no item present as fast as possible
vals = self._container.setdefault(key_lower, new_vals)
if new_vals is not vals:
vals.append(val)
def extend(self, *args, **kwargs):
"""Generic import function for any type of header-like object.
Adapted version of MutableMapping.update in order to insert items
with self.add instead of self.__setitem__
"""
if len(args) > 1:
raise TypeError(
"extend() takes at most 1 positional "
"arguments ({0} given)".format(len(args))
)
other = args[0] if len(args) >= 1 else ()
if isinstance(other, HTTPHeaderDict):
for key, val in other.iteritems():
self.add(key, val)
elif isinstance(other, Mapping):
for key in other:
self.add(key, other[key])
elif hasattr(other, "keys"):
for key in other.keys():
self.add(key, other[key])
else:
for key, value in other:
self.add(key, value)
for key, value in kwargs.items():
self.add(key, value)
def getlist(self, key, default=__marker):
"""Returns a list of all the values for the named field. Returns an
empty list if the key doesn't exist."""
try:
vals = self._container[key.lower()]
except KeyError:
if default is self.__marker:
return []
return default
else:
return vals[1:]
# Backwards compatibility for httplib
getheaders = getlist
getallmatchingheaders = getlist
iget = getlist
# Backwards compatibility for http.cookiejar
get_all = getlist
def __repr__(self):
return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
def _copy_from(self, other):
for key in other:
val = other.getlist(key)
if isinstance(val, list):
# Don't need to convert tuples
val = list(val)
self._container[key.lower()] = [key] + val
def copy(self):
clone = type(self)()
clone._copy_from(self)
return clone
def iteritems(self):
"""Iterate over all header lines, including duplicate ones."""
for key in self:
vals = self._container[key.lower()]
for val in vals[1:]:
yield vals[0], val
def itermerged(self):
"""Iterate over all headers, merging duplicate ones together."""
for key in self:
val = self._container[key.lower()]
yield val[0], ", ".join(val[1:])
def items(self):
return list(self.iteritems())
@classmethod
def from_httplib(cls, message): # Python 2
"""Read headers from a Python 2 httplib message object."""
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
# object and extracts the multiheaders properly.
obs_fold_continued_leaders = (" ", "\t")
headers = []
for line in message.headers:
if line.startswith(obs_fold_continued_leaders):
if not headers:
# We received a header line that starts with OWS as described
# in RFC-7230 S3.2.4. This indicates a multiline header, but
# there exists no previous header to which we can attach it.
raise InvalidHeader(
"Header continuation with no previous header: %s" % line
)
else:
key, value = headers[-1]
headers[-1] = (key, value + " " + line.strip())
continue
key, value = line.split(":", 1)
headers.append((key, value.strip()))
return cls(headers)
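
# Usage sketch (illustrative): multi-valued fields accumulate via
# ``add`` and are merged on lookup, matching the docstring above:
#
#     headers = HTTPHeaderDict()
#     headers.add("Set-Cookie", "a=1")
#     headers.add("set-cookie", "b=2")
#     assert headers["SET-COOKIE"] == "a=1, b=2"
#     assert headers.getlist("set-cookie") == ["a=1", "b=2"]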
size: 10,811 | language: Python | extension: .py | total_lines: 271 | avg_line_length: 30.354244 | max_line_length: 86 | alphanum_fraction: 0.594424 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,716 | file_name: _version.py | file_path: rembo10_headphones/lib/urllib3/_version.py
# This file is protected via CODEOWNERS
__version__ = "1.26.8"
size: 63 | language: Python | extension: .py | total_lines: 2 | avg_line_length: 30.5 | max_line_length: 39 | alphanum_fraction: 0.704918 | repo_name: rembo10/headphones | repo_stars: 3,370 | repo_forks: 601 | repo_open_issues: 527 | repo_license: GPL-3.0 | repo_extraction_date: 9/5/2024, 5:10:38 PM (Europe/Amsterdam)

id: 8,717 | file_name: response.py | file_path: rembo10_headphones/lib/urllib3/response.py
from __future__ import absolute_import
import io
import logging
import zlib
from contextlib import contextmanager
from socket import error as SocketError
from socket import timeout as SocketTimeout
try:
import brotli
except ImportError:
brotli = None
from ._collections import HTTPHeaderDict
from .connection import BaseSSLError, HTTPException
from .exceptions import (
BodyNotHttplibCompatible,
DecodeError,
HTTPError,
IncompleteRead,
InvalidChunkLength,
InvalidHeader,
ProtocolError,
ReadTimeoutError,
ResponseNotChunked,
SSLError,
)
from .packages import six
from .util.response import is_fp_closed, is_response_to_head
log = logging.getLogger(__name__)
class DeflateDecoder(object):
def __init__(self):
self._first_try = True
self._data = b""
self._obj = zlib.decompressobj()
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not data:
return data
if not self._first_try:
return self._obj.decompress(data)
self._data += data
try:
decompressed = self._obj.decompress(data)
if decompressed:
self._first_try = False
self._data = None
return decompressed
except zlib.error:
self._first_try = False
self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self.decompress(self._data)
finally:
self._data = None
class GzipDecoderState(object):
FIRST_MEMBER = 0
OTHER_MEMBERS = 1
SWALLOW_DATA = 2
class GzipDecoder(object):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
self._state = GzipDecoderState.FIRST_MEMBER
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
ret = bytearray()
if self._state == GzipDecoderState.SWALLOW_DATA or not data:
return bytes(ret)
while True:
try:
ret += self._obj.decompress(data)
except zlib.error:
previous_state = self._state
# Ignore data after the first error
self._state = GzipDecoderState.SWALLOW_DATA
if previous_state == GzipDecoderState.OTHER_MEMBERS:
# Allow trailing garbage acceptable in other gzip clients
return bytes(ret)
raise
data = self._obj.unused_data
if not data:
return bytes(ret)
self._state = GzipDecoderState.OTHER_MEMBERS
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
if brotli is not None:
class BrotliDecoder(object):
# Supports both 'brotlipy' and 'Brotli' packages
# since they share an import name. The top branches
# are for 'brotlipy' and bottom branches for 'Brotli'
def __init__(self):
self._obj = brotli.Decompressor()
if hasattr(self._obj, "decompress"):
self.decompress = self._obj.decompress
else:
self.decompress = self._obj.process
def flush(self):
if hasattr(self._obj, "flush"):
return self._obj.flush()
return b""
class MultiDecoder(object):
"""
From RFC7231:
If one or more encodings have been applied to a representation, the
sender that applied the encodings MUST generate a Content-Encoding
header field that lists the content codings in the order in which
they were applied.
"""
def __init__(self, modes):
self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
def flush(self):
return self._decoders[0].flush()
def decompress(self, data):
for d in reversed(self._decoders):
data = d.decompress(data)
return data
def _get_decoder(mode):
if "," in mode:
return MultiDecoder(mode)
if mode == "gzip":
return GzipDecoder()
if brotli is not None and mode == "br":
return BrotliDecoder()
return DeflateDecoder()
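
# Usage sketch (illustrative): a gzip round trip through the decoder
# selected above (``gzip.compress`` requires Python 3):
#
#     import gzip
#
#     payload = gzip.compress(b"hello")
#     decoder = _get_decoder("gzip")
#     assert decoder.decompress(payload) == b"hello"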
class HTTPResponse(io.IOBase):
"""
HTTP Response container.
Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed. This
class is also compatible with the Python standard library's :mod:`io`
module, and can hence be treated as a readable object in the context of that
framework.
Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
:param preload_content:
If True, the response's body will be preloaded during construction.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param original_response:
When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
:param retries:
The retries contains the last :class:`~urllib3.util.retry.Retry` that
was used during the request.
:param enforce_content_length:
Enforce content length checking. Body returned by server must match
value of Content-Length header, if present. Otherwise, raise error.
"""
CONTENT_DECODERS = ["gzip", "deflate"]
if brotli is not None:
CONTENT_DECODERS += ["br"]
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
def __init__(
self,
body="",
headers=None,
status=0,
version=0,
reason=None,
strict=0,
preload_content=True,
decode_content=True,
original_response=None,
pool=None,
connection=None,
msg=None,
retries=None,
enforce_content_length=False,
request_method=None,
request_url=None,
auto_close=True,
):
if isinstance(headers, HTTPHeaderDict):
self.headers = headers
else:
self.headers = HTTPHeaderDict(headers)
self.status = status
self.version = version
self.reason = reason
self.strict = strict
self.decode_content = decode_content
self.retries = retries
self.enforce_content_length = enforce_content_length
self.auto_close = auto_close
self._decoder = None
self._body = None
self._fp = None
self._original_response = original_response
self._fp_bytes_read = 0
self.msg = msg
self._request_url = request_url
if body and isinstance(body, (six.string_types, bytes)):
self._body = body
self._pool = pool
self._connection = connection
if hasattr(body, "read"):
self._fp = body
# Are we using the chunked-style of transfer encoding?
self.chunked = False
self.chunk_left = None
tr_enc = self.headers.get("transfer-encoding", "").lower()
# Don't incur the penalty of creating a list and then discarding it
encodings = (enc.strip() for enc in tr_enc.split(","))
if "chunked" in encodings:
self.chunked = True
# Determine length of response
self.length_remaining = self._init_length(request_method)
# If requested, preload the body.
if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
"""
Should we redirect and where to?
:returns: Truthy redirect location string if we got a redirect status
code and valid location. ``None`` if redirect status and no
location. ``False`` if not a redirect status code.
"""
if self.status in self.REDIRECT_STATUSES:
return self.headers.get("location")
return False
def release_conn(self):
if not self._pool or not self._connection:
return
self._pool._put_conn(self._connection)
self._connection = None
def drain_conn(self):
"""
Read and discard any remaining HTTP response data in the response connection.
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
"""
try:
self.read()
except (HTTPError, SocketError, BaseSSLError, HTTPException):
pass
@property
def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
if self._body:
return self._body
if self._fp:
return self.read(cache_content=True)
@property
def connection(self):
return self._connection
def isclosed(self):
return is_fp_closed(self._fp)
def tell(self):
"""
        Obtain the number of bytes pulled over the wire so far. May differ
        from the amount of content returned by
        :meth:`urllib3.response.HTTPResponse.read` if bytes are encoded on
        the wire (e.g., compressed).
"""
return self._fp_bytes_read
def _init_length(self, request_method):
"""
Set initial length value for Response content if available.
"""
length = self.headers.get("content-length")
if length is not None:
if self.chunked:
# This Response will fail with an IncompleteRead if it can't be
# received as chunked. This method falls back to attempt reading
# the response before raising an exception.
log.warning(
"Received response with both Content-Length and "
"Transfer-Encoding set. This is expressly forbidden "
"by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
"attempting to process response as Transfer-Encoding: "
"chunked."
)
return None
try:
# RFC 7230 section 3.3.2 specifies multiple content lengths can
# be sent in a single Content-Length header
# (e.g. Content-Length: 42, 42). This line ensures the values
# are all valid ints and that as long as the `set` length is 1,
# all values are the same. Otherwise, the header is invalid.
lengths = set([int(val) for val in length.split(",")])
if len(lengths) > 1:
raise InvalidHeader(
"Content-Length contained multiple "
"unmatching values (%s)" % length
)
length = lengths.pop()
except ValueError:
length = None
else:
if length < 0:
length = None
# Convert status to int for comparison
# In some cases, httplib returns a status of "_UNKNOWN"
try:
status = int(self.status)
except ValueError:
status = 0
# Check for responses that shouldn't include a body
if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
length = 0
return length
def _init_decoder(self):
"""
Set-up the _decoder attribute if necessary.
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get("content-encoding", "").lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
elif "," in content_encoding:
encodings = [
e.strip()
for e in content_encoding.split(",")
if e.strip() in self.CONTENT_DECODERS
]
if len(encodings):
self._decoder = _get_decoder(content_encoding)
DECODER_ERROR_CLASSES = (IOError, zlib.error)
if brotli is not None:
DECODER_ERROR_CLASSES += (brotli.error,)
def _decode(self, data, decode_content, flush_decoder):
"""
Decode the data passed in and potentially flush the decoder.
"""
if not decode_content:
return data
try:
if self._decoder:
data = self._decoder.decompress(data)
except self.DECODER_ERROR_CLASSES as e:
content_encoding = self.headers.get("content-encoding", "").lower()
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding,
e,
)
if flush_decoder:
data += self._flush_decoder()
return data
def _flush_decoder(self):
"""
Flushes the decoder. Should only be called if the decoder is actually
being used.
"""
if self._decoder:
buf = self._decoder.decompress(b"")
return buf + self._decoder.flush()
return b""
@contextmanager
def _error_catcher(self):
"""
Catch low-level Python exceptions, instead re-raising urllib3
variants, so that low-level exceptions are not leaked in the
high-level API.
On exit, release the connection back to the pool.
"""
clean_exit = False
try:
try:
yield
except SocketTimeout:
# FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
# there is yet no clean way to get at it from this context.
raise ReadTimeoutError(self._pool, None, "Read timed out.")
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
if "read operation timed out" not in str(e):
# SSL errors related to framing/MAC get wrapped and reraised here
raise SSLError(e)
raise ReadTimeoutError(self._pool, None, "Read timed out.")
except (HTTPException, SocketError) as e:
# This includes IncompleteRead.
raise ProtocolError("Connection broken: %r" % e, e)
# If no exception is thrown, we should avoid cleaning up
# unnecessarily.
clean_exit = True
finally:
# If we didn't terminate cleanly, we need to throw away our
# connection.
if not clean_exit:
# The response may not be closed but we're not going to use it
# anymore so close it now to ensure that the connection is
# released back to the pool.
if self._original_response:
self._original_response.close()
# Closing the response may not actually be sufficient to close
# everything, so if we have a hold of the connection close that
# too.
if self._connection:
self._connection.close()
# If we hold the original response but it's closed now, we should
# return the connection back to the pool.
if self._original_response and self._original_response.isclosed():
self.release_conn()
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
:param amt:
How much of the content to read. If specified, caching is skipped
because it doesn't make sense to cache partial content as the full
response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param cache_content:
If True, will save the returned data such that the same result is
returned regardless of the state of the underlying file object. This
is useful if you want the ``.data`` property to continue working
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
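Example (a minimal sketch of partial vs. cached reads; assumes a
:class:`~urllib3.PoolManager` named ``http``)::
>>> r = http.request("GET", "http://httpbin.org/bytes/1024", preload_content=False)
>>> head = r.read(512)                  # up to 512 bytes; caching is skipped
>>> rest = r.read(cache_content=True)   # remainder, kept available via ``.data``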
"""
self._init_decoder()
if decode_content is None:
decode_content = self.decode_content
if self._fp is None:
return
flush_decoder = False
fp_closed = getattr(self._fp, "closed", False)
with self._error_catcher():
if amt is None:
# cStringIO doesn't like amt=None
data = self._fp.read() if not fp_closed else b""
flush_decoder = True
else:
cache_content = False
data = self._fp.read(amt) if not fp_closed else b""
if (
amt != 0 and not data
): # Platform-specific: Buggy versions of Python.
# Close the connection when no data is returned
#
# This is redundant to what httplib/http.client _should_
# already do. However, versions of python released before
# December 15, 2012 (http://bugs.python.org/issue16298) do
# not properly close the connection in all cases. There is
# no harm in redundantly calling close.
self._fp.close()
flush_decoder = True
if self.enforce_content_length and self.length_remaining not in (
0,
None,
):
# This is an edge case that httplib failed to cover due
# to concerns of backward compatibility. We're
# addressing it here to make sure IncompleteRead is
# raised during streaming, so all calls with incorrect
# Content-Length are caught.
raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
if data:
self._fp_bytes_read += len(data)
if self.length_remaining is not None:
self.length_remaining -= len(data)
data = self._decode(data, decode_content, flush_decoder)
if cache_content:
self._body = data
return data
def stream(self, amt=2 ** 16, decode_content=None):
"""
A generator wrapper for the read() method. A call will block until
``amt`` bytes have been read from the connection or until the
connection is closed.
:param amt:
How much of the content to read. The generator will return up to
``amt`` bytes of data per iteration, but may return less. This is particularly
likely when using compressed data. However, the empty string will
never be returned.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
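Example (illustrative; assumes a :class:`~urllib3.PoolManager` named
``http``; ``handle`` is a hypothetical consumer)::
>>> r = http.request("GET", "http://httpbin.org/stream-bytes/4096", preload_content=False)
>>> for chunk in r.stream(1024):
...     handle(chunk)
>>> r.release_conn()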
"""
if self.chunked and self.supports_chunked_reads():
for line in self.read_chunked(amt, decode_content=decode_content):
yield line
else:
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
if data:
yield data
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
"""
Given an :class:`http.client.HTTPResponse` instance ``r``, return a
corresponding :class:`urllib3.response.HTTPResponse` object.
Remaining parameters are passed to the HTTPResponse constructor, along
with ``original_response=r``.
"""
headers = r.msg
if not isinstance(headers, HTTPHeaderDict):
if six.PY2:
# Python 2.7
headers = HTTPHeaderDict.from_httplib(headers)
else:
headers = HTTPHeaderDict(headers.items())
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, "strict", 0)
resp = ResponseCls(
body=r,
headers=headers,
status=r.status,
version=r.version,
reason=r.reason,
strict=strict,
original_response=r,
**response_kw
)
return resp
# Backwards-compatibility methods for http.client.HTTPResponse
def getheaders(self):
return self.headers
def getheader(self, name, default=None):
return self.headers.get(name, default)
# Backwards compatibility for http.cookiejar
def info(self):
return self.headers
# Overrides from io.IOBase
def close(self):
if not self.closed:
self._fp.close()
if self._connection:
self._connection.close()
if not self.auto_close:
io.IOBase.close(self)
@property
def closed(self):
if not self.auto_close:
return io.IOBase.closed.__get__(self)
elif self._fp is None:
return True
elif hasattr(self._fp, "isclosed"):
return self._fp.isclosed()
elif hasattr(self._fp, "closed"):
return self._fp.closed
else:
return True
def fileno(self):
if self._fp is None:
raise IOError("HTTPResponse has no file to get a fileno from")
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
raise IOError(
"The file-like object this HTTPResponse is wrapped "
"around has no file descriptor"
)
def flush(self):
if (
self._fp is not None
and hasattr(self._fp, "flush")
and not getattr(self._fp, "closed", False)
):
return self._fp.flush()
def readable(self):
# This method is required for `io` module compatibility.
return True
def readinto(self, b):
# This method is required for `io` module compatibility.
temp = self.read(len(b))
if len(temp) == 0:
return 0
else:
b[: len(temp)] = temp
return len(temp)
def supports_chunked_reads(self):
"""
Checks if the underlying file-like object looks like a
:class:`http.client.HTTPResponse` object. We do this by testing for
the fp attribute. If it is present we assume it returns raw chunks as
processed by read_chunked().
"""
return hasattr(self._fp, "fp")
def _update_chunk_length(self):
# First, we'll figure out the length of a chunk and then
# we'll try to read it from the socket.
if self.chunk_left is not None:
return
line = self._fp.fp.readline()
line = line.split(b";", 1)[0]
try:
self.chunk_left = int(line, 16)
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
raise InvalidChunkLength(self, line)
def _handle_chunk(self, amt):
returned_chunk = None
if amt is None:
chunk = self._fp._safe_read(self.chunk_left)
returned_chunk = chunk
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
elif amt < self.chunk_left:
value = self._fp._safe_read(amt)
self.chunk_left = self.chunk_left - amt
returned_chunk = value
elif amt == self.chunk_left:
value = self._fp._safe_read(amt)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
returned_chunk = value
else: # amt > self.chunk_left
returned_chunk = self._fp._safe_read(self.chunk_left)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
return returned_chunk
def read_chunked(self, amt=None, decode_content=None):
"""
Similar to :meth:`HTTPResponse.read`, but with an additional
parameter: ``decode_content``.
:param amt:
How much of the content to read. If specified, caching is skipped
because it doesn't make sense to cache partial content as the full
response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
self._init_decoder()
# FIXME: Rewrite this method and make it a class with better-structured logic.
if not self.chunked:
raise ResponseNotChunked(
"Response is not chunked. "
"Header 'transfer-encoding: chunked' is missing."
)
if not self.supports_chunked_reads():
raise BodyNotHttplibCompatible(
"Body should be http.client.HTTPResponse like. "
"It should have have an fp attribute which returns raw chunks."
)
with self._error_catcher():
# Don't bother reading the body of a HEAD request.
if self._original_response and is_response_to_head(self._original_response):
self._original_response.close()
return
# If a response is already read and closed
# then return immediately.
if self._fp.fp is None:
return
while True:
self._update_chunk_length()
if self.chunk_left == 0:
break
chunk = self._handle_chunk(amt)
decoded = self._decode(
chunk, decode_content=decode_content, flush_decoder=False
)
if decoded:
yield decoded
if decode_content:
# On CPython and PyPy, we should never need to flush the
# decoder. However, on Jython we *might* need to, so
# let's defensively do it anyway.
decoded = self._flush_decoder()
if decoded: # Platform-specific: Jython.
yield decoded
# Chunk content ends with \r\n: discard it.
while True:
line = self._fp.fp.readline()
if not line:
# Some sites may not end with '\r\n'.
break
if line == b"\r\n":
break
# We read everything; close the "file".
if self._original_response:
self._original_response.close()
def geturl(self):
"""
Returns the URL that was the source of this response.
If the request that generated this response redirected, this method
will return the final redirect location.
"""
if self.retries is not None and len(self.retries.history):
return self.retries.history[-1].redirect_location
else:
return self._request_url
def __iter__(self):
buffer = []
for chunk in self.stream(decode_content=True):
if b"\n" in chunk:
chunk = chunk.split(b"\n")
yield b"".join(buffer) + chunk[0] + b"\n"
for x in chunk[1:-1]:
yield x + b"\n"
if chunk[-1]:
buffer = [chunk[-1]]
else:
buffer = []
else:
buffer.append(chunk)
if buffer:
yield b"".join(buffer)
| 28,203 | Python | .py | 692 | 29.297688 | 110 | 0.576291 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,718 | poolmanager.py | rembo10_headphones/lib/urllib3/poolmanager.py |
from __future__ import absolute_import
import collections
import functools
import logging
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
from .exceptions import (
LocationValueError,
MaxRetryError,
ProxySchemeUnknown,
ProxySchemeUnsupported,
URLSchemeUnknown,
)
from .packages import six
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
from .util.proxy import connection_requires_http_tunnel
from .util.retry import Retry
from .util.url import parse_url
__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
log = logging.getLogger(__name__)
SSL_KEYWORDS = (
"key_file",
"cert_file",
"cert_reqs",
"ca_certs",
"ssl_version",
"ca_cert_dir",
"ssl_context",
"key_password",
)
# All known keyword arguments that could be provided to the pool manager, its
# pools, or the underlying connections. This is used to construct a pool key.
_key_fields = (
"key_scheme", # str
"key_host", # str
"key_port", # int
"key_timeout", # int or float or Timeout
"key_retries", # int or Retry
"key_strict", # bool
"key_block", # bool
"key_source_address", # str
"key_key_file", # str
"key_key_password", # str
"key_cert_file", # str
"key_cert_reqs", # str
"key_ca_certs", # str
"key_ssl_version", # str
"key_ca_cert_dir", # str
"key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
"key_maxsize", # int
"key_headers", # dict
"key__proxy", # parsed proxy url
"key__proxy_headers", # dict
"key__proxy_config", # class
"key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
"key__socks_options", # dict
"key_assert_hostname", # bool or string
"key_assert_fingerprint", # str
"key_server_hostname", # str
)
#: The namedtuple class used to construct keys for the connection pool.
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple("PoolKey", _key_fields)
_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
def _default_key_normalizer(key_class, request_context):
"""
Create a pool key out of a request context dictionary.
According to RFC 3986, both the scheme and host are case-insensitive.
Therefore, this function normalizes both before constructing the pool
key for an HTTPS request. If you wish to change this behaviour, provide
alternate callables to ``key_fn_by_scheme``.
:param key_class:
The class to use when constructing the key. This should be a namedtuple
with the ``scheme`` and ``host`` keys at a minimum.
:type key_class: namedtuple
:param request_context:
A dictionary-like object that contain the context for a request.
:type request_context: dict
:return: A namedtuple that can be used as a connection pool key.
:rtype: PoolKey
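Example (a minimal sketch of the normalization)::
>>> context = {"scheme": "HTTPS", "host": "Example.COM", "port": 443}
>>> key = _default_key_normalizer(PoolKey, context)
>>> key.key_scheme, key.key_host, key.key_port
('https', 'example.com', 443)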
"""
# Since we mutate the dictionary, make a copy first
context = request_context.copy()
context["scheme"] = context["scheme"].lower()
context["host"] = context["host"].lower()
# These are both dictionaries and need to be transformed into frozensets
for key in ("headers", "_proxy_headers", "_socks_options"):
if key in context and context[key] is not None:
context[key] = frozenset(context[key].items())
# The socket_options key may be a list and needs to be transformed into a
# tuple.
socket_opts = context.get("socket_options")
if socket_opts is not None:
context["socket_options"] = tuple(socket_opts)
# Map the kwargs to the names in the namedtuple - this is necessary since
# namedtuples can't have fields starting with '_'.
for key in list(context.keys()):
context["key_" + key] = context.pop(key)
# Default to ``None`` for keys missing from the context
for field in key_class._fields:
if field not in context:
context[field] = None
return key_class(**context)
#: A dictionary that maps a scheme to a callable that creates a pool key.
#: This can be used to alter the way pool keys are constructed, if desired.
#: Each PoolManager makes a copy of this dictionary so they can be configured
#: globally here, or individually on the instance.
key_fn_by_scheme = {
"http": functools.partial(_default_key_normalizer, PoolKey),
"https": functools.partial(_default_key_normalizer, PoolKey),
}
pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
class PoolManager(RequestMethods):
"""
Allows for arbitrary requests while transparently keeping track of
necessary connection pools for you.
:param num_pools:
Number of connection pools to cache before discarding the least
recently used pool.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param \\**connection_pool_kw:
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
Example::
>>> manager = PoolManager(num_pools=2)
>>> r = manager.request('GET', 'http://google.com/')
>>> r = manager.request('GET', 'http://google.com/mail')
>>> r = manager.request('GET', 'http://yahoo.com/')
>>> len(manager.pools)
2
"""
proxy = None
proxy_config = None
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
self.connection_pool_kw = connection_pool_kw
self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
# Locally set the pool classes and keys so other PoolManagers can
# override them.
self.pool_classes_by_scheme = pool_classes_by_scheme
self.key_fn_by_scheme = key_fn_by_scheme.copy()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.clear()
# Return False to re-raise any potential exceptions
return False
def _new_pool(self, scheme, host, port, request_context=None):
"""
Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
to the pool class used. This method is used to actually create the
connection pools handed out by :meth:`connection_from_url` and
companion methods. It is intended to be overridden for customization.
"""
pool_cls = self.pool_classes_by_scheme[scheme]
if request_context is None:
request_context = self.connection_pool_kw.copy()
# Although the context has everything necessary to create the pool,
# this function has historically only used the scheme, host, and port
# in the positional args. When an API change is acceptable these can
# be removed.
for key in ("scheme", "host", "port"):
request_context.pop(key, None)
if scheme == "http":
for kw in SSL_KEYWORDS:
request_context.pop(kw, None)
return pool_cls(host, port, **request_context)
def clear(self):
"""
Empty our store of pools and direct them all to close.
This will not affect in-flight connections, but they will not be
re-used after completion.
"""
self.pools.clear()
def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
provided, it is merged with the instance's ``connection_pool_kw``
variable and used to create the new connection pool, if one is
needed.
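Example (illustrative)::
>>> manager = PoolManager()
>>> pool = manager.connection_from_host("example.com", scheme="https")
>>> pool.port  # derived from the scheme
443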
"""
if not host:
raise LocationValueError("No host specified.")
request_context = self._merge_pool_kwargs(pool_kwargs)
request_context["scheme"] = scheme or "http"
if not port:
port = port_by_scheme.get(request_context["scheme"].lower(), 80)
request_context["port"] = port
request_context["host"] = host
return self.connection_from_context(request_context)
def connection_from_context(self, request_context):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
scheme = request_context["scheme"].lower()
pool_key_constructor = self.key_fn_by_scheme.get(scheme)
if not pool_key_constructor:
raise URLSchemeUnknown(scheme)
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
def connection_from_pool_key(self, pool_key, request_context=None):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
``port`` fields.
"""
with self.pools.lock:
# If the scheme, host, or port doesn't match existing open
# connections, open a new ConnectionPool.
pool = self.pools.get(pool_key)
if pool:
return pool
# Make a fresh ConnectionPool of the desired type
scheme = request_context["scheme"]
host = request_context["host"]
port = request_context["port"]
pool = self._new_pool(scheme, host, port, request_context=request_context)
self.pools[pool_key] = pool
return pool
def connection_from_url(self, url, pool_kwargs=None):
"""
Similar to :func:`urllib3.connectionpool.connection_from_url`.
If ``pool_kwargs`` is not provided and a new pool needs to be
constructed, ``self.connection_pool_kw`` is used to initialize
the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
is provided, it is used instead. Note that if a new pool does not
need to be created for the request, the provided ``pool_kwargs`` are
not used.
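Example (illustrative; the same pool is reused for the same host)::
>>> manager = PoolManager()
>>> a = manager.connection_from_url("https://example.com/a")
>>> b = manager.connection_from_url("https://example.com/b")
>>> a is b
True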
"""
u = parse_url(url)
return self.connection_from_host(
u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
)
def _merge_pool_kwargs(self, override):
"""
Merge a dictionary of override values for self.connection_pool_kw.
This does not modify self.connection_pool_kw and returns a new dict.
Any keys in the override dictionary with a value of ``None`` are
removed from the merged dictionary.
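Example (a minimal sketch)::
>>> manager = PoolManager(maxsize=4, block=True)
>>> manager._merge_pool_kwargs({"maxsize": 10, "block": None})
{'maxsize': 10}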
"""
base_pool_kwargs = self.connection_pool_kw.copy()
if override:
for key, value in override.items():
if value is None:
try:
del base_pool_kwargs[key]
except KeyError:
pass
else:
base_pool_kwargs[key] = value
return base_pool_kwargs
def _proxy_requires_url_absolute_form(self, parsed_url):
"""
Indicates if the proxy requires the complete destination URL in the
request. Normally this is only needed when not using an HTTP CONNECT
tunnel.
"""
if self.proxy is None:
return False
return not connection_requires_http_tunnel(
self.proxy, self.proxy_config, parsed_url.scheme
)
def _validate_proxy_scheme_url_selection(self, url_scheme):
"""
Validates that we're not attempting to do TLS in TLS connections on
Python2 or with unsupported SSL implementations.
"""
if self.proxy is None or url_scheme != "https":
return
if self.proxy.scheme != "https":
return
if six.PY2 and not self.proxy_config.use_forwarding_for_https:
raise ProxySchemeUnsupported(
"Contacting HTTPS destinations through HTTPS proxies "
"'via CONNECT tunnels' is not supported in Python 2"
)
def urlopen(self, method, url, redirect=True, **kw):
"""
Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
The given ``url`` parameter must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
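Example (illustrative; uses httpbin.org)::
>>> manager = PoolManager()
>>> r = manager.urlopen("GET", "http://httpbin.org/redirect/1")
>>> r.status  # 200 once the redirect has been followed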
"""
u = parse_url(url)
self._validate_proxy_scheme_url_selection(u.scheme)
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw["assert_same_host"] = False
kw["redirect"] = False
if "headers" not in kw:
kw["headers"] = self.headers.copy()
if self._proxy_requires_url_absolute_form(u):
response = conn.urlopen(method, url, **kw)
else:
response = conn.urlopen(method, u.request_uri, **kw)
redirect_location = redirect and response.get_redirect_location()
if not redirect_location:
return response
# Support relative URLs for redirecting.
redirect_location = urljoin(url, redirect_location)
# RFC 7231, Section 6.4.4
if response.status == 303:
method = "GET"
retries = kw.get("retries")
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
# Strip headers marked as unsafe to forward to the redirected location.
# Check remove_headers_on_redirect to avoid a potential network call within
# conn.is_same_host() which may use socket.gethostbyname() in the future.
if retries.remove_headers_on_redirect and not conn.is_same_host(
redirect_location
):
headers = list(six.iterkeys(kw["headers"]))
for header in headers:
if header.lower() in retries.remove_headers_on_redirect:
kw["headers"].pop(header, None)
try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
kw["retries"] = retries
kw["redirect"] = redirect
log.info("Redirecting %s -> %s", url, redirect_location)
response.drain_conn()
return self.urlopen(method, redirect_location, **kw)
class ProxyManager(PoolManager):
"""
Behaves just like :class:`PoolManager`, but sends all requests through
the defined proxy, using the CONNECT method for HTTPS URLs.
:param proxy_url:
The URL of the proxy to be used.
:param proxy_headers:
A dictionary containing headers that will be sent to the proxy. In the
case of HTTP they are sent with each request, while in the
HTTPS/CONNECT case they are sent only once. Can be used for proxy
authentication.
:param proxy_ssl_context:
The proxy SSL context is used to establish the TLS connection to the
proxy when using HTTPS proxies.
:param use_forwarding_for_https:
(Defaults to False) If set to True will forward requests to the HTTPS
proxy to be made on behalf of the client instead of creating a TLS
tunnel via the CONNECT method. **Enabling this flag means that request
and response headers and content will be visible from the HTTPS proxy**
whereas tunneling keeps request and response headers and content
private. IP address, target hostname, SNI, and port are always visible
to an HTTPS proxy even when this flag is disabled.
Example:
>>> proxy = urllib3.ProxyManager('http://localhost:3128/')
>>> r1 = proxy.request('GET', 'http://google.com/')
>>> r2 = proxy.request('GET', 'http://httpbin.org/')
>>> len(proxy.pools)
1
>>> r3 = proxy.request('GET', 'https://httpbin.org/')
>>> r4 = proxy.request('GET', 'https://twitter.com/')
>>> len(proxy.pools)
3
"""
def __init__(
self,
proxy_url,
num_pools=10,
headers=None,
proxy_headers=None,
proxy_ssl_context=None,
use_forwarding_for_https=False,
**connection_pool_kw
):
if isinstance(proxy_url, HTTPConnectionPool):
proxy_url = "%s://%s:%i" % (
proxy_url.scheme,
proxy_url.host,
proxy_url.port,
)
proxy = parse_url(proxy_url)
if proxy.scheme not in ("http", "https"):
raise ProxySchemeUnknown(proxy.scheme)
if not proxy.port:
port = port_by_scheme.get(proxy.scheme, 80)
proxy = proxy._replace(port=port)
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
self.proxy_ssl_context = proxy_ssl_context
self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
connection_pool_kw["_proxy"] = self.proxy
connection_pool_kw["_proxy_headers"] = self.proxy_headers
connection_pool_kw["_proxy_config"] = self.proxy_config
super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
if scheme == "https":
return super(ProxyManager, self).connection_from_host(
host, port, scheme, pool_kwargs=pool_kwargs
)
return super(ProxyManager, self).connection_from_host(
self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
)
def _set_proxy_headers(self, url, headers=None):
"""
Sets headers needed by proxies: specifically, the Accept and Host
headers. Only sets headers not provided by the user.
"""
headers_ = {"Accept": "*/*"}
netloc = parse_url(url).netloc
if netloc:
headers_["Host"] = netloc
if headers:
headers_.update(headers)
return headers_
def urlopen(self, method, url, redirect=True, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
u = parse_url(url)
if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
# For connections using HTTP CONNECT, httplib sets the necessary
# headers on the CONNECT to the proxy. If we're not using CONNECT,
# we'll definitely need to set 'Host' at the very least.
headers = kw.get("headers", self.headers)
kw["headers"] = self._set_proxy_headers(url, headers)
return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
def proxy_from_url(url, **kw):
return ProxyManager(proxy_url=url, **kw)
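# Example (illustrative; assumes a proxy listening on localhost:3128):
#
#   proxy = proxy_from_url("http://localhost:3128/")
#   r = proxy.request("GET", "http://example.com/")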
| 19,763 | Python | .py | 434 | 37.085253 | 100 | 0.644146 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,719 | request.py | rembo10_headphones/lib/urllib3/request.py |
from __future__ import absolute_import
from .filepost import encode_multipart_formdata
from .packages.six.moves.urllib.parse import urlencode
__all__ = ["RequestMethods"]
class RequestMethods(object):
"""
Convenience mixin for classes that implement a :meth:`urlopen` method, such
as :class:`urllib3.HTTPConnectionPool` and
:class:`urllib3.PoolManager`.
Provides behavior for making common types of HTTP request methods and
decides which type of request field encoding to use.
Specifically,
:meth:`.request_encode_url` is for sending requests whose fields are
encoded in the URL (such as GET, HEAD, DELETE).
:meth:`.request_encode_body` is for sending requests whose fields are
encoded in the *body* of the request using multipart or www-form-urlencoded
(such as for POST, PUT, PATCH).
:meth:`.request` is for making any kind of request; it will look up the
appropriate encoding format and use one of the above two methods to make
the request.
Initializer parameters:
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
"""
_encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
def __init__(self, headers=None):
self.headers = headers or {}
def urlopen(
self,
method,
url,
body=None,
headers=None,
encode_multipart=True,
multipart_boundary=None,
**kw
): # Abstract
raise NotImplementedError(
"Classes extending RequestMethods must implement "
"their own ``urlopen`` method."
)
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
"""
Make a request using :meth:`urlopen` with the appropriate encoding of
``fields`` based on the ``method`` used.
This is a convenience method that requires the least amount of manual
effort. It can be used in most situations, while still having the
option to drop down to more specific methods when necessary, such as
:meth:`request_encode_url`, :meth:`request_encode_body`,
or even the lowest level :meth:`urlopen`.
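Example (illustrative; assumes a :class:`~urllib3.PoolManager` named
``http``)::
>>> r = http.request("GET", "http://httpbin.org/get", fields={"q": "urllib3"})    # encoded in the URL
>>> r = http.request("POST", "http://httpbin.org/post", fields={"q": "urllib3"})  # encoded in the body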
"""
method = method.upper()
urlopen_kw["request_url"] = url
if method in self._encode_url_methods:
return self.request_encode_url(
method, url, fields=fields, headers=headers, **urlopen_kw
)
else:
return self.request_encode_body(
method, url, fields=fields, headers=headers, **urlopen_kw
)
def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
"""
if headers is None:
headers = self.headers
extra_kw = {"headers": headers}
extra_kw.update(urlopen_kw)
if fields:
url += "?" + urlencode(fields)
return self.urlopen(method, url, **extra_kw)
def request_encode_body(
self,
method,
url,
fields=None,
headers=None,
encode_multipart=True,
multipart_boundary=None,
**urlopen_kw
):
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the body. This is useful for request methods like POST, PUT, PATCH, etc.
When ``encode_multipart=True`` (default), then
:func:`urllib3.encode_multipart_formdata` is used to encode
the payload with the appropriate content type. Otherwise
:func:`urllib.parse.urlencode` is used with the
'application/x-www-form-urlencoded' content type.
Multipart encoding must be used when posting files, and it's reasonably
safe to use it at other times too. However, it may break request
signing, such as with OAuth.
Supports an optional ``fields`` parameter of key/value strings AND
key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
the MIME type is optional. For example::
fields = {
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(),
'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
}
When uploading a file, providing a filename (the first parameter of the
tuple) is optional but recommended to best mimic behavior of browsers.
Note that if ``headers`` are supplied, the 'Content-Type' header will
be overwritten because it depends on the dynamic random boundary string
which is used to compose the body of the request. The random boundary
string can be explicitly set with the ``multipart_boundary`` parameter.
"""
if headers is None:
headers = self.headers
extra_kw = {"headers": {}}
if fields:
if "body" in urlopen_kw:
raise TypeError(
"request got values for both 'fields' and 'body', can only specify one."
)
if encode_multipart:
body, content_type = encode_multipart_formdata(
fields, boundary=multipart_boundary
)
else:
body, content_type = (
urlencode(fields),
"application/x-www-form-urlencoded",
)
extra_kw["body"] = body
extra_kw["headers"] = {"Content-Type": content_type}
extra_kw["headers"].update(headers)
extra_kw.update(urlopen_kw)
return self.urlopen(method, url, **extra_kw)
| 5,985 | Python | .py | 135 | 34.377778 | 92 | 0.617885 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,720 | __init__.py | rembo10_headphones/lib/urllib3/__init__.py |
"""
Python HTTP library with thread-safe connection pooling, file post support, a user-friendly API, and more
"""
from __future__ import absolute_import
# Set default logging handler to avoid "No handler found" warnings.
import logging
import warnings
from logging import NullHandler
from . import exceptions
from ._version import __version__
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = __version__
__all__ = (
"HTTPConnectionPool",
"HTTPSConnectionPool",
"PoolManager",
"ProxyManager",
"HTTPResponse",
"Retry",
"Timeout",
"add_stderr_logger",
"connection_from_url",
"disable_warnings",
"encode_multipart_formdata",
"get_host",
"make_headers",
"proxy_from_url",
)
logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
"""
Helper for quickly adding a StreamHandler to the logger. Useful for
debugging.
Returns the handler after adding it.
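Example (illustrative)::
>>> import logging, urllib3
>>> handler = urllib3.add_stderr_logger(logging.INFO)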
"""
# This method needs to be in this __init__.py to get the __name__ correct
# even if urllib3 is vendored within another package.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger.addHandler(handler)
logger.setLevel(level)
logger.debug("Added a stderr logging handler to logger: %s", __name__)
return handler
# ... Clean up.
del NullHandler
# All warning filters *must* be appended unless you're really certain that they
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
# SNIMissingWarnings should go off only once.
warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
def disable_warnings(category=exceptions.HTTPWarning):
"""
Helper for quickly disabling all urllib3 warnings.
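Example (illustrative)::
>>> import urllib3
>>> urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)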
"""
warnings.simplefilter("ignore", category)
| 2,763 | Python | .py | 71 | 35.915493 | 99 | 0.761016 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,721 | exceptions.py | rembo10_headphones/lib/urllib3/exceptions.py |
from __future__ import absolute_import
from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
# Base Exceptions
class HTTPError(Exception):
"""Base exception used by this module."""
pass
class HTTPWarning(Warning):
"""Base warning used by this module."""
pass
class PoolError(HTTPError):
"""Base exception for errors caused within a pool."""
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, None)
class RequestError(PoolError):
"""Base exception for PoolErrors that have associated URLs."""
def __init__(self, pool, url, message):
self.url = url
PoolError.__init__(self, pool, message)
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, self.url, None)
class SSLError(HTTPError):
"""Raised when SSL certificate fails in an HTTPS connection."""
pass
class ProxyError(HTTPError):
"""Raised when the connection to a proxy fails."""
def __init__(self, message, error, *args):
super(ProxyError, self).__init__(message, error, *args)
self.original_error = error
class DecodeError(HTTPError):
"""Raised when automatic decoding based on Content-Type fails."""
pass
class ProtocolError(HTTPError):
"""Raised when something unexpected happens mid-request/response."""
pass
#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
# Leaf Exceptions
class MaxRetryError(RequestError):
"""Raised when the maximum number of retries is exceeded.
:param pool: The connection pool
:type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
:param string url: The requested Url
:param exceptions.Exception reason: The underlying error
"""
def __init__(self, pool, url, reason=None):
self.reason = reason
message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
RequestError.__init__(self, pool, url, message)
class HostChangedError(RequestError):
"""Raised when an existing pool gets a request for a foreign host."""
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
RequestError.__init__(self, pool, url, message)
self.retries = retries
class TimeoutStateError(HTTPError):
"""Raised when passing an invalid state to a timeout"""
pass
class TimeoutError(HTTPError):
"""Raised when a socket timeout error occurs.
Catching this error will catch both :exc:`ReadTimeoutErrors
<ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
"""
pass
class ReadTimeoutError(TimeoutError, RequestError):
"""Raised when a socket timeout occurs while receiving data from a server"""
pass
# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
"""Raised when a socket timeout occurs while connecting to a server"""
pass
class NewConnectionError(ConnectTimeoutError, PoolError):
"""Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
pass
class EmptyPoolError(PoolError):
"""Raised when a pool runs out of connections and no more are allowed."""
pass
class ClosedPoolError(PoolError):
"""Raised when a request enters a pool after the pool has been closed."""
pass
class LocationValueError(ValueError, HTTPError):
"""Raised when there is something wrong with a given URL input."""
pass
class LocationParseError(LocationValueError):
"""Raised when get_host or similar fails to parse the URL input."""
def __init__(self, location):
message = "Failed to parse: %s" % location
HTTPError.__init__(self, message)
self.location = location
class URLSchemeUnknown(LocationValueError):
"""Raised when a URL input has an unsupported scheme."""
def __init__(self, scheme):
message = "Not supported URL scheme %s" % scheme
super(URLSchemeUnknown, self).__init__(message)
self.scheme = scheme
class ResponseError(HTTPError):
"""Used as a container for an error reason supplied in a MaxRetryError."""
GENERIC_ERROR = "too many error responses"
SPECIFIC_ERROR = "too many {status_code} error responses"
class SecurityWarning(HTTPWarning):
"""Warned when performing security reducing actions"""
pass
class SubjectAltNameWarning(SecurityWarning):
"""Warned when connecting to a host with a certificate missing a SAN."""
pass
class InsecureRequestWarning(SecurityWarning):
"""Warned when making an unverified HTTPS request."""
pass
class SystemTimeWarning(SecurityWarning):
"""Warned when system time is suspected to be wrong"""
pass
class InsecurePlatformWarning(SecurityWarning):
"""Warned when certain TLS/SSL configuration is not available on a platform."""
pass
class SNIMissingWarning(HTTPWarning):
"""Warned when making a HTTPS request without SNI available."""
pass
class DependencyWarning(HTTPWarning):
"""
Warned when an attempt is made to import a module with missing optional
dependencies.
"""
pass
class ResponseNotChunked(ProtocolError, ValueError):
"""Response needs to be chunked in order to read it as chunks."""
pass
class BodyNotHttplibCompatible(HTTPError):
"""
Body should be :class:`http.client.HTTPResponse` like
(have an fp attribute which returns raw chunks) for read_chunked().
"""
pass
class IncompleteRead(HTTPError, httplib_IncompleteRead):
"""
Response length doesn't match expected Content-Length
Subclass of :class:`http.client.IncompleteRead` to allow int value
for ``partial`` to avoid creating large objects on streamed reads.
"""
def __init__(self, partial, expected):
super(IncompleteRead, self).__init__(partial, expected)
def __repr__(self):
return "IncompleteRead(%i bytes read, %i more expected)" % (
self.partial,
self.expected,
)
class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
"""Invalid chunk length in a chunked response."""
def __init__(self, response, length):
super(InvalidChunkLength, self).__init__(
response.tell(), response.length_remaining
)
self.response = response
self.length = length
def __repr__(self):
return "InvalidChunkLength(got length %r, %i bytes read)" % (
self.length,
self.partial,
)
class InvalidHeader(HTTPError):
"""The header provided was somehow invalid."""
pass
class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
"""ProxyManager does not support the supplied scheme"""
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
def __init__(self, scheme):
# 'localhost' is here because our URL parser parses
# localhost:8080 -> scheme=localhost, remove if we fix this.
if scheme == "localhost":
scheme = None
if scheme is None:
message = "Proxy URL had no scheme, should start with http:// or https://"
else:
message = (
"Proxy URL had unsupported scheme %s, should use http:// or https://"
% scheme
)
super(ProxySchemeUnknown, self).__init__(message)
class ProxySchemeUnsupported(ValueError):
"""Fetching HTTPS resources through HTTPS proxies is unsupported"""
pass
class HeaderParsingError(HTTPError):
"""Raised by assert_header_parsing, but we convert it to a log.warning statement."""
def __init__(self, defects, unparsed_data):
message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
super(HeaderParsingError, self).__init__(message)
class UnrewindableBodyError(HTTPError):
"""urllib3 encountered an error when trying to rewind a body"""
pass
| 8,217 | Python | .py | 193 | 36.528497 | 88 | 0.697872 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,722 | connectionpool.py | rembo10_headphones/lib/urllib3/connectionpool.py |
from __future__ import absolute_import
import errno
import logging
import re
import socket
import sys
import warnings
from socket import error as SocketError
from socket import timeout as SocketTimeout
from .connection import (
BaseSSLError,
BrokenPipeError,
DummyConnection,
HTTPConnection,
HTTPException,
HTTPSConnection,
VerifiedHTTPSConnection,
port_by_scheme,
)
from .exceptions import (
ClosedPoolError,
EmptyPoolError,
HeaderParsingError,
HostChangedError,
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
NewConnectionError,
ProtocolError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
)
from .packages import six
from .packages.six.moves import queue
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.queue import LifoQueue
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import get_host, parse_url
xrange = six.moves.xrange
log = logging.getLogger(__name__)
_Default = object()
# Pool objects
class ConnectionPool(object):
"""
Base class for all connection pools, such as
:class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
.. note::
ConnectionPool.urlopen() does not normalize or percent-encode target URIs,
which is useful if your target server doesn't support percent-encoded
target URIs.
"""
scheme = None
QueueCls = LifoQueue
def __init__(self, host, port=None):
if not host:
raise LocationValueError("No host specified.")
self.host = _normalize_host(host, scheme=self.scheme)
self._proxy_host = host.lower()
self.port = port
def __str__(self):
return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
# Return False to re-raise any potential exceptions
return False
def close(self):
"""
Close all pooled connections and disable the pool.
"""
pass
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Thread-safe connection pool for one host.
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
:class:`http.client.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
into :class:`http.client.HTTPConnection`.
:param strict:
Causes BadStatusLine to be raised if the status line can't be parsed
as a valid HTTP/1.0 or 1.1 status line, passed into
:class:`http.client.HTTPConnection`.
.. note::
Only works in Python 2. This parameter is ignored in Python 3.
:param timeout:
Socket timeout in seconds for each individual connection. This can
be a float or integer, which sets the timeout for the HTTP request,
or an instance of :class:`urllib3.util.Timeout` which gives you more
fine-grained control over request timeouts. After the constructor has
run, this is always a `urllib3.util.Timeout` object.
:param maxsize:
Number of connections to save that can be reused. More than 1 is useful
in multithreaded situations. If ``block`` is set to False, more
connections will be created but they will not be saved once they've
been used.
:param block:
If set to True, no more than ``maxsize`` connections will be used at
a time. When no free connections are available, the call will block
until a connection has been released. This is a useful side effect for
particular multithreaded situations where one does not want to use more
than maxsize connections per host to prevent flooding.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param retries:
Retry configuration to use by default with requests in this pool.
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
:class:`urllib3.ProxyManager`
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
instead, see :class:`urllib3.ProxyManager`
:param \\**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
:class:`urllib3.connection.HTTPSConnection` instances.
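Example (illustrative; uses httpbin.org)::
>>> with HTTPConnectionPool("httpbin.org", maxsize=2) as pool:
...     r = pool.request("GET", "/get")
...     r.status  # 200 if the request succeeded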
"""
scheme = "http"
ConnectionCls = HTTPConnection
ResponseCls = HTTPResponse
def __init__(
self,
host,
port=None,
strict=False,
timeout=Timeout.DEFAULT_TIMEOUT,
maxsize=1,
block=False,
headers=None,
retries=None,
_proxy=None,
_proxy_headers=None,
_proxy_config=None,
**conn_kw
):
ConnectionPool.__init__(self, host, port)
RequestMethods.__init__(self, headers)
self.strict = strict
if not isinstance(timeout, Timeout):
timeout = Timeout.from_float(timeout)
if retries is None:
retries = Retry.DEFAULT
self.timeout = timeout
self.retries = retries
self.pool = self.QueueCls(maxsize)
self.block = block
self.proxy = _proxy
self.proxy_headers = _proxy_headers or {}
self.proxy_config = _proxy_config
# Fill the queue up so that doing get() on it will block properly
for _ in xrange(maxsize):
self.pool.put(None)
# These are mostly for testing and debugging purposes.
self.num_connections = 0
self.num_requests = 0
self.conn_kw = conn_kw
if self.proxy:
# Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
# We cannot know if the user has added default socket options, so we cannot replace the
# list.
self.conn_kw.setdefault("socket_options", [])
self.conn_kw["proxy"] = self.proxy
self.conn_kw["proxy_config"] = self.proxy_config
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
"""
self.num_connections += 1
log.debug(
"Starting new HTTP connection (%d): %s:%s",
self.num_connections,
self.host,
self.port or "80",
)
conn = self.ConnectionCls(
host=self.host,
port=self.port,
timeout=self.timeout.connect_timeout,
strict=self.strict,
**self.conn_kw
)
return conn
def _get_conn(self, timeout=None):
"""
Get a connection. Will return a pooled connection if one is available.
If no connections are available and :attr:`.block` is ``False``, then a
fresh connection is returned.
:param timeout:
Seconds to wait before giving up and raising
:class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
:attr:`.block` is ``True``.
"""
conn = None
try:
conn = self.pool.get(block=self.block, timeout=timeout)
except AttributeError: # self.pool is None
raise ClosedPoolError(self, "Pool is closed.")
except queue.Empty:
if self.block:
raise EmptyPoolError(
self,
"Pool reached maximum size and no more connections are allowed.",
)
pass # Oh well, we'll create a new connection then
# If this is a persistent connection, check if it got disconnected
if conn and is_connection_dropped(conn):
log.debug("Resetting dropped connection: %s", self.host)
conn.close()
if getattr(conn, "auto_open", 1) == 0:
# This is a proxied connection that has been mutated by
# http.client._tunnel() and cannot be reused (since it would
# attempt to bypass the proxy)
conn = None
return conn or self._new_conn()
def _put_conn(self, conn):
"""
Put a connection back into the pool.
:param conn:
Connection object for the current host and port as returned by
:meth:`._new_conn` or :meth:`._get_conn`.
If the pool is already full, the connection is closed and discarded
because we exceeded maxsize. If connections are discarded frequently,
then maxsize should be increased.
If the pool is closed, then the connection will be closed and discarded.
"""
try:
self.pool.put(conn, block=False)
return # Everything is dandy, done.
except AttributeError:
# self.pool is None.
pass
except queue.Full:
# This should never happen if self.block == True
log.warning(
"Connection pool is full, discarding connection: %s. Connection pool size: %s",
self.host,
self.pool.qsize(),
)
# Connection never got put back into the pool, close it.
if conn:
conn.close()
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
pass
def _prepare_proxy(self, conn):
# Nothing to do for HTTP connections.
pass
def _get_timeout(self, timeout):
"""Helper that always returns a :class:`urllib3.util.Timeout`"""
if timeout is _Default:
return self.timeout.clone()
if isinstance(timeout, Timeout):
return timeout.clone()
else:
# User passed us an int/float. This is for backwards compatibility,
# can be removed later
return Timeout.from_float(timeout)
def _raise_timeout(self, err, url, timeout_value):
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
if isinstance(err, SocketTimeout):
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
)
# See the above comment about EAGAIN in Python 3. In Python 2 we have
# to specifically catch it and throw the timeout error
if hasattr(err, "errno") and err.errno in _blocking_errnos:
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
)
# Catch possible read timeouts thrown as SSL errors. If not the
# case, rethrow the original. We need to do this because of:
# http://bugs.python.org/issue10272
if "timed out" in str(err) or "did not complete (read)" in str(
err
): # Python < 2.7.4
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
)
def _make_request(
self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
):
"""
Perform a request on a given urllib connection object taken from our
pool.
:param conn:
a connection from one of our connection pools
:param timeout:
Socket timeout in seconds for the request. This can be a
float or integer, which will set the same timeout value for
the socket connect and the socket read, or an instance of
:class:`urllib3.util.Timeout`, which gives you more fine-grained
control over your timeouts.
"""
self.num_requests += 1
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
conn.timeout = timeout_obj.connect_timeout
# Trigger any extra validation we need to do.
try:
self._validate_conn(conn)
except (SocketTimeout, BaseSSLError) as e:
# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
# conn.request() calls http.client.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
try:
if chunked:
conn.request_chunked(method, url, **httplib_request_kw)
else:
conn.request(method, url, **httplib_request_kw)
# We are swallowing BrokenPipeError (errno.EPIPE) since the server is
# legitimately able to close the connection after sending a valid response.
# With this behaviour, the received response is still readable.
except BrokenPipeError:
# Python 3
pass
except IOError as e:
# Python 2 and macOS/Linux
# EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
# https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
if e.errno not in {
errno.EPIPE,
errno.ESHUTDOWN,
errno.EPROTOTYPE,
}:
raise
# Reset the timeout for the recv() on the socket
read_timeout = timeout_obj.read_timeout
# App Engine doesn't have a sock attr
if getattr(conn, "sock", None):
# In Python 3 socket.py will catch EAGAIN and return None when you
# try and read into the file pointer created by http.client, which
# instead raises a BadStatusLine exception. Instead of catching
# the exception and assuming all BadStatusLine exceptions are read
# timeouts, check for a zero timeout before making the request.
if read_timeout == 0:
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % read_timeout
)
if read_timeout is Timeout.DEFAULT_TIMEOUT:
conn.sock.settimeout(socket.getdefaulttimeout())
else: # None or a value
conn.sock.settimeout(read_timeout)
# Receive the response from the server
try:
try:
# Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
except TypeError:
# Python 3
try:
httplib_response = conn.getresponse()
except BaseException as e:
# Remove the TypeError from the exception chain in
# Python 3 (including for exceptions like SystemExit).
# Otherwise it looks like a bug in the code.
six.raise_from(e, None)
except (SocketTimeout, BaseSSLError, SocketError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
# AppEngine doesn't have a version attr.
http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
log.debug(
'%s://%s:%s "%s %s %s" %s %s',
self.scheme,
self.host,
self.port,
method,
url,
http_version,
httplib_response.status,
httplib_response.length,
)
try:
assert_header_parsing(httplib_response.msg)
except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
log.warning(
"Failed to parse headers (url=%s): %s",
self._absolute_url(url),
hpe,
exc_info=True,
)
return httplib_response
def _absolute_url(self, path):
return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
def close(self):
"""
Close all pooled connections and disable the pool.
"""
if self.pool is None:
return
# Disable access to the pool
old_pool, self.pool = self.pool, None
try:
while True:
conn = old_pool.get(block=False)
if conn:
conn.close()
except queue.Empty:
pass # Done.
def is_same_host(self, url):
"""
Check if the given ``url`` is a member of the same host as this
connection pool.
"""
if url.startswith("/"):
return True
# TODO: Add optional support for socket.gethostbyname checking.
scheme, host, port = get_host(url)
if host is not None:
host = _normalize_host(host, scheme=scheme)
# Use explicit default port for comparison when none is given
if self.port and not port:
port = port_by_scheme.get(scheme)
elif not self.port and port == port_by_scheme.get(scheme):
port = None
return (scheme, host, port) == (self.scheme, self.host, self.port)
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
assert_same_host=True,
timeout=_Default,
pool_timeout=None,
release_conn=None,
chunked=False,
body_pos=None,
**response_kw
):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
:param \\**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get("preload_content", True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = six.ensure_str(_encode_target(url))
else:
url = six.ensure_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy()
headers.update(self.proxy_headers)
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(
conn, "sock", None
)
if is_new_proxy_conn and http_tunnel_required:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
httplib_response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
)
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Pass method to Response for length checking
response_kw["request_method"] = method
# Import httplib's response into our own wrapper object
response = self.ResponseCls.from_httplib(
httplib_response,
pool=self,
connection=response_conn,
retries=retries,
**response_kw
)
# Everything went great!
clean_exit = True
except EmptyPoolError:
# Didn't get a connection from the pool, no need to clean up
clean_exit = True
release_this_conn = False
raise
except (
TimeoutError,
HTTPException,
SocketError,
ProtocolError,
BaseSSLError,
SSLError,
CertificateError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
def _is_ssl_error_message_from_http_proxy(ssl_error):
# We're trying to detect the message 'WRONG_VERSION_NUMBER' but
# SSLErrors are kinda all over the place when it comes to the message,
# so we try to cover our bases here!
message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
return (
"wrong version number" in message or "unknown protocol" in message
)
# Try to detect a common user error with proxies which is to
# set an HTTP proxy to be HTTPS when it should be 'http://'
# (ie {'http': 'http://proxy', 'https': 'https://proxy'})
# Instead we add a nice error message and point to a URL.
if (
isinstance(e, BaseSSLError)
and self.proxy
and _is_ssl_error_message_from_http_proxy(e)
):
e = ProxyError(
"Your proxy appears to only use HTTP and not HTTPS, "
"try changing your proxy URL to be HTTP. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#https-proxy-error-http-proxy",
SSLError(e),
)
elif isinstance(e, (BaseSSLError, CertificateError)):
e = SSLError(e)
elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
e = ProxyError("Cannot connect to proxy.", e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError("Connection aborted.", e)
retries = retries.increment(
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
)
retries.sleep()
# Keep track of the error for the retry warning.
err = e
finally:
if not clean_exit:
# We hit some kind of exception, handled or otherwise. We need
# to throw the connection away unless explicitly told not to.
# Close the connection, set the variable to None, and make sure
# we put the None back in the pool to avoid leaking it.
conn = conn and conn.close()
release_this_conn = True
if release_this_conn:
# Put the connection back to be reused. If the connection is
# expired then it will be None, which will get replaced with a
# fresh connection during _get_conn.
self._put_conn(conn)
if not conn:
# Try again
log.warning(
"Retrying (%r) after connection broken by '%r': %s", retries, err, url
)
return self.urlopen(
method,
url,
body,
headers,
retries,
redirect,
assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
if response.status == 303:
method = "GET"
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
response.drain_conn()
retries.sleep_for_retry(response)
log.debug("Redirecting %s -> %s", url, redirect_location)
return self.urlopen(
method,
redirect_location,
body,
headers,
retries=retries,
redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
# Check if we should retry the HTTP response.
has_retry_after = bool(response.getheader("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_status:
response.drain_conn()
raise
return response
response.drain_conn()
retries.sleep(response)
log.debug("Retry: %s", url)
return self.urlopen(
method,
url,
body,
headers,
retries=retries,
redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
return response
class HTTPSConnectionPool(HTTPConnectionPool):
"""
Same as :class:`.HTTPConnectionPool`, but HTTPS.
:class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
``assert_hostname`` and ``host`` in this order to verify connections.
If ``assert_hostname`` is False, no verification is done.
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
the connection socket into an SSL socket.
"""
scheme = "https"
ConnectionCls = HTTPSConnection
def __init__(
self,
host,
port=None,
strict=False,
timeout=Timeout.DEFAULT_TIMEOUT,
maxsize=1,
block=False,
headers=None,
retries=None,
_proxy=None,
_proxy_headers=None,
key_file=None,
cert_file=None,
cert_reqs=None,
key_password=None,
ca_certs=None,
ssl_version=None,
assert_hostname=None,
assert_fingerprint=None,
ca_cert_dir=None,
**conn_kw
):
HTTPConnectionPool.__init__(
self,
host,
port,
strict,
timeout,
maxsize,
block,
headers,
retries,
_proxy,
_proxy_headers,
**conn_kw
)
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.key_password = key_password
self.ca_certs = ca_certs
self.ca_cert_dir = ca_cert_dir
self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
def _prepare_conn(self, conn):
"""
Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
and establish the tunnel if proxy is used.
"""
if isinstance(conn, VerifiedHTTPSConnection):
conn.set_cert(
key_file=self.key_file,
key_password=self.key_password,
cert_file=self.cert_file,
cert_reqs=self.cert_reqs,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
assert_hostname=self.assert_hostname,
assert_fingerprint=self.assert_fingerprint,
)
conn.ssl_version = self.ssl_version
return conn
def _prepare_proxy(self, conn):
"""
Establishes a tunnel connection through HTTP CONNECT.
Tunnel connection is established early because otherwise httplib would
improperly set Host: header to proxy's IP:port.
"""
conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
if self.proxy.scheme == "https":
conn.tls_in_tls_required = True
conn.connect()
def _new_conn(self):
"""
Return a fresh :class:`http.client.HTTPSConnection`.
"""
self.num_connections += 1
log.debug(
"Starting new HTTPS connection (%d): %s:%s",
self.num_connections,
self.host,
self.port or "443",
)
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
raise SSLError(
"Can't connect to HTTPS URL because the SSL module is not available."
)
actual_host = self.host
actual_port = self.port
if self.proxy is not None:
actual_host = self.proxy.host
actual_port = self.proxy.port
conn = self.ConnectionCls(
host=actual_host,
port=actual_port,
timeout=self.timeout.connect_timeout,
strict=self.strict,
cert_file=self.cert_file,
key_file=self.key_file,
key_password=self.key_password,
**self.conn_kw
)
return self._prepare_conn(conn)
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
super(HTTPSConnectionPool, self)._validate_conn(conn)
# Force connect early to allow us to validate the connection.
if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
conn.connect()
if not conn.is_verified:
warnings.warn(
(
"Unverified HTTPS request is being made to host '%s'. "
"Adding certificate verification is strongly advised. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings" % conn.host
),
InsecureRequestWarning,
)
if getattr(conn, "proxy_is_verified", None) is False:
warnings.warn(
(
"Unverified HTTPS connection done to an HTTPS proxy. "
"Adding certificate verification is strongly advised. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings"
),
InsecureRequestWarning,
)
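# --- Editor's sketch (not part of the original urllib3 source) ---
# A minimal, hypothetical example of constructing the pool described above
# with certificate verification enabled. The host and CA bundle path are
# placeholders; per the class docstring, ``assert_fingerprint`` (if set)
# takes precedence over ``assert_hostname``, which takes precedence over
# the pool's ``host``. The function is illustrative and never called here.
def _example_https_pool():
    pool = HTTPSConnectionPool(
        "example.com",  # hypothetical host
        port=443,
        cert_reqs="CERT_REQUIRED",
        ca_certs="/path/to/ca_bundle.pem",  # hypothetical CA bundle path
    )
    # Performs a real network request if actually invoked.
    response = pool.request("GET", "/")
    return response.status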
def connection_from_url(url, **kw):
"""
Given a url, return an :class:`.ConnectionPool` instance of its host.
This is a shortcut for not having to parse out the scheme, host, and port
of the url before creating an :class:`.ConnectionPool` instance.
:param url:
Absolute URL string that must include the scheme. Port is optional.
:param \\**kw:
Passes additional parameters to the constructor of the appropriate
:class:`.ConnectionPool`. Useful for specifying things like
timeout, maxsize, headers, etc.
Example::
>>> conn = connection_from_url('http://google.com/')
>>> r = conn.request('GET', '/')
"""
scheme, host, port = get_host(url)
port = port or port_by_scheme.get(scheme, 80)
if scheme == "https":
return HTTPSConnectionPool(host, port=port, **kw)
else:
return HTTPConnectionPool(host, port=port, **kw)
def _normalize_host(host, scheme):
"""
Normalize hosts for comparisons and use with sockets.
"""
host = normalize_host(host, scheme)
# httplib doesn't like it when we include brackets in IPv6 addresses
# Specifically, if we include brackets but also pass the port then
# httplib crazily doubles up the square brackets on the Host header.
# Instead, we need to make sure we never pass ``None`` as the port.
# However, for backward compatibility reasons we can't actually
# *assert* that. See http://bugs.python.org/issue28539
if host.startswith("[") and host.endswith("]"):
host = host[1:-1]
return host
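# --- Editor's sketch (not part of the original urllib3 source) ---
# Illustrates the bracket handling documented above: a bracketed IPv6
# literal loses its brackets, while a plain hostname passes through.
# The exact normalization rules live in ``urllib3.util.url.normalize_host``.
def _example_normalize_host():
    assert _normalize_host("[::1]", scheme="http") == "::1"
    assert _normalize_host("example.com", scheme="http") == "example.com"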
| 39,013 | Python | .py | 931 | 30.856069 | 106 | 0.594539 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,723 | connection.py | rembo10_headphones/lib/urllib3/connection.py |
from __future__ import absolute_import
import datetime
import logging
import os
import re
import socket
import warnings
from socket import error as SocketError
from socket import timeout as SocketTimeout
from .packages import six
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
from .packages.six.moves.http_client import HTTPException # noqa: F401
from .util.proxy import create_proxy_ssl_context
try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
try:
# Python 3: not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
except NameError:
# Python 2
class ConnectionError(Exception):
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
BrokenPipeError = BrokenPipeError
except NameError: # Python 2:
class BrokenPipeError(Exception):
pass
from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
NewConnectionError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .util.ssl_ import (
assert_fingerprint,
create_urllib3_context,
is_ipaddress,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname
log = logging.getLogger(__name__)
port_by_scheme = {"http": 80, "https": 443}
# When it comes time to update this value as a part of regular maintenance
# (ie test_recent_date is failing) update it to ~6 months before the current date.
RECENT_DATE = datetime.date(2020, 7, 1)
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
class HTTPConnection(_HTTPConnection, object):
"""
Based on :class:`http.client.HTTPConnection` but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass:
.. code-block:: python
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme["http"]
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
#: Whether this connection verifies the host's certificate.
is_verified = False
#: Whether this proxy connection (if used) verifies the proxy host's
#: certificate.
proxy_is_verified = None
def __init__(self, *args, **kw):
if not six.PY2:
kw.pop("strict", None)
# Pre-set source_address.
self.source_address = kw.get("source_address")
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop("socket_options", self.default_socket_options)
# Proxy options provided by the user.
self.proxy = kw.pop("proxy", None)
self.proxy_config = kw.pop("proxy_config", None)
_HTTPConnection.__init__(self, *args, **kw)
@property
def host(self):
"""
Getter method to remove any trailing dots that indicate the hostname is an FQDN.
In general, SSL certificates don't include the trailing dot indicating a
fully-qualified domain name, and thus, they don't validate properly when
checked against a domain name that includes the dot. In addition, some
servers may not expect to receive the trailing dot when provided.
        However, the hostname with a trailing dot is critical to DNS resolution; a
        lookup with the trailing dot will resolve only the appropriate FQDN,
        whereas a lookup without the trailing dot will search the system's search
        domain list. Thus, it's important to keep the original host around for use
        only in those cases where it's appropriate (i.e., when doing a DNS lookup
        to establish the actual TCP connection across which we're going to send
        HTTP requests).
"""
return self._dns_host.rstrip(".")
@host.setter
def host(self, value):
"""
Setter for the `host` property.
We assume that only urllib3 uses the _dns_host attribute; httplib itself
only uses `host`, and it seems reasonable that other libraries follow suit.
"""
self._dns_host = value
def _new_conn(self):
"""Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw["source_address"] = self.source_address
if self.socket_options:
extra_kw["socket_options"] = self.socket_options
try:
conn = connection.create_connection(
(self._dns_host, self.port), self.timeout, **extra_kw
)
except SocketTimeout:
raise ConnectTimeoutError(
self,
"Connection to %s timed out. (connect timeout=%s)"
% (self.host, self.timeout),
)
except SocketError as e:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e
)
return conn
def _is_using_tunnel(self):
# Google App Engine's httplib does not define _tunnel_host
return getattr(self, "_tunnel_host", None)
def _prepare_conn(self, conn):
self.sock = conn
if self._is_using_tunnel():
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
def putrequest(self, method, url, *args, **kwargs):
""" """
# Empty docstring because the indentation of CPython's implementation
# is broken but we don't want this method in our documentation.
match = _CONTAINS_CONTROL_CHAR_RE.search(method)
if match:
raise ValueError(
"Method cannot contain non-token characters %r (found at least %r)"
% (method, match.group())
)
return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
def putheader(self, header, *values):
""" """
if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
_HTTPConnection.putheader(self, header, *values)
elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
raise ValueError(
"urllib3.util.SKIP_HEADER only supports '%s'"
% ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
)
def request(self, method, url, body=None, headers=None):
if headers is None:
headers = {}
else:
# Avoid modifying the headers passed into .request()
headers = headers.copy()
if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
headers["User-Agent"] = _get_default_user_agent()
super(HTTPConnection, self).request(method, url, body=body, headers=headers)
def request_chunked(self, method, url, body=None, headers=None):
"""
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
headers = headers or {}
header_keys = set([six.ensure_str(k.lower()) for k in headers])
skip_accept_encoding = "accept-encoding" in header_keys
skip_host = "host" in header_keys
self.putrequest(
method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
)
if "user-agent" not in header_keys:
self.putheader("User-Agent", _get_default_user_agent())
for header, value in headers.items():
self.putheader(header, value)
if "transfer-encoding" not in header_keys:
self.putheader("Transfer-Encoding", "chunked")
self.endheaders()
if body is not None:
stringish_types = six.string_types + (bytes,)
if isinstance(body, stringish_types):
body = (body,)
for chunk in body:
if not chunk:
continue
if not isinstance(chunk, bytes):
chunk = chunk.encode("utf8")
len_str = hex(len(chunk))[2:]
to_send = bytearray(len_str.encode())
to_send += b"\r\n"
to_send += chunk
to_send += b"\r\n"
self.send(to_send)
# After the if clause, to always have a closed body
self.send(b"0\r\n\r\n")
class HTTPSConnection(HTTPConnection):
"""
Many of the parameters to this constructor are passed to the underlying SSL
socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
"""
default_port = port_by_scheme["https"]
cert_reqs = None
ca_certs = None
ca_cert_dir = None
ca_cert_data = None
ssl_version = None
assert_fingerprint = None
tls_in_tls_required = False
def __init__(
self,
host,
port=None,
key_file=None,
cert_file=None,
key_password=None,
strict=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
ssl_context=None,
server_hostname=None,
**kw
):
HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
self.key_password = key_password
self.ssl_context = ssl_context
self.server_hostname = server_hostname
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
self._protocol = "https"
def set_cert(
self,
key_file=None,
cert_file=None,
cert_reqs=None,
key_password=None,
ca_certs=None,
assert_hostname=None,
assert_fingerprint=None,
ca_cert_dir=None,
ca_cert_data=None,
):
"""
This method should only be called once, before the connection is used.
"""
# If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
# have an SSLContext object in which case we'll use its verify_mode.
if cert_reqs is None:
if self.ssl_context is not None:
cert_reqs = self.ssl_context.verify_mode
else:
cert_reqs = resolve_cert_reqs(None)
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.key_password = key_password
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
self.ca_cert_data = ca_cert_data
def connect(self):
# Add certificate verification
conn = self._new_conn()
hostname = self.host
tls_in_tls = False
if self._is_using_tunnel():
if self.tls_in_tls_required:
conn = self._connect_tls_proxy(hostname, conn)
tls_in_tls = True
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
server_hostname = hostname
if self.server_hostname is not None:
server_hostname = self.server_hostname
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn(
(
"System time is way off (before {0}). This will probably "
"lead to SSL verification errors"
).format(RECENT_DATE),
SystemTimeWarning,
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
default_ssl_context = False
if self.ssl_context is None:
default_ssl_context = True
self.ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(self.ssl_version),
cert_reqs=resolve_cert_reqs(self.cert_reqs),
)
context = self.ssl_context
context.verify_mode = resolve_cert_reqs(self.cert_reqs)
# Try to load OS default certs if none are given.
# Works well on Windows (requires Python3.4+)
if (
not self.ca_certs
and not self.ca_cert_dir
and not self.ca_cert_data
and default_ssl_context
and hasattr(context, "load_default_certs")
):
context.load_default_certs()
self.sock = ssl_wrap_socket(
sock=conn,
keyfile=self.key_file,
certfile=self.cert_file,
key_password=self.key_password,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
ca_cert_data=self.ca_cert_data,
server_hostname=server_hostname,
ssl_context=context,
tls_in_tls=tls_in_tls,
)
# If we're using all defaults and the connection
# is TLSv1 or TLSv1.1 we throw a DeprecationWarning
# for the host.
if (
default_ssl_context
and self.ssl_version is None
and hasattr(self.sock, "version")
and self.sock.version() in {"TLSv1", "TLSv1.1"}
):
warnings.warn(
"Negotiating TLSv1/TLSv1.1 by default is deprecated "
"and will be disabled in urllib3 v2.0.0. Connecting to "
"'%s' with '%s' can be enabled by explicitly opting-in "
"with 'ssl_version'" % (self.host, self.sock.version()),
DeprecationWarning,
)
if self.assert_fingerprint:
assert_fingerprint(
self.sock.getpeercert(binary_form=True), self.assert_fingerprint
)
elif (
context.verify_mode != ssl.CERT_NONE
and not getattr(context, "check_hostname", False)
and self.assert_hostname is not False
):
# While urllib3 attempts to always turn off hostname matching from
# the TLS library, this cannot always be done. So we check whether
            # the TLS library still thinks it's matching hostnames.
cert = self.sock.getpeercert()
if not cert.get("subjectAltName", ()):
warnings.warn(
(
"Certificate for {0} has no `subjectAltName`, falling back to check for a "
"`commonName` for now. This feature is being removed by major browsers and "
"deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
"for details.)".format(hostname)
),
SubjectAltNameWarning,
)
_match_hostname(cert, self.assert_hostname or server_hostname)
self.is_verified = (
context.verify_mode == ssl.CERT_REQUIRED
or self.assert_fingerprint is not None
)
def _connect_tls_proxy(self, hostname, conn):
"""
Establish a TLS connection to the proxy using the provided SSL context.
"""
proxy_config = self.proxy_config
ssl_context = proxy_config.ssl_context
if ssl_context:
# If the user provided a proxy context, we assume CA and client
# certificates have already been set
return ssl_wrap_socket(
sock=conn,
server_hostname=hostname,
ssl_context=ssl_context,
)
ssl_context = create_proxy_ssl_context(
self.ssl_version,
self.cert_reqs,
self.ca_certs,
self.ca_cert_dir,
self.ca_cert_data,
)
# If no cert was provided, use only the default options for server
# certificate validation
socket = ssl_wrap_socket(
sock=conn,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
ca_cert_data=self.ca_cert_data,
server_hostname=hostname,
ssl_context=ssl_context,
)
if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
ssl_context, "check_hostname", False
):
# While urllib3 attempts to always turn off hostname matching from
# the TLS library, this cannot always be done. So we check whether
        # the TLS library still thinks it's matching hostnames.
cert = socket.getpeercert()
if not cert.get("subjectAltName", ()):
warnings.warn(
(
"Certificate for {0} has no `subjectAltName`, falling back to check for a "
"`commonName` for now. This feature is being removed by major browsers and "
"deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
"for details.)".format(hostname)
),
SubjectAltNameWarning,
)
_match_hostname(cert, hostname)
self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
return socket
def _match_hostname(cert, asserted_hostname):
    # Our upstream implementation of ssl.match_hostname() applies this
    # normalization only to IP addresses, so it doesn't affect DNS SANs;
    # we do the same thing here.
stripped_hostname = asserted_hostname.strip("u[]")
if is_ipaddress(stripped_hostname):
asserted_hostname = stripped_hostname
try:
match_hostname(cert, asserted_hostname)
except CertificateError as e:
log.warning(
"Certificate did not match expected hostname: %s. Certificate: %s",
asserted_hostname,
cert,
)
# Add cert to exception and reraise so client code can inspect
# the cert when catching the exception, if they want to
e._peer_cert = cert
raise
def _get_default_user_agent():
return "python-urllib3/%s" % __version__
class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""
pass
if not ssl:
HTTPSConnection = DummyConnection # noqa: F811
VerifiedHTTPSConnection = HTTPSConnection
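# --- Editor's sketch (not part of the original urllib3 source) ---
# Demonstrates the ``SKIP_HEADER`` behaviour implemented in ``putheader``
# above: passing the sentinel for a header in ``SKIPPABLE_HEADERS``
# ("accept-encoding", "host", "user-agent") suppresses it entirely.
# The host is hypothetical, and ``endheaders()`` is omitted so no actual
# network traffic is implied.
def _example_skip_header():
    conn = HTTPConnection("example.com", 80)  # hypothetical host
    conn.putrequest("GET", "/", skip_host=True, skip_accept_encoding=True)
    conn.putheader("User-Agent", SKIP_HEADER)  # silently dropped, not sent
    conn.putheader("Host", "example.com")  # sent normally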
| 20,076 | Python | .py | 473 | 32.515856 | 101 | 0.613831 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,724 | fields.py | rembo10_headphones/lib/urllib3/fields.py |
from __future__ import absolute_import
import email.utils
import mimetypes
import re
from .packages import six
def guess_content_type(filename, default="application/octet-stream"):
"""
Guess the "Content-Type" of a file.
:param filename:
The filename to guess the "Content-Type" of using :mod:`mimetypes`.
:param default:
If no "Content-Type" can be guessed, default to `default`.
"""
if filename:
return mimetypes.guess_type(filename)[0] or default
return default
def format_header_param_rfc2231(name, value):
"""
Helper function to format and quote a single header parameter using the
strategy defined in RFC 2231.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows
`RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
        The value of the parameter, provided as ``bytes`` or ``str``.
    :returns:
        An RFC-2231-formatted unicode string.
"""
if isinstance(value, six.binary_type):
value = value.decode("utf-8")
if not any(ch in value for ch in '"\\\r\n'):
result = u'%s="%s"' % (name, value)
try:
result.encode("ascii")
except (UnicodeEncodeError, UnicodeDecodeError):
pass
else:
return result
if six.PY2: # Python 2:
value = value.encode("utf-8")
# encode_rfc2231 accepts an encoded string and returns an ascii-encoded
# string in Python 2 but accepts and returns unicode strings in Python 3
value = email.utils.encode_rfc2231(value, "utf-8")
value = "%s*=%s" % (name, value)
if six.PY2: # Python 2:
value = value.decode("utf-8")
return value
_HTML5_REPLACEMENTS = {
    # Replace '"' with '%22'.
    u"\u0022": u"%22",
# Replace "\" with "\\".
u"\u005C": u"\u005C\u005C",
}
# All control characters from 0x00 to 0x1F *except* 0x1B.
_HTML5_REPLACEMENTS.update(
{
six.unichr(cc): u"%{:02X}".format(cc)
for cc in range(0x00, 0x1F + 1)
if cc not in (0x1B,)
}
)
def _replace_multiple(value, needles_and_replacements):
def replacer(match):
return needles_and_replacements[match.group(0)]
pattern = re.compile(
r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
)
result = pattern.sub(replacer, value)
return result
def format_header_param_html5(name, value):
"""
Helper function to format and quote a single header parameter using the
HTML5 strategy.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows the `HTML5 Working Draft
Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
.. _HTML5 Working Draft Section 4.10.22.7:
https://w3c.github.io/html/sec-forms.html#multipart-form-data
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
        The value of the parameter, provided as ``bytes`` or ``str``.
    :returns:
        A unicode string, stripped of troublesome characters.
"""
if isinstance(value, six.binary_type):
value = value.decode("utf-8")
value = _replace_multiple(value, _HTML5_REPLACEMENTS)
return u'%s="%s"' % (name, value)
# For backwards-compatibility.
format_header_param = format_header_param_html5
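# --- Editor's sketch (not part of the original urllib3 source) ---
# Shows the HTML5 quoting strategy above on a filename containing a double
# quote: '"' becomes '%22' (see ``_HTML5_REPLACEMENTS``), and the result is
# wrapped in ``name="value"`` form.
def _example_format_header_param_html5():
    assert format_header_param_html5("filename", u'sp"am.txt') == u'filename="sp%22am.txt"'
    assert format_header_param_html5("name", u"field") == u'name="field"'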
class RequestField(object):
"""
A data container for request body parameters.
:param name:
The name of this request field. Must be unicode.
:param data:
The data/value body.
:param filename:
An optional filename of the request field. Must be unicode.
:param headers:
An optional dict-like object of headers to initially use for the field.
:param header_formatter:
An optional callable that is used to encode and format the headers. By
default, this is :func:`format_header_param_html5`.
"""
def __init__(
self,
name,
data,
filename=None,
headers=None,
header_formatter=format_header_param_html5,
):
self._name = name
self._filename = filename
self.data = data
self.headers = {}
if headers:
self.headers = dict(headers)
self.header_formatter = header_formatter
@classmethod
def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
"""
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
Supports constructing :class:`~urllib3.fields.RequestField` from
parameter of key/value strings AND key/filetuple. A filetuple is a
(filename, data, MIME type) tuple where the MIME type is optional.
For example::
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
Field names and filenames must be unicode.
"""
if isinstance(value, tuple):
if len(value) == 3:
filename, data, content_type = value
else:
filename, data = value
content_type = guess_content_type(filename)
else:
filename = None
content_type = None
data = value
request_param = cls(
fieldname, data, filename=filename, header_formatter=header_formatter
)
request_param.make_multipart(content_type=content_type)
return request_param
def _render_part(self, name, value):
"""
Overridable helper function to format a single header parameter. By
default, this calls ``self.header_formatter``.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as a unicode string.
"""
return self.header_formatter(name, value)
def _render_parts(self, header_parts):
"""
Helper function to format and quote a single header.
Useful for single headers that are composed of multiple items. E.g.,
'Content-Disposition' fields.
:param header_parts:
A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
as `k1="v1"; k2="v2"; ...`.
"""
parts = []
iterable = header_parts
if isinstance(header_parts, dict):
iterable = header_parts.items()
for name, value in iterable:
if value is not None:
parts.append(self._render_part(name, value))
return u"; ".join(parts)
def render_headers(self):
"""
Renders the headers for this request field.
"""
lines = []
sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
for sort_key in sort_keys:
if self.headers.get(sort_key, False):
lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
for header_name, header_value in self.headers.items():
if header_name not in sort_keys:
if header_value:
lines.append(u"%s: %s" % (header_name, header_value))
lines.append(u"\r\n")
return u"\r\n".join(lines)
def make_multipart(
self, content_disposition=None, content_type=None, content_location=None
):
"""
Makes this request field into a multipart request field.
This method overrides "Content-Disposition", "Content-Type" and
"Content-Location" headers to the request parameter.
:param content_type:
The 'Content-Type' of the request body.
:param content_location:
The 'Content-Location' of the request body.
"""
self.headers["Content-Disposition"] = content_disposition or u"form-data"
self.headers["Content-Disposition"] += u"; ".join(
[
u"",
self._render_parts(
((u"name", self._name), (u"filename", self._filename))
),
]
)
self.headers["Content-Type"] = content_type
self.headers["Content-Location"] = content_location
| 8,579 | Python | .py | 217 | 31.193548 | 88 | 0.622878 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,725 | filepost.py | rembo10_headphones/lib/urllib3/filepost.py |
from __future__ import absolute_import
import binascii
import codecs
import os
from io import BytesIO
from .fields import RequestField
from .packages import six
from .packages.six import b
writer = codecs.lookup("utf-8")[3]
def choose_boundary():
"""
Our embarrassingly-simple replacement for mimetools.choose_boundary.
"""
boundary = binascii.hexlify(os.urandom(16))
if not six.PY2:
boundary = boundary.decode("ascii")
return boundary
def iter_field_objects(fields):
"""
Iterate over fields.
Supports list of (k, v) tuples and dicts, and lists of
:class:`~urllib3.fields.RequestField`.
"""
if isinstance(fields, dict):
i = six.iteritems(fields)
else:
i = iter(fields)
for field in i:
if isinstance(field, RequestField):
yield field
else:
yield RequestField.from_tuples(*field)
def iter_fields(fields):
"""
.. deprecated:: 1.6
Iterate over fields.
The addition of :class:`~urllib3.fields.RequestField` makes this function
obsolete. Instead, use :func:`iter_field_objects`, which returns
:class:`~urllib3.fields.RequestField` objects.
Supports list of (k, v) tuples and dicts.
"""
if isinstance(fields, dict):
return ((k, v) for k, v in six.iteritems(fields))
return ((k, v) for k, v in fields)
def encode_multipart_formdata(fields, boundary=None):
"""
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
:param fields:
Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
:param boundary:
If not specified, then a random boundary will be generated using
:func:`urllib3.filepost.choose_boundary`.
"""
body = BytesIO()
if boundary is None:
boundary = choose_boundary()
for field in iter_field_objects(fields):
body.write(b("--%s\r\n" % (boundary)))
writer(body).write(field.render_headers())
data = field.data
if isinstance(data, int):
data = str(data) # Backwards compatibility
if isinstance(data, six.text_type):
writer(body).write(data)
else:
body.write(data)
body.write(b"\r\n")
body.write(b("--%s--\r\n" % (boundary)))
content_type = str("multipart/form-data; boundary=%s" % boundary)
return body.getvalue(), content_type
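# --- Editor's sketch (not part of the original urllib3 source) ---
# A minimal, hypothetical use of ``encode_multipart_formdata`` above with
# the tuple forms accepted by ``RequestField.from_tuples``.
def _example_encode_multipart():
    body, content_type = encode_multipart_formdata(
        {
            "foo": "bar",
            "fakefile": ("foofile.txt", "contents of foofile"),
        }
    )
    # ``content_type`` looks like "multipart/form-data; boundary=<hex>".
    return body, content_type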
| 2,440 | Python | .py | 70 | 28.657143 | 85 | 0.657985 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
8,726 | six.py | rembo10_headphones/lib/urllib3/packages/six.py |
# Copyright (c) 2010-2020 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Utilities for writing code that runs on Python 2 and 3"""
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.16.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = (str,)
integer_types = (int,)
class_types = (type,)
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = (basestring,)
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
if PY34:
from importlib.util import spec_from_loader
else:
spec_from_loader = None
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python 3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def find_spec(self, fullname, path, target=None):
if fullname in self.known_modules:
return spec_from_loader(fullname, self)
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
def create_module(self, spec):
return self.load_module(spec.name)
def exec_module(self, module):
pass
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute(
"filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"
),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute(
"reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"
),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute(
"zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule(
"collections_abc",
"collections",
"collections.abc" if sys.version_info >= (3, 3) else "collections",
),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
MovedModule(
"_dummy_thread",
"dummy_thread",
"_dummy_thread" if sys.version_info < (3, 9) else "_thread",
),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule(
"email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"
),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute(
"unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"
),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(
Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse",
"moves.urllib.parse",
)
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(
Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error",
"moves.urllib.error",
)
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(
Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request",
"moves.urllib.request",
)
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(
Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response",
"moves.urllib.response",
)
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = (
_urllib_robotparser_moved_attributes
)
_importer._add_module(
Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser",
"moves.urllib.robotparser",
)
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ["parse", "error", "request", "response", "robotparser"]
_importer._add_module(
Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib"
)
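# Illustrative sketch, not part of the original module: consuming the
# namespace assembled above. The import resolves lazily through the
# _SixMetaPathImporter registered on sys.meta_path, so it works unchanged
# on Python 2 (urlparse/urllib) and Python 3 (urllib.parse).
def _demo_urllib_parse_moves():
    from six.moves.urllib.parse import urlencode, urlparse
    parts = urlparse("https://example.com/search?q=mako")
    return parts.netloc, urlencode({"q": "mako"})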
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
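# Illustrative sketch, not part of the original module: registering and then
# unregistering a custom move. "winreg_demo" is a hypothetical name chosen
# for this example; six itself already ships an equivalent "winreg" move.
def _demo_custom_move():
    add_move(MovedModule("winreg_demo", "_winreg", "winreg"))
    try:
        return hasattr(_MovedItems, "winreg_demo")
    finally:
        remove_move("winreg_demo")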
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(
get_unbound_function, """Get the function out of a possibly unbound function"""
)
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(
iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary."
)
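# Illustrative sketch, not part of the original module: version-neutral,
# lazy dictionary traversal with the helpers documented above. No
# intermediate list is built on either Python line.
def _demo_dict_iteration(d):
    total = 0
    for _key, value in iteritems(d):
        total += value
    return sorted(viewkeys(d)), total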
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
del io
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_assertNotRegex = "assertNotRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
_assertNotRegex = "assertNotRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
def assertNotRegex(self, *args, **kwargs):
return getattr(self, _assertNotRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec ("""exec _code_ in _globs_, _locs_""")
exec_(
"""def reraise(tp, value, tb=None):
try:
raise tp, value, tb
finally:
tb = None
"""
)
if sys.version_info[:2] > (3,):
exec_(
"""def raise_from(value, from_value):
try:
raise value from from_value
finally:
value = None
"""
)
else:
def raise_from(value, from_value):
raise value
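# Illustrative sketch, not part of the original module: the canonical use of
# reraise() -- re-raising a caught exception with its original traceback
# from version-neutral code.
def _demo_reraise():
    try:
        {}["missing"]
    except KeyError:
        exc_type, exc_value, exc_tb = sys.exc_info()
        reraise(exc_type, exc_value, exc_tb)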
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (
isinstance(fp, file)
and isinstance(data, unicode)
and fp.encoding is not None
):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
    # This does exactly what the :func:`py3:functools.update_wrapper`
    # function does on Python versions after 3.2. It sets the ``__wrapped__``
    # attribute on the ``wrapper`` object and it doesn't raise an error if
    # any of the attributes mentioned in ``assigned`` and ``updated`` are
    # missing on the ``wrapped`` object.
def _update_wrapper(
wrapper,
wrapped,
assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES,
):
for attr in assigned:
try:
value = getattr(wrapped, attr)
except AttributeError:
continue
else:
setattr(wrapper, attr, value)
for attr in updated:
getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
wrapper.__wrapped__ = wrapped
return wrapper
_update_wrapper.__doc__ = functools.update_wrapper.__doc__
def wraps(
wrapped,
assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES,
):
return functools.partial(
_update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
)
wraps.__doc__ = functools.wraps.__doc__
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(type):
def __new__(cls, name, this_bases, d):
if sys.version_info[:2] >= (3, 7):
# This version introduced PEP 560 that requires a bit
# of extra care (we mimic what is done by __build_class__).
resolved_bases = types.resolve_bases(bases)
if resolved_bases is not bases:
d["__orig_bases__"] = bases
else:
resolved_bases = bases
return meta(name, resolved_bases, d)
@classmethod
def __prepare__(cls, name, this_bases):
return meta.__prepare__(name, bases)
return type.__new__(metaclass, "temporary_class", (), {})
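# Illustrative sketch, not part of the original module: with_metaclass() as
# the sole base class. The temporary metaclass above swaps itself out, so
# _DemoRegistryMeta.__new__ runs exactly once, for the real class.
class _DemoRegistryMeta(type):
    registry = []
    def __new__(cls, name, bases, d):
        cls.registry.append(name)
        return super(_DemoRegistryMeta, cls).__new__(cls, name, bases, d)
class _DemoRegistered(with_metaclass(_DemoRegistryMeta, object)):
    pass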
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get("__slots__")
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop("__dict__", None)
orig_vars.pop("__weakref__", None)
if hasattr(cls, "__qualname__"):
orig_vars["__qualname__"] = cls.__qualname__
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
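# Illustrative sketch, not part of the original module: the decorator form.
# Note how the wrapper above strips the __slots__ descriptors from the class
# dict before rebuilding, so the slots are recreated cleanly.
class _DemoLabelMeta(type):
    def __new__(cls, name, bases, d):
        d.setdefault("label", name.upper())
        return super(_DemoLabelMeta, cls).__new__(cls, name, bases, d)
@add_metaclass(_DemoLabelMeta)
class _DemoSlotted(object):
    __slots__ = ("value",)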
def ensure_binary(s, encoding="utf-8", errors="strict"):
"""Coerce **s** to six.binary_type.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> encoded to `bytes`
- `bytes` -> `bytes`
"""
if isinstance(s, binary_type):
return s
if isinstance(s, text_type):
return s.encode(encoding, errors)
raise TypeError("not expecting type '%s'" % type(s))
def ensure_str(s, encoding="utf-8", errors="strict"):
"""Coerce *s* to `str`.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str`
"""
# Optimization: Fast return for the common case.
if type(s) is str:
return s
if PY2 and isinstance(s, text_type):
return s.encode(encoding, errors)
elif PY3 and isinstance(s, binary_type):
return s.decode(encoding, errors)
elif not isinstance(s, (text_type, binary_type)):
raise TypeError("not expecting type '%s'" % type(s))
return s
def ensure_text(s, encoding="utf-8", errors="strict"):
"""Coerce *s* to six.text_type.
For Python 2:
- `unicode` -> `unicode`
- `str` -> `unicode`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str`
"""
if isinstance(s, binary_type):
return s.decode(encoding, errors)
elif isinstance(s, text_type):
return s
else:
raise TypeError("not expecting type '%s'" % type(s))
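# Illustrative sketch, not part of the original module: normalising a mixed
# str/bytes input at an API boundary with the three ensure_* helpers.
def _demo_ensure_helpers(raw):
    as_bytes = ensure_binary(raw)  # always binary_type
    as_native = ensure_str(raw)  # always the native str type
    as_text = ensure_text(raw)  # always text_type
    return as_bytes, as_native, as_text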
def python_2_unicode_compatible(klass):
"""
A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if "__str__" not in klass.__dict__:
raise ValueError(
"@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." % klass.__name__
)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode("utf-8")
return klass
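# Illustrative sketch, not part of the original module: define __str__ once,
# returning text, and let the decorator add the Python 2 __unicode__/__str__
# pair.
@python_2_unicode_compatible
class _DemoGreeting(object):
    def __str__(self):
        return u"caf\u00e9"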
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (
type(importer).__name__ == "_SixMetaPathImporter"
and importer.name == __name__
):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| 34,666 | Python | .py | 858 | 33.679487 | 87 | 0.642502 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,727 | makefile.py | rembo10_headphones/lib/urllib3/packages/backports/makefile.py |
# -*- coding: utf-8 -*-
"""
backports.makefile
~~~~~~~~~~~~~~~~~~
Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
"""
import io
from socket import SocketIO
def backport_makefile(
self, mode="r", buffering=None, encoding=None, errors=None, newline=None
):
"""
Backport of ``socket.makefile`` from Python 3.5.
"""
if not set(mode) <= {"r", "w", "b"}:
raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = SocketIO(self, rawmode)
self._makefile_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text
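# Illustrative sketch, not part of this backport: the contrib modules in this
# package attach the function as a method on their socket wrappers (e.g.
# ``WrappedSocket.makefile = backport_makefile`` on Python 3). The wrapper
# must expose ``recv_into``/``send``/``fileno`` (SocketIO calls them) and a
# ``_makefile_refs`` counter, which this function increments.
def _attach_makefile(wrapped_socket_cls):
    # Hypothetical helper name; gives a urllib3-style socket wrapper a
    # Python-3-like makefile().
    wrapped_socket_cls.makefile = backport_makefile
    return wrapped_socket_cls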
| 1,417 | Python | .py | 48 | 24.208333 | 76 | 0.628111 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,728 | appengine.py | rembo10_headphones/lib/urllib3/contrib/appengine.py |
"""
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Example usage::
from urllib3 import PoolManager
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
if is_appengine_sandbox():
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
http = AppEngineManager()
else:
# PoolManager uses a socket-level API behind the scenes
http = PoolManager()
r = http.request('GET', 'https://google.com/')
There are `limitations <https://cloud.google.com/appengine/docs/python/\
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:
1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
cost-effective in many circumstances as long as your usage is within the
limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
Sockets also have `limitations and restrictions
<https://cloud.google.com/appengine/docs/python/sockets/\
#limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
To use sockets, be sure to specify the following in your ``app.yaml``::
env_variables:
GAE_USE_SOCKETS_HTTPLIB : 'true'
3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
"""
from __future__ import absolute_import
import io
import logging
import warnings
from ..exceptions import (
HTTPError,
HTTPWarning,
MaxRetryError,
ProtocolError,
SSLError,
TimeoutError,
)
from ..packages.six.moves.urllib.parse import urljoin
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.retry import Retry
from ..util.timeout import Timeout
from . import _appengine_environ
try:
from google.appengine.api import urlfetch
except ImportError:
urlfetch = None
log = logging.getLogger(__name__)
class AppEnginePlatformWarning(HTTPWarning):
pass
class AppEnginePlatformError(HTTPError):
pass
class AppEngineManager(RequestMethods):
"""
Connection manager for Google App Engine sandbox applications.
This manager uses the URLFetch service directly instead of using the
emulated httplib, and is subject to URLFetch limitations as described in
the App Engine documentation `here
<https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Notably it will raise an :class:`AppEnginePlatformError` if:
        * URLFetch is not available.
        * You attempt to use this on App Engine Flexible, as full socket
          support is available there.
        * A request is larger than 10 megabytes.
        * A response is larger than 32 megabytes.
        * You use an unsupported request method such as OPTIONS.
Beyond those cases, it will raise normal urllib3 errors.
"""
def __init__(
self,
headers=None,
retries=None,
validate_certificate=True,
urlfetch_retries=True,
):
if not urlfetch:
raise AppEnginePlatformError(
"URLFetch is not available in this environment."
)
warnings.warn(
"urllib3 is using URLFetch on Google App Engine sandbox instead "
"of sockets. To use sockets directly instead of URLFetch see "
"https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
AppEnginePlatformWarning,
)
RequestMethods.__init__(self, headers)
self.validate_certificate = validate_certificate
self.urlfetch_retries = urlfetch_retries
self.retries = retries or Retry.DEFAULT
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# Return False to re-raise any potential exceptions
return False
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
timeout=Timeout.DEFAULT_TIMEOUT,
**response_kw
):
retries = self._get_retries(retries, redirect)
try:
follow_redirects = redirect and retries.redirect != 0 and retries.total
response = urlfetch.fetch(
url,
payload=body,
method=method,
headers=headers or {},
allow_truncated=False,
follow_redirects=self.urlfetch_retries and follow_redirects,
deadline=self._get_absolute_timeout(timeout),
validate_certificate=self.validate_certificate,
)
except urlfetch.DeadlineExceededError as e:
raise TimeoutError(self, e)
except urlfetch.InvalidURLError as e:
if "too large" in str(e):
raise AppEnginePlatformError(
"URLFetch request too large, URLFetch only "
"supports requests up to 10mb in size.",
e,
)
raise ProtocolError(e)
except urlfetch.DownloadError as e:
if "Too many redirects" in str(e):
raise MaxRetryError(self, url, reason=e)
raise ProtocolError(e)
except urlfetch.ResponseTooLargeError as e:
raise AppEnginePlatformError(
"URLFetch response too large, URLFetch only supports"
"responses up to 32mb in size.",
e,
)
except urlfetch.SSLCertificateError as e:
raise SSLError(e)
except urlfetch.InvalidMethodError as e:
raise AppEnginePlatformError(
"URLFetch does not support method: %s" % method, e
)
http_response = self._urlfetch_response_to_http_response(
response, retries=retries, **response_kw
)
# Handle redirect?
redirect_location = redirect and http_response.get_redirect_location()
if redirect_location:
# Check for redirect response
if self.urlfetch_retries and retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
else:
if http_response.status == 303:
method = "GET"
try:
retries = retries.increment(
method, url, response=http_response, _pool=self
)
except MaxRetryError:
if retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
return http_response
retries.sleep_for_retry(http_response)
log.debug("Redirecting %s -> %s", url, redirect_location)
redirect_url = urljoin(url, redirect_location)
return self.urlopen(
method,
redirect_url,
body,
headers,
retries=retries,
redirect=redirect,
timeout=timeout,
**response_kw
)
# Check if we should retry the HTTP response.
has_retry_after = bool(http_response.getheader("Retry-After"))
if retries.is_retry(method, http_response.status, has_retry_after):
retries = retries.increment(method, url, response=http_response, _pool=self)
log.debug("Retry: %s", url)
retries.sleep(http_response)
return self.urlopen(
method,
url,
body=body,
headers=headers,
retries=retries,
redirect=redirect,
timeout=timeout,
**response_kw
)
return http_response
def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
if is_prod_appengine():
# Production GAE handles deflate encoding automatically, but does
# not remove the encoding header.
content_encoding = urlfetch_resp.headers.get("content-encoding")
if content_encoding == "deflate":
del urlfetch_resp.headers["content-encoding"]
transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
# We have a full response's content,
# so let's make sure we don't report ourselves as chunked data.
if transfer_encoding == "chunked":
encodings = transfer_encoding.split(",")
encodings.remove("chunked")
urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
original_response = HTTPResponse(
# In order for decoding to work, we must present the content as
# a file-like object.
body=io.BytesIO(urlfetch_resp.content),
msg=urlfetch_resp.header_msg,
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
**response_kw
)
return HTTPResponse(
body=io.BytesIO(urlfetch_resp.content),
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
original_response=original_response,
**response_kw
)
def _get_absolute_timeout(self, timeout):
if timeout is Timeout.DEFAULT_TIMEOUT:
return None # Defer to URLFetch's default.
if isinstance(timeout, Timeout):
if timeout._read is not None or timeout._connect is not None:
warnings.warn(
"URLFetch does not support granular timeout settings, "
"reverting to total or default URLFetch timeout.",
AppEnginePlatformWarning,
)
return timeout.total
return timeout
def _get_retries(self, retries, redirect):
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if retries.connect or retries.read or retries.redirect:
warnings.warn(
"URLFetch only supports total retries and does not "
"recognize connect, read, or redirect retry parameters.",
AppEnginePlatformWarning,
)
return retries
# Alias methods from _appengine_environ to maintain public API interface.
is_appengine = _appengine_environ.is_appengine
is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
is_local_appengine = _appengine_environ.is_local_appengine
is_prod_appengine = _appengine_environ.is_prod_appengine
is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
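# Illustrative sketch, not part of this module: constructing the manager with
# total-only retries, which is all URLFetch understands (see _get_retries
# above). Only meaningful inside a first-generation App Engine sandbox where
# the urlfetch API is importable.
def _demo_appengine_manager():
    http = AppEngineManager(retries=Retry(total=3))
    response = http.request("GET", "https://example.com/")
    return response.status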
| 11,010 | Python | .py | 258 | 32.20155 | 88 | 0.630236 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,729 | securetransport.py | rembo10_headphones/lib/urllib3/contrib/securetransport.py |
"""
SecureTransport support for urllib3 via ctypes.
This makes platform-native TLS available to urllib3 users on macOS without the
use of a compiler. This is an important feature because the Python Package
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
this is to give macOS users an alternative solution to the problem, and that
solution is to use SecureTransport.
We use ctypes here because this solution must not require a compiler. That's
because pip is not allowed to require a compiler either.
This is not intended to be a seriously long-term solution to this problem.
The hope is that PEP 543 will eventually solve this issue for us, at which
point we can retire this contrib module. But in the short term, we need to
solve the impending tire fire that is Python on Mac without this kind of
contrib module. So...here we are.
To use this module, simply import and inject it::
import urllib3.contrib.securetransport
urllib3.contrib.securetransport.inject_into_urllib3()
Happy TLSing!
This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:
.. code-block::
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
import six
from .. import util
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
from ._securetransport.low_level import (
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
)
try: # Platform-specific: Python 2
from socket import _fileobject
except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
# SNI always works
HAS_SNI = True
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
# This dictionary is used by the read callback to obtain a handle to the
# calling wrapped socket. This is a pretty silly approach, but for now it'll
# do. I feel like I should be able to smuggle a handle to the wrapped socket
# directly in the SSLConnectionRef, but for now this approach will work I
# guess.
#
# We need to lock around this structure for inserts, but we don't do it for
# reads/writes in the callbacks. The reasoning here goes as follows:
#
# 1. It is not possible to call into the callbacks before the dictionary is
# populated, so once in the callback the id must be in the dictionary.
# 2. The callbacks don't mutate the dictionary, they only read from it, and
# so cannot conflict with any of the insertions.
#
# This is good: if we had to lock in the callbacks we'd drastically slow down
# the performance of this code.
_connection_refs = weakref.WeakValueDictionary()
_connection_ref_lock = threading.Lock()
# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
# for no better reason than we need *a* limit, and this one is right there.
SSL_WRITE_BLOCKSIZE = 16384
# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
# individual cipher suites. We need to do this because this is how
# SecureTransport wants them.
CIPHER_SUITES = [
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_AES_256_GCM_SHA384,
SecurityConst.TLS_AES_128_GCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_AES_128_CCM_8_SHA256,
SecurityConst.TLS_AES_128_CCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
]
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
# TLSv1 to 1.2 are supported on macOS 10.8+
_protocol_to_min_max = {
util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
}
if hasattr(ssl, "PROTOCOL_SSLv2"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
SecurityConst.kSSLProtocol2,
SecurityConst.kSSLProtocol2,
)
if hasattr(ssl, "PROTOCOL_SSLv3"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
SecurityConst.kSSLProtocol3,
SecurityConst.kSSLProtocol3,
)
if hasattr(ssl, "PROTOCOL_TLSv1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
SecurityConst.kTLSProtocol1,
SecurityConst.kTLSProtocol1,
)
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
SecurityConst.kTLSProtocol11,
SecurityConst.kTLSProtocol11,
)
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
SecurityConst.kTLSProtocol12,
SecurityConst.kTLSProtocol12,
)
def inject_into_urllib3():
"""
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
"""
util.SSLContext = SecureTransportContext
util.ssl_.SSLContext = SecureTransportContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
util.IS_SECURETRANSPORT = True
util.ssl_.IS_SECURETRANSPORT = True
def extract_from_urllib3():
"""
Undo monkey-patching by :func:`inject_into_urllib3`.
"""
util.SSLContext = orig_util_SSLContext
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_SECURETRANSPORT = False
util.ssl_.IS_SECURETRANSPORT = False
def _read_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport read callback. This is called by ST to request that data
be returned from the socket.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
requested_length = data_length_pointer[0]
timeout = wrapped_socket.gettimeout()
error = None
read_count = 0
try:
while read_count < requested_length:
if timeout is None or timeout >= 0:
if not util.wait_for_read(base_socket, timeout):
raise socket.error(errno.EAGAIN, "timed out")
remaining = requested_length - read_count
buffer = (ctypes.c_char * remaining).from_address(
data_buffer + read_count
)
chunk_size = base_socket.recv_into(buffer, remaining)
read_count += chunk_size
if not chunk_size:
if not read_count:
return SecurityConst.errSSLClosedGraceful
break
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = read_count
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = read_count
if read_count != requested_length:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
def _write_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport write callback. This is called by ST to request that data
actually be sent on the network.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
bytes_to_write = data_length_pointer[0]
data = ctypes.string_at(data_buffer, bytes_to_write)
timeout = wrapped_socket.gettimeout()
error = None
sent = 0
try:
while sent < bytes_to_write:
if timeout is None or timeout >= 0:
if not util.wait_for_write(base_socket, timeout):
raise socket.error(errno.EAGAIN, "timed out")
chunk_sent = base_socket.send(data)
sent += chunk_sent
# This has some needless copying here, but I'm not sure there's
# much value in optimising this data path.
data = data[chunk_sent:]
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = sent
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = sent
if sent != bytes_to_write:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
# We need to keep these two objects references alive: if they get GC'd while
# in use then SecureTransport could attempt to call a function that is in freed
# memory. That would be...uh...bad. Yeah, that's the word. Bad.
_read_callback_pointer = Security.SSLReadFunc(_read_callback)
_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
class WrappedSocket(object):
"""
API-compatibility wrapper for Python's OpenSSL wrapped socket object.
Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
collector of PyPy.
"""
def __init__(self, socket):
self.socket = socket
self.context = None
self._makefile_refs = 0
self._closed = False
self._exception = None
self._keychain = None
self._keychain_dir = None
self._client_cert_chain = None
# We save off the previously-configured timeout and then set it to
# zero. This is done because we use select and friends to handle the
# timeouts, but if we leave the timeout set on the lower socket then
# Python will "kindly" call select on that socket again for us. Avoid
# that by forcing the timeout to zero.
self._timeout = self.socket.gettimeout()
self.socket.settimeout(0)
@contextlib.contextmanager
def _raise_on_error(self):
"""
A context manager that can be used to wrap calls that do I/O from
SecureTransport. If any of the I/O callbacks hit an exception, this
context manager will correctly propagate the exception after the fact.
This avoids silently swallowing those exceptions.
It also correctly forces the socket closed.
"""
self._exception = None
# We explicitly don't catch around this yield because in the unlikely
# event that an exception was hit in the block we don't want to swallow
# it.
yield
if self._exception is not None:
exception, self._exception = self._exception, None
self.close()
raise exception
def _set_ciphers(self):
"""
Sets up the allowed ciphers. By default this matches the set in
        util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. The list is
        hard-coded and cannot be changed at this time, mostly because parsing
        OpenSSL cipher strings is going to be a freaking nightmare.
"""
ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
result = Security.SSLSetEnabledCiphers(
self.context, ciphers, len(CIPHER_SUITES)
)
_assert_no_error(result)
def _set_alpn_protocols(self, protocols):
"""
Sets up the ALPN protocols on the context.
"""
if not protocols:
return
protocols_arr = _create_cfstring_array(protocols)
try:
result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
_assert_no_error(result)
finally:
CoreFoundation.CFRelease(protocols_arr)
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
Raises an SSLError if the connection is not trusted.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
successes = (
SecurityConst.kSecTrustResultUnspecified,
SecurityConst.kSecTrustResultProceed,
)
try:
trust_result = self._evaluate_trust(trust_bundle)
if trust_result in successes:
return
reason = "error code: %d" % (trust_result,)
except Exception as e:
# Do not trust on error
reason = "exception: %r" % (e,)
        # SecureTransport neither sends an alert nor shuts down the connection.
rec = _build_tls_unknown_ca_alert(self.version())
self.socket.sendall(rec)
# close the connection immediately
# l_onoff = 1, activate linger
        # l_linger = 0, linger for 0 seconds
opts = struct.pack("ii", 1, 0)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
self.close()
raise ssl.SSLError("certificate verify failed, %s" % reason)
def _evaluate_trust(self, trust_bundle):
# We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
with open(trust_bundle, "rb") as f:
trust_bundle = f.read()
cert_array = None
trust = Security.SecTrustRef()
try:
# Get a CFArray that contains the certs we want.
cert_array = _cert_array_from_pem(trust_bundle)
# Ok, now the hard part. We want to get the SecTrustRef that ST has
# created for this connection, shove our CAs into it, tell ST to
# ignore everything else it knows, and then ask if it can build a
# chain. This is a buuuunch of code.
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
raise ssl.SSLError("Failed to copy trust reference")
result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
_assert_no_error(result)
result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
_assert_no_error(result)
trust_result = Security.SecTrustResultType()
result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
_assert_no_error(result)
finally:
if trust:
CoreFoundation.CFRelease(trust)
if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
return trust_result.value
def handshake(
self,
server_hostname,
verify,
trust_bundle,
min_version,
max_version,
client_cert,
client_key,
client_key_passphrase,
alpn_protocols,
):
"""
Actually performs the TLS handshake. This is run automatically by
wrapped socket, and shouldn't be needed in user code.
"""
# First, we do the initial bits of connection setup. We need to create
# a context, set its I/O funcs, and set the connection reference.
self.context = Security.SSLCreateContext(
None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
)
result = Security.SSLSetIOFuncs(
self.context, _read_callback_pointer, _write_callback_pointer
)
_assert_no_error(result)
# Here we need to compute the handle to use. We do this by taking the
# id of self modulo 2**31 - 1. If this is already in the dictionary, we
# just keep incrementing by one until we find a free space.
with _connection_ref_lock:
handle = id(self) % 2147483647
while handle in _connection_refs:
handle = (handle + 1) % 2147483647
_connection_refs[handle] = self
result = Security.SSLSetConnection(self.context, handle)
_assert_no_error(result)
# If we have a server hostname, we should set that too.
if server_hostname:
if not isinstance(server_hostname, bytes):
server_hostname = server_hostname.encode("utf-8")
result = Security.SSLSetPeerDomainName(
self.context, server_hostname, len(server_hostname)
)
_assert_no_error(result)
# Setup the ciphers.
self._set_ciphers()
# Setup the ALPN protocols.
self._set_alpn_protocols(alpn_protocols)
# Set the minimum and maximum TLS versions.
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
_assert_no_error(result)
result = Security.SSLSetProtocolVersionMax(self.context, max_version)
_assert_no_error(result)
# If there's a trust DB, we need to use it. We do that by telling
# SecureTransport to break on server auth. We also do that if we don't
# want to validate the certs at all: we just won't actually do any
# authing in that case.
if not verify or trust_bundle is not None:
result = Security.SSLSetSessionOption(
self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
)
_assert_no_error(result)
# If there's a client cert, we need to use it.
if client_cert:
self._keychain, self._keychain_dir = _temporary_keychain()
self._client_cert_chain = _load_client_cert_chain(
self._keychain, client_cert, client_key
)
result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
_assert_no_error(result)
while True:
with self._raise_on_error():
result = Security.SSLHandshake(self.context)
if result == SecurityConst.errSSLWouldBlock:
raise socket.timeout("handshake timed out")
elif result == SecurityConst.errSSLServerAuthCompleted:
self._custom_validate(verify, trust_bundle)
continue
else:
_assert_no_error(result)
break
def fileno(self):
return self.socket.fileno()
# Copy-pasted from Python 3.5 source code
def _decref_socketios(self):
if self._makefile_refs > 0:
self._makefile_refs -= 1
if self._closed:
self.close()
def recv(self, bufsiz):
buffer = ctypes.create_string_buffer(bufsiz)
bytes_read = self.recv_into(buffer, bufsiz)
data = buffer[:bytes_read]
return data
def recv_into(self, buffer, nbytes=None):
# Read short on EOF.
if self._closed:
return 0
if nbytes is None:
nbytes = len(buffer)
buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLRead(
self.context, buffer, nbytes, ctypes.byref(processed_bytes)
)
# There are some result codes that we want to treat as "not always
# errors". Specifically, those are errSSLWouldBlock,
# errSSLClosedGraceful, and errSSLClosedNoNotify.
if result == SecurityConst.errSSLWouldBlock:
# If we didn't process any bytes, then this was just a time out.
# However, we can get errSSLWouldBlock in situations when we *did*
# read some data, and in those cases we should just read "short"
# and return.
if processed_bytes.value == 0:
# Timed out, no data read.
raise socket.timeout("recv timed out")
elif result in (
SecurityConst.errSSLClosedGraceful,
SecurityConst.errSSLClosedNoNotify,
):
# The remote peer has closed this connection. We should do so as
# well. Note that we don't actually return here because in
# principle this could actually be fired along with return data.
# It's unlikely though.
self.close()
else:
_assert_no_error(result)
# Ok, we read and probably succeeded. We should return whatever data
# was actually read.
return processed_bytes.value
def settimeout(self, timeout):
self._timeout = timeout
def gettimeout(self):
return self._timeout
def send(self, data):
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLWrite(
self.context, data, len(data), ctypes.byref(processed_bytes)
)
if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
# Timed out
raise socket.timeout("send timed out")
else:
_assert_no_error(result)
# We sent, and probably succeeded. Tell them how much we sent.
return processed_bytes.value
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
total_sent += sent
def shutdown(self):
with self._raise_on_error():
Security.SSLClose(self.context)
def close(self):
# TODO: should I do clean shutdown here? Do I have to?
if self._makefile_refs < 1:
self._closed = True
if self.context:
CoreFoundation.CFRelease(self.context)
self.context = None
if self._client_cert_chain:
CoreFoundation.CFRelease(self._client_cert_chain)
self._client_cert_chain = None
if self._keychain:
Security.SecKeychainDelete(self._keychain)
CoreFoundation.CFRelease(self._keychain)
shutil.rmtree(self._keychain_dir)
self._keychain = self._keychain_dir = None
return self.socket.close()
else:
self._makefile_refs -= 1
def getpeercert(self, binary_form=False):
# Urgh, annoying.
#
# Here's how we do this:
#
# 1. Call SSLCopyPeerTrust to get hold of the trust object for this
# connection.
# 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
# 3. To get the CN, call SecCertificateCopyCommonName and process that
# string so that it's of the appropriate type.
# 4. To get the SAN, we need to do something a bit more complex:
# a. Call SecCertificateCopyValues to get the data, requesting
# kSecOIDSubjectAltName.
# b. Mess about with this dictionary to try to get the SANs out.
#
# This is gross. Really gross. It's going to be a few hundred LoC extra
# just to repeat something that SecureTransport can *already do*. So my
# operating assumption at this time is that what we want to do is
# instead to just flag to urllib3 that it shouldn't do its own hostname
# validation when using SecureTransport.
if not binary_form:
raise ValueError("SecureTransport only supports dumping binary certs")
trust = Security.SecTrustRef()
certdata = None
der_bytes = None
try:
# Grab the trust store.
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
# Probably we haven't done the handshake yet. No biggie.
return None
cert_count = Security.SecTrustGetCertificateCount(trust)
if not cert_count:
# Also a case that might happen if we haven't handshaked.
# Handshook? Handshaken?
return None
leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
assert leaf
# Ok, now we want the DER bytes.
certdata = Security.SecCertificateCopyData(leaf)
assert certdata
data_length = CoreFoundation.CFDataGetLength(certdata)
data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
der_bytes = ctypes.string_at(data_buffer, data_length)
finally:
if certdata:
CoreFoundation.CFRelease(certdata)
if trust:
CoreFoundation.CFRelease(trust)
return der_bytes
def version(self):
protocol = Security.SSLProtocol()
result = Security.SSLGetNegotiatedProtocolVersion(
self.context, ctypes.byref(protocol)
)
_assert_no_error(result)
if protocol.value == SecurityConst.kTLSProtocol13:
raise ssl.SSLError("SecureTransport does not support TLS 1.3")
elif protocol.value == SecurityConst.kTLSProtocol12:
return "TLSv1.2"
elif protocol.value == SecurityConst.kTLSProtocol11:
return "TLSv1.1"
elif protocol.value == SecurityConst.kTLSProtocol1:
return "TLSv1"
elif protocol.value == SecurityConst.kSSLProtocol3:
return "SSLv3"
elif protocol.value == SecurityConst.kSSLProtocol2:
return "SSLv2"
else:
raise ssl.SSLError("Unknown TLS version: %r" % protocol)
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
if _fileobject: # Platform-specific: Python 2
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
def makefile(self, mode="r", buffering=None, *args, **kwargs):
# We disable buffering with SecureTransport because it conflicts with
# the buffering that ST does internally (see issue #1153 for more).
buffering = 0
return backport_makefile(self, mode, buffering, *args, **kwargs)
WrappedSocket.makefile = makefile
class SecureTransportContext(object):
"""
I am a wrapper class for the SecureTransport library, to translate the
interface of the standard library ``SSLContext`` object to calls into
SecureTransport.
"""
def __init__(self, protocol):
self._min_version, self._max_version = _protocol_to_min_max[protocol]
self._options = 0
self._verify = False
self._trust_bundle = None
self._client_cert = None
self._client_key = None
self._client_key_passphrase = None
self._alpn_protocols = None
@property
def check_hostname(self):
"""
SecureTransport cannot have its hostname checking disabled. For more,
see the comment on getpeercert() in this file.
"""
return True
@check_hostname.setter
def check_hostname(self, value):
"""
SecureTransport cannot have its hostname checking disabled. For more,
see the comment on getpeercert() in this file.
"""
pass
@property
def options(self):
# TODO: Well, crap.
#
# So this is the bit of the code that is the most likely to cause us
# trouble. Essentially we need to enumerate all of the SSL options that
# users might want to use and try to see if we can sensibly translate
# them, or whether we should just ignore them.
return self._options
@options.setter
def options(self, value):
# TODO: Update in line with above.
self._options = value
@property
def verify_mode(self):
return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
@verify_mode.setter
def verify_mode(self, value):
self._verify = True if value == ssl.CERT_REQUIRED else False
def set_default_verify_paths(self):
# So, this has to do something a bit weird. Specifically, what it does
# is nothing.
#
# This means that, if we had previously had load_verify_locations
# called, this does not undo that. We need to do that because it turns
# out that the rest of the urllib3 code will attempt to load the
# default verify paths if it hasn't been told about any paths, even if
        # the context itself was configured sometime earlier. We resolve that
        # by just ignoring it.
pass
def load_default_certs(self):
return self.set_default_verify_paths()
def set_ciphers(self, ciphers):
# For now, we just require the default cipher string.
if ciphers != util.ssl_.DEFAULT_CIPHERS:
raise ValueError("SecureTransport doesn't support custom cipher strings")
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
# OK, we only really support cadata and cafile.
if capath is not None:
raise ValueError("SecureTransport does not support cert directories")
# Raise if cafile does not exist.
if cafile is not None:
with open(cafile):
pass
self._trust_bundle = cafile or cadata
def load_cert_chain(self, certfile, keyfile=None, password=None):
self._client_cert = certfile
self._client_key = keyfile
        self._client_key_passphrase = password
def set_alpn_protocols(self, protocols):
"""
Sets the ALPN protocols that will later be set on the context.
Raises a NotImplementedError if ALPN is not supported.
"""
if not hasattr(Security, "SSLSetALPNProtocols"):
raise NotImplementedError(
"SecureTransport supports ALPN only in macOS 10.12+"
)
self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
def wrap_socket(
self,
sock,
server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None,
):
# So, what do we do here? Firstly, we assert some properties. This is a
# stripped down shim, so there is some functionality we don't support.
# See PEP 543 for the real deal.
assert not server_side
assert do_handshake_on_connect
assert suppress_ragged_eofs
# Ok, we're good to go. Now we want to create the wrapped socket object
# and store it in the appropriate place.
wrapped_socket = WrappedSocket(sock)
# Now we can handshake
wrapped_socket.handshake(
server_hostname,
self._verify,
self._trust_bundle,
self._min_version,
self._max_version,
self._client_cert,
self._client_key,
self._client_key_passphrase,
self._alpn_protocols,
)
return wrapped_socket
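# Illustrative sketch, not part of this module: the inject/extract
# round-trip, restoring the stdlib-backed SSLContext once SecureTransport
# is no longer wanted.
def _demo_securetransport_roundtrip():
    import urllib3
    inject_into_urllib3()
    try:
        # urllib3 now builds SecureTransportContext objects for HTTPS.
        return urllib3.PoolManager().request("GET", "https://example.com/").status
    finally:
        extract_from_urllib3()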
| 34,417 | Python | .py | 780 | 35.234615 | 86 | 0.652276 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,730 | pyopenssl.py | rembo10_headphones/lib/urllib3/contrib/pyopenssl.py |
"""
TLS with SNI_-support for Python 2. Follow these instructions if you would
like to verify TLS certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.
This needs the following packages installed:
* `pyOpenSSL`_ (tested with 16.0.0)
* `cryptography`_ (minimum 1.3.4, from pyopenssl)
* `idna`_ (minimum 2.0, from cryptography)
However, pyopenssl depends on cryptography, which depends on idna, so while we
use all three directly here we end up having relatively few packages required.
You can install them with the following command:
.. code-block:: bash
$ python -m pip install pyopenssl cryptography idna
To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this:
.. code-block:: python
try:
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
except ImportError:
pass
Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.
Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
.. _pyopenssl: https://www.pyopenssl.org
.. _cryptography: https://cryptography.io
.. _idna: https://github.com/kjd/idna
"""
from __future__ import absolute_import
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
try:
from cryptography.x509 import UnsupportedExtension
except ImportError:
# UnsupportedExtension is gone in cryptography >= 2.1.0
class UnsupportedExtension(Exception):
pass
from io import BytesIO
from socket import error as SocketError
from socket import timeout
try: # Platform-specific: Python 2
from socket import _fileobject
except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
import logging
import ssl
import sys
from .. import util
from ..packages import six
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
# SNI always works.
HAS_SNI = True
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
_openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
_openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
_openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
_stdlib_to_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
log = logging.getLogger(__name__)
def inject_into_urllib3():
"Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
_validate_dependencies_met()
util.SSLContext = PyOpenSSLContext
util.ssl_.SSLContext = PyOpenSSLContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
util.IS_PYOPENSSL = True
util.ssl_.IS_PYOPENSSL = True
def extract_from_urllib3():
"Undo monkey-patching by :func:`inject_into_urllib3`."
util.SSLContext = orig_util_SSLContext
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_PYOPENSSL = False
util.ssl_.IS_PYOPENSSL = False
def _validate_dependencies_met():
"""
Verifies that PyOpenSSL's package-level dependencies have been met.
Throws `ImportError` if they are not met.
"""
# Method added in `cryptography==1.1`; not available in older versions
from cryptography.x509.extensions import Extensions
if getattr(Extensions, "get_extension_for_class", None) is None:
raise ImportError(
"'cryptography' module missing required functionality. "
"Try upgrading to v1.3.4 or newer."
)
# pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
# attribute is only present on those versions.
from OpenSSL.crypto import X509
x509 = X509()
if getattr(x509, "_x509", None) is None:
raise ImportError(
"'pyOpenSSL' module missing required functionality. "
"Try upgrading to v0.14 or newer."
)
def _dnsname_to_stdlib(name):
"""
Converts a dNSName SubjectAlternativeName field to the form used by the
standard library on the given Python version.
Cryptography produces a dNSName as a unicode string that was idna-decoded
from ASCII bytes. We need to idna-encode that string to get it back, and
then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
If the name cannot be idna-encoded then we return None signalling that
the name given should be skipped.
"""
def idna_encode(name):
"""
Borrowed wholesale from the Python Cryptography Project. It turns out
that we can't just safely call `idna.encode`: it can explode for
wildcard names. This avoids that problem.
"""
import idna
try:
for prefix in [u"*.", u"."]:
if name.startswith(prefix):
name = name[len(prefix) :]
return prefix.encode("ascii") + idna.encode(name)
return idna.encode(name)
except idna.core.IDNAError:
return None
# Don't send IPv6 addresses through the IDNA encoder.
if ":" in name:
return name
name = idna_encode(name)
if name is None:
return None
elif sys.version_info >= (3, 0):
name = name.decode("utf-8")
return name
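# Quick illustration (hypothetical inputs; requires the `idna` package used by
# the helper above): wildcards survive, IPv6 literals pass through untouched,
# and un-encodable names come back as None.
if __name__ == "__main__":
    for candidate in [u"example.com", u"*.example.com", u"::1"]:
        print(candidate, "->", _dnsname_to_stdlib(candidate))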
def get_subj_alt_name(peer_cert):
"""
    Given a PyOpenSSL certificate, provides all the subject alternative names.
"""
# Pass the cert to cryptography, which has much better APIs for this.
if hasattr(peer_cert, "to_cryptography"):
cert = peer_cert.to_cryptography()
else:
# This is technically using private APIs, but should work across all
# relevant versions before PyOpenSSL got a proper API for this.
cert = _Certificate(openssl_backend, peer_cert._x509)
# We want to find the SAN extension. Ask Cryptography to locate it (it's
# faster than looping in Python)
try:
ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
except x509.ExtensionNotFound:
# No such extension, return the empty list.
return []
except (
x509.DuplicateExtension,
UnsupportedExtension,
x509.UnsupportedGeneralNameType,
UnicodeError,
) as e:
# A problem has been found with the quality of the certificate. Assume
# no SAN field is present.
log.warning(
"A problem was encountered with the certificate that prevented "
"urllib3 from finding the SubjectAlternativeName field. This can "
"affect certificate validation. The error was %s",
e,
)
return []
# We want to return dNSName and iPAddress fields. We need to cast the IPs
# back to strings because the match_hostname function wants them as
# strings.
# Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
# decoded. This is pretty frustrating, but that's what the standard library
# does with certificates, and so we need to attempt to do the same.
# We also want to skip over names which cannot be idna encoded.
names = [
("DNS", name)
for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
if name is not None
]
names.extend(
("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
)
return names
class WrappedSocket(object):
"""API-compatibility wrapper for Python OpenSSL's Connection-class.
Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
collector of pypy.
"""
def __init__(self, connection, socket, suppress_ragged_eofs=True):
self.connection = connection
self.socket = socket
self.suppress_ragged_eofs = suppress_ragged_eofs
self._makefile_refs = 0
self._closed = False
def fileno(self):
return self.socket.fileno()
# Copy-pasted from Python 3.5 source code
def _decref_socketios(self):
if self._makefile_refs > 0:
self._makefile_refs -= 1
if self._closed:
self.close()
def recv(self, *args, **kwargs):
try:
data = self.connection.recv(*args, **kwargs)
except OpenSSL.SSL.SysCallError as e:
if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
return b""
else:
raise SocketError(str(e))
except OpenSSL.SSL.ZeroReturnError:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return b""
else:
raise
except OpenSSL.SSL.WantReadError:
if not util.wait_for_read(self.socket, self.socket.gettimeout()):
raise timeout("The read operation timed out")
else:
return self.recv(*args, **kwargs)
# TLS 1.3 post-handshake authentication
except OpenSSL.SSL.Error as e:
raise ssl.SSLError("read error: %r" % e)
else:
return data
def recv_into(self, *args, **kwargs):
try:
return self.connection.recv_into(*args, **kwargs)
except OpenSSL.SSL.SysCallError as e:
if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
return 0
else:
raise SocketError(str(e))
except OpenSSL.SSL.ZeroReturnError:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return 0
else:
raise
except OpenSSL.SSL.WantReadError:
if not util.wait_for_read(self.socket, self.socket.gettimeout()):
raise timeout("The read operation timed out")
else:
return self.recv_into(*args, **kwargs)
# TLS 1.3 post-handshake authentication
except OpenSSL.SSL.Error as e:
raise ssl.SSLError("read error: %r" % e)
def settimeout(self, timeout):
return self.socket.settimeout(timeout)
def _send_until_done(self, data):
while True:
try:
return self.connection.send(data)
except OpenSSL.SSL.WantWriteError:
if not util.wait_for_write(self.socket, self.socket.gettimeout()):
raise timeout()
continue
except OpenSSL.SSL.SysCallError as e:
raise SocketError(str(e))
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
sent = self._send_until_done(
data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
)
total_sent += sent
def shutdown(self):
# FIXME rethrow compatible exceptions should we ever use this
self.connection.shutdown()
def close(self):
if self._makefile_refs < 1:
try:
self._closed = True
return self.connection.close()
except OpenSSL.SSL.Error:
return
else:
self._makefile_refs -= 1
def getpeercert(self, binary_form=False):
x509 = self.connection.get_peer_certificate()
if not x509:
return x509
if binary_form:
return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
return {
"subject": ((("commonName", x509.get_subject().CN),),),
"subjectAltName": get_subj_alt_name(x509),
}
def version(self):
return self.connection.get_protocol_version_name()
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
if _fileobject: # Platform-specific: Python 2
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
makefile = backport_makefile
WrappedSocket.makefile = makefile
class PyOpenSSLContext(object):
"""
I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
for translating the interface of the standard library ``SSLContext`` object
to calls into PyOpenSSL.
"""
def __init__(self, protocol):
self.protocol = _openssl_versions[protocol]
self._ctx = OpenSSL.SSL.Context(self.protocol)
self._options = 0
self.check_hostname = False
@property
def options(self):
return self._options
@options.setter
def options(self, value):
self._options = value
self._ctx.set_options(value)
@property
def verify_mode(self):
return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
@verify_mode.setter
def verify_mode(self, value):
self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
def set_default_verify_paths(self):
self._ctx.set_default_verify_paths()
def set_ciphers(self, ciphers):
if isinstance(ciphers, six.text_type):
ciphers = ciphers.encode("utf-8")
self._ctx.set_cipher_list(ciphers)
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
if cafile is not None:
cafile = cafile.encode("utf-8")
if capath is not None:
capath = capath.encode("utf-8")
try:
self._ctx.load_verify_locations(cafile, capath)
if cadata is not None:
self._ctx.load_verify_locations(BytesIO(cadata))
except OpenSSL.SSL.Error as e:
raise ssl.SSLError("unable to load trusted certificates: %r" % e)
def load_cert_chain(self, certfile, keyfile=None, password=None):
self._ctx.use_certificate_chain_file(certfile)
if password is not None:
if not isinstance(password, six.binary_type):
password = password.encode("utf-8")
self._ctx.set_passwd_cb(lambda *_: password)
self._ctx.use_privatekey_file(keyfile or certfile)
def set_alpn_protocols(self, protocols):
protocols = [six.ensure_binary(p) for p in protocols]
return self._ctx.set_alpn_protos(protocols)
def wrap_socket(
self,
sock,
server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None,
):
cnx = OpenSSL.SSL.Connection(self._ctx, sock)
if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
server_hostname = server_hostname.encode("utf-8")
if server_hostname is not None:
cnx.set_tlsext_host_name(server_hostname)
cnx.set_connect_state()
while True:
try:
cnx.do_handshake()
except OpenSSL.SSL.WantReadError:
if not util.wait_for_read(sock, sock.gettimeout()):
raise timeout("select timed out")
continue
except OpenSSL.SSL.Error as e:
raise ssl.SSLError("bad handshake: %r" % e)
break
return WrappedSocket(cnx, sock)
def _verify_callback(cnx, x509, err_no, err_depth, return_code):
return err_no == 0
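# Minimal end-to-end sketch (illustrative only): PyOpenSSLContext mirrors the
# stdlib ssl.SSLContext API closely enough to wrap a plain socket by hand.
if __name__ == "__main__":
    import socket
    ctx = PyOpenSSLContext(util.PROTOCOL_TLS)
    ctx.verify_mode = ssl.CERT_REQUIRED
    ctx.set_default_verify_paths()
    raw = socket.create_connection(("example.com", 443))
    tls = ctx.wrap_socket(raw, server_hostname="example.com")
    print(tls.version(), tls.getpeercert()["subject"])
    tls.close()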
| 16,874 | Python | .py | 407 | 33.727273 | 88 | 0.659781 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,731 | socks.py | rembo10_headphones/lib/urllib3/contrib/socks.py |
# -*- coding: utf-8 -*-
"""
This module contains provisional support for SOCKS proxies from within
urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
SOCKS5. To enable its functionality, either install PySocks or install this
module with the ``socks`` extra.
The SOCKS implementation supports the full range of urllib3 features. It also
supports the following SOCKS features:
- SOCKS4A (``proxy_url='socks4a://...'``)
- SOCKS4 (``proxy_url='socks4://...'``)
- SOCKS5 with remote DNS (``proxy_url='socks5h://...'``)
- SOCKS5 with local DNS (``proxy_url='socks5://...'``)
- Usernames and passwords for the SOCKS proxy
.. note::
It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
your ``proxy_url`` to ensure that DNS resolution is done from the remote
server instead of client-side when connecting to a domain name.
SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
supports IPv4, IPv6, and domain names.
When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
will be sent as the ``userid`` section of the SOCKS request:
.. code-block:: python
proxy_url="socks4a://<userid>@proxy-host"
When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
of the ``proxy_url`` will be sent as the username/password to authenticate
with the proxy:
.. code-block:: python
proxy_url="socks5h://<username>:<password>@proxy-host"
"""
from __future__ import absolute_import
try:
import socks
except ImportError:
import warnings
from ..exceptions import DependencyWarning
warnings.warn(
(
"SOCKS support in urllib3 requires the installation of optional "
"dependencies: specifically, PySocks. For more information, see "
"https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
),
DependencyWarning,
)
raise
from socket import error as SocketError
from socket import timeout as SocketTimeout
from ..connection import HTTPConnection, HTTPSConnection
from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from ..exceptions import ConnectTimeoutError, NewConnectionError
from ..poolmanager import PoolManager
from ..util.url import parse_url
try:
import ssl
except ImportError:
ssl = None
class SOCKSConnection(HTTPConnection):
"""
A plain-text HTTP connection that connects via a SOCKS proxy.
"""
def __init__(self, *args, **kwargs):
self._socks_options = kwargs.pop("_socks_options")
super(SOCKSConnection, self).__init__(*args, **kwargs)
def _new_conn(self):
"""
Establish a new connection via the SOCKS proxy.
"""
extra_kw = {}
if self.source_address:
extra_kw["source_address"] = self.source_address
if self.socket_options:
extra_kw["socket_options"] = self.socket_options
try:
conn = socks.create_connection(
(self.host, self.port),
proxy_type=self._socks_options["socks_version"],
proxy_addr=self._socks_options["proxy_host"],
proxy_port=self._socks_options["proxy_port"],
proxy_username=self._socks_options["username"],
proxy_password=self._socks_options["password"],
proxy_rdns=self._socks_options["rdns"],
timeout=self.timeout,
**extra_kw
)
except SocketTimeout:
raise ConnectTimeoutError(
self,
"Connection to %s timed out. (connect timeout=%s)"
% (self.host, self.timeout),
)
except socks.ProxyError as e:
# This is fragile as hell, but it seems to be the only way to raise
# useful errors here.
if e.socket_err:
error = e.socket_err
if isinstance(error, SocketTimeout):
raise ConnectTimeoutError(
self,
"Connection to %s timed out. (connect timeout=%s)"
% (self.host, self.timeout),
)
else:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % error
)
else:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e
)
except SocketError as e: # Defensive: PySocks should catch all these.
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e
)
return conn
# We don't need to duplicate the Verified/Unverified distinction from
# urllib3/connection.py here because the HTTPSConnection will already have been
# correctly set to either the Verified or Unverified form by that module. This
# means the SOCKSHTTPSConnection will automatically be the correct type.
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
pass
class SOCKSHTTPConnectionPool(HTTPConnectionPool):
ConnectionCls = SOCKSConnection
class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
ConnectionCls = SOCKSHTTPSConnection
class SOCKSProxyManager(PoolManager):
"""
A version of the urllib3 ProxyManager that routes connections via the
defined SOCKS proxy.
"""
pool_classes_by_scheme = {
"http": SOCKSHTTPConnectionPool,
"https": SOCKSHTTPSConnectionPool,
}
def __init__(
self,
proxy_url,
username=None,
password=None,
num_pools=10,
headers=None,
**connection_pool_kw
):
parsed = parse_url(proxy_url)
if username is None and password is None and parsed.auth is not None:
split = parsed.auth.split(":")
if len(split) == 2:
username, password = split
if parsed.scheme == "socks5":
socks_version = socks.PROXY_TYPE_SOCKS5
rdns = False
elif parsed.scheme == "socks5h":
socks_version = socks.PROXY_TYPE_SOCKS5
rdns = True
elif parsed.scheme == "socks4":
socks_version = socks.PROXY_TYPE_SOCKS4
rdns = False
elif parsed.scheme == "socks4a":
socks_version = socks.PROXY_TYPE_SOCKS4
rdns = True
else:
raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
self.proxy_url = proxy_url
socks_options = {
"socks_version": socks_version,
"proxy_host": parsed.host,
"proxy_port": parsed.port,
"username": username,
"password": password,
"rdns": rdns,
}
connection_pool_kw["_socks_options"] = socks_options
super(SOCKSProxyManager, self).__init__(
num_pools, headers, **connection_pool_kw
)
self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
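# Usage sketch (illustrative; assumes a SOCKS proxy listening on
# localhost:1080): the socks5h scheme makes the proxy resolve DNS remotely.
if __name__ == "__main__":
    with SOCKSProxyManager("socks5h://localhost:1080/") as proxy:
        response = proxy.request("GET", "http://example.com/")
        print(response.status)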
| 7,097 | Python | .py | 174 | 31.954023 | 85 | 0.632902 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,732 | ntlmpool.py | rembo10_headphones/lib/urllib3/contrib/ntlmpool.py |
"""
NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
import warnings
from logging import getLogger
from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
warnings.warn(
"The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
"in urllib3 v2.0 release, urllib3 is not able to support it properly due "
"to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
"If you are a user of this module please comment in the mentioned issue.",
DeprecationWarning,
)
log = getLogger(__name__)
class NTLMConnectionPool(HTTPSConnectionPool):
"""
Implements an NTLM authentication version of an urllib3 connection pool
"""
scheme = "https"
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
authurl is a random URL on the server that is protected by NTLM.
user is the Windows user, probably in the DOMAIN\\username format.
pw is the password for the user.
"""
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
user_parts = user.split("\\", 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
def _new_conn(self):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
log.debug(
"Starting NTLM HTTPS connection no. %d: https://%s%s",
self.num_connections,
self.host,
self.authurl,
)
headers = {"Connection": "Keep-Alive"}
req_header = "Authorization"
resp_header = "www-authenticate"
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
self.rawuser
)
log.debug("Request headers: %s", headers)
conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
log.debug("Response status: %s %s", res.status, res.reason)
log.debug("Response headers: %s", reshdr)
log.debug("Response data: %s [...]", res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
auth_header_values = reshdr[resp_header].split(", ")
auth_header_value = None
for s in auth_header_values:
if s[:5] == "NTLM ":
auth_header_value = s[5:]
if auth_header_value is None:
raise Exception(
"Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
)
# Send authentication message
ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
auth_header_value
)
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
)
headers[req_header] = "NTLM %s" % auth_msg
log.debug("Request headers: %s", headers)
conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
log.debug("Response status: %s %s", res.status, res.reason)
log.debug("Response headers: %s", dict(res.getheaders()))
log.debug("Response data: %s [...]", res.read()[:100])
if res.status != 200:
if res.status == 401:
raise Exception("Server rejected request: wrong username or password")
raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
res.fp = None
log.debug("Connection established")
return conn
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=3,
redirect=True,
assert_same_host=True,
):
if headers is None:
headers = {}
headers["Connection"] = "Keep-Alive"
return super(NTLMConnectionPool, self).urlopen(
method, url, body, headers, retries, redirect, assert_same_host
)
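# Usage sketch (illustrative; the module is deprecated, and the host and
# credentials below are placeholders): the pool performs the two-step NTLM
# handshake on the first connection, then keeps the socket open.
if __name__ == "__main__":
    pool = NTLMConnectionPool(
        user="DOMAIN\\alice", pw="secret", authurl="/", host="intranet.example"
    )
    response = pool.urlopen("GET", "/")
    print(response.status)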
| 4,538 | Python | .py | 111 | 32.468468 | 88 | 0.623185 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,733 | _appengine_environ.py | rembo10_headphones/lib/urllib3/contrib/_appengine_environ.py |
"""
This module provides means to detect the App Engine environment.
"""
import os
def is_appengine():
return is_local_appengine() or is_prod_appengine()
def is_appengine_sandbox():
"""Reports if the app is running in the first generation sandbox.
The second generation runtimes are technically still in a sandbox, but it
is much less restrictive, so generally you shouldn't need to check for it.
see https://cloud.google.com/appengine/docs/standard/runtimes
"""
return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
def is_local_appengine():
return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
"SERVER_SOFTWARE", ""
).startswith("Development/")
def is_prod_appengine():
return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
"SERVER_SOFTWARE", ""
).startswith("Google App Engine/")
def is_prod_appengine_mvms():
"""Deprecated."""
return False
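# Quick check (runnable anywhere; every helper above only inspects
# environment variables, so this is safe outside App Engine too):
if __name__ == "__main__":
    print("appengine:", is_appengine())
    print("sandbox:", is_appengine_sandbox())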
| 957 | Python | .py | 24 | 35.541667 | 78 | 0.717698 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,734 | low_level.py | rembo10_headphones/lib/urllib3/contrib/_securetransport/low_level.py |
"""
Low-level helpers for the SecureTransport bindings.
These are Python functions that are not directly related to the high-level APIs
but are necessary to get them to work. They include a whole bunch of low-level
CoreFoundation messing about and memory management. The concerns in this module
are almost entirely about trying to avoid memory leaks and providing
appropriate and useful assistance to the higher-level code.
"""
import base64
import ctypes
import itertools
import os
import re
import ssl
import struct
import tempfile
from .bindings import CFConst, CoreFoundation, Security
# This regular expression is used to grab PEM data out of a PEM bundle.
_PEM_CERTS_RE = re.compile(
b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
)
def _cf_data_from_bytes(bytestring):
"""
Given a bytestring, create a CFData object from it. This CFData object must
be CFReleased by the caller.
"""
return CoreFoundation.CFDataCreate(
CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
)
def _cf_dictionary_from_tuples(tuples):
"""
Given a list of Python tuples, create an associated CFDictionary.
"""
dictionary_size = len(tuples)
# We need to get the dictionary keys and values out in the same order.
keys = (t[0] for t in tuples)
values = (t[1] for t in tuples)
cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
return CoreFoundation.CFDictionaryCreate(
CoreFoundation.kCFAllocatorDefault,
cf_keys,
cf_values,
dictionary_size,
CoreFoundation.kCFTypeDictionaryKeyCallBacks,
CoreFoundation.kCFTypeDictionaryValueCallBacks,
)
def _cfstr(py_bstr):
"""
    Given Python binary data, create a CFString.
The string must be CFReleased by the caller.
"""
c_str = ctypes.c_char_p(py_bstr)
cf_str = CoreFoundation.CFStringCreateWithCString(
CoreFoundation.kCFAllocatorDefault,
c_str,
CFConst.kCFStringEncodingUTF8,
)
return cf_str
def _create_cfstring_array(lst):
"""
Given a list of Python binary data, create an associated CFMutableArray.
The array must be CFReleased by the caller.
Raises an ssl.SSLError on failure.
"""
cf_arr = None
try:
cf_arr = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
if not cf_arr:
raise MemoryError("Unable to allocate memory!")
for item in lst:
cf_str = _cfstr(item)
if not cf_str:
raise MemoryError("Unable to allocate memory!")
try:
CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
finally:
CoreFoundation.CFRelease(cf_str)
except BaseException as e:
if cf_arr:
CoreFoundation.CFRelease(cf_arr)
raise ssl.SSLError("Unable to allocate array: %s" % (e,))
return cf_arr
def _cf_string_to_unicode(value):
"""
Creates a Unicode string from a CFString object. Used entirely for error
reporting.
Yes, it annoys me quite a lot that this function is this complex.
"""
value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
string = CoreFoundation.CFStringGetCStringPtr(
value_as_void_p, CFConst.kCFStringEncodingUTF8
)
if string is None:
buffer = ctypes.create_string_buffer(1024)
result = CoreFoundation.CFStringGetCString(
value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
)
if not result:
raise OSError("Error copying C string from CFStringRef")
string = buffer.value
if string is not None:
string = string.decode("utf-8")
return string
def _assert_no_error(error, exception_class=None):
"""
Checks the return code and throws an exception if there is an error to
    report.
"""
if error == 0:
return
cf_error_string = Security.SecCopyErrorMessageString(error, None)
output = _cf_string_to_unicode(cf_error_string)
CoreFoundation.CFRelease(cf_error_string)
if output is None or output == u"":
output = u"OSStatus %s" % error
if exception_class is None:
exception_class = ssl.SSLError
raise exception_class(output)
def _cert_array_from_pem(pem_bundle):
"""
Given a bundle of certs in PEM format, turns them into a CFArray of certs
that can be used to validate a cert chain.
"""
# Normalize the PEM bundle's line endings.
pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
der_certs = [
base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
]
if not der_certs:
raise ssl.SSLError("No root certificates specified")
cert_array = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
if not cert_array:
raise ssl.SSLError("Unable to allocate memory!")
try:
for der_bytes in der_certs:
certdata = _cf_data_from_bytes(der_bytes)
if not certdata:
raise ssl.SSLError("Unable to allocate memory!")
cert = Security.SecCertificateCreateWithData(
CoreFoundation.kCFAllocatorDefault, certdata
)
CoreFoundation.CFRelease(certdata)
if not cert:
raise ssl.SSLError("Unable to build cert object!")
CoreFoundation.CFArrayAppendValue(cert_array, cert)
CoreFoundation.CFRelease(cert)
except Exception:
# We need to free the array before the exception bubbles further.
# We only want to do that if an error occurs: otherwise, the caller
# should free.
CoreFoundation.CFRelease(cert_array)
raise
return cert_array
def _is_cert(item):
"""
Returns True if a given CFTypeRef is a certificate.
"""
expected = Security.SecCertificateGetTypeID()
return CoreFoundation.CFGetTypeID(item) == expected
def _is_identity(item):
"""
Returns True if a given CFTypeRef is an identity.
"""
expected = Security.SecIdentityGetTypeID()
return CoreFoundation.CFGetTypeID(item) == expected
def _temporary_keychain():
"""
This function creates a temporary Mac keychain that we can use to work with
credentials. This keychain uses a one-time password and a temporary file to
store the data. We expect to have one keychain per socket. The returned
SecKeychainRef must be freed by the caller, including calling
SecKeychainDelete.
Returns a tuple of the SecKeychainRef and the path to the temporary
directory that contains it.
"""
# Unfortunately, SecKeychainCreate requires a path to a keychain. This
# means we cannot use mkstemp to use a generic temporary file. Instead,
# we're going to create a temporary directory and a filename to use there.
    # This filename will be 8 random bytes expanded into hex (base16). We also need
# some random bytes to password-protect the keychain we're creating, so we
# ask for 40 random bytes.
random_bytes = os.urandom(40)
filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
tempdirectory = tempfile.mkdtemp()
keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
# We now want to create the keychain itself.
keychain = Security.SecKeychainRef()
status = Security.SecKeychainCreate(
keychain_path, len(password), password, False, None, ctypes.byref(keychain)
)
_assert_no_error(status)
# Having created the keychain, we want to pass it off to the caller.
return keychain, tempdirectory
def _load_items_from_file(keychain, path):
"""
Given a single file, loads all the trust objects from it into arrays and
the keychain.
Returns a tuple of lists: the first list is a list of identities, the
second a list of certs.
"""
certificates = []
identities = []
result_array = None
with open(path, "rb") as f:
raw_filedata = f.read()
try:
filedata = CoreFoundation.CFDataCreate(
CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
)
result_array = CoreFoundation.CFArrayRef()
result = Security.SecItemImport(
filedata, # cert data
None, # Filename, leaving it out for now
None, # What the type of the file is, we don't care
None, # what's in the file, we don't care
0, # import flags
None, # key params, can include passphrase in the future
keychain, # The keychain to insert into
ctypes.byref(result_array), # Results
)
_assert_no_error(result)
# A CFArray is not very useful to us as an intermediary
# representation, so we are going to extract the objects we want
# and then free the array. We don't need to keep hold of keys: the
# keychain already has them!
result_count = CoreFoundation.CFArrayGetCount(result_array)
for index in range(result_count):
item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
item = ctypes.cast(item, CoreFoundation.CFTypeRef)
if _is_cert(item):
CoreFoundation.CFRetain(item)
certificates.append(item)
elif _is_identity(item):
CoreFoundation.CFRetain(item)
identities.append(item)
finally:
if result_array:
CoreFoundation.CFRelease(result_array)
CoreFoundation.CFRelease(filedata)
return (identities, certificates)
def _load_client_cert_chain(keychain, *paths):
"""
Load certificates and maybe keys from a number of files. Has the end goal
of returning a CFArray containing one SecIdentityRef, and then zero or more
SecCertificateRef objects, suitable for use as a client certificate trust
chain.
"""
# Ok, the strategy.
#
# This relies on knowing that macOS will not give you a SecIdentityRef
# unless you have imported a key into a keychain. This is a somewhat
# artificial limitation of macOS (for example, it doesn't necessarily
# affect iOS), but there is nothing inside Security.framework that lets you
# get a SecIdentityRef without having a key in a keychain.
#
# So the policy here is we take all the files and iterate them in order.
# Each one will use SecItemImport to have one or more objects loaded from
# it. We will also point at a keychain that macOS can use to work with the
# private key.
#
# Once we have all the objects, we'll check what we actually have. If we
# already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
# we'll take the first certificate (which we assume to be our leaf) and
# ask the keychain to give us a SecIdentityRef with that cert's associated
# key.
#
# We'll then return a CFArray containing the trust chain: one
# SecIdentityRef and then zero-or-more SecCertificateRef objects. The
# responsibility for freeing this CFArray will be with the caller. This
# CFArray must remain alive for the entire connection, so in practice it
# will be stored with a single SSLSocket, along with the reference to the
# keychain.
certificates = []
identities = []
# Filter out bad paths.
paths = (path for path in paths if path)
try:
for file_path in paths:
new_identities, new_certs = _load_items_from_file(keychain, file_path)
identities.extend(new_identities)
certificates.extend(new_certs)
# Ok, we have everything. The question is: do we have an identity? If
# not, we want to grab one from the first cert we have.
if not identities:
new_identity = Security.SecIdentityRef()
status = Security.SecIdentityCreateWithCertificate(
keychain, certificates[0], ctypes.byref(new_identity)
)
_assert_no_error(status)
identities.append(new_identity)
# We now want to release the original certificate, as we no longer
# need it.
CoreFoundation.CFRelease(certificates.pop(0))
# We now need to build a new CFArray that holds the trust chain.
trust_chain = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
for item in itertools.chain(identities, certificates):
# ArrayAppendValue does a CFRetain on the item. That's fine,
# because the finally block will release our other refs to them.
CoreFoundation.CFArrayAppendValue(trust_chain, item)
return trust_chain
finally:
for obj in itertools.chain(identities, certificates):
CoreFoundation.CFRelease(obj)
TLS_PROTOCOL_VERSIONS = {
"SSLv2": (0, 2),
"SSLv3": (3, 0),
"TLSv1": (3, 1),
"TLSv1.1": (3, 2),
"TLSv1.2": (3, 3),
}
def _build_tls_unknown_ca_alert(version):
"""
Builds a TLS alert record for an unknown CA.
"""
ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
severity_fatal = 0x02
description_unknown_ca = 0x30
msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
msg_len = len(msg)
record_type_alert = 0x15
record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
return record
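# Worked example (illustrative; importable on macOS only because of the
# bindings import above): a TLSv1.2 fatal unknown_ca alert is a 5-byte record
# header followed by the 2-byte alert body.
if __name__ == "__main__":
    record = _build_tls_unknown_ca_alert("TLSv1.2")
    print(record)  # b'\x15\x03\x03\x00\x02\x020'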
| 13,922 | Python | .py | 336 | 34.300595 | 88 | 0.678669 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,735 | bindings.py | rembo10_headphones/lib/urllib3/contrib/_securetransport/bindings.py |
"""
This module uses ctypes to bind a whole bunch of functions and constants from
SecureTransport. The goal here is to provide the low-level API to
SecureTransport. These are essentially the C-level functions and constants, and
they're pretty gross to work with.
This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import platform
from ctypes import (
CDLL,
CFUNCTYPE,
POINTER,
c_bool,
c_byte,
c_char_p,
c_int32,
c_long,
c_size_t,
c_uint32,
c_ulong,
c_void_p,
)
from ctypes.util import find_library
from ...packages.six import raise_from
if platform.system() != "Darwin":
raise ImportError("Only macOS is supported")
version = platform.mac_ver()[0]
version_info = tuple(map(int, version.split(".")))
if version_info < (10, 8):
raise OSError(
"Only OS X 10.8 and newer are supported, not %s.%s"
% (version_info[0], version_info[1])
)
def load_cdll(name, macos10_16_path):
"""Loads a CDLL by name, falling back to known path on 10.16+"""
try:
# Big Sur is technically 11 but we use 10.16 due to the Big Sur
# beta being labeled as 10.16.
if version_info >= (10, 16):
path = macos10_16_path
else:
path = find_library(name)
if not path:
raise OSError # Caught and reraised as 'ImportError'
return CDLL(path, use_errno=True)
except OSError:
raise_from(ImportError("The library %s failed to load" % name), None)
Security = load_cdll(
"Security", "/System/Library/Frameworks/Security.framework/Security"
)
CoreFoundation = load_cdll(
"CoreFoundation",
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)
Boolean = c_bool
CFIndex = c_long
CFStringEncoding = c_uint32
CFData = c_void_p
CFString = c_void_p
CFArray = c_void_p
CFMutableArray = c_void_p
CFDictionary = c_void_p
CFError = c_void_p
CFType = c_void_p
CFTypeID = c_ulong
CFTypeRef = POINTER(CFType)
CFAllocatorRef = c_void_p
OSStatus = c_int32
CFDataRef = POINTER(CFData)
CFStringRef = POINTER(CFString)
CFArrayRef = POINTER(CFArray)
CFMutableArrayRef = POINTER(CFMutableArray)
CFDictionaryRef = POINTER(CFDictionary)
CFArrayCallBacks = c_void_p
CFDictionaryKeyCallBacks = c_void_p
CFDictionaryValueCallBacks = c_void_p
SecCertificateRef = POINTER(c_void_p)
SecExternalFormat = c_uint32
SecExternalItemType = c_uint32
SecIdentityRef = POINTER(c_void_p)
SecItemImportExportFlags = c_uint32
SecItemImportExportKeyParameters = c_void_p
SecKeychainRef = POINTER(c_void_p)
SSLProtocol = c_uint32
SSLCipherSuite = c_uint32
SSLContextRef = POINTER(c_void_p)
SecTrustRef = POINTER(c_void_p)
SSLConnectionRef = c_uint32
SecTrustResultType = c_uint32
SecTrustOptionFlags = c_uint32
SSLProtocolSide = c_uint32
SSLConnectionType = c_uint32
SSLSessionOption = c_uint32
try:
Security.SecItemImport.argtypes = [
CFDataRef,
CFStringRef,
POINTER(SecExternalFormat),
POINTER(SecExternalItemType),
SecItemImportExportFlags,
POINTER(SecItemImportExportKeyParameters),
SecKeychainRef,
POINTER(CFArrayRef),
]
Security.SecItemImport.restype = OSStatus
Security.SecCertificateGetTypeID.argtypes = []
Security.SecCertificateGetTypeID.restype = CFTypeID
Security.SecIdentityGetTypeID.argtypes = []
Security.SecIdentityGetTypeID.restype = CFTypeID
Security.SecKeyGetTypeID.argtypes = []
Security.SecKeyGetTypeID.restype = CFTypeID
Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
Security.SecCertificateCreateWithData.restype = SecCertificateRef
Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
Security.SecCertificateCopyData.restype = CFDataRef
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SecIdentityCreateWithCertificate.argtypes = [
CFTypeRef,
SecCertificateRef,
POINTER(SecIdentityRef),
]
Security.SecIdentityCreateWithCertificate.restype = OSStatus
Security.SecKeychainCreate.argtypes = [
c_char_p,
c_uint32,
c_void_p,
Boolean,
c_void_p,
POINTER(SecKeychainRef),
]
Security.SecKeychainCreate.restype = OSStatus
Security.SecKeychainDelete.argtypes = [SecKeychainRef]
Security.SecKeychainDelete.restype = OSStatus
Security.SecPKCS12Import.argtypes = [
CFDataRef,
CFDictionaryRef,
POINTER(CFArrayRef),
]
Security.SecPKCS12Import.restype = OSStatus
SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
SSLWriteFunc = CFUNCTYPE(
OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
)
Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
Security.SSLSetIOFuncs.restype = OSStatus
Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerID.restype = OSStatus
Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetCertificate.restype = OSStatus
Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
Security.SSLSetCertificateAuthorities.restype = OSStatus
Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
Security.SSLSetConnection.restype = OSStatus
Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerDomainName.restype = OSStatus
Security.SSLHandshake.argtypes = [SSLContextRef]
Security.SSLHandshake.restype = OSStatus
Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLRead.restype = OSStatus
Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLWrite.restype = OSStatus
Security.SSLClose.argtypes = [SSLContextRef]
Security.SSLClose.restype = OSStatus
Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberSupportedCiphers.restype = OSStatus
Security.SSLGetSupportedCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetSupportedCiphers.restype = OSStatus
Security.SSLSetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
c_size_t,
]
Security.SSLSetEnabledCiphers.restype = OSStatus
    Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberEnabledCiphers.restype = OSStatus
Security.SSLGetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetEnabledCiphers.restype = OSStatus
Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
Security.SSLGetNegotiatedCipher.restype = OSStatus
Security.SSLGetNegotiatedProtocolVersion.argtypes = [
SSLContextRef,
POINTER(SSLProtocol),
]
Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
Security.SSLCopyPeerTrust.restype = OSStatus
Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
Security.SecTrustSetAnchorCertificates.restype = OSStatus
    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
Security.SecTrustEvaluate.restype = OSStatus
Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
Security.SecTrustGetCertificateCount.restype = CFIndex
Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
Security.SSLCreateContext.argtypes = [
CFAllocatorRef,
SSLProtocolSide,
SSLConnectionType,
]
Security.SSLCreateContext.restype = SSLContextRef
Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
Security.SSLSetSessionOption.restype = OSStatus
Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMin.restype = OSStatus
Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMax.restype = OSStatus
try:
Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetALPNProtocols.restype = OSStatus
except AttributeError:
# Supported only in 10.12+
pass
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SSLReadFunc = SSLReadFunc
Security.SSLWriteFunc = SSLWriteFunc
Security.SSLContextRef = SSLContextRef
Security.SSLProtocol = SSLProtocol
Security.SSLCipherSuite = SSLCipherSuite
Security.SecIdentityRef = SecIdentityRef
Security.SecKeychainRef = SecKeychainRef
Security.SecTrustRef = SecTrustRef
Security.SecTrustResultType = SecTrustResultType
Security.SecExternalFormat = SecExternalFormat
Security.OSStatus = OSStatus
Security.kSecImportExportPassphrase = CFStringRef.in_dll(
Security, "kSecImportExportPassphrase"
)
Security.kSecImportItemIdentity = CFStringRef.in_dll(
Security, "kSecImportItemIdentity"
)
# CoreFoundation time!
CoreFoundation.CFRetain.argtypes = [CFTypeRef]
CoreFoundation.CFRetain.restype = CFTypeRef
CoreFoundation.CFRelease.argtypes = [CFTypeRef]
CoreFoundation.CFRelease.restype = None
CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
CoreFoundation.CFGetTypeID.restype = CFTypeID
CoreFoundation.CFStringCreateWithCString.argtypes = [
CFAllocatorRef,
c_char_p,
CFStringEncoding,
]
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
CoreFoundation.CFStringGetCString.argtypes = [
CFStringRef,
c_char_p,
CFIndex,
CFStringEncoding,
]
CoreFoundation.CFStringGetCString.restype = c_bool
CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
CoreFoundation.CFDataCreate.restype = CFDataRef
CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
CoreFoundation.CFDataGetLength.restype = CFIndex
CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
CoreFoundation.CFDictionaryCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
POINTER(CFTypeRef),
CFIndex,
CFDictionaryKeyCallBacks,
CFDictionaryValueCallBacks,
]
CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
CoreFoundation.CFArrayCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreate.restype = CFArrayRef
CoreFoundation.CFArrayCreateMutable.argtypes = [
CFAllocatorRef,
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
CoreFoundation.CFArrayAppendValue.restype = None
CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
CoreFoundation.CFArrayGetCount.restype = CFIndex
CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
CoreFoundation, "kCFAllocatorDefault"
)
CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeArrayCallBacks"
)
CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
)
CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryValueCallBacks"
)
CoreFoundation.CFTypeRef = CFTypeRef
CoreFoundation.CFArrayRef = CFArrayRef
CoreFoundation.CFStringRef = CFStringRef
CoreFoundation.CFDictionaryRef = CFDictionaryRef
except AttributeError:
raise ImportError("Error initializing ctypes")
class CFConst(object):
"""
A class object that acts as essentially a namespace for CoreFoundation
constants.
"""
kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
class SecurityConst(object):
"""
A class object that acts as essentially a namespace for Security constants.
"""
kSSLSessionOptionBreakOnServerAuth = 0
kSSLProtocol2 = 1
kSSLProtocol3 = 2
kTLSProtocol1 = 4
kTLSProtocol11 = 7
kTLSProtocol12 = 8
# SecureTransport does not support TLS 1.3 even if there's a constant for it
kTLSProtocol13 = 10
kTLSProtocolMaxSupported = 999
kSSLClientSide = 1
kSSLStreamType = 0
kSecFormatPEMSequence = 10
kSecTrustResultInvalid = 0
kSecTrustResultProceed = 1
# This gap is present on purpose: this was kSecTrustResultConfirm, which
# is deprecated.
kSecTrustResultDeny = 3
kSecTrustResultUnspecified = 4
kSecTrustResultRecoverableTrustFailure = 5
kSecTrustResultFatalTrustFailure = 6
kSecTrustResultOtherError = 7
errSSLProtocol = -9800
errSSLWouldBlock = -9803
errSSLClosedGraceful = -9805
errSSLClosedNoNotify = -9816
errSSLClosedAbort = -9806
errSSLXCertChainInvalid = -9807
errSSLCrypto = -9809
errSSLInternal = -9810
errSSLCertExpired = -9814
errSSLCertNotYetValid = -9815
errSSLUnknownRootCert = -9812
errSSLNoRootCert = -9813
errSSLHostNameMismatch = -9843
errSSLPeerHandshakeFail = -9824
errSSLPeerUserCancelled = -9839
errSSLWeakPeerEphemeralDHKey = -9850
errSSLServerAuthCompleted = -9841
errSSLRecordOverflow = -9847
errSecVerifyFailed = -67808
errSecNoTrustSettings = -25263
errSecItemNotFound = -25300
errSecInvalidTrustSettings = -25262
# Cipher suites. We only pick the ones our default cipher string allows.
# Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
TLS_AES_128_GCM_SHA256 = 0x1301
TLS_AES_256_GCM_SHA384 = 0x1302
TLS_AES_128_CCM_8_SHA256 = 0x1305
TLS_AES_128_CCM_SHA256 = 0x1304
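# Small sanity check (macOS only, illustrative): confirms the bindings loaded
# and reports whether the optional ALPN entry point exists on this system.
if __name__ == "__main__":
    print("ALPN available:", hasattr(Security, "SSLSetALPNProtocols"))
    print("certificate type id:", Security.SecCertificateGetTypeID())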
| 17,632 | Python | .py | 423 | 36.34279 | 96 | 0.757611 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,736 | ssltransport.py | rembo10_headphones/lib/urllib3/util/ssltransport.py |
import io
import socket
import ssl
from ..exceptions import ProxySchemeUnsupported
from ..packages import six
SSL_BLOCKSIZE = 16384
class SSLTransport:
"""
The SSLTransport wraps an existing socket and establishes an SSL connection.
Contrary to Python's implementation of SSLSocket, it allows you to chain
multiple TLS connections together. It's particularly useful if you need to
implement TLS within TLS.
The class supports most of the socket API operations.
"""
@staticmethod
def _validate_ssl_context_for_tls_in_tls(ssl_context):
"""
Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
for TLS in TLS.
        The only requirement is that the ssl_context provides the 'wrap_bio'
        method.
"""
if not hasattr(ssl_context, "wrap_bio"):
if six.PY2:
raise ProxySchemeUnsupported(
"TLS in TLS requires SSLContext.wrap_bio() which isn't "
"supported on Python 2"
)
else:
raise ProxySchemeUnsupported(
"TLS in TLS requires SSLContext.wrap_bio() which isn't "
"available on non-native SSLContext"
)
def __init__(
self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
):
"""
Create an SSLTransport around socket using the provided ssl_context.
"""
self.incoming = ssl.MemoryBIO()
self.outgoing = ssl.MemoryBIO()
self.suppress_ragged_eofs = suppress_ragged_eofs
self.socket = socket
self.sslobj = ssl_context.wrap_bio(
self.incoming, self.outgoing, server_hostname=server_hostname
)
# Perform initial handshake.
self._ssl_io_loop(self.sslobj.do_handshake)
def __enter__(self):
return self
def __exit__(self, *_):
self.close()
def fileno(self):
return self.socket.fileno()
def read(self, len=1024, buffer=None):
return self._wrap_ssl_read(len, buffer)
def recv(self, len=1024, flags=0):
if flags != 0:
raise ValueError("non-zero flags not allowed in calls to recv")
return self._wrap_ssl_read(len)
def recv_into(self, buffer, nbytes=None, flags=0):
if flags != 0:
raise ValueError("non-zero flags not allowed in calls to recv_into")
if buffer and (nbytes is None):
nbytes = len(buffer)
elif nbytes is None:
nbytes = 1024
return self.read(nbytes, buffer)
def sendall(self, data, flags=0):
if flags != 0:
raise ValueError("non-zero flags not allowed in calls to sendall")
count = 0
with memoryview(data) as view, view.cast("B") as byte_view:
amount = len(byte_view)
while count < amount:
v = self.send(byte_view[count:])
count += v
def send(self, data, flags=0):
if flags != 0:
raise ValueError("non-zero flags not allowed in calls to send")
response = self._ssl_io_loop(self.sslobj.write, data)
return response
def makefile(
self, mode="r", buffering=None, encoding=None, errors=None, newline=None
):
"""
Python's httpclient uses makefile and buffered io when reading HTTP
messages and we need to support it.
This is unfortunately a copy and paste of socket.py makefile with small
changes to point to the socket directly.
"""
if not set(mode) <= {"r", "w", "b"}:
raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = socket.SocketIO(self, rawmode)
self.socket._io_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text
def unwrap(self):
self._ssl_io_loop(self.sslobj.unwrap)
def close(self):
self.socket.close()
def getpeercert(self, binary_form=False):
return self.sslobj.getpeercert(binary_form)
def version(self):
return self.sslobj.version()
def cipher(self):
return self.sslobj.cipher()
def selected_alpn_protocol(self):
return self.sslobj.selected_alpn_protocol()
def selected_npn_protocol(self):
return self.sslobj.selected_npn_protocol()
def shared_ciphers(self):
return self.sslobj.shared_ciphers()
def compression(self):
return self.sslobj.compression()
def settimeout(self, value):
self.socket.settimeout(value)
def gettimeout(self):
return self.socket.gettimeout()
def _decref_socketios(self):
self.socket._decref_socketios()
def _wrap_ssl_read(self, len, buffer=None):
try:
return self._ssl_io_loop(self.sslobj.read, len, buffer)
except ssl.SSLError as e:
if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
return 0 # eof, return 0.
else:
raise
def _ssl_io_loop(self, func, *args):
"""Performs an I/O loop between incoming/outgoing and the socket."""
should_loop = True
ret = None
while should_loop:
errno = None
try:
ret = func(*args)
except ssl.SSLError as e:
if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
# WANT_READ, and WANT_WRITE are expected, others are not.
raise e
errno = e.errno
buf = self.outgoing.read()
self.socket.sendall(buf)
if errno is None:
should_loop = False
elif errno == ssl.SSL_ERROR_WANT_READ:
buf = self.socket.recv(SSL_BLOCKSIZE)
if buf:
self.incoming.write(buf)
else:
self.incoming.write_eof()
return ret
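# Single-layer sketch (illustrative): one plain TLS connection shown here;
# tunneling TLS in TLS would wrap a second SSLTransport around the first one
# in exactly the same way.
if __name__ == "__main__":
    import socket
    ctx = ssl.create_default_context()
    raw = socket.create_connection(("example.com", 443))
    with SSLTransport(raw, ctx, server_hostname="example.com") as tls:
        tls.sendall(b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        print(tls.recv(64))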
| 6,895 | Python | .py | 180 | 28.188889 | 86 | 0.591399 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,737 | queue.py | rembo10_headphones/lib/urllib3/util/queue.py |
import collections
from ..packages import six
from ..packages.six.moves import queue
if six.PY2:
# Queue is imported for side effects on MS Windows. See issue #229.
import Queue as _unused_module_Queue # noqa: F401
class LifoQueue(queue.Queue):
def _init(self, _):
self.queue = collections.deque()
def _qsize(self, len=len):
return len(self.queue)
def _put(self, item):
self.queue.append(item)
def _get(self):
return self.queue.pop()
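# --- Illustrative sketch (not part of urllib3's API): LIFO vs. FIFO ---
# Overriding only _init/_put/_get is enough to flip Queue's ordering, because
# the public put()/get() methods delegate to these hooks under Queue's own
# locking. Runnable via `python -m urllib3.util.queue`.
if __name__ == "__main__":
    q = LifoQueue()
    for item in (1, 2, 3):
        q.put(item)
    assert [q.get(), q.get(), q.get()] == [3, 2, 1]  # last in, first out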
| 498 | Python | .py | 15 | 28 | 71 | 0.678571 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |

| 8,738 | proxy.py | rembo10_headphones/lib/urllib3/util/proxy.py |
from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
def connection_requires_http_tunnel(
proxy_url=None, proxy_config=None, destination_scheme=None
):
"""
Returns True if the connection requires an HTTP CONNECT through the proxy.
:param URL proxy_url:
URL of the proxy.
:param ProxyConfig proxy_config:
Proxy configuration from poolmanager.py
:param str destination_scheme:
The scheme of the destination. (i.e https, http, etc)
"""
# If we're not using a proxy, no way to use a tunnel.
if proxy_url is None:
return False
    # HTTP destinations never require tunneling; we always forward.
if destination_scheme == "http":
return False
# Support for forwarding with HTTPS proxies and HTTPS destinations.
if (
proxy_url.scheme == "https"
and proxy_config
and proxy_config.use_forwarding_for_https
):
return False
# Otherwise always use a tunnel.
return True
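# --- Illustrative sketch (not part of urllib3's API): the three branches ---
# A quick demonstration of connection_requires_http_tunnel, using parse_url to
# build the proxy URL and a stand-in namedtuple for the real ProxyConfig.
def _demo_tunnel_decisions():
    from collections import namedtuple
    from .url import parse_url
    FakeProxyConfig = namedtuple("FakeProxyConfig", ["use_forwarding_for_https"])
    http_proxy = parse_url("http://proxy:3128")
    https_proxy = parse_url("https://proxy:3128")
    assert not connection_requires_http_tunnel(None, None, "https")       # no proxy
    assert not connection_requires_http_tunnel(http_proxy, None, "http")  # forwarded
    assert connection_requires_http_tunnel(http_proxy, None, "https")     # CONNECT
    assert not connection_requires_http_tunnel(                           # forwarding
        https_proxy, FakeProxyConfig(use_forwarding_for_https=True), "https"
    )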
def create_proxy_ssl_context(
ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
):
"""
Generates a default proxy ssl context if one hasn't been provided by the
user.
"""
ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(ssl_version),
cert_reqs=resolve_cert_reqs(cert_reqs),
)
if (
not ca_certs
and not ca_cert_dir
and not ca_cert_data
and hasattr(ssl_context, "load_default_certs")
):
ssl_context.load_default_certs()
return ssl_context
| 1,605 | Python | .py | 47 | 28 | 80 | 0.675065 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |

| 8,739 | response.py | rembo10_headphones/lib/urllib3/util/response.py |
from __future__ import absolute_import
from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
from ..exceptions import HeaderParsingError
from ..packages.six.moves import http_client as httplib
def is_fp_closed(obj):
"""
Checks whether a given file-like object is closed.
:param obj:
The file-like object to check.
"""
try:
# Check `isclosed()` first, in case Python3 doesn't set `closed`.
# GH Issue #928
return obj.isclosed()
except AttributeError:
pass
try:
# Check via the official file-like-object way.
return obj.closed
except AttributeError:
pass
try:
# Check if the object is a container for another file-like object that
# gets released on exhaustion (e.g. HTTPResponse).
return obj.fp is None
except AttributeError:
pass
raise ValueError("Unable to determine whether fp is closed.")
def assert_header_parsing(headers):
"""
Asserts whether all headers have been successfully parsed.
Extracts encountered errors from the result of parsing headers.
Only works on Python 3.
:param http.client.HTTPMessage headers: Headers to verify.
:raises urllib3.exceptions.HeaderParsingError:
If parsing errors are found.
"""
# This will fail silently if we pass in the wrong kind of parameter.
# To make debugging easier add an explicit check.
if not isinstance(headers, httplib.HTTPMessage):
raise TypeError("expected httplib.Message, got {0}.".format(type(headers)))
defects = getattr(headers, "defects", None)
get_payload = getattr(headers, "get_payload", None)
unparsed_data = None
if get_payload:
# get_payload is actually email.message.Message.get_payload;
# we're only interested in the result if it's not a multipart message
if not headers.is_multipart():
payload = get_payload()
if isinstance(payload, (bytes, str)):
unparsed_data = payload
if defects:
# httplib is assuming a response body is available
# when parsing headers even when httplib only sends
        # header data to parse_headers(). This results in
# defects on multipart responses in particular.
# See: https://github.com/urllib3/urllib3/issues/800
# So we ignore the following defects:
# - StartBoundaryNotFoundDefect:
# The claimed start boundary was never found.
# - MultipartInvariantViolationDefect:
# A message claimed to be a multipart but no subparts were found.
defects = [
defect
for defect in defects
if not isinstance(
defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
)
]
if defects or unparsed_data:
raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
def is_response_to_head(response):
"""
    Checks whether the request that produced this response was a HEAD request.
Handles the quirks of AppEngine.
:param http.client.HTTPResponse response:
Response to check if the originating request
used 'HEAD' as a method.
"""
# FIXME: Can we do this somehow without accessing private httplib _method?
method = response._method
if isinstance(method, int): # Platform-specific: Appengine
return method == 3
return method.upper() == "HEAD"
| 3,510 | Python | .py | 84 | 34.416667 | 88 | 0.682633 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |

| 8,740 | wait.py | rembo10_headphones/lib/urllib3/util/wait.py |
import errno
import select
import sys
from functools import partial
try:
from time import monotonic
except ImportError:
from time import time as monotonic
__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
class NoWayToWaitForSocketError(Exception):
pass
# How should we wait on sockets?
#
# There are two types of APIs you can use for waiting on sockets: the fancy
# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
# select/poll. The stateful APIs are more efficient when you have lots of
# sockets to keep track of, because you can set them up once and then use them
# lots of times. But we only ever want to wait on a single socket at a time
# and don't want to keep track of state, so the stateless APIs are actually
# more efficient. So we want to use select() or poll().
#
# Now, how do we choose between select() and poll()? On traditional Unixes,
# select() has a strange calling convention that makes it slow, or fail
# altogether, for high-numbered file descriptors. The point of poll() is to fix
# that, so on Unixes, we prefer poll().
#
# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
# for it), but that's OK, because on Windows, select() doesn't have this
# strange calling convention; plain select() works fine.
#
# So: on Windows we use select(), and everywhere else we use poll(). We also
# fall back to select() in case poll() is somehow broken or missing.
if sys.version_info >= (3, 5):
# Modern Python, that retries syscalls by default
def _retry_on_intr(fn, timeout):
return fn(timeout)
else:
# Old and broken Pythons.
def _retry_on_intr(fn, timeout):
if timeout is None:
deadline = float("inf")
else:
deadline = monotonic() + timeout
while True:
try:
return fn(timeout)
# OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
except (OSError, select.error) as e:
# 'e.args[0]' incantation works for both OSError and select.error
if e.args[0] != errno.EINTR:
raise
else:
timeout = deadline - monotonic()
if timeout < 0:
timeout = 0
if timeout == float("inf"):
timeout = None
continue
def select_wait_for_socket(sock, read=False, write=False, timeout=None):
if not read and not write:
raise RuntimeError("must specify at least one of read=True, write=True")
rcheck = []
wcheck = []
if read:
rcheck.append(sock)
if write:
wcheck.append(sock)
# When doing a non-blocking connect, most systems signal success by
# marking the socket writable. Windows, though, signals success by marking
# it as "exceptional". We paper over the difference by checking the write
# sockets for both conditions. (The stdlib selectors module does the same
# thing.)
fn = partial(select.select, rcheck, wcheck, wcheck)
rready, wready, xready = _retry_on_intr(fn, timeout)
return bool(rready or wready or xready)
def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
if not read and not write:
raise RuntimeError("must specify at least one of read=True, write=True")
mask = 0
if read:
mask |= select.POLLIN
if write:
mask |= select.POLLOUT
poll_obj = select.poll()
poll_obj.register(sock, mask)
# For some reason, poll() takes timeout in milliseconds
def do_poll(t):
if t is not None:
t *= 1000
return poll_obj.poll(t)
return bool(_retry_on_intr(do_poll, timeout))
def null_wait_for_socket(*args, **kwargs):
raise NoWayToWaitForSocketError("no select-equivalent available")
def _have_working_poll():
# Apparently some systems have a select.poll that fails as soon as you try
# to use it, either due to strange configuration or broken monkeypatching
# from libraries like eventlet/greenlet.
try:
poll_obj = select.poll()
_retry_on_intr(poll_obj.poll, 0)
except (AttributeError, OSError):
return False
else:
return True
def wait_for_socket(*args, **kwargs):
# We delay choosing which implementation to use until the first time we're
# called. We could do it at import time, but then we might make the wrong
# decision if someone goes wild with monkeypatching select.poll after
# we're imported.
global wait_for_socket
if _have_working_poll():
wait_for_socket = poll_wait_for_socket
elif hasattr(select, "select"):
wait_for_socket = select_wait_for_socket
else: # Platform-specific: Appengine.
wait_for_socket = null_wait_for_socket
return wait_for_socket(*args, **kwargs)
def wait_for_read(sock, timeout=None):
"""Waits for reading to be available on a given socket.
Returns True if the socket is readable, or False if the timeout expired.
"""
return wait_for_socket(sock, read=True, timeout=timeout)
def wait_for_write(sock, timeout=None):
"""Waits for writing to be available on a given socket.
    Returns True if the socket is writable, or False if the timeout expired.
"""
return wait_for_socket(sock, write=True, timeout=timeout)
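# --- Illustrative sketch (not part of urllib3's API): waiting on a socketpair ---
# A socketpair makes the semantics visible: a fresh socket is writable at once
# (its send buffer is empty) but only readable after the peer writes.
# Runnable via `python -m urllib3.util.wait`.
if __name__ == "__main__":
    import socket
    a, b = socket.socketpair()
    assert not wait_for_read(a, timeout=0)  # nothing buffered yet
    assert wait_for_write(a, timeout=0)     # room in the send buffer
    b.sendall(b"x")
    assert wait_for_read(a, timeout=1)      # peer data has arrived
    a.close()
    b.close()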
| 5,404 | Python | .py | 127 | 36.244094 | 81 | 0.674157 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |

| 8,741 | url.py | rembo10_headphones/lib/urllib3/util/url.py |
from __future__ import absolute_import
import re
from collections import namedtuple
from ..exceptions import LocationParseError
from ..packages import six
url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
# We only want to normalize urls with an HTTP(S) scheme.
# urllib3 infers URLs without a scheme (None) to be http.
NORMALIZABLE_SCHEMES = ("http", "https", None)
# Almost all of these patterns were derived from the
# 'rfc3986' module: https://github.com/python-hyper/rfc3986
PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
URI_RE = re.compile(
r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
r"(?://([^\\/?#]*))?"
r"([^?#]*)"
r"(?:\?([^#]*))?"
r"(?:#(.*))?$",
re.UNICODE | re.DOTALL,
)
IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
HEX_PAT = "[0-9A-Fa-f]{1,4}"
LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
_variations = [
# 6( h16 ":" ) ls32
"(?:%(hex)s:){6}%(ls32)s",
# "::" 5( h16 ":" ) ls32
"::(?:%(hex)s:){5}%(ls32)s",
# [ h16 ] "::" 4( h16 ":" ) ls32
"(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
# [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
"(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
# [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
"(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
# [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
"(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
# [ *4( h16 ":" ) h16 ] "::" ls32
"(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
# [ *5( h16 ":" ) h16 ] "::" h16
"(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
# [ *6( h16 ":" ) h16 ] "::"
"(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
IPV4_RE = re.compile("^" + IPV4_PAT + "$")
IPV6_RE = re.compile("^" + IPV6_PAT + "$")
IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
REG_NAME_PAT,
IPV4_PAT,
IPV6_ADDRZ_PAT,
)
_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)
UNRESERVED_CHARS = set(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
)
SUB_DELIM_CHARS = set("!$&'()*+,;=")
USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"}
PATH_CHARS = USERINFO_CHARS | {"@", "/"}
QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
class Url(namedtuple("Url", url_attrs)):
"""
Data structure for representing an HTTP URL. Used as a return value for
:func:`parse_url`. Both the scheme and host are normalized as they are
both case-insensitive according to RFC 3986.
"""
__slots__ = ()
def __new__(
cls,
scheme=None,
auth=None,
host=None,
port=None,
path=None,
query=None,
fragment=None,
):
if path and not path.startswith("/"):
path = "/" + path
if scheme is not None:
scheme = scheme.lower()
return super(Url, cls).__new__(
cls, scheme, auth, host, port, path, query, fragment
)
@property
def hostname(self):
"""For backwards-compatibility with urlparse. We're nice like that."""
return self.host
@property
def request_uri(self):
"""Absolute path including the query string."""
uri = self.path or "/"
if self.query is not None:
uri += "?" + self.query
return uri
@property
def netloc(self):
"""Network location including host and port"""
if self.port:
return "%s:%d" % (self.host, self.port)
return self.host
@property
def url(self):
"""
Convert self into a url
This function should more or less round-trip with :func:`.parse_url`. The
returned url may not be exactly the same as the url inputted to
:func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
with a blank port will have : removed).
Example: ::
>>> U = parse_url('http://google.com/mail/')
>>> U.url
'http://google.com/mail/'
>>> Url('http', 'username:password', 'host.com', 80,
... '/path', 'query', 'fragment').url
'http://username:password@host.com:80/path?query#fragment'
"""
scheme, auth, host, port, path, query, fragment = self
url = u""
# We use "is not None" we want things to happen with empty strings (or 0 port)
if scheme is not None:
url += scheme + u"://"
if auth is not None:
url += auth + u"@"
if host is not None:
url += host
if port is not None:
url += u":" + str(port)
if path is not None:
url += path
if query is not None:
url += u"?" + query
if fragment is not None:
url += u"#" + fragment
return url
def __str__(self):
return self.url
def split_first(s, delims):
"""
.. deprecated:: 1.25
Given a string and an iterable of delimiters, split on the first found
delimiter. Return two split parts and the matched delimiter.
If not found, then the first part is the full input string.
Example::
>>> split_first('foo/bar?baz', '?/=')
('foo', 'bar?baz', '/')
>>> split_first('foo/bar?baz', '123')
('foo/bar?baz', '', None)
    Scales linearly with the number of delims; not ideal for a large number of delims.
"""
min_idx = None
min_delim = None
for d in delims:
idx = s.find(d)
if idx < 0:
continue
if min_idx is None or idx < min_idx:
min_idx = idx
min_delim = d
if min_idx is None or min_idx < 0:
return s, "", None
return s[:min_idx], s[min_idx + 1 :], min_delim
def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):
"""Percent-encodes a URI component without reapplying
onto an already percent-encoded component.
"""
if component is None:
return component
component = six.ensure_text(component)
# Normalize existing percent-encoded bytes.
# Try to see if the component we're encoding is already percent-encoded
# so we can skip all '%' characters but still encode all others.
component, percent_encodings = PERCENT_RE.subn(
lambda match: match.group(0).upper(), component
)
uri_bytes = component.encode("utf-8", "surrogatepass")
is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
encoded_component = bytearray()
for i in range(0, len(uri_bytes)):
# Will return a single character bytestring on both Python 2 & 3
byte = uri_bytes[i : i + 1]
byte_ord = ord(byte)
if (is_percent_encoded and byte == b"%") or (
byte_ord < 128 and byte.decode() in allowed_chars
):
encoded_component += byte
continue
encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
return encoded_component.decode(encoding)
def _remove_path_dot_segments(path):
# See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
segments = path.split("/") # Turn the path into a list of segments
output = [] # Initialize the variable to use to store output
for segment in segments:
# '.' is the current directory, so ignore it, it is superfluous
if segment == ".":
continue
# Anything other than '..', should be appended to the output
elif segment != "..":
output.append(segment)
# In this case segment == '..', if we can, we should pop the last
# element
elif output:
output.pop()
# If the path starts with '/' and the output is empty or the first string
# is non-empty
if path.startswith("/") and (not output or output[0]):
output.insert(0, "")
# If the path starts with '/.' or '/..' ensure we add one more empty
# string to add a trailing '/'
if path.endswith(("/.", "/..")):
output.append("")
return "/".join(output)
def _normalize_host(host, scheme):
if host:
if isinstance(host, six.binary_type):
host = six.ensure_str(host)
if scheme in NORMALIZABLE_SCHEMES:
is_ipv6 = IPV6_ADDRZ_RE.match(host)
if is_ipv6:
match = ZONE_ID_RE.search(host)
if match:
start, end = match.span(1)
zone_id = host[start:end]
if zone_id.startswith("%25") and zone_id != "%25":
zone_id = zone_id[3:]
else:
zone_id = zone_id[1:]
zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS)
return host[:start].lower() + zone_id + host[end:]
else:
return host.lower()
elif not IPV4_RE.match(host):
return six.ensure_str(
b".".join([_idna_encode(label) for label in host.split(".")])
)
return host
def _idna_encode(name):
    if name and any([ord(x) >= 128 for x in name]):
try:
import idna
except ImportError:
six.raise_from(
LocationParseError("Unable to parse URL without the 'idna' module"),
None,
)
try:
return idna.encode(name.lower(), strict=True, std3_rules=True)
except idna.IDNAError:
six.raise_from(
LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None
)
return name.lower().encode("ascii")
def _encode_target(target):
"""Percent-encodes a request target so that there are no invalid characters"""
path, query = TARGET_RE.match(target).groups()
target = _encode_invalid_chars(path, PATH_CHARS)
query = _encode_invalid_chars(query, QUERY_CHARS)
if query is not None:
target += "?" + query
return target
def parse_url(url):
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
This parser is RFC 3986 compliant.
The parser logic and helper functions are based heavily on
work done in the ``rfc3986`` module.
:param str url: URL to parse into a :class:`.Url` namedtuple.
Partly backwards-compatible with :mod:`urlparse`.
Example::
>>> parse_url('http://google.com/mail/')
Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
>>> parse_url('google.com:80')
Url(scheme=None, host='google.com', port=80, path=None, ...)
>>> parse_url('/foo?bar')
Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
"""
if not url:
# Empty
return Url()
source_url = url
if not SCHEME_RE.search(url):
url = "//" + url
try:
scheme, authority, path, query, fragment = URI_RE.match(url).groups()
normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES
if scheme:
scheme = scheme.lower()
if authority:
auth, _, host_port = authority.rpartition("@")
auth = auth or None
host, port = _HOST_PORT_RE.match(host_port).groups()
if auth and normalize_uri:
auth = _encode_invalid_chars(auth, USERINFO_CHARS)
if port == "":
port = None
else:
auth, host, port = None, None, None
if port is not None:
port = int(port)
if not (0 <= port <= 65535):
raise LocationParseError(url)
host = _normalize_host(host, scheme)
if normalize_uri and path:
path = _remove_path_dot_segments(path)
path = _encode_invalid_chars(path, PATH_CHARS)
if normalize_uri and query:
query = _encode_invalid_chars(query, QUERY_CHARS)
if normalize_uri and fragment:
fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
except (ValueError, AttributeError):
return six.raise_from(LocationParseError(source_url), None)
# For the sake of backwards compatibility we put empty
# string values for path if there are any defined values
# beyond the path in the URL.
# TODO: Remove this when we break backwards compatibility.
if not path:
if query is not None or fragment is not None:
path = ""
else:
path = None
# Ensure that each part of the URL is a `str` for
# backwards compatibility.
if isinstance(url, six.text_type):
ensure_func = six.ensure_text
else:
ensure_func = six.ensure_str
def ensure_type(x):
return x if x is None else ensure_func(x)
return Url(
scheme=ensure_type(scheme),
auth=ensure_type(auth),
host=ensure_type(host),
port=port,
path=ensure_type(path),
query=ensure_type(query),
fragment=ensure_type(fragment),
)
def get_host(url):
"""
Deprecated. Use :func:`parse_url` instead.
"""
p = parse_url(url)
return p.scheme or "http", p.hostname, p.port
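# --- Illustrative sketch (not part of urllib3's API): normalization in action ---
# parse_url does more than split: for http(s) URLs it lowercases the scheme and
# host, removes dot segments, and percent-encodes stray characters.
# Runnable via `python -m urllib3.util.url`.
if __name__ == "__main__":
    u = parse_url("HTTP://Example.COM/a/./b/../c?q=a b")
    assert u.scheme == "http" and u.host == "example.com"
    assert u.path == "/a/c" and u.query == "q=a%20b"
    assert u.url == "http://example.com/a/c?q=a%20b"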
| 14,030 | Python | .py | 354 | 31.768362 | 88 | 0.560671 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |

| 8,742 | request.py | rembo10_headphones/lib/urllib3/util/request.py |
from __future__ import absolute_import
from base64 import b64encode
from ..exceptions import UnrewindableBodyError
from ..packages.six import b, integer_types
# Pass as a value within ``headers`` to skip
# emitting some HTTP headers that are added automatically.
# The only headers that are supported are ``Accept-Encoding``,
# ``Host``, and ``User-Agent``.
SKIP_HEADER = "@@@SKIP_HEADER@@@"
SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
ACCEPT_ENCODING = "gzip,deflate"
try:
import brotli as _unused_module_brotli # noqa: F401
except ImportError:
pass
else:
ACCEPT_ENCODING += ",br"
_FAILEDTELL = object()
def make_headers(
keep_alive=None,
accept_encoding=None,
user_agent=None,
basic_auth=None,
proxy_basic_auth=None,
disable_cache=None,
):
"""
Shortcuts for generating request headers.
:param keep_alive:
If ``True``, adds 'connection: keep-alive' header.
:param accept_encoding:
Can be a boolean, list, or string.
``True`` translates to 'gzip,deflate'.
List will get joined by comma.
String will be used as provided.
:param user_agent:
String representing the user-agent you want, such as
"python-urllib3/0.6"
:param basic_auth:
Colon-separated username:password string for 'authorization: basic ...'
auth header.
:param proxy_basic_auth:
Colon-separated username:password string for 'proxy-authorization: basic ...'
auth header.
:param disable_cache:
If ``True``, adds 'cache-control: no-cache' header.
Example::
>>> make_headers(keep_alive=True, user_agent="Batman/1.0")
{'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
>>> make_headers(accept_encoding=True)
{'accept-encoding': 'gzip,deflate'}
"""
headers = {}
if accept_encoding:
if isinstance(accept_encoding, str):
pass
elif isinstance(accept_encoding, list):
accept_encoding = ",".join(accept_encoding)
else:
accept_encoding = ACCEPT_ENCODING
headers["accept-encoding"] = accept_encoding
if user_agent:
headers["user-agent"] = user_agent
if keep_alive:
headers["connection"] = "keep-alive"
if basic_auth:
headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8")
if proxy_basic_auth:
headers["proxy-authorization"] = "Basic " + b64encode(
b(proxy_basic_auth)
).decode("utf-8")
if disable_cache:
headers["cache-control"] = "no-cache"
return headers
def set_file_position(body, pos):
"""
    If a position is provided, move the file to that point.
Otherwise, we'll attempt to record a position for future use.
"""
if pos is not None:
rewind_body(body, pos)
elif getattr(body, "tell", None) is not None:
try:
pos = body.tell()
except (IOError, OSError):
# This differentiates from None, allowing us to catch
# a failed `tell()` later when trying to rewind the body.
pos = _FAILEDTELL
return pos
def rewind_body(body, body_pos):
"""
Attempt to rewind body to a certain position.
Primarily used for request redirects and retries.
:param body:
File-like object that supports seek.
    :param int body_pos:
Position to seek to in file.
"""
body_seek = getattr(body, "seek", None)
if body_seek is not None and isinstance(body_pos, integer_types):
try:
body_seek(body_pos)
except (IOError, OSError):
raise UnrewindableBodyError(
"An error occurred when rewinding request body for redirect/retry."
)
elif body_pos is _FAILEDTELL:
raise UnrewindableBodyError(
"Unable to record file position for rewinding "
"request body during a redirect/retry."
)
else:
raise ValueError(
"body_pos must be of type integer, instead it was %s." % type(body_pos)
)
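# --- Illustrative sketch (not part of urllib3's API): recording and rewinding ---
# set_file_position records where a body starts; rewind_body seeks back to that
# point so a redirect or retry can re-send the same bytes.
# Runnable via `python -m urllib3.util.request`.
if __name__ == "__main__":
    import io
    body = io.BytesIO(b"payload")
    pos = set_file_position(body, None)  # records tell() == 0
    body.read()                          # the first attempt consumes the body
    rewind_body(body, pos)
    assert body.read() == b"payload"     # ready to send again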
| 4,123 | Python | .py | 115 | 28.904348 | 86 | 0.640201 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |

| 8,743 | ssl_.py | rembo10_headphones/lib/urllib3/util/ssl_.py |
from __future__ import absolute_import
import hmac
import os
import sys
import warnings
from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256
from ..exceptions import (
InsecurePlatformWarning,
ProxySchemeUnsupported,
SNIMissingWarning,
SSLError,
)
from ..packages import six
from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
SSLContext = None
SSLTransport = None
HAS_SNI = False
IS_PYOPENSSL = False
IS_SECURETRANSPORT = False
ALPN_PROTOCOLS = ["http/1.1"]
# Maps the length of a digest to a possible hash function producing this digest
HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
def _const_compare_digest_backport(a, b):
"""
Compare two digests of equal length in constant time.
The digests must be of type str/bytes.
Returns True if the digests match, and False otherwise.
"""
result = abs(len(a) - len(b))
for left, right in zip(bytearray(a), bytearray(b)):
result |= left ^ right
return result == 0
_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
try: # Test for SSL features
import ssl
from ssl import CERT_REQUIRED, wrap_socket
except ImportError:
pass
try:
from ssl import HAS_SNI # Has SNI?
except ImportError:
pass
try:
from .ssltransport import SSLTransport
except ImportError:
pass
try: # Platform-specific: Python 3.6
from ssl import PROTOCOL_TLS
PROTOCOL_SSLv23 = PROTOCOL_TLS
except ImportError:
try:
from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
PROTOCOL_SSLv23 = PROTOCOL_TLS
except ImportError:
PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
try:
from ssl import PROTOCOL_TLS_CLIENT
except ImportError:
PROTOCOL_TLS_CLIENT = PROTOCOL_TLS
try:
from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
except ImportError:
OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
OP_NO_COMPRESSION = 0x20000
try: # OP_NO_TICKET was added in Python 3.6
from ssl import OP_NO_TICKET
except ImportError:
OP_NO_TICKET = 0x4000
# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
# security,
# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
# - disable NULL authentication, MD5 MACs, DSS, and other
# insecure ciphers for security reasons.
# - NOTE: TLS 1.3 cipher suites are managed through a different interface
# not exposed by CPython (yet!) and are enabled by default if they're available.
DEFAULT_CIPHERS = ":".join(
[
"ECDHE+AESGCM",
"ECDHE+CHACHA20",
"DHE+AESGCM",
"DHE+CHACHA20",
"ECDH+AESGCM",
"DH+AESGCM",
"ECDH+AES",
"DH+AES",
"RSA+AESGCM",
"RSA+AES",
"!aNULL",
"!eNULL",
"!MD5",
"!DSS",
]
)
try:
from ssl import SSLContext # Modern SSL?
except ImportError:
class SSLContext(object): # Platform-specific: Python 2
def __init__(self, protocol_version):
self.protocol = protocol_version
# Use default values from a real SSLContext
self.check_hostname = False
self.verify_mode = ssl.CERT_NONE
self.ca_certs = None
self.options = 0
self.certfile = None
self.keyfile = None
self.ciphers = None
def load_cert_chain(self, certfile, keyfile):
self.certfile = certfile
self.keyfile = keyfile
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
self.ca_certs = cafile
if capath is not None:
raise SSLError("CA directories not supported in older Pythons")
if cadata is not None:
raise SSLError("CA data not supported in older Pythons")
def set_ciphers(self, cipher_suite):
self.ciphers = cipher_suite
def wrap_socket(self, socket, server_hostname=None, server_side=False):
warnings.warn(
"A true SSLContext object is not available. This prevents "
"urllib3 from configuring SSL appropriately and may cause "
"certain SSL connections to fail. You can upgrade to a newer "
"version of Python to solve this. For more information, see "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings",
InsecurePlatformWarning,
)
kwargs = {
"keyfile": self.keyfile,
"certfile": self.certfile,
"ca_certs": self.ca_certs,
"cert_reqs": self.verify_mode,
"ssl_version": self.protocol,
"server_side": server_side,
}
return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
def assert_fingerprint(cert, fingerprint):
"""
Checks if given fingerprint matches the supplied certificate.
:param cert:
Certificate as bytes object.
:param fingerprint:
Fingerprint as string of hexdigits, can be interspersed by colons.
"""
fingerprint = fingerprint.replace(":", "").lower()
digest_length = len(fingerprint)
hashfunc = HASHFUNC_MAP.get(digest_length)
if not hashfunc:
raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint))
    # We need encode() here for py32; works on py2 and py3.
fingerprint_bytes = unhexlify(fingerprint.encode())
cert_digest = hashfunc(cert).digest()
if not _const_compare_digest(cert_digest, fingerprint_bytes):
raise SSLError(
'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
fingerprint, hexlify(cert_digest)
)
)
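# --- Illustrative sketch (not part of urllib3's API): pinning by digest ---
# The expected fingerprint is simply the hex digest of the certificate's DER
# bytes; the bytes below are a stand-in, not a real certificate.
def _demo_assert_fingerprint():
    der_cert = b"stand-in DER bytes"
    pin = sha256(der_cert).hexdigest()           # 64 hex chars selects sha256
    assert_fingerprint(der_cert, pin)            # matching digests pass silently
    try:
        assert_fingerprint(der_cert, "00" * 32)  # valid length, wrong digest
    except SSLError:
        pass                                     # mismatches raise SSLError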
def resolve_cert_reqs(candidate):
"""
Resolves the argument to a numeric constant, which can be passed to
the wrap_socket function/method from the ssl module.
Defaults to :data:`ssl.CERT_REQUIRED`.
If given a string it is assumed to be the name of the constant in the
:mod:`ssl` module or its abbreviation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
If it's neither `None` nor a string we assume it is already the numeric
constant which can directly be passed to wrap_socket.
"""
if candidate is None:
return CERT_REQUIRED
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
res = getattr(ssl, "CERT_" + candidate)
return res
return candidate
def resolve_ssl_version(candidate):
"""
    Like :func:`resolve_cert_reqs`, but for the TLS protocol version: resolves
    ``None`` to PROTOCOL_TLS and a string to the matching ``ssl.PROTOCOL_*``
    constant.
"""
if candidate is None:
return PROTOCOL_TLS
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
res = getattr(ssl, "PROTOCOL_" + candidate)
return res
return candidate
def create_urllib3_context(
ssl_version=None, cert_reqs=None, options=None, ciphers=None
):
"""All arguments have the same meaning as ``ssl_wrap_socket``.
By default, this function does a lot of the same work that
``ssl.create_default_context`` does on Python 3.4+. It:
- Disables SSLv2, SSLv3, and compression
- Sets a restricted set of server ciphers
If you wish to enable SSLv3, you can do::
from urllib3.util import ssl_
context = ssl_.create_urllib3_context()
context.options &= ~ssl_.OP_NO_SSLv3
You can do the same to enable compression (substituting ``COMPRESSION``
for ``SSLv3`` in the last line above).
:param ssl_version:
The desired protocol version to use. This will default to
PROTOCOL_SSLv23 which will negotiate the highest protocol that both
the server and your installation of OpenSSL support.
:param cert_reqs:
Whether to require the certificate verification. This defaults to
``ssl.CERT_REQUIRED``.
:param options:
Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
:param ciphers:
Which cipher suites to allow the server to select.
:returns:
Constructed SSLContext object with specified options
:rtype: SSLContext
"""
# PROTOCOL_TLS is deprecated in Python 3.10
if not ssl_version or ssl_version == PROTOCOL_TLS:
ssl_version = PROTOCOL_TLS_CLIENT
context = SSLContext(ssl_version)
context.set_ciphers(ciphers or DEFAULT_CIPHERS)
# Setting the default here, as we may have no ssl module on import
cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
if options is None:
options = 0
# SSLv2 is easily broken and is considered harmful and dangerous
options |= OP_NO_SSLv2
# SSLv3 has several problems and is now dangerous
options |= OP_NO_SSLv3
# Disable compression to prevent CRIME attacks for OpenSSL 1.0+
# (issue #309)
options |= OP_NO_COMPRESSION
# TLSv1.2 only. Unless set explicitly, do not request tickets.
    # This may save some bandwidth on the wire, and although the ticket is
    # encrypted, sending it over the wire carries some risk
    # if the server is not rotating its ticketing keys properly.
options |= OP_NO_TICKET
context.options |= options
# Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
# necessary for conditional client cert authentication with TLS 1.3.
# The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
# versions of Python. We only enable on Python 3.7.4+ or if certificate
# verification is enabled to work around Python issue #37428
# See: https://bugs.python.org/issue37428
if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
context, "post_handshake_auth", None
) is not None:
context.post_handshake_auth = True
def disable_check_hostname():
if (
getattr(context, "check_hostname", None) is not None
): # Platform-specific: Python 3.2
# We do our own verification, including fingerprints and alternative
# hostnames. So disable it here
context.check_hostname = False
# The order of the below lines setting verify_mode and check_hostname
# matter due to safe-guards SSLContext has to prevent an SSLContext with
# check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
# complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
# or not so we don't know the initial state of the freshly created SSLContext.
if cert_reqs == ssl.CERT_REQUIRED:
context.verify_mode = cert_reqs
disable_check_hostname()
else:
disable_check_hostname()
context.verify_mode = cert_reqs
# Enable logging of TLS session keys via defacto standard environment variable
# 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
if hasattr(context, "keylog_filename"):
sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
if sslkeylogfile:
context.keylog_filename = sslkeylogfile
return context
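# --- Illustrative sketch (not part of urllib3's API): the resulting context ---
# The context built above verifies certificates by default and always carries
# the hardening options, whatever the caller passed for `options`.
def _demo_context_defaults():
    ctx = create_urllib3_context()
    assert ctx.verify_mode == ssl.CERT_REQUIRED  # verification on by default
    assert ctx.options & OP_NO_COMPRESSION       # CRIME mitigation applied
    assert ctx.options & OP_NO_TICKET            # no session tickets requested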
def ssl_wrap_socket(
sock,
keyfile=None,
certfile=None,
cert_reqs=None,
ca_certs=None,
server_hostname=None,
ssl_version=None,
ciphers=None,
ssl_context=None,
ca_cert_dir=None,
key_password=None,
ca_cert_data=None,
tls_in_tls=False,
):
"""
All arguments except for server_hostname, ssl_context, and ca_cert_dir have
the same meaning as they do when using :func:`ssl.wrap_socket`.
:param server_hostname:
When SNI is supported, the expected hostname of the certificate
:param ssl_context:
A pre-made :class:`SSLContext` object. If none is provided, one will
be created using :func:`create_urllib3_context`.
:param ciphers:
A string of ciphers we wish the client to support.
:param ca_cert_dir:
A directory containing CA certificates in multiple separate files, as
supported by OpenSSL's -CApath flag or the capath argument to
SSLContext.load_verify_locations().
:param key_password:
Optional password if the keyfile is encrypted.
:param ca_cert_data:
Optional string containing CA certificates in PEM format suitable for
passing as the cadata parameter to SSLContext.load_verify_locations()
:param tls_in_tls:
Use SSLTransport to wrap the existing socket.
"""
context = ssl_context
if context is None:
# Note: This branch of code and all the variables in it are no longer
# used by urllib3 itself. We should consider deprecating and removing
# this code.
context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
if ca_certs or ca_cert_dir or ca_cert_data:
try:
context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
except (IOError, OSError) as e:
raise SSLError(e)
elif ssl_context is None and hasattr(context, "load_default_certs"):
        # try to load OS default certs; works well on Windows (requires Python 3.4+)
context.load_default_certs()
# Attempt to detect if we get the goofy behavior of the
# keyfile being encrypted and OpenSSL asking for the
# passphrase via the terminal and instead error out.
if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
raise SSLError("Client private key is encrypted, password is required")
if certfile:
if key_password is None:
context.load_cert_chain(certfile, keyfile)
else:
context.load_cert_chain(certfile, keyfile, key_password)
try:
if hasattr(context, "set_alpn_protocols"):
context.set_alpn_protocols(ALPN_PROTOCOLS)
except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols
pass
# If we detect server_hostname is an IP address then the SNI
# extension should not be used according to RFC3546 Section 3.1
use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
# SecureTransport uses server_hostname in certificate verification.
send_sni = (use_sni_hostname and HAS_SNI) or (
IS_SECURETRANSPORT and server_hostname
)
# Do not warn the user if server_hostname is an invalid SNI hostname.
if not HAS_SNI and use_sni_hostname:
warnings.warn(
"An HTTPS request has been made, but the SNI (Server Name "
"Indication) extension to TLS is not available on this platform. "
"This may cause the server to present an incorrect TLS "
"certificate, which can cause validation failures. You can upgrade to "
"a newer version of Python to solve this. For more information, see "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings",
SNIMissingWarning,
)
if send_sni:
ssl_sock = _ssl_wrap_socket_impl(
sock, context, tls_in_tls, server_hostname=server_hostname
)
else:
ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
return ssl_sock
def is_ipaddress(hostname):
"""Detects whether the hostname given is an IPv4 or IPv6 address.
Also detects IPv6 addresses with Zone IDs.
:param str hostname: Hostname to examine.
:return: True if the hostname is an IP address, False otherwise.
"""
if not six.PY2 and isinstance(hostname, bytes):
# IDN A-label bytes are ASCII compatible.
hostname = hostname.decode("ascii")
return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname))
def _is_key_file_encrypted(key_file):
"""Detects if a key file is encrypted or not."""
with open(key_file, "r") as f:
for line in f:
# Look for Proc-Type: 4,ENCRYPTED
if "ENCRYPTED" in line:
return True
return False
def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
if tls_in_tls:
if not SSLTransport:
# Import error, ssl is not available.
raise ProxySchemeUnsupported(
"TLS in TLS requires support for the 'ssl' module"
)
SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
return SSLTransport(sock, ssl_context, server_hostname)
if server_hostname:
return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
else:
return ssl_context.wrap_socket(sock)
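# --- Illustrative sketch (not part of urllib3's API): SNI eligibility ---
# Per the RFC 3546 note above, SNI is only sent for real hostnames, and
# is_ipaddress is the gatekeeper. Runnable via `python -m urllib3.util.ssl_`.
if __name__ == "__main__":
    assert is_ipaddress("127.0.0.1")
    assert is_ipaddress("::1")              # bare IPv6 literals count too
    assert not is_ipaddress("example.com")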
| 17,165 | Python | .py | 411 | 34.671533 | 87 | 0.673125 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |

| 8,744 | __init__.py | rembo10_headphones/lib/urllib3/util/__init__.py |
from __future__ import absolute_import
# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
from .response import is_fp_closed
from .retry import Retry
from .ssl_ import (
ALPN_PROTOCOLS,
HAS_SNI,
IS_PYOPENSSL,
IS_SECURETRANSPORT,
PROTOCOL_TLS,
SSLContext,
assert_fingerprint,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
from .timeout import Timeout, current_time
from .url import Url, get_host, parse_url, split_first
from .wait import wait_for_read, wait_for_write
__all__ = (
"HAS_SNI",
"IS_PYOPENSSL",
"IS_SECURETRANSPORT",
"SSLContext",
"PROTOCOL_TLS",
"ALPN_PROTOCOLS",
"Retry",
"Timeout",
"Url",
"assert_fingerprint",
"current_time",
"is_connection_dropped",
"is_fp_closed",
"get_host",
"parse_url",
"make_headers",
"resolve_cert_reqs",
"resolve_ssl_version",
"split_first",
"ssl_wrap_socket",
"wait_for_read",
"wait_for_write",
"SKIP_HEADER",
"SKIPPABLE_HEADERS",
)
| 1,155 | Python | .py | 47 | 20.638298 | 68 | 0.68264 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |

| 8,745 | retry.py | rembo10_headphones/lib/urllib3/util/retry.py |
from __future__ import absolute_import
import email
import logging
import re
import time
import warnings
from collections import namedtuple
from itertools import takewhile
from ..exceptions import (
ConnectTimeoutError,
InvalidHeader,
MaxRetryError,
ProtocolError,
ProxyError,
ReadTimeoutError,
ResponseError,
)
from ..packages import six
log = logging.getLogger(__name__)
# Data structure for representing the metadata of requests that result in a retry.
RequestHistory = namedtuple(
"RequestHistory", ["method", "url", "error", "status", "redirect_location"]
)
# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
_Default = object()
class _RetryMeta(type):
@property
def DEFAULT_METHOD_WHITELIST(cls):
warnings.warn(
"Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
DeprecationWarning,
)
return cls.DEFAULT_ALLOWED_METHODS
@DEFAULT_METHOD_WHITELIST.setter
def DEFAULT_METHOD_WHITELIST(cls, value):
warnings.warn(
"Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
DeprecationWarning,
)
cls.DEFAULT_ALLOWED_METHODS = value
@property
def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
warnings.warn(
"Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
DeprecationWarning,
)
return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
@DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
warnings.warn(
"Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
DeprecationWarning,
)
cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
@property
def BACKOFF_MAX(cls):
warnings.warn(
"Using 'Retry.BACKOFF_MAX' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
DeprecationWarning,
)
return cls.DEFAULT_BACKOFF_MAX
@BACKOFF_MAX.setter
def BACKOFF_MAX(cls, value):
warnings.warn(
"Using 'Retry.BACKOFF_MAX' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
DeprecationWarning,
)
cls.DEFAULT_BACKOFF_MAX = value
@six.add_metaclass(_RetryMeta)
class Retry(object):
"""Retry configuration.
Each retry attempt will create a new Retry object with updated values, so
they can be safely reused.
Retries can be defined as a default for a pool::
retries = Retry(connect=5, read=2, redirect=5)
http = PoolManager(retries=retries)
response = http.request('GET', 'http://example.com/')
Or per-request (which overrides the default for the pool)::
response = http.request('GET', 'http://example.com/', retries=Retry(10))
Retries can be disabled by passing ``False``::
response = http.request('GET', 'http://example.com/', retries=False)
Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
retries are disabled, in which case the causing exception will be raised.
:param int total:
Total number of retries to allow. Takes precedence over other counts.
Set to ``None`` to remove this constraint and fall back on other
counts.
Set to ``0`` to fail on the first retry.
Set to ``False`` to disable and imply ``raise_on_redirect=False``.
:param int connect:
How many connection-related errors to retry on.
These are errors raised before the request is sent to the remote server,
which we assume has not triggered the server to process the request.
Set to ``0`` to fail on the first retry of this type.
:param int read:
How many times to retry on read errors.
These errors are raised after the request was sent to the server, so the
request may have side-effects.
Set to ``0`` to fail on the first retry of this type.
:param int redirect:
How many redirects to perform. Limit this to avoid infinite redirect
loops.
A redirect is a HTTP response with a status code 301, 302, 303, 307 or
308.
Set to ``0`` to fail on the first retry of this type.
Set to ``False`` to disable and imply ``raise_on_redirect=False``.
:param int status:
How many times to retry on bad status codes.
These are retries made on responses, where status code matches
``status_forcelist``.
Set to ``0`` to fail on the first retry of this type.
:param int other:
How many times to retry on other errors.
Other errors are errors that are not connect, read, redirect or status errors.
These errors might be raised after the request was sent to the server, so the
request might have side-effects.
Set to ``0`` to fail on the first retry of this type.
If ``total`` is not set, it's a good idea to set this to 0 to account
for unexpected edge cases and avoid infinite retry loops.
:param iterable allowed_methods:
Set of uppercased HTTP method verbs that we should retry on.
By default, we only retry on methods which are considered to be
idempotent (multiple requests with the same parameters end with the
same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
Set to a ``False`` value to retry on any verb.
.. warning::
Previously this parameter was named ``method_whitelist``, that
usage is deprecated in v1.26.0 and will be removed in v2.0.
:param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ``allowed_methods``
and the response status code is in ``status_forcelist``.
By default, this is disabled with ``None``.
:param float backoff_factor:
A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a
delay). urllib3 will sleep for::
{backoff factor} * (2 ** ({number of total retries} - 1))
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
than :attr:`Retry.DEFAULT_BACKOFF_MAX`.
By default, backoff is disabled (set to 0).
:param bool raise_on_redirect: Whether, if the number of redirects is
exhausted, to raise a MaxRetryError, or to return a response with a
response code in the 3xx range.
:param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
whether we should raise an exception, or return a response,
if status falls in ``status_forcelist`` range and retries have
been exhausted.
:param tuple history: The history of the request encountered during
each call to :meth:`~Retry.increment`. The list is in the order
the requests occurred. Each list item is of class :class:`RequestHistory`.
:param bool respect_retry_after_header:
Whether to respect Retry-After header on status codes defined as
:attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
:param iterable remove_headers_on_redirect:
Sequence of headers to remove from the request when a response
indicating a redirect is returned before firing off the redirected
request.
"""
#: Default methods to be used for ``allowed_methods``
DEFAULT_ALLOWED_METHODS = frozenset(
["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
)
#: Default status codes to be used for ``status_forcelist``
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
#: Default headers to be used for ``remove_headers_on_redirect``
DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
#: Maximum backoff time.
DEFAULT_BACKOFF_MAX = 120
def __init__(
self,
total=10,
connect=None,
read=None,
redirect=None,
status=None,
other=None,
allowed_methods=_Default,
status_forcelist=None,
backoff_factor=0,
raise_on_redirect=True,
raise_on_status=True,
history=None,
respect_retry_after_header=True,
remove_headers_on_redirect=_Default,
# TODO: Deprecated, remove in v2.0
method_whitelist=_Default,
):
if method_whitelist is not _Default:
if allowed_methods is not _Default:
raise ValueError(
"Using both 'allowed_methods' and "
"'method_whitelist' together is not allowed. "
"Instead only use 'allowed_methods'"
)
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
stacklevel=2,
)
allowed_methods = method_whitelist
if allowed_methods is _Default:
allowed_methods = self.DEFAULT_ALLOWED_METHODS
if remove_headers_on_redirect is _Default:
remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
self.total = total
self.connect = connect
self.read = read
self.status = status
self.other = other
if redirect is False or total is False:
redirect = 0
raise_on_redirect = False
self.redirect = redirect
self.status_forcelist = status_forcelist or set()
self.allowed_methods = allowed_methods
self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect
self.raise_on_status = raise_on_status
self.history = history or tuple()
self.respect_retry_after_header = respect_retry_after_header
self.remove_headers_on_redirect = frozenset(
[h.lower() for h in remove_headers_on_redirect]
)
def new(self, **kw):
params = dict(
total=self.total,
connect=self.connect,
read=self.read,
redirect=self.redirect,
status=self.status,
other=self.other,
status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor,
raise_on_redirect=self.raise_on_redirect,
raise_on_status=self.raise_on_status,
history=self.history,
remove_headers_on_redirect=self.remove_headers_on_redirect,
respect_retry_after_header=self.respect_retry_after_header,
)
# TODO: If already given in **kw we use what's given to us
# If not given we need to figure out what to pass. We decide
# based on whether our class has the 'method_whitelist' property
# and if so we pass the deprecated 'method_whitelist' otherwise
# we use 'allowed_methods'. Remove in v2.0
if "method_whitelist" not in kw and "allowed_methods" not in kw:
if "method_whitelist" in self.__dict__:
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
params["method_whitelist"] = self.allowed_methods
else:
params["allowed_methods"] = self.allowed_methods
params.update(kw)
return type(self)(**params)
@classmethod
def from_int(cls, retries, redirect=True, default=None):
"""Backwards-compatibility for the old retries format."""
if retries is None:
retries = default if default is not None else cls.DEFAULT
if isinstance(retries, Retry):
return retries
redirect = bool(redirect) and None
new_retries = cls(retries, redirect=redirect)
log.debug("Converted retries value: %r -> %r", retries, new_retries)
return new_retries
def get_backoff_time(self):
"""Formula for computing the current backoff
:rtype: float
"""
        # We only consider the most recent consecutive sequence of errors (ignoring redirects).
consecutive_errors_len = len(
list(
takewhile(lambda x: x.redirect_location is None, reversed(self.history))
)
)
if consecutive_errors_len <= 1:
return 0
backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
return min(self.DEFAULT_BACKOFF_MAX, backoff_value)
def parse_retry_after(self, retry_after):
# Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
if re.match(r"^\s*[0-9]+\s*$", retry_after):
seconds = int(retry_after)
else:
retry_date_tuple = email.utils.parsedate_tz(retry_after)
if retry_date_tuple is None:
raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
if retry_date_tuple[9] is None: # Python 2
# Assume UTC if no timezone was specified
# On Python2.7, parsedate_tz returns None for a timezone offset
# instead of 0 if no timezone is given, where mktime_tz treats
# a None timezone offset as local time.
retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
retry_date = email.utils.mktime_tz(retry_date_tuple)
seconds = retry_date - time.time()
if seconds < 0:
seconds = 0
return seconds
def get_retry_after(self, response):
"""Get the value of Retry-After in seconds."""
retry_after = response.getheader("Retry-After")
if retry_after is None:
return None
return self.parse_retry_after(retry_after)
def sleep_for_retry(self, response=None):
retry_after = self.get_retry_after(response)
if retry_after:
time.sleep(retry_after)
return True
return False
def _sleep_backoff(self):
backoff = self.get_backoff_time()
if backoff <= 0:
return
time.sleep(backoff)
def sleep(self, response=None):
"""Sleep between retry attempts.
This method will respect a server's ``Retry-After`` response header
and sleep the duration of the time requested. If that is not present, it
will use an exponential backoff. By default, the backoff factor is 0 and
this method will return immediately.
"""
if self.respect_retry_after_header and response:
slept = self.sleep_for_retry(response)
if slept:
return
self._sleep_backoff()
def _is_connection_error(self, err):
"""Errors when we're fairly sure that the server did not receive the
request, so it should be safe to retry.
"""
if isinstance(err, ProxyError):
err = err.original_error
return isinstance(err, ConnectTimeoutError)
def _is_read_error(self, err):
"""Errors that occur after the request has been started, so we should
assume that the server began processing it.
"""
return isinstance(err, (ReadTimeoutError, ProtocolError))
def _is_method_retryable(self, method):
"""Checks if a given HTTP method should be retried upon, depending if
it is included in the allowed_methods
"""
        # TODO: For now, favor the case where a Retry implementation sets its own
        # method_whitelist property outside of our constructor, to avoid breaking
        # custom implementations.
if "method_whitelist" in self.__dict__:
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
allowed_methods = self.method_whitelist
else:
allowed_methods = self.allowed_methods
if allowed_methods and method.upper() not in allowed_methods:
return False
return True
def is_retry(self, method, status_code, has_retry_after=False):
"""Is this method/status code retryable? (Based on allowlists and control
variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
be retried upon on the presence of the aforementioned header)
"""
if not self._is_method_retryable(method):
return False
if self.status_forcelist and status_code in self.status_forcelist:
return True
return (
self.total
and self.respect_retry_after_header
and has_retry_after
and (status_code in self.RETRY_AFTER_STATUS_CODES)
)
def is_exhausted(self):
"""Are we out of retries?"""
retry_counts = (
self.total,
self.connect,
self.read,
self.redirect,
self.status,
self.other,
)
retry_counts = list(filter(None, retry_counts))
if not retry_counts:
return False
return min(retry_counts) < 0
def increment(
self,
method=None,
url=None,
response=None,
error=None,
_pool=None,
_stacktrace=None,
):
"""Return a new Retry object with incremented retry counters.
:param response: A response object, or None, if the server did not
return a response.
:type response: :class:`~urllib3.response.HTTPResponse`
:param Exception error: An error encountered during the request, or
None if the response was received successfully.
:return: A new ``Retry`` object.
"""
if self.total is False and error:
# Disabled, indicate to re-raise the error.
raise six.reraise(type(error), error, _stacktrace)
total = self.total
if total is not None:
total -= 1
connect = self.connect
read = self.read
redirect = self.redirect
status_count = self.status
other = self.other
cause = "unknown"
status = None
redirect_location = None
if error and self._is_connection_error(error):
# Connect retry?
if connect is False:
raise six.reraise(type(error), error, _stacktrace)
elif connect is not None:
connect -= 1
elif error and self._is_read_error(error):
# Read retry?
if read is False or not self._is_method_retryable(method):
raise six.reraise(type(error), error, _stacktrace)
elif read is not None:
read -= 1
elif error:
# Other retry?
if other is not None:
other -= 1
elif response and response.get_redirect_location():
# Redirect retry?
if redirect is not None:
redirect -= 1
cause = "too many redirects"
redirect_location = response.get_redirect_location()
status = response.status
else:
# Incrementing because of a server error like a 500 in
# status_forcelist and the given method is in the allowed_methods
cause = ResponseError.GENERIC_ERROR
if response and response.status:
if status_count is not None:
status_count -= 1
cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
status = response.status
history = self.history + (
RequestHistory(method, url, error, status, redirect_location),
)
new_retry = self.new(
total=total,
connect=connect,
read=read,
redirect=redirect,
status=status_count,
other=other,
history=history,
)
if new_retry.is_exhausted():
raise MaxRetryError(_pool, url, error or ResponseError(cause))
log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
return new_retry
def __repr__(self):
return (
"{cls.__name__}(total={self.total}, connect={self.connect}, "
"read={self.read}, redirect={self.redirect}, status={self.status})"
).format(cls=type(self), self=self)
def __getattr__(self, item):
if item == "method_whitelist":
# TODO: Remove this deprecated alias in v2.0
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
return self.allowed_methods
try:
return getattr(super(Retry, self), item)
except AttributeError:
return getattr(Retry, item)
# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
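# Illustrative sketch (added for this edition; not part of upstream urllib3).
# ``increment`` never mutates in place: each call returns a fresh ``Retry``
# with one counter consumed, and raises ``MaxRetryError`` once the budget is
# spent. A minimal, import-safe demonstration:
def _retry_increment_example():
    retries = Retry(total=1)
    retries = retries.increment(method="GET", url="/")  # total: 1 -> 0
    try:
        retries.increment(method="GET", url="/")  # total: 0 -> -1, exhausted
    except MaxRetryError:
        pass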
| 22,001 | Python | .py | 498 | 34.194779 | 94 | 0.626538 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,746 | ssl_match_hostname.py | rembo10_headphones/lib/urllib3/util/ssl_match_hostname.py |
"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
# Note: This file is under the PSF license as the code comes from the python
# stdlib. http://docs.python.org/3/license.html
import re
import sys
# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
# system, use it to handle IPAddress ServerAltnames (this was added in
# python-3.5) otherwise only do DNS matching. This allows
# util.ssl_match_hostname to continue to be used in Python 2.7.
try:
import ipaddress
except ImportError:
ipaddress = None
__version__ = "3.5.0.1"
class CertificateError(ValueError):
pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r".")
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count("*")
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn)
)
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == "*":
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append("[^.]+")
elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
return pat.match(hostname)
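# Illustrative sketch (added; not part of the stdlib-derived code): per the
# RFC 6125 rules implemented above, a left-most "*" matches exactly one
# label and never spans a dot.
def _dnsname_match_example():
    assert _dnsname_match("*.example.com", "www.example.com")
    assert not _dnsname_match("*.example.com", "a.b.example.com")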
def _to_unicode(obj):
if isinstance(obj, str) and sys.version_info < (3,):
    # flake8 F821 is ignored below to allow the Python 2 "unicode" builtin
obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821
return obj
def _ipaddress_match(ipname, host_ip):
"""Exact matching of IP addresses.
RFC 6125 explicitly doesn't define an algorithm for this
(section 1.7.2 - "Out of Scope").
"""
# OpenSSL may add a trailing newline to a subjectAltName's IP address
# Divergence from upstream: ipaddress can't handle byte str
ip = ipaddress.ip_address(_to_unicode(ipname).rstrip())
return ip == host_ip
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError(
"empty or no certificate, match_hostname needs a "
"SSL socket or SSL context with either "
"CERT_OPTIONAL or CERT_REQUIRED"
)
try:
# Divergence from upstream: ipaddress can't handle byte str
host_ip = ipaddress.ip_address(_to_unicode(hostname))
except ValueError:
# Not an IP address (common case)
host_ip = None
except UnicodeError:
# Divergence from upstream: Have to deal with ipaddress not taking
# byte strings. addresses should be all ascii, so we consider it not
# an ipaddress in this case
host_ip = None
except AttributeError:
# Divergence from upstream: Make ipaddress library optional
if ipaddress is None:
host_ip = None
else:
raise
dnsnames = []
san = cert.get("subjectAltName", ())
for key, value in san:
if key == "DNS":
if host_ip is None and _dnsname_match(value, hostname):
return
dnsnames.append(value)
elif key == "IP Address":
if host_ip is not None and _ipaddress_match(value, host_ip):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get("subject", ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == "commonName":
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError(
"hostname %r "
"doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
)
elif len(dnsnames) == 1:
raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
else:
raise CertificateError(
"no appropriate commonName or subjectAltName fields were found"
)
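# Illustrative sketch (added): ``match_hostname`` consumes the dict shape
# produced by ``SSLSocket.getpeercert()``; it returns None on success and
# raises ``CertificateError`` on a mismatch.
def _match_hostname_example():
    cert = {"subjectAltName": (("DNS", "*.example.com"),)}
    match_hostname(cert, "www.example.com")  # succeeds silently
    try:
        match_hostname(cert, "attacker.test")
    except CertificateError:
        pass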
| 5,751 | Python | .py | 138 | 34.101449 | 88 | 0.64025 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,747 | timeout.py | rembo10_headphones/lib/urllib3/util/timeout.py |
from __future__ import absolute_import
import time
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
from ..exceptions import TimeoutStateError
# A sentinel value to indicate that no timeout was specified by the user in
# urllib3
_Default = object()
# Use time.monotonic if available.
current_time = getattr(time, "monotonic", time.time)
class Timeout(object):
"""Timeout configuration.
Timeouts can be defined as a default for a pool:
.. code-block:: python
timeout = Timeout(connect=2.0, read=7.0)
http = PoolManager(timeout=timeout)
response = http.request('GET', 'http://example.com/')
Or per-request (which overrides the default for the pool):
.. code-block:: python
response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
Timeouts can be disabled by setting all the parameters to ``None``:
.. code-block:: python
no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/', timeout=no_timeout)
:param total:
This combines the connect and read timeouts into one; the read timeout
will be set to the time leftover from the connect attempt. In the
event that both a connect timeout and a total are specified, or a read
timeout and a total are specified, the shorter timeout will be applied.
Defaults to None.
:type total: int, float, or None
:param connect:
The maximum amount of time (in seconds) to wait for a connection
attempt to a server to succeed. Omitting the parameter will default the
connect timeout to the system default, probably `the global default
timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout for connection attempts.
:type connect: int, float, or None
:param read:
The maximum amount of time (in seconds) to wait between consecutive
read operations for a response from the server. Omitting the parameter
will default the read timeout to the system default, probably `the
global default timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout.
:type read: int, float, or None
.. note::
Many factors can affect the total amount of time for urllib3 to return
an HTTP response.
For example, Python's DNS resolver does not obey the timeout specified
on the socket. Other factors that can affect total request time include
high CPU load, high swap, the program running at a low priority level,
or other behaviors.
In addition, the read and total timeouts only measure the time between
read operations on the socket connecting the client and the server,
not the total amount of time for the request to return a complete
response. For most requests, the timeout is raised because the server
has not sent the first byte in the specified time. This is not always
the case; if a server streams one byte every fifteen seconds, a timeout
of 20 seconds will not trigger, even though the request will take
several minutes to complete.
If your goal is to cut off any request after a set amount of wall clock
time, consider having a second "watcher" thread to cut off a slow
request.
"""
#: A sentinel object representing the default timeout value
DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
def __init__(self, total=None, connect=_Default, read=_Default):
self._connect = self._validate_timeout(connect, "connect")
self._read = self._validate_timeout(read, "read")
self.total = self._validate_timeout(total, "total")
self._start_connect = None
def __repr__(self):
return "%s(connect=%r, read=%r, total=%r)" % (
type(self).__name__,
self._connect,
self._read,
self.total,
)
# __str__ provided for backwards compatibility
__str__ = __repr__
@classmethod
def _validate_timeout(cls, value, name):
"""Check that a timeout attribute is valid.
:param value: The timeout value to validate
:param name: The name of the timeout attribute to validate. This is
used to specify in error messages.
:return: The validated and casted version of the given value.
:raises ValueError: If it is a numeric value less than or equal to
zero, or the type is not an integer, float, or None.
"""
if value is _Default:
return cls.DEFAULT_TIMEOUT
if value is None or value is cls.DEFAULT_TIMEOUT:
return value
if isinstance(value, bool):
raise ValueError(
"Timeout cannot be a boolean value. It must "
"be an int, float or None."
)
try:
float(value)
except (TypeError, ValueError):
raise ValueError(
"Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value)
)
try:
if value <= 0:
raise ValueError(
"Attempted to set %s timeout to %s, but the "
"timeout cannot be set to a value less "
"than or equal to 0." % (name, value)
)
except TypeError:
# Python 3
raise ValueError(
"Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value)
)
return value
@classmethod
def from_float(cls, timeout):
"""Create a new Timeout from a legacy timeout value.
The timeout value used by httplib.py sets the same timeout on the
        connect() and recv() socket requests. This creates a :class:`Timeout`
object that sets the individual timeouts to the ``timeout`` value
passed to this function.
:param timeout: The legacy timeout value.
:type timeout: integer, float, sentinel default object, or None
:return: Timeout object
:rtype: :class:`Timeout`
"""
return Timeout(read=timeout, connect=timeout)
def clone(self):
"""Create a copy of the timeout object
Timeout properties are stored per-pool but each request needs a fresh
Timeout object to ensure each one has its own start/stop configured.
:return: a copy of the timeout object
:rtype: :class:`Timeout`
"""
# We can't use copy.deepcopy because that will also create a new object
# for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
# detect the user default.
return Timeout(connect=self._connect, read=self._read, total=self.total)
def start_connect(self):
"""Start the timeout clock, used during a connect() attempt
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to start a timer that has been started already.
"""
if self._start_connect is not None:
raise TimeoutStateError("Timeout timer has already been started.")
self._start_connect = current_time()
return self._start_connect
def get_connect_duration(self):
"""Gets the time elapsed since the call to :meth:`start_connect`.
:return: Elapsed time in seconds.
:rtype: float
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to get duration for a timer that hasn't been started.
"""
if self._start_connect is None:
raise TimeoutStateError(
"Can't get connect duration for timer that has not started."
)
return current_time() - self._start_connect
@property
def connect_timeout(self):
"""Get the value to use when setting a connection timeout.
This will be a positive float or integer, the value None
(never timeout), or the default system timeout.
:return: Connect timeout.
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
"""
if self.total is None:
return self._connect
if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
return self.total
return min(self._connect, self.total)
@property
def read_timeout(self):
"""Get the value for the read timeout.
This assumes some time has elapsed in the connection timeout and
computes the read timeout appropriately.
If self.total is set, the read timeout is dependent on the amount of
time taken by the connect timeout. If the connection time has not been
established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
raised.
:return: Value to use for the read timeout.
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
:raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
has not yet been called on this object.
"""
if (
self.total is not None
and self.total is not self.DEFAULT_TIMEOUT
and self._read is not None
and self._read is not self.DEFAULT_TIMEOUT
):
# In case the connect timeout has not yet been established.
if self._start_connect is None:
return self._read
return max(0, min(self.total - self.get_connect_duration(), self._read))
elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
return max(0, self.total - self.get_connect_duration())
else:
return self._read
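# Illustrative sketch (added; not part of upstream urllib3): ``total`` caps
# the per-phase values rather than replacing them.
def _timeout_example():
    t = Timeout(total=5.0, connect=2.0, read=7.0)
    assert t.connect_timeout == 2.0  # min(connect, total)
    t.start_connect()  # the read timeout needs the connect clock running
    assert t.read_timeout <= 5.0  # leftover of total, capped by read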
| 10,003 | Python | .py | 209 | 38.473684 | 84 | 0.646533 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,748 | connection.py | rembo10_headphones/lib/urllib3/util/connection.py |
from __future__ import absolute_import
import socket
from ..contrib import _appengine_environ
from ..exceptions import LocationParseError
from ..packages import six
from .wait import NoWayToWaitForSocketError, wait_for_read
def is_connection_dropped(conn): # Platform-specific
"""
Returns True if the connection is dropped and should be closed.
:param conn:
:class:`http.client.HTTPConnection` object.
Note: For platforms like AppEngine, this will always return ``False`` to
let the platform handle connection recycling transparently for us.
"""
sock = getattr(conn, "sock", False)
if sock is False: # Platform-specific: AppEngine
return False
if sock is None: # Connection already closed (such as by httplib).
return True
try:
# Returns True if readable, which here means it's been dropped
return wait_for_read(sock, timeout=0.0)
except NoWayToWaitForSocketError: # Platform-specific: AppEngine
return False
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
# One additional modification is that we avoid binding to IPv6 servers
# discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(
address,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None,
socket_options=None,
):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`socket.getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
    A host of '' or port 0 tells the OS to use the default.
"""
host, port = address
if host.startswith("["):
host = host.strip("[]")
err = None
# Using the value from allowed_gai_family() in the context of getaddrinfo lets
# us select whether to work with IPv4 DNS records, IPv6 records, or both.
# The original create_connection function always returns all records.
family = allowed_gai_family()
try:
host.encode("idna")
except UnicodeError:
return six.raise_from(
LocationParseError(u"'%s', label empty or too long" % host), None
)
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket.socket(af, socktype, proto)
# If provided, set socket level options before connecting.
_set_socket_options(sock, socket_options)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except socket.error as e:
err = e
if sock is not None:
sock.close()
sock = None
if err is not None:
raise err
raise socket.error("getaddrinfo returns an empty list")
def _set_socket_options(sock, options):
if options is None:
return
for opt in options:
sock.setsockopt(*opt)
def allowed_gai_family():
"""This function is designed to work in the context of
getaddrinfo, where family=socket.AF_UNSPEC is the default and
will perform a DNS search for both IPv6 and IPv4 records."""
family = socket.AF_INET
if HAS_IPV6:
family = socket.AF_UNSPEC
return family
def _has_ipv6(host):
"""Returns True if the system can bind an IPv6 address."""
sock = None
has_ipv6 = False
# App Engine doesn't support IPV6 sockets and actually has a quota on the
# number of sockets that can be used, so just early out here instead of
# creating a socket needlessly.
# See https://github.com/urllib3/urllib3/issues/1446
if _appengine_environ.is_appengine_sandbox():
return False
if socket.has_ipv6:
# has_ipv6 returns true if cPython was compiled with IPv6 support.
# It does not tell us if the system has IPv6 support enabled. To
# determine that we must bind to an IPv6 address.
# https://github.com/urllib3/urllib3/pull/611
# https://bugs.python.org/issue658327
try:
sock = socket.socket(socket.AF_INET6)
sock.bind((host, 0))
has_ipv6 = True
except Exception:
pass
if sock:
sock.close()
return has_ipv6
HAS_IPV6 = _has_ipv6("::1")
| 4,901 | Python | .py | 118 | 34.576271 | 82 | 0.675715 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,749 | _jellyfish.py | rembo10_headphones/lib/jellyfish/_jellyfish.py |
import unicodedata
from collections import defaultdict
from .compat import _range, _zip_longest, _no_bytes_err
from .porter import Stemmer
def _normalize(s):
return unicodedata.normalize('NFKD', s)
def levenshtein_distance(s1, s2):
if isinstance(s1, bytes) or isinstance(s2, bytes):
raise TypeError(_no_bytes_err)
if s1 == s2:
return 0
rows = len(s1)+1
cols = len(s2)+1
if not s1:
return cols-1
if not s2:
return rows-1
prev = None
cur = list(range(cols))
for r in _range(1, rows):
prev, cur = cur, [r] + [0]*(cols-1)
for c in _range(1, cols):
deletion = prev[c] + 1
insertion = cur[c-1] + 1
edit = prev[c-1] + (0 if s1[r-1] == s2[c-1] else 1)
cur[c] = min(edit, deletion, insertion)
return cur[-1]
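# Illustrative sketch (added; not part of upstream jellyfish): the classic
# pair needs three single-character edits.
def _levenshtein_example():
    assert levenshtein_distance("kitten", "sitting") == 3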
def _jaro_winkler(ying, yang, long_tolerance, winklerize):
if isinstance(ying, bytes) or isinstance(yang, bytes):
raise TypeError(_no_bytes_err)
ying_len = len(ying)
yang_len = len(yang)
if not ying_len or not yang_len:
return 0
    # NB: despite its name, this is the length of the *longer* string; kept
    # as-is to mirror the reference C implementation
    min_len = max(ying_len, yang_len)
search_range = (min_len // 2) - 1
if search_range < 0:
search_range = 0
ying_flags = [False]*ying_len
yang_flags = [False]*yang_len
# looking only within search range, count & flag matched pairs
common_chars = 0
for i, ying_ch in enumerate(ying):
low = i - search_range if i > search_range else 0
hi = i + search_range if i + search_range < yang_len else yang_len - 1
for j in _range(low, hi+1):
if not yang_flags[j] and yang[j] == ying_ch:
ying_flags[i] = yang_flags[j] = True
common_chars += 1
break
# short circuit if no characters match
if not common_chars:
return 0
# count transpositions
k = trans_count = 0
for i, ying_f in enumerate(ying_flags):
if ying_f:
for j in _range(k, yang_len):
if yang_flags[j]:
k = j + 1
break
if ying[i] != yang[j]:
trans_count += 1
trans_count /= 2
# adjust for similarities in nonmatched characters
common_chars = float(common_chars)
weight = ((common_chars/ying_len + common_chars/yang_len +
(common_chars-trans_count) / common_chars)) / 3
# winkler modification: continue to boost if strings are similar
if winklerize and weight > 0.7 and ying_len > 3 and yang_len > 3:
# adjust for up to first 4 chars in common
j = min(min_len, 4)
i = 0
while i < j and ying[i] == yang[i] and ying[i]:
i += 1
if i:
weight += i * 0.1 * (1.0 - weight)
# optionally adjust for long strings
# after agreeing beginning chars, at least two or more must agree and
# agreed characters must be > half of remaining characters
if (long_tolerance and min_len > 4 and common_chars > i+1 and
2 * common_chars >= min_len + i):
weight += ((1.0 - weight) * (float(common_chars-i-1) / float(ying_len+yang_len-i*2+2)))
return weight
def damerau_levenshtein_distance(s1, s2):
if isinstance(s1, bytes) or isinstance(s2, bytes):
raise TypeError(_no_bytes_err)
len1 = len(s1)
len2 = len(s2)
infinite = len1 + len2
# character array
da = defaultdict(int)
# distance matrix
score = [[0]*(len2+2) for x in _range(len1+2)]
score[0][0] = infinite
for i in _range(0, len1+1):
score[i+1][0] = infinite
score[i+1][1] = i
for i in _range(0, len2+1):
score[0][i+1] = infinite
score[1][i+1] = i
for i in _range(1, len1+1):
db = 0
for j in _range(1, len2+1):
i1 = da[s2[j-1]]
j1 = db
cost = 1
if s1[i-1] == s2[j-1]:
cost = 0
db = j
score[i+1][j+1] = min(score[i][j] + cost,
score[i+1][j] + 1,
score[i][j+1] + 1,
score[i1][j1] + (i-i1-1) + 1 + (j-j1-1))
da[s1[i-1]] = i
return score[len1+1][len2+1]
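# Illustrative sketch (added): unlike plain Levenshtein, a transposition of
# adjacent characters counts as a single edit here.
def _damerau_example():
    assert levenshtein_distance("ab", "ba") == 2
    assert damerau_levenshtein_distance("ab", "ba") == 1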
def jaro_distance(s1, s2):
return _jaro_winkler(s1, s2, False, False)
def jaro_winkler(s1, s2, long_tolerance=False):
return _jaro_winkler(s1, s2, long_tolerance, True)
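# Illustrative sketch (added): identical strings score 1, and the Winkler
# variant boosts pairs that share a common prefix ("di" below).
def _jaro_example():
    assert jaro_distance("abc", "abc") == 1
    assert jaro_winkler("dixon", "dickson") > jaro_distance("dixon", "dickson")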
def soundex(s):
if not s:
return s
if isinstance(s, bytes):
raise TypeError(_no_bytes_err)
s = _normalize(s)
replacements = (('bfpv', '1'),
('cgjkqsxz', '2'),
('dt', '3'),
('l', '4'),
('mn', '5'),
('r', '6'))
result = [s[0]]
count = 1
    # find would-be replacement for first character
for lset, sub in replacements:
if s[0].lower() in lset:
last = sub
break
else:
last = None
for letter in s[1:]:
for lset, sub in replacements:
if letter.lower() in lset:
if sub != last:
result.append(sub)
count += 1
last = sub
break
else:
last = None
if count == 4:
break
result += '0'*(4-count)
return ''.join(result)
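# Illustrative sketch (added): the textbook Soundex pair encodes identically.
def _soundex_example():
    assert soundex("Robert") == "R163"
    assert soundex("Rupert") == "R163"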
def hamming_distance(s1, s2):
if isinstance(s1, bytes) or isinstance(s2, bytes):
raise TypeError(_no_bytes_err)
# ensure length of s1 >= s2
if len(s2) > len(s1):
s1, s2 = s2, s1
# distance is difference in length + differing chars
distance = len(s1) - len(s2)
for i, c in enumerate(s2):
if c != s1[i]:
distance += 1
return distance
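# Illustrative sketch (added): any length difference counts as that many
# mismatches on top of the positional differences.
def _hamming_example():
    assert hamming_distance("abc", "abd") == 1
    assert hamming_distance("abc", "ab") == 1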
def nysiis(s):
if isinstance(s, bytes):
raise TypeError(_no_bytes_err)
if not s:
return ''
s = s.upper()
key = []
# step 1 - prefixes
if s.startswith('MAC'):
s = 'MCC' + s[3:]
elif s.startswith('KN'):
s = s[1:]
elif s.startswith('K'):
s = 'C' + s[1:]
elif s.startswith(('PH', 'PF')):
s = 'FF' + s[2:]
elif s.startswith('SCH'):
s = 'SSS' + s[3:]
# step 2 - suffixes
if s.endswith(('IE', 'EE')):
s = s[:-2] + 'Y'
elif s.endswith(('DT', 'RT', 'RD', 'NT', 'ND')):
s = s[:-2] + 'D'
# step 3 - first character of key comes from name
key.append(s[0])
# step 4 - translate remaining chars
i = 1
len_s = len(s)
while i < len_s:
ch = s[i]
if ch == 'E' and i+1 < len_s and s[i+1] == 'V':
ch = 'AF'
i += 1
elif ch in 'AEIOU':
ch = 'A'
elif ch == 'Q':
ch = 'G'
elif ch == 'Z':
ch = 'S'
elif ch == 'M':
ch = 'N'
elif ch == 'K':
if i+1 < len(s) and s[i+1] == 'N':
ch = 'N'
else:
ch = 'C'
elif ch == 'S' and s[i+1:i+3] == 'CH':
ch = 'SS'
i += 2
elif ch == 'P' and i+1 < len(s) and s[i+1] == 'H':
ch = 'F'
i += 1
elif ch == 'H' and (s[i-1] not in 'AEIOU' or (i+1 < len(s) and s[i+1] not in 'AEIOU')):
if s[i-1] in 'AEIOU':
ch = 'A'
else:
ch = s[i-1]
elif ch == 'W' and s[i-1] in 'AEIOU':
ch = s[i-1]
if ch[-1] != key[-1][-1]:
key.append(ch)
i += 1
key = ''.join(key)
# step 5 - remove trailing S
if key.endswith('S') and key != 'S':
key = key[:-1]
# step 6 - replace AY w/ Y
if key.endswith('AY'):
key = key[:-2] + 'Y'
# step 7 - remove trailing A
if key.endswith('A') and key != 'A':
key = key[:-1]
# step 8 was already done
return key
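# Illustrative sketch (added): the prefix rules above fold "Knight" and
# "Night" onto the same NYSIIS key.
def _nysiis_example():
    assert nysiis("Knight") == nysiis("Night")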
def match_rating_codex(s):
if isinstance(s, bytes):
raise TypeError(_no_bytes_err)
s = s.upper()
codex = []
prev = None
for i, c in enumerate(s):
# not a space OR
# starting character & vowel
# or consonant not preceded by same consonant
if (c != ' ' and (i == 0 and c in 'AEIOU') or (c not in 'AEIOU' and c != prev)):
codex.append(c)
prev = c
# just use first/last 3
if len(codex) > 6:
return ''.join(codex[:3]+codex[-3:])
else:
return ''.join(codex)
def match_rating_comparison(s1, s2):
codex1 = match_rating_codex(s1)
codex2 = match_rating_codex(s2)
len1 = len(codex1)
len2 = len(codex2)
res1 = []
res2 = []
# length differs by 3 or more, no result
if abs(len1-len2) >= 3:
return None
# get minimum rating based on sums of codexes
lensum = len1 + len2
if lensum <= 4:
min_rating = 5
elif lensum <= 7:
min_rating = 4
elif lensum <= 11:
min_rating = 3
else:
min_rating = 2
# strip off common prefixes
for c1, c2 in _zip_longest(codex1, codex2):
if c1 != c2:
if c1:
res1.append(c1)
if c2:
res2.append(c2)
unmatched_count1 = unmatched_count2 = 0
for c1, c2 in _zip_longest(reversed(res1), reversed(res2)):
if c1 != c2:
if c1:
unmatched_count1 += 1
if c2:
unmatched_count2 += 1
return (6 - max(unmatched_count1, unmatched_count2)) >= min_rating
def metaphone(s):
if isinstance(s, bytes):
raise TypeError(_no_bytes_err)
result = []
s = _normalize(s.lower())
# skip first character if s starts with these
if s.startswith(('kn', 'gn', 'pn', 'ac', 'wr', 'ae')):
s = s[1:]
i = 0
while i < len(s):
c = s[i]
next = s[i+1] if i < len(s)-1 else '*****'
nextnext = s[i+2] if i < len(s)-2 else '*****'
# skip doubles except for cc
if c == next and c != 'c':
i += 1
continue
if c in 'aeiou':
if i == 0 or s[i-1] == ' ':
result.append(c)
elif c == 'b':
if (not (i != 0 and s[i-1] == 'm')) or next:
result.append('b')
elif c == 'c':
if next == 'i' and nextnext == 'a' or next == 'h':
result.append('x')
i += 1
elif next in 'iey':
result.append('s')
i += 1
else:
result.append('k')
elif c == 'd':
if next == 'g' and nextnext in 'iey':
result.append('j')
i += 2
else:
result.append('t')
elif c in 'fjlmnr':
result.append(c)
elif c == 'g':
if next in 'iey':
result.append('j')
elif next not in 'hn':
result.append('k')
elif next == 'h' and nextnext and nextnext not in 'aeiou':
i += 1
elif c == 'h':
if i == 0 or next in 'aeiou' or s[i-1] not in 'aeiou':
result.append('h')
elif c == 'k':
if i == 0 or s[i-1] != 'c':
result.append('k')
elif c == 'p':
if next == 'h':
result.append('f')
i += 1
else:
result.append('p')
elif c == 'q':
result.append('k')
elif c == 's':
if next == 'h':
result.append('x')
i += 1
elif next == 'i' and nextnext in 'oa':
result.append('x')
i += 2
else:
result.append('s')
elif c == 't':
if next == 'i' and nextnext in 'oa':
result.append('x')
elif next == 'h':
result.append('0')
i += 1
elif next != 'c' or nextnext != 'h':
result.append('t')
elif c == 'v':
result.append('f')
elif c == 'w':
if i == 0 and next == 'h':
i += 1
if nextnext in 'aeiou' or nextnext == '*****':
result.append('w')
elif c == 'x':
if i == 0:
if next == 'h' or (next == 'i' and nextnext in 'oa'):
result.append('x')
else:
result.append('s')
else:
result.append('k')
result.append('s')
elif c == 'y':
if next in 'aeiou':
result.append('y')
elif c == 'z':
result.append('s')
elif c == ' ':
if len(result) > 0 and result[-1] != ' ':
result.append(' ')
i += 1
return ''.join(result).upper()
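# Illustrative sketch (added; expected value taken from this repo's test
# suite): vowels survive only at the start, and a bare "c" hardens to "k".
def _metaphone_example():
    assert metaphone("abc") == "ABK"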
def porter_stem(s):
if isinstance(s, bytes):
raise TypeError(_no_bytes_err)
return Stemmer(s).stem()
| 13,017 | Python | .py | 402 | 22.49005 | 99 | 0.473382 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,750 | test.py | rembo10_headphones/lib/jellyfish/test.py |
# -*- coding: utf-8 -*-
import sys
if sys.version_info[0] < 3:
import unicodecsv as csv
open_kwargs = {}
else:
import csv
open_kwargs = {'encoding': 'utf8'}
import platform
import pytest
def assertAlmostEqual(a, b, places=3):
assert abs(a - b) < (0.1**places)
if platform.python_implementation() == 'CPython':
implementations = ['python', 'c']
else:
implementations = ['python']
@pytest.fixture(params=implementations)
def jf(request):
if request.param == 'python':
from jellyfish import _jellyfish as jf
else:
from jellyfish import cjellyfish as jf
return jf
def _load_data(name):
with open('testdata/{}.csv'.format(name), **open_kwargs) as f:
for data in csv.reader(f):
yield data
@pytest.mark.parametrize("s1,s2,value", _load_data('jaro_winkler'), ids=str)
def test_jaro_winkler(jf, s1, s2, value):
value = float(value)
assertAlmostEqual(jf.jaro_winkler(s1, s2), value, places=3)
@pytest.mark.parametrize("s1,s2,value", _load_data('jaro_distance'), ids=str)
def test_jaro_distance(jf, s1, s2, value):
value = float(value)
assertAlmostEqual(jf.jaro_distance(s1, s2), value, places=3)
@pytest.mark.parametrize("s1,s2,value", _load_data('hamming'), ids=str)
def test_hamming_distance(jf, s1, s2, value):
value = int(value)
assert jf.hamming_distance(s1, s2) == value
@pytest.mark.parametrize("s1,s2,value", _load_data('levenshtein'), ids=str)
def test_levenshtein_distance(jf, s1, s2, value):
value = int(value)
assert jf.levenshtein_distance(s1, s2) == value
@pytest.mark.parametrize("s1,s2,value", _load_data('damerau_levenshtein'), ids=str)
def test_damerau_levenshtein_distance(jf, s1, s2, value):
value = int(value)
assert jf.damerau_levenshtein_distance(s1, s2) == value
@pytest.mark.parametrize("s1,code", _load_data('soundex'), ids=str)
def test_soundex(jf, s1, code):
assert jf.soundex(s1) == code
@pytest.mark.parametrize("s1,code", _load_data('metaphone'), ids=str)
def test_metaphone(jf, s1, code):
assert jf.metaphone(s1) == code
@pytest.mark.parametrize("s1,s2", _load_data('nysiis'), ids=str)
def test_nysiis(jf, s1, s2):
assert jf.nysiis(s1) == s2
@pytest.mark.parametrize("s1,s2", _load_data('match_rating_codex'), ids=str)
def test_match_rating_codex(jf, s1, s2):
assert jf.match_rating_codex(s1) == s2
@pytest.mark.parametrize("s1,s2,value", _load_data('match_rating_comparison'), ids=str)
def test_match_rating_comparison(jf, s1, s2, value):
value = {'True': True, 'False': False, 'None': None}[value]
assert jf.match_rating_comparison(s1, s2) is value
# use non-parameterized version for speed
# @pytest.mark.parametrize("a,b", _load_data('porter'), ids=str)
# def test_porter_stem(jf, a, b):
# assert jf.porter_stem(a) == b
def test_porter_stem(jf):
with open('testdata/porter.csv', **open_kwargs) as f:
reader = csv.reader(f)
for (a, b) in reader:
assert jf.porter_stem(a) == b
if platform.python_implementation() == 'CPython':
def test_match_rating_comparison_segfault():
import hashlib
from jellyfish import cjellyfish as jf
sha1s = ['{}'.format(hashlib.sha1(str(v).encode('ascii')).hexdigest())
for v in range(100)]
# this segfaulted on 0.1.2
assert [[jf.match_rating_comparison(h1, h2) for h1 in sha1s] for h2 in sha1s]
def test_damerau_levenshtein_unicode_segfault():
# unfortunate difference in behavior between Py & C versions
from jellyfish.cjellyfish import damerau_levenshtein_distance as c_dl
from jellyfish._jellyfish import damerau_levenshtein_distance as py_dl
s1 = 'mylifeoutdoors'
s2 = 'нахлыст'
with pytest.raises(ValueError):
c_dl(s1, s2)
with pytest.raises(ValueError):
c_dl(s2, s1)
assert py_dl(s1, s2) == 14
assert py_dl(s2, s1) == 14
def test_jaro_winkler_long_tolerance(jf):
no_lt = jf.jaro_winkler('two long strings', 'two long stringz', long_tolerance=False)
with_lt = jf.jaro_winkler('two long strings', 'two long stringz', long_tolerance=True)
# make sure long_tolerance does something
assertAlmostEqual(no_lt, 0.975)
assertAlmostEqual(with_lt, 0.984)
def test_damerau_levenshtein_distance_type(jf):
jf.damerau_levenshtein_distance('abc', 'abc')
with pytest.raises(TypeError) as exc:
jf.damerau_levenshtein_distance(b'abc', b'abc')
assert 'expected' in str(exc.value)
def test_levenshtein_distance_type(jf):
assert jf.levenshtein_distance('abc', 'abc') == 0
with pytest.raises(TypeError) as exc:
jf.levenshtein_distance(b'abc', b'abc')
assert 'expected' in str(exc.value)
def test_jaro_distance_type(jf):
assert jf.jaro_distance('abc', 'abc') == 1
with pytest.raises(TypeError) as exc:
jf.jaro_distance(b'abc', b'abc')
assert 'expected' in str(exc.value)
def test_jaro_winkler_type(jf):
assert jf.jaro_winkler('abc', 'abc') == 1
with pytest.raises(TypeError) as exc:
jf.jaro_winkler(b'abc', b'abc')
assert 'expected' in str(exc.value)
def test_mra_comparison_type(jf):
assert jf.match_rating_comparison('abc', 'abc') is True
with pytest.raises(TypeError) as exc:
jf.match_rating_comparison(b'abc', b'abc')
assert 'expected' in str(exc.value)
def test_hamming_type(jf):
assert jf.hamming_distance('abc', 'abc') == 0
with pytest.raises(TypeError) as exc:
jf.hamming_distance(b'abc', b'abc')
assert 'expected' in str(exc.value)
def test_soundex_type(jf):
assert jf.soundex('ABC') == 'A120'
with pytest.raises(TypeError) as exc:
jf.soundex(b'ABC')
assert 'expected' in str(exc.value)
def test_metaphone_type(jf):
assert jf.metaphone('abc') == 'ABK'
with pytest.raises(TypeError) as exc:
jf.metaphone(b'abc')
assert 'expected' in str(exc.value)
def test_nysiis_type(jf):
assert jf.nysiis('abc') == 'ABC'
with pytest.raises(TypeError) as exc:
jf.nysiis(b'abc')
assert 'expected' in str(exc.value)
def test_mr_codex_type(jf):
assert jf.match_rating_codex('abc') == 'ABC'
with pytest.raises(TypeError) as exc:
jf.match_rating_codex(b'abc')
assert 'expected' in str(exc.value)
def test_porter_type(jf):
assert jf.porter_stem('abc') == 'abc'
with pytest.raises(TypeError) as exc:
jf.porter_stem(b'abc')
assert 'expected' in str(exc.value)
| 6,527 | Python | .py | 153 | 37.45098 | 90 | 0.678453 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,751 | __init__.py | rembo10_headphones/lib/jellyfish/__init__.py |
try:
from .cjellyfish import * # noqa
except ImportError:
from ._jellyfish import * # noqa
| 103 | Python | .py | 4 | 22.75 | 38 | 0.676768 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,752 | compat.py | rembo10_headphones/lib/jellyfish/compat.py |
import sys
import itertools
IS_PY3 = sys.version_info[0] == 3
if IS_PY3:
_range = range
_zip_longest = itertools.zip_longest
_no_bytes_err = 'expected str, got bytes'
else:
_range = xrange
_zip_longest = itertools.izip_longest
_no_bytes_err = 'expected unicode, got str'
| 297 | Python | .py | 11 | 23.636364 | 47 | 0.690141 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,753 | porter.py | rembo10_headphones/lib/jellyfish/porter.py |
from .compat import _range
_s2_options = {
'a': ((['a', 't', 'i', 'o', 'n', 'a', 'l'], ['a', 't', 'e']),
(['t', 'i', 'o', 'n', 'a', 'l'], ['t', 'i', 'o', 'n'])),
'c': ((['e', 'n', 'c', 'i'], ['e', 'n', 'c', 'e']),
(['a', 'n', 'c', 'i'], ['a', 'n', 'c', 'e']),),
'e': ((['i', 'z', 'e', 'r'], ['i', 'z', 'e']),),
'l': ((['b', 'l', 'i'], ['b', 'l', 'e']),
(['a', 'l', 'l', 'i'], ['a', 'l']),
(['e', 'n', 't', 'l', 'i'], ['e', 'n', 't']),
(['e', 'l', 'i'], ['e']),
(['o', 'u', 's', 'l', 'i'], ['o', 'u', 's']),),
'o': ((['i', 'z', 'a', 't', 'i', 'o', 'n'], ['i', 'z', 'e']),
(['a', 't', 'i', 'o', 'n'], ['a', 't', 'e']),
(['a', 't', 'o', 'r'], ['a', 't', 'e']),),
's': ((['a', 'l', 'i', 's', 'm'], ['a', 'l']),
(['i', 'v', 'e', 'n', 'e', 's', 's'], ['i', 'v', 'e']),
(['f', 'u', 'l', 'n', 'e', 's', 's'], ['f', 'u', 'l']),
(['o', 'u', 's', 'n', 'e', 's', 's'], ['o', 'u', 's']),),
't': ((['a', 'l', 'i', 't', 'i'], ['a', 'l']),
(['i', 'v', 'i', 't', 'i'], ['i', 'v', 'e']),
(['b', 'i', 'l', 'i', 't', 'i'], ['b', 'l', 'e']),),
'g': ((['l', 'o', 'g', 'i'], ['l', 'o', 'g']),),
}
_s3_options = {
'e': ((['i', 'c', 'a', 't', 'e'], ['i', 'c']),
(['a', 't', 'i', 'v', 'e'], []),
(['a', 'l', 'i', 'z', 'e'], ['a', 'l']),),
'i': ((['i', 'c', 'i', 't', 'i'], ['i', 'c']),),
'l': ((['i', 'c', 'a', 'l'], ['i', 'c']),
(['f', 'u', 'l'], []),),
's': ((['n', 'e', 's', 's'], []),),
}
_s4_endings = {
'a': (['a', 'l'],),
'c': (['a', 'n', 'c', 'e'], ['e', 'n', 'c', 'e']),
'e': (['e', 'r'],),
'i': (['i', 'c'],),
'l': (['a', 'b', 'l', 'e'], ['i', 'b', 'l', 'e']),
'n': (['a', 'n', 't'], ['e', 'm', 'e', 'n', 't'], ['m', 'e', 'n', 't'],
['e', 'n', 't']),
# handle 'o' separately
's': (['i', 's', 'm'],),
't': (['a', 't', 'e'], ['i', 't', 'i']),
'u': (['o', 'u', 's'],),
'v': (['i', 'v', 'e'],),
'z': (['i', 'z', 'e'],),
}
class Stemmer(object):
def __init__(self, b):
self.b = list(b)
self.k = len(b)-1
self.j = 0
def cons(self, i):
""" True iff b[i] is a consonant """
if self.b[i] in 'aeiou':
return False
elif self.b[i] == 'y':
return True if i == 0 else not self.cons(i-1)
return True
def m(self):
n = i = 0
while True:
if i > self.j:
return n
if not self.cons(i):
break
i += 1
i += 1
while True:
while True:
if i > self.j:
return n
if self.cons(i):
break
i += 1
i += 1
n += 1
while True:
if i > self.j:
return n
if not self.cons(i):
break
i += 1
i += 1
def vowel_in_stem(self):
""" True iff 0...j contains vowel """
for i in _range(0, self.j+1):
if not self.cons(i):
return True
return False
def doublec(self, j):
""" True iff j, j-1 contains double consonant """
if j < 1 or self.b[j] != self.b[j-1]:
return False
return self.cons(j)
def cvc(self, i):
""" True iff i-2,i-1,i is consonent-vowel consonant
and if second c isn't w,x, or y.
used to restore e at end of short words like cave, love, hope, crime
"""
if (i < 2 or not self.cons(i) or self.cons(i-1) or not self.cons(i-2) or
self.b[i] in 'wxy'):
return False
return True
    def ends(self, s):
        """ True iff 0...k ends with string s """
        length = len(s)
        res = (self.b[self.k-length+1:self.k+1] == s)
if res:
self.j = self.k - length
return res
def setto(self, s):
""" set j+1...k to string s, readjusting k """
length = len(s)
self.b[self.j+1:self.j+1+length] = s
self.k = self.j + length
def r(self, s):
if self.m() > 0:
self.setto(s)
def step1ab(self):
if self.b[self.k] == 's':
if self.ends(['s', 's', 'e', 's']):
self.k -= 2
elif self.ends(['i', 'e', 's']):
self.setto(['i'])
elif self.b[self.k-1] != 's':
self.k -= 1
if self.ends(['e', 'e', 'd']):
if self.m() > 0:
self.k -= 1
elif ((self.ends(['e', 'd']) or self.ends(['i', 'n', 'g'])) and
self.vowel_in_stem()):
self.k = self.j
if self.ends(['a', 't']):
self.setto(['a', 't', 'e'])
elif self.ends(['b', 'l']):
self.setto(['b', 'l', 'e'])
elif self.ends(['i', 'z']):
self.setto(['i', 'z', 'e'])
elif self.doublec(self.k):
self.k -= 1
if self.b[self.k] in 'lsz':
self.k += 1
elif self.m() == 1 and self.cvc(self.k):
self.setto(['e'])
def step1c(self):
""" turn terminal y into i if there's a vowel in stem """
if self.ends(['y']) and self.vowel_in_stem():
self.b[self.k] = 'i'
def step2and3(self):
for end, repl in _s2_options.get(self.b[self.k-1], []):
if self.ends(end):
self.r(repl)
break
for end, repl in _s3_options.get(self.b[self.k], []):
if self.ends(end):
self.r(repl)
break
def step4(self):
ch = self.b[self.k-1]
if ch == 'o':
if not ((self.ends(['i', 'o', 'n']) and self.b[self.j] in 'st') or
self.ends(['o', 'u'])):
return
else:
endings = _s4_endings.get(ch, [])
for end in endings:
if self.ends(end):
break
else:
return
if self.m() > 1:
self.k = self.j
def step5(self):
self.j = self.k
if self.b[self.k] == 'e':
a = self.m()
if a > 1 or a == 1 and not self.cvc(self.k-1):
self.k -= 1
if self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1:
self.k -= 1
def result(self):
return ''.join(self.b[:self.k+1])
def stem(self):
if self.k > 1:
self.step1ab()
self.step1c()
self.step2and3()
self.step4()
self.step5()
return self.result()
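# Illustrative sketch (added; not part of upstream jellyfish): two classic
# reductions from Porter's paper, both handled by step1ab above.
def _stemmer_example():
    assert Stemmer("caresses").stem() == "caress"
    assert Stemmer("ponies").stem() == "poni"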
| 6,796 | Python | .py | 192 | 24.760417 | 80 | 0.343265 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,754 | transform.py | rembo10_headphones/lib/bencode/transform.py |
"""Code, for converting bencoded data to string and back."""
def be_to_str(data: bytes) -> str:
"""Convert bencoded data from bytes to string"""
result = []
for num in data:
        # Non-printable characters, double quotes, square brackets, backslash
if num < 32 or num in [34, 91, 92, 93] or num > 126:
result.append("[%0.2x]" % num)
else:
result.append(chr(num))
return "".join(result)
def str_to_be(data: str) -> bytes:
"""Convert bencoded data from string to bytes"""
result = bytearray()
seq_marker = False
seq_chars = ""
for char in data:
if char == "[":
seq_marker = True
continue
if char == "]":
result.append(int(seq_chars, 16))
seq_marker = False
seq_chars = ""
continue
if seq_marker:
seq_chars += char
else:
result.append(ord(char))
return bytes(result)
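# Illustrative sketch (added): the two helpers round-trip, escaping
# non-printable bytes as "[xx]".
def _transform_example():
    assert be_to_str(b"2:\x00a") == "2:[00]a"
    assert str_to_be("2:[00]a") == b"2:\x00a"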
| 977 | Python | .py | 30 | 24.2 | 74 | 0.550955 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,755 | __init__.py | rembo10_headphones/lib/bencode/__init__.py |
"""We import some functions here, so they are available on the package level"""
from .bencode import decode, encode # noqa
from .torrent import decode_torrent, encode_torrent # noqa
from .transform import be_to_str, str_to_be # noqa
| 236 | Python | .py | 4 | 58 | 79 | 0.762931 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,756 | torrent.py | rembo10_headphones/lib/bencode/torrent.py |
"""Code, which deals with torrent data."""
from typing import Any
from bencode.bencode import decode, encode
def _decode_object(data: Any, encoding: str, errors: str) -> Any:
"""Replace bytes with strings in the provided Python object"""
if isinstance(data, bytes):
return data.decode(encoding, errors)
if isinstance(data, dict):
result_dict = {}
for key, value in data.items():
decoded_key = _decode_object(key, encoding, errors)
if decoded_key.endswith(".utf-8"):
decoded_value = _decode_object(value, "utf8", errors)
elif decoded_key in ["ed2k", "filehash", "pieces"]:
decoded_value = value.hex()
else:
decoded_value = _decode_object(value, encoding, errors)
result_dict[decoded_key] = decoded_value
return result_dict
if isinstance(data, list):
return [_decode_object(item, encoding, errors) for item in data]
return data
def _encode_object(data: Any, encoding: str, errors: str) -> Any:
"""Replace strings with bytes in the provided Python object"""
if isinstance(data, str):
return data.encode(encoding, errors)
if isinstance(data, dict):
result_dict = {}
for key, value in data.items():
encoded_key = _encode_object(key, encoding, errors)
if encoded_key.endswith(b".utf-8"):
encoded_value = _encode_object(value, "utf8", errors)
elif encoded_key in [b"ed2k", b"filehash", b"pieces"]:
encoded_value = bytes.fromhex(value)
else:
encoded_value = _encode_object(value, encoding, errors)
result_dict[encoded_key] = encoded_value
return result_dict
if isinstance(data, list):
return [_encode_object(item, encoding, errors) for item in data]
return data
def decode_torrent(
data: bytes, encoding: str = "utf_8", errors: str = "strict"
) -> dict:
"""Convert the given torrent to a Python dictionary.
Fields are decoded:
- using utf8 (if the key ends with ".utf-8" suffix, like "name.utf-8")
- using the provided encoding (for other human readable fields)
- as hex (for binary fields)
Args:
data: some binary data to decode
encoding: which encoding should be used
(https://docs.python.org/3/library/codecs.html#standard-encodings)
errors: what to do if decoding is not possible
(https://docs.python.org/3/library/codecs.html#error-handlers)
Raises:
UnicodeDecodeError: If some key or value cannot be decoded using the
provided encoding
ValueError: If the first argument is not of type bytes
"""
if not isinstance(data, bytes):
raise ValueError(
f"Cannot decode data, expected bytes, got {type(data)} instead."
)
return _decode_object(decode(data), encoding, errors)
def encode_torrent(
data: dict, encoding: str = "utf8", errors: str = "strict"
) -> bytes:
"""Convert the given Python dictionary to a torrent
Mirror function for the "decode_torrent" function.
Raises:
UnicodeEncodeError: If some key or value cannot be encoded using the
provided encoding
ValueError: If the first argument is not of type dict
"""
if not isinstance(data, dict):
raise ValueError(
f"Cannot encode data, expected dict, got {type(data)} instead."
)
return encode(_encode_object(data, encoding, errors))
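# Illustrative sketch (added): round-tripping a minimal metainfo-like dict
# through the two public helpers.
def _torrent_example():
    assert decode_torrent(b"d4:name3:abce") == {"name": "abc"}
    assert encode_torrent({"name": "abc"}) == b"d4:name3:abce"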
| 3,568 | Python | .py | 80 | 36.25 | 78 | 0.64158 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,757 | bencode.py | rembo10_headphones/lib/bencode/bencode.py |
"""Code, which deals with bencoded data."""
from dataclasses import dataclass
from typing import Union
COLON = ord(":")
END_MARKER = ord("e")
START_DICT = ord("d")
START_INTEGER = ord("i")
START_LIST = ord("l")
@dataclass
class BencodedString:
"""An internal container for bencoded strings"""
def __init__(self, data):
"""Called when the object is created, sets its attributes"""
self.bytes = bytearray(data)
def del_prefix(self, index):
"""Delete the prefix of specified length"""
del self.bytes[:index]
def get_prefix(self, index):
"""Get the prefix of specified length (as bytes)"""
return bytes(self.bytes[:index])
def _decode(data: BencodedString) -> Union[bytes, dict, int, list]:
"""Convert the given bencoded string to a Python object.
Args:
Some BencodedString
Raises:
ValueError:
If the argument is empty
            If the first byte doesn't match a data type supported by bencode
Returns:
A Python object
"""
if not data.bytes:
raise ValueError("Cannot decode an empty bencoded string.")
if data.bytes[0] == START_DICT:
return _decode_dict(data)
if data.bytes[0] == START_LIST:
return _decode_list(data)
if data.bytes[0] == START_INTEGER:
return _decode_int(data)
if chr(data.bytes[0]).isdigit():
return _decode_bytes(data)
raise ValueError(
"Cannot decode data, expected the first byte to be one of "
f"'d', 'i', 'l' or a digit, got {chr(data.bytes[0])!r} instead."
)
def _decode_bytes(data: BencodedString) -> bytes:
"""Extract the first byte string from the given bencoded string
Args:
Some BencodedString, which starts with a byte string
Raises:
ValueError:
If the byte string doesn't contain a delimiter
            If the real string length is shorter than the prefix length
Returns:
An extracted byte string
"""
# Get byte string length
delimiter_index = data.bytes.find(COLON)
if delimiter_index > 0:
length_prefix = data.get_prefix(delimiter_index)
string_length = int(length_prefix.decode("ascii"))
data.del_prefix(delimiter_index + 1)
else:
raise ValueError(
"Cannot decode a byte string, it doesn't contain a delimiter. "
"Most likely the bencoded string is incomplete or incorrect."
)
# Get byte string data
if len(data.bytes) >= string_length:
result_bytes = data.get_prefix(string_length)
data.del_prefix(string_length)
else:
raise ValueError(
f"Cannot decode a byte string (prefix length "
f"- {string_length}, real_length - {len(data.bytes)}. "
"Most likely the bencoded string is incomplete or incorrect."
)
return result_bytes
def _decode_dict(data: BencodedString) -> dict:
"""Extract the first dict from the given bencoded string
Args:
Some BencodedString, which starts with a dictionary
Raises:
ValueError: If bencoded string ended before the end marker was found
Returns:
An extracted dictionary
"""
result_dict = {}
data.del_prefix(1)
while True:
if data.bytes:
if data.bytes[0] != END_MARKER:
key = _decode(data)
value = _decode(data)
result_dict[key] = value
else:
data.del_prefix(1)
break
else:
raise ValueError(
"Cannot decode a dictionary, reached end of the bencoded "
"string before the end marker was found. Most likely the "
"bencoded string is incomplete or incorrect."
)
return result_dict
def _decode_int(data: BencodedString) -> int:
"""Extract the first integer from the given bencoded string
Args:
Some BencodedString, which starts with an integer
Raises:
ValueError: If bencoded string ended before the end marker was found
Returns:
An extracted integer
"""
data.del_prefix(1)
end_marker_index = data.bytes.find(END_MARKER)
if end_marker_index > 0:
result_bytes = data.get_prefix(end_marker_index)
data.del_prefix(end_marker_index + 1)
else:
raise ValueError(
"Cannot decode an integer, reached the end of the bencoded "
"string before the end marker was found. Most likely the "
"bencoded string is incomplete or incorrect."
)
return int(result_bytes.decode("ascii"))
def _decode_list(data: BencodedString) -> list:
"""Extract the first list from the given bencoded string
Args:
Some BencodedString, which starts with a list
Raises:
ValueError: If bencoded string ended before the end marker was found
Returns:
An extracted list
"""
result_list = []
data.del_prefix(1)
while True:
if data.bytes:
if data.bytes[0] != END_MARKER:
result_list.append(_decode(data))
else:
data.del_prefix(1)
break
else:
raise ValueError(
"Cannot decode a list, reached end of the bencoded string "
"before the end marker was found. Most likely the bencoded "
"string is incomplete or incorrect."
)
return result_list
def _encode_bytes(source: bytes) -> bytes:
"""Encode provided bytes as a bencoded string"""
return str(len(source)).encode("ascii") + b":" + source
def _encode_dict(source: dict) -> bytes:
"""Encode provided dictionary as a bencoded string"""
result_data = b"d"
for key, value in source.items():
result_data += encode(key) + encode(value)
return result_data + b"e"
def _encode_int(source: int) -> bytes:
"""Encode provided integer as a bencoded string"""
return b"i" + str(source).encode("ascii") + b"e"
def _encode_list(source: list) -> bytes:
"""Encode provided list as a bencoded string"""
result_data = b"l"
for item in source:
result_data += encode(item)
return result_data + b"e"
def decode(data: bytes) -> Union[bytes, dict, int, list]:
"""Convert the given bencoded string to a Python object.
Raises:
ValueError:
If the argument is not of type bytes or is empty
            If the first byte doesn't match a data type supported by bencode
Returns:
A Python object
"""
if not isinstance(data, bytes):
raise ValueError(
f"Cannot decode data, expected bytes, got {type(data)} instead."
)
return _decode(BencodedString(data))
def encode(data: Union[bytes, dict, int, list]) -> bytes:
"""Convert the given Python object to a bencoded string.
Raises:
ValueError: If the provided object type is not supported
Returns:
A bencoded string
"""
if isinstance(data, bytes):
return _encode_bytes(data)
if isinstance(data, dict):
return _encode_dict(data)
if isinstance(data, int):
return _encode_int(data)
if isinstance(data, list):
return _encode_list(data)
raise ValueError(
f"Cannot encode data: objects of type {type(data)} are not supported."
)
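# Illustrative sketch (added): all four bencode shapes in one round trip.
# Note that _encode_dict emits keys in insertion order.
def _bencode_example():
    payload = {b"num": 42, b"list": [b"a", 7]}
    encoded = encode(payload)
    assert encoded == b"d3:numi42e4:listl1:ai7eee"
    assert decode(encoded) == payload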
| 7,431 | Python | .py | 201 | 29.139303 | 78 | 0.631057 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,758 | _compat.py | rembo10_headphones/lib/concurrent/futures/_compat.py |
from keyword import iskeyword as _iskeyword
from operator import itemgetter as _itemgetter
import sys as _sys
def namedtuple(typename, field_names):
"""Returns a new subclass of tuple with named fields.
>>> Point = namedtuple('Point', 'x y')
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
>>> p[0] + p[1] # indexable like a plain tuple
33
>>> x, y = p # unpack like a regular tuple
>>> x, y
(11, 22)
    >>> p.x + p.y # fields also accessible by name
33
>>> d = p._asdict() # convert to a dictionary
>>> d['x']
11
>>> Point(**d) # convert from a dictionary
Point(x=11, y=22)
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
Point(x=100, y=22)
"""
# Parse and validate the field names. Validation serves two purposes,
# generating informative error messages and preventing template injection attacks.
if isinstance(field_names, str):
field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
field_names = tuple(map(str, field_names))
for name in (typename,) + field_names:
if not all(c.isalnum() or c=='_' for c in name):
raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a keyword: %r' % name)
if name[0].isdigit():
raise ValueError('Type names and field names cannot start with a number: %r' % name)
seen_names = set()
for name in field_names:
if name.startswith('_'):
raise ValueError('Field names cannot start with an underscore: %r' % name)
if name in seen_names:
raise ValueError('Encountered duplicate field name: %r' % name)
seen_names.add(name)
# Create and fill-in the class template
numfields = len(field_names)
argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes
reprtxt = ', '.join('%s=%%r' % name for name in field_names)
dicttxt = ', '.join('%r: t[%d]' % (name, pos) for pos, name in enumerate(field_names))
template = '''class %(typename)s(tuple):
'%(typename)s(%(argtxt)s)' \n
__slots__ = () \n
_fields = %(field_names)r \n
def __new__(_cls, %(argtxt)s):
return _tuple.__new__(_cls, (%(argtxt)s)) \n
@classmethod
def _make(cls, iterable, new=tuple.__new__, len=len):
'Make a new %(typename)s object from a sequence or iterable'
result = new(cls, iterable)
if len(result) != %(numfields)d:
raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
return result \n
def __repr__(self):
return '%(typename)s(%(reprtxt)s)' %% self \n
def _asdict(t):
'Return a new dict which maps field names to their values'
return {%(dicttxt)s} \n
def _replace(_self, **kwds):
'Return a new %(typename)s object replacing specified fields with new values'
result = _self._make(map(kwds.pop, %(field_names)r, _self))
if kwds:
raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
return result \n
def __getnewargs__(self):
return tuple(self) \n\n''' % locals()
for i, name in enumerate(field_names):
template += ' %s = _property(_itemgetter(%d))\n' % (name, i)
# Execute the template string in a temporary namespace and
# support tracing utilities by setting a value for frame.f_globals['__name__']
namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
_property=property, _tuple=tuple)
try:
exec(template, namespace)
except SyntaxError:
e = _sys.exc_info()[1]
raise SyntaxError(e.message + ':\n' + template)
result = namespace[typename]
# For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example).
if hasattr(_sys, '_getframe'):
result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
return result
if _sys.version_info[0] < 3:
def reraise(exc, traceback):
locals_ = {'exc_type': type(exc), 'exc_value': exc, 'traceback': traceback}
exec('raise exc_type, exc_value, traceback', {}, locals_)
else:
def reraise(exc, traceback):
# Tracebacks are embedded in exceptions in Python 3
raise exc
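# A minimal usage sketch (hypothetical, appended for illustration; not part
# of the original module): capture an exception and its traceback in one
# place and re-raise the pair elsewhere. On Python 2 the original traceback
# is restored explicitly; on Python 3 it travels with the exception object.
if __name__ == '__main__':
    try:
        1 / 0
    except ZeroDivisionError:
        exc, tb = _sys.exc_info()[1:]
    try:
        reraise(exc, tb)
    except ZeroDivisionError as err:
        print('re-raised: %r' % (err,))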
| 4,969 | Python | .py | 100 | 41.93 | 126 | 0.592837 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,759 | thread.py | rembo10_headphones/lib/concurrent/futures/thread.py |
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Implements ThreadPoolExecutor."""
import atexit
import threading
import weakref
import sys
from concurrent.futures import _base
try:
    import queue
except ImportError:
    import Queue as queue  # Python 2 fallback
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
# Workers are created as daemon threads. This is done to allow the interpreter
# to exit when there are still idle threads in a ThreadPoolExecutor's thread
# pool (i.e. shutdown() was not called). However, allowing workers to die with
# the interpreter has two undesirable properties:
#   - The workers would still be running during interpreter shutdown,
# meaning that they would fail in unpredictable ways.
# - The workers could be killed while evaluating a work item, which could
# be bad if the callable being evaluated has external side-effects e.g.
# writing to a file.
#
# To work around this problem, an exit handler is installed which tells the
# workers to exit when their work queues are empty and then waits until the
# threads finish.
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
def _python_exit():
global _shutdown
_shutdown = True
items = list(_threads_queues.items())
for t, q in items:
q.put(None)
for t, q in items:
t.join()
atexit.register(_python_exit)
class _WorkItem(object):
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
def run(self):
if not self.future.set_running_or_notify_cancel():
return
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
e, tb = sys.exc_info()[1:]
self.future.set_exception_info(e, tb)
else:
self.future.set_result(result)
def _worker(executor_reference, work_queue):
try:
while True:
work_item = work_queue.get(block=True)
if work_item is not None:
work_item.run()
continue
executor = executor_reference()
# Exit if:
# - The interpreter is shutting down OR
# - The executor that owns the worker has been collected OR
# - The executor that owns the worker has been shutdown.
if _shutdown or executor is None or executor._shutdown:
            # Notify other workers
work_queue.put(None)
return
del executor
except BaseException:
_base.LOGGER.critical('Exception in worker', exc_info=True)
class ThreadPoolExecutor(_base.Executor):
def __init__(self, max_workers):
"""Initializes a new ThreadPoolExecutor instance.
Args:
max_workers: The maximum number of threads that can be used to
execute the given calls.
"""
self._max_workers = max_workers
self._work_queue = queue.Queue()
self._threads = set()
self._shutdown = False
self._shutdown_lock = threading.Lock()
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
if self._shutdown:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = _WorkItem(f, fn, args, kwargs)
self._work_queue.put(w)
self._adjust_thread_count()
return f
submit.__doc__ = _base.Executor.submit.__doc__
def _adjust_thread_count(self):
# When the executor gets lost, the weakref callback will wake up
# the worker threads.
def weakref_cb(_, q=self._work_queue):
q.put(None)
# TODO(bquinlan): Should avoid creating new threads if there are more
# idle threads than items in the work queue.
if len(self._threads) < self._max_workers:
t = threading.Thread(target=_worker,
args=(weakref.ref(self, weakref_cb),
self._work_queue))
t.daemon = True
t.start()
self._threads.add(t)
_threads_queues[t] = self._work_queue
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown = True
self._work_queue.put(None)
if wait:
for t in self._threads:
t.join()
shutdown.__doc__ = _base.Executor.shutdown.__doc__
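# A minimal usage sketch (hypothetical, appended for illustration; not part
# of the original module): submit callables to a small pool and collect the
# results in submission order. The context manager calls shutdown(wait=True).
if __name__ == '__main__':
    def _square(n):
        return n * n
    with ThreadPoolExecutor(max_workers=4) as pool:
        futures = [pool.submit(_square, i) for i in range(8)]
        print([f.result() for f in futures])  # [0, 1, 4, 9, 16, 25, 36, 49]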
| 4,568 | Python | .py | 117 | 30.512821 | 80 | 0.619187 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,760 | process.py | rembo10_headphones/lib/concurrent/futures/process.py |
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Implements ProcessPoolExecutor.
The follow diagram and text describe the data-flow through the system:
|======================= In-process =====================|== Out-of-process ==|
+----------+ +----------+ +--------+ +-----------+ +---------+
| | => | Work Ids | => | | => | Call Q | => | |
| | +----------+ | | +-----------+ | |
| | | ... | | | | ... | | |
| | | 6 | | | | 5, call() | | |
| | | 7 | | | | ... | | |
| Process | | ... | | Local | +-----------+ | Process |
| Pool | +----------+ | Worker | | #1..n |
| Executor | | Thread | | |
| | +----------- + | | +-----------+ | |
| | <=> | Work Items | <=> | | <= | Result Q | <= | |
| | +------------+ | | +-----------+ | |
| | | 6: call() | | | | ... | | |
| | | future | | | | 4, result | | |
| | | ... | | | | 3, except | | |
+----------+ +------------+ +--------+ +-----------+ +---------+
Executor.submit() called:
- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
- adds the id of the _WorkItem to the "Work Ids" queue
Local worker thread:
- reads work ids from the "Work Ids" queue and looks up the corresponding
WorkItem from the "Work Items" dict: if the work item has been cancelled then
it is simply removed from the dict, otherwise it is repackaged as a
_CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
- reads _ResultItems from "Result Q", updates the future stored in the
"Work Items" dict and deletes the dict entry
Process #1..n:
- reads _CallItems from "Call Q", executes the calls, and puts the resulting
    _ResultItems in "Result Q"
"""
import atexit
import multiprocessing
import threading
import weakref
import sys
from concurrent.futures import _base
try:
    import queue
except ImportError:
    import Queue as queue  # Python 2 fallback
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
# Workers are created as daemon threads and processes. This is done to allow the
# interpreter to exit when there are still idle processes in a
# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
# allowing workers to die with the interpreter has two undesirable properties:
#   - The workers would still be running during interpreter shutdown,
# meaning that they would fail in unpredictable ways.
# - The workers could be killed while evaluating a work item, which could
# be bad if the callable being evaluated has external side-effects e.g.
# writing to a file.
#
# To work around this problem, an exit handler is installed which tells the
# workers to exit when their work queues are empty and then waits until the
# threads/processes finish.
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
def _python_exit():
global _shutdown
_shutdown = True
items = list(_threads_queues.items())
for t, q in items:
q.put(None)
for t, q in items:
t.join()
# Controls how many more calls than processes will be queued in the call queue.
# A smaller number will mean that processes spend more time idle waiting for
# work while a larger number will make Future.cancel() succeed less frequently
# (Futures in the call queue cannot be cancelled).
EXTRA_QUEUED_CALLS = 1
class _WorkItem(object):
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
class _ResultItem(object):
def __init__(self, work_id, exception=None, result=None):
self.work_id = work_id
self.exception = exception
self.result = result
class _CallItem(object):
def __init__(self, work_id, fn, args, kwargs):
self.work_id = work_id
self.fn = fn
self.args = args
self.kwargs = kwargs
def _process_worker(call_queue, result_queue):
"""Evaluates calls from call_queue and places the results in result_queue.
This worker is run in a separate process.
Args:
call_queue: A multiprocessing.Queue of _CallItems that will be read and
evaluated by the worker.
        result_queue: A multiprocessing.Queue of _ResultItems that will be written
to by the worker.
"""
while True:
call_item = call_queue.get(block=True)
if call_item is None:
# Wake up queue management thread
result_queue.put(None)
return
try:
r = call_item.fn(*call_item.args, **call_item.kwargs)
except BaseException:
e = sys.exc_info()[1]
result_queue.put(_ResultItem(call_item.work_id,
exception=e))
else:
result_queue.put(_ResultItem(call_item.work_id,
result=r))
def _add_call_item_to_queue(pending_work_items,
work_ids,
call_queue):
"""Fills call_queue with _WorkItems from pending_work_items.
This function never blocks.
Args:
pending_work_items: A dict mapping work ids to _WorkItems e.g.
{5: <_WorkItem...>, 6: <_WorkItem...>, ...}
work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
are consumed and the corresponding _WorkItems from
pending_work_items are transformed into _CallItems and put in
call_queue.
call_queue: A multiprocessing.Queue that will be filled with _CallItems
derived from _WorkItems.
"""
while True:
if call_queue.full():
return
try:
work_id = work_ids.get(block=False)
except queue.Empty:
return
else:
work_item = pending_work_items[work_id]
if work_item.future.set_running_or_notify_cancel():
call_queue.put(_CallItem(work_id,
work_item.fn,
work_item.args,
work_item.kwargs),
block=True)
else:
del pending_work_items[work_id]
continue
def _queue_management_worker(executor_reference,
processes,
pending_work_items,
work_ids_queue,
call_queue,
result_queue):
"""Manages the communication between this process and the worker processes.
This function is run in a local thread.
Args:
executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
this thread. Used to determine if the ProcessPoolExecutor has been
garbage collected and that this function can exit.
        processes: A list of the multiprocessing.Process instances used as
workers.
pending_work_items: A dict mapping work ids to _WorkItems e.g.
{5: <_WorkItem...>, 6: <_WorkItem...>, ...}
work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
call_queue: A multiprocessing.Queue that will be filled with _CallItems
derived from _WorkItems for processing by the process workers.
result_queue: A multiprocessing.Queue of _ResultItems generated by the
process workers.
"""
nb_shutdown_processes = [0]
def shutdown_one_process():
"""Tell a worker to terminate, which will in turn wake us again"""
call_queue.put(None)
nb_shutdown_processes[0] += 1
while True:
_add_call_item_to_queue(pending_work_items,
work_ids_queue,
call_queue)
result_item = result_queue.get(block=True)
if result_item is not None:
work_item = pending_work_items[result_item.work_id]
del pending_work_items[result_item.work_id]
if result_item.exception:
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
# - The interpreter is shutting down OR
# - The executor that owns this worker has been collected OR
# - The executor that owns this worker has been shutdown.
if _shutdown or executor is None or executor._shutdown_thread:
# Since no new work items can be added, it is safe to shutdown
# this thread if there are no pending work items.
if not pending_work_items:
while nb_shutdown_processes[0] < len(processes):
shutdown_one_process()
# If .join() is not called on the created processes then
# some multiprocessing.Queue methods may deadlock on Mac OS
# X.
for p in processes:
p.join()
call_queue.close()
return
del executor
_system_limits_checked = False
_system_limited = None
def _check_system_limits():
global _system_limits_checked, _system_limited
if _system_limits_checked:
if _system_limited:
raise NotImplementedError(_system_limited)
_system_limits_checked = True
try:
import os
nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
except (AttributeError, ValueError):
# sysconf not available or setting not available
return
if nsems_max == -1:
        # indeterminate limit; assume that the limit is determined
        # by available memory only
return
if nsems_max >= 256:
# minimum number of semaphores available
# according to POSIX
return
_system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
raise NotImplementedError(_system_limited)
class ProcessPoolExecutor(_base.Executor):
def __init__(self, max_workers=None):
"""Initializes a new ProcessPoolExecutor instance.
Args:
max_workers: The maximum number of processes that can be used to
execute the given calls. If None or not given then as many
worker processes will be created as the machine has processors.
"""
_check_system_limits()
if max_workers is None:
self._max_workers = multiprocessing.cpu_count()
else:
self._max_workers = max_workers
# Make the call queue slightly larger than the number of processes to
# prevent the worker processes from idling. But don't make it too big
# because futures in the call queue cannot be cancelled.
self._call_queue = multiprocessing.Queue(self._max_workers +
EXTRA_QUEUED_CALLS)
self._result_queue = multiprocessing.Queue()
self._work_ids = queue.Queue()
self._queue_management_thread = None
self._processes = set()
# Shutdown is a two-step process.
self._shutdown_thread = False
self._shutdown_lock = threading.Lock()
self._queue_count = 0
self._pending_work_items = {}
def _start_queue_management_thread(self):
# When the executor gets lost, the weakref callback will wake up
# the queue management thread.
def weakref_cb(_, q=self._result_queue):
q.put(None)
if self._queue_management_thread is None:
self._queue_management_thread = threading.Thread(
target=_queue_management_worker,
args=(weakref.ref(self, weakref_cb),
self._processes,
self._pending_work_items,
self._work_ids,
self._call_queue,
self._result_queue))
self._queue_management_thread.daemon = True
self._queue_management_thread.start()
_threads_queues[self._queue_management_thread] = self._result_queue
def _adjust_process_count(self):
for _ in range(len(self._processes), self._max_workers):
p = multiprocessing.Process(
target=_process_worker,
args=(self._call_queue,
self._result_queue))
p.start()
self._processes.add(p)
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
if self._shutdown_thread:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = _WorkItem(f, fn, args, kwargs)
self._pending_work_items[self._queue_count] = w
self._work_ids.put(self._queue_count)
self._queue_count += 1
# Wake up queue management thread
self._result_queue.put(None)
self._start_queue_management_thread()
self._adjust_process_count()
return f
submit.__doc__ = _base.Executor.submit.__doc__
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown_thread = True
if self._queue_management_thread:
# Wake up queue management thread
self._result_queue.put(None)
if wait:
self._queue_management_thread.join()
        # To reduce the risk of opening too many files, remove references to
# objects that use file descriptors.
self._queue_management_thread = None
self._call_queue = None
self._result_queue = None
self._processes = None
shutdown.__doc__ = _base.Executor.shutdown.__doc__
atexit.register(_python_exit)
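# A minimal usage sketch (hypothetical, appended for illustration; not part
# of the original module). The __main__ guard matters here: worker processes
# re-import this module, so the pool must not be created at import time.
if __name__ == '__main__':
    with ProcessPoolExecutor(max_workers=2) as pool:
        print(list(pool.map(abs, [-3, -2, -1])))  # [3, 2, 1]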
| 14,812 | Python | .py | 318 | 36.833333 | 100 | 0.569451 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,761 | __init__.py | rembo10_headphones/lib/concurrent/futures/__init__.py |
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Execute computations asynchronously using threads or processes."""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
from concurrent.futures._base import (FIRST_COMPLETED,
FIRST_EXCEPTION,
ALL_COMPLETED,
CancelledError,
TimeoutError,
Future,
Executor,
wait,
as_completed)
from concurrent.futures.thread import ThreadPoolExecutor
# Jython doesn't have multiprocessing
try:
from concurrent.futures.process import ProcessPoolExecutor
except ImportError:
pass
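# A minimal usage sketch (hypothetical, appended for illustration; not part
# of the original module): the names re-exported above cover the common
# workflow of submitting work and waiting on the resulting futures.
if __name__ == '__main__':
    with ThreadPoolExecutor(max_workers=2) as executor:
        futures = [executor.submit(pow, 2, n) for n in range(5)]
        done, not_done = wait(futures, return_when=ALL_COMPLETED)
        print(sorted(f.result() for f in done))  # [1, 2, 4, 8, 16]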
| 877 | Python | .py | 19 | 28.526316 | 69 | 0.533958 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,762 | _base.py | rembo10_headphones/lib/concurrent/futures/_base.py |
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
import logging
import threading
import time
from concurrent.futures._compat import reraise
try:
from collections import namedtuple
except ImportError:
from concurrent.futures._compat import namedtuple
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
FIRST_COMPLETED = 'FIRST_COMPLETED'
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
ALL_COMPLETED = 'ALL_COMPLETED'
_AS_COMPLETED = '_AS_COMPLETED'
# Possible future states (for internal use by the futures package).
PENDING = 'PENDING'
RUNNING = 'RUNNING'
# The future was cancelled by the user...
CANCELLED = 'CANCELLED'
# ...and _Waiter.add_cancelled() was called by a worker.
CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
FINISHED = 'FINISHED'
_FUTURE_STATES = [
PENDING,
RUNNING,
CANCELLED,
CANCELLED_AND_NOTIFIED,
FINISHED
]
_STATE_TO_DESCRIPTION_MAP = {
PENDING: "pending",
RUNNING: "running",
CANCELLED: "cancelled",
CANCELLED_AND_NOTIFIED: "cancelled",
FINISHED: "finished"
}
# Logger for internal use by the futures package.
LOGGER = logging.getLogger("concurrent.futures")
class Error(Exception):
"""Base class for all future-related exceptions."""
pass
class CancelledError(Error):
"""The Future was cancelled."""
pass
class TimeoutError(Error):
"""The operation exceeded the given deadline."""
pass
class _Waiter(object):
"""Provides the event that wait() and as_completed() block on."""
def __init__(self):
self.event = threading.Event()
self.finished_futures = []
def add_result(self, future):
self.finished_futures.append(future)
def add_exception(self, future):
self.finished_futures.append(future)
def add_cancelled(self, future):
self.finished_futures.append(future)
class _AsCompletedWaiter(_Waiter):
"""Used by as_completed()."""
def __init__(self):
super(_AsCompletedWaiter, self).__init__()
self.lock = threading.Lock()
def add_result(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_result(future)
self.event.set()
def add_exception(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_exception(future)
self.event.set()
def add_cancelled(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_cancelled(future)
self.event.set()
class _FirstCompletedWaiter(_Waiter):
"""Used by wait(return_when=FIRST_COMPLETED)."""
def add_result(self, future):
super(_FirstCompletedWaiter, self).add_result(future)
self.event.set()
def add_exception(self, future):
super(_FirstCompletedWaiter, self).add_exception(future)
self.event.set()
def add_cancelled(self, future):
super(_FirstCompletedWaiter, self).add_cancelled(future)
self.event.set()
class _AllCompletedWaiter(_Waiter):
"""Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""
def __init__(self, num_pending_calls, stop_on_exception):
self.num_pending_calls = num_pending_calls
self.stop_on_exception = stop_on_exception
self.lock = threading.Lock()
super(_AllCompletedWaiter, self).__init__()
def _decrement_pending_calls(self):
with self.lock:
self.num_pending_calls -= 1
if not self.num_pending_calls:
self.event.set()
def add_result(self, future):
super(_AllCompletedWaiter, self).add_result(future)
self._decrement_pending_calls()
def add_exception(self, future):
super(_AllCompletedWaiter, self).add_exception(future)
if self.stop_on_exception:
self.event.set()
else:
self._decrement_pending_calls()
def add_cancelled(self, future):
super(_AllCompletedWaiter, self).add_cancelled(future)
self._decrement_pending_calls()
class _AcquireFutures(object):
"""A context manager that does an ordered acquire of Future conditions."""
def __init__(self, futures):
self.futures = sorted(futures, key=id)
def __enter__(self):
for future in self.futures:
future._condition.acquire()
def __exit__(self, *args):
for future in self.futures:
future._condition.release()
def _create_and_install_waiters(fs, return_when):
if return_when == _AS_COMPLETED:
waiter = _AsCompletedWaiter()
elif return_when == FIRST_COMPLETED:
waiter = _FirstCompletedWaiter()
else:
pending_count = sum(
f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
if return_when == FIRST_EXCEPTION:
waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
elif return_when == ALL_COMPLETED:
waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
else:
raise ValueError("Invalid return condition: %r" % return_when)
for f in fs:
f._waiters.append(waiter)
return waiter
def as_completed(fs, timeout=None):
"""An iterator over the given futures that yields each as it completes.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
iterate over.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
An iterator that yields the given Futures as they complete (finished or
cancelled).
Raises:
TimeoutError: If the entire result iterator could not be generated
before the given timeout.
"""
if timeout is not None:
end_time = timeout + time.time()
with _AcquireFutures(fs):
finished = set(
f for f in fs
if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
pending = set(fs) - finished
waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
try:
for future in finished:
yield future
while pending:
if timeout is None:
wait_timeout = None
else:
wait_timeout = end_time - time.time()
if wait_timeout < 0:
raise TimeoutError(
'%d (of %d) futures unfinished' % (
len(pending), len(fs)))
waiter.event.wait(wait_timeout)
with waiter.lock:
finished = waiter.finished_futures
waiter.finished_futures = []
waiter.event.clear()
for future in finished:
yield future
pending.remove(future)
finally:
for f in fs:
f._waiters.remove(waiter)
DoneAndNotDoneFutures = namedtuple(
'DoneAndNotDoneFutures', 'done not_done')
def wait(fs, timeout=None, return_when=ALL_COMPLETED):
"""Wait for the futures in the given sequence to complete.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
wait upon.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
return_when: Indicates when this function should return. The options
are:
FIRST_COMPLETED - Return when any future finishes or is
cancelled.
FIRST_EXCEPTION - Return when any future finishes by raising an
exception. If no future raises an exception
then it is equivalent to ALL_COMPLETED.
ALL_COMPLETED - Return when all futures finish or are cancelled.
Returns:
A named 2-tuple of sets. The first set, named 'done', contains the
futures that completed (is finished or cancelled) before the wait
completed. The second set, named 'not_done', contains uncompleted
futures.
"""
with _AcquireFutures(fs):
done = set(f for f in fs
if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
not_done = set(fs) - done
if (return_when == FIRST_COMPLETED) and done:
return DoneAndNotDoneFutures(done, not_done)
elif (return_when == FIRST_EXCEPTION) and done:
if any(f for f in done
if not f.cancelled() and f.exception() is not None):
return DoneAndNotDoneFutures(done, not_done)
if len(done) == len(fs):
return DoneAndNotDoneFutures(done, not_done)
waiter = _create_and_install_waiters(fs, return_when)
waiter.event.wait(timeout)
for f in fs:
f._waiters.remove(waiter)
done.update(waiter.finished_futures)
return DoneAndNotDoneFutures(done, set(fs) - done)
class Future(object):
"""Represents the result of an asynchronous computation."""
def __init__(self):
"""Initializes the future. Should not be called by clients."""
self._condition = threading.Condition()
self._state = PENDING
self._result = None
self._exception = None
self._traceback = None
self._waiters = []
self._done_callbacks = []
def _invoke_callbacks(self):
for callback in self._done_callbacks:
try:
callback(self)
except Exception:
LOGGER.exception('exception calling callback for %r', self)
def __repr__(self):
with self._condition:
if self._state == FINISHED:
if self._exception:
return '<Future at %s state=%s raised %s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state],
self._exception.__class__.__name__)
else:
return '<Future at %s state=%s returned %s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state],
self._result.__class__.__name__)
return '<Future at %s state=%s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state])
def cancel(self):
"""Cancel the future if possible.
Returns True if the future was cancelled, False otherwise. A future
cannot be cancelled if it is running or has already completed.
"""
with self._condition:
if self._state in [RUNNING, FINISHED]:
return False
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
return True
self._state = CANCELLED
self._condition.notify_all()
self._invoke_callbacks()
return True
def cancelled(self):
"""Return True if the future has cancelled."""
with self._condition:
return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
def running(self):
"""Return True if the future is currently executing."""
with self._condition:
return self._state == RUNNING
def done(self):
"""Return True of the future was cancelled or finished executing."""
with self._condition:
return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
def __get_result(self):
if self._exception:
reraise(self._exception, self._traceback)
else:
return self._result
def add_done_callback(self, fn):
"""Attaches a callable that will be called when the future finishes.
Args:
fn: A callable that will be called with this future as its only
argument when the future completes or is cancelled. The callable
will always be called by a thread in the same process in which
it was added. If the future has already completed or been
cancelled then the callable will be called immediately. These
callables are called in the order that they were added.
"""
with self._condition:
if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
self._done_callbacks.append(fn)
return
fn(self)
def result(self, timeout=None):
"""Return the result of the call that the future represents.
Args:
timeout: The number of seconds to wait for the result if the future
isn't done. If None, then there is no limit on the wait time.
Returns:
The result of the call that the future represents.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
Exception: If the call raised then that exception will be raised.
"""
with self._condition:
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self.__get_result()
self._condition.wait(timeout)
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self.__get_result()
else:
raise TimeoutError()
def exception_info(self, timeout=None):
"""Return a tuple of (exception, traceback) raised by the call that the
future represents.
Args:
timeout: The number of seconds to wait for the exception if the
future isn't done. If None, then there is no limit on the wait
time.
Returns:
The exception raised by the call that the future represents or None
if the call completed without raising.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
"""
with self._condition:
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self._exception, self._traceback
self._condition.wait(timeout)
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self._exception, self._traceback
else:
raise TimeoutError()
def exception(self, timeout=None):
"""Return the exception raised by the call that the future represents.
Args:
timeout: The number of seconds to wait for the exception if the
future isn't done. If None, then there is no limit on the wait
time.
Returns:
The exception raised by the call that the future represents or None
if the call completed without raising.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
"""
return self.exception_info(timeout)[0]
# The following methods should only be used by Executors and in tests.
def set_running_or_notify_cancel(self):
"""Mark the future as running or process any cancel notifications.
Should only be used by Executor implementations and unit tests.
If the future has been cancelled (cancel() was called and returned
        True) then any threads waiting on the future completing (through calls
to as_completed() or wait()) are notified and False is returned.
If the future was not cancelled then it is put in the running state
(future calls to running() will return True) and True is returned.
This method should be called by Executor implementations before
executing the work associated with this future. If this method returns
False then the work should not be executed.
Returns:
False if the Future was cancelled, True otherwise.
Raises:
RuntimeError: if this method was already called or if set_result()
or set_exception() was called.
"""
with self._condition:
if self._state == CANCELLED:
self._state = CANCELLED_AND_NOTIFIED
for waiter in self._waiters:
waiter.add_cancelled(self)
# self._condition.notify_all() is not necessary because
# self.cancel() triggers a notification.
return False
elif self._state == PENDING:
self._state = RUNNING
return True
else:
LOGGER.critical('Future %s in unexpected state: %s',
                            id(self),
                            self._state)
raise RuntimeError('Future in unexpected state')
def set_result(self, result):
"""Sets the return value of work associated with the future.
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
self._result = result
self._state = FINISHED
for waiter in self._waiters:
waiter.add_result(self)
self._condition.notify_all()
self._invoke_callbacks()
def set_exception_info(self, exception, traceback):
"""Sets the result of the future as being the given exception
and traceback.
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
self._exception = exception
self._traceback = traceback
self._state = FINISHED
for waiter in self._waiters:
waiter.add_exception(self)
self._condition.notify_all()
self._invoke_callbacks()
def set_exception(self, exception):
"""Sets the result of the future as being the given exception.
Should only be used by Executor implementations and unit tests.
"""
self.set_exception_info(exception, None)
class Executor(object):
"""This is an abstract base class for concrete asynchronous executors."""
def submit(self, fn, *args, **kwargs):
"""Submits a callable to be executed with the given arguments.
Schedules the callable to be executed as fn(*args, **kwargs) and returns
a Future instance representing the execution of the callable.
Returns:
A Future representing the given call.
"""
raise NotImplementedError()
def map(self, fn, *iterables, **kwargs):
"""Returns a iterator equivalent to map(fn, iter).
Args:
fn: A callable that will take as many arguments as there are
passed iterables.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
            An iterator equivalent to: map(fn, *iterables) but the calls may
be evaluated out-of-order.
Raises:
TimeoutError: If the entire result iterator could not be generated
before the given timeout.
Exception: If fn(*args) raises for any values.
"""
timeout = kwargs.get('timeout')
if timeout is not None:
end_time = timeout + time.time()
fs = [self.submit(fn, *args) for args in zip(*iterables)]
try:
for future in fs:
if timeout is None:
yield future.result()
else:
yield future.result(end_time - time.time())
finally:
for future in fs:
future.cancel()
def shutdown(self, wait=True):
"""Clean-up the resources associated with the Executor.
        It is safe to call this method several times; however, no other
        methods may be called after this one.
Args:
wait: If True then shutdown will not return until all running
futures have finished executing and the resources used by the
executor have been reclaimed.
"""
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.shutdown(wait=True)
return False
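# A minimal sketch (hypothetical, appended for illustration; not part of the
# original module): a Future can be driven by hand, which is exactly how
# Executor implementations use this API.
if __name__ == '__main__':
    f = Future()
    assert f.set_running_or_notify_cancel()  # PENDING -> RUNNING
    f.set_result(42)
    done, not_done = wait([f], timeout=1)
    assert f in done and f.result() == 42
    print(f)  # <Future at 0x... state=finished returned int>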
| 20,793 | Python | .py | 486 | 32.306584 | 80 | 0.611601 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,763 | signals.py | rembo10_headphones/lib/oauthlib/signals.py |
"""
Implements signals based on blinker if available, otherwise
falls silently back to a noop. Shamelessly stolen from flask.signals:
https://github.com/mitsuhiko/flask/blob/master/flask/signals.py
"""
signals_available = False
try:
from blinker import Namespace
signals_available = True
except ImportError: # noqa
class Namespace:
def signal(self, name, doc=None):
return _FakeSignal(name, doc)
class _FakeSignal:
"""If blinker is unavailable, create a fake class with the same
interface that allows sending of signals but will fail with an
error on anything else. Instead of doing anything on send, it
will just ignore the arguments and do nothing instead.
"""
def __init__(self, name, doc=None):
self.name = name
self.__doc__ = doc
def _fail(self, *args, **kwargs):
raise RuntimeError('signalling support is unavailable '
'because the blinker library is '
'not installed.')
send = lambda *a, **kw: None
connect = disconnect = has_receivers_for = receivers_for = \
temporarily_connected_to = connected_to = _fail
del _fail
# The namespace for code signals. If you are not oauthlib code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()
# Core signals.
scope_changed = _signals.signal('scope-changed')
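# A minimal sketch (hypothetical, appended for illustration; not part of the
# original module). With blinker installed, receivers fire on send(); without
# it, send() is a silent no-op and connect() raises RuntimeError.
if __name__ == '__main__':
    if signals_available:
        def _on_scope_changed(sender, **kwargs):
            print('scope changed by %r: %r' % (sender, kwargs))
        scope_changed.connect(_on_scope_changed)
    scope_changed.send('demo-sender', old=['read'], new=['read', 'write'])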
| 1,489 | Python | .py | 35 | 34.6 | 73 | 0.648723 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,764 | __init__.py | rembo10_headphones/lib/oauthlib/__init__.py |
"""
oauthlib
~~~~~~~~
A generic, spec-compliant, thorough implementation of the OAuth
request-signing logic.
:copyright: (c) 2019 by The OAuthlib Community
:license: BSD, see LICENSE for details.
"""
import logging
from logging import NullHandler
__author__ = 'The OAuthlib Community'
__version__ = '3.1.1'
logging.getLogger('oauthlib').addHandler(NullHandler())
_DEBUG = False
def set_debug(debug_val):
"""Set value of debug flag
:param debug_val: Value to set. Must be a bool value.
"""
global _DEBUG
_DEBUG = debug_val
def get_debug():
"""Get debug mode value.
:return: `True` if debug mode is on, `False` otherwise
"""
return _DEBUG
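# A minimal sketch (hypothetical, appended for illustration; not part of the
# original module): the flag gates debug-only behaviour elsewhere, e.g. the
# sanitized repr() of oauthlib.common.Request.
if __name__ == '__main__':
    set_debug(True)
    assert get_debug() is True
    set_debug(False)
    assert get_debug() is False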
| 686 | Python | .py | 25 | 24.48 | 67 | 0.703077 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,765 | uri_validate.py | rembo10_headphones/lib/oauthlib/uri_validate.py |
"""
Regex for URIs
These regex are directly derived from the collected ABNF in RFC3986
(except for DIGIT, ALPHA and HEXDIG, defined by RFC2234).
They should be processed with re.VERBOSE.
Thanks Mark Nottingham for this code - https://gist.github.com/138549
"""
import re
# basics
DIGIT = r"[\x30-\x39]"
ALPHA = r"[\x41-\x5A\x61-\x7A]"
HEXDIG = r"[\x30-\x39A-Fa-f]"
# pct-encoded = "%" HEXDIG HEXDIG
pct_encoded = r" %% %(HEXDIG)s %(HEXDIG)s" % locals()
# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
unreserved = r"(?: %(ALPHA)s | %(DIGIT)s | \- | \. | _ | ~ )" % locals()
# gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
gen_delims = r"(?: : | / | \? | \# | \[ | \] | @ )"
# sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
# / "*" / "+" / "," / ";" / "="
sub_delims = r"""(?: ! | \$ | & | ' | \( | \) |
\* | \+ | , | ; | = )"""
# pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
pchar = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | : | @ )" % locals()
# reserved = gen-delims / sub-delims
reserved = r"(?: %(gen_delims)s | %(sub_delims)s )" % locals()
# scheme
# scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
scheme = r"%(ALPHA)s (?: %(ALPHA)s | %(DIGIT)s | \+ | \- | \. )*" % locals()
# authority
# dec-octet = DIGIT ; 0-9
# / %x31-39 DIGIT ; 10-99
# / "1" 2DIGIT ; 100-199
# / "2" %x30-34 DIGIT ; 200-249
# / "25" %x30-35 ; 250-255
dec_octet = r"""(?: %(DIGIT)s |
[\x31-\x39] %(DIGIT)s |
1 %(DIGIT)s{2} |
2 [\x30-\x34] %(DIGIT)s |
25 [\x30-\x35]
)
""" % locals()
# IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
IPv4address = r"%(dec_octet)s \. %(dec_octet)s \. %(dec_octet)s \. %(dec_octet)s" % locals()
# IPv6address
IPv6address = r"([A-Fa-f0-9:]+:+)+[A-Fa-f0-9]+"
# IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" )
IPvFuture = r"v %(HEXDIG)s+ \. (?: %(unreserved)s | %(sub_delims)s | : )+" % locals()
# IP-literal = "[" ( IPv6address / IPvFuture ) "]"
IP_literal = r"\[ (?: %(IPv6address)s | %(IPvFuture)s ) \]" % locals()
# reg-name = *( unreserved / pct-encoded / sub-delims )
reg_name = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s )*" % locals()
# userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
userinfo = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | : )*" % locals()
# host = IP-literal / IPv4address / reg-name
host = r"(?: %(IP_literal)s | %(IPv4address)s | %(reg_name)s )" % locals()
# port = *DIGIT
port = r"(?: %(DIGIT)s )*" % locals()
# authority = [ userinfo "@" ] host [ ":" port ]
authority = r"(?: %(userinfo)s @)? %(host)s (?: : %(port)s)?" % locals()
# Path
# segment = *pchar
segment = r"%(pchar)s*" % locals()
# segment-nz = 1*pchar
segment_nz = r"%(pchar)s+" % locals()
# segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" )
# ; non-zero-length segment without any colon ":"
segment_nz_nc = r"(?: %(unreserved)s | %(pct_encoded)s | %(sub_delims)s | @ )+" % locals()
# path-abempty = *( "/" segment )
path_abempty = r"(?: / %(segment)s )*" % locals()
# path-absolute = "/" [ segment-nz *( "/" segment ) ]
path_absolute = r"/ (?: %(segment_nz)s (?: / %(segment)s )* )?" % locals()
# path-noscheme = segment-nz-nc *( "/" segment )
path_noscheme = r"%(segment_nz_nc)s (?: / %(segment)s )*" % locals()
# path-rootless = segment-nz *( "/" segment )
path_rootless = r"%(segment_nz)s (?: / %(segment)s )*" % locals()
# path-empty = 0<pchar>
path_empty = r"" # FIXME
# path = path-abempty ; begins with "/" or is empty
# / path-absolute ; begins with "/" but not "//"
# / path-noscheme ; begins with a non-colon segment
# / path-rootless ; begins with a segment
# / path-empty ; zero characters
path = r"""(?: %(path_abempty)s |
%(path_absolute)s |
%(path_noscheme)s |
%(path_rootless)s |
%(path_empty)s
)
""" % locals()
# Query and Fragment
# query = *( pchar / "/" / "?" )
query = r"(?: %(pchar)s | / | \? )*" % locals()
# fragment = *( pchar / "/" / "?" )
fragment = r"(?: %(pchar)s | / | \? )*" % locals()
# URIs
# hier-part = "//" authority path-abempty
# / path-absolute
# / path-rootless
# / path-empty
hier_part = r"""(?: (?: // %(authority)s %(path_abempty)s ) |
%(path_absolute)s |
%(path_rootless)s |
%(path_empty)s
)
""" % locals()
# relative-part = "//" authority path-abempty
# / path-absolute
# / path-noscheme
# / path-empty
relative_part = r"""(?: (?: // %(authority)s %(path_abempty)s ) |
%(path_absolute)s |
%(path_noscheme)s |
%(path_empty)s
)
""" % locals()
# relative-ref = relative-part [ "?" query ] [ "#" fragment ]
relative_ref = r"%(relative_part)s (?: \? %(query)s)? (?: \# %(fragment)s)?" % locals()
# URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
URI = r"^(?: %(scheme)s : %(hier_part)s (?: \? %(query)s )? (?: \# %(fragment)s )? )$" % locals()
# URI-reference = URI / relative-ref
URI_reference = r"^(?: %(URI)s | %(relative_ref)s )$" % locals()
# absolute-URI = scheme ":" hier-part [ "?" query ]
absolute_URI = r"^(?: %(scheme)s : %(hier_part)s (?: \? %(query)s )? )$" % locals()
def is_uri(uri):
return re.match(URI, uri, re.VERBOSE)
def is_uri_reference(uri):
return re.match(URI_reference, uri, re.VERBOSE)
def is_absolute_uri(uri):
return re.match(absolute_URI, uri, re.VERBOSE)
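# A minimal sketch (hypothetical, appended for illustration; not part of the
# original module): each predicate returns a match object (truthy) or None.
if __name__ == '__main__':
    print(bool(is_uri('https://example.com/path?q=1')))       # True
    print(bool(is_absolute_uri('https://example.com/path')))  # True
    print(bool(is_uri_reference('/relative/path')))           # True
    print(bool(is_uri('not a uri')))                          # False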
| 6,107 | Python | .py | 136 | 40.794118 | 96 | 0.479635 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,766 | common.py | rembo10_headphones/lib/oauthlib/common.py |
"""
oauthlib.common
~~~~~~~~~~~~~~~
This module provides data structures and utilities common
to all implementations of OAuth.
"""
import collections
import datetime
import logging
import re
import time
import urllib.parse as urlparse
from urllib.parse import (
quote as _quote, unquote as _unquote, urlencode as _urlencode,
)
from . import get_debug
try:
from secrets import randbits
from secrets import SystemRandom
except ImportError:
from random import getrandbits as randbits
from random import SystemRandom
UNICODE_ASCII_CHARACTER_SET = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789')
CLIENT_ID_CHARACTER_SET = (r' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMN'
'OPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}')
SANITIZE_PATTERN = re.compile(r'([^&;]*(?:password|token)[^=]*=)[^&;]+', re.IGNORECASE)
INVALID_HEX_PATTERN = re.compile(r'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]')
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'abcdefghijklmnopqrstuvwxyz'
'0123456789' '_.-')
log = logging.getLogger('oauthlib')
# 'safe' must be bytes (Python 2.6 requires bytes, other versions allow either)
def quote(s, safe=b'/'):
s = s.encode('utf-8') if isinstance(s, str) else s
s = _quote(s, safe)
# PY3 always returns unicode. PY2 may return either, depending on whether
# it had to modify the string.
if isinstance(s, bytes):
s = s.decode('utf-8')
return s
def unquote(s):
s = _unquote(s)
# PY3 always returns unicode. PY2 seems to always return what you give it,
# which differs from quote's behavior. Just to be safe, make sure it is
# unicode before we return.
if isinstance(s, bytes):
s = s.decode('utf-8')
return s
def urlencode(params):
utf8_params = encode_params_utf8(params)
urlencoded = _urlencode(utf8_params)
if isinstance(urlencoded, str):
return urlencoded
else:
return urlencoded.decode("utf-8")
def encode_params_utf8(params):
"""Ensures that all parameters in a list of 2-element tuples are encoded to
bytestrings using UTF-8
"""
encoded = []
for k, v in params:
encoded.append((
k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return encoded
def decode_params_utf8(params):
"""Ensures that all parameters in a list of 2-element tuples are decoded to
unicode using UTF-8.
"""
decoded = []
for k, v in params:
decoded.append((
k.decode('utf-8') if isinstance(k, bytes) else k,
v.decode('utf-8') if isinstance(v, bytes) else v))
return decoded
urlencoded = set(always_safe) | set('=&;:%+~,*@!()/?\'$')
def urldecode(query):
"""Decode a query string in x-www-form-urlencoded format into a sequence
of two-element tuples.
Unlike urlparse.parse_qsl(..., strict_parsing=True) urldecode will enforce
correct formatting of the query string by validation. If validation fails
a ValueError will be raised. urllib.parse_qsl will only raise errors if
any of name-value pairs omits the equals sign.
"""
# Check if query contains invalid characters
if query and not set(query) <= urlencoded:
error = ("Error trying to decode a non urlencoded string. "
"Found invalid characters: %s "
"in the string: '%s'. "
"Please ensure the request/response body is "
"x-www-form-urlencoded.")
raise ValueError(error % (set(query) - urlencoded, query))
# Check for correctly hex encoded values using a regular expression
# All encoded values begin with % followed by two hex characters
# correct = %00, %A0, %0A, %FF
# invalid = %G0, %5H, %PO
if INVALID_HEX_PATTERN.search(query):
raise ValueError('Invalid hex encoding in query string.')
# We want to allow queries such as "c2" whereas urlparse.parse_qsl
# with the strict_parsing flag will not.
params = urlparse.parse_qsl(query, keep_blank_values=True)
# unicode all the things
return decode_params_utf8(params)
def extract_params(raw):
"""Extract parameters and return them as a list of 2-tuples.
Will successfully extract parameters from urlencoded query strings,
dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an
empty list of parameters. Any other input will result in a return
value of None.
"""
if isinstance(raw, (bytes, str)):
try:
params = urldecode(raw)
except ValueError:
params = None
elif hasattr(raw, '__iter__'):
try:
dict(raw)
except ValueError:
params = None
except TypeError:
params = None
else:
params = list(raw.items() if isinstance(raw, dict) else raw)
params = decode_params_utf8(params)
else:
params = None
return params
def generate_nonce():
"""Generate pseudorandom nonce that is unlikely to repeat.
Per `section 3.3`_ of the OAuth 1 RFC 5849 spec.
Per `section 3.2.1`_ of the MAC Access Authentication spec.
A random 64-bit number is appended to the epoch timestamp for both
randomness and to decrease the likelihood of collisions.
.. _`section 3.2.1`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1
.. _`section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3
"""
return str(str(randbits(64)) + generate_timestamp())
def generate_timestamp():
"""Get seconds since epoch (UTC).
Per `section 3.3`_ of the OAuth 1 RFC 5849 spec.
Per `section 3.2.1`_ of the MAC Access Authentication spec.
.. _`section 3.2.1`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1
.. _`section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3
"""
return str(int(time.time()))
def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET):
"""Generates a non-guessable OAuth token
OAuth (1 and 2) does not specify the format of tokens except that they
should be strings of random characters. Tokens should not be guessable
    and entropy when generating the random characters is important, which is
    why SystemRandom is used instead of the default random.choice method.
"""
rand = SystemRandom()
return ''.join(rand.choice(chars) for x in range(length))
def generate_signed_token(private_pem, request):
import jwt
now = datetime.datetime.utcnow()
claims = {
'scope': request.scope,
'exp': now + datetime.timedelta(seconds=request.expires_in)
}
claims.update(request.claims)
token = jwt.encode(claims, private_pem, 'RS256')
token = to_unicode(token, "UTF-8")
return token
def verify_signed_token(public_pem, token):
import jwt
return jwt.decode(token, public_pem, algorithms=['RS256'])
def generate_client_id(length=30, chars=CLIENT_ID_CHARACTER_SET):
"""Generates an OAuth client_id
    OAuth 2 specifies the format of client_id in
https://tools.ietf.org/html/rfc6749#appendix-A.
"""
return generate_token(length, chars)
def add_params_to_qs(query, params):
"""Extend a query with a list of two-tuples."""
if isinstance(params, dict):
params = params.items()
queryparams = urlparse.parse_qsl(query, keep_blank_values=True)
queryparams.extend(params)
return urlencode(queryparams)
def add_params_to_uri(uri, params, fragment=False):
"""Add a list of two-tuples to the uri query components."""
sch, net, path, par, query, fra = urlparse.urlparse(uri)
if fragment:
fra = add_params_to_qs(fra, params)
else:
query = add_params_to_qs(query, params)
return urlparse.urlunparse((sch, net, path, par, query, fra))
def safe_string_equals(a, b):
""" Near-constant time string comparison.
Used in order to avoid timing attacks on sensitive information such
as secret keys during request verification (`rootLabs`_).
.. _`rootLabs`: http://rdist.root.org/2010/01/07/timing-independent-array-comparison/
"""
if len(a) != len(b):
return False
result = 0
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
def to_unicode(data, encoding='UTF-8'):
"""Convert a number of different types of objects to unicode."""
if isinstance(data, str):
return data
if isinstance(data, bytes):
return str(data, encoding=encoding)
if hasattr(data, '__iter__'):
try:
dict(data)
except TypeError:
pass
except ValueError:
# Assume it's a one dimensional data structure
return (to_unicode(i, encoding) for i in data)
else:
                # Normalize mappings to an iterable of (key, value) pairs.
if hasattr(data, 'items'):
data = data.items()
return {to_unicode(k, encoding): to_unicode(v, encoding) for k, v in data}
return data
class CaseInsensitiveDict(dict):
"""Basic case insensitive dict with strings only keys."""
proxy = {}
def __init__(self, data):
self.proxy = {k.lower(): k for k in data}
for k in data:
self[k] = data[k]
def __contains__(self, k):
return k.lower() in self.proxy
def __delitem__(self, k):
key = self.proxy[k.lower()]
super().__delitem__(key)
del self.proxy[k.lower()]
def __getitem__(self, k):
key = self.proxy[k.lower()]
return super().__getitem__(key)
def get(self, k, default=None):
return self[k] if k in self else default
def __setitem__(self, k, v):
super().__setitem__(k, v)
self.proxy[k.lower()] = k
def update(self, *args, **kwargs):
super().update(*args, **kwargs)
for k in dict(*args, **kwargs):
self.proxy[k.lower()] = k
class Request:
"""A malleable representation of a signable HTTP request.
Body argument may contain any data, but parameters will only be decoded if
they are one of:
* urlencoded query string
* dict
* list of 2-tuples
Anything else will be treated as raw body data to be passed through
unmolested.
"""
def __init__(self, uri, http_method='GET', body=None, headers=None,
encoding='utf-8'):
# Convert to unicode using encoding if given, else assume unicode
encode = lambda x: to_unicode(x, encoding) if encoding else x
self.uri = encode(uri)
self.http_method = encode(http_method)
self.headers = CaseInsensitiveDict(encode(headers or {}))
self.body = encode(body)
self.decoded_body = extract_params(self.body)
self.oauth_params = []
self.validator_log = {}
self._params = {
"access_token": None,
"client": None,
"client_id": None,
"client_secret": None,
"code": None,
"code_challenge": None,
"code_challenge_method": None,
"code_verifier": None,
"extra_credentials": None,
"grant_type": None,
"redirect_uri": None,
"refresh_token": None,
"request_token": None,
"response_type": None,
"scope": None,
"scopes": None,
"state": None,
"token": None,
"user": None,
"token_type_hint": None,
# OpenID Connect
"response_mode": None,
"nonce": None,
"display": None,
"prompt": None,
"claims": None,
"max_age": None,
"ui_locales": None,
"id_token_hint": None,
"login_hint": None,
"acr_values": None
}
self._params.update(dict(urldecode(self.uri_query)))
self._params.update(dict(self.decoded_body or []))
def __getattr__(self, name):
if name in self._params:
return self._params[name]
else:
raise AttributeError(name)
def __repr__(self):
if not get_debug():
return "<oauthlib.Request SANITIZED>"
body = self.body
headers = self.headers.copy()
if body:
            body = SANITIZE_PATTERN.sub(r'\1<SANITIZED>', str(body))
if 'Authorization' in headers:
headers['Authorization'] = '<SANITIZED>'
return '<oauthlib.Request url="{}", http_method="{}", headers="{}", body="{}">'.format(
self.uri, self.http_method, headers, body)
@property
def uri_query(self):
return urlparse.urlparse(self.uri).query
@property
def uri_query_params(self):
if not self.uri_query:
return []
return urlparse.parse_qsl(self.uri_query, keep_blank_values=True,
strict_parsing=True)
@property
def duplicate_params(self):
seen_keys = collections.defaultdict(int)
all_keys = (p[0]
for p in (self.decoded_body or []) + self.uri_query_params)
for k in all_keys:
seen_keys[k] += 1
return [k for k, c in seen_keys.items() if c > 1]
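# A minimal sketch (hypothetical values, appended for illustration; not part
# of the original module): Request merges parameters from the query string
# and a urlencoded body and exposes them as attributes.
if __name__ == '__main__':
    r = Request('https://example.com/token?state=xyz',
                http_method='POST',
                body='grant_type=client_credentials')
    print(r.state, r.grant_type)      # xyz client_credentials
    print(extract_params('a=1&b=2'))  # [('a', '1'), ('b', '2')]
    print(len(generate_token()))      # 30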
| 13,422 | Python | .py | 338 | 32.065089 | 98 | 0.626424 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,767 | __init__.py | rembo10_headphones/lib/oauthlib/openid/__init__.py |
"""
oauthlib.openid
~~~~~~~~~~~~~~~
"""
from .connect.core.endpoints import Server, UserInfoEndpoint
from .connect.core.request_validator import RequestValidator
| 162 | Python | .py | 6 | 25.833333 | 60 | 0.767742 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,768 | tokens.py | rembo10_headphones/lib/oauthlib/openid/connect/core/tokens.py |
"""
oauthlib.openid.connect.core.tokens
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains methods for adding JWT tokens to requests.
"""
from oauthlib.oauth2.rfc6749.tokens import TokenBase, random_token_generator, get_token_from_header
class JWTToken(TokenBase):
__slots__ = (
'request_validator', 'token_generator',
'refresh_token_generator', 'expires_in'
)
def __init__(self, request_validator=None, token_generator=None,
expires_in=None, refresh_token_generator=None):
self.request_validator = request_validator
self.token_generator = token_generator or random_token_generator
self.refresh_token_generator = (
refresh_token_generator or self.token_generator
)
self.expires_in = expires_in or 3600
def create_token(self, request, refresh_token=False):
"""Create a JWT Token, using requestvalidator method."""
if callable(self.expires_in):
expires_in = self.expires_in(request)
else:
expires_in = self.expires_in
request.expires_in = expires_in
return self.request_validator.get_jwt_bearer_token(None, None, request)
def validate_request(self, request):
token = get_token_from_header(request)
return self.request_validator.validate_jwt_bearer_token(
token, request.scopes, request)
def estimate_type(self, request):
token = get_token_from_header(request)
if token and token.startswith('ey') and token.count('.') in (2, 4):
return 10
return 0
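# A minimal sketch (hypothetical values, appended for illustration; not part
# of the original module): estimate_type needs no validator, so a bare
# instance can classify a JWT-shaped bearer token from a request header.
if __name__ == '__main__':
    from oauthlib.common import Request
    jwt_token = JWTToken(request_validator=None)
    request = Request('https://example.com/resource',
                      headers={'Authorization': 'Bearer ey.payload.sig'})
    print(jwt_token.estimate_type(request))  # 10 -> looks like a JWT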
| 1,596 | Python | .py | 36 | 36.472222 | 99 | 0.656129 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,769 | request_validator.py | rembo10_headphones/lib/oauthlib/openid/connect/core/request_validator.py |
"""
oauthlib.openid.connect.core.request_validator
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import logging
from oauthlib.oauth2.rfc6749.request_validator import (
RequestValidator as OAuth2RequestValidator,
)
log = logging.getLogger(__name__)
class RequestValidator(OAuth2RequestValidator):
def get_authorization_code_scopes(self, client_id, code, redirect_uri, request):
""" Extracts scopes from saved authorization code.
        The scopes returned by this method are used to route token requests
        based on scopes passed to Authorization Code requests.
        With that, the token endpoint knows when to include an OpenID Connect
        id_token in the token response, based solely on the authorization
        code scopes.
        The `code` param alone should be sufficient to retrieve the grant
        from whatever storage you are using. `client_id` and `redirect_uri`
        can have a blank value `""`; don't forget to check for that before
        using those values in a select query if a database is used.
:param client_id: Unicode client identifier
:param code: Unicode authorization code grant
:param redirect_uri: Unicode absolute URI
:return: A list of scope
Method is used by:
- Authorization Token Grant Dispatcher
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_authorization_code_nonce(self, client_id, code, redirect_uri, request):
""" Extracts nonce from saved authorization code.
If present in the Authentication Request, Authorization
Servers MUST include a nonce Claim in the ID Token with the
Claim Value being the nonce value sent in the Authentication
Request. Authorization Servers SHOULD perform no other
processing on nonce values used. The nonce value is a
case-sensitive string.
        The `code` param alone should be sufficient to retrieve the grant
        from any storage you are using. However, `client_id` and
        `redirect_uri` have been validated and can also be used.
:param client_id: Unicode client identifier
:param code: Unicode authorization code grant
:param redirect_uri: Unicode absolute URI
:return: Unicode nonce
Method is used by:
- Authorization Token Grant Dispatcher
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_jwt_bearer_token(self, token, token_handler, request):
"""Get JWT Bearer token or OpenID Connect ID token
If using OpenID Connect this SHOULD call `oauthlib.oauth2.RequestValidator.get_id_token`
:param token: A Bearer token dict
:param token_handler: the token handler (BearerToken class)
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:return: The JWT Bearer token or OpenID Connect ID token (a JWS signed JWT)
Method is used by JWT Bearer and OpenID Connect tokens:
- JWTToken.create_token
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_id_token(self, token, token_handler, request):
"""Get OpenID Connect ID token
This method is OPTIONAL and is NOT RECOMMENDED.
`finalize_id_token` SHOULD be implemented instead. However, if you
want a full control over the minting of the `id_token`, you
MAY want to override `get_id_token` instead of using
`finalize_id_token`.
In the OpenID Connect workflows when an ID Token is requested this method is called.
Subclasses should implement the construction, signing and optional encryption of the
ID Token as described in the OpenID Connect spec.
In addition to the standard OAuth2 request properties, the request may also contain
these OIDC specific properties which are useful to this method:
- nonce, if workflow is implicit or hybrid and it was provided
- claims, if provided to the original Authorization Code request
The token parameter is a dict which may contain an ``access_token`` entry, in which
case the resulting ID Token *should* include a calculated ``at_hash`` claim.
Similarly, when the request parameter has a ``code`` property defined, the ID Token
*should* include a calculated ``c_hash`` claim.
http://openid.net/specs/openid-connect-core-1_0.html (sections `3.1.3.6`_, `3.2.2.10`_, `3.3.2.11`_)
.. _`3.1.3.6`: http://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken
.. _`3.2.2.10`: http://openid.net/specs/openid-connect-core-1_0.html#ImplicitIDToken
.. _`3.3.2.11`: http://openid.net/specs/openid-connect-core-1_0.html#HybridIDToken
:param token: A Bearer token dict
:param token_handler: the token handler (BearerToken class)
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:return: The ID Token (a JWS signed JWT)
"""
return None
def finalize_id_token(self, id_token, token, token_handler, request):
"""Finalize OpenID Connect ID token & Sign or Encrypt.
In the OpenID Connect workflows when an ID Token is requested
this method is called. Subclasses should implement the
construction, signing and optional encryption of the ID Token
as described in the OpenID Connect spec.
The `id_token` parameter is a dict containing a couple of OIDC
technical fields related to the specification. Prepopulated
attributes are:
- `aud`, equal to `request.client_id`.
- `iat`, equal to the current time.
- `nonce`, if present, equal to the `nonce` from the
authorization request.
- `at_hash`, hash of `access_token`, if relevant.
- `c_hash`, hash of `code`, if relevant.
This method MUST provide required fields as below:
- `iss`, REQUIRED. Issuer Identifier for the Issuer of the response.
- `sub`, REQUIRED. Subject Identifier
- `exp`, REQUIRED. Expiration time on or after which the ID
Token MUST NOT be accepted by the RP when performing
authentication with the OP.
Additional claims must be added; note that `request.scope`
should be used to determine the list of claims.
More information can be found at `OpenID Connect Core#Claims`_
.. _`OpenID Connect Core#Claims`: https://openid.net/specs/openid-connect-core-1_0.html#Claims
:param id_token: A dict containing technical fields of id_token
:param token: A Bearer token dict
:param token_handler: the token handler (BearerToken class)
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:return: The ID Token (a JWS signed JWT or JWE encrypted JWT)
"""
raise NotImplementedError('Subclasses must implement this method.')
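A minimal sketch of a `finalize_id_token` override using PyJWT for RS256 signing; the issuer URL, the `PRIVATE_KEY_PEM` variable, the one-hour lifetime, and the `request.user` attribute are all illustrative assumptions:

# Hypothetical override; assumes PyJWT and an RSA key in PRIVATE_KEY_PEM.
import jwt  # PyJWT

class MyValidator(RequestValidator):

    def finalize_id_token(self, id_token, token, token_handler, request):
        id_token['iss'] = 'https://issuer.example.com'  # REQUIRED
        id_token['sub'] = str(request.user.id)          # REQUIRED (assumed attr)
        id_token['exp'] = id_token['iat'] + 3600        # REQUIRED
        # Further claims could be derived from request.scopes here.
        return jwt.encode(id_token, PRIVATE_KEY_PEM, algorithm='RS256')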
def validate_jwt_bearer_token(self, token, scopes, request):
"""Ensure the JWT Bearer token or OpenID Connect ID token are valids and authorized access to scopes.
If using OpenID Connect this SHOULD call `oauthlib.oauth2.RequestValidator.get_id_token`
If not using OpenID Connect this can `return None` to avoid 5xx rather 401/3 response.
OpenID connect core 1.0 describe how to validate an id_token:
- http://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
- http://openid.net/specs/openid-connect-core-1_0.html#ImplicitIDTValidation
- http://openid.net/specs/openid-connect-core-1_0.html#HybridIDTValidation
- http://openid.net/specs/openid-connect-core-1_0.html#HybridIDTValidation2
:param token: Unicode Bearer token
:param scopes: List of scopes (defined by you)
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is indirectly used by all core OpenID connect JWT token issuing grant types:
- Authorization Code Grant
- Implicit Grant
- Hybrid Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_id_token(self, token, scopes, request):
"""Ensure the id token is valid and authorized access to scopes.
OpenID Connect core 1.0 describes how to validate an id_token:
- http://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
- http://openid.net/specs/openid-connect-core-1_0.html#ImplicitIDTValidation
- http://openid.net/specs/openid-connect-core-1_0.html#HybridIDTValidation
- http://openid.net/specs/openid-connect-core-1_0.html#HybridIDTValidation2
:param token: Unicode Bearer token
:param scopes: List of scopes (defined by you)
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is indirectly used by all core OpenID connect JWT token issuing grant types:
- Authorization Code Grant
- Implicit Grant
- Hybrid Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_silent_authorization(self, request):
"""Ensure the logged in user has authorized silent OpenID authorization.
Silent OpenID authorization allows access tokens and id tokens to be
granted to clients without any user prompt or interaction.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- OpenIDConnectAuthCode
- OpenIDConnectImplicit
- OpenIDConnectHybrid
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_silent_login(self, request):
"""Ensure session user has authorized silent OpenID login.
If no user is logged in or has not authorized silent login, this
method should return False.
If the user is logged in but associated with multiple accounts and
not selected which one to link to the token then this method should
raise an oauthlib.oauth2.AccountSelectionRequired error.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- OpenIDConnectAuthCode
- OpenIDConnectImplicit
- OpenIDConnectHybrid
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_user_match(self, id_token_hint, scopes, claims, request):
"""Ensure client supplied user id hint matches session user.
If the sub claim or id_token_hint is supplied then the session
user must match the given ID.
:param id_token_hint: User identifier string.
:param scopes: List of OAuth 2 scopes and OpenID claims (strings).
:param claims: OpenID Connect claims dict.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- OpenIDConnectAuthCode
- OpenIDConnectImplicit
- OpenIDConnectHybrid
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_userinfo_claims(self, request):
"""Return the UserInfo claims in JSON or Signed or Encrypted.
The UserInfo Claims MUST be returned as the members of a JSON object
unless a signed or encrypted response was requested during Client
Registration. The Claims defined in Section 5.1 can be returned, as can
additional Claims not specified there.
For privacy reasons, OpenID Providers MAY elect to not return values for
some requested Claims.
If a Claim is not returned, that Claim Name SHOULD be omitted from the
JSON object representing the Claims; it SHOULD NOT be present with a
null or empty string value.
The sub (subject) Claim MUST always be returned in the UserInfo
Response.
Upon receipt of the UserInfo Request, the UserInfo Endpoint MUST return
the JSON Serialization of the UserInfo Response as in Section 13.3 in
the HTTP response body unless a different format was specified during
Registration [OpenID.Registration].
If the UserInfo Response is signed and/or encrypted, then the Claims are
returned in a JWT and the content-type MUST be application/jwt. The
response MAY be encrypted without also being signed. If both signing and
encryption are requested, the response MUST be signed then encrypted,
with the result being a Nested JWT, as defined in [JWT].
If signed, the UserInfo Response SHOULD contain the Claims iss (issuer)
and aud (audience) as members. The iss value SHOULD be the OP's Issuer
Identifier URL. The aud value SHOULD be or include the RP's Client ID
value.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: Claims as a dict OR JWT/JWS/JWE as a string
Method is used by:
UserInfoEndpoint
"""
| 13,448 | Python | .py | 236 | 47.65678 | 109 | 0.688128 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,770 | exceptions.py | rembo10_headphones/lib/oauthlib/openid/connect/core/exceptions.py |
"""
oauthlib.oauth2.rfc6749.errors
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Error used both by OAuth 2 clients and providers to represent the spec
defined error responses for all four core grant types.
"""
from oauthlib.oauth2.rfc6749.errors import FatalClientError, OAuth2Error
class FatalOpenIDClientError(FatalClientError):
pass
class OpenIDClientError(OAuth2Error):
pass
class InteractionRequired(OpenIDClientError):
"""
The Authorization Server requires End-User interaction to proceed.
This error MAY be returned when the prompt parameter value in the
Authentication Request is none, but the Authentication Request cannot be
completed without displaying a user interface for End-User interaction.
"""
error = 'interaction_required'
status_code = 401
class LoginRequired(OpenIDClientError):
"""
The Authorization Server requires End-User authentication.
This error MAY be returned when the prompt parameter value in the
Authentication Request is none, but the Authentication Request cannot be
completed without displaying a user interface for End-User authentication.
"""
error = 'login_required'
status_code = 401
class AccountSelectionRequired(OpenIDClientError):
"""
The End-User is REQUIRED to select a session at the Authorization Server.
The End-User MAY be authenticated at the Authorization Server with
different associated accounts, but the End-User did not select a session.
This error MAY be returned when the prompt parameter value in the
Authentication Request is none, but the Authentication Request cannot be
completed without displaying a user interface to prompt for a session to
use.
"""
error = 'account_selection_required'
class ConsentRequired(OpenIDClientError):
"""
The Authorization Server requires End-User consent.
This error MAY be returned when the prompt parameter value in the
Authentication Request is none, but the Authentication Request cannot be
completed without displaying a user interface for End-User consent.
"""
error = 'consent_required'
status_code = 401
class InvalidRequestURI(OpenIDClientError):
"""
The request_uri in the Authorization Request returns an error or
contains invalid data.
"""
error = 'invalid_request_uri'
description = 'The request_uri in the Authorization Request returns an ' \
'error or contains invalid data.'
class InvalidRequestObject(OpenIDClientError):
"""
The request parameter contains an invalid Request Object.
"""
error = 'invalid_request_object'
description = 'The request parameter contains an invalid Request Object.'
class RequestNotSupported(OpenIDClientError):
"""
The OP does not support use of the request parameter.
"""
error = 'request_not_supported'
description = 'The request parameter is not supported.'
class RequestURINotSupported(OpenIDClientError):
"""
The OP does not support use of the request_uri parameter.
"""
error = 'request_uri_not_supported'
description = 'The request_uri parameter is not supported.'
class RegistrationNotSupported(OpenIDClientError):
"""
The OP does not support use of the registration parameter.
"""
error = 'registration_not_supported'
description = 'The registration parameter is not supported.'
class InvalidTokenError(OAuth2Error):
"""
The access token provided is expired, revoked, malformed, or
invalid for other reasons. The resource SHOULD respond with
the HTTP 401 (Unauthorized) status code. The client MAY
request a new access token and retry the protected resource
request.
"""
error = 'invalid_token'
status_code = 401
description = ("The access token provided is expired, revoked, malformed, "
"or invalid for other reasons.")
class InsufficientScopeError(OAuth2Error):
"""
The request requires higher privileges than provided by the
access token. The resource server SHOULD respond with the HTTP
403 (Forbidden) status code and MAY include the "scope"
attribute with the scope necessary to access the protected
resource.
"""
error = 'insufficient_scope'
status_code = 403
description = ("The request requires higher privileges than provided by "
"the access token.")
def raise_from_error(error, params=None):
import inspect
import sys
kwargs = {
'description': params.get('error_description'),
'uri': params.get('error_uri'),
'state': params.get('state')
}
for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
if cls.error == error:
raise cls(**kwargs)
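For illustration, `raise_from_error` maps a provider-supplied error code onto the matching exception class defined in this module:

# Example: an error payload from a provider becomes a typed exception.
try:
    raise_from_error('login_required',
                     {'error_description': 'End-User login is needed'})
except LoginRequired as exc:
    print(exc.error, exc.status_code)  # -> login_required 401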
| 4,790 | Python | .py | 116 | 36.146552 | 79 | 0.732385 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,771 | pre_configured.py | rembo10_headphones/lib/oauthlib/openid/connect/core/endpoints/pre_configured.py |
"""
oauthlib.openid.connect.core.endpoints.pre_configured
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various endpoints needed
for providing OpenID Connect servers.
"""
from oauthlib.oauth2.rfc6749.endpoints import (
AuthorizationEndpoint, IntrospectEndpoint, ResourceEndpoint,
RevocationEndpoint, TokenEndpoint,
)
from oauthlib.oauth2.rfc6749.grant_types import (
AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant,
ClientCredentialsGrant, ImplicitGrant as OAuth2ImplicitGrant,
RefreshTokenGrant, ResourceOwnerPasswordCredentialsGrant,
)
from oauthlib.oauth2.rfc6749.tokens import BearerToken
from ..grant_types import AuthorizationCodeGrant, HybridGrant, ImplicitGrant
from ..grant_types.dispatchers import (
AuthorizationCodeGrantDispatcher, AuthorizationTokenGrantDispatcher,
ImplicitTokenGrantDispatcher,
)
from ..tokens import JWTToken
from .userinfo import UserInfoEndpoint
class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint,
ResourceEndpoint, RevocationEndpoint, UserInfoEndpoint):
"""An all-in-one endpoint featuring all four major grant types."""
def __init__(self, request_validator, token_expires_in=None,
token_generator=None, refresh_token_generator=None,
*args, **kwargs):
"""Construct a new all-grants-in-one server.
:param request_validator: An implementation of
oauthlib.oauth2.RequestValidator.
:param token_expires_in: An int or a function to generate a token
expiration offset (in seconds) given a
oauthlib.common.Request object.
:param token_generator: A function to generate a token from a request.
:param refresh_token_generator: A function to generate a token from a
request for the refresh token.
:param kwargs: Extra parameters to pass to authorization-,
token-, resource-, and revocation-endpoint constructors.
"""
self.auth_grant = OAuth2AuthorizationCodeGrant(request_validator)
self.implicit_grant = OAuth2ImplicitGrant(request_validator)
self.password_grant = ResourceOwnerPasswordCredentialsGrant(
request_validator)
self.credentials_grant = ClientCredentialsGrant(request_validator)
self.refresh_grant = RefreshTokenGrant(request_validator)
self.openid_connect_auth = AuthorizationCodeGrant(request_validator)
self.openid_connect_implicit = ImplicitGrant(request_validator)
self.openid_connect_hybrid = HybridGrant(request_validator)
self.bearer = BearerToken(request_validator, token_generator,
token_expires_in, refresh_token_generator)
self.jwt = JWTToken(request_validator, token_generator,
token_expires_in, refresh_token_generator)
self.auth_grant_choice = AuthorizationCodeGrantDispatcher(default_grant=self.auth_grant, oidc_grant=self.openid_connect_auth)
self.implicit_grant_choice = ImplicitTokenGrantDispatcher(default_grant=self.implicit_grant, oidc_grant=self.openid_connect_implicit)
# See http://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#Combinations for valid combinations
# internally our AuthorizationEndpoint will ensure they can appear in any order for any valid combination
AuthorizationEndpoint.__init__(self, default_response_type='code',
response_types={
'code': self.auth_grant_choice,
'token': self.implicit_grant_choice,
'id_token': self.openid_connect_implicit,
'id_token token': self.openid_connect_implicit,
'code token': self.openid_connect_hybrid,
'code id_token': self.openid_connect_hybrid,
'code id_token token': self.openid_connect_hybrid,
'none': self.auth_grant
},
default_token_type=self.bearer)
self.token_grant_choice = AuthorizationTokenGrantDispatcher(request_validator, default_grant=self.auth_grant, oidc_grant=self.openid_connect_auth)
TokenEndpoint.__init__(self, default_grant_type='authorization_code',
grant_types={
'authorization_code': self.token_grant_choice,
'password': self.password_grant,
'client_credentials': self.credentials_grant,
'refresh_token': self.refresh_grant,
},
default_token_type=self.bearer)
ResourceEndpoint.__init__(self, default_token='Bearer',
token_types={'Bearer': self.bearer, 'JWT': self.jwt})
RevocationEndpoint.__init__(self, request_validator)
IntrospectEndpoint.__init__(self, request_validator)
UserInfoEndpoint.__init__(self, request_validator)
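A minimal sketch of bootstrapping this pre-configured Server; `MyValidator` stands in for your own RequestValidator subclass, and the form-encoded body below is illustrative:

# Hypothetical wiring; only the Server constructor shape is from this module.
server = Server(
    MyValidator(),
    token_expires_in=lambda request: 3600,  # int or callable(request) -> seconds
)
headers, body, status = server.create_token_response(
    'https://provider.example.com/token',
    http_method='POST',
    body='grant_type=authorization_code&code=abc123'
         '&client_id=cid&redirect_uri=https%3A%2F%2Fclient%2Fcb',
)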
| 5,426 | Python | .py | 84 | 48.416667 | 154 | 0.62751 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,772 | __init__.py | rembo10_headphones/lib/oauthlib/openid/connect/core/endpoints/__init__.py |
"""
oauthlib.openid.connect.core.endpoints
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OpenID Connect
"""
from .pre_configured import Server
from .userinfo import UserInfoEndpoint
| 229 | Python | .py | 8 | 27.5 | 56 | 0.759091 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,773 | userinfo.py | rembo10_headphones/lib/oauthlib/openid/connect/core/endpoints/userinfo.py |
"""
oauthlib.openid.connect.core.endpoints.userinfo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of userinfo endpoint.
"""
import json
import logging
from oauthlib.common import Request
from oauthlib.oauth2.rfc6749 import errors
from oauthlib.oauth2.rfc6749.endpoints.base import (
BaseEndpoint, catch_errors_and_unavailability,
)
from oauthlib.oauth2.rfc6749.tokens import BearerToken
log = logging.getLogger(__name__)
class UserInfoEndpoint(BaseEndpoint):
"""Authorizes access to userinfo resource.
"""
def __init__(self, request_validator):
self.bearer = BearerToken(request_validator, None, None, None)
self.request_validator = request_validator
BaseEndpoint.__init__(self)
@catch_errors_and_unavailability
def create_userinfo_response(self, uri, http_method='GET', body=None, headers=None):
"""Validate BearerToken and return userinfo from RequestValidator
The UserInfo Endpoint MUST return a
content-type header to indicate which format is being returned. The
content-type of the HTTP response MUST be application/json if the
response body is a text JSON object; the response body SHOULD be encoded
using UTF-8.
"""
request = Request(uri, http_method, body, headers)
request.scopes = ["openid"]
self.validate_userinfo_request(request)
claims = self.request_validator.get_userinfo_claims(request)
if claims is None:
log.error('Userinfo MUST have claims for %r.', request)
raise errors.ServerError(status_code=500)
if isinstance(claims, dict):
resp_headers = {
'Content-Type': 'application/json'
}
if "sub" not in claims:
log.error('Userinfo MUST have "sub" for %r.', request)
raise errors.ServerError(status_code=500)
body = json.dumps(claims)
elif isinstance(claims, str):
resp_headers = {
'Content-Type': 'application/jwt'
}
body = claims
else:
log.error('Userinfo returned unknown response for %r.', request)
raise errors.ServerError(status_code=500)
log.debug('Userinfo access valid for %r.', request)
return resp_headers, body, 200
def validate_userinfo_request(self, request):
"""Ensure the request is valid.
5.3.1. UserInfo Request
The Client sends the UserInfo Request using either HTTP GET or HTTP
POST. The Access Token obtained from an OpenID Connect Authentication
Request MUST be sent as a Bearer Token, per Section 2 of OAuth 2.0
Bearer Token Usage [RFC6750].
It is RECOMMENDED that the request use the HTTP GET method and the
Access Token be sent using the Authorization header field.
The following is a non-normative example of a UserInfo Request:
GET /userinfo HTTP/1.1
Host: server.example.com
Authorization: Bearer SlAV32hkKG
5.3.3. UserInfo Error Response
When an error condition occurs, the UserInfo Endpoint returns an Error
Response as defined in Section 3 of OAuth 2.0 Bearer Token Usage
[RFC6750]. (HTTP errors unrelated to RFC 6750 are returned to the User
Agent using the appropriate HTTP status code.)
The following is a non-normative example of a UserInfo Error Response:
HTTP/1.1 401 Unauthorized
WWW-Authenticate: Bearer error="invalid_token",
error_description="The Access Token expired"
"""
if not self.bearer.validate_request(request):
raise errors.InvalidTokenError()
if "openid" not in request.scopes:
raise errors.InsufficientScopeError()
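A framework view might drive this endpoint roughly as sketched below; the validator class and the incoming Bearer token are illustrative:

# Hypothetical usage of the endpoint defined above.
endpoint = UserInfoEndpoint(MyValidator())
resp_headers, resp_body, status = endpoint.create_userinfo_response(
    'https://provider.example.com/userinfo',
    http_method='GET',
    headers={'Authorization': 'Bearer SlAV32hkKG'},
)
# On success: ({'Content-Type': 'application/json'}, '{"sub": ...}', 200)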
| 3,847 | Python | .py | 82 | 38.292683 | 88 | 0.666756 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,774 | hybrid.py | rembo10_headphones/lib/oauthlib/openid/connect/core/grant_types/hybrid.py |
"""
oauthlib.openid.connect.core.grant_types
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import logging
from oauthlib.oauth2.rfc6749.errors import InvalidRequestError
from oauthlib.oauth2.rfc6749.grant_types.authorization_code import (
AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant,
)
from ..request_validator import RequestValidator
from .base import GrantTypeBase
log = logging.getLogger(__name__)
class HybridGrant(GrantTypeBase):
def __init__(self, request_validator=None, **kwargs):
self.request_validator = request_validator or RequestValidator()
self.proxy_target = OAuth2AuthorizationCodeGrant(
request_validator=request_validator, **kwargs)
# All hybrid response types should be fragment-encoded.
self.proxy_target.default_response_mode = "fragment"
self.register_response_type('code id_token')
self.register_response_type('code token')
self.register_response_type('code id_token token')
self.custom_validators.post_auth.append(
self.openid_authorization_validator)
# Hybrid flows can return the id_token from the authorization
# endpoint as part of the 'code' response
self.register_code_modifier(self.add_token)
self.register_code_modifier(self.add_id_token)
self.register_token_modifier(self.add_id_token)
def add_id_token(self, token, token_handler, request):
return super().add_id_token(token, token_handler, request, nonce=request.nonce)
def openid_authorization_validator(self, request):
"""Additional validation when following the Authorization Code flow.
"""
request_info = super().openid_authorization_validator(request)
if not request_info: # returns immediately if OAuth2.0
return request_info
# REQUIRED if the Response Type of the request is `code
# id_token` or `code id_token token` and OPTIONAL when the
# Response Type of the request is `code token`. It is a string
# value used to associate a Client session with an ID Token,
# and to mitigate replay attacks. The value is passed through
# unmodified from the Authentication Request to the ID
# Token. Sufficient entropy MUST be present in the `nonce`
# values used to prevent attackers from guessing values. For
# implementation notes, see Section 15.5.2.
if request.response_type in ["code id_token", "code id_token token"]:
if not request.nonce:
raise InvalidRequestError(
request=request,
description='Request is missing mandatory nonce parameter.'
)
return request_info
| 2,742 | Python | .py | 53 | 43.603774 | 87 | 0.689436 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,775 | implicit.py | rembo10_headphones/lib/oauthlib/openid/connect/core/grant_types/implicit.py |
"""
oauthlib.openid.connect.core.grant_types
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import logging
from oauthlib.oauth2.rfc6749.errors import InvalidRequestError
from oauthlib.oauth2.rfc6749.grant_types.implicit import (
ImplicitGrant as OAuth2ImplicitGrant,
)
from .base import GrantTypeBase
log = logging.getLogger(__name__)
class ImplicitGrant(GrantTypeBase):
def __init__(self, request_validator=None, **kwargs):
self.proxy_target = OAuth2ImplicitGrant(
request_validator=request_validator, **kwargs)
self.register_response_type('id_token')
self.register_response_type('id_token token')
self.custom_validators.post_auth.append(
self.openid_authorization_validator)
self.register_token_modifier(self.add_id_token)
def add_id_token(self, token, token_handler, request):
if 'state' not in token and request.state:
token['state'] = request.state
return super().add_id_token(token, token_handler, request, nonce=request.nonce)
def openid_authorization_validator(self, request):
"""Additional validation when following the implicit flow.
"""
request_info = super().openid_authorization_validator(request)
if not request_info: # returns immediately if OAuth2.0
return request_info
# REQUIRED. String value used to associate a Client session with an ID
# Token, and to mitigate replay attacks. The value is passed through
# unmodified from the Authentication Request to the ID Token.
# Sufficient entropy MUST be present in the nonce values used to
# prevent attackers from guessing values. For implementation notes, see
# Section 15.5.2.
if not request.nonce:
raise InvalidRequestError(
request=request,
description='Request is missing mandatory nonce parameter.'
)
return request_info
| 1,971 | Python | .py | 42 | 39.238095 | 87 | 0.682813 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,776 | dispatchers.py | rembo10_headphones/lib/oauthlib/openid/connect/core/grant_types/dispatchers.py |
import logging
log = logging.getLogger(__name__)
class Dispatcher:
default_grant = None
oidc_grant = None
class AuthorizationCodeGrantDispatcher(Dispatcher):
"""
This is an adapter class that will route simple Authorization Code
requests, those that have `response_type=code` and a scope including
`openid` to either the `default_grant` or the `oidc_grant` based on
the scopes requested.
"""
def __init__(self, default_grant=None, oidc_grant=None):
self.default_grant = default_grant
self.oidc_grant = oidc_grant
def _handler_for_request(self, request):
handler = self.default_grant
if request.scopes and "openid" in request.scopes:
handler = self.oidc_grant
log.debug('Selecting handler for request %r.', handler)
return handler
def create_authorization_response(self, request, token_handler):
"""Read scope and route to the designated handler."""
return self._handler_for_request(request).create_authorization_response(request, token_handler)
def validate_authorization_request(self, request):
"""Read scope and route to the designated handler."""
return self._handler_for_request(request).validate_authorization_request(request)
class ImplicitTokenGrantDispatcher(Dispatcher):
"""
This is an adapter class that will route simple Authorization
requests, those that have `id_token` in `response_type` and a scope
including `openid` to either the `default_grant` or the `oidc_grant`
based on the scopes requested.
"""
def __init__(self, default_grant=None, oidc_grant=None):
self.default_grant = default_grant
self.oidc_grant = oidc_grant
def _handler_for_request(self, request):
handler = self.default_grant
if request.scopes and "openid" in request.scopes and 'id_token' in request.response_type:
handler = self.oidc_grant
log.debug('Selecting handler for request %r.', handler)
return handler
def create_authorization_response(self, request, token_handler):
"""Read scope and route to the designated handler."""
return self._handler_for_request(request).create_authorization_response(request, token_handler)
def validate_authorization_request(self, request):
"""Read scope and route to the designated handler."""
return self._handler_for_request(request).validate_authorization_request(request)
class AuthorizationTokenGrantDispatcher(Dispatcher):
"""
This is an adapter class that will route simple token requests, those
whose authorization_code has a scope including 'openid', to either the
default_grant or the oidc_grant based on the scopes requested.
"""
def __init__(self, request_validator, default_grant=None, oidc_grant=None):
self.default_grant = default_grant
self.oidc_grant = oidc_grant
self.request_validator = request_validator
def _handler_for_request(self, request):
handler = self.default_grant
scopes = ()
parameters = dict(request.decoded_body)
client_id = parameters.get('client_id', None)
code = parameters.get('code', None)
redirect_uri = parameters.get('redirect_uri', None)
# If code is not present, fall back to `default_grant`, which will
# raise an error for the missing `code` in the `create_token_response` step.
if code:
scopes = self.request_validator.get_authorization_code_scopes(client_id, code, redirect_uri, request)
if 'openid' in scopes:
handler = self.oidc_grant
log.debug('Selecting handler for request %r.', handler)
return handler
def create_token_response(self, request, token_handler):
"""Read scope and route to the designated handler."""
handler = self._handler_for_request(request)
return handler.create_token_response(request, token_handler)
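The routing decision can be exercised in isolation; the stub classes below are illustrative stand-ins, not oauthlib API:

# Demonstrates scope-based routing in AuthorizationCodeGrantDispatcher.
class StubGrant:
    def __init__(self, name):
        self.name = name

class StubRequest:
    scopes = ['openid', 'profile']

dispatcher = AuthorizationCodeGrantDispatcher(
    default_grant=StubGrant('plain-oauth2'),
    oidc_grant=StubGrant('oidc'),
)
print(dispatcher._handler_for_request(StubRequest()).name)  # -> 'oidc'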
| 3,979 | Python | .py | 77 | 44.285714 | 113 | 0.700619 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,777 | __init__.py | rembo10_headphones/lib/oauthlib/openid/connect/core/grant_types/__init__.py |
"""
oauthlib.openid.connect.core.grant_types
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from .authorization_code import AuthorizationCodeGrant
from .base import GrantTypeBase
from .dispatchers import (
AuthorizationCodeGrantDispatcher, AuthorizationTokenGrantDispatcher,
ImplicitTokenGrantDispatcher,
)
from .hybrid import HybridGrant
from .implicit import ImplicitGrant
| 381 | Python | .py | 12 | 30.083333 | 72 | 0.766938 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,778 | authorization_code.py | rembo10_headphones/lib/oauthlib/openid/connect/core/grant_types/authorization_code.py |
"""
oauthlib.openid.connect.core.grant_types
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import logging
from oauthlib.oauth2.rfc6749.grant_types.authorization_code import (
AuthorizationCodeGrant as OAuth2AuthorizationCodeGrant,
)
from .base import GrantTypeBase
log = logging.getLogger(__name__)
class AuthorizationCodeGrant(GrantTypeBase):
def __init__(self, request_validator=None, **kwargs):
self.proxy_target = OAuth2AuthorizationCodeGrant(
request_validator=request_validator, **kwargs)
self.custom_validators.post_auth.append(
self.openid_authorization_validator)
self.register_token_modifier(self.add_id_token)
def add_id_token(self, token, token_handler, request):
"""
Construct an initial version of id_token, and let the
request_validator sign or encrypt it.
The authorization_code version of this method is used to
retrieve the nonce accordingly to the code storage.
"""
# Treat it as normal OAuth 2 auth code request if openid is not present
if not request.scopes or 'openid' not in request.scopes:
return token
nonce = self.request_validator.get_authorization_code_nonce(
request.client_id,
request.code,
request.redirect_uri,
request
)
return super().add_id_token(token, token_handler, request, nonce=nonce)
| 1,441 | Python | .py | 34 | 35 | 79 | 0.674535 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,779 | base.py | rembo10_headphones/lib/oauthlib/openid/connect/core/grant_types/base.py |
import base64
import hashlib
import logging
import time
from json import loads
from oauthlib.oauth2.rfc6749.errors import (
ConsentRequired, InvalidRequestError, LoginRequired,
)
log = logging.getLogger(__name__)
class GrantTypeBase:
# Just proxy the majority of method calls through to the
# proxy_target grant type handler, which will usually be either
# the standard OAuth2 AuthCode or Implicit grant types.
def __getattr__(self, attr):
return getattr(self.proxy_target, attr)
def __setattr__(self, attr, value):
proxied_attrs = {'refresh_token', 'response_types'}
if attr in proxied_attrs:
setattr(self.proxy_target, attr, value)
else:
super().__setattr__(attr, value)
def validate_authorization_request(self, request):
"""Validates the OpenID Connect authorization request parameters.
:returns: (list of scopes, dict of request info)
"""
return self.proxy_target.validate_authorization_request(request)
def _inflate_claims(self, request):
# this may be called multiple times in a single request so make sure we only de-serialize the claims once
if request.claims and not isinstance(request.claims, dict):
# specific claims are requested during the Authorization Request and may be requested for inclusion
# in either the id_token or the UserInfo endpoint response
# see http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter
try:
request.claims = loads(request.claims)
except Exception as ex:
raise InvalidRequestError(
description="Malformed claims parameter",
uri="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter"
) from ex
def id_token_hash(self, value, hashfunc=hashlib.sha256):
"""
Its value is the base64url encoding of the left-most half of the
hash of the octets of the ASCII representation of the access_token
value, where the hash algorithm used is the hash algorithm used in
the alg Header Parameter of the ID Token's JOSE Header.
For instance, if the alg is RS256, hash the access_token value
with SHA-256, then take the left-most 128 bits and
base64url-encode them.
For instance, if the alg is HS512, hash the code value with
SHA-512, then take the left-most 256 bits and base64url-encode
them. The c_hash value is a case-sensitive string.
Example of hash from OIDC specification (bound to a JWS using RS256):
code:
Qcb0Orv1zh30vL1MPRsbm-diHiMwcLyZvn1arpZv-Jxf_11jnpEX3Tgfvk
c_hash:
LDktKdoQak3Pk0cnXxCltA
"""
digest = hashfunc(value.encode()).digest()
left_most = len(digest) // 2
return base64.urlsafe_b64encode(digest[:left_most]).decode().rstrip("=")
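The spec example quoted in the docstring can be reproduced with a few lines of standard-library code; a standalone sketch of the same left-half-then-base64url computation:

import base64
import hashlib

# SHA-256 digest, keep the left-most half, base64url-encode, strip padding.
code = 'Qcb0Orv1zh30vL1MPRsbm-diHiMwcLyZvn1arpZv-Jxf_11jnpEX3Tgfvk'
digest = hashlib.sha256(code.encode()).digest()
c_hash = base64.urlsafe_b64encode(digest[:len(digest) // 2]).decode().rstrip('=')
print(c_hash)  # per the spec example quoted above: LDktKdoQak3Pk0cnXxCltA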
def add_id_token(self, token, token_handler, request, nonce=None):
"""
Construct an initial version of id_token, and let the
request_validator sign or encrypt it.
The initial version can contain the fields below, accordingly
to the spec:
- aud
- iat
- nonce
- at_hash
- c_hash
"""
# Treat it as normal OAuth 2 auth code request if openid is not present
if not request.scopes or 'openid' not in request.scopes:
return token
# Only add an id token on auth/token step if asked for.
if request.response_type and 'id_token' not in request.response_type:
return token
# The implementation may mint its own id_token without help.
id_token = self.request_validator.get_id_token(token, token_handler, request)
if id_token:
token['id_token'] = id_token
return token
# Fall back to asking the oauthlib framework for help.
# Start with the technical fields bound to the specification.
id_token = {}
id_token['aud'] = request.client_id
id_token['iat'] = int(time.time())
# nonce is REQUIRED when response_type value is:
# - id_token token (Implicit)
# - id_token (Implicit)
# - code id_token (Hybrid)
# - code id_token token (Hybrid)
#
# nonce is OPTIONAL when response_type value is:
# - code (Authorization Code)
# - code token (Hybrid)
if nonce is not None:
id_token["nonce"] = nonce
# at_hash is REQUIRED when response_type value is:
# - id_token token (Implicit)
# - code id_token token (Hybrid)
#
# at_hash is OPTIONAL when:
# - code (Authorization code)
# - code id_token (Hybrid)
# - code token (Hybrid)
#
# at_hash MAY NOT be used when:
# - id_token (Implicit)
if "access_token" in token:
id_token["at_hash"] = self.id_token_hash(token["access_token"])
# c_hash is REQUIRED when response_type value is:
# - code id_token (Hybrid)
# - code id_token token (Hybrid)
#
# c_hash is OPTIONAL for others.
if "code" in token:
id_token["c_hash"] = self.id_token_hash(token["code"])
# Call request_validator to complete/sign/encrypt id_token
token['id_token'] = self.request_validator.finalize_id_token(id_token, token, token_handler, request)
return token
def openid_authorization_validator(self, request):
"""Perform OpenID Connect specific authorization request validation.
nonce
OPTIONAL. String value used to associate a Client session with
an ID Token, and to mitigate replay attacks. The value is
passed through unmodified from the Authentication Request to
the ID Token. Sufficient entropy MUST be present in the nonce
values used to prevent attackers from guessing values
display
OPTIONAL. ASCII string value that specifies how the
Authorization Server displays the authentication and consent
user interface pages to the End-User. The defined values are:
page - The Authorization Server SHOULD display the
authentication and consent UI consistent with a full User
Agent page view. If the display parameter is not specified,
this is the default display mode.
popup - The Authorization Server SHOULD display the
authentication and consent UI consistent with a popup User
Agent window. The popup User Agent window should be of an
appropriate size for a login-focused dialog and should not
obscure the entire window that it is popping up over.
touch - The Authorization Server SHOULD display the
authentication and consent UI consistent with a device that
leverages a touch interface.
wap - The Authorization Server SHOULD display the
authentication and consent UI consistent with a "feature
phone" type display.
The Authorization Server MAY also attempt to detect the
capabilities of the User Agent and present an appropriate
display.
prompt
OPTIONAL. Space delimited, case sensitive list of ASCII string
values that specifies whether the Authorization Server prompts
the End-User for reauthentication and consent. The defined
values are:
none - The Authorization Server MUST NOT display any
authentication or consent user interface pages. An error is
returned if an End-User is not already authenticated or the
Client does not have pre-configured consent for the
requested Claims or does not fulfill other conditions for
processing the request. The error code will typically be
login_required, interaction_required, or another code
defined in Section 3.1.2.6. This can be used as a method to
check for existing authentication and/or consent.
login - The Authorization Server SHOULD prompt the End-User
for reauthentication. If it cannot reauthenticate the
End-User, it MUST return an error, typically
login_required.
consent - The Authorization Server SHOULD prompt the
End-User for consent before returning information to the
Client. If it cannot obtain consent, it MUST return an
error, typically consent_required.
select_account - The Authorization Server SHOULD prompt the
End-User to select a user account. This enables an End-User
who has multiple accounts at the Authorization Server to
select amongst the multiple accounts that they might have
current sessions for. If it cannot obtain an account
selection choice made by the End-User, it MUST return an
error, typically account_selection_required.
The prompt parameter can be used by the Client to make sure
that the End-User is still present for the current session or
to bring attention to the request. If this parameter contains
none with any other value, an error is returned.
max_age
OPTIONAL. Maximum Authentication Age. Specifies the allowable
elapsed time in seconds since the last time the End-User was
actively authenticated by the OP. If the elapsed time is
greater than this value, the OP MUST attempt to actively
re-authenticate the End-User. (The max_age request parameter
corresponds to the OpenID 2.0 PAPE [OpenID.PAPE] max_auth_age
request parameter.) When max_age is used, the ID Token returned
MUST include an auth_time Claim Value.
ui_locales
OPTIONAL. End-User's preferred languages and scripts for the
user interface, represented as a space-separated list of BCP47
[RFC5646] language tag values, ordered by preference. For
instance, the value "fr-CA fr en" represents a preference for
French as spoken in Canada, then French (without a region
designation), followed by English (without a region
designation). An error SHOULD NOT result if some or all of the
requested locales are not supported by the OpenID Provider.
id_token_hint
OPTIONAL. ID Token previously issued by the Authorization
Server being passed as a hint about the End-User's current or
past authenticated session with the Client. If the End-User
identified by the ID Token is logged in or is logged in by the
request, then the Authorization Server returns a positive
response; otherwise, it SHOULD return an error, such as
login_required. When possible, an id_token_hint SHOULD be
present when prompt=none is used and an invalid_request error
MAY be returned if it is not; however, the server SHOULD
respond successfully when possible, even if it is not present.
The Authorization Server need not be listed as an audience of
the ID Token when it is used as an id_token_hint value. If the
ID Token received by the RP from the OP is encrypted, to use it
as an id_token_hint, the Client MUST decrypt the signed ID
Token contained within the encrypted ID Token. The Client MAY
re-encrypt the signed ID token to the Authentication Server
using a key that enables the server to decrypt the ID Token,
and use the re-encrypted ID token as the id_token_hint value.
login_hint
OPTIONAL. Hint to the Authorization Server about the login
identifier the End-User might use to log in (if necessary).
This hint can be used by an RP if it first asks the End-User
for their e-mail address (or other identifier) and then wants
to pass that value as a hint to the discovered authorization
service. It is RECOMMENDED that the hint value match the value
used for discovery. This value MAY also be a phone number in
the format specified for the phone_number Claim. The use of
this parameter is left to the OP's discretion.
acr_values
OPTIONAL. Requested Authentication Context Class Reference
values. Space-separated string that specifies the acr values
that the Authorization Server is being requested to use for
processing this Authentication Request, with the values
appearing in order of preference. The Authentication Context
Class satisfied by the authentication performed is returned as
the acr Claim Value, as specified in Section 2. The acr Claim
is requested as a Voluntary Claim by this parameter.
"""
# Treat it as normal OAuth 2 auth code request if openid is not present
if not request.scopes or 'openid' not in request.scopes:
return {}
prompt = request.prompt if request.prompt else []
if hasattr(prompt, 'split'):
prompt = prompt.strip().split()
prompt = set(prompt)
if 'none' in prompt:
if len(prompt) > 1:
msg = "Prompt none is mutually exclusive with other values."
raise InvalidRequestError(request=request, description=msg)
if not self.request_validator.validate_silent_login(request):
raise LoginRequired(request=request)
if not self.request_validator.validate_silent_authorization(request):
raise ConsentRequired(request=request)
self._inflate_claims(request)
if not self.request_validator.validate_user_match(
request.id_token_hint, request.scopes, request.claims, request):
msg = "Session user does not match client supplied user."
raise LoginRequired(request=request, description=msg)
request_info = {
'display': request.display,
'nonce': request.nonce,
'prompt': prompt,
'ui_locales': request.ui_locales.split() if request.ui_locales else [],
'id_token_hint': request.id_token_hint,
'login_hint': request.login_hint,
'claims': request.claims
}
return request_info
OpenIDConnectBase = GrantTypeBase
| 15,386 | Python | .py | 271 | 43.487085 | 117 | 0.631981 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,780 | __init__.py | rembo10_headphones/lib/oauthlib/oauth1/__init__.py |
"""
oauthlib.oauth1
~~~~~~~~~~~~~~
This module is a wrapper for the most recent implementation of OAuth 1.0 Client
and Server classes.
"""
from .rfc5849 import Client
from .rfc5849 import (SIGNATURE_HMAC,
SIGNATURE_HMAC_SHA1,
SIGNATURE_HMAC_SHA256,
SIGNATURE_HMAC_SHA512,
SIGNATURE_RSA,
SIGNATURE_RSA_SHA1,
SIGNATURE_RSA_SHA256,
SIGNATURE_RSA_SHA512,
SIGNATURE_PLAINTEXT)
from .rfc5849 import SIGNATURE_TYPE_AUTH_HEADER, SIGNATURE_TYPE_QUERY
from .rfc5849 import SIGNATURE_TYPE_BODY
from .rfc5849.request_validator import RequestValidator
from .rfc5849.endpoints import RequestTokenEndpoint, AuthorizationEndpoint
from .rfc5849.endpoints import AccessTokenEndpoint, ResourceEndpoint
from .rfc5849.endpoints import SignatureOnlyEndpoint, WebApplicationServer
from .rfc5849.errors import (InsecureTransportError,
InvalidClientError,
InvalidRequestError,
InvalidSignatureMethodError,
OAuth1Error)
| 1,187 | Python | .py | 27 | 32.111111 | 79 | 0.639344 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,781 | errors.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/errors.py |
"""
oauthlib.oauth1.rfc5849.errors
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Error used both by OAuth 1 clients and providers to represent the spec
defined error responses for all four core grant types.
"""
from oauthlib.common import add_params_to_uri, urlencode
class OAuth1Error(Exception):
error = None
description = ''
def __init__(self, description=None, uri=None, status_code=400,
request=None):
"""
description: A human-readable ASCII [USASCII] text providing
additional information, used to assist the client
developer in understanding the error that occurred.
Values for the "error_description" parameter MUST NOT
include characters outside the set
x20-21 / x23-5B / x5D-7E.
uri: A URI identifying a human-readable web page with information
about the error, used to provide the client developer with
additional information about the error. Values for the
"error_uri" parameter MUST conform to the URI- Reference
syntax, and thus MUST NOT include characters outside the set
x21 / x23-5B / x5D-7E.
state: A CSRF protection value received from the client.
request: Oauthlib Request object
"""
self.description = description or self.description
message = '({}) {}'.format(self.error, self.description)
if request:
message += ' ' + repr(request)
super().__init__(message)
self.uri = uri
self.status_code = status_code
def in_uri(self, uri):
return add_params_to_uri(uri, self.twotuples)
@property
def twotuples(self):
error = [('error', self.error)]
if self.description:
error.append(('error_description', self.description))
if self.uri:
error.append(('error_uri', self.uri))
return error
@property
def urlencoded(self):
return urlencode(self.twotuples)
class InsecureTransportError(OAuth1Error):
error = 'insecure_transport_protocol'
description = 'Only HTTPS connections are permitted.'
class InvalidSignatureMethodError(OAuth1Error):
error = 'invalid_signature_method'
class InvalidRequestError(OAuth1Error):
error = 'invalid_request'
class InvalidClientError(OAuth1Error):
error = 'invalid_client'
| 2,474 | Python | .py | 57 | 34.192982 | 77 | 0.638032 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,782 | parameters.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/parameters.py |
"""
oauthlib.parameters
~~~~~~~~~~~~~~~~~~~
This module contains methods related to `section 3.5`_ of the OAuth 1.0a spec.
.. _`section 3.5`: https://tools.ietf.org/html/rfc5849#section-3.5
"""
from urllib.parse import urlparse, urlunparse
from oauthlib.common import extract_params, urlencode
from . import utils
# TODO: do we need filter_params now that oauth_params are handled by Request?
# We can easily pass in just oauth protocol params.
@utils.filter_params
def prepare_headers(oauth_params, headers=None, realm=None):
"""**Prepare the Authorization header.**
Per `section 3.5.1`_ of the spec.
Protocol parameters can be transmitted using the HTTP "Authorization"
header field as defined by `RFC2617`_ with the auth-scheme name set to
"OAuth" (case insensitive).
For example::
Authorization: OAuth realm="Example",
oauth_consumer_key="0685bd9184jfhq22",
oauth_token="ad180jjd733klru7",
oauth_signature_method="HMAC-SHA1",
oauth_signature="wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D",
oauth_timestamp="137131200",
oauth_nonce="4572616e48616d6d65724c61686176",
oauth_version="1.0"
.. _`section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1
.. _`RFC2617`: https://tools.ietf.org/html/rfc2617
"""
headers = headers or {}
# Protocol parameters SHALL be included in the "Authorization" header
# field as follows:
authorization_header_parameters_parts = []
for oauth_parameter_name, value in oauth_params:
# 1. Parameter names and values are encoded per Parameter Encoding
# (`Section 3.6`_)
#
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
escaped_name = utils.escape(oauth_parameter_name)
escaped_value = utils.escape(value)
# 2. Each parameter's name is immediately followed by an "=" character
# (ASCII code 61), a '"' character (ASCII code 34), the parameter
# value (MAY be empty), and another '"' character (ASCII code 34).
part = '{}="{}"'.format(escaped_name, escaped_value)
authorization_header_parameters_parts.append(part)
# 3. Parameters are separated by a "," character (ASCII code 44) and
# OPTIONAL linear whitespace per `RFC2617`_.
#
# .. _`RFC2617`: https://tools.ietf.org/html/rfc2617
authorization_header_parameters = ', '.join(
authorization_header_parameters_parts)
# 4. The OPTIONAL "realm" parameter MAY be added and interpreted per
# `RFC2617 section 1.2`_.
#
# .. _`RFC2617 section 1.2`: https://tools.ietf.org/html/rfc2617#section-1.2
if realm:
# NOTE: realm should *not* be escaped
authorization_header_parameters = ('realm="%s", ' % realm +
authorization_header_parameters)
# the auth-scheme name set to "OAuth" (case insensitive).
authorization_header = 'OAuth %s' % authorization_header_parameters
# contribute the Authorization header to the given headers
full_headers = {}
full_headers.update(headers)
full_headers['Authorization'] = authorization_header
return full_headers
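As an illustration, feeding already-signed protocol parameters (the values below are placeholders) through `prepare_headers` produces an Authorization header of the shape shown in the docstring:

# Placeholder protocol parameters as 2-tuples; real values come from signing.
oauth_params = [
    ('oauth_consumer_key', '0685bd9184jfhq22'),
    ('oauth_signature_method', 'HMAC-SHA1'),
    ('oauth_timestamp', '137131200'),
    ('oauth_nonce', '4572616e48616d6d65724c61686176'),
    ('oauth_signature', 'dummysignature'),
]
headers = prepare_headers(oauth_params, realm='Example')
print(headers['Authorization'])
# -> OAuth realm="Example", oauth_consumer_key="0685bd9184jfhq22", ...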
def _append_params(oauth_params, params):
"""Append OAuth params to an existing set of parameters.
Both params and oauth_params must be lists of 2-tuples.
Per `section 3.5.2`_ and `3.5.3`_ of the spec.
.. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2
.. _`3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3
"""
merged = list(params)
merged.extend(oauth_params)
# The request URI / entity-body MAY include other request-specific
# parameters, in which case, the protocol parameters SHOULD be appended
# following the request-specific parameters, properly separated by an "&"
# character (ASCII code 38)
merged.sort(key=lambda i: i[0].startswith('oauth_'))
return merged
def prepare_form_encoded_body(oauth_params, body):
"""Prepare the Form-Encoded Body.
Per `section 3.5.2`_ of the spec.
.. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2
"""
# append OAuth params to the existing body
return _append_params(oauth_params, body)
def prepare_request_uri_query(oauth_params, uri):
"""Prepare the Request URI Query.
Per `section 3.5.3`_ of the spec.
.. _`section 3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3
"""
# append OAuth params to the existing set of query components
sch, net, path, par, query, fra = urlparse(uri)
query = urlencode(
_append_params(oauth_params, extract_params(query) or []))
return urlunparse((sch, net, path, par, query, fra))
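And placing parameters in the query string instead, for illustration; existing query components are preserved and the oauth_* parameters are appended after them:

# Illustrative query-string transmission (section 3.5.3); values are placeholders.
signed = prepare_request_uri_query(
    [('oauth_consumer_key', '0685bd9184jfhq22'),
     ('oauth_signature', 'dummysignature')],
    'https://api.example.com/photos?file=vacation.jpg',
)
print(signed)
# -> https://api.example.com/photos?file=vacation.jpg&oauth_consumer_key=...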
| 4,802 | Python | .py | 99 | 42.20202 | 80 | 0.672307 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,783 | request_validator.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/request_validator.py |
"""
oauthlib.oauth1.rfc5849
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
from . import SIGNATURE_METHODS, utils
class RequestValidator:
"""A validator/datastore interaction base class for OAuth 1 providers.
OAuth providers should inherit from RequestValidator and implement the
methods and properties outlined below. Further details are provided in the
documentation for each method and property.
Methods used to check the format of input parameters. Common tests include
length, character set, membership, range or pattern. These tests are
referred to as `whitelisting or blacklisting`_. Whitelisting is better
but blacklisting can be useful to spot malicious activity.
The following methods have a default implementation:
- check_client_key
- check_request_token
- check_access_token
- check_nonce
- check_verifier
- check_realms
The methods above default to whitelisting input parameters, checking that
they are alphanumerical and between a minimum and maximum length. Rather
than overloading the methods, a few properties can be used to configure
these methods.
* @safe_characters -> (character set)
* @client_key_length -> (min, max)
* @request_token_length -> (min, max)
* @access_token_length -> (min, max)
* @nonce_length -> (min, max)
* @verifier_length -> (min, max)
* @realms -> [list, of, realms]
Methods used to validate/invalidate input parameters. These checks usually
hit either persistent or temporary storage such as databases or the
filesystem. See each methods documentation for detailed usage.
The following methods must be implemented:
- validate_client_key
- validate_request_token
- validate_access_token
- validate_timestamp_and_nonce
- validate_redirect_uri
- validate_requested_realms
- validate_realms
- validate_verifier
- invalidate_request_token
Methods used to retrieve sensitive information from storage.
The following methods must be implemented:
- get_client_secret
- get_request_token_secret
- get_access_token_secret
- get_rsa_key
- get_realms
- get_default_realms
- get_redirect_uri
Methods used to save credentials.
The following methods must be implemented:
- save_request_token
- save_verifier
- save_access_token
Methods used to verify input parameters. These methods are used while
the user authorizes the request token (AuthorizationEndpoint), to check
if parameters are valid. During token authorization the request is not
signed, thus the 'validation' methods can not be used. The following
methods must be implemented:
- verify_realms
- verify_request_token
To prevent timing attacks it is necessary to not exit early even if the
client key or resource owner key is invalid. Instead dummy values should
be used during the remaining verification process. It is very important
that the dummy client and token are valid input parameters to the methods
get_client_secret, get_rsa_key and get_(access/request)_token_secret and
that the running time of those methods when given a dummy value remain
equivalent to the running time when given a valid client/resource owner.
The following properties must be implemented:
* @dummy_client
* @dummy_request_token
* @dummy_access_token
Example implementations have been provided; note that the database used
is a simple dictionary and serves only an illustrative purpose. Use
whichever database suits your project; how to access it is entirely up
to you.
The methods are introduced in an order which should make understanding
their use more straightforward and as such it could be worth reading what
follows in chronological order.
.. _`whitelisting or blacklisting`: https://www.schneier.com/blog/archives/2011/01/whitelisting_vs.html
"""
def __init__(self):
pass
@property
def allowed_signature_methods(self):
return SIGNATURE_METHODS
@property
def safe_characters(self):
return set(utils.UNICODE_ASCII_CHARACTER_SET)
@property
def client_key_length(self):
return 20, 30
@property
def request_token_length(self):
return 20, 30
@property
def access_token_length(self):
return 20, 30
@property
def timestamp_lifetime(self):
return 600
@property
def nonce_length(self):
return 20, 30
@property
def verifier_length(self):
return 20, 30
@property
def realms(self):
return []
@property
def enforce_ssl(self):
return True
def check_client_key(self, client_key):
"""Check that the client key only contains safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.client_key_length
return (set(client_key) <= self.safe_characters and
lower <= len(client_key) <= upper)
def check_request_token(self, request_token):
"""Checks that the request token contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.request_token_length
return (set(request_token) <= self.safe_characters and
lower <= len(request_token) <= upper)
def check_access_token(self, request_token):
"""Checks that the token contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.access_token_length
return (set(request_token) <= self.safe_characters and
lower <= len(request_token) <= upper)
def check_nonce(self, nonce):
"""Checks that the nonce only contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.nonce_length
return (set(nonce) <= self.safe_characters and
lower <= len(nonce) <= upper)
def check_verifier(self, verifier):
"""Checks that the verifier contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.verifier_length
return (set(verifier) <= self.safe_characters and
lower <= len(verifier) <= upper)
def check_realms(self, realms):
"""Check that the realm is one of a set allowed realms."""
return all(r in self.realms for r in realms)
def _subclass_must_implement(self, fn):
"""
Returns a NotImplementedError for a function that should be implemented.
:param fn: name of the function
"""
m = "Missing function implementation in {}: {}".format(type(self), fn)
return NotImplementedError(m)
@property
def dummy_client(self):
"""Dummy client used when an invalid client key is supplied.
:returns: The dummy client key string.
The dummy client should be associated with either a client secret,
an RSA key or both, depending on which signature methods are supported.
Providers should make sure that
get_client_secret(dummy_client)
get_rsa_key(dummy_client)
return a valid secret or key for the dummy client.
This method is used by
* AccessTokenEndpoint
* RequestTokenEndpoint
* ResourceEndpoint
* SignatureOnlyEndpoint
"""
raise self._subclass_must_implement("dummy_client")
@property
def dummy_request_token(self):
"""Dummy request token used when an invalid token was supplied.
:returns: The dummy request token string.
The dummy request token should be associated with a request token
secret such that get_request_token_secret(.., dummy_request_token)
returns a valid secret.
This method is used by
* AccessTokenEndpoint
"""
raise self._subclass_must_implement("dummy_request_token")
@property
def dummy_access_token(self):
"""Dummy access token used when an invalid token was supplied.
:returns: The dummy access token string.
The dummy access token should be associated with an access token
secret such that get_access_token_secret(.., dummy_access_token)
returns a valid secret.
This method is used by
* ResourceEndpoint
"""
raise self._subclass_must_implement("dummy_access_token")
def get_client_secret(self, client_key, request):
"""Retrieves the client secret associated with the client key.
:param client_key: The client/consumer key.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: The client secret as a string.
This method must allow the use of a dummy client_key value.
Fetching the secret using the dummy key must take the same amount of
time as fetching a secret for a valid client::
# Unlikely to be near constant time as it uses two database
# lookups for a valid client, and only one for an invalid.
from your_datastore import ClientSecret
if ClientSecret.has(client_key):
return ClientSecret.get(client_key)
else:
return 'dummy'
# Aim to mimic number of latency inducing operations no matter
# whether the client is valid or not.
from your_datastore import ClientSecret
return ClientSecret.get(client_key, 'dummy')
Note that the returned key must be in plaintext.
This method is used by
* AccessTokenEndpoint
* RequestTokenEndpoint
* ResourceEndpoint
* SignatureOnlyEndpoint
"""
raise self._subclass_must_implement('get_client_secret')
def get_request_token_secret(self, client_key, token, request):
"""Retrieves the shared secret associated with the request token.
:param client_key: The client/consumer key.
:param token: The request token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: The token secret as a string.
This method must allow the use of dummy values and the running time
must be roughly equivalent to the running time for valid values::
# Unlikely to be near constant time as it uses two database
# lookups for a valid client, and only one for an invalid.
from your_datastore import RequestTokenSecret
if RequestTokenSecret.has(client_key):
    return RequestTokenSecret.get((client_key, token))
else:
    return 'dummy'
# Aim to mimic number of latency inducing operations no matter
# whether the client is valid or not.
from your_datastore import RequestTokenSecret
return RequestTokenSecret.get((client_key, token), 'dummy')
Note that the returned key must be in plaintext.
This method is used by
* AccessTokenEndpoint
"""
raise self._subclass_must_implement('get_request_token_secret')
def get_access_token_secret(self, client_key, token, request):
"""Retrieves the shared secret associated with the access token.
:param client_key: The client/consumer key.
:param token: The access token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: The token secret as a string.
This method must allow the use of dummy values and the running time
must be roughly equivalent to the running time for valid values::
# Unlikely to be near constant time as it uses two database
# lookups for a valid client, and only one for an invalid.
from your_datastore import AccessTokenSecret
if AccessTokenSecret.has(client_key):
    return AccessTokenSecret.get((client_key, token))
else:
    return 'dummy'
# Aim to mimic number of latency inducing operations no matter
# whether the client is valid or not.
from your_datastore import AccessTokenSecret
return AccessTokenSecret.get((client_key, token), 'dummy')
Note that the returned key must be in plaintext.
This method is used by
* ResourceEndpoint
"""
raise self._subclass_must_implement("get_access_token_secret")
def get_default_realms(self, client_key, request):
"""Get the default realms for a client.
:param client_key: The client/consumer key.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: The list of default realms associated with the client.
The list of default realms will be set during client registration and
is outside the scope of OAuthLib.
This method is used by
* RequestTokenEndpoint
"""
raise self._subclass_must_implement("get_default_realms")
def get_realms(self, token, request):
"""Get realms associated with a request token.
:param token: The request token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: The list of realms associated with the request token.
This method is used by
* AuthorizationEndpoint
* AccessTokenEndpoint
"""
raise self._subclass_must_implement("get_realms")
def get_redirect_uri(self, token, request):
"""Get the redirect URI associated with a request token.
:param token: The request token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: The redirect URI associated with the request token.
It may be desirable to return a custom URI if the redirect is set to "oob".
In this case, the user will be redirected to the returned URI and at that
endpoint the verifier can be displayed.
This method is used by
* AuthorizationEndpoint
"""
raise self._subclass_must_implement("get_redirect_uri")
def get_rsa_key(self, client_key, request):
"""Retrieves a previously stored client provided RSA key.
:param client_key: The client/consumer key.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: The rsa public key as a string.
This method must allow the use of a dummy client_key value. Fetching
the rsa key using the dummy key must take the same amount of time
as fetching a key for a valid client. The dummy key must also be of
the same bit length as client keys.
Note that the key must be returned in plaintext.
This method is used by
* AccessTokenEndpoint
* RequestTokenEndpoint
* ResourceEndpoint
* SignatureOnlyEndpoint
"""
raise self._subclass_must_implement("get_rsa_key")
def invalidate_request_token(self, client_key, request_token, request):
"""Invalidates a used request token.
:param client_key: The client/consumer key.
:param request_token: The request token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: None
Per `Section 2.3`_ of the spec:
"The server MUST (...) ensure that the temporary
credentials have not expired or been used before."
.. _`Section 2.3`: https://tools.ietf.org/html/rfc5849#section-2.3
This method should ensure that the provided token won't validate anymore.
This can be as simple as removing the RequestToken from storage or setting
a specific flag that marks it as invalid (note that such a flag must also
be checked during request token validation).
This method is used by
* AccessTokenEndpoint
"""
raise self._subclass_must_implement("invalidate_request_token")
def validate_client_key(self, client_key, request):
"""Validates that supplied client key is a registered and valid client.
:param client_key: The client/consumer key.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
Note that if the dummy client is supplied it should validate in the same
or nearly the same amount of time as a valid one.
Ensure latency inducing tasks are mimicked even for dummy clients.
For example, use::
from your_datastore import Client
try:
    return Client.exists(client_key)
except DoesNotExist:
    return False
Rather than::
from your_datastore import Client
if client_key == self.dummy_client:
    return False
else:
    return Client.exists(client_key)
This method is used by
* AccessTokenEndpoint
* RequestTokenEndpoint
* ResourceEndpoint
* SignatureOnlyEndpoint
"""
raise self._subclass_must_implement("validate_client_key")
def validate_request_token(self, client_key, token, request):
"""Validates that supplied request token is registered and valid.
:param client_key: The client/consumer key.
:param token: The request token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
Note that if the dummy request_token is supplied it should validate in
the same or nearly the same amount of time as a valid one.
Ensure latency inducing tasks are mimicked even for dummy tokens.
For example, use::
from your_datastore import RequestToken
try:
    return RequestToken.exists(client_key, token)
except DoesNotExist:
    return False
Rather than::
from your_datastore import RequestToken
if token == self.dummy_request_token:
    return False
else:
    return RequestToken.exists(client_key, token)
This method is used by
* AccessTokenEndpoint
"""
raise self._subclass_must_implement("validate_request_token")
def validate_access_token(self, client_key, token, request):
"""Validates that supplied access token is registered and valid.
:param client_key: The client/consumer key.
:param token: The access token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
Note that if the dummy access token is supplied it should validate in
the same or nearly the same amount of time as a valid one.
Ensure latency inducing tasks are mimicked even for dummy tokens.
For example, use::
from your_datastore import AccessToken
try:
    return AccessToken.exists(client_key, token)
except DoesNotExist:
    return False
Rather than::
from your_datastore import AccessToken
if token == self.dummy_access_token:
    return False
else:
    return AccessToken.exists(client_key, token)
This method is used by
* ResourceEndpoint
"""
raise self._subclass_must_implement("validate_access_token")
def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
request, request_token=None, access_token=None):
"""Validates that the nonce has not been used before.
:param client_key: The client/consumer key.
:param timestamp: The ``oauth_timestamp`` parameter.
:param nonce: The ``oauth_nonce`` parameter.
:param request_token: Request token string, if any.
:param access_token: Access token string, if any.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
Per `Section 3.3`_ of the spec.
"A nonce is a random string, uniquely generated by the client to allow
the server to verify that a request has never been made before and
helps prevent replay attacks when requests are made over a non-secure
channel. The nonce value MUST be unique across all requests with the
same timestamp, client credentials, and token combinations."
.. _`Section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3
One of the first validation checks that will be made is for the validity
of the nonce and timestamp, which are associated with a client key and
possibly a token. If invalid, immediately fail the request by returning
False. If the nonce/timestamp pair has been used before, you may just
have detected a replay attack. Therefore it is an essential part of
OAuth security that you do not allow nonce/timestamp reuse. Note that
this validation check is done before checking the validity of the client
and token::
nonces_and_timestamps_database = [
(u'foo', 1234567890, u'rannoMstrInghere', u'bar')
]
def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
request_token=None, access_token=None):
return ((client_key, timestamp, nonce, request_token or access_token)
not in self.nonces_and_timestamps_database)
This method is used by
* AccessTokenEndpoint
* RequestTokenEndpoint
* ResourceEndpoint
* SignatureOnlyEndpoint
"""
raise self._subclass_must_implement("validate_timestamp_and_nonce")
def validate_redirect_uri(self, client_key, redirect_uri, request):
"""Validates the client supplied redirection URI.
:param client_key: The client/consumer key.
:param redirect_uri: The URI the client wishes to redirect back to after
authorization is successful.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
It is highly recommended that OAuth providers require their clients
to register all redirection URIs prior to using them in requests and
register them as absolute URIs. See `CWE-601`_ for more information
about open redirection attacks.
By requiring registration of all redirection URIs it should be
straightforward for the provider to verify whether the supplied
redirect_uri is valid or not.
Alternatively per `Section 2.1`_ of the spec:
"If the client is unable to receive callbacks or a callback URI has
been established via other means, the parameter value MUST be set to
"oob" (case sensitive), to indicate an out-of-band configuration."
.. _`CWE-601`: http://cwe.mitre.org/top25/index.html#CWE-601
.. _`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1
This method is used by
* RequestTokenEndpoint
"""
raise self._subclass_must_implement("validate_redirect_uri")
def validate_requested_realms(self, client_key, realms, request):
"""Validates that the client may request access to the realm.
:param client_key: The client/consumer key.
:param realms: The list of realms that client is requesting access to.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
This method is invoked when obtaining a request token and should
tie a realm to the request token and after user authorization
this realm restriction should transfer to the access token.
This method is used by
* RequestTokenEndpoint
"""
raise self._subclass_must_implement("validate_requested_realms")
def validate_realms(self, client_key, token, request, uri=None,
realms=None):
"""Validates access to the request realm.
:param client_key: The client/consumer key.
:param token: A request token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:param uri: The URI the realms are protecting.
:param realms: A list of realms that must have been granted to
the access token.
:returns: True or False
How providers choose to use the realm parameter is outside the OAuth
specification but it is commonly used to restrict access to a subset
of protected resources such as "photos".
realms is a convenience parameter which can be used to provide a
per-view-method, pre-defined list of allowed realms.
Can be as simple as::
from your_datastore import RequestToken
request_token = RequestToken.get(token, None)
if not request_token:
return False
return set(request_token.realms).issuperset(set(realms))
This method is used by
* ResourceEndpoint
"""
raise self._subclass_must_implement("validate_realms")
def validate_verifier(self, client_key, token, verifier, request):
"""Validates a verification code.
:param client_key: The client/consumer key.
:param token: A request token string.
:param verifier: The authorization verifier string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
OAuth providers issue a verification code to clients after the
resource owner authorizes access. This code is used by the client to
obtain token credentials and the provider must verify that the
verifier is valid and associated with the client as well as the
resource owner.
Verifier validation should be done in near constant time
(to avoid verifier enumeration). To achieve this we need a
constant time string comparison which is provided by OAuthLib
in ``oauthlib.common.safe_string_equals``::
from your_datastore import Verifier
correct_verifier = Verifier.get(client_key, request_token)
from oauthlib.common import safe_string_equals
return safe_string_equals(verifier, correct_verifier)
This method is used by
* AccessTokenEndpoint
"""
raise self._subclass_must_implement("validate_verifier")
def verify_request_token(self, token, request):
"""Verify that the given OAuth1 request token is valid.
:param token: A request token string.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
This method is used only in AuthorizationEndpoint to check whether the
oauth_token given in the authorization URL is valid or not.
This request is not signed, and thus the similar
``validate_request_token`` method cannot be used.
This method is used by
* AuthorizationEndpoint
"""
raise self._subclass_must_implement("verify_request_token")
def verify_realms(self, token, realms, request):
"""Verify authorized realms to see if they match those given to token.
:param token: An access token string.
:param realms: A list of realms the client attempts to access.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: True or False
This prevents the list of authorized realms sent by the client during
the authorization step from being altered to include realms outside
those bound to the request token.
Can be as simple as::
valid_realms = self.get_realms(token)
return all((r in valid_realms for r in realms))
This method is used by
* AuthorizationEndpoint
"""
raise self._subclass_must_implement("verify_realms")
def save_access_token(self, token, request):
"""Save an OAuth1 access token.
:param token: A dict with token credentials.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
The token dictionary will at minimum include
* ``oauth_token`` the access token string.
* ``oauth_token_secret`` the token specific secret used in signing.
* ``oauth_authorized_realms`` a space separated list of realms.
Client key can be obtained from ``request.client_key``.
The list of realms (not a joined string) can be obtained from
``request.realm``.
This method is used by
* AccessTokenEndpoint
"""
raise self._subclass_must_implement("save_access_token")
def save_request_token(self, token, request):
"""Save an OAuth1 request token.
:param token: A dict with token credentials.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
The token dictionary will at minimum include
* ``oauth_token`` the request token string.
* ``oauth_token_secret`` the token specific secret used in signing.
* ``oauth_callback_confirmed`` the string ``true``.
Client key can be obtained from ``request.client_key``.
This method is used by
* RequestTokenEndpoint
"""
raise self._subclass_must_implement("save_request_token")
def save_verifier(self, token, verifier, request):
"""Associate an authorization verifier with a request token.
:param token: A request token string.
:param verifier: A dictionary containing the oauth_verifier and
oauth_token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
We need to associate verifiers with tokens for validation during the
access token request.
Note that unlike the save_*_token methods, the token here is the
``oauth_token`` string from the previously saved request token.
This method is used by
* AuthorizationEndpoint
"""
raise self._subclass_must_implement("save_verifier")
# ---- rembo10_headphones/lib/oauthlib/oauth1/rfc5849/utils.py ----
"""
oauthlib.utils
~~~~~~~~~~~~~~
This module contains utility methods used by various parts of the OAuth
spec.
"""
import urllib.request as urllib2
from oauthlib.common import quote, unquote
UNICODE_ASCII_CHARACTER_SET = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789')
def filter_params(target):
"""Decorator which filters params to remove non-oauth_* parameters
Assumes the decorated method takes a params dict or list of tuples as its
first argument.
"""
def wrapper(params, *args, **kwargs):
params = filter_oauth_params(params)
return target(params, *args, **kwargs)
wrapper.__doc__ = target.__doc__
return wrapper
def filter_oauth_params(params):
"""Removes all non oauth parameters from a dict or a list of params."""
is_oauth = lambda kv: kv[0].startswith("oauth_")
if isinstance(params, dict):
return list(filter(is_oauth, list(params.items())))
else:
return list(filter(is_oauth, params))
def escape(u):
"""Escape a unicode string in an OAuth-compatible fashion.
Per `section 3.6`_ of the spec.
.. _`section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
"""
if not isinstance(u, str):
raise ValueError('Only unicode objects are escapable. ' +
'Got {!r} of type {}.'.format(u, type(u)))
# Letters, digits, and the characters '_.-' are already treated as safe
# by urllib.quote(). We need to add '~' to fully support rfc5849.
return quote(u, safe=b'~')
def unescape(u):
if not isinstance(u, str):
raise ValueError('Only unicode objects are unescapable.')
return unquote(u)
def parse_keqv_list(l):
"""A unicode-safe version of urllib2.parse_keqv_list"""
# With Python 2.6, parse_keqv_list handles unicode fine
return urllib2.parse_keqv_list(l)
def parse_http_list(u):
"""A unicode-safe version of urllib2.parse_http_list"""
# With Python 2.6, parse_http_list handles unicode fine
return urllib2.parse_http_list(u)
def parse_authorization_header(authorization_header):
"""Parse an OAuth authorization header into a list of 2-tuples"""
auth_scheme = 'OAuth '.lower()
if authorization_header[:len(auth_scheme)].lower().startswith(auth_scheme):
items = parse_http_list(authorization_header[len(auth_scheme):])
try:
return list(parse_keqv_list(items).items())
except (IndexError, ValueError):
pass
raise ValueError('Malformed authorization header')
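# Illustrative usage of the helpers above (values are hypothetical):
#
#     >>> filter_oauth_params({'oauth_nonce': 'abc', 'realm': 'photos'})
#     [('oauth_nonce', 'abc')]
#     >>> escape('hello world')
#     'hello%20world'
#     >>> parse_authorization_header('OAuth oauth_nonce="abc"')
#     [('oauth_nonce', 'abc')]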
# ---- rembo10_headphones/lib/oauthlib/oauth1/rfc5849/__init__.py ----
"""
oauthlib.oauth1.rfc5849
~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
It supports all three standard signature methods defined in RFC 5849:
- HMAC-SHA1
- RSA-SHA1
- PLAINTEXT
It also supports signature methods that are not defined in RFC 5849. These are
based on the standard ones but replace SHA-1 with the more secure SHA-256:
- HMAC-SHA256
- RSA-SHA256
"""
import base64
import hashlib
import logging
import urllib.parse as urlparse
from oauthlib.common import (
Request, generate_nonce, generate_timestamp, to_unicode, urlencode,
)
from . import parameters, signature
log = logging.getLogger(__name__)
# Available signature methods
#
# Note: SIGNATURE_HMAC and SIGNATURE_RSA are kept for backward compatibility
# with previous versions of this library, when the only HMAC-based and
# RSA-based signature methods were HMAC-SHA1 and RSA-SHA1. But now that it
# supports other hashing algorithms besides SHA1, explicitly identifying which
# hashing algorithm is being used is recommended.
#
# Note: if additional values are defined here, don't forget to update the
# imports in "../__init__.py" so they are available outside this module.
SIGNATURE_HMAC_SHA1 = "HMAC-SHA1"
SIGNATURE_HMAC_SHA256 = "HMAC-SHA256"
SIGNATURE_HMAC_SHA512 = "HMAC-SHA512"
SIGNATURE_HMAC = SIGNATURE_HMAC_SHA1 # deprecated variable for HMAC-SHA1
SIGNATURE_RSA_SHA1 = "RSA-SHA1"
SIGNATURE_RSA_SHA256 = "RSA-SHA256"
SIGNATURE_RSA_SHA512 = "RSA-SHA512"
SIGNATURE_RSA = SIGNATURE_RSA_SHA1 # deprecated variable for RSA-SHA1
SIGNATURE_PLAINTEXT = "PLAINTEXT"
SIGNATURE_METHODS = (
SIGNATURE_HMAC_SHA1,
SIGNATURE_HMAC_SHA256,
SIGNATURE_HMAC_SHA512,
SIGNATURE_RSA_SHA1,
SIGNATURE_RSA_SHA256,
SIGNATURE_RSA_SHA512,
SIGNATURE_PLAINTEXT
)
SIGNATURE_TYPE_AUTH_HEADER = 'AUTH_HEADER'
SIGNATURE_TYPE_QUERY = 'QUERY'
SIGNATURE_TYPE_BODY = 'BODY'
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
class Client:
"""A client used to sign OAuth 1.0 RFC 5849 requests."""
SIGNATURE_METHODS = {
SIGNATURE_HMAC_SHA1: signature.sign_hmac_sha1_with_client,
SIGNATURE_HMAC_SHA256: signature.sign_hmac_sha256_with_client,
SIGNATURE_HMAC_SHA512: signature.sign_hmac_sha512_with_client,
SIGNATURE_RSA_SHA1: signature.sign_rsa_sha1_with_client,
SIGNATURE_RSA_SHA256: signature.sign_rsa_sha256_with_client,
SIGNATURE_RSA_SHA512: signature.sign_rsa_sha512_with_client,
SIGNATURE_PLAINTEXT: signature.sign_plaintext_with_client
}
@classmethod
def register_signature_method(cls, method_name, method_callback):
cls.SIGNATURE_METHODS[method_name] = method_callback
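# Illustrative sketch (hypothetical method name and callback): a provider
# needing a non-standard signature method can plug one in, where
# my_sign(sig_base_str, client) returns the signature string:
#
#     Client.register_signature_method('HMAC-SHA3-256', my_sign)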
def __init__(self, client_key,
client_secret=None,
resource_owner_key=None,
resource_owner_secret=None,
callback_uri=None,
signature_method=SIGNATURE_HMAC_SHA1,
signature_type=SIGNATURE_TYPE_AUTH_HEADER,
rsa_key=None, verifier=None, realm=None,
encoding='utf-8', decoding=None,
nonce=None, timestamp=None):
"""Create an OAuth 1 client.
:param client_key: Client key (consumer key), mandatory.
:param client_secret: Client secret (consumer secret).
:param resource_owner_key: Resource owner key (oauth token).
:param resource_owner_secret: Resource owner secret (oauth token secret).
:param callback_uri: Callback used when obtaining request token.
:param signature_method: SIGNATURE_HMAC, SIGNATURE_RSA or SIGNATURE_PLAINTEXT.
:param signature_type: SIGNATURE_TYPE_AUTH_HEADER (default),
SIGNATURE_TYPE_QUERY or SIGNATURE_TYPE_BODY
depending on where you want to embed the oauth
credentials.
:param rsa_key: RSA key used with SIGNATURE_RSA.
:param verifier: Verifier used when obtaining an access token.
:param realm: Realm (scope) to which access is being requested.
:param encoding: If you provide non-unicode input you may use this
to have oauthlib automatically convert.
:param decoding: If you wish that the returned uri, headers and body
from sign be encoded back from unicode, then set
decoding to your preferred encoding, e.g. utf-8.
:param nonce: Use this nonce instead of generating one. (Mainly for testing)
:param timestamp: Use this timestamp instead of using current. (Mainly for testing)
"""
# Convert to unicode using encoding if given, else assume unicode
encode = lambda x: to_unicode(x, encoding) if encoding else x
self.client_key = encode(client_key)
self.client_secret = encode(client_secret)
self.resource_owner_key = encode(resource_owner_key)
self.resource_owner_secret = encode(resource_owner_secret)
self.signature_method = encode(signature_method)
self.signature_type = encode(signature_type)
self.callback_uri = encode(callback_uri)
self.rsa_key = encode(rsa_key)
self.verifier = encode(verifier)
self.realm = encode(realm)
self.encoding = encode(encoding)
self.decoding = encode(decoding)
self.nonce = encode(nonce)
self.timestamp = encode(timestamp)
def __repr__(self):
attrs = vars(self).copy()
attrs['client_secret'] = '****' if attrs['client_secret'] else None
attrs['rsa_key'] = '****' if attrs['rsa_key'] else None
attrs[
'resource_owner_secret'] = '****' if attrs['resource_owner_secret'] else None
attribute_str = ', '.join('{}={}'.format(k, v) for k, v in attrs.items())
return '<{} {}>'.format(self.__class__.__name__, attribute_str)
def get_oauth_signature(self, request):
"""Get an OAuth signature to be used in signing a request
To satisfy `section 3.4.1.2`_ item 2, if the request argument's
headers dict attribute contains a Host item, its value will
replace any netloc part of the request argument's uri attribute
value.
.. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
"""
if self.signature_method == SIGNATURE_PLAINTEXT:
# fast-path
return signature.sign_plaintext(self.client_secret,
self.resource_owner_secret)
uri, headers, body = self._render(request)
collected_params = signature.collect_parameters(
uri_query=urlparse.urlparse(uri).query,
body=body,
headers=headers)
log.debug("Collected params: {}".format(collected_params))
normalized_params = signature.normalize_parameters(collected_params)
normalized_uri = signature.base_string_uri(uri, headers.get('Host', None))
log.debug("Normalized params: {}".format(normalized_params))
log.debug("Normalized URI: {}".format(normalized_uri))
base_string = signature.signature_base_string(request.http_method,
normalized_uri, normalized_params)
log.debug("Signing: signature base string: {}".format(base_string))
if self.signature_method not in self.SIGNATURE_METHODS:
raise ValueError('Invalid signature method.')
sig = self.SIGNATURE_METHODS[self.signature_method](base_string, self)
log.debug("Signature: {}".format(sig))
return sig
def get_oauth_params(self, request):
"""Get the basic OAuth parameters to be used in generating a signature.
"""
nonce = (generate_nonce()
if self.nonce is None else self.nonce)
timestamp = (generate_timestamp()
if self.timestamp is None else self.timestamp)
params = [
('oauth_nonce', nonce),
('oauth_timestamp', timestamp),
('oauth_version', '1.0'),
('oauth_signature_method', self.signature_method),
('oauth_consumer_key', self.client_key),
]
if self.resource_owner_key:
params.append(('oauth_token', self.resource_owner_key))
if self.callback_uri:
params.append(('oauth_callback', self.callback_uri))
if self.verifier:
params.append(('oauth_verifier', self.verifier))
# providing body hash for requests other than x-www-form-urlencoded
# as described in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-4.1.1
# 4.1.1. When to include the body hash
# * [...] MUST NOT include an oauth_body_hash parameter on requests with form-encoded request bodies
# * [...] SHOULD include the oauth_body_hash parameter on all other requests.
# Note that SHA-1 is vulnerable. The spec acknowledges that in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-6.2
# At this time, no further effort has been made to replace SHA-1 for the OAuth Request Body Hash extension.
content_type = request.headers.get('Content-Type', None)
content_type_eligible = content_type and content_type.find('application/x-www-form-urlencoded') < 0
if request.body is not None and content_type_eligible:
params.append(('oauth_body_hash', base64.b64encode(hashlib.sha1(request.body.encode('utf-8')).digest()).decode('utf-8')))
return params
def _render(self, request, formencode=False, realm=None):
"""Render a signed request according to signature type
Returns a 3-tuple containing the request URI, headers, and body.
If the formencode argument is True and the body contains parameters, it
is escaped and returned as a valid formencoded string.
"""
# TODO what if there are body params on a header-type auth?
# TODO what if there are query params on a body-type auth?
uri, headers, body = request.uri, request.headers, request.body
# TODO: right now these prepare_* methods are very narrow in scope--they
# only affect their little thing. In some cases (for example, with
# header auth) it might be advantageous to allow these methods to touch
# other parts of the request, like the headers—so the prepare_headers
# method could also set the Content-Type header to x-www-form-urlencoded
# like the spec requires. This would be a fundamental change though, and
# I'm not sure how I feel about it.
if self.signature_type == SIGNATURE_TYPE_AUTH_HEADER:
headers = parameters.prepare_headers(
request.oauth_params, request.headers, realm=realm)
elif self.signature_type == SIGNATURE_TYPE_BODY and request.decoded_body is not None:
body = parameters.prepare_form_encoded_body(
request.oauth_params, request.decoded_body)
if formencode:
body = urlencode(body)
headers['Content-Type'] = 'application/x-www-form-urlencoded'
elif self.signature_type == SIGNATURE_TYPE_QUERY:
uri = parameters.prepare_request_uri_query(
request.oauth_params, request.uri)
else:
raise ValueError('Unknown signature type specified.')
return uri, headers, body
def sign(self, uri, http_method='GET', body=None, headers=None, realm=None):
"""Sign a request
Signs an HTTP request with the specified parts.
Returns a 3-tuple of the signed request's URI, headers, and body.
Note that http_method is not returned as it is unaffected by the OAuth
signing process. Also worth noting is that duplicate parameters
will be included in the signature, regardless of where they are
specified (query, body).
The body argument may be a dict, a list of 2-tuples, or a formencoded
string. The Content-Type header must be 'application/x-www-form-urlencoded'
if it is present.
If the body argument is not one of the above, it will be returned
verbatim as it is unaffected by the OAuth signing process. Attempting to
sign a request with non-formencoded data using the OAuth body signature
type is invalid and will raise an exception.
If the body does contain parameters, it will be returned as a properly-
formatted formencoded string.
Body may not be included if the http_method is either GET or HEAD as
this changes the semantic meaning of the request.
All string data MUST be unicode or be encoded with the same encoding
scheme supplied to the Client constructor, default utf-8. This includes
strings inside body dicts, for example.
"""
# normalize request data
request = Request(uri, http_method, body, headers,
encoding=self.encoding)
# sanity check
content_type = request.headers.get('Content-Type', None)
multipart = content_type and content_type.startswith('multipart/')
should_have_params = content_type == CONTENT_TYPE_FORM_URLENCODED
has_params = request.decoded_body is not None
# 3.4.1.3.1. Parameter Sources
# [Parameters are collected from the HTTP request entity-body, but only
# if [...]:
# * The entity-body is single-part.
if multipart and has_params:
raise ValueError(
"Headers indicate a multipart body but body contains parameters.")
# * The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
elif should_have_params and not has_params:
raise ValueError(
"Headers indicate a formencoded body but body was not decodable.")
# * The HTTP request entity-header includes the "Content-Type"
# header field set to "application/x-www-form-urlencoded".
elif not should_have_params and has_params:
raise ValueError(
"Body contains parameters but Content-Type header was {} "
"instead of {}".format(content_type or "not set",
CONTENT_TYPE_FORM_URLENCODED))
# 3.5.2. Form-Encoded Body
# Protocol parameters can be transmitted in the HTTP request entity-
# body, but only if the following REQUIRED conditions are met:
# o The entity-body is single-part.
# o The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
# o The HTTP request entity-header includes the "Content-Type" header
# field set to "application/x-www-form-urlencoded".
elif self.signature_type == SIGNATURE_TYPE_BODY and not (
should_have_params and has_params and not multipart):
raise ValueError(
'Body signatures may only be used with form-urlencoded content')
# We amend https://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
# with the clause that parameters from body should only be included
# in non GET or HEAD requests. Extracting the request body parameters
# and including them in the signature base string would give semantic
# meaning to the body, which it should not have according to the
# HTTP 1.1 spec.
elif http_method.upper() in ('GET', 'HEAD') and has_params:
raise ValueError('GET/HEAD requests should not include body.')
# generate the basic OAuth parameters
request.oauth_params = self.get_oauth_params(request)
# generate the signature
request.oauth_params.append(
('oauth_signature', self.get_oauth_signature(request)))
# render the signed request and return it
uri, headers, body = self._render(request, formencode=True,
realm=(realm or self.realm))
if self.decoding:
log.debug('Encoding URI, headers and body to %s.', self.decoding)
uri = uri.encode(self.decoding)
body = body.encode(self.decoding) if body else body
new_headers = {}
for k, v in headers.items():
new_headers[k.encode(self.decoding)] = v.encode(self.decoding)
headers = new_headers
return uri, headers, body
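# Illustrative end-to-end signing sketch (all credential values are
# hypothetical placeholders):
#
#     client = Client('myclientkey123456789012345',
#                     client_secret='myclientsecret',
#                     resource_owner_key='mytokenkey12345678901234',
#                     resource_owner_secret='mytokensecret')
#     uri, headers, body = client.sign('https://example.com/resource?a=1')
#     # headers now carries the OAuth parameters in an Authorization
#     # header, since SIGNATURE_TYPE_AUTH_HEADER is the default.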
# ---- rembo10_headphones/lib/oauthlib/oauth1/rfc5849/signature.py ----
"""
This module is an implementation of `section 3.4`_ of RFC 5849.
**Usage**
Steps for signing a request:
1. Collect parameters from the request using ``collect_parameters``.
2. Normalize those parameters using ``normalize_parameters``.
3. Create the *base string URI* using ``base_string_uri``.
4. Create the *signature base string* from the above three components
using ``signature_base_string``.
5. Pass the *signature base string* and the client credentials to one of the
sign-with-client functions. The HMAC-based signing functions need
client credentials with secrets. The RSA-based signing functions need
client credentials with an RSA private key.
To verify a request, pass the request and credentials to one of the verify
functions. The HMAC-based verify functions need the shared secrets. The
RSA-based verify functions need the RSA public key.
**Scope**
All of the functions in this module should be considered internal to OAuthLib,
since they are not imported into the "oauthlib.oauth1" module. Programs using
OAuthLib should not directly invoke any of the functions in this module.
**Deprecated functions**
The "sign_" methods that are not "_with_client" have been deprecated. They may
be removed in a future release. Since they are all internal functions, this
should have no impact on properly behaving programs.
.. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4
"""
import binascii
import hashlib
import hmac
import logging
import warnings
from oauthlib.common import extract_params, safe_string_equals, urldecode
import urllib.parse as urlparse
from . import utils
log = logging.getLogger(__name__)
# ==== Common functions ==========================================
def signature_base_string(
http_method: str,
base_str_uri: str,
normalized_encoded_request_parameters: str) -> str:
"""
Construct the signature base string.
The *signature base string* is the value that is calculated and signed by
the client. It is also independently calculated by the server to verify
the signature, and therefore must produce the exact same value at both
ends or the signature won't verify.
The rules for calculating the *signature base string* are defined in
`section 3.4.1.1`_ of RFC 5849.
.. _`section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1
"""
# The signature base string is constructed by concatenating together,
# in order, the following HTTP request elements:
# 1. The HTTP request method in uppercase. For example: "HEAD",
# "GET", "POST", etc. If the request uses a custom HTTP method, it
# MUST be encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
base_string = utils.escape(http_method.upper())
# 2. An "&" character (ASCII code 38).
base_string += '&'
# 3. The base string URI from `Section 3.4.1.2`_, after being encoded
# (`Section 3.6`_).
#
# .. _`Section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
base_string += utils.escape(base_str_uri)
# 4. An "&" character (ASCII code 38).
base_string += '&'
# 5. The request parameters as normalized in `Section 3.4.1.3.2`_, after
# being encoded (`Section 3.6`_).
#
# .. _`Section 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
base_string += utils.escape(normalized_encoded_request_parameters)
return base_string
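# Illustrative result (hypothetical, already-normalized inputs):
#
#     >>> signature_base_string('post', 'http://example.com/r%20v/X', 'a=1&b=2')
#     'POST&http%3A%2F%2Fexample.com%2Fr%2520v%2FX&a%3D1%26b%3D2'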
def base_string_uri(uri: str, host: str = None) -> str:
"""
Calculates the _base string URI_.
The *base string URI* is one of the components that make up the
*signature base string*.
The ``host`` is optional. If provided, it is used to override any host and
port values in the ``uri``. The value for ``host`` is usually extracted from
the "Host" request header from the HTTP request. Its value may be just the
hostname, or the hostname followed by a colon and a TCP/IP port number
(hostname:port). If a value for the ``host`` is provided but it does not
contain a port number, the default port number is used (i.e. if the ``uri``
contained a port number, it will be discarded).
The rules for calculating the *base string URI* are defined in
`section 3.4.1.2`_ of RFC 5849.
.. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
:param uri: URI
:param host: hostname with optional port number, separated by a colon
:return: base string URI
"""
if not isinstance(uri, str):
raise ValueError('uri must be a string.')
# FIXME: urlparse does not support unicode
scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri)
# The scheme, authority, and path of the request resource URI `RFC3986`
# are included by constructing an "http" or "https" URI representing
# the request resource (without the query or fragment) as follows:
#
# .. _`RFC3986`: https://tools.ietf.org/html/rfc3986
if not scheme:
raise ValueError('missing scheme')
# Per `RFC 2616 section 5.1.2`_:
#
# Note that the absolute path cannot be empty; if none is present in
# the original URI, it MUST be given as "/" (the server root).
#
# .. _`RFC 2616 section 5.1.2`: https://tools.ietf.org/html/rfc2616#section-5.1.2
if not path:
path = '/'
# 1. The scheme and host MUST be in lowercase.
scheme = scheme.lower()
netloc = netloc.lower()
# Note: if ``host`` is used, it will be converted to lowercase below
# 2. The host and port values MUST match the content of the HTTP
# request "Host" header field.
if host is not None:
netloc = host.lower() # override value in uri with provided host
# 3. The port MUST be included if it is not the default port for the
# scheme, and MUST be excluded if it is the default. Specifically,
# the port MUST be excluded when making an HTTP request `RFC2616`_
# to port 80 or when making an HTTPS request `RFC2818`_ to port 443.
# All other non-default port numbers MUST be included.
#
# .. _`RFC2616`: https://tools.ietf.org/html/rfc2616
# .. _`RFC2818`: https://tools.ietf.org/html/rfc2818
if ':' in netloc:
# Contains a colon ":", so try to parse as "host:port"
hostname, port_str = netloc.split(':', 1)
if len(hostname) == 0:
raise ValueError('missing host') # error: netloc was ":port" or ":"
if len(port_str) == 0:
netloc = hostname # was "host:", so just use the host part
else:
try:
port_num = int(port_str) # try to parse into an integer number
except ValueError:
raise ValueError('port is not an integer')
if port_num <= 0 or 65535 < port_num:
raise ValueError('port out of range') # 16-bit unsigned ints
if (scheme, port_num) in (('http', 80), ('https', 443)):
netloc = hostname # default port for scheme: exclude port num
else:
netloc = hostname + ':' + str(port_num) # use hostname:port
else:
# Does not contain a colon, so entire value must be the hostname
if len(netloc) == 0:
raise ValueError('missing host') # error: netloc was empty string
v = urlparse.urlunparse((scheme, netloc, path, params, '', ''))
# RFC 5849 does not specify which characters are encoded in the
# "base string URI", nor how they are encoded - which is very bad, since
# the signatures won't match if there are any differences. Fortunately,
# most URIs only use characters that are clearly not encoded (e.g. digits
# and A-Z, a-z), so differences between implementations have been avoided.
#
# The example from its section 3.4.1.2 illustrates that spaces in
# the path are percent encoded. But it provides no guidance as to what other
# characters (if any) must be encoded (nor how); nor if characters in the
# other components are to be encoded or not.
#
# This implementation **assumes** that **only** the space is percent-encoded
# and it is done to the entire value (not just to spaces in the path).
#
# This code may need to be changed if it is discovered that other characters
# are expected to be encoded.
#
# Note: the "base string URI" returned by this function will be encoded
# again before being concatenated into the "signature base string". So any
# spaces in the URI will actually appear in the "signature base string"
# as "%2520" (the "%20" further encoded according to section 3.6).
return v.replace(' ', '%20')
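# Illustrative result, following the example in `section 3.4.1.2`_ of
# RFC 5849: the scheme and host are lowercased, the default port is
# dropped and the query component is discarded:
#
#     >>> base_string_uri('http://EXAMPLE.COM:80/r%20v/X?id=123')
#     'http://example.com/r%20v/X'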
def collect_parameters(uri_query='', body=None, headers=None,
exclude_oauth_signature=True, with_realm=False):
"""
Gather the request parameters from all the parameter sources.
This function is used to extract all the parameters, which are then passed
to ``normalize_parameters`` to produce one of the components that make up
the *signature base string*.
Parameters starting with `oauth_` will be unescaped.
Body parameters must be supplied as a dict, a list of 2-tuples, or a
form encoded query string.
Headers must be supplied as a dict.
The rules where the parameters must be sourced from are defined in
`section 3.4.1.3.1`_ of RFC 5849.
.. _`section 3.4.1.3.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
"""
if body is None:
body = []
headers = headers or {}
params = []
# The parameters from the following sources are collected into a single
# list of name/value pairs:
# * The query component of the HTTP request URI as defined by
# `RFC3986, Section 3.4`_. The query component is parsed into a list
# of name/value pairs by treating it as an
# "application/x-www-form-urlencoded" string, separating the names
# and values and decoding them as defined by W3C.REC-html40-19980424
# `W3C-HTML-4.0`_, Section 17.13.4.
#
# .. _`RFC3986, Section 3.4`: https://tools.ietf.org/html/rfc3986#section-3.4
# .. _`W3C-HTML-4.0`: https://www.w3.org/TR/1998/REC-html40-19980424/
if uri_query:
params.extend(urldecode(uri_query))
# * The OAuth HTTP "Authorization" header field (`Section 3.5.1`_) if
# present. The header's content is parsed into a list of name/value
# pairs excluding the "realm" parameter if present. The parameter
# values are decoded as defined by `Section 3.5.1`_.
#
# .. _`Section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1
if headers:
headers_lower = {k.lower(): v for k, v in headers.items()}
authorization_header = headers_lower.get('authorization')
if authorization_header is not None:
params.extend([i for i in utils.parse_authorization_header(
authorization_header) if with_realm or i[0] != 'realm'])
# * The HTTP request entity-body, but only if all of the following
# conditions are met:
# * The entity-body is single-part.
#
# * The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# W3C.REC-html40-19980424 `W3C-HTML-4.0`_.
# * The HTTP request entity-header includes the "Content-Type"
# header field set to "application/x-www-form-urlencoded".
#
# .. _`W3C-HTML-4.0`: https://www.w3.org/TR/1998/REC-html40-19980424/
# TODO: enforce header param inclusion conditions
bodyparams = extract_params(body) or []
params.extend(bodyparams)
# ensure all oauth params are unescaped
unescaped_params = []
for k, v in params:
if k.startswith('oauth_'):
v = utils.unescape(v)
unescaped_params.append((k, v))
# The "oauth_signature" parameter MUST be excluded from the signature
# base string if present.
if exclude_oauth_signature:
unescaped_params = list(filter(lambda i: i[0] != 'oauth_signature',
unescaped_params))
return unescaped_params
def normalize_parameters(params) -> str:
"""
Calculate the normalized request parameters.
The *normalized request parameters* is one of the components that make up
the *signature base string*.
The rules for parameter normalization are defined in `section 3.4.1.3.2`_ of
RFC 5849.
.. _`section 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
"""
# The parameters collected in `Section 3.4.1.3`_ are normalized into a
# single string as follows:
#
# .. _`Section 3.4.1.3`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3
# 1. First, the name and value of each parameter are encoded
# (`Section 3.6`_).
#
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
key_values = [(utils.escape(k), utils.escape(v)) for k, v in params]
# 2. The parameters are sorted by name, using ascending byte value
# ordering. If two or more parameters share the same name, they
# are sorted by their value.
key_values.sort()
# 3. The name of each parameter is concatenated to its corresponding
# value using an "=" character (ASCII code 61) as a separator, even
# if the value is empty.
parameter_parts = ['{}={}'.format(k, v) for k, v in key_values]
# 4. The sorted name/value pairs are concatenated together into a
# single string by using an "&" character (ASCII code 38) as
# separator.
return '&'.join(parameter_parts)
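# Illustrative result: parameters are escaped, sorted by name (and by
# value for duplicate names), then joined:
#
#     >>> normalize_parameters([('b5', '=%3D'), ('a3', 'a'), ('a3', '2 q')])
#     'a3=2%20q&a3=a&b5=%3D%253D'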
# ==== Common functions for HMAC-based signature methods =========
def _sign_hmac(hash_algorithm_name: str,
sig_base_str: str,
client_secret: str,
resource_owner_secret: str):
"""
**HMAC-based signature methods**
The HMAC-based signature methods use the HMAC signature algorithm,
parameterized with the hash algorithm named by ``hash_algorithm_name``,
as defined in `RFC4634`_::
digest = HMAC-hash (key, text)
Per `section 3.4.2`_ of the spec.
.. _`RFC4634`: https://tools.ietf.org/html/rfc4634
.. _`section 3.4.2`: https://tools.ietf.org/html/rfc5849#section-3.4.2
"""
# The HMAC function variables are used in the following way:
# text is set to the value of the signature base string from
# `Section 3.4.1.1`_.
#
# .. _`Section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1
text = sig_base_str
# key is set to the concatenated values of:
# 1. The client shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
key = utils.escape(client_secret or '')
# 2. An "&" character (ASCII code 38), which MUST be included
# even when either secret is empty.
key += '&'
# 3. The token shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
key += utils.escape(resource_owner_secret or '')
# Get the hashing algorithm to use
m = {
'SHA-1': hashlib.sha1,
'SHA-256': hashlib.sha256,
'SHA-512': hashlib.sha512,
}
hash_alg = m[hash_algorithm_name]
# Calculate the signature
# FIXME: HMAC does not support unicode!
key_utf8 = key.encode('utf-8')
text_utf8 = text.encode('utf-8')
signature = hmac.new(key_utf8, text_utf8, hash_alg)
# digest is used to set the value of the "oauth_signature" protocol
# parameter, after the result octet string is base64-encoded
# per `RFC2045, Section 6.8`_.
#
# .. _`RFC2045, Section 6.8`: https://tools.ietf.org/html/rfc2045#section-6.8
return binascii.b2a_base64(signature.digest())[:-1].decode('utf-8')
def _verify_hmac(hash_algorithm_name: str,
request,
client_secret=None,
resource_owner_secret=None):
"""Verify a HMAC-SHA1 signature.
Per `section 3.4`_ of the spec.
.. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4
To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri
attribute MUST be an absolute URI whose netloc part identifies the
origin server or gateway on which the resource resides. Any Host
item of the request argument's headers dict attribute will be
ignored.
.. _`RFC2616 section 5.2`: https://tools.ietf.org/html/rfc2616#section-5.2
"""
norm_params = normalize_parameters(request.params)
bs_uri = base_string_uri(request.uri)
sig_base_str = signature_base_string(request.http_method, bs_uri,
norm_params)
signature = _sign_hmac(hash_algorithm_name, sig_base_str,
client_secret, resource_owner_secret)
match = safe_string_equals(signature, request.signature)
if not match:
log.debug('Verify HMAC failed: signature base string: %s', sig_base_str)
return match
# ==== HMAC-SHA1 =================================================
def sign_hmac_sha1_with_client(sig_base_str, client):
return _sign_hmac('SHA-1', sig_base_str,
client.client_secret, client.resource_owner_secret)
def verify_hmac_sha1(request, client_secret=None, resource_owner_secret=None):
return _verify_hmac('SHA-1', request, client_secret, resource_owner_secret)
def sign_hmac_sha1(base_string, client_secret, resource_owner_secret):
"""
Deprecated function for calculating a HMAC-SHA1 signature.
This function has been replaced by invoking ``sign_hmac`` with "SHA-1"
as the hash algorithm name.
This function was invoked by sign_hmac_sha1_with_client and
test_signatures.py, but does any application invoke it directly? If not,
it can be removed.
"""
warnings.warn('use sign_hmac_sha1_with_client instead of sign_hmac_sha1',
DeprecationWarning)
# For some unknown reason, the original implementation assumed base_string
# could either be bytes or str. The signature base string calculating
# function always returned a str, so the new ``_sign_hmac`` only expects that.
base_string = base_string.decode('ascii') \
if isinstance(base_string, bytes) else base_string
return _sign_hmac('SHA-1', base_string,
client_secret, resource_owner_secret)
# ==== HMAC-SHA256 ===============================================
def sign_hmac_sha256_with_client(sig_base_str, client):
return _sign_hmac('SHA-256', sig_base_str,
client.client_secret, client.resource_owner_secret)
def verify_hmac_sha256(request, client_secret=None, resource_owner_secret=None):
return _verify_hmac('SHA-256', request,
client_secret, resource_owner_secret)
def sign_hmac_sha256(base_string, client_secret, resource_owner_secret):
"""
Deprecated function for calculating a HMAC-SHA256 signature.
This function has been replaced by invoking ``sign_hmac`` with "SHA-256"
as the hash algorithm name.
This function was invoked by sign_hmac_sha256_with_client and
test_signatures.py, but does any application invoke it directly? If not,
it can be removed.
"""
warnings.warn(
'use sign_hmac_sha256_with_client instead of sign_hmac_sha256',
DeprecationWarning)
# For some unknown reason, the original implementation assumed base_string
# could either be bytes or str. The signature base string calculating
# function always returned a str, so the new ``_sign_hmac`` only expects that.
base_string = base_string.decode('ascii') \
if isinstance(base_string, bytes) else base_string
return _sign_hmac('SHA-256', base_string,
client_secret, resource_owner_secret)
# ==== HMAC-SHA512 ===============================================
def sign_hmac_sha512_with_client(sig_base_str: str,
client):
return _sign_hmac('SHA-512', sig_base_str,
client.client_secret, client.resource_owner_secret)
def verify_hmac_sha512(request,
client_secret: str = None,
resource_owner_secret: str = None):
return _verify_hmac('SHA-512', request,
client_secret, resource_owner_secret)
# ==== Common functions for RSA-based signature methods ==========
_jwt_rsa = {} # cache of RSA-hash implementations from PyJWT jwt.algorithms
def _get_jwt_rsa_algorithm(hash_algorithm_name: str):
"""
Obtains an RSAAlgorithm object that implements RSA with the hash algorithm.
This method maintains the ``_jwt_rsa`` cache.
Returns a jwt.algorithm.RSAAlgorithm.
"""
if hash_algorithm_name in _jwt_rsa:
# Found in cache: return it
return _jwt_rsa[hash_algorithm_name]
else:
# Not in cache: instantiate a new RSAAlgorithm
# PyJWT has some nice pycrypto/cryptography abstractions
import jwt.algorithms as jwt_algorithms
m = {
'SHA-1': jwt_algorithms.hashes.SHA1,
'SHA-256': jwt_algorithms.hashes.SHA256,
'SHA-512': jwt_algorithms.hashes.SHA512,
}
v = jwt_algorithms.RSAAlgorithm(m[hash_algorithm_name])
_jwt_rsa[hash_algorithm_name] = v # populate cache
return v
def _prepare_key_plus(alg, keystr):
"""
Prepare a PEM encoded key (public or private), by invoking the `prepare_key`
method on alg with the keystr.
The keystr should be a string or bytes. If the keystr is bytes, it is
decoded as UTF-8 before being passed to prepare_key. Otherwise, it
is passed directly.
"""
if isinstance(keystr, bytes):
keystr = keystr.decode('utf-8')
return alg.prepare_key(keystr)
def _sign_rsa(hash_algorithm_name: str,
sig_base_str: str,
rsa_private_key: str):
"""
Calculate the signature for an RSA-based signature method.
The ``alg`` is used to calculate the digest over the signature base string.
For the "RSA_SHA1" signature method, the alg must be SHA-1. While OAuth 1.0a
only defines the RSA-SHA1 signature method, this function can be used for
other non-standard signature methods that only differ from RSA-SHA1 by the
digest algorithm.
Signing for the RSA-SHA1 signature method is defined in
`section 3.4.3`_ of RFC 5849.
    The RSASSA-PKCS1-v1_5 signature algorithm is used, as defined by
    `RFC3447, Section 8.2`_ (also known as PKCS#1), with ``alg`` as the
    hash function for EMSA-PKCS1-v1_5. To
use this method, the client MUST have established client credentials
with the server that included its RSA public key (in a manner that is
beyond the scope of this specification).
.. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3
.. _`RFC3447, Section 8.2`: https://tools.ietf.org/html/rfc3447#section-8.2
"""
# Get the implementation of RSA-hash
alg = _get_jwt_rsa_algorithm(hash_algorithm_name)
# Check private key
if not rsa_private_key:
raise ValueError('rsa_private_key required for RSA with ' +
alg.hash_alg.name + ' signature method')
# Convert the "signature base string" into a sequence of bytes (M)
#
    # The signature base string, by definition, only contains printable US-ASCII
# characters. So encoding it as 'ascii' will always work. It will raise a
# ``UnicodeError`` if it can't encode the value, which will never happen
# if the signature base string was created correctly. Therefore, using
# 'ascii' encoding provides an extra level of error checking.
m = sig_base_str.encode('ascii')
# Perform signing: S = RSASSA-PKCS1-V1_5-SIGN (K, M)
key = _prepare_key_plus(alg, rsa_private_key)
s = alg.sign(m, key)
# base64-encoded per RFC2045 section 6.8.
#
    # 1. b2a_base64 implements base64 as defined by RFC 3548; as used here,
    #    it is the same as base64 defined by RFC 2045.
    # 2. b2a_base64 includes a "\n" at the end of its result ([:-1] removes it)
    # 3. b2a_base64 produces a binary string. Use decode to produce a str.
    #    It should contain only printable US-ASCII characters.
return binascii.b2a_base64(s)[:-1].decode('ascii')
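# The base64 note above can be checked directly: for any byte string,
# ``binascii.b2a_base64(s)[:-1]`` equals ``base64.b64encode(s)`` (a small
# standalone check, independent of this module):
import base64
import binascii
assert (binascii.b2a_base64(b'\x00\xffOAuth')[:-1] ==
        base64.b64encode(b'\x00\xffOAuth'))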
def _verify_rsa(hash_algorithm_name: str,
request,
rsa_public_key: str):
"""
    Verify a base64-encoded signature for an RSA-based signature method.
    The ``alg`` is used to calculate the digest over the signature base string.
    For the "RSA-SHA1" signature method, the alg must be SHA-1. While OAuth 1.0a
only defines the RSA-SHA1 signature method, this function can be used for
other non-standard signature methods that only differ from RSA-SHA1 by the
digest algorithm.
Verification for the RSA-SHA1 signature method is defined in
`section 3.4.3`_ of RFC 5849.
.. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3
To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri
attribute MUST be an absolute URI whose netloc part identifies the
origin server or gateway on which the resource resides. Any Host
item of the request argument's headers dict attribute will be
ignored.
    .. _`RFC2616 section 5.2`: https://tools.ietf.org/html/rfc2616#section-5.2
"""
try:
# Calculate the *signature base string* of the actual received request
norm_params = normalize_parameters(request.params)
bs_uri = base_string_uri(request.uri)
sig_base_str = signature_base_string(
request.http_method, bs_uri, norm_params)
# Obtain the signature that was received in the request
sig = binascii.a2b_base64(request.signature.encode('ascii'))
# Get the implementation of RSA-with-hash algorithm to use
alg = _get_jwt_rsa_algorithm(hash_algorithm_name)
# Verify the received signature was produced by the private key
# corresponding to the `rsa_public_key`, signing exact same
# *signature base string*.
#
# RSASSA-PKCS1-V1_5-VERIFY ((n, e), M, S)
key = _prepare_key_plus(alg, rsa_public_key)
        # The signature base string only contains printable US-ASCII characters.
# The ``encode`` method with the default "strict" error handling will
# raise a ``UnicodeError`` if it can't encode the value. So using
# "ascii" will always work.
verify_ok = alg.verify(sig_base_str.encode('ascii'), key, sig)
if not verify_ok:
            log.debug('Verify failed: RSA with %s: signature base string=%s',
                      alg.hash_alg.name, sig_base_str)
return verify_ok
except UnicodeError:
# A properly encoded signature will only contain printable US-ASCII
# characters. The ``encode`` method with the default "strict" error
        # handling will raise a ``UnicodeError`` if it can't encode the value.
# So using "ascii" will work with all valid signatures. But an
# incorrectly or maliciously produced signature could contain other
# bytes.
#
# This implementation treats that situation as equivalent to the
# signature verification having failed.
#
# Note: simply changing the encode to use 'utf-8' will not remove this
# case, since an incorrect or malicious request can contain bytes which
# are invalid as UTF-8.
return False
# ==== RSA-SHA1 ==================================================
def sign_rsa_sha1_with_client(sig_base_str, client):
    # For some reason, this function originally accepts both str and bytes.
    # That behaviour is preserved here, but is not extended to the newer
    # sign_rsa_sha256_with_client and sign_rsa_sha512_with_client functions,
    # which only accept strings. The function that calculates a
    # "signature base string" always produces a string, so it is not clear
    # why support for bytes would ever be needed.
sig_base_str = sig_base_str.decode('ascii')\
if isinstance(sig_base_str, bytes) else sig_base_str
return _sign_rsa('SHA-1', sig_base_str, client.rsa_key)
def verify_rsa_sha1(request, rsa_public_key: str):
return _verify_rsa('SHA-1', request, rsa_public_key)
def sign_rsa_sha1(base_string, rsa_private_key):
"""
Deprecated function for calculating a RSA-SHA1 signature.
This function has been replaced by invoking ``sign_rsa`` with "SHA-1"
as the hash algorithm name.
This function was invoked by sign_rsa_sha1_with_client and
test_signatures.py, but does any application invoke it directly? If not,
it can be removed.
"""
warnings.warn('use _sign_rsa("SHA-1", ...) instead of sign_rsa_sha1',
DeprecationWarning)
if isinstance(base_string, bytes):
base_string = base_string.decode('ascii')
return _sign_rsa('SHA-1', base_string, rsa_private_key)
# ==== RSA-SHA256 ================================================
def sign_rsa_sha256_with_client(sig_base_str: str, client):
return _sign_rsa('SHA-256', sig_base_str, client.rsa_key)
def verify_rsa_sha256(request, rsa_public_key: str):
return _verify_rsa('SHA-256', request, rsa_public_key)
# ==== RSA-SHA512 ================================================
def sign_rsa_sha512_with_client(sig_base_str: str, client):
return _sign_rsa('SHA-512', sig_base_str, client.rsa_key)
def verify_rsa_sha512(request, rsa_public_key: str):
return _verify_rsa('SHA-512', request, rsa_public_key)
# ==== PLAINTEXT =================================================
def sign_plaintext_with_client(_signature_base_string, client):
# _signature_base_string is not used because the signature with PLAINTEXT
# is just the secret: it isn't a real signature.
return sign_plaintext(client.client_secret, client.resource_owner_secret)
def sign_plaintext(client_secret, resource_owner_secret):
"""Sign a request using plaintext.
Per `section 3.4.4`_ of the spec.
The "PLAINTEXT" method does not employ a signature algorithm. It
MUST be used with a transport-layer mechanism such as TLS or SSL (or
sent over a secure channel with equivalent protections). It does not
utilize the signature base string or the "oauth_timestamp" and
"oauth_nonce" parameters.
.. _`section 3.4.4`: https://tools.ietf.org/html/rfc5849#section-3.4.4
"""
# The "oauth_signature" protocol parameter is set to the concatenated
# value of:
# 1. The client shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
signature = utils.escape(client_secret or '')
# 2. An "&" character (ASCII code 38), which MUST be included even
# when either secret is empty.
signature += '&'
# 3. The token shared-secret, after being encoded (`Section 3.6`_).
#
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
signature += utils.escape(resource_owner_secret or '')
return signature
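# A standalone check of the PLAINTEXT composition above, using the example
# secrets from RFC 5849 (they contain no characters that need escaping, so
# percent-encoding leaves them unchanged):
assert (sign_plaintext('kd94hf93k423kf44', 'pfkkdhi9sl3r4s00') ==
        'kd94hf93k423kf44&pfkkdhi9sl3r4s00')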
def verify_plaintext(request, client_secret=None, resource_owner_secret=None):
"""Verify a PLAINTEXT signature.
Per `section 3.4`_ of the spec.
.. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4
"""
signature = sign_plaintext(client_secret, resource_owner_secret)
match = safe_string_equals(signature, request.signature)
if not match:
log.debug('Verify PLAINTEXT failed')
return match
| 32,024 | Python | .py | 624 | 44.766026 | 80 | 0.665373 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,787 | access_token.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/endpoints/access_token.py |
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.access_token
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of the access token provider logic of
OAuth 1.0 RFC 5849. It validates the correctness of access token requests,
creates and persists tokens as well as create the proper response to be
returned to the client.
"""
import logging
from oauthlib.common import urlencode
from .. import errors
from .base import BaseEndpoint
log = logging.getLogger(__name__)
class AccessTokenEndpoint(BaseEndpoint):
"""An endpoint responsible for providing OAuth 1 access tokens.
Typical use is to instantiate with a request validator and invoke the
``create_access_token_response`` from a view function. The tuple returned
has all information necessary (body, status, headers) to quickly form
and return a proper response. See :doc:`/oauth1/validator` for details on which
validator methods to implement for this endpoint.
"""
def create_access_token(self, request, credentials):
"""Create and save a new access token.
Similar to OAuth 2, indication of granted scopes will be included as a
space separated list in ``oauth_authorized_realms``.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:returns: The token as an urlencoded string.
"""
request.realms = self.request_validator.get_realms(
request.resource_owner_key, request)
token = {
'oauth_token': self.token_generator(),
'oauth_token_secret': self.token_generator(),
# Backport the authorized scopes indication used in OAuth2
'oauth_authorized_realms': ' '.join(request.realms)
}
token.update(credentials)
self.request_validator.save_access_token(token, request)
return urlencode(token.items())
def create_access_token_response(self, uri, http_method='GET', body=None,
headers=None, credentials=None):
"""Create an access token response, with a new request token if valid.
:param uri: The full URI of the token request.
:param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
:param body: The request body as a string.
:param headers: The request headers as a dict.
        :param credentials: A dict of extra credentials to include in the token.
:returns: A tuple of 3 elements.
1. A dict of headers to set on the response.
2. The response body as a string.
3. The response status code as an integer.
An example of a valid request::
>>> from your_validator import your_validator
>>> from oauthlib.oauth1 import AccessTokenEndpoint
>>> endpoint = AccessTokenEndpoint(your_validator)
>>> h, b, s = endpoint.create_access_token_response(
... 'https://your.provider/access_token?foo=bar',
... headers={
... 'Authorization': 'OAuth oauth_token=234lsdkf....'
... },
... credentials={
... 'my_specific': 'argument',
... })
>>> h
{'Content-Type': 'application/x-www-form-urlencoded'}
>>> b
'oauth_token=lsdkfol23w54jlksdef&oauth_token_secret=qwe089234lkjsdf&oauth_authorized_realms=movies+pics&my_specific=argument'
>>> s
200
        A response to an invalid request would have a different body and status::
>>> b
'error=invalid_request&description=missing+resource+owner+key'
>>> s
400
        The same goes for an unauthorized request::
>>> b
''
>>> s
401
"""
resp_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
try:
request = self._create_request(uri, http_method, body, headers)
valid, processed_request = self.validate_access_token_request(
request)
if valid:
token = self.create_access_token(request, credentials or {})
self.request_validator.invalidate_request_token(
request.client_key,
request.resource_owner_key,
request)
return resp_headers, token, 200
else:
return {}, None, 401
except errors.OAuth1Error as e:
return resp_headers, e.urlencoded, e.status_code
def validate_access_token_request(self, request):
"""Validate an access token request.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:raises: OAuth1Error if the request is invalid.
:returns: A tuple of 2 elements.
1. The validation result (True or False).
2. The request object.
"""
self._check_transport_security(request)
self._check_mandatory_parameters(request)
if not request.resource_owner_key:
raise errors.InvalidRequestError(
description='Missing resource owner.')
if not self.request_validator.check_request_token(
request.resource_owner_key):
raise errors.InvalidRequestError(
description='Invalid resource owner key format.')
if not request.verifier:
raise errors.InvalidRequestError(
description='Missing verifier.')
if not self.request_validator.check_verifier(request.verifier):
raise errors.InvalidRequestError(
description='Invalid verifier format.')
if not self.request_validator.validate_timestamp_and_nonce(
request.client_key, request.timestamp, request.nonce, request,
request_token=request.resource_owner_key):
return False, request
# The server SHOULD return a 401 (Unauthorized) status code when
# receiving a request with invalid client credentials.
# Note: This is postponed in order to avoid timing attacks, instead
# a dummy client is assigned and used to maintain near constant
# time request verification.
#
# Note that early exit would enable client enumeration
valid_client = self.request_validator.validate_client_key(
request.client_key, request)
if not valid_client:
request.client_key = self.request_validator.dummy_client
# The server SHOULD return a 401 (Unauthorized) status code when
# receiving a request with invalid or expired token.
# Note: This is postponed in order to avoid timing attacks, instead
# a dummy token is assigned and used to maintain near constant
# time request verification.
#
# Note that early exit would enable resource owner enumeration
valid_resource_owner = self.request_validator.validate_request_token(
request.client_key, request.resource_owner_key, request)
if not valid_resource_owner:
request.resource_owner_key = self.request_validator.dummy_request_token
# The server MUST verify (Section 3.2) the validity of the request,
# ensure that the resource owner has authorized the provisioning of
# token credentials to the client, and ensure that the temporary
# credentials have not expired or been used before. The server MUST
# also verify the verification code received from the client.
# .. _`Section 3.2`: https://tools.ietf.org/html/rfc5849#section-3.2
#
# Note that early exit would enable resource owner authorization
        # verifier enumeration.
valid_verifier = self.request_validator.validate_verifier(
request.client_key,
request.resource_owner_key,
request.verifier,
request)
valid_signature = self._check_signature(request, is_token_request=True)
# log the results to the validator_log
# this lets us handle internal reporting and analysis
request.validator_log['client'] = valid_client
request.validator_log['resource_owner'] = valid_resource_owner
request.validator_log['verifier'] = valid_verifier
request.validator_log['signature'] = valid_signature
# We delay checking validity until the very end, using dummy values for
# calculations and fetching secrets/keys to ensure the flow of every
# request remains almost identical regardless of whether valid values
# have been supplied. This ensures near constant time execution and
# prevents malicious users from guessing sensitive information
v = all((valid_client, valid_resource_owner, valid_verifier,
valid_signature))
if not v:
log.info("[Failure] request verification failed.")
log.info("Valid client:, %s", valid_client)
log.info("Valid token:, %s", valid_resource_owner)
log.info("Valid verifier:, %s", valid_verifier)
log.info("Valid signature:, %s", valid_signature)
return v, request
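# A minimal sketch of wiring this endpoint into a framework-agnostic view;
# the ``validator`` argument is hypothetical and must implement the validator
# methods documented for this endpoint:
def _sketch_access_token_view(validator, uri, method, body, headers):
    endpoint = AccessTokenEndpoint(validator)
    resp_headers, resp_body, status = endpoint.create_access_token_response(
        uri, http_method=method, body=body, headers=headers)
    return resp_headers, resp_body, status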
| 9,347 | Python | .py | 183 | 40.415301 | 137 | 0.638414 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,788 | pre_configured.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py |
from . import (
AccessTokenEndpoint, AuthorizationEndpoint, RequestTokenEndpoint,
ResourceEndpoint,
)
class WebApplicationServer(RequestTokenEndpoint, AuthorizationEndpoint,
AccessTokenEndpoint, ResourceEndpoint):
def __init__(self, request_validator):
RequestTokenEndpoint.__init__(self, request_validator)
AuthorizationEndpoint.__init__(self, request_validator)
AccessTokenEndpoint.__init__(self, request_validator)
ResourceEndpoint.__init__(self, request_validator)
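# A minimal sketch of building the pre-configured server; ``ExampleValidator``
# is hypothetical, and a real validator overrides the check_*/validate_*/get_*
# methods documented for each endpoint:
def _sketch_build_server():
    from oauthlib.oauth1 import RequestValidator

    class ExampleValidator(RequestValidator):
        pass  # override the required validator methods here

    return WebApplicationServer(ExampleValidator())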
| 543 | Python | .py | 11 | 41.636364 | 71 | 0.729679 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,789 | authorization.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/endpoints/authorization.py |
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.authorization
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
from urllib.parse import urlencode
from oauthlib.common import add_params_to_uri
from .. import errors
from .base import BaseEndpoint
class AuthorizationEndpoint(BaseEndpoint):
"""An endpoint responsible for letting authenticated users authorize access
to their protected resources to a client.
Typical use would be to have two views, one for displaying the authorization
form and one to process said form on submission.
The first view will want to utilize ``get_realms_and_credentials`` to fetch
requested realms and useful client credentials, such as name and
description, to be used when creating the authorization form.
During form processing you can use ``create_authorization_response`` to
validate the request, create a verifier as well as prepare the final
redirection URI used to send the user back to the client.
See :doc:`/oauth1/validator` for details on which validator methods to implement
for this endpoint.
"""
def create_verifier(self, request, credentials):
"""Create and save a new request token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:param credentials: A dict of extra token credentials.
:returns: The verifier as a dict.
"""
verifier = {
'oauth_token': request.resource_owner_key,
'oauth_verifier': self.token_generator(),
}
verifier.update(credentials)
self.request_validator.save_verifier(
request.resource_owner_key, verifier, request)
return verifier
def create_authorization_response(self, uri, http_method='GET', body=None,
headers=None, realms=None, credentials=None):
"""Create an authorization response, with a new request token if valid.
:param uri: The full URI of the token request.
:param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
:param body: The request body as a string.
:param headers: The request headers as a dict.
        :param credentials: A dict of credentials to include in the verifier.
:returns: A tuple of 3 elements.
1. A dict of headers to set on the response.
2. The response body as a string.
3. The response status code as an integer.
If the callback URI tied to the current token is "oob", a response with
a 200 status code will be returned. In this case, it may be desirable to
modify the response to better display the verifier to the client.
An example of an authorization request::
>>> from your_validator import your_validator
>>> from oauthlib.oauth1 import AuthorizationEndpoint
>>> endpoint = AuthorizationEndpoint(your_validator)
>>> h, b, s = endpoint.create_authorization_response(
... 'https://your.provider/authorize?oauth_token=...',
... credentials={
... 'extra': 'argument',
... })
>>> h
{'Location': 'https://the.client/callback?oauth_verifier=...&extra=argument'}
>>> b
None
>>> s
302
An example of a request with an "oob" callback::
>>> from your_validator import your_validator
>>> from oauthlib.oauth1 import AuthorizationEndpoint
>>> endpoint = AuthorizationEndpoint(your_validator)
>>> h, b, s = endpoint.create_authorization_response(
... 'https://your.provider/authorize?foo=bar',
... credentials={
... 'extra': 'argument',
... })
>>> h
{'Content-Type': 'application/x-www-form-urlencoded'}
>>> b
'oauth_verifier=...&extra=argument'
>>> s
200
"""
request = self._create_request(uri, http_method=http_method, body=body,
headers=headers)
if not request.resource_owner_key:
raise errors.InvalidRequestError(
'Missing mandatory parameter oauth_token.')
if not self.request_validator.verify_request_token(
request.resource_owner_key, request):
raise errors.InvalidClientError()
request.realms = realms
if (request.realms and not self.request_validator.verify_realms(
request.resource_owner_key, request.realms, request)):
raise errors.InvalidRequestError(
description=('User granted access to realms outside of '
'what the client may request.'))
verifier = self.create_verifier(request, credentials or {})
redirect_uri = self.request_validator.get_redirect_uri(
request.resource_owner_key, request)
if redirect_uri == 'oob':
response_headers = {
'Content-Type': 'application/x-www-form-urlencoded'}
response_body = urlencode(verifier)
return response_headers, response_body, 200
else:
populated_redirect = add_params_to_uri(
redirect_uri, verifier.items())
return {'Location': populated_redirect}, None, 302
def get_realms_and_credentials(self, uri, http_method='GET', body=None,
headers=None):
"""Fetch realms and credentials for the presented request token.
:param uri: The full URI of the token request.
:param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
:param body: The request body as a string.
:param headers: The request headers as a dict.
:returns: A tuple of 2 elements.
1. A list of request realms.
2. A dict of credentials which may be useful in creating the
authorization form.
"""
request = self._create_request(uri, http_method=http_method, body=body,
headers=headers)
if not self.request_validator.verify_request_token(
request.resource_owner_key, request):
raise errors.InvalidClientError()
realms = self.request_validator.get_realms(
request.resource_owner_key, request)
return realms, {'resource_owner_key': request.resource_owner_key}
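# A minimal sketch of the two-view flow described in the class docstring;
# ``endpoint`` is an AuthorizationEndpoint and ``uri`` the incoming request
# URI, both assumed to be supplied by the web framework:
def _sketch_show_form(endpoint, uri):
    # First view: fetch realms and client credentials to render the form.
    realms, credentials = endpoint.get_realms_and_credentials(uri)
    return realms, credentials

def _sketch_process_form(endpoint, uri, approved_realms):
    # Second view: on submission, create the verifier and redirect (or, for
    # "oob" callbacks, return a 200 with the verifier in the body).
    return endpoint.create_authorization_response(
        uri, http_method='POST', realms=approved_realms)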
| 6,724 | Python | .py | 132 | 39.742424 | 89 | 0.616966 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,790 | __init__.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/endpoints/__init__.py |
from .access_token import AccessTokenEndpoint
from .authorization import AuthorizationEndpoint
from .base import BaseEndpoint
from .request_token import RequestTokenEndpoint
from .resource import ResourceEndpoint
from .signature_only import SignatureOnlyEndpoint
from .pre_configured import WebApplicationServer # isort:skip
| 327 | Python | .py | 7 | 45.571429 | 62 | 0.884013 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,791 | base.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/endpoints/base.py |
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.base
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
import time
from oauthlib.common import CaseInsensitiveDict, Request, generate_token
from .. import (
CONTENT_TYPE_FORM_URLENCODED,
SIGNATURE_HMAC_SHA1, SIGNATURE_HMAC_SHA256, SIGNATURE_HMAC_SHA512,
SIGNATURE_RSA_SHA1, SIGNATURE_RSA_SHA256, SIGNATURE_RSA_SHA512,
SIGNATURE_PLAINTEXT,
SIGNATURE_TYPE_AUTH_HEADER, SIGNATURE_TYPE_BODY,
SIGNATURE_TYPE_QUERY, errors, signature, utils)
class BaseEndpoint:
def __init__(self, request_validator, token_generator=None):
self.request_validator = request_validator
self.token_generator = token_generator or generate_token
def _get_signature_type_and_params(self, request):
"""Extracts parameters from query, headers and body. Signature type
is set to the source in which parameters were found.
"""
# Per RFC5849, only the Authorization header may contain the 'realm'
# optional parameter.
header_params = signature.collect_parameters(headers=request.headers,
exclude_oauth_signature=False, with_realm=True)
body_params = signature.collect_parameters(body=request.body,
exclude_oauth_signature=False)
query_params = signature.collect_parameters(uri_query=request.uri_query,
exclude_oauth_signature=False)
params = []
params.extend(header_params)
params.extend(body_params)
params.extend(query_params)
signature_types_with_oauth_params = list(filter(lambda s: s[2], (
(SIGNATURE_TYPE_AUTH_HEADER, params,
utils.filter_oauth_params(header_params)),
(SIGNATURE_TYPE_BODY, params,
utils.filter_oauth_params(body_params)),
(SIGNATURE_TYPE_QUERY, params,
utils.filter_oauth_params(query_params))
)))
if len(signature_types_with_oauth_params) > 1:
found_types = [s[0] for s in signature_types_with_oauth_params]
            raise errors.InvalidRequestError(
                description=('oauth_ params must come from only 1 signature '
                             'type but were found in %s' %
                             ', '.join(found_types)))
try:
signature_type, params, oauth_params = signature_types_with_oauth_params[
0]
except IndexError:
raise errors.InvalidRequestError(
description='Missing mandatory OAuth parameters.')
return signature_type, params, oauth_params
def _create_request(self, uri, http_method, body, headers):
# Only include body data from x-www-form-urlencoded requests
headers = CaseInsensitiveDict(headers or {})
if ("Content-Type" in headers and
CONTENT_TYPE_FORM_URLENCODED in headers["Content-Type"]):
request = Request(uri, http_method, body, headers)
else:
request = Request(uri, http_method, '', headers)
signature_type, params, oauth_params = (
self._get_signature_type_and_params(request))
# The server SHOULD return a 400 (Bad Request) status code when
# receiving a request with duplicated protocol parameters.
if len(dict(oauth_params)) != len(oauth_params):
raise errors.InvalidRequestError(
description='Duplicate OAuth1 entries.')
oauth_params = dict(oauth_params)
request.signature = oauth_params.get('oauth_signature')
request.client_key = oauth_params.get('oauth_consumer_key')
request.resource_owner_key = oauth_params.get('oauth_token')
request.nonce = oauth_params.get('oauth_nonce')
request.timestamp = oauth_params.get('oauth_timestamp')
request.redirect_uri = oauth_params.get('oauth_callback')
request.verifier = oauth_params.get('oauth_verifier')
request.signature_method = oauth_params.get('oauth_signature_method')
request.realm = dict(params).get('realm')
request.oauth_params = oauth_params
# Parameters to Client depend on signature method which may vary
# for each request. Note that HMAC-SHA1 and PLAINTEXT share parameters
request.params = [(k, v) for k, v in params if k != "oauth_signature"]
if 'realm' in request.headers.get('Authorization', ''):
request.params = [(k, v)
for k, v in request.params if k != "realm"]
return request
def _check_transport_security(self, request):
# TODO: move into oauthlib.common from oauth2.utils
if (self.request_validator.enforce_ssl and
not request.uri.lower().startswith("https://")):
raise errors.InsecureTransportError()
def _check_mandatory_parameters(self, request):
# The server SHOULD return a 400 (Bad Request) status code when
# receiving a request with missing parameters.
if not all((request.signature, request.client_key,
request.nonce, request.timestamp,
request.signature_method)):
raise errors.InvalidRequestError(
description='Missing mandatory OAuth parameters.')
# OAuth does not mandate a particular signature method, as each
# implementation can have its own unique requirements. Servers are
# free to implement and document their own custom methods.
# Recommending any particular method is beyond the scope of this
# specification. Implementers should review the Security
# Considerations section (`Section 4`_) before deciding on which
# method to support.
# .. _`Section 4`: https://tools.ietf.org/html/rfc5849#section-4
        if (request.signature_method not in
                self.request_validator.allowed_signature_methods):
raise errors.InvalidSignatureMethodError(
description="Invalid signature, {} not in {!r}.".format(
request.signature_method,
self.request_validator.allowed_signature_methods))
# Servers receiving an authenticated request MUST validate it by:
# If the "oauth_version" parameter is present, ensuring its value is
# "1.0".
if ('oauth_version' in request.oauth_params and
request.oauth_params['oauth_version'] != '1.0'):
raise errors.InvalidRequestError(
description='Invalid OAuth version.')
# The timestamp value MUST be a positive integer. Unless otherwise
# specified by the server's documentation, the timestamp is expressed
# in the number of seconds since January 1, 1970 00:00:00 GMT.
if len(request.timestamp) != 10:
raise errors.InvalidRequestError(
                description='Invalid timestamp size.')
try:
ts = int(request.timestamp)
except ValueError:
raise errors.InvalidRequestError(
description='Timestamp must be an integer.')
else:
# To avoid the need to retain an infinite number of nonce values for
# future checks, servers MAY choose to restrict the time period after
# which a request with an old timestamp is rejected.
if abs(time.time() - ts) > self.request_validator.timestamp_lifetime:
raise errors.InvalidRequestError(
                    description=('Timestamp given is invalid, it differs from '
                                 'the allowed time by over %s seconds.' % (
self.request_validator.timestamp_lifetime)))
# Provider specific validation of parameters, used to enforce
# restrictions such as character set and length.
if not self.request_validator.check_client_key(request.client_key):
raise errors.InvalidRequestError(
description='Invalid client key format.')
if not self.request_validator.check_nonce(request.nonce):
raise errors.InvalidRequestError(
description='Invalid nonce format.')
def _check_signature(self, request, is_token_request=False):
# ---- RSA Signature verification ----
if request.signature_method == SIGNATURE_RSA_SHA1 or \
request.signature_method == SIGNATURE_RSA_SHA256 or \
request.signature_method == SIGNATURE_RSA_SHA512:
# RSA-based signature method
# The server verifies the signature per `[RFC3447] section 8.2.2`_
            # .. _`[RFC3447] section 8.2.2`: https://tools.ietf.org/html/rfc3447#section-8.2.2
rsa_key = self.request_validator.get_rsa_key(
request.client_key, request)
if request.signature_method == SIGNATURE_RSA_SHA1:
valid_signature = signature.verify_rsa_sha1(request, rsa_key)
elif request.signature_method == SIGNATURE_RSA_SHA256:
valid_signature = signature.verify_rsa_sha256(request, rsa_key)
elif request.signature_method == SIGNATURE_RSA_SHA512:
valid_signature = signature.verify_rsa_sha512(request, rsa_key)
else:
valid_signature = False
# ---- HMAC or Plaintext Signature verification ----
else:
# Non-RSA based signature method
# Servers receiving an authenticated request MUST validate it by:
# Recalculating the request signature independently as described in
# `Section 3.4`_ and comparing it to the value received from the
# client via the "oauth_signature" parameter.
# .. _`Section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4
client_secret = self.request_validator.get_client_secret(
request.client_key, request)
resource_owner_secret = None
if request.resource_owner_key:
if is_token_request:
resource_owner_secret = \
self.request_validator.get_request_token_secret(
request.client_key, request.resource_owner_key,
request)
else:
resource_owner_secret = \
self.request_validator.get_access_token_secret(
request.client_key, request.resource_owner_key,
request)
if request.signature_method == SIGNATURE_HMAC_SHA1:
valid_signature = signature.verify_hmac_sha1(
request, client_secret, resource_owner_secret)
elif request.signature_method == SIGNATURE_HMAC_SHA256:
valid_signature = signature.verify_hmac_sha256(
request, client_secret, resource_owner_secret)
elif request.signature_method == SIGNATURE_HMAC_SHA512:
valid_signature = signature.verify_hmac_sha512(
request, client_secret, resource_owner_secret)
elif request.signature_method == SIGNATURE_PLAINTEXT:
valid_signature = signature.verify_plaintext(
request, client_secret, resource_owner_secret)
else:
valid_signature = False
return valid_signature
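# The duplicate-parameter check in ``BaseEndpoint._create_request`` above
# relies on ``dict()`` collapsing repeated keys; a standalone illustration
# with a hypothetical parameter list:
_sketch_params = [('oauth_nonce', 'a'), ('oauth_nonce', 'b')]
assert len(dict(_sketch_params)) != len(_sketch_params)  # duplicates -> 400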
| 11,643 | Python | .py | 207 | 43.15942 | 100 | 0.623443 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,792 | resource.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/endpoints/resource.py |
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.resource
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of the resource protection provider logic of
OAuth 1.0 RFC 5849.
"""
import logging
from .. import errors
from .base import BaseEndpoint
log = logging.getLogger(__name__)
class ResourceEndpoint(BaseEndpoint):
"""An endpoint responsible for protecting resources.
Typical use is to instantiate with a request validator and invoke the
``validate_protected_resource_request`` in a decorator around a view
function. If the request is valid, invoke and return the response of the
view. If invalid create and return an error response directly from the
decorator.
See :doc:`/oauth1/validator` for details on which validator methods to implement
for this endpoint.
An example decorator::
from functools import wraps
from your_validator import your_validator
from oauthlib.oauth1 import ResourceEndpoint
endpoint = ResourceEndpoint(your_validator)
def require_oauth(realms=None):
def decorator(f):
@wraps(f)
def wrapper(request, *args, **kwargs):
                v, r = endpoint.validate_protected_resource_request(
request.url,
http_method=request.method,
body=request.data,
headers=request.headers,
realms=realms or [])
if v:
return f(*args, **kwargs)
else:
return abort(403)
"""
def validate_protected_resource_request(self, uri, http_method='GET',
body=None, headers=None, realms=None):
"""Create a request token response, with a new request token if valid.
:param uri: The full URI of the token request.
:param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
:param body: The request body as a string.
:param headers: The request headers as a dict.
:param realms: A list of realms the resource is protected under.
This will be supplied to the ``validate_realms``
method of the request validator.
:returns: A tuple of 2 elements.
1. True if valid, False otherwise.
2. An oauthlib.common.Request object.
"""
try:
request = self._create_request(uri, http_method, body, headers)
except errors.OAuth1Error:
return False, None
try:
self._check_transport_security(request)
self._check_mandatory_parameters(request)
except errors.OAuth1Error:
return False, request
if not request.resource_owner_key:
return False, request
if not self.request_validator.check_access_token(
request.resource_owner_key):
return False, request
if not self.request_validator.validate_timestamp_and_nonce(
request.client_key, request.timestamp, request.nonce, request,
access_token=request.resource_owner_key):
return False, request
# The server SHOULD return a 401 (Unauthorized) status code when
# receiving a request with invalid client credentials.
# Note: This is postponed in order to avoid timing attacks, instead
# a dummy client is assigned and used to maintain near constant
# time request verification.
#
# Note that early exit would enable client enumeration
valid_client = self.request_validator.validate_client_key(
request.client_key, request)
if not valid_client:
request.client_key = self.request_validator.dummy_client
# The server SHOULD return a 401 (Unauthorized) status code when
# receiving a request with invalid or expired token.
# Note: This is postponed in order to avoid timing attacks, instead
# a dummy token is assigned and used to maintain near constant
# time request verification.
#
# Note that early exit would enable resource owner enumeration
valid_resource_owner = self.request_validator.validate_access_token(
request.client_key, request.resource_owner_key, request)
if not valid_resource_owner:
request.resource_owner_key = self.request_validator.dummy_access_token
# Note that `realm`_ is only used in authorization headers and how
        # it should be interpreted is not included in the OAuth spec.
# However they could be seen as a scope or realm to which the
# client has access and as such every client should be checked
# to ensure it is authorized access to that scope or realm.
# .. _`realm`: https://tools.ietf.org/html/rfc2617#section-1.2
#
# Note that early exit would enable client realm access enumeration.
#
# The require_realm indicates this is the first step in the OAuth
# workflow where a client requests access to a specific realm.
# This first step (obtaining request token) need not require a realm
# and can then be identified by checking the require_resource_owner
        # flag and absence of realm.
#
# Clients obtaining an access token will not supply a realm and it will
# not be checked. Instead the previously requested realm should be
# transferred from the request token to the access token.
#
# Access to protected resources will always validate the realm but note
# that the realm is now tied to the access token and not provided by
# the client.
valid_realm = self.request_validator.validate_realms(request.client_key,
request.resource_owner_key, request, uri=request.uri,
realms=realms)
valid_signature = self._check_signature(request)
# log the results to the validator_log
# this lets us handle internal reporting and analysis
request.validator_log['client'] = valid_client
request.validator_log['resource_owner'] = valid_resource_owner
request.validator_log['realm'] = valid_realm
request.validator_log['signature'] = valid_signature
# We delay checking validity until the very end, using dummy values for
# calculations and fetching secrets/keys to ensure the flow of every
# request remains almost identical regardless of whether valid values
# have been supplied. This ensures near constant time execution and
# prevents malicious users from guessing sensitive information
v = all((valid_client, valid_resource_owner, valid_realm,
valid_signature))
if not v:
log.info("[Failure] request verification failed.")
log.info("Valid client: %s", valid_client)
log.info("Valid token: %s", valid_resource_owner)
log.info("Valid realm: %s", valid_realm)
log.info("Valid signature: %s", valid_signature)
return v, request
| 7,376 | Python | .py | 140 | 41.085714 | 114 | 0.635242 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,793 | signature_only.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/endpoints/signature_only.py |
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.signature_only
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of the signing logic of OAuth 1.0 RFC 5849.
"""
import logging
from .. import errors
from .base import BaseEndpoint
log = logging.getLogger(__name__)
class SignatureOnlyEndpoint(BaseEndpoint):
"""An endpoint only responsible for verifying an oauth signature."""
def validate_request(self, uri, http_method='GET',
body=None, headers=None):
"""Validate a signed OAuth request.
        :param uri: The full URI of the signed request.
:param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
:param body: The request body as a string.
:param headers: The request headers as a dict.
:returns: A tuple of 2 elements.
1. True if valid, False otherwise.
2. An oauthlib.common.Request object.
"""
try:
request = self._create_request(uri, http_method, body, headers)
except errors.OAuth1Error as err:
            log.info(
                'Exception caught while validating request, %s.', err)
return False, None
try:
self._check_transport_security(request)
self._check_mandatory_parameters(request)
except errors.OAuth1Error as err:
            log.info(
                'Exception caught while validating request, %s.', err)
return False, request
if not self.request_validator.validate_timestamp_and_nonce(
request.client_key, request.timestamp, request.nonce, request):
log.debug('[Failure] verification failed: timestamp/nonce')
return False, request
# The server SHOULD return a 401 (Unauthorized) status code when
# receiving a request with invalid client credentials.
# Note: This is postponed in order to avoid timing attacks, instead
# a dummy client is assigned and used to maintain near constant
# time request verification.
#
# Note that early exit would enable client enumeration
valid_client = self.request_validator.validate_client_key(
request.client_key, request)
if not valid_client:
request.client_key = self.request_validator.dummy_client
valid_signature = self._check_signature(request)
# log the results to the validator_log
# this lets us handle internal reporting and analysis
request.validator_log['client'] = valid_client
request.validator_log['signature'] = valid_signature
# We delay checking validity until the very end, using dummy values for
# calculations and fetching secrets/keys to ensure the flow of every
# request remains almost identical regardless of whether valid values
# have been supplied. This ensures near constant time execution and
# prevents malicious users from guessing sensitive information
v = all((valid_client, valid_signature))
if not v:
log.info("[Failure] request verification failed.")
log.info("Valid client: %s", valid_client)
log.info("Valid signature: %s", valid_signature)
return v, request
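# A minimal sketch of gating a request on signature validity alone; the
# ``validator`` and request values are assumed to come from the caller:
def _sketch_is_signed(validator, uri, method, body, headers):
    endpoint = SignatureOnlyEndpoint(validator)
    valid, _ = endpoint.validate_request(
        uri, http_method=method, body=body, headers=headers)
    return valid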
| 3,327 | Python | .py | 67 | 40.119403 | 79 | 0.646841 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,794 | request_token.py | rembo10_headphones/lib/oauthlib/oauth1/rfc5849/endpoints/request_token.py |
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.request_token
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of the request token provider logic of
OAuth 1.0 RFC 5849. It validates the correctness of request token requests,
creates and persists tokens as well as create the proper response to be
returned to the client.
"""
import logging
from oauthlib.common import urlencode
from .. import errors
from .base import BaseEndpoint
log = logging.getLogger(__name__)
class RequestTokenEndpoint(BaseEndpoint):
"""An endpoint responsible for providing OAuth 1 request tokens.
Typical use is to instantiate with a request validator and invoke the
``create_request_token_response`` from a view function. The tuple returned
has all information necessary (body, status, headers) to quickly form
and return a proper response. See :doc:`/oauth1/validator` for details on which
validator methods to implement for this endpoint.
"""
def create_request_token(self, request, credentials):
"""Create and save a new request token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:param credentials: A dict of extra token credentials.
:returns: The token as an urlencoded string.
"""
token = {
'oauth_token': self.token_generator(),
'oauth_token_secret': self.token_generator(),
'oauth_callback_confirmed': 'true'
}
token.update(credentials)
self.request_validator.save_request_token(token, request)
return urlencode(token.items())
def create_request_token_response(self, uri, http_method='GET', body=None,
headers=None, credentials=None):
"""Create a request token response, with a new request token if valid.
:param uri: The full URI of the token request.
:param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
:param body: The request body as a string.
:param headers: The request headers as a dict.
        :param credentials: A dict of extra credentials to include in the token.
:returns: A tuple of 3 elements.
1. A dict of headers to set on the response.
2. The response body as a string.
3. The response status code as an integer.
An example of a valid request::
>>> from your_validator import your_validator
>>> from oauthlib.oauth1 import RequestTokenEndpoint
>>> endpoint = RequestTokenEndpoint(your_validator)
>>> h, b, s = endpoint.create_request_token_response(
... 'https://your.provider/request_token?foo=bar',
... headers={
... 'Authorization': 'OAuth realm=movies user, oauth_....'
... },
... credentials={
... 'my_specific': 'argument',
... })
>>> h
{'Content-Type': 'application/x-www-form-urlencoded'}
>>> b
'oauth_token=lsdkfol23w54jlksdef&oauth_token_secret=qwe089234lkjsdf&oauth_callback_confirmed=true&my_specific=argument'
>>> s
200
        A response to an invalid request would have a different body and status::
>>> b
'error=invalid_request&description=missing+callback+uri'
>>> s
400
        The same goes for an unauthorized request::
>>> b
''
>>> s
401
"""
resp_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
try:
request = self._create_request(uri, http_method, body, headers)
valid, processed_request = self.validate_request_token_request(
request)
if valid:
token = self.create_request_token(request, credentials or {})
return resp_headers, token, 200
else:
return {}, None, 401
except errors.OAuth1Error as e:
return resp_headers, e.urlencoded, e.status_code
def validate_request_token_request(self, request):
"""Validate a request token request.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:raises: OAuth1Error if the request is invalid.
:returns: A tuple of 2 elements.
1. The validation result (True or False).
2. The request object.
"""
self._check_transport_security(request)
self._check_mandatory_parameters(request)
if request.realm:
request.realms = request.realm.split(' ')
else:
request.realms = self.request_validator.get_default_realms(
request.client_key, request)
if not self.request_validator.check_realms(request.realms):
raise errors.InvalidRequestError(
description='Invalid realm {}. Allowed are {!r}.'.format(
request.realms, self.request_validator.realms))
if not request.redirect_uri:
raise errors.InvalidRequestError(
description='Missing callback URI.')
if not self.request_validator.validate_timestamp_and_nonce(
request.client_key, request.timestamp, request.nonce, request,
request_token=request.resource_owner_key):
return False, request
# The server SHOULD return a 401 (Unauthorized) status code when
# receiving a request with invalid client credentials.
# Note: This is postponed in order to avoid timing attacks, instead
# a dummy client is assigned and used to maintain near constant
# time request verification.
#
# Note that early exit would enable client enumeration
valid_client = self.request_validator.validate_client_key(
request.client_key, request)
if not valid_client:
request.client_key = self.request_validator.dummy_client
# Note that `realm`_ is only used in authorization headers and how
        # it should be interpreted is not included in the OAuth spec.
# However they could be seen as a scope or realm to which the
# client has access and as such every client should be checked
# to ensure it is authorized access to that scope or realm.
# .. _`realm`: https://tools.ietf.org/html/rfc2617#section-1.2
#
# Note that early exit would enable client realm access enumeration.
#
# The require_realm indicates this is the first step in the OAuth
# workflow where a client requests access to a specific realm.
# This first step (obtaining request token) need not require a realm
# and can then be identified by checking the require_resource_owner
        # flag and absence of realm.
#
# Clients obtaining an access token will not supply a realm and it will
# not be checked. Instead the previously requested realm should be
# transferred from the request token to the access token.
#
# Access to protected resources will always validate the realm but note
# that the realm is now tied to the access token and not provided by
# the client.
valid_realm = self.request_validator.validate_requested_realms(
request.client_key, request.realms, request)
# Callback is normally never required, except for requests for
# a Temporary Credential as described in `Section 2.1`_
# .._`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1
valid_redirect = self.request_validator.validate_redirect_uri(
request.client_key, request.redirect_uri, request)
if not request.redirect_uri:
raise NotImplementedError('Redirect URI must either be provided '
'or set to a default during validation.')
valid_signature = self._check_signature(request)
# log the results to the validator_log
# this lets us handle internal reporting and analysis
request.validator_log['client'] = valid_client
request.validator_log['realm'] = valid_realm
request.validator_log['callback'] = valid_redirect
request.validator_log['signature'] = valid_signature
# We delay checking validity until the very end, using dummy values for
# calculations and fetching secrets/keys to ensure the flow of every
# request remains almost identical regardless of whether valid values
# have been supplied. This ensures near constant time execution and
# prevents malicious users from guessing sensitive information
v = all((valid_client, valid_realm, valid_redirect, valid_signature))
if not v:
log.info("[Failure] request verification failed.")
log.info("Valid client: %s.", valid_client)
log.info("Valid realm: %s.", valid_realm)
log.info("Valid callback: %s.", valid_redirect)
log.info("Valid signature: %s.", valid_signature)
return v, request
| 9,293 | Python | .py | 180 | 41.211111 | 131 | 0.637605 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,795 | __init__.py | rembo10_headphones/lib/oauthlib/oauth2/__init__.py |
"""
oauthlib.oauth2
~~~~~~~~~~~~~~~
This module is a wrapper for the most recent implementation of OAuth 2.0 Client
and Server classes.
"""
from .rfc6749.clients import (
BackendApplicationClient, Client, LegacyApplicationClient,
MobileApplicationClient, ServiceApplicationClient, WebApplicationClient,
)
from .rfc6749.endpoints import (
AuthorizationEndpoint, BackendApplicationServer, IntrospectEndpoint,
LegacyApplicationServer, MetadataEndpoint, MobileApplicationServer,
ResourceEndpoint, RevocationEndpoint, Server, TokenEndpoint,
WebApplicationServer,
)
from .rfc6749.errors import (
AccessDeniedError, FatalClientError, InsecureTransportError,
InvalidClientError, InvalidClientIdError, InvalidGrantError,
InvalidRedirectURIError, InvalidRequestError, InvalidRequestFatalError,
InvalidScopeError, MismatchingRedirectURIError, MismatchingStateError,
MissingClientIdError, MissingCodeError, MissingRedirectURIError,
MissingResponseTypeError, MissingTokenError, MissingTokenTypeError,
OAuth2Error, ServerError, TemporarilyUnavailableError, TokenExpiredError,
UnauthorizedClientError, UnsupportedGrantTypeError,
UnsupportedResponseTypeError, UnsupportedTokenTypeError,
)
from .rfc6749.grant_types import (
AuthorizationCodeGrant, ClientCredentialsGrant, ImplicitGrant,
RefreshTokenGrant, ResourceOwnerPasswordCredentialsGrant,
)
from .rfc6749.request_validator import RequestValidator
from .rfc6749.tokens import BearerToken, OAuth2Token
from .rfc6749.utils import is_secure_transport
| 1,555 | Python | .py | 34 | 42.705882 | 79 | 0.846053 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,796 | errors.py | rembo10_headphones/lib/oauthlib/oauth2/rfc6749/errors.py |
"""
oauthlib.oauth2.rfc6749.errors
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Errors used both by OAuth 2 clients and providers to represent the
spec-defined error responses for all four core grant types.
"""
import json
from oauthlib.common import add_params_to_uri, urlencode
class OAuth2Error(Exception):
error = None
status_code = 400
description = ''
def __init__(self, description=None, uri=None, state=None,
status_code=None, request=None):
"""
:param description: A human-readable ASCII [USASCII] text providing
additional information, used to assist the client
developer in understanding the error that occurred.
Values for the "error_description" parameter
MUST NOT include characters outside the set
x20-21 / x23-5B / x5D-7E.
:param uri: A URI identifying a human-readable web page with information
about the error, used to provide the client developer with
additional information about the error. Values for the
"error_uri" parameter MUST conform to the URI- Reference
syntax, and thus MUST NOT include characters outside the set
x21 / x23-5B / x5D-7E.
:param state: A CSRF protection value received from the client.
:param status_code:
:param request: OAuthlib request.
:type request: oauthlib.common.Request
"""
if description is not None:
self.description = description
message = '({}) {}'.format(self.error, self.description)
if request:
message += ' ' + repr(request)
super().__init__(message)
self.uri = uri
self.state = state
if status_code:
self.status_code = status_code
if request:
self.redirect_uri = request.redirect_uri
self.client_id = request.client_id
self.scopes = request.scopes
self.response_type = request.response_type
self.response_mode = request.response_mode
self.grant_type = request.grant_type
if not state:
self.state = request.state
else:
self.redirect_uri = None
self.client_id = None
self.scopes = None
self.response_type = None
self.response_mode = None
self.grant_type = None
def in_uri(self, uri):
fragment = self.response_mode == "fragment"
return add_params_to_uri(uri, self.twotuples, fragment)
@property
def twotuples(self):
error = [('error', self.error)]
if self.description:
error.append(('error_description', self.description))
if self.uri:
error.append(('error_uri', self.uri))
if self.state:
error.append(('state', self.state))
return error
@property
def urlencoded(self):
return urlencode(self.twotuples)
@property
def json(self):
return json.dumps(dict(self.twotuples))
@property
def headers(self):
if self.status_code == 401:
"""
https://tools.ietf.org/html/rfc6750#section-3
All challenges defined by this specification MUST use the auth-scheme
value "Bearer". This scheme MUST be followed by one or more
auth-param values.
"""
authvalues = [
"Bearer",
'error="{}"'.format(self.error)
]
if self.description:
authvalues.append('error_description="{}"'.format(self.description))
if self.uri:
authvalues.append('error_uri="{}"'.format(self.uri))
return {"WWW-Authenticate": ", ".join(authvalues)}
return {}
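# A standalone sketch of how the properties above serialize an error; the
# subclass and values here are illustrative only:
class _SketchError(OAuth2Error):
    error = 'invalid_request'

_sketch_err = _SketchError(description='missing client_id', state='xyz')
assert _sketch_err.twotuples == [('error', 'invalid_request'),
                                 ('error_description', 'missing client_id'),
                                 ('state', 'xyz')]
# ``urlencoded`` and ``json`` render these same pairs as a query string and
# as a JSON object, respectively.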
class TokenExpiredError(OAuth2Error):
error = 'token_expired'
class InsecureTransportError(OAuth2Error):
error = 'insecure_transport'
description = 'OAuth 2 MUST utilize https.'
class MismatchingStateError(OAuth2Error):
error = 'mismatching_state'
description = 'CSRF Warning! State not equal in request and response.'
class MissingCodeError(OAuth2Error):
error = 'missing_code'
class MissingTokenError(OAuth2Error):
error = 'missing_token'
class MissingTokenTypeError(OAuth2Error):
error = 'missing_token_type'
class FatalClientError(OAuth2Error):
"""
    Errors during authorization where the user should not be redirected back.
If the request fails due to a missing, invalid, or mismatching
redirection URI, or if the client identifier is missing or invalid,
the authorization server SHOULD inform the resource owner of the
error and MUST NOT automatically redirect the user-agent to the
invalid redirection URI.
Instead the user should be informed of the error by the provider itself.
"""
pass
class InvalidRequestFatalError(FatalClientError):
"""
For fatal errors, the request is missing a required parameter, includes
an invalid parameter value, includes a parameter more than once, or is
otherwise malformed.
"""
error = 'invalid_request'
class InvalidRedirectURIError(InvalidRequestFatalError):
description = 'Invalid redirect URI.'
class MissingRedirectURIError(InvalidRequestFatalError):
description = 'Missing redirect URI.'
class MismatchingRedirectURIError(InvalidRequestFatalError):
description = 'Mismatching redirect URI.'
class InvalidClientIdError(InvalidRequestFatalError):
description = 'Invalid client_id parameter value.'
class MissingClientIdError(InvalidRequestFatalError):
description = 'Missing client_id parameter.'
class InvalidRequestError(OAuth2Error):
"""
The request is missing a required parameter, includes an invalid
parameter value, includes a parameter more than once, or is
otherwise malformed.
"""
error = 'invalid_request'
class MissingResponseTypeError(InvalidRequestError):
description = 'Missing response_type parameter.'
class MissingCodeChallengeError(InvalidRequestError):
"""
If the server requires Proof Key for Code Exchange (PKCE) by OAuth
public clients and the client does not send the "code_challenge" in
the request, the authorization endpoint MUST return the authorization
error response with the "error" value set to "invalid_request". The
"error_description" or the response of "error_uri" SHOULD explain the
nature of error, e.g., code challenge required.
"""
description = 'Code challenge required.'
class MissingCodeVerifierError(InvalidRequestError):
"""
The request to the token endpoint, when PKCE is enabled, has
the parameter `code_verifier` REQUIRED.
"""
description = 'Code verifier required.'
class AccessDeniedError(OAuth2Error):
"""
The resource owner or authorization server denied the request.
"""
error = 'access_denied'
class UnsupportedResponseTypeError(OAuth2Error):
"""
The authorization server does not support obtaining an authorization
code using this method.
"""
error = 'unsupported_response_type'
class UnsupportedCodeChallengeMethodError(InvalidRequestError):
"""
If the server supporting PKCE does not support the requested
transformation, the authorization endpoint MUST return the
authorization error response with "error" value set to
"invalid_request". The "error_description" or the response of
"error_uri" SHOULD explain the nature of error, e.g., transform
algorithm not supported.
"""
description = 'Transform algorithm not supported.'
class InvalidScopeError(OAuth2Error):
"""
The requested scope is invalid, unknown, or malformed, or
exceeds the scope granted by the resource owner.
https://tools.ietf.org/html/rfc6749#section-5.2
"""
error = 'invalid_scope'
class ServerError(OAuth2Error):
"""
The authorization server encountered an unexpected condition that
prevented it from fulfilling the request. (This error code is needed
because a 500 Internal Server Error HTTP status code cannot be returned
to the client via an HTTP redirect.)
"""
error = 'server_error'
class TemporarilyUnavailableError(OAuth2Error):
"""
The authorization server is currently unable to handle the request
due to a temporary overloading or maintenance of the server.
(This error code is needed because a 503 Service Unavailable HTTP
status code cannot be returned to the client via an HTTP redirect.)
"""
error = 'temporarily_unavailable'
class InvalidClientError(FatalClientError):
"""
Client authentication failed (e.g. unknown client, no client
authentication included, or unsupported authentication method).
The authorization server MAY return an HTTP 401 (Unauthorized) status
code to indicate which HTTP authentication schemes are supported.
If the client attempted to authenticate via the "Authorization" request
header field, the authorization server MUST respond with an
HTTP 401 (Unauthorized) status code, and include the "WWW-Authenticate"
response header field matching the authentication scheme used by the
client.
"""
error = 'invalid_client'
status_code = 401
class InvalidGrantError(OAuth2Error):
"""
The provided authorization grant (e.g. authorization code, resource
owner credentials) or refresh token is invalid, expired, revoked, does
not match the redirection URI used in the authorization request, or was
issued to another client.
https://tools.ietf.org/html/rfc6749#section-5.2
"""
error = 'invalid_grant'
status_code = 400
class UnauthorizedClientError(OAuth2Error):
"""
The authenticated client is not authorized to use this authorization
grant type.
"""
error = 'unauthorized_client'
class UnsupportedGrantTypeError(OAuth2Error):
"""
The authorization grant type is not supported by the authorization
server.
"""
error = 'unsupported_grant_type'
class UnsupportedTokenTypeError(OAuth2Error):
"""
The authorization server does not support the hint of the
presented token type, i.e. the client tried to revoke an access token
on a server not supporting this feature.
"""
error = 'unsupported_token_type'
class InvalidTokenError(OAuth2Error):
"""
The access token provided is expired, revoked, malformed, or
invalid for other reasons. The resource SHOULD respond with
the HTTP 401 (Unauthorized) status code. The client MAY
request a new access token and retry the protected resource
request.
"""
error = 'invalid_token'
status_code = 401
description = ("The access token provided is expired, revoked, malformed, "
"or invalid for other reasons.")
class InsufficientScopeError(OAuth2Error):
"""
The request requires higher privileges than provided by the
access token. The resource server SHOULD respond with the HTTP
403 (Forbidden) status code and MAY include the "scope"
attribute with the scope necessary to access the protected
resource.
"""
error = 'insufficient_scope'
status_code = 403
description = ("The request requires higher privileges than provided by "
"the access token.")
class ConsentRequired(OAuth2Error):
"""
The Authorization Server requires End-User consent.
This error MAY be returned when the prompt parameter value in the
Authentication Request is "none", but the Authentication Request cannot be
completed without displaying a user interface for End-User consent.
"""
error = 'consent_required'
class LoginRequired(OAuth2Error):
"""
The Authorization Server requires End-User authentication.
This error MAY be returned when the prompt parameter value in the
Authentication Request is "none", but the Authentication Request cannot be
completed without displaying a user interface for End-User authentication.
"""
error = 'login_required'
class CustomOAuth2Error(OAuth2Error):
"""
This error is a placeholder for all custom errors not described by the RFC.
Some of the popular OAuth2 providers are using custom errors.
"""
def __init__(self, error, *args, **kwargs):
self.error = error
super().__init__(*args, **kwargs)
def raise_from_error(error, params=None):
import inspect
import sys
kwargs = {
'description': params.get('error_description'),
'uri': params.get('error_uri'),
'state': params.get('state')
}
for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
if cls.error == error:
raise cls(**kwargs)
raise CustomOAuth2Error(error=error, **kwargs)
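# Usage sketch (illustrative, not part of the original module): mapping an
# error response back to a typed exception and serializing it again. The
# parameter values below are made up for the example.
try:
    raise_from_error('invalid_grant', {'error_description': 'Code expired.'})
except InvalidGrantError as exc:
    assert exc.status_code == 400
    assert ('error', 'invalid_grant') in exc.twotuples
    assert 'WWW-Authenticate' not in exc.headers  # header only set for 401 errors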
| 12,991 | Python | .py | 310 | 34.945161 | 84 | 0.69455 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,797 | parameters.py | rembo10_headphones/lib/oauthlib/oauth2/rfc6749/parameters.py |
"""
oauthlib.oauth2.rfc6749.parameters
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains methods related to `Section 4`_ of the OAuth 2 RFC.
.. _`Section 4`: https://tools.ietf.org/html/rfc6749#section-4
"""
import json
import os
import time
import urllib.parse as urlparse
from oauthlib.common import add_params_to_qs, add_params_to_uri
from oauthlib.signals import scope_changed
from .errors import (
InsecureTransportError, MismatchingStateError, MissingCodeError,
MissingTokenError, MissingTokenTypeError, raise_from_error,
)
from .tokens import OAuth2Token
from .utils import is_secure_transport, list_to_scope, scope_to_list
def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
scope=None, state=None, **kwargs):
"""Prepare the authorization grant request URI.
The client constructs the request URI by adding the following
parameters to the query component of the authorization endpoint URI
using the ``application/x-www-form-urlencoded`` format as defined by
[`W3C.REC-html401-19991224`_]:
:param uri:
:param client_id: The client identifier as described in `Section 2.2`_.
:param response_type: To indicate which OAuth 2 grant/flow is required,
"code" and "token".
:param redirect_uri: The client provided URI to redirect back to after
authorization as described in `Section 3.1.2`_.
:param scope: The scope of the access request as described by
`Section 3.3`_.
:param state: An opaque value used by the client to maintain
state between the request and callback. The authorization
server includes this value when redirecting the user-agent
back to the client. The parameter SHOULD be used for
preventing cross-site request forgery as described in
`Section 10.12`_.
:param kwargs: Extra arguments to embed in the grant/authorization URL.
An example of an authorization code grant authorization URL:
.. code-block:: http
GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz
&redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1
Host: server.example.com
.. _`W3C.REC-html401-19991224`: https://tools.ietf.org/html/rfc6749#ref-W3C.REC-html401-19991224
.. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2
.. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2
.. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3
.. _`section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12
"""
if not is_secure_transport(uri):
raise InsecureTransportError()
params = [('response_type', response_type),
          ('client_id', client_id)]
if redirect_uri:
params.append(('redirect_uri', redirect_uri))
if scope:
params.append(('scope', list_to_scope(scope)))
if state:
params.append(('state', state))
for k in kwargs:
if kwargs[k]:
params.append((str(k), kwargs[k]))
return add_params_to_uri(uri, params)
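# Usage sketch (illustrative values, not part of the original module):
# building an authorization code grant URL. A non-HTTPS endpoint would
# normally raise InsecureTransportError.
example_auth_url = prepare_grant_uri(
    'https://server.example.com/authorize', client_id='s6BhdRkqt3',
    response_type='code', redirect_uri='https://client.example.com/cb',
    scope=['profile'], state='xyz')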
def prepare_token_request(grant_type, body='', include_client_id=True, **kwargs):
"""Prepare the access token request.
The client makes a request to the token endpoint by adding the
following parameters using the ``application/x-www-form-urlencoded``
format in the HTTP request entity-body:
:param grant_type: To indicate grant type being used, i.e. "password",
"authorization_code" or "client_credentials".
:param body: Existing request body (URL encoded string) to embed parameters
into. This may contain extra parameters. Default ''.
:param include_client_id: `True` (default) to send the `client_id` in the
body of the upstream request. This is required
if the client is not authenticating with the
authorization server as described in
`Section 3.2.1`_.
:type include_client_id: Boolean
:param client_id: Unicode client identifier. Will only appear if
`include_client_id` is True. *
:param client_secret: Unicode client secret. Will only appear if set to a
value that is not `None`. Invoking this function with
an empty string will send an empty `client_secret`
value to the server. *
:param code: If using authorization_code grant, pass the previously
obtained authorization code as the ``code`` argument. *
:param redirect_uri: REQUIRED if the "redirect_uri" parameter was included
                     in the authorization request as described in
                     `Section 4.1.1`_; their values MUST be identical. *
:param kwargs: Extra arguments to embed in the request body.
Parameters marked with a `*` above are not explicit arguments in the
function signature, but are specially documented arguments for items
appearing in the generic `**kwargs` keyword input.
An example of an authorization code token request body:
.. code-block:: http
grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA
&redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb
.. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1
"""
params = [('grant_type', grant_type)]
if 'scope' in kwargs:
kwargs['scope'] = list_to_scope(kwargs['scope'])
# pull the `client_id` out of the kwargs.
client_id = kwargs.pop('client_id', None)
if include_client_id:
if client_id is not None:
params.append(('client_id', client_id))
# the kwargs iteration below only includes truthy values, but some
# servers may require an empty string for `client_secret`
client_secret = kwargs.pop('client_secret', None)
if client_secret is not None:
params.append(('client_secret', client_secret))
# this handles: `code`, `redirect_uri`, and other undocumented params
for k in kwargs:
if kwargs[k]:
params.append((str(k), kwargs[k]))
return add_params_to_qs(body, params)
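# Usage sketch (illustrative values): an authorization_code token request
# body, URL-encoded as the function returns it.
example_token_body = prepare_token_request(
    'authorization_code', code='SplxlOBeZQQYbYS6WxSbIA',
    redirect_uri='https://client.example.com/cb', client_id='s6BhdRkqt3')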
def prepare_token_revocation_request(url, token, token_type_hint="access_token",
callback=None, body='', **kwargs):
"""Prepare a token revocation request.
The client constructs the request by including the following parameters
using the ``application/x-www-form-urlencoded`` format in the HTTP request
entity-body:
:param token: REQUIRED. The token that the client wants to get revoked.
:param token_type_hint: OPTIONAL. A hint about the type of the token
submitted for revocation. Clients MAY pass this
parameter in order to help the authorization server
to optimize the token lookup. If the server is
unable to locate the token using the given hint, it
MUST extend its search across all of its supported
token types. An authorization server MAY ignore
this parameter, particularly if it is able to detect
the token type automatically.
This specification defines two values for `token_type_hint`:
* access_token: An access token as defined in [RFC6749],
`Section 1.4`_
* refresh_token: A refresh token as defined in [RFC6749],
`Section 1.5`_
Specific implementations, profiles, and extensions of this
specification MAY define other values for this parameter using the
registry defined in `Section 4.1.2`_.
.. _`Section 1.4`: https://tools.ietf.org/html/rfc6749#section-1.4
.. _`Section 1.5`: https://tools.ietf.org/html/rfc6749#section-1.5
.. _`Section 4.1.2`: https://tools.ietf.org/html/rfc7009#section-4.1.2
"""
if not is_secure_transport(url):
raise InsecureTransportError()
params = [('token', token)]
if token_type_hint:
params.append(('token_type_hint', token_type_hint))
for k in kwargs:
if kwargs[k]:
params.append((str(k), kwargs[k]))
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
if callback:
params.append(('callback', callback))
return add_params_to_uri(url, params), headers, body
else:
return url, headers, add_params_to_qs(body, params)
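# Usage sketch (illustrative values): revoking a refresh token. With no
# callback the parameters go into the request body, per RFC 7009.
revoke_url, revoke_headers, revoke_body = prepare_token_revocation_request(
    'https://server.example.com/revoke', 'tGzv3JOkF0XG5Qx2TlKWIA',
    token_type_hint='refresh_token')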
def parse_authorization_code_response(uri, state=None):
"""Parse authorization grant response URI into a dict.
If the resource owner grants the access request, the authorization
server issues an authorization code and delivers it to the client by
adding the following parameters to the query component of the
redirection URI using the ``application/x-www-form-urlencoded`` format:
**code**
REQUIRED. The authorization code generated by the
authorization server. The authorization code MUST expire
shortly after it is issued to mitigate the risk of leaks. A
maximum authorization code lifetime of 10 minutes is
RECOMMENDED. The client MUST NOT use the authorization code
more than once. If an authorization code is used more than
once, the authorization server MUST deny the request and SHOULD
revoke (when possible) all tokens previously issued based on
that authorization code. The authorization code is bound to
the client identifier and redirection URI.
**state**
REQUIRED if the "state" parameter was present in the client
authorization request. The exact value received from the
client.
:param uri: The full redirect URL back to the client.
:param state: The state parameter from the authorization request.
For example, the authorization server redirects the user-agent by
sending the following HTTP response:
.. code-block:: http
HTTP/1.1 302 Found
Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA
&state=xyz
"""
if not is_secure_transport(uri):
raise InsecureTransportError()
query = urlparse.urlparse(uri).query
params = dict(urlparse.parse_qsl(query))
if state and params.get('state', None) != state:
raise MismatchingStateError()
if 'error' in params:
raise_from_error(params.get('error'), params)
if 'code' not in params:
raise MissingCodeError("Missing code parameter in response.")
return params
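# Usage sketch (illustrative values): extracting the code and state from a
# redirect URI; a state mismatch would raise MismatchingStateError.
redirect_params = parse_authorization_code_response(
    'https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA&state=xyz',
    state='xyz')
assert redirect_params['code'] == 'SplxlOBeZQQYbYS6WxSbIA'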
def parse_implicit_response(uri, state=None, scope=None):
"""Parse the implicit token response URI into a dict.
If the resource owner grants the access request, the authorization
server issues an access token and delivers it to the client by adding
the following parameters to the fragment component of the redirection
URI using the ``application/x-www-form-urlencoded`` format:
**access_token**
REQUIRED. The access token issued by the authorization server.
**token_type**
REQUIRED. The type of the token issued as described in
Section 7.1. Value is case insensitive.
**expires_in**
RECOMMENDED. The lifetime in seconds of the access token. For
example, the value "3600" denotes that the access token will
expire in one hour from the time the response was generated.
If omitted, the authorization server SHOULD provide the
expiration time via other means or document the default value.
**scope**
OPTIONAL, if identical to the scope requested by the client,
otherwise REQUIRED. The scope of the access token as described
by Section 3.3.
**state**
REQUIRED if the "state" parameter was present in the client
authorization request. The exact value received from the
client.
:param uri:
:param state:
:param scope:
Similar to the authorization code response, but with a full token provided
in the URL fragment:
.. code-block:: http
HTTP/1.1 302 Found
Location: http://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA
&state=xyz&token_type=example&expires_in=3600
"""
if not is_secure_transport(uri):
raise InsecureTransportError()
fragment = urlparse.urlparse(uri).fragment
params = dict(urlparse.parse_qsl(fragment, keep_blank_values=True))
for key in ('expires_in',):
if key in params: # cast things to int
params[key] = int(params[key])
if 'scope' in params:
params['scope'] = scope_to_list(params['scope'])
if 'expires_in' in params:
params['expires_at'] = time.time() + int(params['expires_in'])
if state and params.get('state', None) != state:
raise ValueError("Mismatching or missing state in params.")
params = OAuth2Token(params, old_scope=scope)
validate_token_parameters(params)
return params
def parse_token_response(body, scope=None):
"""Parse the JSON token response body into a dict.
The authorization server issues an access token and optional refresh
token, and constructs the response by adding the following parameters
to the entity body of the HTTP response with a 200 (OK) status code:
access_token
REQUIRED. The access token issued by the authorization server.
token_type
REQUIRED. The type of the token issued as described in
`Section 7.1`_. Value is case insensitive.
expires_in
RECOMMENDED. The lifetime in seconds of the access token. For
example, the value "3600" denotes that the access token will
expire in one hour from the time the response was generated.
If omitted, the authorization server SHOULD provide the
expiration time via other means or document the default value.
refresh_token
OPTIONAL. The refresh token which can be used to obtain new
access tokens using the same authorization grant as described
in `Section 6`_.
scope
OPTIONAL, if identical to the scope requested by the client,
otherwise REQUIRED. The scope of the access token as described
by `Section 3.3`_.
The parameters are included in the entity body of the HTTP response
using the "application/json" media type as defined by [`RFC4627`_]. The
parameters are serialized into a JSON structure by adding each
parameter at the highest structure level. Parameter names and string
values are included as JSON strings. Numerical values are included
as JSON numbers. The order of parameters does not matter and can
vary.
:param body: The full json encoded response body.
:param scope: The scope requested during authorization.
For example:
.. code-block:: http
HTTP/1.1 200 OK
Content-Type: application/json
Cache-Control: no-store
Pragma: no-cache
{
"access_token":"2YotnFZFEjr1zCsicMWpAA",
"token_type":"example",
"expires_in":3600,
"refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA",
"example_parameter":"example_value"
}
.. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1
.. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6
.. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3
.. _`RFC4627`: https://tools.ietf.org/html/rfc4627
"""
try:
params = json.loads(body)
except ValueError:
# Fall back to URL-encoded string, to support old implementations,
# including (at time of writing) Facebook. See:
# https://github.com/oauthlib/oauthlib/issues/267
params = dict(urlparse.parse_qsl(body))
for key in ('expires_in',):
if key in params: # cast things to int
params[key] = int(params[key])
if 'scope' in params:
params['scope'] = scope_to_list(params['scope'])
if 'expires_in' in params:
if params['expires_in'] is None:
params.pop('expires_in')
else:
params['expires_at'] = time.time() + int(params['expires_in'])
params = OAuth2Token(params, old_scope=scope)
validate_token_parameters(params)
return params
def validate_token_parameters(params):
"""Ensures token presence, token type, expiration and scope in params."""
if 'error' in params:
raise_from_error(params.get('error'), params)
if 'access_token' not in params:
raise MissingTokenError(description="Missing access token parameter.")
if 'token_type' not in params:
if os.environ.get('OAUTHLIB_STRICT_TOKEN_TYPE'):
raise MissingTokenTypeError()
# If the issued access token scope is different from the one requested by
# the client, the authorization server MUST include the "scope" response
# parameter to inform the client of the actual scope granted.
# https://tools.ietf.org/html/rfc6749#section-3.3
if params.scope_changed:
message = 'Scope has changed from "{old}" to "{new}".'.format(
old=params.old_scope, new=params.scope,
)
scope_changed.send(message=message, old=params.old_scopes, new=params.scopes)
if not os.environ.get('OAUTHLIB_RELAX_TOKEN_SCOPE', None):
w = Warning(message)
w.token = params
w.old_scope = params.old_scopes
w.new_scope = params.scopes
raise w
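# Usage sketch (illustrative values, not part of the original module): both
# parsers return an OAuth2Token and run validate_token_parameters, deriving
# expires_at from expires_in at parse time.
implicit_token = parse_implicit_response(
    'https://client.example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA'
    '&token_type=example&expires_in=3600&state=xyz', state='xyz')
json_token = parse_token_response(
    '{"access_token": "2YotnFZFEjr1zCsicMWpAA", "token_type": "example",'
    ' "expires_in": 3600}')
assert 'expires_at' in implicit_token and 'expires_at' in json_token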
| 17,936 | Python | .py | 348 | 42.439655 | 100 | 0.663749 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,798 | tokens.py | rembo10_headphones/lib/oauthlib/oauth2/rfc6749/tokens.py |
"""
oauthlib.oauth2.rfc6749.tokens
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains methods for adding two types of access tokens to requests.
- Bearer https://tools.ietf.org/html/rfc6750
- MAC https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01
"""
import hashlib
import hmac
import warnings
from binascii import b2a_base64
from urllib.parse import urlparse
from oauthlib import common
from oauthlib.common import add_params_to_qs, add_params_to_uri
from . import utils
class OAuth2Token(dict):
def __init__(self, params, old_scope=None):
super().__init__(params)
self._new_scope = None
if 'scope' in params and params['scope']:
self._new_scope = set(utils.scope_to_list(params['scope']))
if old_scope is not None:
self._old_scope = set(utils.scope_to_list(old_scope))
if self._new_scope is None:
# the RFC says that if the scope hasn't changed, it's optional
# in params, so set the new scope to the old scope
self._new_scope = self._old_scope
else:
self._old_scope = self._new_scope
@property
def scope_changed(self):
return self._new_scope != self._old_scope
@property
def old_scope(self):
return utils.list_to_scope(self._old_scope)
@property
def old_scopes(self):
return list(self._old_scope)
@property
def scope(self):
return utils.list_to_scope(self._new_scope)
@property
def scopes(self):
return list(self._new_scope)
@property
def missing_scopes(self):
return list(self._old_scope - self._new_scope)
@property
def additional_scopes(self):
return list(self._new_scope - self._old_scope)
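# Usage sketch (illustrative, not part of the original module): OAuth2Token
# tracks scope drift between what was requested and what the server granted.
_example = OAuth2Token({'access_token': 'abc', 'scope': 'read write'},
                       old_scope='read')
assert _example.scope_changed and _example.additional_scopes == ['write']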
def prepare_mac_header(token, uri, key, http_method,
nonce=None,
headers=None,
body=None,
ext='',
hash_algorithm='hmac-sha-1',
issue_time=None,
draft=0):
"""Add an `MAC Access Authentication`_ signature to headers.
Unlike OAuth 1, this HMAC signature does not require inclusion of the
request payload/body, nor does it use a combination of client_secret
and token_secret; instead it uses a mac_key provided together with the
access token.
Currently two algorithms are supported, "hmac-sha-1" and "hmac-sha-256";
`extension algorithms`_ are not supported.
Example MAC Authorization header, linebreaks added for clarity
Authorization: MAC id="h480djs93hd8",
nonce="1336363200:dj83hs9s",
mac="bhCQXTVyfj5cmA9uKkPFx1zeOXM="
.. _`MAC Access Authentication`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01
.. _`extension algorithms`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-7.1
:param token:
:param uri: Request URI.
:param key: MAC key provided by the token endpoint.
:param http_method: HTTP Request method.
:param nonce:
:param headers: Request headers as a dictionary.
:param body:
:param ext:
:param hash_algorithm: HMAC algorithm provided by token endpoint.
:param issue_time: Time when the MAC credentials were issued (datetime).
:param draft: MAC authentication specification version.
:return: headers dictionary with the authorization field added.
"""
http_method = http_method.upper()
host, port = utils.host_from_uri(uri)
if hash_algorithm.lower() == 'hmac-sha-1':
h = hashlib.sha1
elif hash_algorithm.lower() == 'hmac-sha-256':
h = hashlib.sha256
else:
raise ValueError('unknown hash algorithm')
if draft == 0:
nonce = nonce or '{}:{}'.format(utils.generate_age(issue_time),
common.generate_nonce())
else:
ts = common.generate_timestamp()
nonce = common.generate_nonce()
sch, net, path, par, query, fra = urlparse(uri)
if query:
request_uri = path + '?' + query
else:
request_uri = path
# Hash the body/payload
if body is not None and draft == 0:
body = body.encode('utf-8')
bodyhash = b2a_base64(h(body).digest())[:-1].decode('utf-8')
else:
bodyhash = ''
# Create the normalized base string
base = []
if draft == 0:
base.append(nonce)
else:
base.append(ts)
base.append(nonce)
base.append(http_method.upper())
base.append(request_uri)
base.append(host)
base.append(port)
if draft == 0:
base.append(bodyhash)
base.append(ext or '')
base_string = '\n'.join(base) + '\n'
# hmac struggles with unicode strings - http://bugs.python.org/issue5285
if isinstance(key, str):
key = key.encode('utf-8')
sign = hmac.new(key, base_string.encode('utf-8'), h)
sign = b2a_base64(sign.digest())[:-1].decode('utf-8')
header = []
header.append('MAC id="%s"' % token)
if draft != 0:
header.append('ts="%s"' % ts)
header.append('nonce="%s"' % nonce)
if bodyhash:
header.append('bodyhash="%s"' % bodyhash)
if ext:
header.append('ext="%s"' % ext)
header.append('mac="%s"' % sign)
headers = headers or {}
headers['Authorization'] = ', '.join(header)
return headers
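# Usage sketch (illustrative values, not part of the original module):
# signing a GET request with MAC credentials. The token, key, and nonce
# below are made up; a nonce would normally be generated per request.
mac_headers = prepare_mac_header(
    'h480djs93hd8', 'https://example.com/resource/1?b=1', '489dks293j39',
    'GET', nonce='264095:dj83hs9s', ext='some-app-ext')
assert mac_headers['Authorization'].startswith('MAC id=')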
def prepare_bearer_uri(token, uri):
"""Add a `Bearer Token`_ to the request URI.
Not recommended; use only if the client can't use the Authorization header or body.
http://www.example.com/path?access_token=h480djs93hd8
.. _`Bearer Token`: https://tools.ietf.org/html/rfc6750
:param token:
:param uri:
"""
return add_params_to_uri(uri, [('access_token', token)])
def prepare_bearer_headers(token, headers=None):
"""Add a `Bearer Token`_ to the request URI.
Recommended method of passing bearer tokens.
Authorization: Bearer h480djs93hd8
.. _`Bearer Token`: https://tools.ietf.org/html/rfc6750
:param token:
:param headers:
"""
headers = headers or {}
headers['Authorization'] = 'Bearer %s' % token
return headers
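# Usage sketch: the recommended way of attaching a bearer token.
assert prepare_bearer_headers('h480djs93hd8') == {
    'Authorization': 'Bearer h480djs93hd8'}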
def prepare_bearer_body(token, body=''):
"""Add a `Bearer Token`_ to the request body.
access_token=h480djs93hd8
.. _`Bearer Token`: https://tools.ietf.org/html/rfc6750
:param token:
:param body:
"""
return add_params_to_qs(body, [('access_token', token)])
def random_token_generator(request, refresh_token=False):
"""
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:param refresh_token:
"""
return common.generate_token()
def signed_token_generator(private_pem, **kwargs):
"""
:param private_pem:
"""
def signed_token_generator(request):
request.claims = kwargs
return common.generate_signed_token(private_pem, request)
return signed_token_generator
def get_token_from_header(request):
"""
Helper function to extract a token from the request header.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:return: Return the token or None if the Authorization header is malformed.
"""
token = None
if 'Authorization' in request.headers:
split_header = request.headers.get('Authorization').split()
if len(split_header) == 2 and split_header[0].lower() == 'bearer':
token = split_header[1]
else:
token = request.access_token
return token
class TokenBase:
def __call__(self, request, refresh_token=False):
raise NotImplementedError('Subclasses must implement this method.')
def validate_request(self, request):
"""
:param request: OAuthlib request.
:type request: oauthlib.common.Request
"""
raise NotImplementedError('Subclasses must implement this method.')
def estimate_type(self, request):
"""
:param request: OAuthlib request.
:type request: oauthlib.common.Request
"""
raise NotImplementedError('Subclasses must implement this method.')
class BearerToken(TokenBase):
__slots__ = (
'request_validator', 'token_generator',
'refresh_token_generator', 'expires_in'
)
def __init__(self, request_validator=None, token_generator=None,
expires_in=None, refresh_token_generator=None):
self.request_validator = request_validator
self.token_generator = token_generator or random_token_generator
self.refresh_token_generator = (
refresh_token_generator or self.token_generator
)
self.expires_in = expires_in or 3600
def create_token(self, request, refresh_token=False, **kwargs):
"""
Create a BearerToken, by default without refresh token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:param refresh_token:
"""
if "save_token" in kwargs:
warnings.warn("`save_token` has been deprecated, it was not called internally."
"If you do, call `request_validator.save_token()` instead.",
DeprecationWarning)
if callable(self.expires_in):
expires_in = self.expires_in(request)
else:
expires_in = self.expires_in
request.expires_in = expires_in
token = {
'access_token': self.token_generator(request),
'expires_in': expires_in,
'token_type': 'Bearer',
}
# If provided, include - this is optional in some cases https://tools.ietf.org/html/rfc6749#section-3.3 but
# there is currently no mechanism to coordinate issuing a token for only a subset of the requested scopes so
# all tokens issued are for the entire set of requested scopes.
if request.scopes is not None:
token['scope'] = ' '.join(request.scopes)
if refresh_token:
if (request.refresh_token and
not self.request_validator.rotate_refresh_token(request)):
token['refresh_token'] = request.refresh_token
else:
token['refresh_token'] = self.refresh_token_generator(request)
token.update(request.extra_credentials or {})
return OAuth2Token(token)
def validate_request(self, request):
"""
:param request: OAuthlib request.
:type request: oauthlib.common.Request
"""
token = get_token_from_header(request)
return self.request_validator.validate_bearer_token(
token, request.scopes, request)
def estimate_type(self, request):
"""
:param request: OAuthlib request.
:type request: oauthlib.common.Request
"""
if request.headers.get('Authorization', '').split(' ')[0].lower() == 'bearer':
return 9
elif request.access_token is not None:
return 5
else:
return 0
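# Usage sketch (illustrative, not part of the original module): estimate_type
# scores how a request presents its token: 9 for a Bearer Authorization
# header, 5 for request.access_token, 0 otherwise.
_bearer = BearerToken()
_req = common.Request('https://example.com/api',
                      headers={'Authorization': 'Bearer abc'})
assert _bearer.estimate_type(_req) == 9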
| 11,097 | Python | .py | 281 | 31.601423 | 116 | 0.633681 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |
| 8,799 | request_validator.py | rembo10_headphones/lib/oauthlib/oauth2/rfc6749/request_validator.py |
"""
oauthlib.oauth2.rfc6749.request_validator
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import logging
log = logging.getLogger(__name__)
class RequestValidator:
def client_authentication_required(self, request, *args, **kwargs):
"""Determine if client authentication is required for current request.
According to the rfc6749, client authentication is required in the following cases:
- Resource Owner Password Credentials Grant, when Client type is Confidential or when
Client was issued client credentials or whenever Client provided client
authentication, see `Section 4.3.2`_.
- Authorization Code Grant, when Client type is Confidential or when Client was issued
client credentials or whenever Client provided client authentication,
see `Section 4.1.3`_.
- Refresh Token Grant, when Client type is Confidential or when Client was issued
client credentials or whenever Client provided client authentication, see
`Section 6`_
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Resource Owner Password Credentials Grant
- Refresh Token Grant
.. _`Section 4.3.2`: https://tools.ietf.org/html/rfc6749#section-4.3.2
.. _`Section 4.1.3`: https://tools.ietf.org/html/rfc6749#section-4.1.3
.. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6
"""
return True
def authenticate_client(self, request, *args, **kwargs):
"""Authenticate client through means outside the OAuth 2 spec.
Means of authentication is negotiated beforehand and may for example
be `HTTP Basic Authentication Scheme`_ which utilizes the Authorization
header.
Headers may be accessed through request.headers, and parameters found in
both body and query can be obtained by direct attribute access, i.e.
request.client_id for client_id in the URL query.
The authentication process is required to contain the identification of
the client (i.e. search the database based on the client_id). In case the
client doesn't exist based on the received client_id, this method has to
return False, and the HTTP response created by the library will contain
an 'invalid_client' message.
After the client identification succeeds, this method needs to set the
client on the request, i.e. request.client = client. A client object's
class must contain the 'client_id' attribute and the 'client_id' must have
a value.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Resource Owner Password Credentials Grant (may be disabled)
- Client Credentials Grant
- Refresh Token Grant
.. _`HTTP Basic Authentication Scheme`: https://tools.ietf.org/html/rfc1945#section-11.1
"""
raise NotImplementedError('Subclasses must implement this method.')
def authenticate_client_id(self, client_id, request, *args, **kwargs):
"""Ensure client_id belong to a non-confidential client.
A non-confidential client is one that is not required to authenticate
through other means, such as using HTTP Basic.
Note, while not strictly necessary it can often be very convenient
to set request.client to the client object associated with the
given client_id.
:param client_id: Unicode client identifier.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def confirm_redirect_uri(self, client_id, code, redirect_uri, client, request,
*args, **kwargs):
"""Ensure that the authorization process represented by this authorization
code began with this 'redirect_uri'.
If the client specifies a redirect_uri when obtaining the code, then that
redirect URI must be bound to the code and verified as equal in this
method, according to RFC 6749 section 4.1.3. Do not compare against
the client's allowed redirect URIs, but against the URI used when the
code was saved.
:param client_id: Unicode client identifier.
:param code: Unicode authorization_code.
:param redirect_uri: Unicode absolute URI.
:param client: Client object set by you, see ``.authenticate_client``.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant (during token request)
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_default_redirect_uri(self, client_id, request, *args, **kwargs):
"""Get the default redirect URI for the client.
:param client_id: Unicode client identifier.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: The default redirect URI for the client
Method is used by:
- Authorization Code Grant
- Implicit Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_default_scopes(self, client_id, request, *args, **kwargs):
"""Get the default scopes for the client.
:param client_id: Unicode client identifier.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: List of default scopes
Method is used by all core grant types:
- Authorization Code Grant
- Implicit Grant
- Resource Owner Password Credentials Grant
- Client Credentials grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def get_original_scopes(self, refresh_token, request, *args, **kwargs):
"""Get the list of scopes associated with the refresh token.
:param refresh_token: Unicode refresh token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: List of scopes.
Method is used by:
- Refresh token grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def is_within_original_scope(self, request_scopes, refresh_token, request, *args, **kwargs):
"""Check if requested scopes are within a scope of the refresh token.
When access tokens are refreshed the scope of the new token
needs to be within the scope of the original token. This is
ensured by checking that all requested scope strings are in
the list returned by get_original_scopes. If this check
fails, is_within_original_scope is called. The method can be
used in situations where returning all valid scopes from the
get_original_scopes is not practical.
:param request_scopes: A list of scopes that were requested by client.
:param refresh_token: Unicode refresh_token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Refresh token grant
"""
return False
def introspect_token(self, token, token_type_hint, request, *args, **kwargs):
"""Introspect an access or refresh token.
Called once the introspect request is validated. This method should
verify the *token* and either return a dictionary with the list of
claims associated, or `None` in case the token is unknown.
Below the list of registered claims you should be interested in:
- scope : space-separated list of scopes
- client_id : client identifier
- username : human-readable identifier for the resource owner
- token_type : type of the token
- exp : integer timestamp indicating when this token will expire
- iat : integer timestamp indicating when this token was issued
- nbf : integer timestamp indicating when it can be "not-before" used
- sub : subject of the token - identifier of the resource owner
- aud : list of string identifiers representing the intended audience
- iss : string representing issuer of this token
- jti : string identifier for the token
Note that most of them come directly from the JWT RFC. More details
can be found in `Introspect Claims`_ or `JWT Claims`_.
The implementation can use *token_type_hint* to improve lookup
efficiency, but must fall back to other types to be compliant with the RFC.
The dict of claims is added to request.token after this method.
:param token: The token string.
:param token_type_hint: access_token or refresh_token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
Method is used by:
- Introspect Endpoint (all grants are compatible)
.. _`Introspect Claims`: https://tools.ietf.org/html/rfc7662#section-2.2
.. _`JWT Claims`: https://tools.ietf.org/html/rfc7519#section-4
"""
raise NotImplementedError('Subclasses must implement this method.')
def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs):
"""Invalidate an authorization code after use.
:param client_id: Unicode client identifier.
:param code: The authorization code grant (request.code).
:param request: OAuthlib request.
:type request: oauthlib.common.Request
Method is used by:
- Authorization Code Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def revoke_token(self, token, token_type_hint, request, *args, **kwargs):
"""Revoke an access or refresh token.
:param token: The token string.
:param token_type_hint: access_token or refresh_token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
Method is used by:
- Revocation Endpoint
"""
raise NotImplementedError('Subclasses must implement this method.')
def rotate_refresh_token(self, request):
"""Determine whether to rotate the refresh token. Default, yes.
When access tokens are refreshed the old refresh token can be kept
or replaced with a new one (rotated). Return True to rotate
and False to keep the original.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Refresh Token Grant
"""
return True
def save_authorization_code(self, client_id, code, request, *args, **kwargs):
"""Persist the authorization_code.
The code should at minimum be stored with:
- the client_id (``client_id``)
- the redirect URI used (``request.redirect_uri``)
- a resource owner / user (``request.user``)
- the authorized scopes (``request.scopes``)
To support PKCE, you MUST associate the code with:
- Code Challenge (``request.code_challenge``) and
- Code Challenge Method (``request.code_challenge_method``)
To support OIDC, you MUST associate the code with:
- nonce, if present (``code["nonce"]``)
The ``code`` argument is actually a dictionary, containing at least a
``code`` key with the actual authorization code:
``{'code': 'sdf345jsdf0934f'}``
It may also have a ``claims`` parameter which, when present, will be a dict
deserialized from JSON as described at
http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter
This value should be saved in this method and used again in ``.validate_code``.
:param client_id: Unicode client identifier.
:param code: A dict of the authorization code grant and, optionally, state.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
Method is used by:
- Authorization Code Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def save_token(self, token, request, *args, **kwargs):
"""Persist the token with a token type specific method.
Currently, only save_bearer_token is supported.
:param token: A (Bearer) token dict.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
"""
return self.save_bearer_token(token, request, *args, **kwargs)
def save_bearer_token(self, token, request, *args, **kwargs):
"""Persist the Bearer token.
The Bearer token should at minimum be associated with:
- a client and its client_id, if available
- a resource owner / user (request.user)
- authorized scopes (request.scopes)
- an expiration time
- a refresh token, if issued
- a claims document, if present in request.claims
The Bearer token dict may hold a number of items::
{
'token_type': 'Bearer',
'access_token': 'askfjh234as9sd8',
'expires_in': 3600,
'scope': 'string of space separated authorized scopes',
'refresh_token': '23sdf876234', # if issued
'state': 'given_by_client', # if supplied by client (implicit ONLY)
}
Note that while "scope" is a string-separated list of authorized scopes,
the original list is still available in request.scopes.
The token dict is passed as a reference so any changes made to the dictionary
will go back to the user. If additional information must be returned to the
client, and it is only possible to get this information after writing the token
to storage, it should be added to the token dictionary. If the token
dictionary must be modified but the changes should not go back to the user,
a copy of the dictionary must be made before making the changes.
Also note that if an Authorization Code grant request included a valid claims
parameter (for OpenID Connect) then the request.claims property will contain
the claims dict, which should be saved for later use when generating the
id_token and/or UserInfo response content.
:param token: A Bearer token dict.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: The default redirect URI for the client
Method is used by all core grant types issuing Bearer tokens:
- Authorization Code Grant
- Implicit Grant
- Resource Owner Password Credentials Grant (might not associate a client)
- Client Credentials grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_bearer_token(self, token, scopes, request):
"""Ensure the Bearer token is valid and authorized access to scopes.
:param token: A string of random characters.
:param scopes: A list of scopes associated with the protected resource.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
A key to OAuth 2 security and restricting impact of leaked tokens is
the short expiration time of tokens, *always ensure the token has not
expired!*.
Two different approaches to scope validation:
1) all(scopes). The token must be authorized access to all scopes
associated with the resource. For example, the
token has access to ``read-only`` and ``images``,
thus the client can view images but not upload new.
Allows for fine grained access control through
combining various scopes.
2) any(scopes). The token must be authorized access to one of the
scopes associated with the resource. For example,
token has access to ``read-only-images``.
Allows for fine grained, although arguably less
convenient, access control.
A powerful way to use scopes would mimic UNIX ACLs and see a scope
as a group with certain privileges. For a restful API these might
map to HTTP verbs instead of read, write and execute.
Note, the request.user attribute can be set to the resource owner
associated with this token. Similarly the request.client and
request.scopes attribute can be set to associated client object
and authorized scopes. If you then use a decorator such as the
one provided for django these attributes will be made available
in all protected views as keyword arguments.
:param token: Unicode Bearer token
:param scopes: List of scopes (defined by you)
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is indirectly used by all core Bearer token issuing grant types:
- Authorization Code Grant
- Implicit Grant
- Resource Owner Password Credentials Grant
- Client Credentials Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_client_id(self, client_id, request, *args, **kwargs):
"""Ensure client_id belong to a valid and active client.
Note, while not strictly necessary it can often be very convenient
to set request.client to the client object associated with the
given client_id.
:param client_id: Unicode client identifier.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Implicit Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_code(self, client_id, code, client, request, *args, **kwargs):
"""Verify that the authorization_code is valid and assigned to the given
client.
Before returning true, set the following based on the information stored
with the code in 'save_authorization_code':
- request.user
- request.scopes
- request.claims (if given)
OBS! The request.user attribute should be set to the resource owner
associated with this authorization code. Similarly request.scopes
must also be set.
The request.claims property, if it was given, should be assigned a dict.
If PKCE is enabled (see 'is_pkce_required' and 'save_authorization_code')
you MUST set the following based on the information stored:
- request.code_challenge
- request.code_challenge_method
:param client_id: Unicode client identifier.
:param code: Unicode authorization code.
:param client: Client object set by you, see ``.authenticate_client``.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_grant_type(self, client_id, grant_type, client, request, *args, **kwargs):
"""Ensure client is authorized to use the grant_type requested.
:param client_id: Unicode client identifier.
:param grant_type: Unicode grant type, i.e. authorization_code, password.
:param client: Client object set by you, see ``.authenticate_client``.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Resource Owner Password Credentials Grant
- Client Credentials Grant
- Refresh Token Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs):
"""Ensure client is authorized to redirect to the redirect_uri requested.
All clients should register the absolute URIs of all URIs they intend
to redirect to. The registration is outside of the scope of oauthlib.
:param client_id: Unicode client identifier.
:param redirect_uri: Unicode absolute URI.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Implicit Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs):
"""Ensure the Bearer token is valid and authorized access to scopes.
OBS! The request.user attribute should be set to the resource owner
associated with this refresh token.
:param refresh_token: Unicode refresh token.
:param client: Client object set by you, see ``.authenticate_client``.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant (indirectly by issuing refresh tokens)
- Resource Owner Password Credentials Grant (also indirectly)
- Refresh Token Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_response_type(self, client_id, response_type, client, request, *args, **kwargs):
"""Ensure client is authorized to use the response_type requested.
:param client_id: Unicode client identifier.
:param response_type: Unicode response type, i.e. code, token.
:param client: Client object set by you, see ``.authenticate_client``.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
- Implicit Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs):
"""Ensure the client is authorized access to requested scopes.
:param client_id: Unicode client identifier.
:param scopes: List of scopes (defined by you).
:param client: Client object set by you, see ``.authenticate_client``.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by all core grant types:
- Authorization Code Grant
- Implicit Grant
- Resource Owner Password Credentials Grant
- Client Credentials Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def validate_user(self, username, password, client, request, *args, **kwargs):
"""Ensure the username and password is valid.
OBS! The validation should also set the user attribute of the request
to a valid resource owner, i.e. request.user = username or similar. If
not set, you will be unable to associate a token with a user in the
persistence method used (commonly, save_bearer_token).
:param username: Unicode username.
:param password: Unicode password.
:param client: Client object set by you, see ``.authenticate_client``.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Resource Owner Password Credentials Grant
"""
raise NotImplementedError('Subclasses must implement this method.')
def is_pkce_required(self, client_id, request):
"""Determine if current request requires PKCE. Default, False.
This is called for both "authorization" and "token" requests.
Override this method by ``return True`` to enable PKCE for everyone.
You might want to enable it only for public clients.
Note that PKCE can also be used in addition to client authentication.
OAuth 2.0 public clients utilizing the Authorization Code Grant are
susceptible to the authorization code interception attack. This
specification describes the attack as well as a technique to mitigate
against the threat through the use of Proof Key for Code Exchange
(PKCE, pronounced "pixy"). See `RFC7636`_.
:param client_id: Client identifier.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: True or False
Method is used by:
- Authorization Code Grant
.. _`RFC7636`: https://tools.ietf.org/html/rfc7636
"""
return False
def get_code_challenge(self, code, request):
"""Is called for every "token" requests.
When the server issues the authorization code in the authorization
response, it MUST associate the ``code_challenge`` and
``code_challenge_method`` values with the authorization code so it can
be verified later.
Typically, the ``code_challenge`` and ``code_challenge_method`` values
are stored in encrypted form in the ``code`` itself but could
alternatively be stored on the server associated with the code. The
server MUST NOT include the ``code_challenge`` value in client requests
in a form that other entities can extract.
Return the ``code_challenge`` associated to the code.
If ``None`` is returned, code is considered to not be associated to any
challenges.
:param code: Authorization code.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: code_challenge string
Method is used by:
- Authorization Code Grant - when PKCE is active
"""
return None
def get_code_challenge_method(self, code, request):
"""Is called during the "token" request processing, when a
``code_verifier`` and a ``code_challenge`` has been provided.
See ``.get_code_challenge``.
Must return ``plain`` or ``S256``. You can return a custom value if you have
implemented your own ``AuthorizationCodeGrant`` class.
:param code: Authorization code.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:rtype: code_challenge_method string
Method is used by:
- Authorization Code Grant - when PKCE is active
"""
raise NotImplementedError('Subclasses must implement this method.')
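# Minimal subclass sketch (illustrative, not part of the original module):
# a toy validator backed by an in-memory dict. A real implementation would
# query persistent storage and honour the security notes documented above.
class InMemoryValidator(RequestValidator):
    clients = {
        's6BhdRkqt3': {'redirect_uris': ['https://client.example.com/cb']},
    }

    def validate_client_id(self, client_id, request, *args, **kwargs):
        return client_id in self.clients

    def validate_redirect_uri(self, client_id, redirect_uri, request,
                              *args, **kwargs):
        client = self.clients.get(client_id, {})
        return redirect_uri in client.get('redirect_uris', [])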
| 27,845 | Python | .py | 512 | 44.261719 | 98 | 0.663982 | rembo10/headphones | 3,370 | 601 | 527 | GPL-3.0 | 9/5/2024, 5:10:38 PM (Europe/Amsterdam) |