_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
def setup_assertion(self, authn, sp_entity_id, in_response_to, consumer_url,
                    name_id, policy, _issuer, authn_statement, identity,
                    best_effort, sign_response, farg=None,
                    session_not_on_or_after=None, **kwargs):
    """
    Construct and return the Assertion

    :param authn: Authentication information
    :param sp_entity_id: Entity ID of the SP the assertion is meant for
    :param in_response_to: The ID of the request this is an answer to
    :param consumer_url: The recipient of the assertion
    :param name_id: The NameID of the subject
    :param policy: Assertion policies
    :param _issuer: Issuer of the statement
    :param authn_statement: An AuthnStatement instance
    :param identity: Identity information about the Subject
    :param best_effort: Even if not the SPs demands can be met send a
        response.
    :param sign_response: Sign the response, only applicable if
        ErrorResponse
    :param farg: Framework argument tree handed on to Assertion.construct
    :param session_not_on_or_after: Session expiry hint for the
        AuthnStatement
    :param kwargs: Extra keyword arguments
    :return: An Assertion instance; NOTE - on a policy failure with
        best_effort=False an *error response* is returned instead, so
        callers must handle both return types.
    """
    ast = Assertion(identity)
    # Attribute converters scoped to the IdP section of the config.
    ast.acs = self.config.getattr("attribute_converters", "idp")
    if policy is None:
        policy = Policy()
    try:
        ast.apply_policy(sp_entity_id, policy, self.metadata)
    except MissingValue as exc:
        # best_effort=True deliberately ignores the policy failure and
        # continues with whatever could be satisfied.
        if not best_effort:
            return self.create_error_response(in_response_to, consumer_url,
                                              exc, sign_response)

    farg = self.update_farg(in_response_to, consumer_url, farg)

    if authn:  # expected to be a dictionary
        # Remap caller-supplied keys to the keyword names that
        # Assertion.construct expects; unknown keys are dropped.
        authn_args = dict(
            [(AUTHN_DICT_MAP[k], v) for k, v in authn.items() if
             k in AUTHN_DICT_MAP])
        authn_args.update(kwargs)
        assertion = ast.construct(
            sp_entity_id, self.config.attribute_converters, policy,
            issuer=_issuer, farg=farg['assertion'], name_id=name_id,
            session_not_on_or_after=session_not_on_or_after,
            **authn_args)
    elif authn_statement:  # Got a complete AuthnStatement
        # NOTE(review): this branch does not forward
        # session_not_on_or_after; presumably the ready-made statement
        # already carries it - confirm.
        assertion = ast.construct(
            sp_entity_id, self.config.attribute_converters, policy,
            issuer=_issuer, authn_statem=authn_statement,
            farg=farg['assertion'], name_id=name_id,
            **kwargs)
    else:
        # No authentication information at all: bare attribute assertion.
        assertion = ast.construct(
            sp_entity_id, self.config.attribute_converters, policy,
            issuer=_issuer, farg=farg['assertion'], name_id=name_id,
            session_not_on_or_after=session_not_on_or_after,
            **kwargs)

    return assertion
def _authn_response(self, in_response_to, consumer_url,
                    sp_entity_id, identity=None, name_id=None,
                    status=None, authn=None, issuer=None, policy=None,
                    sign_assertion=False, sign_response=False,
                    best_effort=False, encrypt_assertion=False,
                    encrypt_cert_advice=None, encrypt_cert_assertion=None,
                    authn_statement=None,
                    encrypt_assertion_self_contained=False,
                    encrypted_advice_attributes=False,
                    pefim=False, sign_alg=None, digest_alg=None,
                    farg=None, session_not_on_or_after=None):
    """ Create a response. A layer of indirection.

    :param in_response_to: The session identifier of the request
    :param consumer_url: The URL which should receive the response
    :param sp_entity_id: The entity identifier of the SP
    :param identity: A dictionary with attributes and values that are
        expected to be the bases for the assertion in the response.
    :param name_id: The identifier of the subject
    :param status: The status of the response
    :param authn: A dictionary containing information about the
        authn context.
    :param issuer: The issuer of the response
    :param policy: Assertion policies
    :param sign_assertion: Whether the assertion should be signed or not
    :param sign_response: Whether the response should be signed or not
    :param best_effort: Even if not the SPs demands can be met send a
        response.
    :param encrypt_assertion: True if assertions should be encrypted.
    :param encrypt_assertion_self_contained: True if all encrypted
        assertions should have alla namespaces selfcontained.
    :param encrypted_advice_attributes: True if assertions in the advice
        element should be encrypted.
    :param encrypt_cert_advice: Certificate to be used for encryption of
        assertions in the advice element.
    :param encrypt_cert_assertion: Certificate to be used for encryption
        of assertions.
    :param authn_statement: Authentication statement.
    :param pefim: True if a response according to the PEFIM profile
        should be created.
    :param farg: Argument to pass on to the assertion constructor
    :return: A response instance
    """
    if farg is None:
        # NOTE(review): `assertion_args` is never read again and `farg`
        # stays None here (setup_assertion/update_farg handle None) -
        # this looks like dead code; confirm before removing.
        assertion_args = {}
    args = {}
    # if identity:
    _issuer = self._issuer(issuer)

    # if encrypt_assertion and show_nameid:
    #    tmp_name_id = name_id
    #    name_id = None
    #    name_id = None
    #    tmp_authn = authn
    #    authn = None
    #    tmp_authn_statement = authn_statement
    #    authn_statement = None
    if pefim:
        # PEFIM profile: the attribute assertion is carried encrypted in
        # the Advice element of an outer authn assertion.
        encrypted_advice_attributes = True
        encrypt_assertion_self_contained = True
        assertion_attributes = self.setup_assertion(
            None, sp_entity_id, None, None, None, policy, None, None,
            identity, best_effort, sign_response, farg=farg)
        assertion = self.setup_assertion(
            authn, sp_entity_id, in_response_to, consumer_url, name_id,
            policy, _issuer, authn_statement, [], True, sign_response,
            farg=farg, session_not_on_or_after=session_not_on_or_after)
        assertion.advice = saml.Advice()

        # assertion.advice.assertion_id_ref.append(saml.AssertionIDRef())
        # assertion.advice.assertion_uri_ref.append(saml.AssertionURIRef())
        assertion.advice.assertion.append(assertion_attributes)
    else:
        assertion = self.setup_assertion(
            authn, sp_entity_id, in_response_to, consumer_url, name_id,
            policy, _issuer, authn_statement, identity, True,
            sign_response, farg=farg,
            session_not_on_or_after=session_not_on_or_after)

    to_sign = []
    if not encrypt_assertion:
        if sign_assertion:
            # Only a placeholder here; actual signing is performed later
            # by self._response using the `to_sign` list.
            assertion.signature = pre_signature_part(assertion.id,
                                                     self.sec.my_cert, 2,
                                                     sign_alg=sign_alg,
                                                     digest_alg=digest_alg)
            to_sign.append((class_name(assertion), assertion.id))

    args["assertion"] = assertion

    # Persist the assertion so later AssertionIDRequest/AuthnQuery
    # operations can retrieve it.
    if (self.support_AssertionIDRequest() or self.support_AuthnQuery()):
        self.session_db.store_assertion(assertion, to_sign)

    return self._response(
        in_response_to, consumer_url, status, issuer, sign_response,
        to_sign, sp_entity_id=sp_entity_id,
        encrypt_assertion=encrypt_assertion,
        encrypt_cert_advice=encrypt_cert_advice,
        encrypt_cert_assertion=encrypt_cert_assertion,
        encrypt_assertion_self_contained=encrypt_assertion_self_contained,
        encrypted_advice_attributes=encrypted_advice_attributes,
        sign_assertion=sign_assertion,
        pefim=pefim, sign_alg=sign_alg, digest_alg=digest_alg, **args)
def create_attribute_response(self, identity, in_response_to, destination,
                              sp_entity_id, userid="", name_id=None,
                              status=None, issuer=None,
                              sign_assertion=False, sign_response=False,
                              attributes=None, sign_alg=None,
                              digest_alg=None, farg=None, **kwargs):
    """ Create an attribute assertion response.

    :param identity: A dictionary with attributes and values that are
        expected to be the bases for the assertion in the response.
    :param in_response_to: The session identifier of the request
    :param destination: The URL which should receive the response
    :param sp_entity_id: The entity identifier of the SP
    :param userid: A identifier of the user
    :param name_id: The identifier of the subject
    :param status: The status of the response
    :param issuer: The issuer of the response
    :param sign_assertion: Whether the assertion should be signed or not
    :param sign_response: Whether the whole response should be signed
    :param attributes: Restrict the assertion to these attributes
    :param farg: Argument tree passed on to the assertion constructor
    :param kwargs: To catch extra keyword arguments
    :return: A response instance
    """
    policy = self.config.getattr("policy", "aa")

    if not name_id and userid:
        try:
            # Build a NameID for the user; failures are deliberately
            # ignored and the assertion is created without one.
            name_id = self.ident.construct_nameid(userid, policy,
                                                  sp_entity_id)
            logger.warning("Unspecified NameID format")
        except Exception:
            pass

    to_sign = []

    if identity:
        farg = self.update_farg(in_response_to, sp_entity_id, farg=farg)
        _issuer = self._issuer(issuer)
        ast = Assertion(identity)
        if policy:
            ast.apply_policy(sp_entity_id, policy, self.metadata)
        else:
            policy = Policy()

        if attributes:
            # NOTE(review): `restr` is computed but never used, and
            # filter_attribute_value_assertions() is called without it -
            # the attribute restriction looks ineffective; confirm.
            restr = restriction_from_attribute_spec(attributes)
            ast = filter_attribute_value_assertions(ast)

        assertion = ast.construct(
            sp_entity_id, self.config.attribute_converters, policy,
            issuer=_issuer, name_id=name_id,
            farg=farg['assertion'])

        if sign_assertion:
            assertion.signature = pre_signature_part(assertion.id,
                                                     self.sec.my_cert, 1,
                                                     sign_alg=sign_alg,
                                                     digest_alg=digest_alg)
            # Just the assertion or the response and the assertion ?
            to_sign = [(class_name(assertion), assertion.id)]
            kwargs['sign_assertion'] = True

        kwargs["assertion"] = assertion

    if sp_entity_id:
        kwargs['sp_entity_id'] = sp_entity_id

    return self._response(in_response_to, destination, status, issuer,
                          sign_response, to_sign, sign_alg=sign_alg,
                          digest_alg=digest_alg, **kwargs)
def create_authn_response(self, identity, in_response_to, destination,
                          sp_entity_id, name_id_policy=None, userid=None,
                          name_id=None, authn=None, issuer=None,
                          sign_response=None, sign_assertion=None,
                          encrypt_cert_advice=None,
                          encrypt_cert_assertion=None,
                          encrypt_assertion=None,
                          encrypt_assertion_self_contained=True,
                          encrypted_advice_attributes=False, pefim=False,
                          sign_alg=None, digest_alg=None,
                          session_not_on_or_after=None,
                          **kwargs):
    """ Constructs an AuthenticationResponse

    :param identity: Information about an user
    :param in_response_to: The identifier of the authentication request
        this response is an answer to.
    :param destination: Where the response should be sent
    :param sp_entity_id: The entity identifier of the Service Provider
    :param name_id_policy: How the NameID should be constructed
    :param userid: The subject identifier
    :param name_id: The identifier of the subject. A saml.NameID instance.
    :param authn: Dictionary with information about the authentication
        context
    :param issuer: Issuer of the response
    :param sign_assertion: Whether the assertion should be signed or not.
    :param sign_response: Whether the response should be signed or not.
    :param encrypt_assertion: True if assertions should be encrypted.
    :param encrypt_assertion_self_contained: True if all encrypted
        assertions should have alla namespaces selfcontained.
    :param encrypted_advice_attributes: True if assertions in the advice
        element should be encrypted.
    :param encrypt_cert_advice: Certificate to be used for encryption of
        assertions in the advice element.
    :param encrypt_cert_assertion: Certificate to be used for encryption
        of assertions.
    :param pefim: True if a response according to the PEFIM profile
        should be created.
    :return: A response instance
    """
    try:
        args = self.gather_authn_response_args(
            sp_entity_id, name_id_policy=name_id_policy, userid=userid,
            name_id=name_id, sign_response=sign_response,
            sign_assertion=sign_assertion,
            encrypt_cert_advice=encrypt_cert_advice,
            encrypt_cert_assertion=encrypt_cert_assertion,
            encrypt_assertion=encrypt_assertion,
            encrypt_assertion_self_contained
            =encrypt_assertion_self_contained,
            encrypted_advice_attributes=encrypted_advice_attributes,
            pefim=pefim, **kwargs)
    except IOError as exc:
        response = self.create_error_response(in_response_to,
                                              destination,
                                              sp_entity_id,
                                              exc, name_id)
        # NOTE(review): unlike all other paths this returns a list of
        # lines, not a response object - confirm callers expect this.
        return ("%s" % response).split("\n")

    try:
        _authn = authn
        # When signing and using a generated certificate, the cert must
        # be refreshed under the lock before the response is built.
        # NOTE(review): both calls below pass identical arguments; only
        # the lock-guarded cert update differs between the branches.
        if (sign_assertion or sign_response) and \
                self.sec.cert_handler.generate_cert():
            with self.lock:
                self.sec.cert_handler.update_cert(True)
                return self._authn_response(
                    in_response_to, destination, sp_entity_id, identity,
                    authn=_authn, issuer=issuer, pefim=pefim,
                    sign_alg=sign_alg, digest_alg=digest_alg,
                    session_not_on_or_after=session_not_on_or_after, **args)
        return self._authn_response(
            in_response_to, destination, sp_entity_id, identity,
            authn=_authn, issuer=issuer, pefim=pefim, sign_alg=sign_alg,
            digest_alg=digest_alg,
            session_not_on_or_after=session_not_on_or_after, **args)

    except MissingValue as exc:
        return self.create_error_response(in_response_to, destination,
                                          sp_entity_id, exc, name_id)
def create_name_id_mapping_response(self, name_id=None, encrypted_id=None,
                                    in_response_to=None,
                                    issuer=None, sign_response=False,
                                    status=None, sign_alg=None,
                                    digest_alg=None, **kwargs):
    """
    Map a principal's name identifier onto a different identifier for
    the same principal (the SOAP-only NameIDMapping protocol).

    :param name_id: Plain NameID to return
    :param encrypted_id: Encrypted form of the identifier
    :param in_response_to: ID of the request being answered
    :param issuer: Ignored; issuer comes from message_args()
    :param sign_response: Whether to sign the response
    :param status: Ignored
    :return: A NameIDMappingResponse, signed if so requested
    """
    # Done over SOAP
    response_args = self.message_args()

    mapping_response = NameIDMappingResponse(
        name_id, encrypted_id, in_response_to=in_response_to,
        **response_args)

    if not sign_response:
        logger.info("Message: %s", mapping_response)
        return mapping_response
    return self.sign(mapping_response, sign_alg=sign_alg,
                     digest_alg=digest_alg)
def clean_out_user(self, name_id):
    """
    Remove every authentication statement belonging to the user that
    the given NameID identifies.

    :param name_id: NameID instance
    :return: The local identifier for the user
    """
    local_id = self.ident.find_local_id(name_id)
    logger.info("Clean out %s", local_id)

    # The ident DB stores the user's NameIDs as a space separated string;
    # a missing entry simply means there is nothing to clean up.
    try:
        name_ids = [decode(part)
                    for part in self.ident.db[local_id].split(" ")]
    except KeyError:
        name_ids = []

    for nid in name_ids:
        try:
            self.session_db.remove_authn_statements(nid)
        except KeyError:
            pass

    return local_id
def _mdb_get_database(uri, **kwargs):
    """
    Helper-function to connect to MongoDB and return a database object.

    The `uri' argument should be either a full MongoDB connection URI
    string, or just a database name in which case a connection to the
    default mongo instance at mongodb://localhost:27017 will be made.

    Performs explicit authentication if a username is provided in a
    connection string URI, since PyMongo does not always seem to do that
    as promised.

    :params uri: a MongoDB URI or a plain database name
    :returns: pymongo database object
    """
    if not "tz_aware" in kwargs:
        # default, but not forced
        kwargs["tz_aware"] = True

    connection_factory = MongoClient
    _parsed_uri = {}

    try:
        _parsed_uri = pymongo.uri_parser.parse_uri(uri)
    except pymongo.errors.InvalidURI:
        # assume URI to be just the database name
        db_name = uri
        _conn = MongoClient()
        pass
    else:
        # NOTE(review): MongoReplicaSetClient was removed in pymongo 3;
        # modern MongoClient handles replica sets itself - confirm the
        # pymongo version this is pinned to.
        if "replicaset" in _parsed_uri["options"]:
            connection_factory = MongoReplicaSetClient
        db_name = _parsed_uri.get("database", "pysaml2")
        _conn = connection_factory(uri, **kwargs)

    _db = _conn[db_name]

    if "username" in _parsed_uri:
        # NOTE(review): Database.authenticate was removed in pymongo 4;
        # credentials in the URI should be enough there - confirm.
        _db.authenticate(
            _parsed_uri.get("username", None),
            _parsed_uri.get("password", None)
        )

    return _db
def add_path(tdict, path):
    """
    Create or extend an argument tree `tdict` from `path`.

    Convert a list of items in `path` into a nested dict, where the
    second to last item becomes the key for the final item, and every
    earlier item becomes a level of nesting around that pair.

    :param tdict: a dictionary representing a argument tree
    :param path: a path list
    :return: the (mutated) dictionary

    Example::

        add_path({}, ['assertion', 'subject', 'subject_confirmation',
                      'method', 'urn:oasis:names:tc:SAML:2.0:cm:bearer'])

    returns::

        {'assertion': {'subject': {'subject_confirmation':
            {'method': 'urn:oasis:names:tc:SAML:2.0:cm:bearer'}}}}

    With a non-empty `tdict`::

        add_path({'method': 'urn:oasis:names:tc:SAML:2.0:cm:bearer'},
                 ['subject_confirmation_data', 'in_response_to', '_012345'])

    returns::

        {'subject_confirmation_data': {'in_response_to': '_012345'},
         'method': 'urn:oasis:names:tc:SAML:2.0:cm:bearer'}
    """
    node = tdict
    # Walk/create the intermediate levels; the last two path items become
    # the final key/value pair.
    for key in path[:-2]:
        node = node.setdefault(key, {})
    node[path[-2]] = path[-1]
    return tdict
def valid_email(emailaddress, domains=GENERIC_DOMAINS):
    """Check whether *emailaddress* is syntactically valid."""

    def _alnum_after_strip(value, separators):
        # The listed separator characters are allowed; everything left
        # over must be alphanumeric.
        for ch in separators:
            value = value.replace(ch, "")
        return value.isalnum()

    # Email address must be at least 6 characters in total,
    # assuming noone may have addresses of the type a@com.
    if len(emailaddress) < 6:
        return False

    try:
        localpart, domainname = emailaddress.rsplit('@', 1)
        host, toplevel = domainname.rsplit('.', 1)
    except ValueError:
        # Address does not have enough parts.
        return False

    # Check for Country code or Generic Domain.
    if len(toplevel) != 2 and toplevel not in domains:
        return False

    return (_alnum_after_strip(localpart, '-_.%+.')
            and _alnum_after_strip(host, '-_.'))
def deflate_and_base64_encode(string_val):
    """
    Deflate and base64 encode a string.

    :param string_val: The text (str or bytes) to deflate and encode;
        str input is encoded as UTF-8 first
    :return: The raw-deflated, base64-encoded bytes
    """
    if not isinstance(string_val, bytes):
        string_val = string_val.encode('utf-8')
    # zlib.compress output is: 2-byte zlib header + raw DEFLATE stream +
    # 4-byte adler32 checksum.  Slice header and checksum off to get the
    # raw DEFLATE stream required by the SAML HTTP-Redirect binding.
    return base64.b64encode(zlib.compress(string_val)[2:-4])
def rndbytes(size=16, alphabet=""):
    """
    Like rndstr(), but the result is always returned as bytes.
    """
    value = rndstr(size, alphabet)
    if isinstance(value, six.string_types):
        value = value.encode('utf-8')
    return value
def parse_attribute_map(filenames):
    """
    Read attribute map files into forward and backward lookup tables.

    Each line in a map file holds the attribute's OID, a user friendly
    name and the name-format specification, separated by single spaces.

    :param filenames: List of filenames on mapfiles.
    :return: A 2-tuple: a dict keyed on (oid, name_format) giving the
        friendly name, and a dict keyed on friendly name giving
        (oid, name_format).
    """
    oid_to_friendly = {}
    friendly_to_oid = {}
    for filename in filenames:
        with open(filename) as mapfile:
            for line in mapfile:
                oid, friendly_name, name_format = line.strip().split()
                oid_to_friendly[(oid, name_format)] = friendly_name
                friendly_to_oid[friendly_name] = (oid, name_format)

    return oid_to_friendly, friendly_to_oid
def signature(secret, parts):
    """Generate an HMAC-SHA1 signature over the concatenated parts.

    All str inputs are encoded as UTF-8 before hashing.

    :param secret: The HMAC key, str or bytes
    :param parts: Iterable of str/bytes fragments to sign, in order
    :return: The signature as a hex digest string
    """
    if not isinstance(secret, bytes):
        secret = secret.encode('utf-8')

    # The Python<2.5 fallback to the long-gone `sha` module was dead
    # code (and a NameError on Python 3); hashlib.sha1 is always there.
    csum = hmac.new(secret, digestmod=hashlib.sha1)
    for part in parts:
        if not isinstance(part, bytes):
            part = part.encode('utf-8')
        csum.update(part)

    return csum.hexdigest()
def any(self, typ, service, binding=None):
    """
    Return every entity that matches the specification

    :param typ: Type of entity
    :param service: The service sought
    :param binding: Optional binding restriction
    :return: dict mapping entity id to the matching service bindings
    """
    candidates = ((entity_id, self.service(entity_id, typ, service, binding))
                  for entity_id in self.keys())
    return {entity_id: found for entity_id, found in candidates if found}
def bindings(self, entity_id, typ, service):
    """
    Return all bindings registered for a service at the given entity.

    :param entity_id: Entity ID of the provider
    :param typ: Type of entity (idpsso, spsso, ...)
    :param service: The service of interest
    :return: Whatever self.service() reports without a binding filter
    """
    all_bindings = self.service(entity_id, typ, service)
    return all_bindings
def with_descriptor(self, descriptor):
    '''
    Return every entity that carries the given descriptor type.

    :param descriptor: Descriptor name without the "_descriptor" suffix
        (e.g. "spsso", "idpsso")
    :return: dict of entity id -> entity record
    '''
    wanted = "%s_descriptor" % descriptor
    return {eid: ent for eid, ent in self.items() if wanted in ent}
def certs(self, entity_id, descriptor, use="signing"):
    '''
    Return the certificates registered for the given entity.

    :param entity_id: The entity to look up
    :param descriptor: Which descriptor to read ("spsso", "idpsso", ...)
        or "any" to scan every known descriptor type
    :param use: Key use to filter on; keys that declare no "use"
        attribute match any use
    :return: list of unique, repacked certificates
    '''
    entity = self[entity_id]

    def _collect(services):
        found = []
        for srv in services:
            for key in srv.get("key_descriptor", []):
                # A key without a "use" attribute is valid for any use,
                # so defaulting the lookup to `use` keeps it.
                if key.get("use", use) != use:
                    continue
                for dat in key["key_info"]["x509_data"]:
                    cert = repack_cert(dat["x509_certificate"]["text"])
                    if cert not in found:
                        found.append(cert)
        return found

    if descriptor == "any":
        collected = []
        for descr in ["spsso", "idpsso", "role", "authn_authority",
                      "attribute_authority", "pdp"]:
            try:
                services = entity["%s_descriptor" % descr]
            except KeyError:
                continue
            collected.extend(_collect(services))
        return collected

    return _collect(entity["%s_descriptor" % descriptor])
def service(self, entity_id, typ, service, binding=None):
    """ Get me all services with a specified
    entity ID and type, that supports the specified version of binding.

    :param entity_id: The EntityId
    :param typ: Type of service (idp, attribute_authority, ...)
    :param service: which service that is sought for
    :param binding: A binding identifier
    :return: list of service descriptions when a binding was given;
        otherwise a dict keyed on binding, each value a list of
        services.  None when the entity or type is unknown; an empty
        list when it is known but offers no such service.
    """
    try:
        srvs = []
        for t in self[entity_id][typ]:
            try:
                srvs.extend(t[service])
            except KeyError:
                # This descriptor instance does not offer the service.
                pass
    except KeyError:
        # Unknown entity or descriptor type.
        return None

    if not srvs:
        return srvs

    if binding:
        res = []
        for srv in srvs:
            if srv["binding"] == binding:
                res.append(srv)
    else:
        # Group all services by their binding.
        res = {}
        for srv in srvs:
            try:
                res[srv["binding"]].append(srv)
            except KeyError:
                res[srv["binding"]] = [srv]
    logger.debug("service => %s", res)
    return res
def attribute_requirement(self, entity_id, index=None):
    """ Return the attributes the SP requires and those it marks as
    optional, if any such demands are registered in the metadata.

    :param entity_id: The entity id of the SP
    :param index: Which attribute_consuming_service to consult; None
        aggregates over all attribute_consuming_services
    :return: dict with "required" and "optional" attribute lists, or
        None when the entity has no usable spsso_descriptor
    """
    combined = {"required": [], "optional": []}

    try:
        for sp in self[entity_id]["spsso_descriptor"]:
            per_descriptor = attribute_requirement(sp, index)
            combined["required"].extend(per_descriptor["required"])
            combined["optional"].extend(per_descriptor["optional"])
    except KeyError:
        return None

    return combined
def load(self, *args, **kwargs):
    """ Import metadata by the use of HTTP GET.

    If the fingerprint is known the file will be checked for
    compliance before it is imported.

    :raises SourceNotFound: when the URL does not answer with HTTP 200
    """
    response = self.http.send(self.url)
    if response.status_code != 200:
        logger.info("Response status: %s", response.status_code)
        raise SourceNotFound(self.url)
    return self.parse_and_check_signature(response.content)
def entity_categories(self, entity_id):
    """
    Get the entity categories an entity declares in its metadata.

    :param entity_id: Entity id
    :return: Entity categories (empty list when none are declared)

    :type entity_id: string
    :rtype: [string]
    """
    return self.entity_attributes(entity_id).get(ENTITY_CATEGORY, [])
def supported_entity_categories(self, entity_id):
    """
    Get the entity categories an entity declares support for.

    :param entity_id: Entity id
    :return: Supported entity categories (empty list when none declared)

    :type entity_id: string
    :rtype: [string]
    """
    return self.entity_attributes(entity_id).get(ENTITY_CATEGORY_SUPPORT, [])
def entity_attributes(self, entity_id):
    """
    Collect all entity attributes published for an entity.

    Example return data::

        {'http://macedir.org/entity-category': ['something', 'something2'],
         'http://example.org/saml-foo': ['bar']}

    :param entity_id: Entity id
    :return: dict mapping attribute names to value lists; empty when the
        entity has no extensions element

    :type entity_id: string
    :rtype: dict
    """
    collected = {}
    try:
        extensions = self.__getitem__(entity_id)["extensions"]
    except KeyError:
        return collected

    for element in extensions["extension_elements"]:
        if element["__class__"] != ENTITYATTRIBUTES:
            continue
        for attr in element["attribute"]:
            values = collected.setdefault(attr["name"], [])
            values.extend(v["text"] for v in attr["attribute_value"])

    return collected
def dumps(self, format="local"):
    """
    Serialize the stored metadata.

    :param format: "local" for standard SAML metadata XML, "md" for the
        pysaml2 JSON representation
    :return: a string, or None for an unknown format
    """
    if format == "md":
        return json.dumps(self.items(), indent=2)
    if format == "local":
        container = EntitiesDescriptor()
        for _md in self.metadata.values():
            try:
                container.entity_descriptor.extend(
                    _md.entities_descr.entity_descriptor)
            except AttributeError:
                # Source holds a single entity, not a collection.
                container.entity_descriptor.append(_md.entity_descr)
        return "%s" % container
def http_form_post_message(message, location, relay_state="",
                           typ="SAMLRequest", **kwargs):
    """The HTTP POST binding defines a mechanism by which SAML protocol
    messages may be transmitted within the base64-encoded content of a
    HTML form control.

    :param message: The message
    :param location: Where the form should be posted to
    :param relay_state: for preserving and conveying state information
    :param typ: Name of the form field that carries the message
        (SAMLRequest/SAMLResponse/other)
    :return: A dict with header information and a HTML message.
    """
    # cgi.escape was removed in Python 3.8; html.escape is its modern
    # replacement (it additionally escapes quotes, which is the safe
    # behaviour inside HTML attribute values).
    try:
        from html import escape
    except ImportError:  # pragma: no cover - legacy Python
        from cgi import escape

    if not isinstance(message, six.string_types):
        message = str(message)
    if not isinstance(message, six.binary_type):
        message = message.encode('utf-8')

    if typ == "SAMLRequest" or typ == "SAMLResponse":
        _msg = base64.b64encode(message)
    else:
        _msg = message
    _msg = _msg.decode('ascii')

    saml_response_input = HTML_INPUT_ELEMENT_SPEC.format(
        name=escape(typ),
        val=escape(_msg),
        type='hidden')
    relay_state_input = ""
    if relay_state:
        relay_state_input = HTML_INPUT_ELEMENT_SPEC.format(
            name='RelayState',
            val=escape(relay_state),
            type='hidden')
    response = HTML_FORM_SPEC.format(
        saml_response_input=saml_response_input,
        relay_state_input=relay_state_input,
        action=location)

    return {"headers": [("Content-type", "text/html")], "data": response}
def http_redirect_message(message, location, relay_state="", typ="SAMLRequest",
                          sigalg='', signer=None, **kwargs):
    """The HTTP Redirect binding defines a mechanism by which SAML protocol
    messages can be transmitted within URL parameters.
    Messages are encoded for use with this binding using a URL encoding
    technique, and transmitted using the HTTP GET method.

    The DEFLATE Encoding is used in this function.

    :param message: The message
    :param location: Where the message should be posted to
    :param relay_state: for preserving and conveying state information
    :param typ: What type of message it is SAMLRequest/SAMLResponse/SAMLart
    :param sigalg: Which algorithm the signature function will use to sign
        the message
    :param signer: A signature function that can be used to sign the message
    :return: A tuple containing header information and a HTML message.
    """
    if not isinstance(message, six.string_types):
        message = "%s" % (message,)

    _order = None
    if typ in ["SAMLRequest", "SAMLResponse"]:
        if typ == "SAMLRequest":
            _order = REQ_ORDER
        else:
            _order = RESP_ORDER
        args = {typ: deflate_and_base64_encode(message)}
    elif typ == "SAMLart":
        # NOTE(review): _order stays None for SAMLart, so combining
        # SAMLart with a signer would iterate over None below - confirm
        # that combination is never used.
        args = {typ: message}
    else:
        raise Exception("Unknown message type: %s" % typ)

    if relay_state:
        args["RelayState"] = relay_state

    if signer:
        # sigalgs, should be one defined in xmldsig
        # NOTE(review): `assert` vanishes under `python -O`; an explicit
        # exception would be more robust for this validation.
        assert sigalg in [b for a, b in SIG_ALLOWED_ALG]
        args["SigAlg"] = sigalg

        # The signature covers the parameters in the binding-mandated
        # order, then is appended as its own parameter.
        string = "&".join([urlencode({k: args[k]})
                           for k in _order if k in args]).encode('ascii')

        args["Signature"] = base64.b64encode(signer.sign(string))
        string = urlencode(args)
    else:
        string = urlencode(args)

    # Preserve any query part already present in the location URL.
    glue_char = "&" if urlparse(location).query else "?"
    login_url = glue_char.join([location, string])
    headers = [('Location', str(login_url))]
    body = []

    return {"headers": headers, "data": body}
def parse_soap_enveloped_saml(text, body_class, header_class=None):
    """Parses a SOAP enveloped SAML thing and returns header parts and body

    :param text: The SOAP object as XML
    :param body_class: The SAML class expected in the SOAP Body
    :param header_class: Iterable of SAML classes that may occur in the
        SOAP Header
    :return: 2-tuple of (body instance or None, dict mapping header tag
        to instance)
    """
    envelope = defusedxml.ElementTree.fromstring(text)
    # NOTE(review): `assert` disappears under `python -O`; an explicit
    # exception would make this validation robust.
    assert envelope.tag == '{%s}Envelope' % NAMESPACE

    # print(len(envelope))
    body = None
    header = {}
    for part in envelope:
        # print(">",part.tag)
        if part.tag == '{%s}Body' % NAMESPACE:
            for sub in part:
                try:
                    body = saml2.create_class_from_element_tree(
                        body_class, sub)
                except Exception:
                    # NOTE(review): the original cause is discarded here,
                    # which makes parse failures hard to debug.
                    raise Exception(
                        "Wrong body type (%s) in SOAP envelope" % sub.tag)
        elif part.tag == '{%s}Header' % NAMESPACE:
            if not header_class:
                raise Exception("Header where I didn't expect one")
            # print("--- HEADER ---")
            for sub in part:
                # print(">>",sub.tag)
                # Match the header element against each accepted class.
                for klass in header_class:
                    # print("?{%s}%s" % (klass.c_namespace,klass.c_tag))
                    if sub.tag == "{%s}%s" % (klass.c_namespace, klass.c_tag):
                        header[sub.tag] = \
                            saml2.create_class_from_element_tree(klass, sub)
                        break

    return body, header
def load(self, cnf, metadata_construction=False):
    """ The base load method, loads the configuration

    :param cnf: The configuration as a dictionary
    :param metadata_construction: Is this only to be able to construct
        metadata. If so some things can be left out.
    :return: The Configuration instance
    """
    _uc = self.unicode_convert
    for arg in COMMON_ARGS:
        if arg == "virtual_organization":
            if "virtual_organization" in cnf:
                for key, val in cnf["virtual_organization"].items():
                    self.vorg[key] = VirtualOrg(None, key, val)
            # Handled entirely above; skip the generic setattr below.
            continue
        elif arg == "extension_schemas":
            # List of filename of modules representing the schemas
            if "extension_schemas" in cnf:
                for mod_file in cnf["extension_schemas"]:
                    _mod = self._load(mod_file)
                    self.extension_schema[_mod.NAMESPACE] = _mod
            # Note: no `continue` here - falls through to the generic
            # setattr like every other COMMON_ARGS entry.

        try:
            setattr(self, arg, _uc(cnf[arg]))
        except KeyError:
            # Argument not present in the configuration; keep default.
            pass
        except TypeError:  # Something that can't be a string
            setattr(self, arg, cnf[arg])

    if "service" in cnf:
        for typ in ["aa", "idp", "sp", "pdp", "aq"]:
            try:
                self.load_special(
                    cnf["service"][typ], typ,
                    metadata_construction=metadata_construction)
                self.serves.append(typ)
            except KeyError:
                pass

    if "extensions" in cnf:
        self.do_extensions(cnf["extensions"])

    self.load_complex(cnf, metadata_construction=metadata_construction)
    self.context = self.def_context

    return self
def load_metadata(self, metadata_conf):
    """ Loads metadata into an internal structure.

    :param metadata_conf: Metadata configuration, as accepted by
        MetadataStore.imp
    :return: A loaded MetadataStore instance
    :raises ConfigurationError: when no attribute converters are set
    """
    acs = self.attribute_converters

    if acs is None:
        raise ConfigurationError(
            "Missing attribute converter specification")

    # These attributes are optional on the config object; missing ones
    # get safe defaults.  Catch only AttributeError - the bare `except:`
    # clauses this replaces would also have swallowed e.g.
    # KeyboardInterrupt.
    try:
        ca_certs = self.ca_certs
    except AttributeError:
        ca_certs = None
    try:
        disable_validation = self.disable_ssl_certificate_validation
    except AttributeError:
        disable_validation = False

    mds = MetadataStore(
        acs, self, ca_certs,
        disable_ssl_certificate_validation=disable_validation)

    mds.imp(metadata_conf)

    return mds
def endpoint(self, service, binding=None, context=None):
    """ Return the endpoints registered for *service*, optionally
    filtered on *binding*.

    :param service: The service the endpoint should support
    :param binding: The expected binding, or None for any binding
    :param context: Which entity context to look in
    :return: Endpoints matching the binding filter; when none match,
        the unspecified (binding-less) endpoint entries instead
    """
    matched = []
    unspecified = []
    endpoints = self.getattr("endpoints", context)
    if endpoints and service in endpoints:
        for spec in endpoints[service]:
            try:
                url, spec_binding = spec
            except ValueError:
                # Entry without a binding component.
                unspecified.append(spec)
            else:
                if binding is None or spec_binding == binding:
                    matched.append(url)

    return matched if matched else unspecified
def service_per_endpoint(self, context=None):
    """
    Map every endpoint URL this entity publishes to the
    (service, binding) pair that is served there.

    :param context: Type of entity
    :return: dict with endpoint url as key and a (service, binding)
        tuple as value
    """
    endpoints = self.getattr("endpoints", context)
    return {
        url: (service, binding)
        for service, specs in endpoints.items()
        for url, binding in specs
    }
def ecp_endpoint(self, ipaddress):
    """Return the entity id of the IdP an ECP client should talk to.

    :param ipaddress: The IP address of the user client
    :return: IdP entity ID, or None when no pattern matches
    """
    _ecp = self.getattr("ecp")
    if not _ecp:
        return None
    for pattern, entity_id in _ecp.items():
        if re.match(pattern, ipaddress):
            return entity_id
    return None
"resource": ""
} |
def members_to_ask(self, name_id):
    """Return the Virtual Organization members not yet spoken to.

    Starts with the affiliation members, adds any configured members
    missing from that list, and finally drops the members for which
    cached data about *name_id* already exists.
    """
    candidates = self._affiliation_members()
    for member in self.member:
        if member not in candidates:
            candidates.append(member)
    # Filter out members we already hold cached data from
    remaining = [
        m for m in candidates
        if not self.sp.users.cache.active(name_id, m)
    ]
    logger.info("VO members (not cached): %s", remaining)
    return remaining
"resource": ""
} |
def construct_came_from(environ):
    """Reconstruct the URL the user requested when single-sign-on
    processing interrupted it.

    :param environ: WSGI environ dictionary
    :return: PATH_INFO plus the query string, if any
    """
    # BUG FIX: environ.get("PATH_INFO") returns None when the key is
    # absent, which made the concatenation below raise TypeError.
    came_from = environ.get("PATH_INFO") or ""
    qstr = environ.get("QUERY_STRING", "")
    if qstr:
        came_from += "?" + qstr
    return came_from
"resource": ""
} |
def for_me(conditions, myself):
    """Check whether *myself* is among the intended audiences.

    :param conditions: A Conditions instance
    :param myself: My entity id
    :return: True when no audience restriction applies, or when my
        entity id is explicitly listed as an audience
    """
    if not conditions.audience_restriction:
        # No audience restriction means everyone is an intended audience
        return True
    return any(
        audience.text.strip() == myself
        for restriction in conditions.audience_restriction
        if restriction.audience
        for audience in restriction.audience
    )
"resource": ""
} |
def issue_instant_ok(self):
    """Check that the response was issued at a reasonable time.

    The issue instant must fall within roughly a day of now (stretched
    by the configured time slack) in either direction.
    """
    latest = time_util.shift_time(
        time_util.time_in_a_while(days=1), self.timeslack).timetuple()
    earliest = time_util.shift_time(
        time_util.time_a_while_ago(days=1), -self.timeslack).timetuple()
    issued_at = str_to_time(self.response.issue_instant)
    return earliest < issued_at < latest
"resource": ""
} |
def decrypt_attributes(self, attribute_statement):
    """Decrypt any encrypted attributes and add the plaintext results
    to the statement's attribute list.

    :param attribute_statement: A SAML AttributeStatement that may
        contain both encrypted attributes and plain attributes.
    """
    for encattr in attribute_statement.encrypted_attribute:
        if encattr.encrypted_key:
            # The whole EncryptedAttribute element is encrypted
            text = self.sec.decrypt(encattr)
            enc_attr = encrypted_attribute_from_string(text)
            attribute_statement.attribute.extend(
                enc_attr.extensions_as_elements("Attribute", saml))
        else:
            # Only the data portion is encrypted
            text = self.sec.decrypt(encattr.encrypted_data)
            attribute_statement.attribute.append(
                attribute_from_string(text))
"resource": ""
} |
def get_identity(self):
    """Collect identity information (attribute name -> values) from
    every assertion, including assertions carried in Advice elements.

    :return: dict of attribute name to values; empty when no
        attribute statements are present
    """
    ava = {}
    for _assertion in self.assertions:
        if _assertion.advice and _assertion.advice.assertion:
            for adv_assertion in _assertion.advice.assertion:
                if adv_assertion.attribute_statement:
                    assert len(adv_assertion.attribute_statement) == 1
                    ava.update(self.read_attribute_statement(
                        adv_assertion.attribute_statement[0]))
        if _assertion.attribute_statement:
            # BUG FIX: this used to log len(self.assertion...), i.e. a
            # different assertion than the one being processed here.
            logger.debug("Assertion contains %s attribute statement(s)",
                         len(_assertion.attribute_statement))
            for _attr_statem in _assertion.attribute_statement:
                logger.debug("Attribute Statement: %s", _attr_statem)
                ava.update(self.read_attribute_statement(_attr_statem))
    if not ava:
        logger.debug("Assertion contains no attribute statements")
    return ava
"resource": ""
} |
def get_subject(self):
    """Extract and validate the mandatory Subject of the assertion.

    Filters the subject confirmations down to the valid ones and then
    resolves the subject's NameID, decrypting it when necessary.

    :return: The NameID of the subject
    :raises VerificationError: when no valid attesting address,
        recipient or subject confirmation can be established
    :raises ValueError: for an unknown subject confirmation method
    """
    assert self.assertion.subject
    subject = self.assertion.subject

    if not self.verify_attesting_entity(subject.subject_confirmation):
        raise VerificationError("No valid attesting address")

    valid_confirmations = []
    for confirmation in subject.subject_confirmation:
        data = confirmation.subject_confirmation_data
        method = confirmation.method
        if method == SCM_BEARER:
            if not self._bearer_confirmed(data):
                continue
        elif method == SCM_HOLDER_OF_KEY:
            if not self._holder_of_key_confirmed(data):
                continue
        elif method != SCM_SENDER_VOUCHES:
            raise ValueError("Unknown subject confirmation method: %s" % (
                method,))

        recipient = data.recipient
        if not recipient or not self.verify_recipient(recipient):
            raise VerificationError("No valid recipient")
        valid_confirmations.append(confirmation)

    if not valid_confirmations:
        raise VerificationError("No valid subject confirmation")
    subject.subject_confirmation = valid_confirmations

    # The subject may carry its NameID in the clear or encrypted
    if subject.name_id:
        self.name_id = subject.name_id
    elif subject.encrypted_id:
        decrypted = self.sec.decrypt(
            subject.encrypted_id.encrypted_data.to_string())
        self.name_id = saml.name_id_from_string(decrypted)

    logger.info("Subject NameID: %s", self.name_id)
    return self.name_id
"resource": ""
} |
def decrypt_assertions(self, encrypted_assertions, decr_txt, issuer=None,
                       verified=False):
    """Collect the decrypted assertions held by encrypted assertions.

    :param encrypted_assertions: list of EncryptedAssertion elements
        whose extension elements carry decrypted assertions
    :param decr_txt: string with the decrypted data, used when
        verifying signatures
    :param issuer: the issuer of the response
    :param verified: when True signatures are not verified again
    :return: list of decrypted Assertion instances
    :raises SignatureError: if a signature fails verification
    """
    decrypted = []
    for enc in encrypted_assertions:
        if not enc.extension_elements:
            continue
        assertions = extension_elements_to_elements(
            enc.extension_elements, [saml, samlp])
        for assertion in assertions:
            if assertion.signature and not verified:
                ok = self.sec.check_signature(
                    assertion, origdoc=decr_txt,
                    node_name=class_name(assertion), issuer=issuer)
                if not ok:
                    logger.error("Failed to verify signature on '%s'",
                                 assertion)
                    raise SignatureError()
            decrypted.append(assertion)
    return decrypted
"resource": ""
} |
def find_encrypt_data_assertion_list(self, _assertions):
    """Check whether any assertion in the list carries encrypted data
    inside its Advice element.

    :param _assertions: A list of assertions.
    :return: True if encrypted data exists, otherwise False.
    """
    for assertion in _assertions:
        advice = assertion.advice
        if advice and advice.encrypted_assertion:
            if self.find_encrypt_data_assertion(
                    advice.encrypted_assertion):
                return True
    # Explicit False (was an implicit None) for consistency with the
    # sibling find_encrypt_data(); both are used in boolean context.
    return False
"resource": ""
} |
def find_encrypt_data(self, resp):
    """Check whether a SAML response contains encrypted assertions
    with encrypted data.

    :param resp: A SAML response.
    :return: True if encrypted data exists, otherwise False.
    """
    if resp.encrypted_assertion and self.find_encrypt_data_assertion(
            resp.encrypted_assertion):
        return True
    if resp.assertion:
        for assertion in resp.assertion:
            advice = assertion.advice
            if advice and advice.encrypted_assertion:
                if self.find_encrypt_data_assertion(
                        advice.encrypted_assertion):
                    return True
    return False
"resource": ""
} |
def verify(self, keys=None):
    """Verify that the assertion is syntactically correct and that any
    signature present is valid.

    :param keys: Keys to use instead of the default key file, if given.
    :return: self on success, None when verification fails.
    """
    try:
        res = self._verify()
    except AssertionError as err:
        logger.error("Verification error on the response: %s", err)
        raise
    if res is None:
        return None
    if not isinstance(self.response, samlp.Response):
        return self
    if self.parse_assertion(keys):
        return self
    logger.error("Could not parse the assertion")
    return None
"resource": ""
} |
def verify_recipient(self, recipient):
    """Verify that I am the intended recipient of the assertion.

    :param recipient: A URI specifying the entity or location to which
        an attesting entity can present the assertion.
    :return: True/False
    """
    if not self.conv_info:
        return True

    info = self.conv_info
    if 'entity_id' in info and recipient == info['entity_id']:
        return True
    try:
        if recipient in self.return_addrs:
            return True
    except KeyError:
        pass
    return False
"resource": ""
} |
def signed(item):
    """Return True if *item* or any of its descendants carries a
    signature.

    :param item: A SamlBase instance
    :return: True if some part of the element tree is signed
    """
    if SIG in item.c_children.keys() and item.signature:
        return True
    for prop in item.c_child_order:
        child = getattr(item, prop, None)
        if isinstance(child, list):
            if any(signed(sub) for sub in child):
                return True
        elif child and signed(child):
            return True
    return False
"resource": ""
} |
def get_xmlsec_binary(paths=None):
    """Locate the xmlsec1 binary.

    :param paths: Non-system directories which should be searched
        (before the PATH environment variable) when looking for xmlsec1
    :return: full path of the xmlsec1 binary found
    :raises SigverError: when no binary can be found
    """
    if os.name == 'nt':
        bin_name = ['xmlsec.exe', 'xmlsec1.exe']
    else:
        # posix and any other platform use the plain name
        bin_name = ['xmlsec1']

    def present(candidate):
        # lstat raises OSError when the path does not exist
        try:
            return bool(os.lstat(candidate))
        except OSError:
            return False

    if paths:
        for name in bin_name:
            for directory in paths:
                candidate = os.path.join(directory, name)
                if present(candidate):
                    return candidate

    for directory in os.environ['PATH'].split(os.pathsep):
        for name in bin_name:
            candidate = os.path.join(directory, name)
            if present(candidate):
                return candidate

    raise SigverError('Cannot find {binary}'.format(binary=bin_name))
"resource": ""
} |
def _get_xmlsec_cryptobackend(path=None, search_paths=None):
    """Create a CryptoBackendXmlSec1 crypto backend.

    Internal to this module.

    :param path: explicit path to the xmlsec1 binary; when None the
        binary is looked up via get_xmlsec_binary
    :param search_paths: extra directories for that lookup
    """
    binary = path if path is not None else get_xmlsec_binary(
        paths=search_paths)
    return CryptoBackendXmlSec1(binary)
"resource": ""
} |
def make_temp(string, suffix='', decode=True, delete=True):
    """Write *string* into a named temporary file.

    xmlsec needs real files in some cases where only strings exist,
    hence this helper. It creates a temporary file with the string as
    its only content.

    :param string: The information to be placed in the file
    :param suffix: Suffix the temporary file must carry, if any
    :param decode: When True the input is treated as base64 and
        decoded before being written
    :param delete: Whether the file is removed once closed
    :return: 2-tuple of (file object, file name); the caller closes
        the file, the name is what e.g. xmlsec needs
    """
    ntf = NamedTemporaryFile(suffix=suffix, delete=delete)
    # Python 3 tempfile requires a byte-like object
    payload = string if isinstance(string, six.binary_type) \
        else string.encode('utf-8')
    ntf.write(base64.b64decode(payload) if decode else payload)
    ntf.seek(0)
    return ntf, ntf.name
"resource": ""
} |
def active_cert(key):
    """Check whether a certificate is currently active, i.e. the
    present time is after not_before and before not_after.

    :param key: The certificate (base64 PEM body)
    :return: True if the certificate is active, else False
    """
    try:
        cert_str = pem_format(key)
        cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_str)
        assert cert.has_expired() == 0
        assert not OpenSSLWrapper().certificate_not_valid_yet(cert)
    except (AssertionError, AttributeError):
        return False
    return True
"resource": ""
} |
def cert_from_key_info(key_info, ignore_age=False):
    """Extract all X509 certs from a KeyInfo instance.

    Care is taken to normalize each certificate into a continuous
    base64 string wrapped at 64 characters. All certificates appearing
    in an X509Data element MUST relate to the validation key by either
    containing it or being part of a certification chain that
    terminates in a certificate containing the validation key.

    :param key_info: The KeyInfo instance
    :param ignore_age: include certificates regardless of validity
        period
    :return: A possibly empty list of certs
    """
    certs = []
    for x509_data in key_info.x509_data:
        raw = x509_data.x509_certificate.text.strip()
        compact = ''.join(part.strip() for part in raw.split())
        cert = '\n'.join(split_len(compact, 64))
        if ignore_age or active_cert(cert):
            certs.append(cert)
        else:
            logger.info('Inactive cert')
    return certs
"resource": ""
} |
def cert_from_instance(instance):
    """Find certificates that are part of an instance.

    :param instance: An instance
    :return: possibly empty list of certificates
    """
    signature = instance.signature
    if signature and signature.key_info:
        return cert_from_key_info(signature.key_info, ignore_age=True)
    return []
"resource": ""
} |
def parse_xmlsec_output(output):
    """Inspect xmlsec output to decide whether the command succeeded.

    :param output: The output from Popen
    :return: True if the command was a success
    :raises XmlsecError: when FAIL is seen, or when no verdict line
        exists at all
    """
    for line in output.splitlines():
        if line == 'OK':
            return True
        if line == 'FAIL':
            raise XmlsecError(output)
    raise XmlsecError(output)
"resource": ""
} |
def read_cert_from_file(cert_file, cert_type):
    """Read a single certificate from a file.

    The assumption is that the file holds exactly one certificate.

    :param cert_file: The name of the file (may be empty)
    :param cert_type: The certificate type: 'pem', 'der', 'cer', 'crt'
    :return: A base64 encoded certificate as a string, or the empty
        string when no file name was given
    :raises CertificateError: when a PEM file lacks the expected
        BEGIN/END markers
    """
    if not cert_file:
        return ''

    if cert_type == 'pem':
        content = read_file(cert_file, 'rb').decode()
        lines = content.replace('\r\n', '\n').split('\n')

        for marker in ('-----BEGIN CERTIFICATE-----',
                       '-----BEGIN PUBLIC KEY-----'):
            if marker in lines:
                lines = lines[lines.index(marker) + 1:]
                break
        else:
            raise CertificateError('Strange beginning of PEM file')

        for marker in ('-----END CERTIFICATE-----',
                       '-----END PUBLIC KEY-----'):
            if marker in lines:
                lines = lines[:lines.index(marker)]
                break
        else:
            raise CertificateError('Strange end of PEM file')

        return make_str(''.join(lines).encode())

    if cert_type in ['der', 'cer', 'crt']:
        data = read_file(cert_file, 'rb')
        return make_str(base64.b64encode(data))
"resource": ""
} |
def security_context(conf):
    """Create a security context based on the configuration.

    :param conf: The configuration, a Config instance
    :return: A SecurityContext instance, or None when no configuration
        was supplied
    :raises SigverError: when the xmlsec1 binary cannot be found or
        the crypto backend is unknown
    """
    if not conf:
        return None

    metadata = getattr(conf, 'metadata', None)
    id_attr = getattr(conf, 'id_attr_name', None)

    sec_backend = None
    if conf.crypto_backend == 'xmlsec1':
        xmlsec_binary = conf.xmlsec_binary
        if not xmlsec_binary:
            xmlsec_binary = get_xmlsec_binary(
                getattr(conf, 'xmlsec_path', []))
        # Verify that xmlsec is where it is supposed to be
        if not os.path.exists(xmlsec_binary):
            raise SigverError(
                'xmlsec binary not found: {binary}'.format(
                    binary=xmlsec_binary))
        crypto = _get_xmlsec_cryptobackend(xmlsec_binary)

        _file_name = conf.getattr('key_file', '')
        if _file_name:
            try:
                rsa_key = import_rsa_key_from_file(_file_name)
            except Exception as err:
                logger.error('Cannot import key from {file}: {err_msg}'.format(
                    file=_file_name, err_msg=err))
                raise
            sec_backend = RSACrypto(rsa_key)
    elif conf.crypto_backend == 'XMLSecurity':
        # new and somewhat untested pyXMLSecurity crypto backend
        crypto = CryptoBackendXMLSecurity()
    else:
        raise SigverError(
            'Unknown crypto_backend {backend}'.format(
                backend=conf.crypto_backend))

    enc_key_files = []
    if conf.encryption_keypairs is not None:
        enc_key_files = [
            pair['key_file'] for pair in conf.encryption_keypairs
            if 'key_file' in pair]

    return SecurityContext(
        crypto,
        conf.key_file,
        cert_file=conf.cert_file,
        metadata=metadata,
        only_use_keys_in_metadata=conf.only_use_keys_in_metadata,
        cert_handler_extra_class=conf.cert_handler_extra_class,
        generate_cert_info=conf.generate_cert_info,
        tmp_cert_file=conf.tmp_cert_file,
        tmp_key_file=conf.tmp_key_file,
        validate_certificate=conf.validate_certificate,
        enc_key_files=enc_key_files,
        encryption_keypairs=conf.encryption_keypairs,
        sec_backend=sec_backend,
        id_attr=id_attr)
"resource": ""
} |
def pre_signature_part(ident, public_key=None, identifier=None,
                       digest_alg=None, sign_alg=None):
    """Build the preset Signature part for an assertion to be signed.

    If an assertion is to be signed, the signature part has to be
    preset with the algorithms to use; this returns such a part.

    :param ident: The identifier of the assertion, so you know which
        assertion was signed
    :param public_key: The base64 part of a PEM file
    :param identifier: optional suffix for the Signature element id
    :param digest_alg: digest algorithm; library default when None
    :param sign_alg: signature algorithm; library default when None
    :return: A preset signature part
    """
    digest_alg = digest_alg or ds.DefaultSignature().get_digest_alg()
    sign_alg = sign_alg or ds.DefaultSignature().get_sign_alg()

    transforms = ds.Transforms(transform=[
        ds.Transform(algorithm=ds.TRANSFORM_ENVELOPED),
        ds.Transform(algorithm=ds.ALG_EXC_C14N),
    ])
    reference = ds.Reference(
        uri='#{id}'.format(id=ident),
        digest_value=ds.DigestValue(),
        transforms=transforms,
        digest_method=ds.DigestMethod(algorithm=digest_alg))
    signed_info = ds.SignedInfo(
        signature_method=ds.SignatureMethod(algorithm=sign_alg),
        canonicalization_method=ds.CanonicalizationMethod(
            algorithm=ds.ALG_EXC_C14N),
        reference=reference)
    signature = ds.Signature(
        signed_info=signed_info,
        signature_value=ds.SignatureValue())

    if identifier:
        signature.id = 'Signature{n}'.format(n=identifier)
    if public_key:
        x509_data = ds.X509Data(
            x509_certificate=[ds.X509Certificate(text=public_key)])
        signature.key_info = ds.KeyInfo(x509_data=x509_data)
    return signature
"resource": ""
} |
def verify_signature(self, signedtext, cert_file=None, cert_type='pem',
                     node_name=NODE_NAME, node_id=None, id_attr=''):
    """Verify the signature of an XML document.

    :param signedtext: The XML document as a string
    :param cert_file: The public key that was used to sign the document
    :param cert_type: The file type of the certificate
    :param node_name: The name of the class that is signed
    :param node_id: The identifier of the node
    :param id_attr: The attribute name for the identifier, normally
        one of 'id', 'Id' or 'ID'
    :return: True if the signature was correct, otherwise False
    """
    # Falling back to our own certificate is only useful for testing;
    # in production you would not receive data signed with your key.
    if not cert_file:
        cert_file = self.cert_file
        cert_type = self.cert_type
    id_attr = id_attr or self.id_attr
    return self.crypto.validate_signature(
        signedtext,
        cert_file=cert_file,
        cert_type=cert_type,
        node_name=node_name,
        node_id=node_id,
        id_attr=id_attr)
"resource": ""
} |
def correctly_signed_message(self, decoded_xml, msgtype, must=False,
                             origdoc=None, only_valid_cert=False):
    """Check whether a request is correctly signed.

    If we have metadata for the entity that sent the info, use that;
    otherwise use any key carried in the message itself.

    :param decoded_xml: The SAML message as an XML infoset (a string)
    :param msgtype: SAML protocol message type
    :param must: Whether there must be a signature
    :param origdoc: the original document
    :return: the parsed message
    :raises TypeError: when the document is not of the stated type
    :raises SignatureError: when a required signature is missing
    """
    attr = '{type}_from_string'.format(type=msgtype)
    # the parser function may live in either samlp or saml
    parser = getattr(samlp, attr, getattr(saml, attr, None))
    msg = parser(decoded_xml)
    if not msg:
        raise TypeError('Not a {type}'.format(type=msgtype))

    if not msg.signature:
        if must:
            raise SignatureError(
                'Required signature missing on {type}'.format(
                    type=msgtype))
        return msg

    return self._check_signature(
        decoded_xml,
        msg,
        class_name(msg),
        origdoc,
        must=must,
        only_valid_cert=only_valid_cert)
"resource": ""
} |
def correctly_signed_response(self, decoded_xml, must=False, origdoc=None,
                              only_valid_cert=False,
                              require_response_signature=False, **kwargs):
    """Check whether a response instance is correctly signed.

    If we have metadata for the IdP that sent the info, use that;
    otherwise use any key carried in the message itself.

    :param decoded_xml: The SAML message as an XML string
    :param must: Whether there must be a signature
    :param origdoc: the original document
    :param only_valid_cert: accept only valid certificates
    :param require_response_signature: whether the response element
        itself must be signed
    :return: the parsed response; None if the signature can not be
        verified
    :raises TypeError: when the document is not a Response
    :raises SignatureError: when a required response signature is
        missing
    """
    response = samlp.any_response_from_string(decoded_xml)
    if not response:
        raise TypeError('Not a Response')

    if response.signature:
        # 'do_not_verify' lets a caller skip the signature check
        if 'do_not_verify' not in kwargs:
            self._check_signature(
                decoded_xml, response, class_name(response), origdoc)
    elif require_response_signature:
        raise SignatureError('Signature missing for response')
    return response
"resource": ""
} |
def sign_statement(self, statement, node_name, key=None, key_file=None,
                   node_id=None, id_attr=''):
    """Sign a SAML statement.

    :param statement: The statement to be signed
    :param node_name: string like 'urn:oasis:names:...:Assertion'
    :param key: The key to be used for the signing, either this or
    :param key_file: The file where the key can be found
    :param node_id: identifier of the node to sign
    :param id_attr: The attribute name for the identifier, normally
        one of 'id', 'Id' or 'ID'
    :return: The signed statement
    """
    id_attr = id_attr or self.id_attr
    if key and not key_file:
        # the crypto backend wants keys as files
        _, key_file = make_temp(str(key).encode(), '.pem')
    elif not key and not key_file:
        key_file = self.key_file
    return self.crypto.sign_statement(
        statement,
        node_name,
        key_file,
        node_id,
        id_attr)
"resource": ""
} |
def sign_assertion(self, statement, **kwargs):
    """Sign a SAML assertion.

    See sign_statement() for the kwargs.

    :param statement: The statement to be signed
    :return: The signed statement
    """
    node_name = class_name(saml.Assertion())
    return self.sign_statement(statement, node_name, **kwargs)
"resource": ""
} |
def sign_attribute_query(self, statement, **kwargs):
    """Sign a SAML attribute query.

    See sign_statement() for the kwargs.

    :param statement: The statement to be signed
    :return: The signed statement
    """
    node_name = class_name(samlp.AttributeQuery())
    return self.sign_statement(statement, node_name, **kwargs)
"resource": ""
} |
def multiple_signatures(self, statement, to_sign, key=None, key_file=None,
                        sign_alg=None, digest_alg=None):
    """Sign multiple parts of a statement.

    :param statement: The statement that should be signed, as XML text
    :param to_sign: A list of (item, id, id attribute name) tuples that
        specifies what to sign
    :param key: A key that should be used for doing the signing
    :param key_file: A file that contains the key to be used
    :return: A possibly multiple signed statement
    """
    for (item, item_id, id_attr) in to_sign:
        if not item_id:
            if not item.id:
                # BUG FIX: the loop variable used to be named `sid`,
                # shadowing the module-level sid() id factory, so
                # `sid = item.id = sid()` called a falsy value and
                # crashed. NOTE(review): assumes sid() is imported at
                # module level (saml2.s_utils) — confirm.
                item_id = item.id = sid()
            else:
                item_id = item.id
        if not item.signature:
            item.signature = pre_signature_part(
                item_id,
                self.cert_file,
                sign_alg=sign_alg,
                digest_alg=digest_alg)
        statement = self.sign_statement(
            statement,
            class_name(item),
            key=key,
            key_file=key_file,
            node_id=item_id,
            id_attr=id_attr)
    return statement
"resource": ""
} |
q36462 | parse_soap_enveloped_saml_thingy | train | def parse_soap_enveloped_saml_thingy(text, expected_tags):
"""Parses a SOAP enveloped SAML thing and returns the thing as
a string.
:param text: The SOAP object as XML string
:param expected_tags: What the tag of the SAML thingy is expected to be.
:return: SAML thingy as a string
"""
envelope = defusedxml.ElementTree.fromstring(text)
# Make sure it's a SOAP message
assert envelope.tag == '{%s}Envelope' % soapenv.NAMESPACE
assert len(envelope) >= 1
body = None
for part in envelope:
if part.tag == '{%s}Body' % soapenv.NAMESPACE:
assert len(part) == 1
body = part
break
if body is None:
return ""
saml_part = body[0]
if saml_part.tag in expected_tags:
return ElementTree.tostring(saml_part, encoding="UTF-8")
else:
raise WrongMessageType("Was '%s' expected one of %s" % (saml_part.tag,
expected_tags)) | python | {
"resource": ""
} |
q36463 | class_instances_from_soap_enveloped_saml_thingies | train | def class_instances_from_soap_enveloped_saml_thingies(text, modules):
"""Parses a SOAP enveloped header and body SAML thing and returns the
thing as a dictionary class instance.
:param text: The SOAP object as XML
:param modules: modules representing xsd schemas
:return: The body and headers as class instances
"""
try:
envelope = defusedxml.ElementTree.fromstring(text)
except Exception as exc:
raise XmlParseError("%s" % exc)
assert envelope.tag == '{%s}Envelope' % soapenv.NAMESPACE
assert len(envelope) >= 1
env = {"header": [], "body": None}
for part in envelope:
if part.tag == '{%s}Body' % soapenv.NAMESPACE:
assert len(part) == 1
env["body"] = instanciate_class(part[0], modules)
elif part.tag == "{%s}Header" % soapenv.NAMESPACE:
for item in part:
env["header"].append(instanciate_class(item, modules))
return env | python | {
"resource": ""
} |
q36464 | soap_fault | train | def soap_fault(message=None, actor=None, code=None, detail=None):
""" Create a SOAP Fault message
:param message: Human readable error message
:param actor: Who discovered the error
:param code: Error code
:param detail: More specific error message
:return: A SOAP Fault message as a string
"""
_string = _actor = _code = _detail = None
if message:
_string = soapenv.Fault_faultstring(text=message)
if actor:
_actor = soapenv.Fault_faultactor(text=actor)
if code:
_code = soapenv.Fault_faultcode(text=code)
if detail:
_detail = soapenv.Fault_detail(text=detail)
fault = soapenv.Fault(
faultcode=_code,
faultstring=_string,
faultactor=_actor,
detail=_detail,
)
return "%s" % fault | python | {
"resource": ""
} |
q36465 | AESCipher._deprecation_notice | train | def _deprecation_notice(cls):
"""Warn about deprecation of this class."""
_deprecation_msg = (
'{name} {type} is deprecated. '
'It will be removed in the next version. '
'Use saml2.cryptography.symmetric instead.'
).format(name=cls.__name__, type=type(cls).__name__)
_warnings.warn(_deprecation_msg, DeprecationWarning) | python | {
"resource": ""
} |
q36466 | OpenSSLWrapper.create_certificate | train | def create_certificate(self, cert_info, request=False, valid_from=0,
valid_to=315360000, sn=1, key_length=1024,
hash_alg="sha256", write_to_file=False, cert_dir="",
cipher_passphrase=None):
"""
Can create certificate requests, to be signed later by another
certificate with the method
create_cert_signed_certificate. If request is True.
Can also create self signed root certificates if request is False.
This is default behaviour.
:param cert_info: Contains information about the certificate.
Is a dictionary that must contain the keys:
cn = Common name. This part
must match the host being authenticated
country_code = Two letter description
of the country.
state = State
city = City
organization = Organization, can be a
company name.
organization_unit = A unit at the
organization, can be a department.
Example:
cert_info_ca = {
"cn": "company.com",
"country_code": "se",
"state": "AC",
"city": "Dorotea",
"organization":
"Company",
"organization_unit":
"Sales"
}
:param request: True if this is a request for certificate,
that should be signed.
False if this is a self signed certificate,
root certificate.
:param valid_from: When the certificate starts to be valid.
Amount of seconds from when the
certificate is generated.
:param valid_to: How long the certificate will be valid from
when it is generated.
The value is in seconds. Default is
315360000 seconds, a.k.a 10 years.
:param sn: Serial number for the certificate. Default
is 1.
:param key_length: Length of the key to be generated. Defaults
to 1024.
:param hash_alg: Hash algorithm to use for the key. Default
is sha256.
:param write_to_file: True if you want to write the certificate
to a file. The method will then return
a tuple with path to certificate file and
path to key file.
False if you want to get the result as
strings. The method will then return a tuple
with the certificate string and the key as
string.
WILL OVERWRITE ALL EXISTING FILES WITHOUT
ASKING!
:param cert_dir: Where to save the files if write_to_file is
true.
:param cipher_passphrase A dictionary with cipher and passphrase.
Example::
{"cipher": "blowfish", "passphrase": "qwerty"}
:return: string representation of certificate,
string representation of private key
if write_to_file parameter is False otherwise
path to certificate file, path to private
key file
"""
cn = cert_info["cn"]
c_f = None
k_f = None
if write_to_file:
cert_file = "%s.crt" % cn
key_file = "%s.key" % cn
try:
remove(cert_file)
except:
pass
try:
remove(key_file)
except:
pass
c_f = join(cert_dir, cert_file)
k_f = join(cert_dir, key_file)
# create a key pair
k = crypto.PKey()
k.generate_key(crypto.TYPE_RSA, key_length)
# create a self-signed cert
cert = crypto.X509()
if request:
cert = crypto.X509Req()
if (len(cert_info["country_code"]) != 2):
raise WrongInput("Country code must be two letters!")
cert.get_subject().C = cert_info["country_code"]
cert.get_subject().ST = cert_info["state"]
cert.get_subject().L = cert_info["city"]
cert.get_subject().O = cert_info["organization"]
cert.get_subject().OU = cert_info["organization_unit"]
cert.get_subject().CN = cn
if not request:
cert.set_serial_number(sn)
cert.gmtime_adj_notBefore(valid_from) #Valid before present time
cert.gmtime_adj_notAfter(valid_to) #3 650 days
cert.set_issuer(cert.get_subject())
cert.set_pubkey(k)
cert.sign(k, hash_alg)
try:
if request:
tmp_cert = crypto.dump_certificate_request(crypto.FILETYPE_PEM,
cert)
else:
tmp_cert = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
tmp_key = None
if cipher_passphrase is not None:
passphrase = cipher_passphrase["passphrase"]
if isinstance(cipher_passphrase["passphrase"],
six.string_types):
passphrase = passphrase.encode('utf-8')
tmp_key = crypto.dump_privatekey(crypto.FILETYPE_PEM, k,
cipher_passphrase["cipher"],
passphrase)
else:
tmp_key = crypto.dump_privatekey(crypto.FILETYPE_PEM, k)
if write_to_file:
with open(c_f, 'wt') as fc:
fc.write(tmp_cert.decode('utf-8'))
with open(k_f, 'wt') as fk:
fk.write(tmp_key.decode('utf-8'))
return c_f, k_f
return tmp_cert, tmp_key
except Exception as ex:
raise CertificateError("Certificate cannot be generated.", ex) | python | {
"resource": ""
} |
q36467 | OpenSSLWrapper.verify | train | def verify(self, signing_cert_str, cert_str):
"""
Verifies if a certificate is valid and signed by a given certificate.
:param signing_cert_str: This certificate will be used to verify the
signature. Must be a string representation
of the certificate. If you only have a file
use the method read_str_from_file to
get a string representation.
:param cert_str: This certificate will be verified if it is
correct. Must be a string representation
of the certificate. If you only have a file
use the method read_str_from_file to
get a string representation.
:return: Valid, Message
Valid = True if the certificate is valid,
otherwise false.
Message = Why the validation failed.
"""
try:
ca_cert = crypto.load_certificate(crypto.FILETYPE_PEM,
signing_cert_str)
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_str)
if self.certificate_not_valid_yet(ca_cert):
return False, "CA certificate is not valid yet."
if ca_cert.has_expired() == 1:
return False, "CA certificate is expired."
if cert.has_expired() == 1:
return False, "The signed certificate is expired."
if self.certificate_not_valid_yet(cert):
return False, "The signed certificate is not valid yet."
if ca_cert.get_subject().CN == cert.get_subject().CN:
return False, ("CN may not be equal for CA certificate and the "
"signed certificate.")
cert_algorithm = cert.get_signature_algorithm()
if six.PY3:
cert_algorithm = cert_algorithm.decode('ascii')
cert_str = cert_str.encode('ascii')
cert_crypto = saml2.cryptography.pki.load_pem_x509_certificate(
cert_str)
try:
crypto.verify(ca_cert, cert_crypto.signature,
cert_crypto.tbs_certificate_bytes,
cert_algorithm)
return True, "Signed certificate is valid and correctly signed by CA certificate."
except crypto.Error as e:
return False, "Certificate is incorrectly signed."
except Exception as e:
return False, "Certificate is not valid for an unknown reason. %s" % str(e) | python | {
"resource": ""
} |
def cookies(self, url):
    """Return the stored cookies that match *url* and have not expired.

    :param url: URL whose hostname/path select the applicable cookies.
    :return: dict mapping cookie name to value.
    """
    target = urlparse(url)
    host = target.hostname
    now = utc_now()
    matching = {}
    # Walk the jar's internal domain -> path -> name nesting.
    for per_domain in list(self.cookiejar._cookies.values()):
        for per_path in per_domain.values():
            for ck in list(per_path.values()):
                if ck.expires and ck.expires <= now:
                    continue  # stale cookie
                if not re.search("%s$" % ck.domain, host):
                    continue  # domain suffix does not match
                if not re.match(ck.path, target.path):
                    continue  # path prefix does not match
                matching[ck.name] = ck.value
    return matching
def set_cookie(self, kaka, request):
    """Store the cookies of a parsed Set-Cookie header into self.cookiejar.

    :param kaka: A SimpleCookie-style mapping of cookie name -> Morsel parsed
        from a set-cookie header line; falsy values are ignored.
    :param request: The request the header belongs to; its URL supplies the
        default cookie domain.
    """
    if not kaka:
        return
    part = urlparse(request.url)
    _domain = part.hostname
    logger.debug("%s: '%s'", _domain, kaka)
    for cookie_name, morsel in kaka.items():
        std_attr = ATTRS.copy()
        std_attr["name"] = cookie_name
        _tmp = morsel.coded_value
        if _tmp.startswith('"') and _tmp.endswith('"'):
            std_attr["value"] = _tmp[1:-1]  # strip surrounding quotes
        else:
            std_attr["value"] = _tmp
        std_attr["version"] = 0
        # copy attributes that have values
        for attr in morsel.keys():
            if attr in ATTRS:
                if morsel[attr]:
                    if attr == "expires":
                        std_attr[attr] = _since_epoch(morsel[attr])
                    elif attr == "path":
                        if morsel[attr].endswith(","):
                            std_attr[attr] = morsel[attr][:-1]
                        else:
                            std_attr[attr] = morsel[attr]
                    else:
                        std_attr[attr] = morsel[attr]
            elif attr == "max-age":
                if morsel["max-age"]:
                    std_attr["expires"] = time.time() + int(morsel["max-age"])
        for att, item in PAIRS.items():
            if std_attr[att]:
                std_attr[item] = True
        if std_attr["domain"]:
            if std_attr["domain"].startswith("."):
                std_attr["domain_initial_dot"] = True
        else:
            std_attr["domain"] = _domain
            std_attr["domain_specified"] = True
        # BUG FIX: this used to read `morsel["max-age"] is 0`.  Morsel values
        # are strings, so identity-comparing against int 0 was always False
        # and a "Max-Age: 0" delete-cookie directive never took this branch.
        if morsel["max-age"] == "0":
            try:
                self.cookiejar.clear(domain=std_attr["domain"],
                                     path=std_attr["path"],
                                     name=std_attr["name"])
            except ValueError:
                pass
        elif std_attr["expires"] and std_attr["expires"] < utc_now():
            # Already expired: drop any stored copy instead of adding it.
            try:
                self.cookiejar.clear(domain=std_attr["domain"],
                                     path=std_attr["path"],
                                     name=std_attr["name"])
            except ValueError:
                pass
        else:
            new_cookie = http_cookiejar.Cookie(**std_attr)
            self.cookiejar.set_cookie(new_cookie)
def use_http_post(message, destination, relay_state,
                  typ="SAMLRequest"):
    """Return a urlencoded message to be POSTed to the recipient.

    :param message: The response (stringified if necessary)
    :param destination: Where the response should be sent (unused here)
    :param relay_state: The relay_state received in the request
    :param typ: Whether a Request, Response or Artifact
    :return: dictionary
    """
    serialized = (message if isinstance(message, six.string_types)
                  else "%s" % (message,))
    return http_post_message(serialized, relay_state, typ)
def use_http_form_post(message, destination, relay_state,
                       typ="SAMLRequest"):
    """Return an auto-submitting form that POSTs *message* to *destination*.

    :param message: The message (stringified if necessary)
    :param destination: Recipient URL
    :param relay_state: The relay_state received in the request
    :param typ: Whether a Request, Response or Artifact
    :return: dictionary
    """
    serialized = (message if isinstance(message, six.string_types)
                  else "%s" % (message,))
    return http_form_post_message(serialized, destination, relay_state, typ)
def use_soap(self, request, destination="", soap_headers=None, sign=False,
             **kwargs):
    """Construct the information needed to send *request* via SOAP+POST.

    :param request: The SAML message to envelope.
    :param destination: URL of the recipient endpoint.
    :param soap_headers: Optional SOAP header blocks.
    :param sign: Whether to XML-sign the enveloped message.
    :return: dictionary with url/method/data/headers keys
    """
    envelope = make_soap_enveloped_saml_thingy(request, soap_headers)
    logger.debug("SOAP message: %s", envelope)
    if sign and self.sec:
        envelope = self.sec.sign_statement(envelope,
                                           class_name=class_name(request),
                                           node_id=request.id)
    return {"url": destination,
            "method": "POST",
            "data": envelope,
            "headers": [("content-type", "application/soap+xml")]}
def send_using_soap(self, request, destination, headers=None, sign=False):
    """Send a message using SOAP+POST.

    :param request: The SAML message to send.
    :param destination: Recipient URL.
    :param headers: Optional SOAP headers.
    :param sign: Whether to sign the enveloped message.
    :raises HTTPError: if the HTTP status is not 200.
    :return: the HTTP response on success.
    """
    try:
        call_kwargs = self.use_soap(request, destination, headers, sign)
        call_kwargs["headers"] = dict(call_kwargs["headers"])
        response = self.send(**call_kwargs)
    except Exception as exc:
        logger.info("HTTPClient exception: %s", exc)
        raise
    if response.status_code != 200:
        raise HTTPError("%d:%s" % (response.status_code, response.content))
    logger.info("SOAP response: %s", response.text)
    return response
def use_http_get(message, destination, relay_state,
                 typ="SAMLRequest", sigalg="", signer=None, **kwargs):
    """Prepare an HTTP-Redirect (GET) message; no direct response is
    expected to this request.

    :param message: The message (stringified if necessary)
    :param destination: Recipient URL
    :param relay_state: The relay_state received in the request
    :param typ: Whether a Request, Response or Artifact
    :param sigalg: Which algorithm the signature function will use
    :param signer: A signing function used to sign the message
    :return: dictionary
    """
    serialized = (message if isinstance(message, six.string_types)
                  else "%s" % (message,))
    return http_redirect_message(serialized, destination, relay_state, typ,
                                 sigalg, signer)
def extract(environ, empty=False, err=False):
    """Extract form data from a WSGI environ and return it as a dict.

    :param environ: WSGI environ
    :param empty: Stops on empty fields (default: Fault)
    :param err: Stops on errors in fields (default: Fault)
    """
    # NOTE(review): the cgi module is deprecated (PEP 594) and removed in
    # Python 3.13 -- migrating to urllib.parse/email.parser needs confirming
    # against the multipart payloads this handles.
    parsed = cgi.parse(environ['wsgi.input'], environ, empty, err)
    # Collapse single-item lists to bare values.
    for field, values in parsed.items():
        if len(values) == 1:
            parsed[field] = values[0]
    return parsed
def cookie_signature(seed, *parts):
    """Return the hex HMAC-SHA1 digest of *parts* keyed with *seed*.

    Falsy parts (None, empty) are skipped.
    """
    mac = hmac.new(seed, digestmod=hashlib.sha1)
    for chunk in filter(None, parts):
        mac.update(chunk)
    return mac.hexdigest()
def make_cookie(name, load, seed, expire=0, domain="", path="",
                timestamp=""):
    """Create a signed cookie and return it as a (header, value) tuple.

    :param name: Cookie name
    :param load: Cookie payload
    :param seed: A seed for the HMAC function
    :param expire: Number of minutes before this cookie goes stale
    :param domain: The domain of the cookie
    :param path: The path specification for the cookie
    :param timestamp: Optional fixed timestamp; defaults to "now"
    :return: A tuple to be added to headers
    """
    jar = SimpleCookie()
    # NOTE(review): time.mktime expects local time but is fed gmtime() here,
    # so the timestamp is offset by the local UTC offset -- harmless as long
    # as the verifier uses the same scheme, but worth confirming.
    ts = timestamp or str(int(time.mktime(time.gmtime())))
    signature = cookie_signature(seed, load, ts)
    jar[name] = "|".join([load, ts, signature])
    morsel = jar[name]
    if path:
        morsel["path"] = path
    if domain:
        morsel["domain"] = domain
    if expire:
        morsel["expires"] = _expiration(expire,
                                        "%a, %d-%b-%Y %H:%M:%S GMT")
    return tuple(jar.output().split(": ", 1))
def set_basic_auth(self, username, password, realm):
    """Register HTTP basic-auth credentials for this client.

    @param username: Login name.
    @param password: Login password.
    @param realm: The authentication realm.
    @return: The current object (for call chaining)
    """
    manager = self._passmgr
    manager.add_password(realm, self._base_url, username, password)
    return self
def query_timeseries(resource_root, query, from_time=None, to_time=None,
                     desired_rollup=None, must_use_desired_rollup=None, by_post=False):
    """Query the CM time series data store.

    @param query: Query string.
    @param from_time: Start of the period (datetime; must be tz-aware or in
        server time zone).
    @param to_time: End of the period (datetime; default = now).
    @param desired_rollup: Aggregate rollup to get data for (RAW,
        TEN_MINUTELY, HOURLY, SIX_HOURLY, DAILY or WEEKLY).  Only a hint
        unless must_use_desired_rollup is true.
    @param must_use_desired_rollup: Force the server to honour the rollup.
    @param by_post: Use HTTP POST, which accepts longer query strings than
        GET.
    @return: List of ApiTimeSeriesResponse
    """
    if by_post:
        # Long queries travel in the request body instead of the URL.
        request_method = resource_root.post
        payload = ApiTimeSeriesRequest(resource_root, query=query)
        params = {}
    else:
        request_method = resource_root.get
        payload = None
        params = {'query': query} if query else {}
    if from_time:
        params['from'] = from_time.isoformat()
    if to_time:
        params['to'] = to_time.isoformat()
    if desired_rollup:
        params['desiredRollup'] = desired_rollup
    if must_use_desired_rollup:
        params['mustUseDesiredRollup'] = must_use_desired_rollup
    return call(request_method, TIME_SERIES_PATH,
                ApiTimeSeriesResponse, True, params=params, data=payload)
def get_parcel(resource_root, product, version, cluster_name="default"):
    """Look up a parcel by product name and version.

    @param resource_root: The root Resource object.
    @param product: Parcel product name
    @param version: Parcel version
    @param cluster_name: Cluster name
    @return: An ApiParcel object
    """
    path = PARCEL_PATH % (cluster_name, product, version)
    return _get_parcel(resource_root, path)
def get_all_parcels(resource_root, cluster_name="default", view=None):
    """Get all parcels of a cluster.

    @param resource_root: The root Resource object.
    @param cluster_name: Cluster name
    @param view: View to materialize (optional)
    @return: A list of ApiParcel objects.
    @since: API v3
    """
    # Replaced the fragile `view and dict(view=view) or None` and/or idiom
    # with an explicit conditional expression.
    params = dict(view=view) if view else None
    return call(resource_root.get, PARCELS_PATH % (cluster_name,),
                ApiParcel, True, params=params, api_version=3)
def do_bulk_config_update(hostnames):
    """Update the configs of all datanodes, tasktrackers and regionservers
    running on the given hosts.

    :param hostnames: list of hostnames to reconfigure
    """
    api = ApiResource(CM_HOST, username=CM_USER, password=CM_PASSWD)
    for host in collect_hosts(api, hostnames):
        configure_roles_on_host(api, host)
def collect_hosts(api, wanted_hostnames):
    """Return the ApiHost objects for the hosts we want to change config for.

    :param api: API root handle exposing get_all_hosts()
    :param wanted_hostnames: iterable of hostnames to select
    :raises RuntimeError: (after logging) if any requested hostname is not
        known to Cloudera Manager -- fail fast instead of silently skipping.
    """
    all_hosts = api.get_all_hosts(view='full')
    known_hostnames = {h.hostname for h in all_hosts}
    wanted_hostnames = set(wanted_hostnames)
    unknown_hosts = wanted_hostnames.difference(known_hostnames)
    if unknown_hosts:
        msg = "The following hosts are not found in Cloudera Manager. "\
              "Please check for typos:\n%s" % ('\n'.join(unknown_hosts))
        LOG.error(msg)
        raise RuntimeError(msg)
    return [h for h in all_hosts if h.hostname in wanted_hostnames]
def configure_roles_on_host(api, host):
    """Apply the matching per-type config to every datanode, tasktracker and
    regionserver role found on *host*; other role types are left alone.
    """
    conf_by_type = {
        'DATANODE': DATANODE_CONF,
        'TASKTRACKER': TASKTRACKER_CONF,
        'REGIONSERVER': REGIONSERVER_CONF,
    }
    for role_ref in host.roleRefs:
        # Mgmt service/role has no cluster name. Skip over those.
        if role_ref.get('clusterName') is None:
            continue
        # Resolve the actual role object to inspect its type.
        role = api.get_cluster(role_ref['clusterName'])\
            .get_service(role_ref['serviceName'])\
            .get_role(role_ref['roleName'])
        LOG.debug("Evaluating %s (%s)" % (role.name, host.hostname))
        config = conf_by_type.get(role.type)
        if config is None:
            continue
        LOG.info("Configuring %s (%s)" % (role.name, host.hostname))
        role.update_config(config)
def read_host_file(path):
    """Read the host file at *path* and return a list of hostnames.

    Blank lines are skipped and surrounding whitespace is stripped.
    Fixes the original implementation, which used the Python 2-only
    ``file()`` builtin plus ``xreadlines()`` and never closed the handle.
    """
    hostnames = []
    with open(path) as handle:
        for line in handle:
            hostname = line.strip()
            if hostname:
                hostnames.append(hostname)
    return hostnames
def get_commands(self, view=None):
    """Retrieve a list of running global commands.

    @param view: View to materialize ('full' or 'summary')
    @return: A list of running commands.
    """
    # Replaced the fragile `view and dict(view=view) or None` and/or idiom
    # with the explicit conditional expression.
    params = dict(view=view) if view else None
    return self._get("commands", ApiCommand, True, params=params)
def update_license(self, license_text):
    """Install or update the Cloudera Manager license.

    @param license_text: the license in text form
    @return: the new ApiLicense
    """
    # Hand-rolled multipart/form-data body with a fixed boundary.
    body = "\r\n".join((
        '--MULTI_BOUNDARY',
        'Content-Disposition: form-data; name="license"',
        '',
        license_text,
        '--MULTI_BOUNDARY--',
        ''))
    resp = self._get_resource_root().post(
        'cm/license',
        data=body,
        contenttype='multipart/form-data; boundary=MULTI_BOUNDARY')
    return ApiLicense.from_json_dict(resp, self._get_resource_root())
def import_admin_credentials(self, username, password):
    """Import the KDC Account Manager credentials that Cloudera Manager
    needs to create Kerberos principals for CDH services.

    @param username: Account Manager user, full name including the realm.
    @param password: Password for the Account Manager.
    @return: Information about the submitted command.
    @since: API v7
    """
    credentials = dict(username=username, password=password)
    return self._cmd('importAdminCredentials', params=credentials)
def collect_diagnostic_data(self, start_datetime, end_datetime, includeInfoLog=False):
    """Issue the command to collect diagnostic data.

    Deprecated as of CM 4.5 -- use collect_diagnostic_data_45 instead.

    @param start_datetime: The start of the collection period (datetime).
    @param end_datetime: The end of the collection period (datetime).
    @param includeInfoLog: Whether to include INFO level log messages.
    """
    window = dict(
        startTime=start_datetime.isoformat(),
        endTime=end_datetime.isoformat(),
        includeInfoLog=includeInfoLog,
    )
    # Pinned to API v2: the endpoint was deprecated in API v3 (CM 4.5).
    return self._cmd('collectDiagnosticData', data=window, api_version=2)
def collect_diagnostic_data_45(self, end_datetime, bundle_size_bytes, cluster_name=None,
                               roles=None, collect_metrics=False, start_datetime=None):
    """Issue the command to collect diagnostic data.

    With start_datetime set, data covers [start_datetime, end_datetime] and
    collection fails if the bundle exceeds bundle_size_bytes; without it,
    collection walks backwards from end_datetime up to bundle_size_bytes.

    @param end_datetime: The end of the collection period (datetime).
    @param bundle_size_bytes: Target size for the support bundle in bytes.
    @param cluster_name: The cluster to collect, or None for all clusters.
    @param roles: Role ids to restrict log/metric collection to (since v10).
    @param collect_metrics: Collect metrics for charting (since v13).
    @param start_datetime: Start of the collection period (since v13).
    """
    payload = {
        'endTime': end_datetime.isoformat(),
        'bundleSizeBytes': bundle_size_bytes,
        'clusterName': cluster_name
    }
    # Newer-API fields are only sent when the server understands them.
    if self._get_resource_root().version >= 10:
        payload['roles'] = roles
    if self._get_resource_root().version >= 13:
        payload['enableMonitorMetricsCollection'] = collect_metrics
        if start_datetime is not None:
            payload['startTime'] = start_datetime.isoformat()
    return self._cmd('collectDiagnosticData', data=payload)
def create_peer(self, name, url, username, password, peer_type="REPLICATION"):
    """Create a new peer for replication.

    @param name: The name of the peer.
    @param url: The url of the peer.
    @param username: Admin user for the remote side of the peer connection.
    @param password: The password of the admin user.
    @param peer_type: The type of the peer (added in v11).
    @return: The newly created peer.
    @since: API v3
    """
    # Servers older than API v11 do not know about peer types.
    if self._get_resource_root().version < 11:
        peer_type = None
    new_peer = ApiCmPeer(self._get_resource_root(),
                         name=name,
                         url=url,
                         username=username,
                         password=password,
                         type=peer_type)
    return self._post("peers", ApiCmPeer, data=new_peer, api_version=3)
def _get_peer_type_param(self, peer_type):
    """Return a {'type': peer_type} params dict on API >= v11, else None
    (older servers reject the field)."""
    if self._get_resource_root().version < 11:
        return None
    return {'type': peer_type}
def delete_peer(self, name, peer_type="REPLICATION"):
    """Delete a replication peer.

    @param name: The name of the peer.
    @param peer_type: The type of the peer (added in v11).
    @return: The deleted peer.
    @since: API v3
    """
    return self._delete("peers/" + name,
                        ApiCmPeer,
                        params=self._get_peer_type_param(peer_type),
                        api_version=3)
def update_peer(self,
                current_name,
                new_name, new_url, username, password, peer_type="REPLICATION"):
    """Update a replication peer.

    @param current_name: The name of the peer to update.
    @param new_name: The new name for the peer.
    @param new_url: The new url for the peer.
    @param username: Admin user for the remote side of the peer connection.
    @param password: The password of the admin user.
    @param peer_type: The type of the peer (added in v11).
    @return: The updated peer.
    @since: API v3
    """
    # Servers older than API v11 do not know about peer types.
    if self._get_resource_root().version < 11:
        peer_type = None
    replacement = ApiCmPeer(self._get_resource_root(),
                            name=new_name,
                            url=new_url,
                            username=username,
                            password=password,
                            type=peer_type)
    return self._put("peers/" + current_name, ApiCmPeer, data=replacement, api_version=3)
def get_peer(self, name, peer_type="REPLICATION"):
    """Retrieve a replication peer by name.

    @param name: The name of the peer.
    @param peer_type: The type of the peer (added in v11).
    @return: The peer.
    @since: API v3
    """
    return self._get("peers/" + name,
                     ApiCmPeer,
                     params=self._get_peer_type_param(peer_type),
                     api_version=3)
def host_install(self, user_name, host_names, ssh_port=None, password=None,
                 private_key=None, passphrase=None, parallel_install_count=None,
                 cm_repo_url=None, gpg_key_custom_url=None,
                 java_install_strategy=None, unlimited_jce=None):
    """Install Cloudera Manager Agent on a set of hosts.

    The installer connects over SSH and needs root (directly or via
    password-less sudo) to install Cloudera packages.

    @param user_name: Username used to authenticate with the hosts.
    @param host_names: Hostnames (FQDN) or IP addresses to configure.
    @param ssh_port: SSH port; defaults to 22 when unset.
    @param password: Password for the hosts; give either this or a key.
    @param private_key: Private key for the hosts; give either this or a
        password.
    @param passphrase: Passphrase for the private key (optional).
    @param parallel_install_count: Number of simultaneous installations
        (defaults to 10 server-side).
    @param cm_repo_url: Cloudera Manager repository URL to use (optional).
    @param gpg_key_custom_url: Cloudera Manager public GPG key (optional).
    @param java_install_strategy: Added in v8: AUTO (default) or NONE.
    @param unlimited_jce: Added in v8: install unlimited strength JCE
        policy files; defaults to false when unset.
    @return: Information about the submitted command.
    @since: API v6
    """
    optional_fields = (
        ('userName', user_name),
        ('hostNames', host_names),
        ('sshPort', ssh_port),
        ('password', password),
        ('privateKey', private_key),
        ('passphrase', passphrase),
        ('parallelInstallCount', parallel_install_count),
        ('cmRepoUrl', cm_repo_url),
        ('gpgKeyCustomUrl', gpg_key_custom_url),
    )
    host_install_args = {}
    # Falsy values are omitted so the server applies its own defaults.
    for key, value in optional_fields:
        if value:
            host_install_args[key] = value
    # Unlike the fields above, the strategy is tested against None so that
    # falsy-but-meaningful values still reach the server.
    if java_install_strategy is not None:
        host_install_args['javaInstallStrategy'] = java_install_strategy
    if unlimited_jce:
        host_install_args['unlimitedJCE'] = unlimited_jce
    return self._cmd('hostInstall', data=host_install_args)
def import_cluster_template(self, api_cluster_template, add_repositories=False):
    """Create a cluster according to the provided template.

    @param api_cluster_template: cluster template to import
    @param add_repositories: if true, the parcel repositories in the
        cluster template will be added.
    @return: Command handling cluster import
    @since: API v12
    """
    params = dict(addRepositories=add_repositories)
    return self._post("importClusterTemplate", ApiCommand, False,
                      api_cluster_template, params=params, api_version=12)
def do_get_aliases(name):
    """Print every alias registered for the given metric name."""
    for alias in MetricSchemas().get_aliases(name):
        do_print(alias)
def get_supported_types(resource_root, category_name):
    """Look up all supported external account types in a category.

    @param resource_root: The root Resource object.
    @param category_name: The category name
    @return: A list of ApiExternalAccountType objects
    """
    path = EXTERNAL_ACCOUNT_FETCH_PATH % ("supportedTypes", category_name,)
    return call(resource_root.get, path, ApiExternalAccountType, True)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.