sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def serialize(self, obj, fields):
"""
Serializes a single model instance to a Python dict, based on the specified list of fields.
"""
data = {}
remaining_fields = []
for field in fields:
if callable(field): # Callable
data[field.__name__] = field(obj)
elif hasattr(self, field) and callable(getattr(self, field)): # Method on the view
data[field] = getattr(self, field)(obj)
elif hasattr(obj, field): # Callable/property/field on the model
attr = getattr(obj, field)
if isinstance(attr, Model):
data[field] = attr.pk
elif isinstance(attr, Manager):
data[field] = [item['pk'] for item in attr.values('pk')]
elif callable(attr): # Callable on the model
data[field] = attr()
else:
remaining_fields.append(field)
else:
raise AttributeError('Invalid field: %s' % field)
# Add on db fields
serializer = Serializer()
serializer.serialize([obj], fields=list(remaining_fields))
data.update(serializer.getvalue()[0]['fields'])
# Any remaining fields should be properties on the model
remaining_fields = set(remaining_fields) - set(data.keys())
for field in remaining_fields:
data[field] = getattr(obj, field)
return data | Serializes a single model instance to a Python dict, based on the specified list of fields. | entailment |
def json_dumps(self, data, **options):
"""
Wrapper around `json.dumps` that uses a special JSON encoder.
"""
params = {'sort_keys': True, 'indent': 2}
params.update(options)
# This code is based off django's built in JSON serializer
if json.__version__.split('.') >= ['2', '1', '3']:
# Use JS strings to represent Python Decimal instances (ticket #16850)
params.update({'use_decimal': False})
return json.dumps(data, cls=DjangoJSONEncoder, **params) | Wrapper around `json.dumps` that uses a special JSON encoder. | entailment |
def dir(cls, label, children):
"""Return ``FSEntry`` directory object."""
return FSEntry(label=label, children=children, type=u"Directory", use=None) | Return ``FSEntry`` directory object. | entailment |
def from_fptr(cls, label, type_, fptr):
"""Return ``FSEntry`` object."""
return FSEntry(
label=label,
type=type_,
path=fptr.path,
use=fptr.use,
file_uuid=fptr.file_uuid,
derived_from=fptr.derived_from,
checksum=fptr.checksum,
checksumtype=fptr.checksumtype,
) | Return ``FSEntry`` object. | entailment |
def file_id(self):
""" Returns the fptr @FILEID if this is not a Directory. """
if self.type.lower() == "directory":
return None
if self.file_uuid is None:
raise exceptions.MetsError(
"No FILEID: File %s does not have file_uuid set" % self.path
)
if self.is_aip:
return os.path.splitext(os.path.basename(self.path))[0]
return utils.FILE_ID_PREFIX + self.file_uuid | Returns the fptr @FILEID if this is not a Directory. | entailment |
def group_id(self):
"""
Returns the @GROUPID.
If derived_from is set, returns that group_id.
"""
if self.derived_from is not None:
return self.derived_from.group_id()
if self.file_uuid is None:
return None
return utils.GROUP_ID_PREFIX + self.file_uuid | Returns the @GROUPID.
If derived_from is set, returns that group_id. | entailment |
def _add_metadata_element(self, md, subsection, mdtype, mode="mdwrap", **kwargs):
"""
:param md: Value to pass to the MDWrap/MDRef
:param str subsection: Metadata tag to create. See :const:`SubSection.ALLOWED_SUBSECTIONS`
:param str mdtype: Value for mdWrap/mdRef @MDTYPE
:param str mode: 'mdwrap' or 'mdref'
:param str loctype: Required if mode is 'mdref'. LOCTYPE of a mdRef
:param str label: Optional. Label of a mdRef
:param str otherloctype: Optional. OTHERLOCTYPE of a mdRef.
:param str othermdtype: Optional. OTHERMDTYPE of a mdWrap.
"""
# HELP how handle multiple amdSecs?
# When adding *MD which amdSec to add to?
if mode.lower() == "mdwrap":
othermdtype = kwargs.get("othermdtype")
mdsec = MDWrap(md, mdtype, othermdtype)
elif mode.lower() == "mdref":
loctype = kwargs.get("loctype")
label = kwargs.get("label")
otherloctype = kwargs.get("otherloctype")
mdsec = MDRef(md, mdtype, loctype, label, otherloctype)
subsection = SubSection(subsection, mdsec)
if subsection.subsection == "dmdSec":
self.dmdsecs.append(subsection)
else:
try:
amdsec = self.amdsecs[0]
except IndexError:
amdsec = AMDSec()
self.amdsecs.append(amdsec)
amdsec.subsections.append(subsection)
return subsection | :param md: Value to pass to the MDWrap/MDRef
:param str subsection: Metadata tag to create. See :const:`SubSection.ALLOWED_SUBSECTIONS`
:param str mdtype: Value for mdWrap/mdRef @MDTYPE
:param str mode: 'mdwrap' or 'mdref'
:param str loctype: Required if mode is 'mdref'. LOCTYPE of a mdRef
:param str label: Optional. Label of a mdRef
:param str otherloctype: Optional. OTHERLOCTYPE of a mdRef.
:param str othermdtype: Optional. OTHERMDTYPE of a mdWrap. | entailment |
def serialize_md_inst(self, md_inst, md_class):
"""Serialize object ``md_inst`` by transforming it into an
``lxml.etree._ElementTree``. If it already is such, return it. If not,
make sure it is the correct type and return the output of calling
``seriaize()`` on it.
"""
valid_insts = tuple(
chain((etree._ElementTree, etree._Element), six.string_types)
)
if isinstance(md_inst, valid_insts):
return md_inst
if not isinstance(md_inst, md_class):
raise TypeError(
"Instance {!r} must be instance of {!r}".format(md_inst, md_class)
)
return md_inst.serialize() | Serialize object ``md_inst`` by transforming it into an
``lxml.etree._ElementTree``. If it already is such, return it. If not,
make sure it is the correct type and return the output of calling
``seriaize()`` on it. | entailment |
def add_child(self, child):
"""Add a child FSEntry to this FSEntry.
Only FSEntrys with a type of 'directory' can have children.
This does not detect cyclic parent/child relationships, but that will
cause problems.
:param metsrw.fsentry.FSEntry child: FSEntry to add as a child
:return: The newly added child
:raises ValueError: If this FSEntry cannot have children.
:raises ValueError: If the child and the parent are the same
"""
if self.type.lower() != "directory":
raise ValueError("Only directory objects can have children")
if child is self:
raise ValueError("Cannot be a child of itself!")
if child not in self._children:
self._children.append(child)
child.parent = self
return child | Add a child FSEntry to this FSEntry.
Only FSEntrys with a type of 'directory' can have children.
This does not detect cyclic parent/child relationships, but that will
cause problems.
:param metsrw.fsentry.FSEntry child: FSEntry to add as a child
:return: The newly added child
:raises ValueError: If this FSEntry cannot have children.
:raises ValueError: If the child and the parent are the same | entailment |
def remove_child(self, child):
"""
Remove a child from this FSEntry
If `child` is not actually a child of this entry, nothing happens.
:param child: Child to remove
"""
try:
self._children.remove(child)
except ValueError: # Child may not be in list
pass
else:
child.parent = None | Remove a child from this FSEntry
If `child` is not actually a child of this entry, nothing happens.
:param child: Child to remove | entailment |
def serialize_filesec(self):
"""
Return the file Element for this file, appropriate for use in a fileSec.
If this is not an Item or has no use, return None.
:return: fileSec element for this FSEntry
"""
if (
self.type.lower() not in ("item", "archival information package")
or self.use is None
):
return None
el = etree.Element(utils.lxmlns("mets") + "file", ID=self.file_id())
if self.group_id():
el.attrib["GROUPID"] = self.group_id()
if self.admids:
el.set("ADMID", " ".join(self.admids))
if self.checksum and self.checksumtype:
el.attrib["CHECKSUM"] = self.checksum
el.attrib["CHECKSUMTYPE"] = self.checksumtype
if self.path:
flocat = etree.SubElement(el, utils.lxmlns("mets") + "FLocat")
# Setting manually so order is correct
try:
flocat.set(utils.lxmlns("xlink") + "href", utils.urlencode(self.path))
except ValueError:
raise exceptions.SerializeError(
'Value "{}" (for attribute xlink:href) is not a valid'
" URL.".format(self.path)
)
flocat.set("LOCTYPE", "OTHER")
flocat.set("OTHERLOCTYPE", "SYSTEM")
for transform_file in self.transform_files:
transform_file_el = etree.SubElement(
el, utils.lxmlns("mets") + "transformFile"
)
for key, val in transform_file.items():
attribute = "transform{}".format(key).upper()
transform_file_el.attrib[attribute] = str(val)
return el | Return the file Element for this file, appropriate for use in a fileSec.
If this is not an Item or has no use, return None.
:return: fileSec element for this FSEntry | entailment |
def is_empty_dir(self):
"""Returns ``True`` if this fs item is a directory with no children or
a directory with only other empty directories as children.
"""
if self.mets_div_type == "Directory":
children = self._children
if children:
if all(child.is_empty_dir for child in children):
return True
else:
return False
else:
return True
else:
return False | Returns ``True`` if this fs item is a directory with no children or
a directory with only other empty directories as children. | entailment |
def serialize_structmap(self, recurse=True, normative=False):
"""Return the div Element for this file, appropriate for use in a
structMap.
If this FSEntry represents a directory, its children will be
recursively appended to itself. If this FSEntry represents a file, it
will contain a <fptr> element.
:param bool recurse: If true, serialize and apppend all children.
Otherwise, only serialize this element but not any children.
:param bool normative: If true, we are creating a "Normative Directory
Structure" logical structmap, in which case we add div elements for
empty directories and do not add fptr elements for files.
:return: structMap element for this FSEntry
"""
if not self.label:
return None
# Empty directories are not included in the physical structmap.
if self.is_empty_dir and not normative:
return None
el = etree.Element(utils.lxmlns("mets") + "div", TYPE=self.mets_div_type)
el.attrib["LABEL"] = self.label
if (not normative) and self.file_id():
etree.SubElement(el, utils.lxmlns("mets") + "fptr", FILEID=self.file_id())
if self.dmdids:
if (not normative) or (normative and self.is_empty_dir):
el.set("DMDID", " ".join(self.dmdids))
if recurse and self._children:
for child in self._children:
child_el = child.serialize_structmap(
recurse=recurse, normative=normative
)
if child_el is not None:
el.append(child_el)
return el | Return the div Element for this file, appropriate for use in a
structMap.
If this FSEntry represents a directory, its children will be
recursively appended to itself. If this FSEntry represents a file, it
will contain a <fptr> element.
:param bool recurse: If true, serialize and apppend all children.
Otherwise, only serialize this element but not any children.
:param bool normative: If true, we are creating a "Normative Directory
Structure" logical structmap, in which case we add div elements for
empty directories and do not add fptr elements for files.
:return: structMap element for this FSEntry | entailment |
def check_secret(self, secret):
"""Checks if the secret string used in the authentication attempt
matches the "known" secret string. Some mechanisms will override this
method to control how this comparison is made.
Args:
secret: The secret string to compare against what was used in the
authentication attempt.
Returns:
True if the given secret matches the authentication attempt.
"""
try:
return hmac.compare_digest(secret, self.secret)
except AttributeError: # pragma: no cover
return secret == self.secret | Checks if the secret string used in the authentication attempt
matches the "known" secret string. Some mechanisms will override this
method to control how this comparison is made.
Args:
secret: The secret string to compare against what was used in the
authentication attempt.
Returns:
True if the given secret matches the authentication attempt. | entailment |
def secure(cls):
"""Uses only authentication mechanisms that are secure for use in
non-encrypted sessions.
Returns:
A new :class:`SASLAuth` object.
"""
builtin_mechs = cls._get_builtin_mechanisms()
secure_mechs = [mech for _, mech in builtin_mechs.items()
if not mech.insecure and mech.priority is not None]
return SASLAuth(secure_mechs) | Uses only authentication mechanisms that are secure for use in
non-encrypted sessions.
Returns:
A new :class:`SASLAuth` object. | entailment |
def plaintext(cls):
"""Uses only authentication mechanisms that provide the credentials in
un-hashed form, typically meaning
:attr:`~pysasl.AuthenticationCredentials.has_secret` is True.
Returns:
A new :class:`SASLAuth` object.
"""
builtin_mechs = cls._get_builtin_mechanisms()
plaintext_mechs = [mech for _, mech in builtin_mechs.items()
if mech.insecure and mech.priority is not None]
return SASLAuth(plaintext_mechs) | Uses only authentication mechanisms that provide the credentials in
un-hashed form, typically meaning
:attr:`~pysasl.AuthenticationCredentials.has_secret` is True.
Returns:
A new :class:`SASLAuth` object. | entailment |
def server_mechanisms(self):
"""List of available :class:`ServerMechanism` objects."""
return [mech for mech in self.mechs.values()
if isinstance(mech, ServerMechanism)] | List of available :class:`ServerMechanism` objects. | entailment |
def client_mechanisms(self):
"""List of available :class:`ClientMechanism` objects."""
return [mech for mech in self.mechs.values()
if isinstance(mech, ClientMechanism)] | List of available :class:`ClientMechanism` objects. | entailment |
def get_server(self, name):
"""Like :meth:`.get`, but only mechanisms inheriting
:class:`ServerMechanism` will be returned.
Args:
name: The SASL mechanism name.
Returns:
The mechanism object or ``None``
"""
mech = self.get(name)
return mech if isinstance(mech, ServerMechanism) else None | Like :meth:`.get`, but only mechanisms inheriting
:class:`ServerMechanism` will be returned.
Args:
name: The SASL mechanism name.
Returns:
The mechanism object or ``None`` | entailment |
def get_client(self, name):
"""Like :meth:`.get`, but only mechanisms inheriting
:class:`ClientMechanism` will be returned.
Args:
name: The SASL mechanism name.
Returns:
The mechanism object or ``None``
"""
mech = self.get(name)
return mech if isinstance(mech, ClientMechanism) else None | Like :meth:`.get`, but only mechanisms inheriting
:class:`ClientMechanism` will be returned.
Args:
name: The SASL mechanism name.
Returns:
The mechanism object or ``None`` | entailment |
def read(cls, source):
"""Read ``source`` into a ``METSDocument`` instance. This is an
instance constructor. The ``source`` may be a path to a METS file, a
file-like object, or a string of XML.
"""
if hasattr(source, "read"):
return cls.fromfile(source)
if os.path.exists(source):
return cls.fromfile(source)
if isinstance(source, six.string_types):
source = source.encode("utf8")
return cls.fromstring(source) | Read ``source`` into a ``METSDocument`` instance. This is an
instance constructor. The ``source`` may be a path to a METS file, a
file-like object, or a string of XML. | entailment |
def _collect_all_files(self, files=None):
"""
Collect all FSEntrys into a set, including all descendants.
:param list files: List of :class:`FSEntry` to traverse.
:returns: Set of FSEntry
"""
if files is None:
files = self._root_elements
collected = set()
for entry in files:
collected.add(entry)
collected.update(self._collect_all_files(entry.children))
return collected | Collect all FSEntrys into a set, including all descendants.
:param list files: List of :class:`FSEntry` to traverse.
:returns: Set of FSEntry | entailment |
def get_file(self, **kwargs):
"""
Return the FSEntry that matches parameters.
:param str file_uuid: UUID of the target FSEntry.
:param str label: structMap LABEL of the target FSEntry.
:param str type: structMap TYPE of the target FSEntry.
:returns: :class:`FSEntry` that matches parameters, or None.
"""
# TODO put this in a sqlite DB so it can be queried efficiently
# TODO handle multiple matches (with DB?)
# TODO check that kwargs are actual attrs
for entry in self.all_files():
if all(value == getattr(entry, key) for key, value in kwargs.items()):
return entry
return None | Return the FSEntry that matches parameters.
:param str file_uuid: UUID of the target FSEntry.
:param str label: structMap LABEL of the target FSEntry.
:param str type: structMap TYPE of the target FSEntry.
:returns: :class:`FSEntry` that matches parameters, or None. | entailment |
def append_file(self, fs_entry):
"""
Adds an FSEntry object to this METS document's tree. Any of the
represented object's children will also be added to the document.
A given FSEntry object can only be included in a document once,
and any attempt to add an object the second time will be ignored.
:param metsrw.mets.FSEntry fs_entry: FSEntry to add to the METS document
"""
if fs_entry in self._root_elements:
return
self._root_elements.append(fs_entry)
# Reset file lists so they get regenerated with the new files(s)
self._all_files = None | Adds an FSEntry object to this METS document's tree. Any of the
represented object's children will also be added to the document.
A given FSEntry object can only be included in a document once,
and any attempt to add an object the second time will be ignored.
:param metsrw.mets.FSEntry fs_entry: FSEntry to add to the METS document | entailment |
def remove_entry(self, fs_entry):
"""Removes an FSEntry object from this METS document.
Any children of this FSEntry will also be removed. This will be removed
as a child of it's parent, if any.
:param metsrw.mets.FSEntry fs_entry: FSEntry to remove from the METS
"""
try:
self._root_elements.remove(fs_entry)
except ValueError: # fs_entry may not be in the root elements
pass
if fs_entry.parent:
fs_entry.parent.remove_child(fs_entry)
# Reset file lists so they get regenerated without the removed file(s)
self._all_files = None | Removes an FSEntry object from this METS document.
Any children of this FSEntry will also be removed. This will be removed
as a child of it's parent, if any.
:param metsrw.mets.FSEntry fs_entry: FSEntry to remove from the METS | entailment |
def _document_root(self, fully_qualified=True):
"""
Return the mets Element for the document root.
"""
nsmap = {"xsi": utils.NAMESPACES["xsi"], "xlink": utils.NAMESPACES["xlink"]}
if fully_qualified:
nsmap["mets"] = utils.NAMESPACES["mets"]
else:
nsmap[None] = utils.NAMESPACES["mets"]
attrib = {
"{}schemaLocation".format(utils.lxmlns("xsi")): utils.SCHEMA_LOCATIONS
}
if self.objid:
attrib["OBJID"] = self.objid
return etree.Element(utils.lxmlns("mets") + "mets", nsmap=nsmap, attrib=attrib) | Return the mets Element for the document root. | entailment |
def _mets_header(self, now):
"""
Return the metsHdr Element.
"""
header_tag = etree.QName(utils.NAMESPACES[u"mets"], u"metsHdr")
header_attrs = {}
if self.createdate is None:
header_attrs[u"CREATEDATE"] = now
else:
header_attrs[u"CREATEDATE"] = self.createdate
header_attrs[u"LASTMODDATE"] = now
header_element = etree.Element(header_tag, **header_attrs)
for agent in self.agents:
header_element.append(agent.serialize())
for alternate_id in self.alternate_ids:
header_element.append(alternate_id.serialize())
return header_element | Return the metsHdr Element. | entailment |
def _collect_mdsec_elements(files):
"""
Return all dmdSec and amdSec classes associated with the files.
Returns all dmdSecs, then all amdSecs, so they only need to be
serialized before being appended to the METS document.
:param List files: List of :class:`FSEntry` to collect MDSecs for.
:returns: List of AMDSecs and SubSections
"""
dmdsecs = []
amdsecs = []
for f in files:
for d in f.dmdsecs:
dmdsecs.append(d)
for a in f.amdsecs:
amdsecs.append(a)
dmdsecs.sort(key=lambda x: x.id_string)
amdsecs.sort(key=lambda x: x.id_string)
return dmdsecs + amdsecs | Return all dmdSec and amdSec classes associated with the files.
Returns all dmdSecs, then all amdSecs, so they only need to be
serialized before being appended to the METS document.
:param List files: List of :class:`FSEntry` to collect MDSecs for.
:returns: List of AMDSecs and SubSections | entailment |
def _structmap(self):
"""
Returns structMap element for all files.
"""
structmap = etree.Element(
utils.lxmlns("mets") + "structMap",
TYPE="physical",
# TODO Add ability for multiple structMaps
ID="structMap_1",
# TODO don't hardcode this
LABEL="Archivematica default",
)
for item in self._root_elements:
child = item.serialize_structmap(recurse=True)
if child is not None:
structmap.append(child)
return structmap | Returns structMap element for all files. | entailment |
def _filesec(self, files=None):
"""
Returns fileSec Element containing all files grouped by use.
"""
if files is None:
files = self.all_files()
filesec = etree.Element(utils.lxmlns("mets") + "fileSec")
filegrps = {}
for file_ in files:
if file_.type.lower() not in ("item", AIP_ENTRY_TYPE):
continue
# Get fileGrp, or create if not exist
filegrp = filegrps.get(file_.use)
if filegrp is None:
filegrp = etree.SubElement(
filesec, utils.lxmlns("mets") + "fileGrp", USE=file_.use
)
filegrps[file_.use] = filegrp
file_el = file_.serialize_filesec()
if file_el is not None:
filegrp.append(file_el)
return filesec | Returns fileSec Element containing all files grouped by use. | entailment |
def serialize(self, fully_qualified=True):
"""
Returns this document serialized to an xml Element.
:return: Element for this document
"""
now = datetime.utcnow().replace(microsecond=0).isoformat("T")
files = self.all_files()
mdsecs = self._collect_mdsec_elements(files)
root = self._document_root(fully_qualified=fully_qualified)
root.append(self._mets_header(now=now))
for section in mdsecs:
root.append(section.serialize(now=now))
root.append(self._filesec(files))
root.append(self._structmap())
root.append(self._normative_structmap())
return root | Returns this document serialized to an xml Element.
:return: Element for this document | entailment |
def tostring(self, fully_qualified=True, pretty_print=True, encoding="UTF-8"):
"""
Serialize and return a string of this METS document.
To write to file, see :meth:`write`.
The default encoding is ``UTF-8``. This method will return a unicode
string when ``encoding`` is set to ``unicode``.
:return: String of this document
"""
root = self.serialize(fully_qualified=fully_qualified)
kwargs = {"pretty_print": pretty_print, "encoding": encoding}
if encoding != "unicode":
kwargs["xml_declaration"] = True
return etree.tostring(root, **kwargs) | Serialize and return a string of this METS document.
To write to file, see :meth:`write`.
The default encoding is ``UTF-8``. This method will return a unicode
string when ``encoding`` is set to ``unicode``.
:return: String of this document | entailment |
def write(
self, filepath, fully_qualified=True, pretty_print=False, encoding="UTF-8"
):
"""Serialize and write this METS document to `filepath`.
The default encoding is ``UTF-8``. This method will return a unicode
string when ``encoding`` is set to ``unicode``.
:param str filepath: Path to write the METS document to
"""
root = self.serialize(fully_qualified=fully_qualified)
tree = root.getroottree()
kwargs = {"pretty_print": pretty_print, "encoding": encoding}
if encoding != "unicode":
kwargs["xml_declaration"] = True
tree.write(filepath, **kwargs) | Serialize and write this METS document to `filepath`.
The default encoding is ``UTF-8``. This method will return a unicode
string when ``encoding`` is set to ``unicode``.
:param str filepath: Path to write the METS document to | entailment |
def _parse_tree_structmap(self, tree, parent_elem, normative_parent_elem=None):
"""Recursively parse all the children of parent_elem, including amdSecs
and dmdSecs.
:param lxml._ElementTree tree: encodes the entire METS file.
:param lxml._Element parent_elem: the element whose children we are
parsing.
:param lxml._Element normative_parent_elem: the normative
counterpart of ``parent_elem`` taken from the logical structMap
labelled "Normative Directory Structure".
"""
siblings = []
el_to_normative = self._get_el_to_normative(parent_elem, normative_parent_elem)
for elem, normative_elem in el_to_normative.items():
if elem.tag != utils.lxmlns("mets") + "div":
continue # Only handle divs, not fptrs
entry_type = elem.get("TYPE")
label = elem.get("LABEL")
fptr_elems = elem.findall("mets:fptr", namespaces=utils.NAMESPACES)
# Directories are walked recursively. Additionally, they may
# contain direct fptrs.
if entry_type.lower() == "directory":
children = self._parse_tree_structmap(
tree, elem, normative_parent_elem=normative_elem
)
fs_entry = fsentry.FSEntry.dir(label, children)
self._add_dmdsecs_to_fs_entry(elem, fs_entry, tree)
siblings.append(fs_entry)
for fptr_elem in fptr_elems:
fptr = self._analyze_fptr(fptr_elem, tree, entry_type)
fs_entry = fsentry.FSEntry.from_fptr(
label=None, type_=u"Item", fptr=fptr
)
self._add_amdsecs_to_fs_entry(fptr.amdids, fs_entry, tree)
siblings.append(fs_entry)
continue
# Other types, e.g.: items, aips...
if not len(fptr_elems):
continue
fptr = self._analyze_fptr(fptr_elems[0], tree, entry_type)
fs_entry = fsentry.FSEntry.from_fptr(label, entry_type, fptr)
self._add_dmdsecs_to_fs_entry(elem, fs_entry, tree)
self._add_amdsecs_to_fs_entry(fptr.amdids, fs_entry, tree)
siblings.append(fs_entry)
return siblings | Recursively parse all the children of parent_elem, including amdSecs
and dmdSecs.
:param lxml._ElementTree tree: encodes the entire METS file.
:param lxml._Element parent_elem: the element whose children we are
parsing.
:param lxml._Element normative_parent_elem: the normative
counterpart of ``parent_elem`` taken from the logical structMap
labelled "Normative Directory Structure". | entailment |
def _get_el_to_normative(parent_elem, normative_parent_elem):
"""Return ordered dict ``el_to_normative``, which maps children of
``parent_elem`` to their normative counterparts in the children of
``normative_parent_elem`` or to ``None`` if there is no normative
parent. If there is a normative div element with no non-normative
counterpart, that element is treated as a key with value ``None``.
This allows us to create ``FSEntry`` instances for empty directory div
elements, which are only documented in a normative logical structmap.
"""
el_to_normative = OrderedDict()
if normative_parent_elem is None:
for el in parent_elem:
el_to_normative[el] = None
else:
for norm_el in normative_parent_elem:
matches = [
el
for el in parent_elem
if el.get("TYPE") == norm_el.get("TYPE")
and el.get("LABEL") == norm_el.get("LABEL")
]
if matches:
el_to_normative[matches[0]] = norm_el
else:
el_to_normative[norm_el] = None
return el_to_normative | Return ordered dict ``el_to_normative``, which maps children of
``parent_elem`` to their normative counterparts in the children of
``normative_parent_elem`` or to ``None`` if there is no normative
parent. If there is a normative div element with no non-normative
counterpart, that element is treated as a key with value ``None``.
This allows us to create ``FSEntry`` instances for empty directory div
elements, which are only documented in a normative logical structmap. | entailment |
def fromfile(cls, path):
"""
Creates a METS by parsing a file.
:param str path: Path to a METS document.
"""
parser = etree.XMLParser(remove_blank_text=True)
return cls.fromtree(etree.parse(path, parser=parser)) | Creates a METS by parsing a file.
:param str path: Path to a METS document. | entailment |
def fromstring(cls, string):
"""
Create a METS by parsing a string.
:param str string: String containing a METS document.
"""
parser = etree.XMLParser(remove_blank_text=True)
root = etree.fromstring(string, parser)
tree = root.getroottree()
return cls.fromtree(tree) | Create a METS by parsing a string.
:param str string: String containing a METS document. | entailment |
def fromtree(cls, tree):
"""
Create a METS from an ElementTree or Element.
:param ElementTree tree: ElementTree to build a METS document from.
"""
mets = cls()
mets.tree = tree
mets._parse_tree(tree)
return mets | Create a METS from an ElementTree or Element.
:param ElementTree tree: ElementTree to build a METS document from. | entailment |
def get_schematron(sct_path):
"""Return an lxml ``isoschematron.Schematron()`` instance using the
schematron file at ``sct_path``.
"""
sct_path = _get_file_path(sct_path)
parser = etree.XMLParser(remove_blank_text=True)
sct_doc = etree.parse(sct_path, parser=parser)
return isoschematron.Schematron(sct_doc, store_report=True) | Return an lxml ``isoschematron.Schematron()`` instance using the
schematron file at ``sct_path``. | entailment |
def validate(mets_doc, xmlschema=METS_XSD_PATH, schematron=AM_SCT_PATH):
"""Validate a METS file using both an XMLSchema (.xsd) schema and a
schematron schema, the latter of which typically places additional
constraints on what a METS file can look like.
"""
is_xsd_valid, xsd_error_log = xsd_validate(mets_doc, xmlschema=xmlschema)
is_sct_valid, sct_report = schematron_validate(mets_doc, schematron=schematron)
valid = is_xsd_valid and is_sct_valid
report = {
"is_xsd_valid": is_xsd_valid,
"is_sct_valid": is_sct_valid,
"xsd_error_log": xsd_error_log,
"sct_report": sct_report,
}
report["report"] = report_string(report)
return valid, report | Validate a METS file using both an XMLSchema (.xsd) schema and a
schematron schema, the latter of which typically places additional
constraints on what a METS file can look like. | entailment |
def get_xmlschema(xmlschema, mets_doc):
"""Return a ``class::lxml.etree.XMLSchema`` instance given the path to the
XMLSchema (.xsd) file in ``xmlschema`` and the
``class::lxml.etree._ElementTree`` instance ``mets_doc`` representing the
METS file being parsed. The complication here is that the METS file to be
validated via the .xsd file may reference additional schemata via
``xsi:schemaLocation`` attributes. We have to find all of these and import
them from within the returned XMLSchema.
For the solution that this is based on, see:
http://code.activestate.com/recipes/578503-validate-xml-with-schemalocation/
For other descriptions of the problem, see:
- https://groups.google.com/forum/#!topic/archivematica/UBS1ay-g_tE
- https://stackoverflow.com/questions/26712645/xml-type-definition-is-absent
- https://stackoverflow.com/questions/2979824/in-document-schema-declarations-and-lxml
"""
xsd_path = _get_file_path(xmlschema)
xmlschema = etree.parse(xsd_path)
schema_locations = set(
mets_doc.xpath("//*/@xsi:schemaLocation", namespaces=NAMESPACES)
)
for schema_location in schema_locations:
namespaces_locations = schema_location.strip().split()
for namespace, location in zip(*[iter(namespaces_locations)] * 2):
if namespace == NAMESPACES["mets"]:
continue
xs_import = etree.Element("{http://www.w3.org/2001/XMLSchema}import")
xs_import.attrib["namespace"] = namespace
xs_import.attrib["schemaLocation"] = location
xmlschema.getroot().insert(0, xs_import)
return etree.XMLSchema(xmlschema) | Return a ``class::lxml.etree.XMLSchema`` instance given the path to the
XMLSchema (.xsd) file in ``xmlschema`` and the
``class::lxml.etree._ElementTree`` instance ``mets_doc`` representing the
METS file being parsed. The complication here is that the METS file to be
validated via the .xsd file may reference additional schemata via
``xsi:schemaLocation`` attributes. We have to find all of these and import
them from within the returned XMLSchema.
For the solution that this is based on, see:
http://code.activestate.com/recipes/578503-validate-xml-with-schemalocation/
For other descriptions of the problem, see:
- https://groups.google.com/forum/#!topic/archivematica/UBS1ay-g_tE
- https://stackoverflow.com/questions/26712645/xml-type-definition-is-absent
- https://stackoverflow.com/questions/2979824/in-document-schema-declarations-and-lxml | entailment |
def schematron_validate(mets_doc, schematron=AM_SCT_PATH):
"""Validate a METS file using a schematron schema. Return a boolean
indicating validity and a report as an ``lxml.ElementTree`` instance.
"""
if isinstance(schematron, six.string_types):
schematron = get_schematron(schematron)
is_valid = schematron.validate(mets_doc)
report = schematron.validation_report
return is_valid, report | Validate a METS file using a schematron schema. Return a boolean
indicating validity and a report as an ``lxml.ElementTree`` instance. | entailment |
def sct_report_string(report):
"""Return a human-readable string representation of the error report
returned by lxml's schematron validator.
"""
ret = []
namespaces = {"svrl": "http://purl.oclc.org/dsdl/svrl"}
for index, failed_assert_el in enumerate(
report.findall("svrl:failed-assert", namespaces=namespaces)
):
ret.append(
"{}. {}".format(
index + 1,
failed_assert_el.find("svrl:text", namespaces=namespaces).text,
)
)
ret.append(" test: {}".format(failed_assert_el.attrib["test"]))
ret.append(" location: {}".format(failed_assert_el.attrib["location"]))
ret.append("\n")
return "\n".join(ret) | Return a human-readable string representation of the error report
returned by lxml's schematron validator. | entailment |
def xsd_error_log_string(xsd_error_log):
"""Return a human-readable string representation of the error log
returned by lxml's XMLSchema validator.
"""
ret = []
for error in xsd_error_log:
ret.append(
"ERROR ON LINE {}: {}".format(error.line, error.message.encode("utf-8"))
)
return "\n".join(ret) | Return a human-readable string representation of the error log
returned by lxml's XMLSchema validator. | entailment |
def validate_arguments(self, view_class, kwargs):
"""
view_class : View Class used to render content popup dialog
view_class must be subclass of django.views.generic.View
"""
# Check view_class inherit from django View
if not issubclass(view_class, View):
raise PopupViewIsNotSubclassView()
self.view_class_name = view_class.__name__
self.popup_dialog_title = kwargs.pop("popup_dialog_title", _("Popup Dialog: Select value"))
self.callback_data = kwargs.pop("callback_data", {})
if not isinstance(self.callback_data, dict):
raise AttributeError("callback_data argument must be a dictionary")
try:
self.callback_data = urllib.urlencode(self.callback_data)
except AttributeError:
self.callback_data = urllib.parse.urlencode(self.callback_data) | view_class : View Class used to render content popup dialog
view_class must be subclass of django.views.generic.View | entailment |
def get_view_url(self):
"""Return url for ajax to view for render dialog content"""
url = reverse("django_popup_view_field:get_popup_view", args=(self.view_class_name,))
return "{url}?{cd}".format(
url=url,
cd=self.callback_data
) | Return url for ajax to view for render dialog content | entailment |
def on_timer(self):
"""Executes flush(). Ignores any errors to make sure one exception
doesn't halt the whole flushing process.
"""
try:
self.flush()
except Exception as e:
log.exception('Error while flushing: %s', e)
self._set_timer() | Executes flush(). Ignores any errors to make sure one exception
doesn't halt the whole flushing process. | entailment |
def timing_since(self, stat, start, sample_rate=1):
"""
Log timing information as the number of microseconds since the provided time float
>>> start = time.time()
>>> # do stuff
>>> statsd_client.timing_since('some.time', start)
"""
self.timing(stat, int((time.time() - start) * 1000000), sample_rate) | Log timing information as the number of microseconds since the provided time float
>>> start = time.time()
>>> # do stuff
>>> statsd_client.timing_since('some.time', start) | entailment |
def timing(self, stat, time, sample_rate=1):
"""
Log timing information for a single stat
>>> statsd_client.timing('some.time',500)
"""
stats = {stat: "%f|ms" % time}
self.send(stats, sample_rate) | Log timing information for a single stat
>>> statsd_client.timing('some.time',500) | entailment |
def gauge(self, stat, value, sample_rate=1):
"""
Log gauge information for a single stat
>>> statsd_client.gauge('some.gauge',42)
"""
stats = {stat: "%f|g" % value}
self.send(stats, sample_rate) | Log gauge information for a single stat
>>> statsd_client.gauge('some.gauge',42) | entailment |
def update_stats(self, stats, delta, sample_rate=1):
"""
Updates one or more stats counters by arbitrary amounts
>>> statsd_client.update_stats('some.int',10)
"""
if not isinstance(stats, list):
stats = [stats]
data = dict((stat, "%s|c" % delta) for stat in stats)
self.send(data, sample_rate) | Updates one or more stats counters by arbitrary amounts
>>> statsd_client.update_stats('some.int',10) | entailment |
def send(self, data, sample_rate=1):
"""
Squirt the metrics over UDP
"""
if self.prefix:
data = dict((".".join((self.prefix, stat)), value) for stat, value in data.items())
if sample_rate < 1:
if random.random() > sample_rate:
return
sampled_data = dict((stat, "%s|@%s" % (value, sample_rate))
for stat, value in data.items())
else:
sampled_data = data
try:
[self.udp_sock.sendto(bytes(bytearray("%s:%s" % (stat, value),
"utf-8")), self.addr)
for stat, value in sampled_data.items()]
except:
self.log.exception("unexpected error") | Squirt the metrics over UDP | entailment |
def start(self, *args, **kw):
"""Start the daemon."""
pid = None
if os.path.exists(self.pidfile):
with open(self.pidfile, 'r') as fp:
pid = int(fp.read().strip())
if pid:
msg = 'pidfile (%s) exists. Daemon already running?\n'
sys.stderr.write(msg % self.pidfile)
sys.exit(1)
self.daemonize()
self.run(*args, **kw) | Start the daemon. | entailment |
def stop(self):
"""Stop the daemon."""
pid = None
if os.path.exists(self.pidfile):
with open(self.pidfile, 'r') as fp:
pid = int(fp.read().strip())
if not pid:
msg = 'pidfile (%s) does not exist. Daemon not running?\n'
sys.stderr.write(msg % self.pidfile)
return
try:
while 1:
os.kill(pid, SIGTERM)
time.sleep(0.1)
except OSError as e:
e = str(e)
if e.find('No such process') > 0:
if os.path.exists(self.pidfile):
os.remove(self.pidfile)
else:
print(e)
sys.exit(1) | Stop the daemon. | entailment |
def restart(self, *args, **kw):
"""Restart the daemon."""
self.stop()
self.start(*args, **kw) | Restart the daemon. | entailment |
def get_auth_providers(self, netloc):
"""BIG-IQ specific query for auth providers
BIG-IP doesn't really need this because BIG-IP's multiple auth providers
seem to handle fallthrough just fine. BIG-IQ on the other hand, needs to
have its auth provider specified if you're using one of the non-default
ones.
:param netloc:
:return:
"""
url = "https://%s/info/system?null" % (netloc)
response = requests.get(url, verify=self.verify)
if not response.ok or not hasattr(response, "json"):
error_message = '%s Unexpected Error: %s for uri: %s\nText: %r' %\
(response.status_code,
response.reason,
response.url,
response.text)
raise iControlUnexpectedHTTPError(error_message, response=response)
respJson = response.json()
result = respJson['providers']
return result | BIG-IQ specific query for auth providers
BIG-IP doesn't really need this because BIG-IP's multiple auth providers
seem to handle fallthrough just fine. BIG-IQ on the other hand, needs to
have its auth provider specified if you're using one of the non-default
ones.
:param netloc:
:return: | entailment |
def get_new_token(self, netloc):
"""Get a new token from BIG-IP and store it internally.
Throws relevant exception if it fails to get a new token.
This method will be called automatically if a request is attempted
but there is no authentication token, or the authentication token
is expired. It is usually not necessary for users to call it, but
it can be called if it is known that the authentication token has
been invalidated by other means.
"""
login_body = {
'username': self.username,
'password': self.password,
}
if self.auth_provider:
if self.auth_provider == 'local':
login_body['loginProviderName'] = 'local'
elif self.auth_provider == 'tmos':
login_body['loginProviderName'] = 'tmos'
elif self.auth_provider not in ['none', 'default']:
providers = self.get_auth_providers(netloc)
for provider in providers:
if self.auth_provider in provider['link']:
login_body['loginProviderName'] = provider['name']
break
elif self.auth_provider == provider['name']:
login_body['loginProviderName'] = provider['name']
break
else:
if self.login_provider_name == 'tmos':
login_body['loginProviderName'] = self.login_provider_name
login_url = "https://%s/mgmt/shared/authn/login" % (netloc)
response = requests.post(
login_url,
json=login_body,
verify=self.verify,
auth=HTTPBasicAuth(self.username, self.password)
)
self.attempts += 1
if not response.ok or not hasattr(response, "json"):
error_message = '%s Unexpected Error: %s for uri: %s\nText: %r' %\
(response.status_code,
response.reason,
response.url,
response.text)
raise iControlUnexpectedHTTPError(error_message,
response=response)
respJson = response.json()
token = self._get_token_from_response(respJson)
created_bigip = self._get_last_update_micros(token)
try:
expiration_bigip = self._get_expiration_micros(
token, created_bigip
)
except (KeyError, ValueError):
error_message = \
'%s Unparseable Response: %s for uri: %s\nText: %r' %\
(response.status_code,
response.reason,
response.url,
response.text)
raise iControlUnexpectedHTTPError(error_message,
response=response)
try:
self.expiration = self._get_token_expiration_time(
created_bigip, expiration_bigip
)
except iControlUnexpectedHTTPError:
error_message = \
'%s Token already expired: %s for uri: %s\nText: %r' % \
(response.status_code,
time.ctime(expiration_bigip),
response.url,
response.text)
raise iControlUnexpectedHTTPError(error_message,
response=response) | Get a new token from BIG-IP and store it internally.
Throws relevant exception if it fails to get a new token.
This method will be called automatically if a request is attempted
but there is no authentication token, or the authentication token
is expired. It is usually not necessary for users to call it, but
it can be called if it is known that the authentication token has
been invalidated by other means. | entailment |
def generate_bigip_uri(base_uri, partition, name, sub_path, suffix, **kwargs):
'''(str, str, str) --> str
This function checks the supplied elements to see if each conforms to
the specification for the appropriate part of the URI. These validations
are conducted by the helper function _validate_uri_parts.
After validation the parts are assembled into a valid BigIP REST URI
string which is then submitted with appropriate metadata.
>>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', \
'CUSTOMER1', 'nat52', params={'a':1})
'https://0.0.0.0/mgmt/tm/ltm/nat/~CUSTOMER1~nat52'
>>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', \
'CUSTOMER1', 'nat52', params={'a':1}, suffix='/wacky')
'https://0.0.0.0/mgmt/tm/ltm/nat/~CUSTOMER1~nat52/wacky'
>>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', '', '', \
params={'a':1}, suffix='/thwocky')
'https://0.0.0.0/mgmt/tm/ltm/nat/thwocky'
::Warning: There are cases where '/' and '~' characters are valid in the
object name or subPath. This is indicated by passing the 'transform_name' or 'transform_subpath' boolean
respectively as True. By default this is set to False.
'''
_validate_uri_parts(base_uri, partition, name, sub_path, suffix,
**kwargs)
if kwargs.get('transform_name', False):
if name != '':
name = name.replace('/', '~')
if kwargs.get('transform_subpath', False):
if sub_path != '':
sub_path = sub_path.replace('/', '~')
if partition != '':
partition = '~' + partition
else:
if sub_path:
msg = 'When giving the subPath component include partition ' \
'as well.'
raise InvalidURIComponentPart(msg)
if sub_path != '' and partition != '':
sub_path = '~' + sub_path
if name != '' and partition != '':
name = '~' + name
tilded_partition_and_instance = partition + sub_path + name
if suffix and not tilded_partition_and_instance:
suffix = suffix.lstrip('/')
REST_uri = base_uri + tilded_partition_and_instance + suffix
return REST_uri | (str, str, str) --> str
This function checks the supplied elements to see if each conforms to
the specification for the appropriate part of the URI. These validations
are conducted by the helper function _validate_uri_parts.
After validation the parts are assembled into a valid BigIP REST URI
string which is then submitted with appropriate metadata.
>>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', \
'CUSTOMER1', 'nat52', params={'a':1})
'https://0.0.0.0/mgmt/tm/ltm/nat/~CUSTOMER1~nat52'
>>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', \
'CUSTOMER1', 'nat52', params={'a':1}, suffix='/wacky')
'https://0.0.0.0/mgmt/tm/ltm/nat/~CUSTOMER1~nat52/wacky'
>>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', '', '', \
params={'a':1}, suffix='/thwocky')
'https://0.0.0.0/mgmt/tm/ltm/nat/thwocky'
::Warning: There are cases where '/' and '~' characters are valid in the
object name or subPath. This is indicated by passing the 'transform_name' or 'transform_subpath' boolean
respectively as True. By default this is set to False. | entailment |
def decorate_HTTP_verb_method(method):
"""Prepare and Post-Process HTTP VERB method for BigIP-RESTServer request.
This function decorates all of the HTTP VERB methods in the
iControlRESTSession class. It provides the core logic for this module.
If necessary it validates and assembles a uri from parts with a call to
`generate_bigip_uri`.
Then it:
1. pre-logs the details of the request
2. submits the request
3. logs the response, included expected status codes
4. raises exceptions for unexpected status codes. (i.e. not doc'd as BigIP
RESTServer codes.)
"""
@functools.wraps(method)
def wrapper(self, RIC_base_uri, **kwargs):
partition = kwargs.pop('partition', '')
sub_path = kwargs.pop('subPath', '')
suffix = kwargs.pop('suffix', '')
identifier, kwargs = _unique_resource_identifier_from_kwargs(**kwargs)
uri_as_parts = kwargs.pop('uri_as_parts', False)
transform_name = kwargs.pop('transform_name', False)
transform_subpath = kwargs.pop('transform_subpath', False)
if uri_as_parts:
REST_uri = generate_bigip_uri(RIC_base_uri, partition, identifier,
sub_path, suffix,
transform_name=transform_name,
transform_subpath=transform_subpath,
**kwargs)
else:
REST_uri = RIC_base_uri
pre_message = "%s WITH uri: %s AND suffix: %s AND kwargs: %s" %\
(method.__name__, REST_uri, suffix, kwargs)
logger = logging.getLogger(__name__)
logger.debug(pre_message)
response = method(self, REST_uri, **kwargs)
post_message =\
"RESPONSE::STATUS: %s Content-Type: %s Content-Encoding:"\
" %s\nText: %r" % (response.status_code,
response.headers.get('Content-Type', None),
response.headers.get('Content-Encoding', None),
response.text)
logger.debug(post_message)
if response.status_code not in range(200, 207):
error_message = '%s Unexpected Error: %s for uri: %s\nText: %r' %\
(response.status_code,
response.reason,
response.url,
response.text)
raise iControlUnexpectedHTTPError(error_message, response=response)
return response
return wrapper | Prepare and Post-Process HTTP VERB method for BigIP-RESTServer request.
This function decorates all of the HTTP VERB methods in the
iControlRESTSession class. It provides the core logic for this module.
If necessary it validates and assembles a uri from parts with a call to
`generate_bigip_uri`.
Then it:
1. pre-logs the details of the request
2. submits the request
3. logs the response, included expected status codes
4. raises exceptions for unexpected status codes. (i.e. not doc'd as BigIP
RESTServer codes.) | entailment |
def _unique_resource_identifier_from_kwargs(**kwargs):
"""Chooses an identifier given different choices
The unique identifier in BIG-IP's REST API at the time of this writing
is called 'name'. This is in contrast to the unique identifier that is
used by iWorkflow and BIG-IQ which at some times is 'name' and other
times is 'uuid'.
For example, in iWorkflow, there consider this URI
* https://10.2.2.3/mgmt/cm/cloud/tenants/{0}/services/iapp
Then consider this iWorkflow URI
* https://localhost/mgmt/cm/cloud/connectors/local/{0}
In the first example, the identifier, {0}, is what we would normally
consider a name. For example, "tenant1". In the second example though,
the value is expected to be what we would normally consider to be a
UUID. For example, '244bd478-374e-4eb2-8c73-6e46d7112604'.
This method only tries to rectify the problem of which to use.
I believe there might be some change that the two can appear together,
although I have not yet experienced it. If it is possible, I believe it
would happen in BIG-IQ/iWorkflow land where the UUID and Name both have
significance. That's why I deliberately prefer the UUID when it exists
in the parameters sent to the URL.
:param kwargs:
:return:
"""
name = kwargs.pop('name', '')
uuid = kwargs.pop('uuid', '')
id = kwargs.pop('id', '')
if uuid:
return uuid, kwargs
elif id:
# Used for /mgmt/cm/system/authn/providers/tmos on BIG-IP
return id, kwargs
else:
return name, kwargs | Chooses an identifier given different choices
The unique identifier in BIG-IP's REST API at the time of this writing
is called 'name'. This is in contrast to the unique identifier that is
used by iWorkflow and BIG-IQ which at some times is 'name' and other
times is 'uuid'.
For example, in iWorkflow, there consider this URI
* https://10.2.2.3/mgmt/cm/cloud/tenants/{0}/services/iapp
Then consider this iWorkflow URI
* https://localhost/mgmt/cm/cloud/connectors/local/{0}
In the first example, the identifier, {0}, is what we would normally
consider a name. For example, "tenant1". In the second example though,
the value is expected to be what we would normally consider to be a
UUID. For example, '244bd478-374e-4eb2-8c73-6e46d7112604'.
This method only tries to rectify the problem of which to use.
I believe there might be some change that the two can appear together,
although I have not yet experienced it. If it is possible, I believe it
would happen in BIG-IQ/iWorkflow land where the UUID and Name both have
significance. That's why I deliberately prefer the UUID when it exists
in the parameters sent to the URL.
:param kwargs:
:return: | entailment |
def delete(self, uri, **kwargs):
"""Sends a HTTP DELETE command to the BIGIP REST Server.
Use this method to send a DELETE command to the BIGIP. When calling
this method with the optional arguments ``name`` and ``partition``
as part of ``**kwargs`` they will be added to the ``uri`` passed
in separated by ~ to create a proper BIGIP REST API URL for objects.
All other parameters passed in as ``**kwargs`` are passed directly
to the :meth:`requests.Session.delete`
:param uri: A HTTP URI
:type uri: str
:param name: The object name that will be appended to the uri
:type name: str
:arg partition: The partition name that will be appened to the uri
:type partition: str
:param \**kwargs: The :meth:`reqeusts.Session.delete` optional params
"""
args1 = get_request_args(kwargs)
args2 = get_send_args(kwargs)
req = requests.Request('DELETE', uri, **args1)
prepared = self.session.prepare_request(req)
if self.debug:
self._debug_output.append(debug_prepared_request(prepared))
return self.session.send(prepared, **args2) | Sends a HTTP DELETE command to the BIGIP REST Server.
Use this method to send a DELETE command to the BIGIP. When calling
this method with the optional arguments ``name`` and ``partition``
as part of ``**kwargs`` they will be added to the ``uri`` passed
in separated by ~ to create a proper BIGIP REST API URL for objects.
All other parameters passed in as ``**kwargs`` are passed directly
to the :meth:`requests.Session.delete`
:param uri: A HTTP URI
:type uri: str
:param name: The object name that will be appended to the uri
:type name: str
:arg partition: The partition name that will be appened to the uri
:type partition: str
:param \**kwargs: The :meth:`reqeusts.Session.delete` optional params | entailment |
def append_user_agent(self, user_agent):
"""Append text to the User-Agent header for the request.
Use this method to update the User-Agent header by appending the
given string to the session's User-Agent header separated by a space.
:param user_agent: A string to append to the User-Agent header
:type user_agent: str
"""
old_ua = self.session.headers.get('User-Agent', '')
ua = old_ua + ' ' + user_agent
self.session.headers['User-Agent'] = ua.strip() | Append text to the User-Agent header for the request.
Use this method to update the User-Agent header by appending the
given string to the session's User-Agent header separated by a space.
:param user_agent: A string to append to the User-Agent header
:type user_agent: str | entailment |
def validate_public_key(value):
"""
Check that the given value is a valid RSA Public key in either PEM or OpenSSH format. If it is invalid,
raises ``django.core.exceptions.ValidationError``.
"""
is_valid = False
exc = None
for load in (load_pem_public_key, load_ssh_public_key):
if not is_valid:
try:
load(value.encode('utf-8'), default_backend())
is_valid = True
except Exception as e:
exc = e
if not is_valid:
raise ValidationError('Public key is invalid: %s' % exc) | Check that the given value is a valid RSA Public key in either PEM or OpenSSH format. If it is invalid,
raises ``django.core.exceptions.ValidationError``. | entailment |
def _register_formats(cls):
"""Adds format properties."""
for fmt in cls.OUTPUT_FORMATS:
clean_fmt = fmt.replace('+', '_')
setattr(cls, clean_fmt, property(
(lambda x, fmt=fmt: cls._output(x, fmt)), # fget
(lambda x, y, fmt=fmt: cls._input(x, y, fmt)))) | Adds format properties. | entailment |
def to_file(self, output_filename):
'''Handles pdf and epub format.
Inpute: output_filename should have the proper extension.
Output: The name of the file created, or an IOError if failed'''
temp_file = NamedTemporaryFile(mode="w", suffix=".md", delete=False)
temp_file.write(self._content)
temp_file.close()
subprocess_arguments = [PANDOC_PATH, temp_file.name, '-o %s' % output_filename]
subprocess_arguments.extend(self.arguments)
cmd = " ".join(subprocess_arguments)
fin = os.popen(cmd)
msg = fin.read()
fin.close()
if msg:
print("Pandoc message: {}",format(msg))
os.remove(temp_file.name)
if exists(output_filename):
return output_filename
else:
raise IOError("Failed creating file: %s" % output_filename) | Handles pdf and epub format.
Inpute: output_filename should have the proper extension.
Output: The name of the file created, or an IOError if failed | entailment |
def sign(username, private_key, generate_nonce=None, iat=None, algorithm=DEFAULT_ALGORITHM):
"""
Create a signed JWT using the given username and RSA private key.
:param username: Username (string) to authenticate as on the remote system.
:param private_key: Private key to use to sign the JWT claim.
:param generate_nonce: Optional. Callable to use to generate a new nonce. Defaults to
`random.random <https://docs.python.org/3/library/random.html#random.random>`_.
:param iat: Optional. Timestamp to include in the JWT claim. Defaults to
`time.time <https://docs.python.org/3/library/time.html#time.time>`_.
:param algorithm: Optional. Algorithm to use to sign the JWT claim. Default to ``RS512``.
See `pyjwt.readthedocs.io <https://pyjwt.readthedocs.io/en/latest/algorithms.html>`_ for other possible algorithms.
:return: JWT claim as a string.
"""
iat = iat if iat else time.time()
if not generate_nonce:
generate_nonce = lambda username, iat: random.random() # NOQA
token_data = {
'username': username,
'time': iat,
'nonce': generate_nonce(username, iat),
}
token = jwt.encode(token_data, private_key, algorithm=algorithm)
return token | Create a signed JWT using the given username and RSA private key.
:param username: Username (string) to authenticate as on the remote system.
:param private_key: Private key to use to sign the JWT claim.
:param generate_nonce: Optional. Callable to use to generate a new nonce. Defaults to
`random.random <https://docs.python.org/3/library/random.html#random.random>`_.
:param iat: Optional. Timestamp to include in the JWT claim. Defaults to
`time.time <https://docs.python.org/3/library/time.html#time.time>`_.
:param algorithm: Optional. Algorithm to use to sign the JWT claim. Default to ``RS512``.
See `pyjwt.readthedocs.io <https://pyjwt.readthedocs.io/en/latest/algorithms.html>`_ for other possible algorithms.
:return: JWT claim as a string. | entailment |
def get_claimed_username(token):
"""
Given a JWT, get the username that it is claiming to be `without verifying that the signature is valid`.
:param token: JWT claim
:return: Username
"""
unverified_data = jwt.decode(token, options={
'verify_signature': False
})
if 'username' not in unverified_data:
return None
return unverified_data['username'] | Given a JWT, get the username that it is claiming to be `without verifying that the signature is valid`.
:param token: JWT claim
:return: Username | entailment |
def verify(token, public_key, validate_nonce=None, algorithms=[DEFAULT_ALGORITHM]):
"""
Verify the validity of the given JWT using the given public key.
:param token: JWM claim
:param public_key: Public key to use when verifying the claim's signature.
:param validate_nonce: Callable to use to validate the claim's nonce.
:param algorithms: Allowable signing algorithms. Defaults to ['RS512'].
:return: False if the token is determined to be invalid or a dictionary of the token data if it is valid.
"""
try:
token_data = jwt.decode(token, public_key, algorithms=algorithms)
except jwt.InvalidTokenError:
logger.debug('JWT failed verification')
return False
claimed_username = token_data.get('username')
claimed_time = token_data.get('time', 0)
claimed_nonce = token_data.get('nonce')
# Ensure time is within acceptable bounds
current_time = time.time()
min_time, max_time = (current_time - TIMESTAMP_TOLERANCE, current_time + TIMESTAMP_TOLERANCE)
if claimed_time < min_time or claimed_time > max_time:
logger.debug('Claimed time is outside of allowable tolerances')
return False
# Ensure nonce is unique
if validate_nonce:
if not validate_nonce(claimed_username, claimed_time, claimed_nonce):
logger.debug('Claimed nonce failed to validate')
return False
else:
logger.warning('validate_nonce function was not supplied!')
# If we've gotten this far, the token is valid
return token_data | Verify the validity of the given JWT using the given public key.
:param token: JWM claim
:param public_key: Public key to use when verifying the claim's signature.
:param validate_nonce: Callable to use to validate the claim's nonce.
:param algorithms: Allowable signing algorithms. Defaults to ['RS512'].
:return: False if the token is determined to be invalid or a dictionary of the token data if it is valid. | entailment |
def log_used_nonce(self, username, iat, nonce):
"""
Log a nonce as being used, and therefore henceforth invalid.
:param username: Username as a string.
:param iat: Unix timestamp float or integer of when the nonce was used.
:param nonce: Nonce value.
"""
# TODO: Figure out some way to do this in a thread-safe manner. It'd be better to use
# a Redis Set or something, but we don't necessarily want to be tightly coupled to
# Redis either since not everyone uses it.
key = self.create_nonce_key(username, iat)
used = cache.get(key, [])
used.append(nonce)
cache.set(key, set(used), token.TIMESTAMP_TOLERANCE * 2) | Log a nonce as being used, and therefore henceforth invalid.
:param username: Username as a string.
:param iat: Unix timestamp float or integer of when the nonce was used.
:param nonce: Nonce value. | entailment |
def validate_nonce(self, username, iat, nonce):
"""
Confirm that the given nonce hasn't already been used.
:param username: Username as a string.
:param iat: Unix timestamp float or integer of when the nonce was used.
:param nonce: Nonce value.
:return: True if nonce is valid, False if it is invalid.
"""
key = self.create_nonce_key(username, iat)
used = cache.get(key, [])
return nonce not in used | Confirm that the given nonce hasn't already been used.
:param username: Username as a string.
:param iat: Unix timestamp float or integer of when the nonce was used.
:param nonce: Nonce value.
:return: True if nonce is valid, False if it is invalid. | entailment |
def process_request(self, request):
"""
Process a Django request and authenticate users.
If a JWT authentication header is detected and it is determined to be valid, the user is set as
``request.user`` and CSRF protection is disabled (``request._dont_enforce_csrf_checks = True``) on
the request.
:param request: Django Request instance
"""
if 'HTTP_AUTHORIZATION' not in request.META:
return
try:
method, claim = request.META['HTTP_AUTHORIZATION'].split(' ', 1)
except ValueError:
return
if method.upper() != AUTH_METHOD:
return
username = token.get_claimed_username(claim)
if not username:
return
User = get_user_model()
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return
claim_data = None
for public in user.public_keys.all():
claim_data = token.verify(claim, public.key, validate_nonce=self.validate_nonce)
if claim_data:
break
if not claim_data:
return
logger.debug('Successfully authenticated %s using JWT', user.username)
request._dont_enforce_csrf_checks = True
request.user = user | Process a Django request and authenticate users.
If a JWT authentication header is detected and it is determined to be valid, the user is set as
``request.user`` and CSRF protection is disabled (``request._dont_enforce_csrf_checks = True``) on
the request.
:param request: Django Request instance | entailment |
def generate_key_pair(size=2048, public_exponent=65537, as_string=True):
"""
Generate a public/private key pair.
:param size: Optional. Describes how many bits long the key should be, larger keys provide more security,
currently 1024 and below are considered breakable, and 2048 or 4096 are reasonable default
key sizes for new keys. Defaults to 2048.
:param public_exponent: Optional. Indicates what one mathematical property of the key generation will be.
65537 is the default and should almost always be used.
:param as_string: Optional. If True, return tuple of strings. If false, return tuple of RSA key objects.
Defaults to True.
:return: (PrivateKey<string>, PublicKey<string>)
:return: (
`RSAPrivateKey <https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/#cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey>`_,
`RSAPublicKey <https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/#cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey>`_)
"""
private = rsa.generate_private_key(
public_exponent=public_exponent,
key_size=size,
backend=default_backend()
)
public = private.public_key()
if not as_string:
return private, public
pem_private = private.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()).decode(ENCODING)
pem_public = public.public_bytes(Encoding.PEM, PublicFormat.SubjectPublicKeyInfo).decode(ENCODING)
return pem_private, pem_public | Generate a public/private key pair.
:param size: Optional. Describes how many bits long the key should be, larger keys provide more security,
currently 1024 and below are considered breakable, and 2048 or 4096 are reasonable default
key sizes for new keys. Defaults to 2048.
:param public_exponent: Optional. Indicates what one mathematical property of the key generation will be.
65537 is the default and should almost always be used.
:param as_string: Optional. If True, return tuple of strings. If false, return tuple of RSA key objects.
Defaults to True.
:return: (PrivateKey<string>, PublicKey<string>)
:return: (
`RSAPrivateKey <https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/#cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey>`_,
`RSAPublicKey <https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/#cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey>`_) | entailment |
def load_private_key(key_file, key_password=None):
"""
Load a private key from disk.
:param key_file: File path to key file.
:param key_password: Optional. If the key file is encrypted, provide the password to decrypt it. Defaults to None.
:return: PrivateKey<string>
"""
key_file = os.path.expanduser(key_file)
key_file = os.path.abspath(key_file)
if not key_password:
with open(key_file, 'r') as key:
return key.read()
with open(key_file, 'rb') as key:
key_bytes = key.read()
return decrypt_key(key_bytes, key_password).decode(ENCODING) | Load a private key from disk.
:param key_file: File path to key file.
:param key_password: Optional. If the key file is encrypted, provide the password to decrypt it. Defaults to None.
:return: PrivateKey<string> | entailment |
def decrypt_key(key, password):
"""
Decrypt an encrypted private key.
:param key: Encrypted private key as a string.
:param password: Key pass-phrase.
:return: Decrypted private key as a string.
"""
private = serialization.load_pem_private_key(key, password=password, backend=default_backend())
return private.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()) | Decrypt an encrypted private key.
:param key: Encrypted private key as a string.
:param password: Key pass-phrase.
:return: Decrypted private key as a string. | entailment |
def create_auth_header(username, key=None, key_file="~/.ssh/id_rsa", key_password=None):
"""
Create an HTTP Authorization header using a private key file.
Either a key or a key_file must be provided.
:param username: The username to authenticate as on the remote system.
:param key: Optional. A private key as either a string or an instance of cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey.
:param key_file: Optional. Path to a file containing the user's private key. Defaults to ~/.ssh/id_rsa. Should be in PEM format.
:param key_password: Optional. Password to decrypt key_file. If set, should be a bytes object.
:return: Authentication header value as a string.
"""
if not key:
key = load_private_key(key_file, key_password)
claim = token.sign(username, key)
return "%s %s" % (AUTH_METHOD, claim.decode(ENCODING)) | Create an HTTP Authorization header using a private key file.
Either a key or a key_file must be provided.
:param username: The username to authenticate as on the remote system.
:param key: Optional. A private key as either a string or an instance of cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey.
:param key_file: Optional. Path to a file containing the user's private key. Defaults to ~/.ssh/id_rsa. Should be in PEM format.
:param key_password: Optional. Password to decrypt key_file. If set, should be a bytes object.
:return: Authentication header value as a string. | entailment |
def _config_logs(lvl=None, name=None):
"""
Set up or change logging configuration.
_config_logs() => idempotent setup;
_config_logs(L) => change log level
"""
# print('_config_log', 'from %s' %name if name else '')
FORMAT = '%(message)s'
# maybe better for log files
# FORMAT='[%(levelname)s]:%(message)s',
# Reset handlers
for h in list(logging.root.handlers):
logging.root.removeHandler(h)
global _log_level
if lvl: _log_level = lvl
logging.basicConfig(level=_log_level, format=FORMAT, stream=sys.stdout)
_log = logging.getLogger(__name__)
_log.setLevel(_log_level)
# external
for log in ['urllib3', 'asyncio']:
logging.getLogger(log).setLevel(_log_level) | Set up or change logging configuration.
_config_logs() => idempotent setup;
_config_logs(L) => change log level | entailment |
def mk_set_headers(self, data, columns):
""" figure out sizes and create header fmt """
columns = tuple(columns)
lens = []
for key in columns:
value_len = max(len(str(each.get(key, ''))) for each in data)
# account for header lengths
lens.append(max(value_len, len(self._get_name(key))))
fmt = self.mk_fmt(*lens)
return fmt | figure out sizes and create header fmt | entailment |
def _get_name(self, key):
""" get display name for a key, or mangle for display """
if key in self.display_names:
return self.display_names[key]
return key.capitalize() | get display name for a key, or mangle for display | entailment |
def display_set(self, typ, data, columns):
""" display a list of dicts """
self.display_section("%s (%d)" % (self._get_name(typ), len(data)))
headers = tuple(map(self._get_name, columns))
fmt = self.mk_set_headers(data, columns)
self.display_headers(fmt, headers)
for each in data:
row = tuple(self._get_val(each, k) for k, v in each.items())
self._print(fmt % row)
self._print("\n") | display a list of dicts | entailment |
def _print(self, *args):
""" internal print to self.fobj """
string = u" ".join(args) + '\n'
self.fobj.write(string) | internal print to self.fobj | entailment |
def display(self, typ, data):
""" display section of typ with data """
if hasattr(self, 'print_' + typ):
getattr(self, 'print_' + typ)(data)
elif not data:
self._print("%s: %s" % (typ, data))
elif isinstance(data, collections.Mapping):
self._print("\n", typ)
for k, v in data.items():
self.print(k, v)
elif isinstance(data, (list, tuple)):
# tabular data layout for lists of dicts
if isinstance(data[0], collections.Mapping):
self.display_set(typ, data, self._get_columns(data[0]))
else:
for each in data:
self.print(typ, each)
else:
self._print("%s: %s" % (typ, data))
self.fobj.flush() | display section of typ with data | entailment |
def _handler(func):
"Decorate a command handler"
def _wrapped(*a, **k):
r = func(*a, **k)
if r is None: r = 0
return r
return staticmethod(_wrapped) | Decorate a command handler | entailment |
def add_subcommands(parser, commands):
"Add commands to a parser"
subps = parser.add_subparsers()
for cmd, cls in commands:
subp = subps.add_parser(cmd, help=cls.__doc__)
add_args = getattr(cls, 'add_arguments', None)
if add_args:
add_args(subp)
handler = getattr(cls, 'handle', None)
if handler:
subp.set_defaults(handler=handler) | Add commands to a parser | entailment |
def reftag_to_cls(fn):
"""
decorator that checks function arguments for `concrete` and `resource`
and will properly set them to class references if a string (reftag) is
passed as the value
"""
names, _, _, values = inspect.getargspec(fn)
@wraps(fn)
def wrapped(*args, **kwargs):
i = 0
backend = args[0]
for name in names[1:]:
value = args[i]
if name == "concrete" and isinstance(value, six.string_types):
args[i] = backend.REFTAG_CONCRETE[value]
elif name == "resource" and isinstance(value, six.string_types):
args[i] = backend.REFTAG_RESOURCE[value]
i += 1
return fn(*args, **kwargs)
return wrapped | decorator that checks function arguments for `concrete` and `resource`
and will properly set them to class references if a string (reftag) is
passed as the value | entailment |
def update(self, res, pk, depth=1, since=None):
"""
Try to sync an object to the local database, in case of failure
where a referenced object is not found, attempt to fetch said
object from the REST api
"""
fetch = lambda: self._fetcher.fetch_latest(res, pk, 1, since=since)
self._update(res, fetch, depth) | Try to sync an object to the local database, in case of failure
where a referenced object is not found, attempt to fetch said
object from the REST api | entailment |
def update_where(self, res, depth=0, since=None, **kwargs):
"Like update() but uses WHERE-style args"
fetch = lambda: self._fetcher.fetch_all_latest(res, 0, kwargs, since=since)
self._update(res, fetch, depth) | Like update() but uses WHERE-style args | entailment |
def update_all(self, rs=None, since=None):
"Sync all objects for the relations rs (if None, sync all resources)"
self._log.info("Updating resources: %s", ' '.join(r.tag for r in rs))
if rs is None:
rs = resource.all_resources()
ctx = self._ContextClass(self)
for r in rs:
self._atomic_update(lambda: ctx.sync_resource(r, since=since)) | Sync all objects for the relations rs (if None, sync all resources) | entailment |
def get_task(self, key):
"""Get a scheduled task, or none"""
res, pk = key
jobs, lock = self._jobs
with lock:
return jobs[res].get(pk) | Get a scheduled task, or none | entailment |
def set_job(self, key, func, args):
"""
Get a scheduled task or set if none exists.
Returns:
- task coroutine/continuation
"""
res, pk = key
jobs, lock = self._jobs
task = _tasks.UpdateTask(func(*args), key)
with lock:
job = jobs[res].get(pk)
had = bool(job)
if not job:
job = task
jobs[res][pk] = job
else:
task.cancel()
self._log.debug('Scheduling: %s-%s (%s)', res.tag, pk,
'new task' if not had else 'dup')
return job | Get a scheduled task or set if none exists.
Returns:
- task coroutine/continuation | entailment |
def pending_tasks(self, res):
"Synchronized access to tasks"
jobs, lock = self._jobs
with lock:
return jobs[res].copy() | Synchronized access to tasks | entailment |
def fetch_and_index(self, fetch_func):
"Fetch data with func, return dict indexed by ID"
data, e = fetch_func()
if e: raise e
yield {row['id']: row for row in data} | Fetch data with func, return dict indexed by ID | entailment |
def clean_helper(B, obj, clean_func):
"""
Clean object, intercepting and collecting any missing-relation or
unique-constraint errors and returning the relevant resource ids/fields.
Returns:
- tuple: (<dict of non-unique fields>, <dict of missing refs>)
"""
try:
clean_func(obj)
except B.validation_error() as e:
# _debug.log_validation_errors(B, e, obj, k)
# Check if it's a uniqueness or missing relation error
fields = B.detect_uniqueness_error(e)
missing = B.detect_missing_relations(obj, e)
return fields, missing
return (None, None) | Clean object, intercepting and collecting any missing-relation or
unique-constraint errors and returning the relevant resource ids/fields.
Returns:
- tuple: (<dict of non-unique fields>, <dict of missing refs>) | entailment |
def initialize_object(B, res, row):
"""
Do a shallow initialization of an object
Arguments:
- row<dict>: dict of data like depth=1, i.e. many_refs are only ids
"""
B = get_backend()
field_groups = FieldGroups(B.get_concrete(res))
try:
obj = B.get_object(B.get_concrete(res), row['id'])
except B.object_missing_error(B.get_concrete(res)):
tbl = B.get_concrete(res)
obj = tbl()
# Set attributes, refs
for fname, field in field_groups['scalars'].items():
value = row.get(fname, getattr(obj, fname, None))
value = B.convert_field(obj.__class__, fname, value)
setattr(obj, fname, value)
# _debug('res, row: %s, %s', res, row)
# Already-fetched, and id-only refs
fetched, dangling = defaultdict(dict), defaultdict(set)
# To handle subrows that might be shallow (id) or deep (dict)
def _handle_subrow(R, subrow):
if isinstance(subrow, dict):
pk = subrow['id']
fetched[R][pk] = subrow
else:
pk = subrow
dangling[R].add(pk)
return pk
for fname, field in field_groups['one_refs'].items():
fieldres = _field_resource(B, B.get_concrete(res), fname)
key = field.column
subrow = row.get(key)
if subrow is None: # e.g. use "org" if "org_id" is missing
key = fname
subrow = row[key]
pk = _handle_subrow(fieldres, subrow)
setattr(obj, key, pk)
for fname, field in field_groups['many_refs'].items():
fieldres = _field_resource(B, B.get_concrete(res), fname)
pks = [
_handle_subrow(fieldres, subrow) for subrow in row.get(fname, [])
]
return obj, fetched, dangling | Do a shallow initialization of an object
Arguments:
- row<dict>: dict of data like depth=1, i.e. many_refs are only ids | entailment |
def read_config(conf_dir=DEFAULT_CONFIG_DIR):
"Find and read config file for a directory, return None if not found."
conf_path = os.path.expanduser(conf_dir)
if not os.path.exists(conf_path):
# only throw if not default
if conf_dir != DEFAULT_CONFIG_DIR:
raise IOError("Config directory not found at %s" % (conf_path, ))
return munge.load_datafile('config', conf_path, default=None) | Find and read config file for a directory, return None if not found. | entailment |
def load_config(conf_dir=DEFAULT_CONFIG_DIR, schema=CLIENT_SCHEMA):
"""
Load config files from the specified directory, using defaults for missing values.
Directory should contain a file named config.<ext> where <ext> is a
supported config file format.
"""
data = default_config(schema)
config = read_config(conf_dir)
if config:
recursive_update(data, config)
return data | Load config files from the specified directory, using defaults for missing values.
Directory should contain a file named config.<ext> where <ext> is a
supported config file format. | entailment |
def detect_old(data):
"Check for a config file with old schema"
if not data:
return False
ok, errors, warnings = _schema.validate(_OLD_SCHEMA, data)
return ok and not (errors or warnings) | Check for a config file with old schema | entailment |
def convert_old(data):
"Convert config data with old schema to new schema"
ret = default_config()
ret['sync'].update(data.get('peeringdb', {}))
ret['orm']['database'].update(data.get('database', {}))
return ret | Convert config data with old schema to new schema | entailment |
def write_config(data, conf_dir=DEFAULT_CONFIG_DIR, codec="yaml",
backup_existing=False):
"""
Write config values to a file.
Arguments:
- conf_dir<str>: path to output directory
- codec<str>: output field format
- backup_existing<bool>: if a config file exists,
make a copy before overwriting
"""
if not codec:
codec = 'yaml'
codec = munge.get_codec(codec)()
conf_dir = os.path.expanduser(conf_dir)
if not os.path.exists(conf_dir):
os.mkdir(conf_dir)
# Check for existing file, back up if necessary
outpath = os.path.join(conf_dir, 'config.' + codec.extensions[0])
if backup_existing and os.path.exists(outpath):
os.rename(outpath, outpath + '.bak')
codec.dump(data, open(outpath, 'w')) | Write config values to a file.
Arguments:
- conf_dir<str>: path to output directory
- codec<str>: output field format
- backup_existing<bool>: if a config file exists,
make a copy before overwriting | entailment |
def prompt_config(sch, defaults=None, path=None):
"""
Utility function to recursively prompt for config values
Arguments:
- defaults<dict>: default values used for empty inputs
- path<str>: path to prepend to config keys (eg. "path.keyname")
"""
out = {}
for name, attr in sch.attributes():
fullpath = name
if path:
fullpath = '{}.{}'.format(path, name)
if defaults is None:
defaults = {}
default = defaults.get(name)
if isinstance(attr, _schema.Schema):
# recurse on sub-schema
value = prompt_config(attr, defaults=default, path=fullpath)
else:
if default is None:
default = attr.default
if default is None:
default = ''
value = prompt(fullpath, default)
out[name] = value
return sch.validate(out) | Utility function to recursively prompt for config values
Arguments:
- defaults<dict>: default values used for empty inputs
- path<str>: path to prepend to config keys (eg. "path.keyname") | entailment |
def fetch(self, R, pk, depth=1):
"Request object from API"
d, e = self._fetcher.fetch(R, pk, depth)
if e: raise e
return d | Request object from API | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.