code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
|---|---|---|---|---|---|
workspace = Workspace(ctx.resolver, directory=ctx.directory, mets_basename=ctx.mets_basename, automatic_backup=ctx.automatic_backup)
if not local_filename.startswith(ctx.directory):
log.debug("File '%s' is not in workspace, copying", local_filename)
local_filename = ctx.resolver.download_to_directory(ctx.directory, "file://" + local_filename, subdir=file_grp)
url = "file://" + local_filename
workspace.mets.add_file(
fileGrp=file_grp,
ID=file_id,
mimetype=mimetype,
url=url,
pageId=page_id,
force=force,
local_filename=local_filename
)
workspace.save_mets()
|
def workspace_add_file(ctx, file_grp, file_id, mimetype, page_id, force, local_filename)
|
Add a file LOCAL_FILENAME to METS in a workspace.
| 3.383956
| 3.237118
| 1.045361
|
workspace = Workspace(ctx.resolver, directory=ctx.directory, mets_basename=ctx.mets_basename)
for f in workspace.mets.find_files(
ID=file_id,
fileGrp=file_grp,
mimetype=mimetype,
pageId=page_id,
):
if download:
workspace.download_file(f)
workspace.save_mets()
ret = '\t'.join([getattr(f, field) or '' for field in output_field])
print(ret)
|
def workspace_find(ctx, file_grp, mimetype, page_id, file_id, output_field, download)
|
Find files.
| 3.658341
| 3.962117
| 0.92333
|
backup_manager = WorkspaceBackupManager(Workspace(ctx.resolver, directory=ctx.directory, mets_basename=ctx.mets_basename, automatic_backup=ctx.automatic_backup))
backup_manager.add()
|
def workspace_backup_add(ctx)
|
Create a new backup
| 8.000204
| 8.676848
| 0.922017
|
backup_manager = WorkspaceBackupManager(Workspace(ctx.resolver, directory=ctx.directory, mets_basename=ctx.mets_basename, automatic_backup=ctx.automatic_backup))
for b in backup_manager.list():
print(b)
|
def workspace_backup_list(ctx)
|
List backups
| 6.98824
| 7.534602
| 0.927486
|
backup_manager = WorkspaceBackupManager(Workspace(ctx.resolver, directory=ctx.directory, mets_basename=ctx.mets_basename, automatic_backup=ctx.automatic_backup))
backup_manager.restore(bak, choose_first)
|
def workspace_backup_restore(ctx, choose_first, bak)
|
Restore backup BAK
| 6.633183
| 6.818052
| 0.972885
|
backup_manager = WorkspaceBackupManager(Workspace(ctx.resolver, directory=ctx.directory, mets_basename=ctx.mets_basename, automatic_backup=ctx.automatic_backup))
backup_manager.undo()
|
def workspace_backup_undo(ctx)
|
Restore the last backup
| 7.602122
| 7.948053
| 0.956476
|
validate_properties = validator_class.VALIDATORS["properties"]
def set_defaults(validator, properties, instance, schema):
for prop, subschema in properties.items():
if "default" in subschema:
instance.setdefault(prop, subschema["default"])
for error in validate_properties(validator, properties, instance, schema):
yield error
return validators.extend(validator_class, {"properties": set_defaults})
|
def extend_with_default(validator_class)
|
Add a default-setting mechanism to a ``jsonschema`` validation class.
| 1.831968
| 1.89877
| 0.964818
|
if isinstance(obj, str):
obj = json.loads(obj)
return JsonValidator(schema)._validate(obj)
|
def validate(obj, schema)
|
Validate an object against a schema
Args:
obj (dict):
schema (dict):
| 4.937128
| 5.634586
| 0.876218
|
report = ValidationReport()
if not self.validator.is_valid(obj):
for v in self.validator.iter_errors(obj):
report.add_error("[%s] %s" % ('.'.join(str(vv) for vv in v.path), v.message))
return report
|
def _validate(self, obj)
|
Do the actual validation
Arguments:
obj (dict): object to validate
Returns: ValidationReport
| 3.072313
| 3.028396
| 1.014502
|
if not '://' in parameter:
fname = os.path.abspath(parameter)
else:
fname = workspace.download_url(parameter)
with open(fname, 'r') as param_json_file:
parameter = json.load(param_json_file)
else:
parameter = {}
log.debug("Running processor %s", processorClass)
processor = processorClass(
workspace,
ocrd_tool=ocrd_tool,
page_id=page_id,
input_file_grp=input_file_grp,
output_file_grp=output_file_grp,
parameter=parameter
)
ocrd_tool = processor.ocrd_tool
name = '%s v%s' % (ocrd_tool['executable'], processor.version)
otherrole = ocrd_tool['steps'][0]
log.debug("Processor instance %s (%s doing %s)", processor, name, otherrole)
processor.process()
workspace.mets.add_agent(
name=name,
_type='OTHER',
othertype='SOFTWARE',
role='OTHER',
otherrole=otherrole
)
workspace.save_mets()
return processor
|
def run_processor(
processorClass,
ocrd_tool=None,
mets_url=None,
resolver=None,
workspace=None,
page_id=None,
log_level=None,
input_file_grp=None,
output_file_grp=None,
parameter=None,
working_dir=None,
): # pylint: disable=too-many-locals
workspace = _get_workspace(
workspace,
resolver,
mets_url,
working_dir
)
if parameter is not None
|
Create a workspace for mets_url and run processor through it
Args:
parameter (string): URL to the parameter
| 3.440113
| 3.538326
| 0.972243
|
workspace = _get_workspace(workspace, resolver, mets_url, working_dir)
args = [executable, '--working-dir', workspace.directory]
args += ['--mets', mets_url]
if log_level:
args += ['--log-level', log_level]
if page_id:
args += ['--page-id', page_id]
if input_file_grp:
args += ['--input-file-grp', input_file_grp]
if output_file_grp:
args += ['--output-file-grp', output_file_grp]
if parameter:
args += ['--parameter', parameter]
log.debug("Running subprocess '%s'", ' '.join(args))
return subprocess.call(args)
|
def run_cli(
executable,
mets_url=None,
resolver=None,
workspace=None,
page_id=None,
log_level=None,
input_file_grp=None,
output_file_grp=None,
parameter=None,
working_dir=None,
)
|
Create a workspace for mets_url and run MP CLI through it
| 1.870114
| 1.789898
| 1.044816
|
return self.workspace.mets.find_files(fileGrp=self.input_file_grp, pageId=self.page_id)
|
def input_files(self)
|
List the input files
| 11.132594
| 10.635579
| 1.046731
|
if input_file.local_filename is None:
raise Exception("input_file must have 'local_filename' property")
exif = exif_from_filename(input_file.local_filename)
now = datetime.now()
return PcGtsType(
Metadata=MetadataType(
Creator="OCR-D/core %s" % VERSION,
Created=now,
LastChange=now
),
Page=PageType(
imageWidth=exif.width,
imageHeight=exif.height,
# XXX brittle
imageFilename=input_file.url if input_file.url is not None else 'file://' + input_file.local_filename
)
)
|
def page_from_image(input_file)
|
Create `OcrdPage </../../ocrd_models/ocrd_models.ocrd_page.html>`_
from an `OcrdFile </../../ocrd_models/ocrd_models.ocrd_file.html>`_
representing an image (i.e. should have ``mimetype`` starting with ``image/``).
Arguments:
* input_file (OcrdFile):
| 4.570951
| 4.374844
| 1.044826
|
# print("PARSING PARSING '%s'" % input_file)
if input_file.mimetype.startswith('image'):
return page_from_image(input_file)
if input_file.mimetype == MIMETYPE_PAGE:
return parse(input_file.local_filename, silence=True)
raise Exception("Unsupported mimetype '%s'" % input_file.mimetype)
|
def page_from_file(input_file)
|
Create a new PAGE-XML from a METS file representing a PAGE-XML or an image.
Arguments:
* input_file (OcrdFile):
| 4.269522
| 4.428517
| 0.964097
|
ret = base
for n in args:
if is_string(n):
ret = "%s_%s" % (ret, n)
else:
ret = "%s_%04i" % (ret, n + 1)
return ret
|
def concat_padded(base, *args)
|
Concatenate string and zero-padded 4 digit number
| 3.403105
| 3.271423
| 1.040252
|
x, y, w, h = box['x'], box['y'], box['w'], box['h']
# tesseract uses a different region representation format
return "%i,%i %i,%i %i,%i %i,%i" % (
x, y,
x + w, y,
x + w, y + h,
x, y + h
)
|
def points_from_xywh(box)
|
Constructs a polygon representation from a rectangle described as a dict with keys x, y, w, h.
| 3.094465
| 2.793699
| 1.107659
|
y0 = yxyx[0]
x0 = yxyx[1]
y1 = yxyx[2]
x1 = yxyx[3]
return "%s,%s %s,%s %s,%s %s,%s" % (
x0, y0,
x1, y0,
x1, y1,
x0, y1
)
|
def points_from_y0x0y1x1(yxyx)
|
Constructs a polygon representation from a rectangle described as a list [y0, x0, y1, x1]
| 1.643988
| 1.618785
| 1.015569
|
x0 = xyxy[0]
y0 = xyxy[1]
x1 = xyxy[2]
y1 = xyxy[3]
return "%s,%s %s,%s %s,%s %s,%s" % (
x0, y0,
x1, y0,
x1, y1,
x0, y1
)
|
def points_from_x0y0x1y1(xyxy)
|
Constructs a polygon representation from a rectangle described as a list [x0, y0, x1, y1]
| 1.673154
| 1.59115
| 1.051538
|
polygon = []
for pair in points.split(" "):
x_y = pair.split(",")
polygon.append([float(x_y[0]), float(x_y[1])])
return polygon
|
def polygon_from_points(points)
|
Constructs a numpy-compatible polygon from a page representation.
| 2.831852
| 2.470861
| 1.146099
|
z = ZipFile(path_to_zip, 'r')
z.extractall(output_directory)
z.close()
|
def unzip_file_to_dir(path_to_zip, output_directory)
|
Extract a ZIP archive to a directory
| 2.115422
| 2.166852
| 0.976265
|
xys = [[int(p) for p in pair.split(',')] for pair in points.split(' ')]
minx = sys.maxsize
miny = sys.maxsize
maxx = 0
maxy = 0
for xy in xys:
if xy[0] < minx:
minx = xy[0]
if xy[0] > maxx:
maxx = xy[0]
if xy[1] < miny:
miny = xy[1]
if xy[1] > maxy:
maxy = xy[1]
return {
'x': minx,
'y': miny,
'w': maxx - minx,
'h': maxy - miny,
}
|
def xywh_from_points(points)
|
Constructs a dict representing a rectangle with keys x, y, w, h
| 1.581226
| 1.552855
| 1.01827
|
if not self.profile_validator.validate(bag):
raise Exception(str(self.profile_validator.report))
|
def _validate_profile(self, bag)
|
Validate against OCRD BagIt profile (bag-info fields, algos etc)
| 5.919881
| 4.396905
| 1.346375
|
failed = None
try:
bag.validate(**kwargs)
except BagValidationError as e:
failed = e
# for d in e.details:
# if isinstance(d, ChecksumMismatch):
# log.error("Validation Error: expected %s to have %s checksum of %s but found %s", d.path, d.algorithm, d.expected, d.found)
# else:
# log.error("Validation Error: %s", d)
if failed:
raise BagValidationError("%s" % failed)
|
def _validate_bag(self, bag, **kwargs)
|
Validate BagIt (checksums, payload.oxum etc)
| 3.679055
| 3.395609
| 1.083474
|
if skip_unzip:
bagdir = self.path_to_zip
skip_delete = True
else:
# try:
self.profile_validator.validate_serialization(self.path_to_zip)
# except IOError as err:
# raise err
# except ProfileValidationError as err:
# self.report.add_error(err.value)
bagdir = mkdtemp(prefix=TMP_BAGIT_PREFIX)
unzip_file_to_dir(self.path_to_zip, bagdir)
try:
bag = Bag(bagdir)
self._validate_profile(bag)
if not skip_bag:
self._validate_bag(bag, fast=skip_checksums, processes=processes)
finally:
if not skip_delete:
# remove tempdir
rmtree(bagdir)
return self.report
|
def validate(self, skip_checksums=False, skip_bag=False, skip_unzip=False, skip_delete=False, processes=2)
|
Validate an OCRD-ZIP file for profile, bag and workspace conformance
Arguments:
skip_bag (boolean): Whether to skip all checks of manifests and files
skip_checksums (boolean): Whether to omit checksum checks but still check basic BagIt conformance
skip_unzip (boolean): Whether the OCRD-ZIP is unzipped, i.e. a directory
skip_delete (boolean): Whether to skip deleting the unpacked OCRD-ZIP dir after validation
processes (integer): Number of processes used for checksum validation
| 3.838465
| 3.816116
| 1.005857
|
"Escape markup chars, but do not modify CDATA sections."
if not inStr:
return ''
s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
s2 = ''
pos = 0
matchobjects = CDATA_pattern_.finditer(s1)
for mo in matchobjects:
s3 = s1[pos:mo.start()]
s2 += quote_xml_aux(s3)
s2 += s1[mo.start():mo.end()]
pos = mo.end()
s3 = s1[pos:]
s2 += quote_xml_aux(s3)
return s2
|
def quote_xml(inStr)
|
Escape markup chars, but do not modify CDATA sections.
| 1.326504
| 1.370138
| 0.968154
|
'''Parse a string, create the object tree, and export it.
Arguments:
- inString -- A string. This XML fragment should not start
with an XML declaration containing an encoding.
- silence -- A boolean. If False, export the object.
Returns -- The root object in the tree.
'''
parser = None
rootNode= parsexmlstring_(inString, parser)
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'PcGts'
rootClass = PcGts
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
if not silence:
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_=rootTag,
namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2018-07-15"')
return rootObj
|
def parseString(inString, silence=False)
|
Parse a string, create the object tree, and export it.
Arguments:
- inString -- A string. This XML fragment should not start
with an XML declaration containing an encoding.
- silence -- A boolean. If False, export the object.
Returns -- The root object in the tree.
| 1.558154
| 1.34141
| 1.16158
|
if parameters is None or parameters == "":
parameters = {}
else:
with open(parameters, 'r') as f:
parameters = loads(f.read())
parameterValidator = ParameterValidator(ctx.json['tools'][ctx.tool_name])
report = parameterValidator.validate(parameters)
if not report.is_valid:
print(report.to_xml())
sys.exit(1)
if json:
print(dumps(parameters))
else:
for k in parameters:
print('params["%s"]="%s"' % (k, parameters[k]))
|
def ocrd_tool_tool_parse_params(ctx, parameters, json)
|
Parse parameters with fallback to defaults and output as shell-eval'able assignments to params var.
| 3.165954
| 3.010276
| 1.051716
|
if othertype is not None:
self._el.set('TYPE', 'OTHER')
self._el.set('OTHERTYPE', othertype)
|
def othertype(self, othertype)
|
Set the ``OTHERTYPE`` attribute value.
| 4.351541
| 3.512406
| 1.238906
|
if otherrole is not None:
self._el.set('ROLE', 'OTHER')
self._el.set('OTHERROLE', otherrole)
|
def otherrole(self, otherrole)
|
Set the ``OTHERROLE`` attribute value.
| 4.986941
| 4.099153
| 1.216579
|
el_name = self._el.find('mets:name', NS)
if el_name is not None:
return el_name.text
|
def name(self)
|
Get the ``mets:name`` element value.
| 5.553421
| 2.883925
| 1.925647
|
if name is not None:
el_name = self._el.find('mets:name', NS)
if el_name is None:
el_name = ET.SubElement(self._el, TAG_METS_NAME)
el_name.text = name
|
def name(self, name)
|
Set the ``mets:name`` element value.
| 3.272446
| 2.561126
| 1.277737
|
def validate(self, *args, **kwargs): # pylint: disable=arguments-differ
return super(ParameterValidator, self)._validate(*args, **kwargs)
|
Validate a parameter dict against a parameter schema from an ocrd-tool.json
Args:
obj (dict):
schema (dict):
| null | null | null |
|
if isinstance(node, PcGtsType):
node = node.get_Page()
elif isinstance(node, GlyphType):
return report
_, tag, getter, concatenate_with = [x for x in _HIERARCHY if isinstance(node, x[0])][0]
children_are_consistent = True
children = getattr(node, getter)()
for child in children:
errors_before = len(report.errors)
handle_inconsistencies(child, strictness, strategy, report)
if len(report.errors) > errors_before:
children_are_consistent = False
if concatenate_with is not None:
concatenated_children = concatenate_children(node, concatenate_with, strategy)
text_results = get_text(node, strategy)
if concatenated_children and text_results and concatenated_children != text_results:
if strictness == 'fix':
set_text(node, concatenated_children, strategy)
# if children_are_consistent:
# else:
# # TODO fix text results recursively
# report.add_warning("Fixing inconsistencies recursively not implemented")
elif strictness == 'lax':
if not compare_without_whitespace(concatenated_children, text_results):
report.add_error(ConsistencyError(tag, node.id, text_results, concatenated_children))
else:
report.add_error(ConsistencyError(tag, node.id, text_results, concatenated_children))
return report
|
def handle_inconsistencies(node, strictness, strategy, report)
|
Check whether the text results on an element are consistent with its child element text results.
| 3.748333
| 3.46029
| 1.083242
|
_, _, getter, concatenate_with = [x for x in _HIERARCHY if isinstance(node, x[0])][0]
tokens = [get_text(x, strategy) for x in getattr(node, getter)()]
return concatenate_with.join(tokens).strip()
|
def concatenate_children(node, concatenate_with, strategy)
|
Concatenate children of node according to https://ocr-d.github.io/page#consistency-of-text-results-on-different-levels
| 5.979923
| 5.654291
| 1.05759
|
textEquivs = node.get_TextEquiv()
if not textEquivs:
log.debug("No text results on %s %s", node, node.id)
return ''
# elif strategy == 'index1':
else:
if len(textEquivs) > 1:
index1 = [x for x in textEquivs if x.index == 1]
if index1:
return index1[0].get_Unicode().strip()
return textEquivs[0].get_Unicode().strip()
|
def get_text(node, strategy)
|
Get the most confident text results, either those with @index = 1 or the first text results or empty string.
| 3.994896
| 3.250966
| 1.228833
|
text = text.strip()
textEquivs = node.get_TextEquiv()
if not textEquivs:
node.add_TextEquiv(TextEquivType(Unicode=text))
# elif strategy == 'index1':
else:
if len(textEquivs) > 1:
index1 = [x for x in textEquivs if x.index == 1]
if index1:
index1[0].set_Unicode(text)
return
textEquivs[0].set_Unicode(text)
|
def set_text(node, text, strategy)
|
Set the most confident text results, either those with @index = 1, the first text results or add new one.
| 3.360749
| 3.049021
| 1.102239
|
if ocrd_page:
validator = PageValidator(ocrd_page, strictness, strategy)
elif ocrd_file:
validator = PageValidator(page_from_file(ocrd_file), strictness, strategy)
elif filename:
validator = PageValidator(parse(filename, silence=True), strictness, strategy)
else:
raise Exception("At least one of ocrd_page, ocrd_file or filename must be set")
return validator._validate()
|
def validate(filename=None, ocrd_page=None, ocrd_file=None, strictness='strict', strategy='index1')
|
Validates a PAGE file for consistency by filename, OcrdFile or passing OcrdPage directly.
Arguments:
filename (string): Path to PAGE
ocrd_page (OcrdPage): OcrdPage instance
ocrd_file (OcrdFile): OcrdFile instance wrapping OcrdPage
strictness (string): 'strict', 'lax', 'fix' or 'off'
strategy (string): Currently only 'index1'
Returns:
report (:class:`ValidationReport`) Report on the validity
| 2.358401
| 2.782185
| 0.847679
|
if self.strictness == 'off':
return self.report
handle_inconsistencies(self.page, self.strictness, self.strategy, self.report)
return self.report
|
def _validate(self)
|
Do the actual validation
| 10.26483
| 9.118062
| 1.125769
|
params = [
click.option('-m', '--mets', help="METS URL to validate"),
click.option('-w', '--working-dir', help="Working Directory"),
click.option('-I', '--input-file-grp', help='File group(s) used as input.', default='INPUT'),
click.option('-O', '--output-file-grp', help='File group(s) used as output.', default='OUTPUT'),
click.option('-g', '--page-id', help="ID(s) of the pages to process"),
click.option('-p', '--parameter', type=click.Path()),
click.option('-J', '--dump-json', help="Dump tool description as JSON and exit", is_flag=True, default=False),
loglevel_option,
click.option('-V', '--version', help="Show version", is_flag=True, default=False)
]
for param in params:
param(f)
return f
|
def ocrd_cli_options(f)
|
Implement MP CLI.
Usage::
from ocrd.utils import ocrd_click_cli
@click.command()
@ocrd_click_cli
def cli(mets_url):
print(mets_url)
| 3.215955
| 3.249091
| 0.989802
|
body = ''
for k in ['warning', 'error', 'notice']:
for msg in self.__dict__[k + 's']:
body += '\n <%s>%s</%s>' % (k, msg, k)
return '<report valid="%s">%s\n</report>' % ("true" if self.is_valid else "false", body)
|
def to_xml(self)
|
Serialize to XML.
| 4.026864
| 3.89965
| 1.032622
|
self.notices += otherself.notices
self.warnings += otherself.warnings
self.errors += otherself.errors
|
def merge_report(self, otherself)
|
Merge another report into this one.
| 3.244481
| 2.644099
| 1.227065
|
log = getLogger('ocrd.cli.process')
run_tasks(mets, log_level, page_id, tasks)
log.info("Finished")
|
def process_cli(log_level, mets, page_id, tasks)
|
Process a series of tasks
| 5.858285
| 5.816246
| 1.007228
|
resolver = Resolver()
workspace = Workspace(resolver, directory=directory, mets_basename=mets_basename)
workspace_bagger = WorkspaceBagger(resolver)
workspace_bagger.bag(
workspace,
dest=dest,
ocrd_identifier=identifier,
ocrd_manifestation_depth=manifestation_depth,
ocrd_mets=mets,
ocrd_base_version_checksum=base_version_checksum,
processes=processes,
tag_files=tag_file,
skip_zip=skip_zip,
in_place=in_place
)
|
def bag(directory, mets_basename, dest, identifier, in_place, manifestation_depth, mets, base_version_checksum, tag_file, skip_zip, processes)
|
Bag workspace as OCRD-ZIP at DEST
| 2.573303
| 2.3988
| 1.072746
|
resolver = Resolver()
workspace_bagger = WorkspaceBagger(resolver)
workspace = workspace_bagger.spill(src, directory)
print(workspace)
|
def spill(directory, src)
|
Spill/unpack OCRD-ZIP bag at SRC to DEST
SRC must exist and be an OCRD-ZIP
DEST must not exist and be a directory
| 7.32547
| 7.28523
| 1.005524
|
resolver = Resolver()
validator = OcrdZipValidator(resolver, src)
report = validator.validate(**kwargs)
print(report)
if not report.is_valid:
sys.exit(1)
|
def validate(src, **kwargs)
|
Validate OCRD-ZIP
SRC must exist and be an OCRD-ZIP, either a ZIP file or a directory.
| 5.51762
| 3.959104
| 1.393654
|
log = getLogger('ocrd.workspace_backup.restore')
bak = None
candidates = glob(join(self.backup_directory, '%s*' % chksum))
if not candidates:
log.error("No backup found: %s" % chksum)
return
if len(candidates) > 1 and not choose_first:
raise Exception("Not unique, could be\n%s" % '\n'.join(candidates))
bak = candidates[0]
self.add()
log.info("Restoring from %s/mets.xml" % bak)
src = join(bak, 'mets.xml')
dest = self.workspace.mets_target
log.debug('cp "%s" "%s"', src, dest)
copy(src, dest)
self.workspace.reload_mets()
|
def restore(self, chksum, choose_first=False)
|
Restore mets.xml to previous state
| 3.798871
| 3.658676
| 1.038319
|
log = getLogger('ocrd.workspace_backup.add')
mets_str = self.workspace.mets.to_xml()
chksum = _chksum(mets_str)
backups = self.list()
if backups and backups[0].chksum == chksum:
log.info('No changes since last backup: %s' % backups[0])
else:
timestamp = datetime.now().timestamp()
d = join(self.backup_directory, '%s.%s' % (chksum, timestamp))
mets_file = join(d, 'mets.xml')
log.info("Backing up to %s" % mets_file)
makedirs(d)
with open(mets_file, 'wb') as f:
f.write(mets_str)
return chksum
|
def add(self)
|
Create a backup in <self.backup_directory>
| 3.530754
| 3.19964
| 1.103485
|
backups = []
for d in glob(join(self.backup_directory, '*')):
backups.append(WorkspaceBackup.from_path(d))
backups.sort(key=lambda b: b.lastmod, reverse=True)
return backups
|
def list(self)
|
List all backups as WorkspaceBackup objects, sorted descending by lastmod.
| 5.385238
| 2.771341
| 1.943189
|
log = getLogger('ocrd.workspace_backup.undo')
backups = self.list()
if backups:
last_backup = backups[0]
self.restore(last_backup.chksum, choose_first=True)
else:
log.info("No backups, nothing to undo.")
|
def undo(self)
|
Restore to last version
| 8.4953
| 7.593807
| 1.118714
|
sio = StringIO()
el.export(sio, 0, name_='PcGts', namespacedef_='xmlns:pc="%s"' % NAMESPACES['page'])
return '<?xml version="1.0" encoding="UTF-8"?>\n' + sio.getvalue()
|
def to_xml(el)
|
Serialize ``pc:PcGts`` document
| 3.732405
| 3.025847
| 1.233508
|
lvl = _ocrdLevel2pythonLevel.get(lvl, lvl)
return logging.getLevelName(lvl)
|
def getLevelName(lvl)
|
Get (numerical) python logging level for (string) spec-defined log level name.
| 11.52422
| 8.653856
| 1.331686
|
if lvl is None:
return
logging.info('Overriding log level globally to %s', lvl)
lvl = getLevelName(lvl)
global _overrideLogLevel # pylint: disable=global-statement
_overrideLogLevel = lvl
logging.getLogger('').setLevel(lvl)
for loggerName in logging.Logger.manager.loggerDict:
logger = logging.Logger.manager.loggerDict[loggerName]
if isinstance(logger, logging.PlaceHolder):
continue
logger.setLevel(logging.NOTSET)
|
def setOverrideLogLevel(lvl)
|
Override all logger filter levels to include lvl and above.
- Set root logger level
- iterates all existing loggers and sets their log level to ``NOTSET``.
Args:
lvl (string): Log level name.
| 2.892003
| 3.164366
| 0.913928
|
logger = logging.getLogger(*args, **kwargs)
if _overrideLogLevel is not None:
logger.setLevel(logging.NOTSET)
return logger
|
def getLogger(*args, **kwargs)
|
Wrapper around ``logging.getLogger`` that respects `overrideLogLevel <#setOverrideLogLevel>`_.
| 4.183034
| 2.922846
| 1.431151
|
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s.%(msecs)03d %(levelname)s %(name)s - %(message)s',
datefmt='%H:%M:%S')
logging.getLogger('').setLevel(logging.INFO)
# logging.getLogger('ocrd.resolver').setLevel(logging.INFO)
# logging.getLogger('ocrd.resolver.download_to_directory').setLevel(logging.INFO)
# logging.getLogger('ocrd.resolver.add_files_to_mets').setLevel(logging.INFO)
logging.getLogger('PIL').setLevel(logging.INFO)
# Allow overriding
CONFIG_PATHS = [
os.path.curdir,
os.path.join(os.path.expanduser('~')),
'/etc',
]
for p in CONFIG_PATHS:
config_file = os.path.join(p, 'ocrd_logging.py')
if os.path.exists(config_file):
logging.info("Loading logging configuration from '%s'", config_file)
with open(config_file) as f:
code = compile(f.read(), config_file, 'exec')
exec(code, globals(), locals())
|
def initLogging()
|
Sets logging defaults
| 2.352602
| 2.339586
| 1.005564
|
if ocrd_manifestation_depth not in ('full', 'partial'):
raise Exception("manifestation_depth must be 'full' or 'partial'")
if in_place and (dest is not None):
raise Exception("Setting 'dest' and 'in_place' is a contradiction")
if in_place and not skip_zip:
raise Exception("Setting 'skip_zip' and not 'in_place' is a contradiction")
if tag_files is None:
tag_files = []
# create bagdir
bagdir = mkdtemp(prefix=TMP_BAGIT_PREFIX)
if dest is None:
if in_place:
dest = workspace.directory
elif not skip_zip:
dest = '%s.ocrd.zip' % workspace.directory
else:
dest = '%s.ocrd' % workspace.directory
log.info("Bagging %s to %s (temp dir %s)", workspace.directory, '(in-place)' if in_place else dest, bagdir)
# create data dir
makedirs(join(bagdir, 'data'))
# create bagit.txt
with open(join(bagdir, 'bagit.txt'), 'wb') as f:
f.write(BAGIT_TXT.encode('utf-8'))
# create manifests
total_bytes, total_files = self._bag_mets_files(workspace, bagdir, ocrd_manifestation_depth, ocrd_mets, processes)
# create bag-info.txt
bag = Bag(bagdir)
self._set_bag_info(bag, total_bytes, total_files, ocrd_identifier, ocrd_manifestation_depth, ocrd_base_version_checksum)
for tag_file in tag_files:
copyfile(tag_file, join(bagdir, basename(tag_file)))
# save bag
bag.save()
# ZIP it
self._serialize_bag(workspace, bagdir, dest, in_place, skip_zip)
log.info('Created bag at %s', dest)
return dest
|
def bag(self,
workspace,
ocrd_identifier,
dest=None,
ocrd_mets='mets.xml',
ocrd_manifestation_depth='full',
ocrd_base_version_checksum=None,
processes=1,
skip_zip=False,
in_place=False,
tag_files=None
)
|
Bag a workspace
See https://ocr-d.github.io/ocrd_zip#packing-a-workspace-as-ocrd-zip
Arguments:
workspace (ocrd.Workspace): workspace to bag
ocrd_identifier (string): Ocrd-Identifier in bag-info.txt
dest (string): Path of the generated OCRD-ZIP.
ocrd_mets (string): Ocrd-Mets in bag-info.txt
ocrd_manifestation_depth (string): Ocrd-Manifestation-Depth in bag-info.txt
ocrd_base_version_checksum (string): Ocrd-Base-Version-Checksum in bag-info.txt
processes (integer): Number of parallel processes checksumming
skip_zip (boolean): Whether to leave directory unzipped
in_place (boolean): Whether to **replace** the workspace with its BagIt variant
tag_files (list<string>): Path names of additional tag files to be bagged at the root of the bag
| 2.609237
| 2.6075
| 1.000666
|
# print(dest)
if exists(dest) and not isdir(dest):
raise Exception("Not a directory: %s" % dest)
# If dest is an existing directory, try to derive its name from src
if isdir(dest):
workspace_name = re.sub(r'(\.ocrd)?\.zip$', '', basename(src))
new_dest = join(dest, workspace_name)
if exists(new_dest):
raise Exception("Directory exists: %s" % new_dest)
dest = new_dest
log.info("Spilling %s to %s", src, dest)
bagdir = mkdtemp(prefix=TMP_BAGIT_PREFIX)
unzip_file_to_dir(src, bagdir)
datadir = join(bagdir, 'data')
for root, _, files in walk(datadir):
for f in files:
srcfile = join(root, f)
destdir = join(dest, relpath(root, datadir))
destfile = join(destdir, f)
if not exists(destdir):
makedirs(destdir)
log.debug("Copy %s -> %s", srcfile, destfile)
copyfile(srcfile, destfile)
# TODO copy allowed tag files if present
# TODO validate bagit
# Drop tempdir
rmtree(bagdir)
# Create workspace
workspace = Workspace(self.resolver, directory=dest)
# TODO validate workspace
return workspace
|
def spill(self, src, dest)
|
Spill a workspace, i.e. unpack it and turn it into a workspace.
See https://ocr-d.github.io/ocrd_zip#unpacking-ocrd-zip-to-a-workspace
Arguments:
src (string): Path to OCRD-ZIP
dest (string): Path to directory to unpack data folder to
| 3.350725
| 3.073085
| 1.090345
|
if self.baseurl and '://' not in url:
url = join(self.baseurl, url)
return self.resolver.download_to_directory(self.directory, url, **kwargs)
|
def download_url(self, url, **kwargs)
|
Download a URL to the workspace.
Args:
url (string): URL to download to directory
**kwargs : See :py:mod:`ocrd.resolver.Resolver`
Returns:
The local filename of the downloaded file
| 5.215705
| 6.141569
| 0.849246
|
# os.chdir(self.directory)
# log.info('f=%s' % f)
oldpwd = os.getcwd()
try:
os.chdir(self.directory)
if is_local_filename(f.url):
f.local_filename = abspath(f.url)
else:
if f.local_filename:
log.debug("Already downloaded: %s", f.local_filename)
else:
f.local_filename = self.download_url(f.url, basename='%s/%s' % (f.fileGrp, f.ID))
finally:
os.chdir(oldpwd)
# print(f)
return f
|
def download_file(self, f)
|
Download a :py:mod:`ocrd.model.ocrd_file.OcrdFile` to the workspace.
| 4.018727
| 3.781966
| 1.062603
|
log.debug(
'outputfile file_grp=%s local_filename=%s content=%s',
file_grp,
kwargs.get('local_filename'),
content is not None)
if content is not None and 'local_filename' not in kwargs:
raise Exception("'content' was set but no 'local_filename'")
oldpwd = os.getcwd()
try:
os.chdir(self.directory)
if 'local_filename' in kwargs:
local_filename_dir = kwargs['local_filename'].rsplit('/', 1)[0]
if not os.path.isdir(local_filename_dir):
os.makedirs(local_filename_dir)
if 'url' not in kwargs:
kwargs['url'] = kwargs['local_filename']
# print(kwargs)
ret = self.mets.add_file(file_grp, **kwargs)
if content is not None:
with open(kwargs['local_filename'], 'wb') as f:
if isinstance(content, str):
content = bytes(content, 'utf-8')
f.write(content)
finally:
os.chdir(oldpwd)
return ret
|
def add_file(self, file_grp, content=None, **kwargs)
|
Add an output file. Creates an :class:`OcrdFile` to pass around and adds that to the
OcrdMets OUTPUT section.
| 2.565093
| 2.465346
| 1.04046
|
log.info("Saving mets '%s'" % self.mets_target)
if self.automatic_backup:
WorkspaceBackupManager(self).add()
with open(self.mets_target, 'wb') as f:
f.write(self.mets.to_xml(xmllint=True))
|
def save_mets(self)
|
Write out the current state of the METS file.
| 5.262517
| 5.138351
| 1.024165
|
files = self.mets.find_files(url=image_url)
if files:
image_filename = self.download_file(files[0]).local_filename
else:
image_filename = self.download_url(image_url)
if image_url not in self.image_cache['exif']:
self.image_cache['exif'][image_url] = OcrdExif(Image.open(image_filename))
return self.image_cache['exif'][image_url]
|
def resolve_image_exif(self, image_url)
|
Get the EXIF metadata about an image URL as :class:`OcrdExif`
Args:
image_url (string) : URL of image
Return
:class:`OcrdExif`
| 3.122411
| 3.174863
| 0.983479
|
files = self.mets.find_files(url=image_url)
if files:
image_filename = self.download_file(files[0]).local_filename
else:
image_filename = self.download_url(image_url)
if image_url not in self.image_cache['pil']:
self.image_cache['pil'][image_url] = Image.open(image_filename)
pil_image = self.image_cache['pil'][image_url]
if coords is None:
return pil_image
if image_url not in self.image_cache['cv2']:
log.debug("Converting PIL to OpenCV: %s", image_url)
color_conversion = cv2.COLOR_GRAY2BGR if pil_image.mode in ('1', 'L') else cv2.COLOR_RGB2BGR
pil_as_np_array = np.array(pil_image).astype('uint8') if pil_image.mode == '1' else np.array(pil_image)
self.image_cache['cv2'][image_url] = cv2.cvtColor(pil_as_np_array, color_conversion)
cv2_image = self.image_cache['cv2'][image_url]
poly = np.array(coords, np.int32)
log.debug("Cutting region %s from %s", coords, image_url)
region_cut = cv2_image[
np.min(poly[:, 1]):np.max(poly[:, 1]),
np.min(poly[:, 0]):np.max(poly[:, 0])
]
return Image.fromarray(region_cut)
|
def resolve_image_as_pil(self, image_url, coords=None)
|
Resolve an image URL to a PIL image.
Args:
coords (list) : Coordinates of the bounding box to cut from the image
Returns:
Image or region in image as PIL.Image
| 2.222643
| 2.231815
| 0.99589
|
ret = '<exif>'
for k in self.__dict__:
ret += '<%s>%s</%s>' % (k, self.__dict__[k], k)
ret += '</exif>'
return ret
|
def to_xml(self)
|
Serialize all properties as XML
| 2.675498
| 2.570735
| 1.040752
|
ret = self.basename.rsplit('.', 1)[0]
if ret.endswith('.tar'):
ret = ret[0:len(ret)-4]
return ret
|
def basename_without_extension(self)
|
Get the ``os.path.basename`` of the local file, if any, with extension removed.
| 3.898163
| 4.044707
| 0.963769
|
if self.mets is None:
raise Exception("OcrdFile %s has no member 'mets' pointing to parent OcrdMets" % self)
return self.mets.get_physical_page_for_file(self)
|
def pageId(self)
|
Get the ID of the physical page this file manifests.
| 11.056329
| 8.546486
| 1.29367
|
if pageId is None:
return
if self.mets is None:
raise Exception("OcrdFile %s has no member 'mets' pointing to parent OcrdMets" % self)
self.mets.set_physical_page_for_file(pageId, self)
|
def pageId(self, pageId)
|
Set the ID of the physical page this file manifests.
| 9.231158
| 8.073985
| 1.143321
|
el_FLocat = self._el.find(TAG_METS_FLOCAT)
if el_FLocat is not None:
return el_FLocat.get("{%s}href" % NS["xlink"])
return ''
|
def url(self)
|
Get the ``xlink:href`` of this file.
| 5.55443
| 4.465397
| 1.243883
|
if url is None:
return
el_FLocat = self._el.find('mets:FLocat', NS)
if el_FLocat is None:
el_FLocat = ET.SubElement(self._el, TAG_METS_FLOCAT)
el_FLocat.set("{%s}href" % NS["xlink"], url)
|
def url(self, url)
|
Set the ``xlink:href`` of this file.
| 3.50469
| 3.326192
| 1.053664
|
tpl = METS_XML_EMPTY.decode('utf-8')
tpl = tpl.replace('{{ VERSION }}', VERSION)
tpl = tpl.replace('{{ NOW }}', '%s' % datetime.now())
return OcrdMets(content=tpl.encode('utf-8'))
|
def empty_mets()
|
Create an empty METS file from bundled template.
| 6.569492
| 6.099357
| 1.077079
|
for t in IDENTIFIER_PRIORITY:
found = self._tree.getroot().find('.//mods:identifier[@type="%s"]' % t, NS)
if found is not None:
return found.text
|
def unique_identifier(self)
|
Get the unique identifier by looking through ``mods:identifier``
See `specs <https://ocr-d.github.io/mets#unique-id-for-the-document-processed>`_ for details.
| 7.981709
| 6.15626
| 1.296519
|
id_el = None
for t in IDENTIFIER_PRIORITY:
id_el = self._tree.getroot().find('.//mods:identifier[@type="%s"]' % t, NS)
if id_el is not None:
break
if id_el is None:
mods = self._tree.getroot().find('.//mods:mods', NS)
id_el = ET.SubElement(mods, TAG_MODS_IDENTIFIER)
id_el.set('type', 'purl')
id_el.text = purl
|
def unique_identifier(self, purl)
|
Set the unique identifier by looking through ``mods:identifier``
See `specs <https://ocr-d.github.io/mets#unique-id-for-the-document-processed>`_ for details.
| 3.290498
| 3.125207
| 1.052889
|
return [OcrdAgent(el_agent) for el_agent in self._tree.getroot().findall('mets:metsHdr/mets:agent', NS)]
|
def agents(self)
|
List all `OcrdAgent </../../ocrd_models/ocrd_models.ocrd_agent.html>`_
| 12.595526
| 7.14414
| 1.763057
|
el_metsHdr = self._tree.getroot().find('.//mets:metsHdr', NS)
if el_metsHdr is None:
el_metsHdr = ET.Element(TAG_METS_METSHDR)
self._tree.getroot().insert(0, el_metsHdr)
# assert(el_metsHdr is not None)
el_agent = ET.SubElement(el_metsHdr, TAG_METS_AGENT)
# print(ET.tostring(el_metsHdr))
return OcrdAgent(el_agent, *args, **kwargs)
|
def add_agent(self, *args, **kwargs)
|
Add an `OcrdAgent </../../ocrd_models/ocrd_models.ocrd_agent.html>`_ to the list of agents in the metsHdr.
| 3.114373
| 2.258498
| 1.378957
|
return [el.get('USE') for el in self._tree.getroot().findall('.//mets:fileGrp', NS)]
|
def file_groups(self)
|
List the ``USE`` attributes of all ``mets:fileGrp``.
| 12.568583
| 3.925278
| 3.201959
|
ret = []
fileGrp_clause = '' if fileGrp is None else '[@USE="%s"]' % fileGrp
file_clause = ''
if ID is not None:
file_clause += '[@ID="%s"]' % ID
if mimetype is not None:
file_clause += '[@MIMETYPE="%s"]' % mimetype
if url is not None:
file_clause += '[mets:FLocat[@xlink:href = "%s"]]' % url
# TODO lxml says invalid predicate. I disagree
# if local_only:
# file_clause += "[mets:FLocat[starts-with(@xlink:href, 'file://')]]"
# Search
file_ids = self._tree.getroot().xpath("//mets:fileGrp%s/mets:file%s/@ID" % (fileGrp_clause, file_clause), namespaces=NS)
if pageId is not None:
by_pageid = self._tree.getroot().xpath('//mets:div[@TYPE="page"][@ID="%s"]/mets:fptr/@FILEID' % pageId, namespaces=NS)
file_ids = [i for i in by_pageid if i in file_ids]
# instantiate / get from cache
for file_id in file_ids:
el = self._tree.getroot().find('.//mets:file[@ID="%s"]' % file_id, NS)
if file_id not in self._file_by_id:
self._file_by_id[file_id] = OcrdFile(el, mets=self)
# If only local resources should be returned and file is neither a
# file:// URL nor a file path: skip the file
url = self._file_by_id[file_id].url
if local_only and not (url.startswith('file://') or '://' not in url):
continue
ret.append(self._file_by_id[file_id])
return ret
|
def find_files(self, ID=None, fileGrp=None, pageId=None, mimetype=None, url=None, local_only=False)
|
Search ``mets:file`` in this METS document.
Args:
ID (string) : ID of the file
fileGrp (string) : USE of the fileGrp to list files of
pageId (string) : ID of physical page manifested by matching files
url (string) : @xlink:href of mets:Flocat of mets:file
mimetype (string) : MIMETYPE of matching files
local (boolean) : Whether to restrict results to local files, i.e. file://-URL
Return:
List of files.
| 2.997252
| 2.925375
| 1.02457
|
el_fileSec = self._tree.getroot().find('mets:fileSec', NS)
if el_fileSec is None:
el_fileSec = ET.SubElement(self._tree.getroot(), TAG_METS_FILESEC)
el_fileGrp = el_fileSec.find('mets:fileGrp[@USE="%s"]' % fileGrp, NS)
if el_fileGrp is None:
el_fileGrp = ET.SubElement(el_fileSec, TAG_METS_FILEGRP)
el_fileGrp.set('USE', fileGrp)
return el_fileGrp
|
def add_file_group(self, fileGrp)
|
Add a new ``mets:fileGrp``.
Arguments:
fileGrp (string): ``USE`` attribute of the new filegroup.
| 1.97692
| 1.975967
| 1.000482
|
if not ID:
raise Exception("Must set ID of the mets:file")
el_fileGrp = self._tree.getroot().find(".//mets:fileGrp[@USE='%s']" % (fileGrp), NS)
if el_fileGrp is None:
el_fileGrp = self.add_file_group(fileGrp)
if ID is not None and self.find_files(ID=ID) != []:
if not force:
raise Exception("File with ID='%s' already exists" % ID)
mets_file = self.find_files(ID=ID)[0]
else:
mets_file = OcrdFile(ET.SubElement(el_fileGrp, TAG_METS_FILE), mets=self)
mets_file.url = url
mets_file.mimetype = mimetype
mets_file.ID = ID
mets_file.pageId = pageId
mets_file.local_filename = local_filename
self._file_by_id[ID] = mets_file
return mets_file
|
def add_file(self, fileGrp, mimetype=None, url=None, ID=None, pageId=None, force=False, local_filename=None, **kwargs)
|
Add a `OcrdFile </../../ocrd_models/ocrd_models.ocrd_file.html>`_.
Arguments:
fileGrp (string): Add file to ``mets:fileGrp`` with this ``USE`` attribute
mimetype (string):
url (string):
ID (string):
pageId (string):
force (boolean): Whether to add the file even if a ``mets:file`` with the same ``ID`` already exists.
local_filename (string):
mimetype (string):
| 2.515907
| 2.27981
| 1.10356
|
# print(pageId, ocrd_file)
# delete any page mapping for this file.ID
for el_fptr in self._tree.getroot().findall(
'mets:structMap[@TYPE="PHYSICAL"]/mets:div[@TYPE="physSequence"]/mets:div[@TYPE="page"]/mets:fptr[@FILEID="%s"]' %
ocrd_file.ID, namespaces=NS):
el_fptr.getparent().remove(el_fptr)
# find/construct as necessary
el_structmap = self._tree.getroot().find('mets:structMap[@TYPE="PHYSICAL"]', NS)
if el_structmap is None:
el_structmap = ET.SubElement(self._tree.getroot(), TAG_METS_STRUCTMAP)
el_structmap.set('TYPE', 'PHYSICAL')
el_seqdiv = el_structmap.find('mets:div[@TYPE="physSequence"]', NS)
if el_seqdiv is None:
el_seqdiv = ET.SubElement(el_structmap, TAG_METS_DIV)
el_seqdiv.set('TYPE', 'physSequence')
el_pagediv = el_seqdiv.find('mets:div[@ID="%s"]' % pageId, NS)
if el_pagediv is None:
el_pagediv = ET.SubElement(el_seqdiv, TAG_METS_DIV)
el_pagediv.set('TYPE', 'page')
el_pagediv.set('ID', pageId)
if order:
el_pagediv.set('ORDER', order)
if orderlabel:
el_pagediv.set('ORDERLABEL', orderlabel)
el_fptr = ET.SubElement(el_pagediv, TAG_METS_FPTR)
el_fptr.set('FILEID', ocrd_file.ID)
|
def set_physical_page_for_file(self, pageId, ocrd_file, order=None, orderlabel=None)
|
Create a new physical page
| 1.980444
| 1.996794
| 0.991812
|
ret = self._tree.getroot().xpath(
'/mets:mets/mets:structMap[@TYPE="PHYSICAL"]/mets:div[@TYPE="physSequence"]/mets:div[@TYPE="page"][./mets:fptr[@FILEID="%s"]]/@ID' %
ocrd_file.ID, namespaces=NS)
if ret:
return ret[0]
|
def get_physical_page_for_file(self, ocrd_file)
|
Get the pageId for a ocrd_file
| 3.70295
| 3.567458
| 1.03798
|
try:
self._resolve_workspace()
if 'mets_unique_identifier' not in self.skip:
self._validate_mets_unique_identifier()
if 'mets_file_group_names' not in self.skip:
self._validate_mets_file_group_names()
if 'mets_files' not in self.skip:
self._validate_mets_files()
if 'pixel_density' not in self.skip:
self._validate_pixel_density()
if 'page' not in self.skip:
self._validate_page()
except Exception as e: # pylint: disable=broad-except
self.report.add_error("Failed to instantiate workspace: %s" % e)
# raise e
return self.report
|
def _validate(self)
|
Actual validation.
| 3.056581
| 2.969996
| 1.029153
|
if self.workspace is None:
self.workspace = self.resolver.workspace_from_url(self.mets_url, baseurl=self.src_dir, download=self.download)
self.mets = self.workspace.mets
|
def _resolve_workspace(self)
|
Clone workspace from mets_url unless workspace was provided.
| 5.375788
| 3.279457
| 1.639231
|
for f in [f for f in self.mets.find_files() if f.mimetype.startswith('image/')]:
if not f.local_filename and not self.download:
self.report.add_notice("Won't download remote image <%s>" % f.url)
continue
exif = self.workspace.resolve_image_exif(f.url)
for k in ['xResolution', 'yResolution']:
v = exif.__dict__.get(k)
if v is None or v <= 72:
self.report.add_error("Image %s: %s (%s pixels per %s) is too low" % (f.ID, k, v, exif.resolutionUnit))
|
def _validate_pixel_density(self)
|
Validate image pixel density
See `spec <https://ocr-d.github.io/mets#pixel-density-of-images-must-be-explicit-and-high-enough>`_.
| 5.319778
| 5.014132
| 1.060957
|
for fileGrp in self.mets.file_groups:
if not fileGrp.startswith(FILE_GROUP_PREFIX):
self.report.add_notice("fileGrp USE does not begin with '%s': %s" % (FILE_GROUP_PREFIX, fileGrp))
else:
# OCR-D-FOO-BAR -> ('FOO', 'BAR')
# \____/\_/ \_/
# | | |
# Prefix | Name
# Category
category = fileGrp[len(FILE_GROUP_PREFIX):]
name = None
if '-' in category:
category, name = category.split('-', 1)
if category not in FILE_GROUP_CATEGORIES:
self.report.add_error("Unspecified USE category '%s' in fileGrp '%s'" % (category, fileGrp))
if name is not None and not re.match(r'^[A-Z0-9-]{3,}$', name):
self.report.add_error("Invalid USE name '%s' in fileGrp '%s'" % (name, fileGrp))
|
def _validate_mets_file_group_names(self)
|
Ensure ``USE`` attributes of ``mets:fileGrp`` conform to OCR-D naming schema..
See `spec <https://ocr-d.github.io/mets#file-group-use-syntax>`_.
| 3.701177
| 3.491451
| 1.060069
|
if not self.mets.find_files():
self.report.add_error("No files")
for f in self.mets.find_files():
if f._el.get('GROUPID'): # pylint: disable=protected-access
self.report.add_notice("File '%s' has GROUPID attribute - document might need an update" % f.ID)
if not f.pageId:
self.report.add_error("File '%s' does not manifest any physical page." % f.ID)
if 'url' not in self.skip and ':/' in f.url:
if re.match(r'^file:/[^/]', f.url):
self.report.add_warning("File '%s' has an invalid (Java-specific) file URL '%s'" % (f.ID, f.url))
scheme = f.url[0:f.url.index(':')]
if scheme not in ('http', 'https', 'file'):
self.report.add_warning("File '%s' has non-HTTP, non-file URL '%s'" % (f.ID, f.url))
|
def _validate_mets_files(self)
|
Validate ``mets:file`` URLs are sane.
| 3.88241
| 3.617185
| 1.073324
|
for ocrd_file in self.mets.find_files(mimetype=MIMETYPE_PAGE, local_only=True):
self.workspace.download_file(ocrd_file)
page_report = PageValidator.validate(ocrd_file=ocrd_file, strictness=self.page_strictness)
self.report.merge_report(page_report)
|
def _validate_page(self)
|
Run PageValidator on the PAGE-XML documents referenced in the METS.
| 5.12472
| 4.19383
| 1.221967
|
instance = kwargs.get('instance', None)
if instance is not None:
del kwargs['instance']
request = kwargs.get('request', None)
if request is not None:
del kwargs['request']
# Let's grab the current IP of the user.
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
remote_ip = x_forwarded_for.split(',')[0]
else:
remote_ip = request.META.get('REMOTE_ADDR')
kwargs.setdefault('remote_ip', remote_ip)
if instance is not None:
pk = self._get_pk_value(instance)
kwargs.setdefault(
'content_type',
ContentType.objects.get_for_model(instance)
)
kwargs.setdefault('object_pk', pk)
kwargs.setdefault('object_repr', smart_text(instance))
if isinstance(pk, integer_types):
kwargs.setdefault('object_id', pk)
get_object_extra_info = getattr(
instance,
'get_object_extra_info',
None
)
if callable(get_object_extra_info):
kwargs.setdefault('object_extra_info', get_object_extra_info())
# Delete log entries with the same pk as a newly created model.
# This should only be necessary when an pk is used twice.
if kwargs.get('action', None) is app_conf.CREATE:
is_obj_exists = self.filter(
content_type=kwargs.get('content_type'),
object_id=kwargs.get('object_id')
).exists()
if kwargs.get('object_id', None) is not None and is_obj_exists:
self.filter(
content_type=kwargs.get('content_type'),
object_id=kwargs.get('object_id')
).delete()
else:
self.filter(
content_type=kwargs.get('content_type'),
object_pk=kwargs.get('object_pk', '')
).delete()
action_log = self.create(**kwargs)
action_logged.send(sender=LogAction, action=action_log)
return action_log
|
def create_log_action(self, **kwargs)
|
Helper method to create a new log entry.
This method automatically populates some fields when no explicit value is given.
:param instance: The model instance to log a change for.
:type instance: Model
:param kwargs: Field overrides for the :py:class:`LogAction` object.
:return: The new log entry or `None` if there were no changes.
:rtype: LogAction
| 2.328498
| 2.310312
| 1.007872
|
if not isinstance(queryset, QuerySet) or queryset.count() == 0:
return self.none()
content_type = ContentType.objects.get_for_model(queryset.model)
primary_keys = queryset.values_list(queryset.model._meta.pk.name, flat=True)
if isinstance(primary_keys[0], integer_types):
return self.filter(content_type=content_type).filter(Q(object_id__in=primary_keys)).distinct()
else:
return self.filter(content_type=content_type).filter(Q(object_pk__in=primary_keys)).distinct()
|
def get_for_objects(self, queryset)
|
Get log entries for the objects in the specified queryset.
:param queryset: The queryset to get the log entries for.
:type queryset: QuerySet
:return: The LogAction objects for the objects in the given queryset.
:rtype: QuerySet
| 2.133799
| 2.179904
| 0.97885
|
if issubclass(model, Model):
self._registry[model] = {
'include_fields': include_fields,
'exclude_fields': exclude_fields,
}
self._connect_signals(model)
else:
raise TypeError("Supplied model is not a valid model.")
|
def register(self, model, include_fields=[], exclude_fields=[])
|
Register a model with actionslog. Actionslog will then track mutations on this model's instances.
:param model: The model to register.
:type model: Model
:param include_fields: The fields to include. Implicitly excludes all other fields.
:type include_fields: list
:param exclude_fields: The fields to exclude. Overrides the fields to include.
:type exclude_fields: list
| 3.206117
| 3.641111
| 0.880533
|
from actionslog.models import LogAction
# Do not track many to many relations
if field.many_to_many:
return False
# Do not track relations to LogAction
if getattr(field, 'rel', None) is not None and field.rel.to == LogAction:
return False
return True
|
def track_field(field)
|
Returns whether the given field should be tracked by Actionslog.
Untracked fields are many-to-many relations and relations to the Actionslog LogAction model.
:param field: The field to check.
:type field: Field
:return: Whether the given field should be tracked.
:rtype: bool
| 5.27824
| 3.016167
| 1.749982
|
from actionslog.registry import actionslog
if not(old is None or isinstance(old, Model)):
raise TypeError("The supplied old instance is not a valid model instance.")
if not(new is None or isinstance(new, Model)):
raise TypeError("The supplied new instance is not a valid model instance.")
diff = {}
if old is not None and new is not None:
fields = set(old._meta.fields + new._meta.fields)
model_fields = actionslog.get_model_fields(new._meta.model)
elif old is not None:
fields = set(get_fields_in_model(old))
model_fields = actionslog.get_model_fields(old._meta.model)
elif new is not None:
fields = set(get_fields_in_model(new))
model_fields = actionslog.get_model_fields(new._meta.model)
else:
fields = set()
model_fields = None
# Check if fields must be filtered
if model_fields and (model_fields['include_fields'] or model_fields['exclude_fields']) and fields:
filtered_fields = []
if model_fields['include_fields']:
filtered_fields = [field for field in fields
if field.name in model_fields['include_fields']]
else:
filtered_fields = fields
if model_fields['exclude_fields']:
filtered_fields = [field for field in filtered_fields
if field.name not in model_fields['exclude_fields']]
fields = filtered_fields
for field in fields:
try:
old_value = smart_text(getattr(old, field.name, None))
except ObjectDoesNotExist:
old_value = field.default if field.default is not NOT_PROVIDED else None
try:
new_value = smart_text(getattr(new, field.name, None))
except ObjectDoesNotExist:
new_value = None
if old_value != new_value:
diff[field.name] = (smart_text(old_value), smart_text(new_value))
if len(diff) == 0:
diff = None
return diff
|
def model_instance_diff(old, new)
|
Calculates the differences between two model instances. One of the instances may be ``None`` (i.e., a newly
created model or deleted model). This will cause all fields with a value to have changed (from ``None``).
:param old: The old state of the model instance.
:type old: Model
:param new: The new state of the model instance.
:type new: Model
:return: A dictionary with the names of the changed fields as keys and a two tuple of the old and new field values
as value.
:rtype: dict
| 1.974023
| 1.971108
| 1.001479
|
# Initialize thread local storage
threadlocal.actionslog = {
'signal_duid': (self.__class__, time.time()),
'remote_ip': request.META.get('REMOTE_ADDR'),
}
# In case of proxy, set 'original' address
if request.META.get('HTTP_X_FORWARDED_FOR'):
threadlocal.actionslog['remote_ip'] = request.META.get('HTTP_X_FORWARDED_FOR').split(',')[0]
# Connect signal for automatic logging
if hasattr(request, 'user') and hasattr(request.user, 'is_authenticated') and request.user.is_authenticated():
set_user = curry(self.set_user, request.user)
pre_save.connect(set_user, sender=LogAction, dispatch_uid=threadlocal.actionslog['signal_duid'], weak=False)
|
def process_request(self, request)
|
Gets the current user from the request and prepares and connects a signal receiver with the user already
attached to it.
| 4.095666
| 3.811563
| 1.074537
|
if hasattr(threadlocal, 'actionslog'):
pre_save.disconnect(sender=LogAction, dispatch_uid=threadlocal.actionslog['signal_duid'])
return response
|
def process_response(self, request, response)
|
Disconnects the signal receiver to prevent it from staying active.
| 15.935753
| 13.521506
| 1.178549
|
if hasattr(threadlocal, 'actionslog'):
pre_save.disconnect(sender=LogAction, dispatch_uid=threadlocal.actionslog['signal_duid'])
return None
|
def process_exception(self, request, exception)
|
Disconnects the signal receiver to prevent it from staying active in case of an exception.
| 16.622293
| 13.573419
| 1.224621
|
try:
app_label, model_name = settings.AUTH_USER_MODEL.split('.')
auth_user_model = apps.get_model(app_label, model_name)
except ValueError:
auth_user_model = apps.get_model('auth', 'user')
if sender == LogAction and isinstance(user, auth_user_model) and instance.user is None:
instance.user = user
if hasattr(threadlocal, 'actionslog'):
instance.remote_ip = threadlocal.actionslog['remote_ip']
|
def set_user(user, sender, instance, **kwargs)
|
Signal receiver with an extra, required 'user' kwarg. This method becomes a real (valid) signal receiver when
it is curried with the user.
| 3.343606
| 3.246859
| 1.029797
|
if created:
changes = model_instance_diff(None, instance)
log_entry = LogAction.objects.create_log_action(
instance=instance,
action=LogAction.CREATE,
changes=json.dumps(changes),
)
|
def action_log_create(sender, instance, created, **kwargs)
|
Signal receiver that creates a log entry when a model instance is first saved to the database.
Direct use is discouraged, connect your model through :py:func:`actionslog.registry.register` instead.
| 4.290623
| 4.695362
| 0.9138
|
if instance.pk is not None:
try:
old = sender.objects.get(pk=instance.pk)
except sender.DoesNotExist:
pass
else:
new = instance
changes = model_instance_diff(old, new)
# Log an entry only if there are changes
if changes:
log_entry = LogAction.objects.create_log_action(
instance=instance,
action=LogAction.UPDATE,
changes=json.dumps(changes),
)
|
def action_log_update(sender, instance, **kwargs)
|
Signal receiver that creates a log entry when a model instance is changed and saved to the database.
Direct use is discouraged, connect your model through :py:func:`actionslog.registry.register` instead.
| 3.080881
| 3.170008
| 0.971884
|
if instance.pk is not None:
changes = model_instance_diff(instance, None)
log_entry = LogAction.objects.create_log_action(
instance=instance,
action=LogAction.DELETE,
changes=json.dumps(changes),
)
|
def action_log_delete(sender, instance, **kwargs)
|
Signal receiver that creates a log entry when a model instance is deleted from the database.
Direct use is discouraged, connect your model through :py:func:`actionslog.registry.register` instead.
| 4.130332
| 4.176657
| 0.988909
|
LOG.debug("axapi_http: full url = %s", self.url_base + api_url)
LOG.debug("axapi_http: %s url = %s", method, api_url)
LOG.debug("axapi_http: params = %s", json.dumps(logutils.clean(params), indent=4))
# Set "data" variable for the request
if params:
extra_params = kwargs.get('axapi_args', {})
params_copy = merge_dicts(params, extra_params)
LOG.debug("axapi_http: params_all = %s", logutils.clean(params_copy))
payload = json.dumps(params_copy)
else:
try:
payload = kwargs.pop('payload', None)
self.headers = dict(self.HEADERS, **kwargs.pop('headers', {}))
LOG.debug("axapi_http: headers_all = %s", logutils.clean(self.headers))
except KeyError:
payload = None
max_retries = kwargs.get('max_retries', self.max_retries)
timeout = kwargs.get('timeout', self.timeout)
# Create session to set HTTPAdapter or SSLAdapter
session = Session()
if self.port == 443:
# Add adapter for any https session to force TLS1_0 connection for v21 of AXAPI
session.mount('https://', SSLAdapter(max_retries=max_retries))
else:
session.mount('http://', HTTPAdapter(max_retries=max_retries))
session_request = getattr(session, method.lower())
# Make actual request and handle any errors
try:
device_response = session_request(
self.url_base + api_url, verify=False, data=payload, headers=self.HEADERS, timeout=timeout
)
except (Exception) as e:
LOG.error("acos_client failing with error %s after %s retries", e.__class__.__name__, max_retries)
raise e
finally:
session.close()
# Log if the reponse is one of the known broken response
if device_response in broken_replies:
device_response = broken_replies[device_response]
LOG.debug("axapi_http: broken reply, new response: %s", logutils.clean(device_response))
# Validate json response
try:
json_response = device_response.json()
LOG.debug("axapi_http: data = %s", json.dumps(logutils.clean(json_response), indent=4))
except ValueError as e:
# The response is not JSON but it still succeeded.
LOG.debug("axapi_http: json = %s", e)
return device_response
# Handle "fail" responses returned by AXAPI
if 'response' in json_response and 'status' in json_response['response']:
if json_response['response']['status'] == 'fail':
acos_responses.raise_axapi_ex(json_response, action=extract_method(api_url))
# Return json portion of response
return json_response
|
def request(self, method, api_url, params={}, **kwargs)
|
Generate the API call to the device.
| 3.512909
| 3.522795
| 0.997194
|
payload = self._build_payload(host_list=host_list, serial=serial,
instance_name=instance_name,
use_mgmt_port=use_mgmt_port,
interval=interval, bandwidth_base=bandwidth_base,
bandwidth_unrestricted=bandwidth_unrestricted)
return self._post(self.url_base, payload)
|
def create(self, host_list=[], serial=None, instance_name=None, use_mgmt_port=False,
interval=None, bandwidth_base=None, bandwidth_unrestricted=None)
|
Creates a license manager entry
Keyword arguments:
instance_name -- license manager instance name
host_list -- list(dict) a list of dictionaries of the format:
{'ip': '127.0.0.1', 'port': 443}
serial - (str) appliance serial number
use_mgmt_port - (bool) use management for license interactions
interval - (int) 1=Monthly, 2=Daily, 3=Hourly
bandwidth_base - (int) Configure feature bandwidth base (Mb)
Valid range - 10-102400
bandwidth_unrestricted - (bool) Set the bandwidth to maximum
| 1.882752
| 2.269678
| 0.829524
|
return self.create(host_list=host_list, serial=serial, instance_name=instance_name,
use_mgmt_port=use_mgmt_port,
interval=interval, bandwidth_base=bandwidth_base,
bandwidth_unrestricted=bandwidth_unrestricted)
|
def update(self, host_list=[], serial=None, instance_name=None, use_mgmt_port=False,
interval=None, bandwidth_base=None, bandwidth_unrestricted=None)
|
Update a license manager entry
Keyword arguments:
instance_name -- license manager instance name
host_list -- list(dict) a list of dictionaries of the format:
{'ip': '127.0.0.1', 'port': 443}
serial - (str) appliance serial number
use_mgmt_port - (bool) use management for license interactions
interval - (int) 1=Monthly, 2=Daily, 3=Hourly
bandwidth_base - (int) Configure feature bandwidth base (Mb)
Valid range - 10-102400
bandwidth_unrestricted - (bool) Set the bandwidth to maximum
| 1.642781
| 1.967067
| 0.835142
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.