column                  type             range / distinct values
blob_id                 stringlengths    40 to 40
directory_id            stringlengths    40 to 40
path                    stringlengths    2 to 616
content_id              stringlengths    40 to 40
detected_licenses       listlengths      0 to 69
license_type            stringclasses    2 values
repo_name               stringlengths    5 to 118
snapshot_id             stringlengths    40 to 40
revision_id             stringlengths    40 to 40
branch_name             stringlengths    4 to 63
visit_date              timestamp[us]
revision_date           timestamp[us]
committer_date          timestamp[us]
github_id               int64            2.91k to 686M
star_events_count       int64            0 to 209k
fork_events_count       int64            0 to 110k
gha_license_id          stringclasses    23 values
gha_event_created_at    timestamp[us]
gha_created_at          timestamp[us]
gha_language            stringclasses    213 values
src_encoding            stringclasses    30 values
language                stringclasses    1 value
is_vendor               bool             2 classes
is_generated            bool             2 classes
length_bytes            int64            2 to 10.3M
extension               stringclasses    246 values
content                 stringlengths    2 to 10.3M
authors                 listlengths      1 to 1
author_id               stringlengths    0 to 212
9c60554c705a99f445f4cd9a478d7136f409ae20
3574de071eb81b32ec1b2ef1edd8affc1b728e24
/lesson/ftp_client.py
abd63fe1e62360076d922e7235204494601b03cf
[]
no_license
dpochernin/Homework_6
54376cfed1ae5fe4d3cdd3b114a0c7e5b7707f6c
502ee83b01cff38c0fdfbe11ae9f059758b4403d
refs/heads/master
2020-08-24T12:13:22.438742
2019-10-22T13:41:55
2019-10-22T13:41:55
216,823,296
0
0
null
null
null
null
UTF-8
Python
false
false
439
py
from ftplib import FTP

with FTP(host='127.0.0.1', user='user', passwd='12345') as ftp:
    print(ftp.retrlines('LIST'))

    out = '..\\files_for_test\\index.png'
    with open(out, 'wb') as f:
        ftp.retrbinary('RETR index.png', f.write)

    path = '..\\files_for_test\\firm.txt'
    with open(path, 'rb') as file_2:
        print(file_2.name)
        ftp.storbinary('STOR firm.txt', file_2, 1024)

    print(ftp.retrlines('LIST'))
[ "d.pochernin@gmail.com" ]
d.pochernin@gmail.com
eb640e26c331adf4b564538d8301b8d9647bf62b
eb1292224689b4d792ce4f3b51a3af539be710d8
/blog/views.py
19d1328fddb9027dc7d6b102c19386301a84bf42
[]
no_license
esmeraldalopez/first-django-blog
0eab2d6107508a68694d86171a8ccde6fcb93fe3
e5f0b4588e9ead77cd10ae0293315bbb06355d56
refs/heads/master
2021-01-24T01:21:17.750872
2019-06-15T23:35:29
2019-06-15T23:35:29
122,801,868
0
0
null
null
null
null
UTF-8
Python
false
false
1,646
py
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from .models import Post  # import the model defined in models; .models means it is looked up in the same directory
from .forms import PostForm

# Create your views here.
def post_list(request):
    posts = Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')  # QuerySet
    return render(request, 'blog/post_list.html', {'posts': posts})

# post_detail receives pk, which is what it gets from the URL; in this case it can be the blog post number
def post_detail(request, pk):
    # QuerySet to look up the post
    post = get_object_or_404(Post, pk=pk)
    return render(request, 'blog/post_detail.html', {'post': post})

def post_new(request):
    if request.method == 'POST':
        form = PostForm(request.POST)
        if form.is_valid():
            post = form.save(commit=False)
            post.author = request.user
            post.published_date = timezone.now()
            post.save()
            return redirect('post_detail', pk=post.pk)
    else:
        form = PostForm()
    return render(request, 'blog/post_edit.html', {'form': form})

def post_edit(request, pk):
    post = get_object_or_404(Post, pk=pk)
    if request.method == "POST":
        form = PostForm(request.POST, instance=post)
        if form.is_valid():
            post = form.save(commit=False)
            post.author = request.user
            post.save()
            return redirect('post_detail', pk=post.pk)
    else:
        form = PostForm(instance=post)
    return render(request, 'blog/post_edit.html', {'form': form})
[ "esmeralda.lopez.e@gmail.com" ]
esmeralda.lopez.e@gmail.com
7ed072fa1524c95c0ada3f899e91a7dcbcfd91de
9897061cfd34babf80616ff21a20c30db0212970
/server/account/models.py
a01557b1bd74e7b11b8ff7b13401a7a631636ebe
[ "MIT" ]
permissive
Samhaina/mahjong-portal
f310553c5df13e122f3e89d05a9867d0f122d4f1
4cdbd8bd61655584c25a437b3d5cab053507b2f4
refs/heads/master
2020-03-16T22:10:20.864718
2018-10-11T00:45:22
2018-10-11T00:45:22
133,029,373
0
0
null
2018-05-11T11:05:41
2018-05-11T11:05:41
null
UTF-8
Python
false
false
287
py
from django.db import models
from django.contrib.auth.models import AbstractUser

from tournament.models import Tournament


class User(AbstractUser):
    is_tournament_manager = models.BooleanField(default=False)
    managed_tournaments = models.ManyToManyField(Tournament, blank=True)
[ "lisikhin@gmail.com" ]
lisikhin@gmail.com
82b311c67a57cc924a2e56dcce6820aeb0307776
3de806956985605b4f5042879e11d88a859871dd
/notebooks/pdf_structure_env/bin/dumppdf.py
367ccdc95d1ee1fbd5d2f91f771c7cd4cfbb0e42
[]
no_license
qiyuyang16/CS4300_microGoogle
cb25e9f5b0e547f71873cc06e345b7776d1a24a6
e6ac8561a8e20b23bb858dbf4ed745e28bf60b94
refs/heads/master
2023-04-21T05:59:51.299146
2021-05-11T19:23:48
2021-05-11T19:23:48
354,361,818
0
0
null
2021-04-28T15:10:01
2021-04-03T18:12:04
Jupyter Notebook
UTF-8
Python
false
false
12,939
py
#!/home/vince/Documents/Code/microgoogle/CS4300_microGoogle/streamlit_testing/pdf_structure_env/bin/python """Extract pdf structure in XML format""" import logging import os.path import re import sys import warnings from argparse import ArgumentParser import pdfminer from pdfminer.pdfdocument import PDFDocument, PDFNoOutlines, PDFXRefFallback, \ PDFNoValidXRefWarning from pdfminer.pdfpage import PDFPage from pdfminer.pdfparser import PDFParser from pdfminer.pdftypes import PDFObjectNotFound, PDFValueError from pdfminer.pdftypes import PDFStream, PDFObjRef, resolve1, stream_value from pdfminer.psparser import PSKeyword, PSLiteral, LIT from pdfminer.utils import isnumber logging.basicConfig() ESC_PAT = re.compile(r'[\000-\037&<>()"\042\047\134\177-\377]') def e(s): if isinstance(s, bytes): s = str(s, 'latin-1') return ESC_PAT.sub(lambda m: '&#%d;' % ord(m.group(0)), s) def dumpxml(out, obj, codec=None): if obj is None: out.write('<null />') return if isinstance(obj, dict): out.write('<dict size="%d">\n' % len(obj)) for (k, v) in obj.items(): out.write('<key>%s</key>\n' % k) out.write('<value>') dumpxml(out, v) out.write('</value>\n') out.write('</dict>') return if isinstance(obj, list): out.write('<list size="%d">\n' % len(obj)) for v in obj: dumpxml(out, v) out.write('\n') out.write('</list>') return if isinstance(obj, ((str,), bytes)): out.write('<string size="%d">%s</string>' % (len(obj), e(obj))) return if isinstance(obj, PDFStream): if codec == 'raw': out.write(obj.get_rawdata()) elif codec == 'binary': out.write(obj.get_data()) else: out.write('<stream>\n<props>\n') dumpxml(out, obj.attrs) out.write('\n</props>\n') if codec == 'text': data = obj.get_data() out.write('<data size="%d">%s</data>\n' % (len(data), e(data))) out.write('</stream>') return if isinstance(obj, PDFObjRef): out.write('<ref id="%d" />' % obj.objid) return if isinstance(obj, PSKeyword): out.write('<keyword>%s</keyword>' % obj.name) return if isinstance(obj, PSLiteral): out.write('<literal>%s</literal>' % obj.name) return if isnumber(obj): out.write('<number>%s</number>' % obj) return raise TypeError(obj) def dumptrailers(out, doc, show_fallback_xref=False): for xref in doc.xrefs: if not isinstance(xref, PDFXRefFallback) or show_fallback_xref: out.write('<trailer>\n') dumpxml(out, xref.trailer) out.write('\n</trailer>\n\n') no_xrefs = all(isinstance(xref, PDFXRefFallback) for xref in doc.xrefs) if no_xrefs and not show_fallback_xref: msg = 'This PDF does not have an xref. Use --show-fallback-xref if ' \ 'you want to display the content of a fallback xref that ' \ 'contains all objects.' 
warnings.warn(msg, PDFNoValidXRefWarning) return def dumpallobjs(out, doc, codec=None, show_fallback_xref=False): visited = set() out.write('<pdf>') for xref in doc.xrefs: for objid in xref.get_objids(): if objid in visited: continue visited.add(objid) try: obj = doc.getobj(objid) if obj is None: continue out.write('<object id="%d">\n' % objid) dumpxml(out, obj, codec=codec) out.write('\n</object>\n\n') except PDFObjectNotFound as e: print('not found: %r' % e) dumptrailers(out, doc, show_fallback_xref) out.write('</pdf>') return def dumpoutline(outfp, fname, objids, pagenos, password='', dumpall=False, codec=None, extractdir=None): fp = open(fname, 'rb') parser = PDFParser(fp) doc = PDFDocument(parser, password) pages = {page.pageid: pageno for (pageno, page) in enumerate(PDFPage.create_pages(doc), 1)} def resolve_dest(dest): if isinstance(dest, str): dest = resolve1(doc.get_dest(dest)) elif isinstance(dest, PSLiteral): dest = resolve1(doc.get_dest(dest.name)) if isinstance(dest, dict): dest = dest['D'] if isinstance(dest, PDFObjRef): dest = dest.resolve() return dest try: outlines = doc.get_outlines() outfp.write('<outlines>\n') for (level, title, dest, a, se) in outlines: pageno = None if dest: dest = resolve_dest(dest) pageno = pages[dest[0].objid] elif a: action = a if isinstance(action, dict): subtype = action.get('S') if subtype and repr(subtype) == '/\'GoTo\'' and action.get( 'D'): dest = resolve_dest(action['D']) pageno = pages[dest[0].objid] s = e(title).encode('utf-8', 'xmlcharrefreplace') outfp.write('<outline level="{!r}" title="{}">\n'.format(level, s)) if dest is not None: outfp.write('<dest>') dumpxml(outfp, dest) outfp.write('</dest>\n') if pageno is not None: outfp.write('<pageno>%r</pageno>\n' % pageno) outfp.write('</outline>\n') outfp.write('</outlines>\n') except PDFNoOutlines: pass parser.close() fp.close() return LITERAL_FILESPEC = LIT('Filespec') LITERAL_EMBEDDEDFILE = LIT('EmbeddedFile') def extractembedded(outfp, fname, objids, pagenos, password='', dumpall=False, codec=None, extractdir=None): def extract1(objid, obj): filename = os.path.basename(obj.get('UF') or obj.get('F').decode()) fileref = obj['EF'].get('UF') or obj['EF'].get('F') fileobj = doc.getobj(fileref.objid) if not isinstance(fileobj, PDFStream): error_msg = 'unable to process PDF: reference for %r is not a ' \ 'PDFStream' % filename raise PDFValueError(error_msg) if fileobj.get('Type') is not LITERAL_EMBEDDEDFILE: raise PDFValueError( 'unable to process PDF: reference for %r ' 'is not an EmbeddedFile' % (filename)) path = os.path.join(extractdir, '%.6d-%s' % (objid, filename)) if os.path.exists(path): raise IOError('file exists: %r' % path) print('extracting: %r' % path) os.makedirs(os.path.dirname(path), exist_ok=True) out = open(path, 'wb') out.write(fileobj.get_data()) out.close() return with open(fname, 'rb') as fp: parser = PDFParser(fp) doc = PDFDocument(parser, password) extracted_objids = set() for xref in doc.xrefs: for objid in xref.get_objids(): obj = doc.getobj(objid) if objid not in extracted_objids and isinstance(obj, dict) \ and obj.get('Type') is LITERAL_FILESPEC: extracted_objids.add(objid) extract1(objid, obj) return def dumppdf(outfp, fname, objids, pagenos, password='', dumpall=False, codec=None, extractdir=None, show_fallback_xref=False): fp = open(fname, 'rb') parser = PDFParser(fp) doc = PDFDocument(parser, password) if objids: for objid in objids: obj = doc.getobj(objid) dumpxml(outfp, obj, codec=codec) if pagenos: for (pageno, page) in enumerate(PDFPage.create_pages(doc)): 
if pageno in pagenos: if codec: for obj in page.contents: obj = stream_value(obj) dumpxml(outfp, obj, codec=codec) else: dumpxml(outfp, page.attrs) if dumpall: dumpallobjs(outfp, doc, codec, show_fallback_xref) if (not objids) and (not pagenos) and (not dumpall): dumptrailers(outfp, doc, show_fallback_xref) fp.close() if codec not in ('raw', 'binary'): outfp.write('\n') return def create_parser(): parser = ArgumentParser(description=__doc__, add_help=True) parser.add_argument('files', type=str, default=None, nargs='+', help='One or more paths to PDF files.') parser.add_argument( "--version", "-v", action="version", version="pdfminer.six v{}".format(pdfminer.__version__)) parser.add_argument( '--debug', '-d', default=False, action='store_true', help='Use debug logging level.') procedure_parser = parser.add_mutually_exclusive_group() procedure_parser.add_argument( '--extract-toc', '-T', default=False, action='store_true', help='Extract structure of outline') procedure_parser.add_argument( '--extract-embedded', '-E', type=str, help='Extract embedded files') parse_params = parser.add_argument_group( 'Parser', description='Used during PDF parsing') parse_params.add_argument( '--page-numbers', type=int, default=None, nargs='+', help='A space-seperated list of page numbers to parse.') parse_params.add_argument( '--pagenos', '-p', type=str, help='A comma-separated list of page numbers to parse. Included for ' 'legacy applications, use --page-numbers for more idiomatic ' 'argument entry.') parse_params.add_argument( '--objects', '-i', type=str, help='Comma separated list of object numbers to extract') parse_params.add_argument( '--all', '-a', default=False, action='store_true', help='If the structure of all objects should be extracted') parse_params.add_argument( '--show-fallback-xref', action='store_true', help='Additionally show the fallback xref. Use this if the PDF ' 'has zero or only invalid xref\'s. This setting is ignored if ' '--extract-toc or --extract-embedded is used.') parse_params.add_argument( '--password', '-P', type=str, default='', help='The password to use for decrypting PDF file.') output_params = parser.add_argument_group( 'Output', description='Used during output generation.') output_params.add_argument( '--outfile', '-o', type=str, default='-', help='Path to file where output is written. 
Or "-" (default) to ' 'write to stdout.') codec_parser = output_params.add_mutually_exclusive_group() codec_parser.add_argument( '--raw-stream', '-r', default=False, action='store_true', help='Write stream objects without encoding') codec_parser.add_argument( '--binary-stream', '-b', default=False, action='store_true', help='Write stream objects with binary encoding') codec_parser.add_argument( '--text-stream', '-t', default=False, action='store_true', help='Write stream objects as plain text') return parser def main(argv=None): parser = create_parser() args = parser.parse_args(args=argv) if args.debug: logging.getLogger().setLevel(logging.DEBUG) if args.outfile == '-': outfp = sys.stdout else: outfp = open(args.outfile, 'w') if args.objects: objids = [int(x) for x in args.objects.split(',')] else: objids = [] if args.page_numbers: pagenos = {x - 1 for x in args.page_numbers} elif args.pagenos: pagenos = {int(x) - 1 for x in args.pagenos.split(',')} else: pagenos = set() password = args.password if args.raw_stream: codec = 'raw' elif args.binary_stream: codec = 'binary' elif args.text_stream: codec = 'text' else: codec = None for fname in args.files: if args.extract_toc: dumpoutline( outfp, fname, objids, pagenos, password=password, dumpall=args.all, codec=codec, extractdir=None ) elif args.extract_embedded: extractembedded( outfp, fname, objids, pagenos, password=password, dumpall=args.all, codec=codec, extractdir=args.extract_embedded ) else: dumppdf( outfp, fname, objids, pagenos, password=password, dumpall=args.all, codec=codec, extractdir=None, show_fallback_xref=args.show_fallback_xref ) outfp.close() if __name__ == '__main__': sys.exit(main())
[ "vince@bartle.io" ]
vince@bartle.io
2edffd3663b2abd45fd38a442dd7b5736f510500
03dd72639c7a39ef7cf17d0dc7fa4276cd47c93a
/tipranksCloudlet/data_processor.py
6e14cdc88570cf04b705e04470bfc9af686b3af2
[]
permissive
karthiknayak02/portfolioManager
dbc7554ded3cdb9489d43c9329c677e54f745c1f
fc3c335778c78535b8e263e7a62a5e2d7a1ec7c9
refs/heads/master
2023-08-10T16:06:51.810322
2021-10-03T21:38:09
2021-10-03T21:38:09
351,266,417
0
0
MIT
2021-10-03T20:29:40
2021-03-25T00:53:15
Python
UTF-8
Python
false
false
1,797
py
from common import *

"""
Get price targets of symbol
https://www.tipranks.com/api/stocks/getData/?name=NET
"""
def get_price_targets_consensus(symbol: str):
    request_url = "https://www.tipranks.com/api/stocks/getData/"
    query_params = {'name': symbol}
    response = get_call(request_url=request_url, query_params=query_params)

    value = None
    schema = {
        "ticker": value,
        "companyName": value,
        "ptConsensus": [
            [0],
            {
                "priceTarget": value,
                "high": value,
                "low": value
            }],
        "latestRankedConsensus": {
            "rating": value,
            "nB": value,
            "nH": value,
            "nS": value
        }
    }

    price_targets = parse_response(schema, response)
    print(price_targets)
    return price_targets


"""
Get basic stock details
# https://market.tipranks.com/api/details/getstockdetailsasync?id=AMZN
"""
def get_stock_details(symbol: str):
    request_url = "https://market.tipranks.com/api/details/getstockdetailsasync"
    query_params = {'id': symbol}
    response = get_call(request_url=request_url, query_params=query_params)

    value = None
    schema = [
        [0],
        {
            "ticker": "AMZN",
            "price": value,
            "pe": value,
            "eps": value,
            "marketCap": value,
            "yLow": value,
            "yHigh": value,
            "nextEarningDate": value,
            "range52Weeks": value,
            "low52Weeks": value,
            "high52Weeks": value
        }
    ]

    stock_details = parse_response(schema, response)
    print(stock_details)
    return stock_details


if __name__ == '__main__':
    get_price_targets_consensus("AMZN")
    get_stock_details("AMZN")
[ "nayakkarthik02@gmail.com" ]
nayakkarthik02@gmail.com
6631cd057d686d0a0d7c910975132247c9c16828
4e30c855c253cc1d972d29e83edb9d5ef662d30a
/approval/models/returns.py
fc4920552b9ab0a32ad1d864ac946c3732809dab
[ "MIT" ]
permissive
rajeshr188/django-onex
8b531fc2f519d004d1da64f87b10ffacbd0f2719
0a190ca9bcf96cf44f7773686205f2c1f83f3769
refs/heads/master
2023-08-21T22:36:43.898564
2023-08-15T12:08:24
2023-08-15T12:08:24
163,012,755
2
0
NOASSERTION
2023-07-22T09:47:28
2018-12-24T17:46:35
Python
UTF-8
Python
false
false
3,919
py
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models, transaction
from django.db.models import Sum
from django.urls import reverse

from approval.models import ApprovalLine
from contact.models import Customer
from dea.models import Journal, JournalTypes
from product.models import StockLot

"""
When an approval voucher is created, the stock items that are being approved for release
to a contact should be recorded in the database or inventory management system, along
with the contact's information.

When the approved stock items are released to the contact, they should be recorded as
being moved out of the approval area and into the possession of the contact.

If the contact returns some or all of the approved stock items, those items should be
recorded as being returned to the approval area.

When the approval is complete and all approved stock items have been returned, the
approval should be closed. If any stock items were approved for release but not returned,
those items should be flagged for invoicing.

When the invoice is created, the stock items that were approved but not returned should
be included on the invoice, along with the appropriate billing information.

If any changes are made to the approval, return, or invoice, those changes should be
recorded in the database or inventory management system, along with a timestamp and the
user who made the changes.
"""

# Create your models here.
class Return(models.Model):
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    created_by = models.ForeignKey(
        "users.CustomUser", on_delete=models.CASCADE, null=True, blank=True
    )
    contact = models.ForeignKey(
        Customer, related_name="approval_returns", on_delete=models.CASCADE
    )
    total_wt = models.DecimalField(max_digits=10, decimal_places=3, default=0)
    total_qty = models.IntegerField(default=0)
    posted = models.BooleanField(default=False)

    def __str__(self):
        return f"Return #{self.id}"

    def get_absolute_url(self):
        return reverse("approval:approval_return_detail", args=(self.pk,))

    def get_total_qty(self):
        return self.returnitem_set.aggregate(t=Sum("quantity"))["t"]

    def get_total_wt(self):
        return self.returnitem_set.aggregate(t=Sum("weight"))["t"]


class ReturnItem(models.Model):
    return_obj = models.ForeignKey(Return, on_delete=models.CASCADE)
    line_item = models.ForeignKey(
        ApprovalLine, on_delete=models.CASCADE, related_name="return_items"
    )
    quantity = models.IntegerField(default=0)
    weight = models.DecimalField(max_digits=10, decimal_places=3, default=0.0)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    journal = GenericRelation(Journal, related_query_name="approval_returnitem")

    def __str__(self):
        return f"{self.quantity} x {self.line_item.product}"

    def get_absolute_url(self):
        return reverse("approval:approval_returnitem_detail", args=(self.pk,))

    def get_hx_edit_url(self):
        kwargs = {"return_pk": self.return_obj.id, "pk": self.pk}
        return reverse("approval:approval_returnitem_update", kwargs=kwargs)

    def create_journal(self):
        return Journal.objects.create(
            journal_type=JournalTypes.SJ,
            desc="Approval Return",
            content_object=self,
        )

    def get_journal(self):
        return self.journal.first()

    @transaction.atomic
    def post(self, journal):
        self.line_item.product.transact(self.weight, self.quantity, journal, "AR")
        self.line_item.update_status()

    @transaction.atomic
    def unpost(self, journal):
        self.line_item.product.transact(self.weight, self.quantity, journal, "A")
        self.line_item.update_status()
[ "rajeshrathodh@gmail.com" ]
rajeshrathodh@gmail.com
9fdbeecbfcef675451289bc39a5aa7cf2a6cb5d2
07cabb7e5fec85992496cf0c825affc78b33dba4
/Scrapping/scrappy_venv/bin/twistd
621775ceea805f17bdf7ff6adee3e71e63aabb00
[]
no_license
WooodHead/Code-practice
8b5abd0bee3796926b3f738a276acb216480b585
62eafb6d856d2631a7659d68ab94d7d78a72c9b8
refs/heads/master
2022-11-11T08:03:07.070167
2020-06-27T07:50:45
2020-06-27T07:50:45
null
0
0
null
null
null
null
UTF-8
Python
false
false
469
#!/Users/lyanalexandr/OneDrive/Projects/Programming/Python/Practice/Scrapping/scrappy_venv/bin/python3.8
# EASY-INSTALL-ENTRY-SCRIPT: 'Twisted==20.3.0','console_scripts','twistd'
__requires__ = 'Twisted==20.3.0'
import re
import sys

from pkg_resources import load_entry_point

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('Twisted==20.3.0', 'console_scripts', 'twistd')()
    )
[ "alexlyan@yahoo.com" ]
alexlyan@yahoo.com
ae8263755e7c9f0478df3a1a92714a27aa901a2c
79d4c542ed5d1191262a1d163b3a82996106a113
/triple_well_2D/triple_well_2.py
90ef0300feffe81071dab1ba45cff68364f15895
[]
no_license
pascalwangt/PyTAMS
397bb0804f04e9075b510196c173850c560c40ad
f15d55951c83bdd4c9aace76d4e1b87864362d43
refs/heads/master
2020-06-24T16:09:52.549525
2019-08-06T22:06:13
2019-08-06T22:06:13
199,011,484
2
0
null
null
null
null
UTF-8
Python
false
false
7,521
py
# -*- coding: utf-8 -*- """ Created on Tue May 7 23:45:49 2019 @author: pasca """ import numpy as np import sympy as sp from sympy import exp, tanh, cosh from sympy.abc import x,y import h5py import sys sys.path.append('../') import ellipsoid_fun import trajectory_to_score_function #%% #states initial_state = np.array([-5.7715972293490533928661534446291625499725341796875, 5.4694526174595439355376248779551373591090168702066875994205474853515625e-09]) saddle_state = np.array([0, -2.93429453258859905540045787120106979273259639739990234375e-02]) #saddle_state = np.array([0, 18]) target_state = np.array([5.7715972293490533928661534446291625499725341796875, 5.4694526174595439355376248779551373591090168702066875994205474853515625e-09]) #%% #general confinement alpha = 0.1 beta = 0.05 yc = 0 #stable minima x_min = 6 depth = 10 y_decay = 2 x_decay = 2 #metastable minimum y_intermediate = 20/1.5 depth_intermediate = 20/1.5 y_decay_intermediate = 3 x_decay_intermediate = 5 #barrier y_barrier = 15/1.5 y_decay_barrier = 1 x_decay_barrier = 2 barrier = 20 def potential(x,y): return 4.8+alpha*x**2+beta*(y-yc)**2+barrier*(1+np.tanh(-(y-y_barrier)/y_decay_barrier))*np.exp(-(x/x_decay_barrier)**2)-depth_intermediate*np.exp(-(x/x_decay_intermediate)**2-((y-y_intermediate)/y_decay_intermediate)**2)-depth*np.exp(-((x-x_min)/x_decay)**2-(y/y_decay)**2)-depth*np.exp(-((x+x_min)/x_decay)**2-(y/y_decay)**2) def force(v): x,y=v return np.array([-2*alpha*x+barrier*(1+np.tanh(-(y-y_barrier)/y_decay_barrier))*2*x/x_decay_barrier**2*np.exp(-(x/x_decay_barrier)**2)-2*x/x_decay_intermediate**2*depth_intermediate*np.exp(-(x/x_decay_intermediate)**2-((y-y_intermediate)/y_decay_intermediate)**2)-2*(x-x_min)/x_decay**2*depth*np.exp(-((x-x_min)/x_decay)**2-(y/y_decay)**2)-2*(x+x_min)/x_decay**2*depth*np.exp(-((x+x_min)/x_decay)**2-(y/y_decay)**2), -2*beta*(y-yc)+barrier/y_decay_barrier*np.exp(-(x/x_decay_barrier)**2)/np.cosh(-(y-y_barrier)/y_decay_barrier)**2-2*(y-y_intermediate)/y_decay_intermediate**2*depth_intermediate*np.exp(-(x/x_decay_intermediate)**2-((y-y_intermediate)/y_decay_intermediate)**2)-2*y/y_decay**2*depth*np.exp(-((x-x_min)/x_decay)**2-(y/y_decay)**2)-2*y/y_decay**2*depth*np.exp(-((x+x_min)/x_decay)**2-(y/y_decay)**2)]) #sympy force matrix force_matrix = sp.Matrix([-2*alpha*x+barrier*(1+tanh(-(y-y_barrier)/y_decay_barrier))*2*x/x_decay_barrier**2*exp(-(x/x_decay_barrier)**2)-2*x/x_decay_intermediate**2*depth_intermediate*exp(-(x/x_decay_intermediate)**2-((y-y_intermediate)/y_decay_intermediate)**2)-2*(x-x_min)/x_decay**2*depth*exp(-((x-x_min)/x_decay)**2-(y/y_decay)**2)-2*(x+x_min)/x_decay**2*depth*exp(-((x+x_min)/x_decay)**2-(y/y_decay)**2), -2*beta*(y-yc)+barrier/y_decay_barrier*exp(-(x/x_decay_barrier)**2)/cosh(-(y-y_barrier)/y_decay_barrier)**2-2*(y-y_intermediate)/y_decay_intermediate**2*depth_intermediate*exp(-(x/x_decay_intermediate)**2-((y-y_intermediate)/y_decay_intermediate)**2)-2*y/y_decay**2*depth*exp(-((x-x_min)/x_decay)**2-(y/y_decay)**2)-2*y/y_decay**2*depth*exp(-((x+x_min)/x_decay)**2-(y/y_decay)**2)]) noise_matrix = None #%% #score functions def score_function_linear(v): score = np.sum((target_state-initial_state)*(v-initial_state)) / np.linalg.norm(target_state-initial_state)**2 if score >=0: return score else: return 1e-5 def score_function_linear_simple(v): return v[0]/target_state[0] def score_function_norm(v): x,y=v return 1/2*np.sqrt((x+1)**2+1/2*y**2) def score_function_circle_maker(param = 4): """ param: decay rate of the exponentials """ dist = 
np.linalg.norm(target_state-initial_state) eta = np.linalg.norm(saddle_state-initial_state)/dist def score_function(v): return eta - eta*np.exp(-param*(np.linalg.norm(v-initial_state)/dist)**2)+(1-eta)*np.exp(-param*(np.linalg.norm(v-target_state)/dist)**2) return score_function def score_function_ellipsoid_maker(param = 0.05, sigma=1.5): """ param: decay rate of the exponentials """ eta = np.linalg.norm(target_state-saddle_state)/np.linalg.norm(target_state-initial_state) covariance_matrix_start, quad_form_initial, spectral_radius, level, bound = ellipsoid_fun.ingredients_score_function(force_matrix, initial_state, sigma, noise_matrix=noise_matrix) covariance_matrix_target, quad_form_target, spectral_radius, level, bound = ellipsoid_fun.ingredients_score_function(force_matrix, target_state, sigma, noise_matrix=noise_matrix) def score_function(v): return eta - eta*np.exp(-param*quad_form_initial(v))+(1-eta)*np.exp(-param*quad_form_target(v)) return score_function def score_function_custom_maker(filename='trajectory.hdf5', decay=4): """ param: trajectory file with key "filled_path" decay """ with h5py.File(filename, 'r') as file: filled_path = file['filled_path'][:] file.close() score_function = trajectory_to_score_function.score_function_maker(filled_path.T, decay) return score_function def threshold_simexp_param(param, level): dist = np.linalg.norm(target_state-initial_state) eta = np.linalg.norm(saddle_state-initial_state)/dist return (1-eta)*(1-np.exp(-level*param)) #%% #tests check_ellipsoid_array = 0 potential_well_plot_3D = 0 potential_well_plot = 0 if check_ellipsoid_array: import matplotlib.pyplot as plt #ell = ellipsoid_fun.get_ellipsoid_array(target_state, quad_form, level, bound) plt.scatter(ell.T[0], ell.T[1]) CS = ellipsoid_fun.draw_ellipsoid_2D(force_matrix, target_state, noise = sigma) foo = ellipsoid_fun.check_ellipsoid(ell, score_function_simexp_ell_param, threshold=threshold_simexp, tolerance=1e-3) score_level = ellipsoid_fun.get_levelset_array(target_state, score_function_simexp_ell, level = 1-threshold_simexp, bound=2*bound, tolerance = 1e-3) plt.scatter(score_level.T[0], score_level.T[1], alpha = 0.5) print(foo) if potential_well_plot_3D: import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D fig = plt.figure() ax = fig.add_subplot(111, projection='3d') x,y = np.linspace(-12,12,100), np.linspace(-10,25,100) xx, yy = np.meshgrid(x, y) zz = potential(xx,yy) im = ax.plot_surface(xx,yy,zz, cmap = 'RdBu_r') ax.set_xlabel('x') ax.tick_params(axis='x', which='major', pad=0) ax.set_ylabel('y') ax.set_zlabel('V(x,y)', labelpad = 10) clb = fig.colorbar(im, fraction=0.03, pad=-0.1) clb.ax.set_title('V(x,y)', fontsize = 16) ax.set_facecolor('white') #plt.savefig('2D_simple_double_well_ax3D.png') if potential_well_plot: import matplotlib.pyplot as plt fig = plt.figure() x,y = np.linspace(-15,15,100), np.linspace(-15,25,100) xx, yy = np.meshgrid(x, y) pot = potential(xx,yy) im = plt.contourf(xx, yy, pot, 100, cmap = 'RdBu_r') #plt.contour(xx, yy, pot, 30) plt.xlabel('x') plt.ylabel('y') plt.grid() cbar = fig.colorbar(im,) cbar.ax.set_title('$V(x,y)$', pad = 15) plt.scatter(initial_state[0],initial_state[1], marker = 'o', label = 'start', color = 'black', s=40) plt.scatter(target_state[0], target_state[1], marker = 'x', label = 'target', color = 'black', s=40) plt.legend(loc = 'lower right') plt.savefig('../../figures/potential.png', bbox_inches = 'tight') #plt.savefig('2D_simple_double_well.png')
[ "pascal.wang@ens-lyon.fr" ]
pascal.wang@ens-lyon.fr
166b3242dff0b00a8bc4391164f1da912dc2126a
8ab090a03f23856bf959dbfd9d12758425ceeb02
/examples/molecular/master/result/crop.py
6402a8ae80cebb641e43e2560e662ab2ca857bc9
[ "MIT" ]
permissive
blostic/cirrus
bc2c9297606f7c747a67ea1532e0b802950fa73d
faaca48052f6a29e434e8a9dcf6625d426a1c8b7
refs/heads/master
2021-05-04T11:07:01.907627
2017-09-09T07:34:26
2017-09-09T07:34:26
51,220,056
0
0
null
null
null
null
UTF-8
Python
false
false
530
py
from PIL import Image

for i in range(1, 10):
    img = Image.open("image-000000" + str(i) + ".png")
    area = (300, 430, 400, 530)
    cropped_img = img.crop(area)
    # cropped_img.show()
    resized = cropped_img.resize((200, 200), Image.ANTIALIAS)
    resized.save("res" + str(i) + ".png")

for i in range(10, 13):
    img = Image.open("image-00000" + str(i) + ".png")
    area = (300, 430, 400, 530)
    cropped_img = img.crop(area)
    # cropped_img.show()
    resized = cropped_img.resize((200, 200), Image.ANTIALIAS)
    resized.save("res" + str(i) + ".png")
[ "piotr.skibiak@gmail.com" ]
piotr.skibiak@gmail.com
d8d67f00d13100bc57dbb65f9bea4894dce778a7
c6a51702a01c341c41a0d9df54f6106111a6cac5
/part1/train.py
9212f4f41bdeb5c47f86caf4f51a450eb7f80085
[]
no_license
eacamilla/assignment_2
3b5ac15cd2654827783ece73875580b9e0c90676
ce4b310075193fcea24f9f364a0821d7f29c1c76
refs/heads/master
2020-09-14T18:01:35.188856
2019-11-21T15:42:44
2019-11-21T15:42:44
223,207,359
0
0
null
null
null
null
UTF-8
Python
false
false
5,946
py
################################################################################ # MIT License # # Copyright (c) 2019 # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to conditions. # # Author: Deep Learning Course | Fall 2019 # Date Created: 2019-09-06 ################################################################################ from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import time from datetime import datetime import numpy as np import torch from torch.utils.data import DataLoader from part1.dataset import PalindromeDataset from part1.vanilla_rnn import VanillaRNN from part1.lstm import LSTM # You may want to look into tensorboard for logging # from torch.utils.tensorboard import SummaryWriter ################################################################################ def train(config): assert config.model_type in ('RNN', 'LSTM') # Initialize the device which to run the model on #device = torch.device(config.device) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # Initialize the model that we are going to use if config.model_type is 'RNN': model = VanillaRNN(seq_length = config.input_length, input_dim= config.input_dim, num_hidden = config.num_hidden, num_classes = config.num_classes, batch_size = config.batch_size, device= device) if config.model_type is 'LSTM': model = LSTM(seq_length = config.input_length, input_dim= config.input_dim, num_hidden = config.num_hidden, num_classes = config.num_classes, batch_size = config.batch_size, device= device) # Initialize the dataset and data loader (note the +1) dataset = PalindromeDataset(config.input_length+1) data_loader = DataLoader(dataset, config.batch_size, num_workers=1) # Setup the loss and optimizer criterion = torch.nn.CrossEntropyLoss() optimizer = torch.optim.RMSprop(model.parameters(), config.learning_rate) #optimizer = torch.optim.Adam(model.parameters(), config.learning_rate) Accuracy = [] for step, (batch_inputs, batch_targets) in enumerate(data_loader): # Only for time measurement of step through network t1 = time.time() y = model.forward(batch_inputs.to(device)) loss = criterion(y, batch_targets.to(device)) loss.backward() ############################################################################ # QUESTION: what happens here and why? 
# limits the size of the parameter updates by scaling the gradients down # Should be placed after loss.backward() but before optimizer.step() ############################################################################ torch.nn.utils.clip_grad_norm(model.parameters(), max_norm=config.max_norm) ############################################################################ optimizer.step() loss = loss.item() acc_in = np.argmax(y.cpu().detach().numpy(), axis=1) == batch_targets.cpu().detach().numpy() accuracy = np.sum(acc_in)/ batch_targets.shape[0] Accuracy.append(accuracy) # Just for time measurement t2 = time.time() examples_per_second = config.batch_size/float(t2-t1) if step % 10 == 0: print("[{}] Train Step {:04d}/{:04d}, Batch Size = {}, Examples/Sec = {:.2f}, " "Accuracy = {:.2f}, Loss = {:.3f}".format( datetime.now().strftime("%Y-%m-%d %H:%M"), step, config.train_steps, config.batch_size, examples_per_second, accuracy, loss )) if step == config.train_steps: # If you receive a PyTorch data-loader error, check this bug report: # https://github.com/pytorch/pytorch/pull/9655 break print('Done training. :)') ################################################################################ ################################################################################ if __name__ == "__main__": # Parse training configuration parser = argparse.ArgumentParser() # Model params parser.add_argument('--model_type', type=str, default="RNN", help="Model type, should be 'RNN' or 'LSTM'") #parser.add_argument('--model_type', type=str, default="LSTM", help="Model type, should be 'RNN' or 'LSTM'") parser.add_argument('--input_length', type=int, default=10, help='Length of an input sequence') #adjust input length for different palindrome lengths parser.add_argument('--input_dim', type=int, default=1, help='Dimensionality of input sequence') parser.add_argument('--num_classes', type=int, default=10, help='Dimensionality of output sequence') parser.add_argument('--num_hidden', type=int, default=128, help='Number of hidden units in the model') parser.add_argument('--batch_size', type=int, default=128, help='Number of examples to process in a batch') parser.add_argument('--learning_rate', type=float, default=0.001, help='Learning rate') parser.add_argument('--train_steps', type=int, default=10000, help='Number of training steps') parser.add_argument('--max_norm', type=float, default=10.0) parser.add_argument('--device', type=str, default="cuda:0", help="Training device 'cpu' or 'cuda:0'") config = parser.parse_args() #Loop over palindrome length and different seeds fixme # Train the model train(config) #Create plot fixme
[ "eacamillawerner@gmail.com" ]
eacamillawerner@gmail.com
9aa7d125ff93dd331e5aade2f70114255f9f0ff6
e9a2b904da10ec9f38fb2a36093e8e84bf902d9d
/_400/manage.py
f735a2a142b938431f35228cdbfeea4eb9b258b0
[]
no_license
fhim50/jobsearchengine
8d848bc84078a1836599eca50a2c7a5a2304814e
c7aae927542fc0749539de417d716cd32d06d75f
refs/heads/master
2020-06-03T20:43:55.524245
2013-01-22T08:27:31
2013-01-22T08:27:31
7,016,723
1
0
null
null
null
null
UTF-8
Python
false
false
257
py
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "_400.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
[ "kwaw@kwaw-HP-625.(none)" ]
kwaw@kwaw-HP-625.(none)
8c750e5ac76fad05861e8d6d26c54313f5859f0e
bdd6ab129de61947945b380a487a3ee923f542f3
/real_genomes/pipeline/start.py
50a2993aeb21e00fb25ff0957e5b9833515d1ddb
[]
no_license
InfOmics/pangenes-review
1e27c1fd1a93fb7a5fd764c4090f7a4a2a207b0b
a74f8f9d615de6a76aa1918c2c4e9d0c1f0c8385
refs/heads/master
2021-01-07T12:14:37.891227
2020-06-01T14:20:27
2020-06-01T14:20:27
241,686,261
2
0
null
null
null
null
UTF-8
Python
false
false
7,892
py
from pathlib import Path from modules.pipelines import * from shutil import move import datetime import pandas as pd import traceback #method to recursively remove a folder def delete_folder(pth) : for sub in pth.iterdir() : if sub.is_dir() : delete_folder(sub) else : sub.unlink() pth.rmdir() def convert_time(timestring): if len(timestring.split(':'))>2: if "." in timestring: pt =datetime.datetime.strptime(timestring,'%H:%M:%S.%f') else: pt =datetime.datetime.strptime(timestring,'%H:%M:%S') else: if "." in timestring: pt =datetime.datetime.strptime(timestring,'%M:%S.%f') else: pt =datetime.datetime.strptime(timestring,'%M:%S') return pt if Path('log_running').exists(): Path('log_running').unlink() if Path('gene_families').exists(): delete_folder(Path('gene_families')) if Path('softwares_data').exists(): delete_folder(Path('softwares_data')) if Path('execution_stats').exists(): delete_folder(Path('execution_stats')) #foler containing the data for the execution of the pangenome softwares pth_software_data = Path('softwares_data') pth_software_data.mkdir(exist_ok=True) #gene families folder where we have the .clus files pth_gene_families = Path('gene_families') pth_gene_families.mkdir(exist_ok=True) #temporary folder pth_tmp = Path('tmp') pth_tmp.mkdir(exist_ok=True) #execution stats folder stats_folder = Path('execution_stats') stats_folder.mkdir(exist_ok=True) datasets = { 'leaf':Path('datasets','leaf'), 'root':Path('datasets','root') } #backtranslate from aa to dna datasets #backtranslate protein genes of the genomes into DNA def backtranslate(path): aa_codon_table = { #from translation table 11 NCBI 'F':'TTT' , 'Y':'TAT' , 'C':'TGT' , 'W':'TGG' , 'H':'CAT' , 'L':'CTC' , 'P':'CCG' , 'Q':'CAG' , 'I':'ATC' , 'T':'ACC' , 'N':'AAC' , 'S':'AGC' , 'M':'ATG' , 'K':'AAG' , 'R':'AGG' , 'D':'GAC' , 'V':'GTG' , 'A':'GCG' , 'E':'GAG' , 'G':'GGG' , '*':'TAG' } #path = Path(path,'genome','genome','DB') Path(path,'dna').mkdir(parents=True, exist_ok=True) Path(path,'protein').mkdir(parents=True, exist_ok=True) for filepath in sorted(path.glob('*_aa.fasta')): to_fasta = list() for sequence in SeqIO.parse(filepath,'fasta'): aa_seq = list() for char in sequence.seq: aa_seq.append(aa_codon_table[char]) sequence.seq = Seq(''.join(aa_seq), IUPAC.ambiguous_dna ) to_fasta.append(sequence) outfile = filepath.stem.replace('_aa','_dna')+'.fasta' SeqIO.write(to_fasta,Path(path,'dna',outfile),'fasta') #new_path = Path(path,'protein',filepath.name.replace('.fa','.fasta')) #move(filepath, new_path) #per ora copio per poter rilanciare il programma copy(filepath, Path(path,'protein')) #print(outfile) ##### #da commentare se non devo rigenerare i datasets for dtset_type, pth in datasets.items(): for data in pth.glob('*'): print('backtranslate:', data) backtranslate(data) for dtset_type, pth in datasets.items(): print('@vb@', dtset_type, pth) for data in pth.glob('*'): print('@vb@',data) #software execution data ( memory, elapsed_time ) stats= list() software_data = Path(pth_software_data, dtset_type, data.stem) software_data.mkdir(parents=True,exist_ok=True) print('@vb@',software_data) gene_families = Path(pth_gene_families, dtset_type, data.stem) gene_families.mkdir(parents=True, exist_ok=True) print('@vb@',gene_families) Path(stats_folder,dtset_type,data.stem).mkdir(parents=True, exist_ok=True) #le statistiche di esecuzione poi vengono salvate su un file quindi non devo cambiare nulla, #mi basta definire dove salvare il file nell' open() #PANDELOS try: pandelos_stat = pandelos(data, gene_families, software_data) if 
pandelos_stat != None: stats += pandelos_stat else: #need to remove pandelos empty clus file when it gets halted before completing (only good result files are kept) p = Path(gene_families,'pandelos_families.clus') if p.exists(): p.unlink() except Exception as e: print('@vb@',"type error: " + str(e)) print('@vb@',traceback.format_exc()) print() #PANX try: panx_stat = panx(data,gene_families,software_data) if panx_stat != None: stats += panx_stat except Exception as e: print('@vb@',"type error: " + str(e)) print('@vb@',traceback.format_exc()) print() #PANSEQ try: panseq_stat = panseq(data,gene_families,software_data) if panseq_stat != None: stats += panseq_stat except Exception as e: print('@vb@',"type error: " + str(e)) print('@vb@',traceback.format_exc()) print() #GET_HOMOLOGUES try: gethomologues_stat = gethomologues(data,gene_families,software_data) if gethomologues_stat != None: stats += gethomologues_stat except Exception as e: print('@vb@',"type error: " + str(e)) print('@vb@',traceback.format_exc()) print() #PGAP pgap_stat = pgap(data,gene_families,software_data) if pgap_stat != None: stats += pgap_stat print() #PANGET try: panget_stat = panget(data,gene_families,software_data) if panget_stat != None: stats += panget_stat except Exception as e: print('@vb@',"type error: " + str(e)) print('@vb@',traceback.format_exc()) print() #ROARY try: roary_stat = roary(data,gene_families,software_data) if roary_stat != None: stats += roary_stat except Exception as e: print('@vb@',"type error: " + str(e)) print('@vb@',traceback.format_exc()) print() try: #BLAST all vs all , input for panoct and micropan print('Running BLAST...') blast_stat = call_program([data,software_data], 'blast') print('...BLAST done!') path_blastall = Path(software_data, 'blastDB','blastall.out') if blast_stat != None: stats += blast_stat #PANOCT panoct_stat = panoct(data,gene_families,software_data) if panoct_stat != None: stats += panoct_stat #MICROPAN micropan_stat = micropan(data,gene_families,software_data) if micropan_stat != None: stats += micropan_stat else: print('MESSAGE: BLAST was terminated, panoct and micropan will not be executed') except Exception as e: print('@vb@',"type error: " + str(e)) print('@vb@',traceback.format_exc()) try: #cambiare il tempo in numero di secondi if len(stats)>0: #if i have at least one result (no results if all softwares take more than 2h to compute) print('saving resources used for:') print('dataset type:',dtset_type) print('dataset name:',data.stem) ###SISTEMARE L'OUTPUT IN MODO CHE I FILE VENGANO GENERATI DIRETTAMENTE NELLA CARTELLA CORRETTA #idea -> salvare come file csv in modo da poter utilizzare i dataframe per fare la media di #tutti i risultati stat_dict = dict() blast_ram = int() blast_time = int() for s in stats: d = s.split(' ')[1:] if d[0] != 'blast': #ram and time will be added to panoct and micropan ram and time stat_dict[d[0]]= dict() if d[0] == 'blast': blast_ram = int(d[1]) pt = convert_time(d[2]) blast_time = pt.second+pt.minute*60+pt.hour*3600 elif d[0] == 'panoct' or d[0] == 'micropan': stat_dict[d[0]]['ram'] = max(blast_ram, int(d[1])) pt = convert_time(d[2]) software_time = pt.second+pt.minute*60+pt.hour*3600 stat_dict[d[0]]['time'] = blast_time + software_time else: stat_dict[d[0]]['ram'] = int(d[1]) pt = convert_time(d[2]) software_time = pt.second+pt.minute*60+pt.hour*3600 stat_dict[d[0]]['time'] = software_time df_stats = pd.DataFrame(stat_dict) df_stats.to_csv(Path(stats_folder,dtset_type,data.stem,'running_data.csv'), sep='\t') except Exception as e: 
print('@vb@',"type error: " + str(e)) print('@vb@',traceback.format_exc())
[ "vincenzo.bonnici@gmail.com" ]
vincenzo.bonnici@gmail.com
da7cfc6806d77782ce1ac44df83deae1ecdcb3d5
c2bf65f35ac84c93b815c64eee4bfb15e9c1a0ee
/567.字符串的排列.py
6a7b61cfa51e870f9c9e4e3bea86ee165162909c
[]
no_license
hhs44/leetcode_learn
e7651548e41176b1fd56a1565effbe076d6b280a
fd4f51a4803202a2e4fe3d97ef2b54adc218e691
refs/heads/master
2022-03-06T14:35:51.891389
2022-02-09T14:55:13
2022-02-09T14:55:13
250,731,211
2
0
null
null
null
null
UTF-8
Python
false
false
1,359
py
#
# @lc app=leetcode.cn id=567 lang=python3
#
# [567] 字符串的排列
#

# @lc code=start
from collections import Counter


class Solution:
    def checkInclusion(self, s1: str, s2: str) -> bool:
        # for x, y in enumerate(s2):
        #     if y in s1:
        #         if x + len(s1) <= len(s2):
        #             t = s2[x:x+len(s1)]
        #             if Counter(t) == Counter(s1):
        #                 return True
        # return False

        # # 2222
        # r1 = Counter(s1)
        # r2 = Counter()
        # l1, l2 = len(s1), len(s2)
        # temp = 0
        # x = y = 0
        # while y < l2:
        #     r2[s2[y]] += 1
        #     if r2[s2[y]] == r1[s2[y]]:
        #         temp += 1
        #     if temp == len(r1):
        #         return True
        #     y += 1
        #     if y - x + 1 > l1:
        #         if r1[s2[x]] == r2[s2[x]]:
        #             temp -= 1
        #         r2[s2[x]] -= 1
        #         if r2[s2[x]] == 0:
        #             del r2[s2[x]]
        #         x += 1
        # return False

        count_dict = Counter(s1)
        m = len(s1)
        i = 0
        j = m - 1
        while j < len(s2):
            if Counter(s2[i:j+1]) == count_dict:
                return True
            i += 1
            j += 1
        return False
# @lc code=end
[ "1159986871@qq.com" ]
1159986871@qq.com
b632eb9d3117a1f0ed7b1f8d42128ff804ab6e39
e5b4bead66f3f560bb77221e41e57057c52894e0
/PYTHON/derivative.py
1c01571d62ada2f1e4354fbb4a45322d870909e9
[]
no_license
lafionium/DMI
ce45f40a88ba52180b86daac1abedb4c54200209
ea81832c69c3531466a4db8eb9289a02e45c54a4
refs/heads/master
2021-05-16T06:52:43.741331
2018-01-13T15:50:09
2018-01-13T15:50:09
103,494,626
0
0
null
null
null
null
UTF-8
Python
false
false
854
py
## -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt


def mans_sinuss(x):
    k = 0
    a = (-1)**0*x**1/(1)
    S = a
    while k <= 500:
        k = k + 1
        R = (-1) * x**2 / (2*k*(2*k+1))
        a = a * R
        S = S + a
    return S


a = 0
b = 3 * np.pi

x = np.arange(a, b, 0.05)
y = mans_sinuss(x)

plt.plot(x, y)
plt.grid()
#plt.show()

n = len(x)
y_prim = []
for i in range(n-1):
    #print i, x[i], y[i],
    delta_y = y[i+1] - y[i]
    delta_x = x[i+1] - x[i]
    #y_prim = delta_y / delta_x
    #print y_prim
    y_prim.append(delta_y / delta_x)

#plt.plot(x[:n-1],y_prim)
#plt.show()

n = len(x)
y_prim2 = []
for i in range(n-2):
    delta_y_prim = y_prim[i+1] - y_prim[i]
    delta_x = x[i+1] - x[i]
    y_prim2.append(delta_y_prim / delta_x)

plt.plot(x[:n-1], y_prim)
plt.plot(x[:n-2], y_prim2)
plt.show()
[ "jelizavetak@inbox.lv" ]
jelizavetak@inbox.lv
682bf91e9068ae3b44de92b625bce0c0b2052cf9
c4f096d99db7134d2991b7aad0192dcd1f58511a
/select_proxy/settings_tokens.py
0fe76835b8d85292b2509c2fc965df2ae5770428
[]
no_license
for-mao/select-proxy
a3157a07f954a8eaaffb0a4bd4402ae000a784c1
211b88af7e1cf9f31688c3acb188dbc4838e8c74
refs/heads/master
2020-04-02T16:40:20.437075
2018-10-25T06:48:02
2018-10-25T06:48:02
154,623,342
0
0
null
null
null
null
UTF-8
Python
false
false
170
py
DIGITALOCEAN_API_TOKEN = {
    'jiang': '',
    'sun': ''
}

LINODE_API_TOKEN = {
    'jiang': '',
    'sun': ''
}

VULTR_API_TOKEN = {
    'jiang': '',
    'sun': ''
}
[ "15501852282@163.com" ]
15501852282@163.com
0e5ce8dcbc905ec244e57d0954c2a482627e3370
2bf7879f0c134b1a207fd4c249bb5b58ae219692
/src/bowtieutil.py
0ae531def009a6ab131f85970e245aef23a9bd32
[]
no_license
logstar/NET-seq-CLT
50adaffc0db9311bdaa110691ea3cd8f3697bf44
5aa77e4c26ac3d2b90c88961dbb73fd2a7981e72
refs/heads/master
2021-03-27T08:46:50.690982
2018-09-20T16:58:09
2018-09-20T16:58:09
92,326,581
0
0
null
null
null
null
UTF-8
Python
false
false
2,422
py
import collections

# aln_lcoord_b0: 0 based coordinate of the alignment start (left most bp of
#                alignment)
# num_alt_aln: number of alternative alignment positions of the same read
# mm_desc: mismatch description (comma sep)
BowtieRecord = collections.namedtuple('BowtieRecord',
                                      ['seqid', 'strand', 'refid',
                                       'aln_lcoord_b0', 'seq', 'qscore',
                                       'num_alt_aln', 'mm_desc'])


def iterate_bowtie_out_file(bt_fn):
    bt_file = open(bt_fn, 'r')
    for line in bt_file:
        fields = line.strip('\n').split('\t')
        if len(fields) != 8:
            raise ValueError("Number of fields not equal to 8: %s" % line)
        rec = BowtieRecord(fields[0], fields[1], fields[2], int(fields[3]),
                           fields[4], fields[5], int(fields[6]), fields[7])
        if rec.strand not in ('+', '-'):
            raise ValueError("Strand not +/-: %s" % line)
        yield rec
    bt_file.close()


# Treat genome as circular
class BowtieRecordCounter(object):
    """docstring for BowtieRecordCounter"""

    def __init__(self, ref_length):
        super(BowtieRecordCounter, self).__init__()
        self.ref_length = ref_length
        self.align_count_dict = {}

    def insert_bt_rec_ntup(self, bt_rec_ntup):
        if bt_rec_ntup.aln_lcoord_b0 >= self.ref_length:
            raise ValueError("Alignment start >= ref length. %s"
                             % bt_rec_ntup._asdict())
        if bt_rec_ntup.strand == '+':
            tx_start_pos_b0 = bt_rec_ntup.aln_lcoord_b0
        else:
            tx_start_pos_b0 = bt_rec_ntup.aln_lcoord_b0 + len(bt_rec_ntup.seq) - 1

        if tx_start_pos_b0 >= self.ref_length:
            tx_start_pos_b0 -= self.ref_length

        if tx_start_pos_b0 not in self.align_count_dict:
            self.align_count_dict[tx_start_pos_b0] = {'+': 0, '-': 0}
        self.align_count_dict[tx_start_pos_b0][bt_rec_ntup.strand] += 1

    def output_count_table(self, output_fn):
        output_file = open(output_fn, 'w')
        for key in sorted(self.align_count_dict.keys()):
            output_file.write("%d\t%d\t%d\n"
                              % (key,
                                 self.align_count_dict[key]['+'],
                                 self.align_count_dict[key]['-']))
        output_file.close()
[ "y.will.zhang@gmail.com" ]
y.will.zhang@gmail.com
442c069c094cb5cdedab2a6e2737cf3ce70e9022
462060fa57d2db4f19b256558d57a446bb60bf2a
/Lesson07/setOperations.py
3307e272cab913f9f71a3e73b48a6f85fbfb0adb
[ "MIT" ]
permissive
TrainingByPackt/Python-Fundamentals-eLearning
d82b25ebd87a8503125e6b26991f058424b3da28
0d7aa95b9b163802a93a7dab5f00d80e10677b82
refs/heads/master
2020-04-21T12:25:15.223114
2019-02-07T11:40:18
2019-02-07T11:40:18
169,561,372
3
2
null
null
null
null
UTF-8
Python
false
false
372
py
a = {1, 2, 3}
b = {3, 4, 5}

# A union B
print('a union b')
print(a.union(b))
print(a | b)

# A intersection B
print('a intersection b')
print(a.intersection(b))
print(a & b)

# A equals to B
print(a == b)

# Subsets and supersets
print('subsets and supersets')
print(a.issubset(b))
print(b.issuperset(a))

b.update(a)
print(b)
print(a.issubset(b))
print(b.issuperset(a))
[ "madhunikitac@packtpub.com" ]
madhunikitac@packtpub.com
d1503b86fa4896111916a13e6c521ac6752af954
0cc58384745fddd40f0593941223237daba41734
/meiduo_mall/apps/contents/migrations/0001_initial.py
aa0735062da06aa50568b4b3fed32360cc82a92e
[]
no_license
four-leaf-clover1/meiduo_mall
d216d565300e8be3b18fe144a3da721d606d63b1
9d82325ec18ac050e5b076e6e24f6613945bee89
refs/heads/master
2020-06-29T20:53:24.630099
2019-08-09T09:49:27
2019-08-09T09:49:27
200,621,909
0
0
null
null
null
null
UTF-8
Python
false
false
2,487
py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2019-07-26 04:58
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Content',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('title', models.CharField(max_length=100, verbose_name='标题')),
                ('url', models.CharField(max_length=300, verbose_name='内容链接')),
                ('image', models.ImageField(blank=True, null=True, upload_to='', verbose_name='图片')),
                ('text', models.TextField(blank=True, null=True, verbose_name='内容')),
                ('sequence', models.IntegerField(verbose_name='排序')),
                ('status', models.BooleanField(default=True, verbose_name='是否展示')),
            ],
            options={
                'verbose_name': '广告内容',
                'verbose_name_plural': '广告内容',
                'db_table': 'tb_content',
            },
        ),
        migrations.CreateModel(
            name='ContentCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('name', models.CharField(max_length=50, verbose_name='名称')),
                ('key', models.CharField(max_length=50, verbose_name='类别键名')),
            ],
            options={
                'verbose_name': '广告内容类别',
                'verbose_name_plural': '广告内容类别',
                'db_table': 'tb_content_category',
            },
        ),
        migrations.AddField(
            model_name='content',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='contents.ContentCategory', verbose_name='类别'),
        ),
    ]
[ "1173349074@qq.com" ]
1173349074@qq.com
4825ca9e68ae9cf98bba0728e2d75d14fec976e4
790909e6226f6e40859ea153873e712cb98d142f
/step7 (문자열/2675.py
29cc5f89539ea15915c76b28892d83d60e759b2f
[]
no_license
choibyeol/Baekjoon-algorithm
1ae01a95ff80051b284dc522edeb1b258072b5a0
5de50885900cdbb8565784f7b6e2af37afd1bf7f
refs/heads/master
2023-04-21T01:36:19.289748
2021-05-14T12:14:18
2021-05-14T12:14:18
null
0
0
null
null
null
null
UTF-8
Python
false
false
183
py
T = int(input())
for i in range(T):
    S = input()
    lenS = int(S[0])
    for i in range(2, len(S)):
        for j in range(0, lenS):
            print(S[i], end="")
    print()
[ "honey333888@naver.com" ]
honey333888@naver.com
c046690464e6b41e5003468f3a589deffce10683
cc8f018f4497d868aed95ab83ae0a5a8c646a120
/project_jenkins/urls.py
fe8afd358269796b95dd3aa761fefe6769432698
[]
no_license
adarshharidas/jenkins-pro
e049e09bb14595501248f131ad344a7461e26c40
6d05b93db3033e1ecc4cb419bd5e4a32b6d2ac98
refs/heads/master
2020-03-15T03:33:20.219427
2018-06-12T10:04:17
2018-06-12T10:04:17
131,944,972
0
0
null
null
null
null
UTF-8
Python
false
false
838
py
"""project_jenkins URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin from job import views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^', include('job.urls')) ]
[ "pymonk@ex.com" ]
pymonk@ex.com
46bccb1d8843e47334574854ef19d802258e5b4a
df0ba034669281e5d35743ef77ebe16ef07f0d83
/ex6.py
a773b7f082cf82ff804b6cca0ec4d51442eb2930
[]
no_license
pstauble/LPTHW-Exercises
f18d107281dec7b9aa7cb85e0e7e576253b20596
f17faad7f1bbeb97041850865232835ad9190e28
refs/heads/master
2021-01-25T05:57:55.423730
2014-11-04T16:30:54
2014-11-04T16:30:54
null
0
0
null
null
null
null
UTF-8
Python
false
false
636
py
# This variable uses %d to add a number into the string
x = "There are %d types of people." % 10

# These are two variables
binary = "binary"
do_not = "don't"

# We add two strings into this one string
y = "Those who know %s and those who %s." % (binary, do_not)

# We print the two statements which contain the strings and numbers
print x
print y

# we are adding the above strings into these strings
print "I said: %r." % x
print "I also said: '%s'." % y

hilarious = False
joke_evaluation = "Isn't that joke so funny?! %r"

print joke_evaluation % hilarious

w = "this is the left side of..."
e = "a string with a right side."

print w + e
[ "patrick.stauble@gmail.com" ]
patrick.stauble@gmail.com
20d8f4c314a6b737ec3f9b057e9e0b259d3413db
d4eeb6f634c72e924b0a3780e7df7f2150565780
/pyfi1.36.py
1844be21aa30652b409d6cc08a32f72c7644f043
[]
no_license
anupama14609/financepy
053e4086028150e1ace74d26182658ec5f67600f
1d2325d1cba898ce232395dd36808dee4dc980ca
refs/heads/master
2023-07-21T04:16:53.980529
2021-08-29T02:08:37
2021-08-29T02:08:37
399,757,610
0
0
null
null
null
null
UTF-8
Python
false
false
525
py
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import style
import numpy as np


def visualize_data():
    style.use('ggplot')
    df = pd.read_csv('sp500_joined_closes.csv')
    df_corr = df.corr()

    data = df_corr.values
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)

    column_labels = df_corr.columns
    row_labels = df_corr.index

    ax.set_xticklabels(column_labels)
    ax.set_yticklabels(row_labels)
    plt.xticks(rotation=90)
    plt.plot()
    plt.show()


visualize_data()
[ "anupamarao14609@gmail.com" ]
anupamarao14609@gmail.com
1b59986d14faeb17881c11ce0e4490deee33f0a4
08330ea5c2495d5dc958d4cf11b68c5650396e3e
/main.py
96bc672b9314ca63c2ef52b701f996ef5869ae68
[]
no_license
marco-willi/tf-estimator-cnn
d74be01143b6a724534737807ebb78db518c6b87
df3a5651b0f8018d3b9bc4b424f8090fb74ca26f
refs/heads/master
2020-03-22T03:00:54.073040
2018-07-17T08:52:16
2018-07-17T08:52:16
139,408,220
4
0
null
null
null
null
UTF-8
Python
false
false
9,526
py
""" Estimator API for CNNs using popular implementations """ import os import random import tensorflow as tf import numpy as np from estimator import model_fn ################################# # Parameters ################################# flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string( 'root_path', '', "Images root path - must contain directories with class specific images") flags.DEFINE_string( 'model_save_path', '', "Path in which to save graphs, models and summaries") flags.DEFINE_string( 'model', 'small_cnn', "Model name") flags.DEFINE_integer( 'max_epoch', 10, "Max epoch to train model") flags.DEFINE_integer( 'batch_size', 64, "Batch size for model training") flags.DEFINE_integer( 'image_size', 50, "Image size (width/height) for model input") flags.DEFINE_integer( 'num_gpus', 0, "Number of GPUs for model training") flags.DEFINE_integer( 'num_cpus', 2, "Numer of CPUs (for pre-processing)") flags.DEFINE_float('train_fraction', 0.8, "training set fraction") flags.DEFINE_bool( 'color_augmentation', True, "Whether to randomly adjust colors during model training") flags.DEFINE_float( 'weight_decay', 0, 'Applies weight decay if supported by specific model') flags.DEFINE_list( 'image_means', [0, 0, 0], 'image means (leave at default for automatic mode)') flags.DEFINE_list( 'image_stdevs', [1, 1, 1], 'image stdevs (leave at default for automatic mode)') # #DEBUG # FLAGS.root_path = '/host/data_hdd/ctc/ss/images/' # FLAGS.model_save_path = '/host/data_hdd/ctc/ss/runs/species/resnet18_test/' # FLAGS.model = 'ResNet18' # FLAGS.num_gpus = 1 # FLAGS.num_cpus = 4 # FLAGS.weight_decay = 0.0001 ################################# # Define Dataset ################################# # get all class directories classes = os.listdir(FLAGS.root_path) n_classes = len(classes) # find all images image_paths = dict() for cl in classes: image_names = os.listdir(os.path.join(FLAGS.root_path, cl)) image_paths[cl] = [os.path.join(FLAGS.root_path, cl, x) for x in image_names] # Map classes to numerics classes_to_num_map = {k: i for i, k in enumerate(classes)} num_to_class_map = {v: k for k, v in classes_to_num_map.items()} # Create lists of image paths and labels label_list = list() image_path_list = list() for k, v in image_paths.items(): label_list += [classes_to_num_map[k] for i in range(0, len(v))] image_path_list += v # randomly shuffle input to ensure good mixing when model training indices = [i for i in range(0, len(label_list))] random.seed(123) random.shuffle(indices) image_path_list = [image_path_list[i] for i in indices] label_list = [label_list[i] for i in indices] n_records = len(label_list) # Create training and test set train_fraction = FLAGS.train_fraction n_train = int(round(n_records * train_fraction, 0)) n_test = n_records - n_train train_files = image_path_list[0: n_train] train_labels = label_list[0: n_train] test_files = image_path_list[n_train:] test_labels = label_list[n_train:] ################################# # Dataset Iterator ################################# # Standardize a single image def _standardize_images(image, means, stdevs): """ Standardize images """ with tf.name_scope("image_standardization"): means = tf.expand_dims(tf.expand_dims(means, 0), 0) means = tf.cast(means, tf.float32) stdevs = tf.expand_dims(tf.expand_dims(stdevs, 0), 0) stdevs = tf.cast(stdevs, tf.float32) image = image - means image = tf.divide(image, stdevs) return image # data augmentation def _image_augmentation(image): """ Apply some random image augmentation """ with 
tf.name_scope("image_augmentation"): image = tf.image.random_flip_left_right(image) image = tf.image.random_brightness(image, max_delta=0.2) image = tf.image.random_contrast(image, lower=0.9, upper=1) image = tf.image.random_hue(image, max_delta=0.02) image = tf.image.random_saturation(image, lower=0.8, upper=1.2) return image # parse a single image def _parse_function(filename, label, augmentation=True): image_string = tf.read_file(filename) image = tf.image.decode_jpeg(image_string, channels=3) # randomly crop image from plus 10% width/height if augmentation: image = tf.image.resize_images( image, [int(FLAGS.image_size*1.1), int(FLAGS.image_size*1.1)]) image = tf.random_crop(image, [FLAGS.image_size, FLAGS.image_size, 3]) else: image = tf.image.resize_images( image, [FLAGS.image_size, FLAGS.image_size]) image = tf.divide(image, 255.0) if augmentation: image = _image_augmentation(image) image = _standardize_images(image, FLAGS.image_means, FLAGS.image_stdevs) return {'images': image, 'labels': label} def dataset_iterator(filenames, labels, is_train, augmentation=True): dataset = tf.data.Dataset.from_tensor_slices((filenames, labels)) if is_train: dataset = dataset.shuffle(buffer_size=300) dataset = dataset.apply( tf.contrib.data.map_and_batch( lambda x, y: _parse_function(x, y, augmentation), batch_size=FLAGS.batch_size, num_parallel_batches=1, drop_remainder=False)) if is_train: dataset = dataset.repeat(1) else: dataset = dataset.repeat(1) dataset = dataset.prefetch(buffer_size=tf.contrib.data.AUTOTUNE) return dataset # Create callable iterator functions def train_iterator(): return dataset_iterator(train_files, train_labels, True, FLAGS.color_augmentation) def test_iterator(): return dataset_iterator(test_files, test_labels, False, False) def original_iterator(): return dataset_iterator(train_files, train_labels, False, False) ################################# # Image Statistics for Preprocessing ################################# # Calculate image means and stdevs of training images for RGB channels # for image standardization if (FLAGS.image_means == [0, 0, 0]) and (FLAGS.image_stdevs == [1, 1, 1]): with tf.Session() as sess: original_batch_size = FLAGS.batch_size FLAGS.batch_size = np.min([500, n_train]) dataset = original_iterator() iterator = dataset.make_one_shot_iterator() feature_dict = iterator.get_next() features = sess.run(feature_dict) image_batch = features['images'] means_batch = np.mean(image_batch, axis=(0, 1, 2)) stdev_batch = np.std(image_batch, axis=(0, 1, 2)) FLAGS.batch_size = original_batch_size image_means = [round(float(x), 6) for x in list(means_batch)] image_stdevs = [round(float(x), 4) for x in list(stdev_batch)] FLAGS.image_means = image_means FLAGS.image_stdevs = image_stdevs ################################# # Configure Estimator ################################# n_batches_per_epoch_train = int(round(n_train / FLAGS.batch_size)) # Configurations config_sess = tf.ConfigProto(allow_soft_placement=True) config_sess.gpu_options.per_process_gpu_memory_fraction = 0.8 config_sess.gpu_options.allow_growth = True def distribution_gpus(num_gpus): if num_gpus == 0: return tf.contrib.distribute.OneDeviceStrategy(device='/cpu:0') elif num_gpus == 1: return tf.contrib.distribute.OneDeviceStrategy(device='/gpu:0') elif num_gpus > 1: return tf.contrib.distribute.MirroredStrategy(num_gpus=num_gpus) else: return None # Config estimator est_config = tf.estimator.RunConfig() est_config = est_config.replace( keep_checkpoint_max=3, 
save_checkpoints_steps=n_batches_per_epoch_train, session_config=config_sess, save_checkpoints_secs=None, save_summary_steps=n_batches_per_epoch_train, model_dir=FLAGS.model_save_path, train_distribute=distribution_gpus(FLAGS.num_gpus)) # Model Parameters params = dict() params['label'] = ['labels'] params['n_classes'] = [n_classes] params['weight_decay'] = FLAGS.weight_decay params['momentum'] = 0.9 params['model'] = FLAGS.model params['reuse'] = False params['class_mapping_clean'] = {'labels': num_to_class_map} # create estimator estimator = tf.estimator.Estimator(model_fn=model_fn, params=params, model_dir=FLAGS.model_save_path, config=est_config ) ################################# # Train and Evaluate ################################# def main(args): """ Main - called by command line """ # Print flags for f in flags.FLAGS: print("Flag %s - %s" % (f, FLAGS[f].value)) eval_loss = list() for epoch in range(1, FLAGS.max_epoch + 1): print("Starting with epoch %s" % epoch) # Train for one epoch estimator.train(input_fn=train_iterator) # Evaluate eval_res = estimator.evaluate(input_fn=test_iterator) print("Evaluation results:") for k, v in eval_res.items(): print(" Res for %s - %s" % (k, v)) eval_loss.append(eval_res['loss']) # Predict preds = estimator.predict(input_fn=test_iterator) for i, pred in enumerate(preds): print(pred) if i > 10: break if __name__ == '__main__': tf.app.run()
[ "will5448@umn.edu" ]
will5448@umn.edu
b4e8b03b8387462c961ea36f580a145007ada11a
38b68b2202726bcdea32271448fea22554db6121
/BOJ/Silver/1992.py
3b0a539d575b9951914cdb95f3dbd52b1b69e1cd
[]
no_license
Soohee410/Algorithm-in-Python
42c4f02342dc922e44ee07e3a0e1d6c0a559e0bb
fbc859c092d86174387fe3dc11f16b616e6fdfab
refs/heads/master
2023-05-06T13:07:19.179143
2021-05-14T14:32:44
2021-05-14T14:32:44
336,232,129
4
0
null
null
null
null
UTF-8
Python
false
false
495
py
def QuadTree(n, cp, x, y):
    if n == 1:
        return cp[x][y]
    cp1 = QuadTree(n // 2, cp, x, y)
    cp2 = QuadTree(n // 2, cp, x, y + n // 2)
    cp3 = QuadTree(n // 2, cp, x + n // 2, y)
    cp4 = QuadTree(n // 2, cp, x + n // 2, y + n // 2)
    if cp1 == cp2 == cp3 == cp4 and len(cp1) == 1:
        return cp1
    return '('+cp1+cp2+cp3+cp4+')'

if __name__ == "__main__":
    n = int(input())
    arr = [list(input().rstrip()) for _ in range(n)]
    print(QuadTree(n, arr, 0, 0))
[ "ggohee0410@gmail.com" ]
ggohee0410@gmail.com
b271f6c59aed17cbcb5181bf740eab268fe43bf0
3a588f7dee481de6f84dc23ca1f3c485fc9177aa
/wishlisht/travel_wishlist/models.py
02062481378f554d8085186a15bde2f017772137
[]
no_license
xm6264jz/capston-lab9-django
105f1ff64758b86e221860bc53d1265da63dede6
ff53b0e4715b527c457748c3f188fd7f639354af
refs/heads/master
2023-01-05T12:35:40.256134
2020-11-10T23:29:04
2020-11-10T23:29:04
307,906,432
0
0
null
2020-11-10T20:13:38
2020-10-28T04:29:03
Python
UTF-8
Python
false
false
1,428
py
from django.db import models
from django.contrib.auth.models import User
from django.core.files.storage import default_storage


class Place(models.Model):
    user = models.ForeignKey('auth.User', null=False, on_delete=models.CASCADE)
    name = models.CharField(max_length = 200)
    visited = models.BooleanField(default = False)
    notes = models.TextField(blank=True, null=True)
    date_visited = models.DateField(blank=True, null=True)
    photo = models.ImageField(upload_to='user_images/', blank=True, null=True)

    def save(self, *args, **kwargs):
        # get reference to previous version of this Place
        old_place = Place.objects.filter(pk=self.pk).first()
        if old_place and old_place.photo:
            if old_place.photo != self.photo:
                self.delete_photo(old_place.photo)
        super().save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        if self.photo:
            self.delete_photo(self.photo)
        super().delete(*args, **kwargs)

    def delete_photo(self, photo):
        if default_storage.exists(photo.name):
            default_storage.delete(photo.name)

    def __str__(self):
        photo_str = self.photo.url if self.photo else 'no photo'
        notes_str = self.notes[100:] if self.notes else 'no notes'
        return f'{self.pk}: {self.name} visited? {self.visited} on {self.date_visited}. Notes: {notes_str}. Photo {photo_str}'
[ "ahmed.abdinoor3@gmail.com" ]
ahmed.abdinoor3@gmail.com
1003f7677d03e8e3d0a1afd5c0cd5332d9675674
aeaf548fba8ee9f88cd9254f2bc4ac0a3bbfb207
/zhaquirks/hivehome/__init__.py
68a13be23af5bd6731cf553a3e6f1c6ad1c07794
[ "Apache-2.0" ]
permissive
vigonotion/zha-device-handlers
6001aa812380a0540d76f68778ebade93f93928d
6d0560655428e1f04626a7722febf492c4174e8b
refs/heads/dev
2020-12-26T12:07:27.192810
2020-01-31T17:57:29
2020-01-31T17:57:29
237,504,327
1
0
Apache-2.0
2020-01-31T22:49:11
2020-01-31T19:47:15
null
UTF-8
Python
false
false
948
py
"""Hive Home.""" import asyncio from zigpy.quirks import CustomCluster from zigpy.zcl.clusters.security import IasZone from ..const import CLUSTER_COMMAND, OFF, ZONE_STATE HIVEHOME = "HiveHome.com" class MotionCluster(CustomCluster, IasZone): """Motion cluster.""" cluster_id = IasZone.cluster_id def __init__(self, *args, **kwargs): """Init.""" super().__init__(*args, **kwargs) self._timer_handle = None def handle_cluster_request(self, tsn, command_id, args): """Handle the cluster command.""" if command_id == 0: if self._timer_handle: self._timer_handle.cancel() loop = asyncio.get_event_loop() self._timer_handle = loop.call_later(30, self._turn_off) def _turn_off(self): self._timer_handle = None self.listener_event(CLUSTER_COMMAND, 999, 0, [0, 0, 0, 0]) self._update_attribute(ZONE_STATE, OFF)
[ "noreply@github.com" ]
vigonotion.noreply@github.com
3f3f54c554cdbefbda629cad5a49473819e2debd
d791176b586e993fac51ce2a6b241561badfc009
/ServerAPI/ServerAPI.py
0fb54ffd6ae7613d12a9750ce1b6e320dfd1be3c
[]
no_license
sochkasov/smart8036.v3
3786bca35eb93d8f36985d6720b4e0d1c91a6afc
6a113e5e83ade8f8c8ca25b6d060d83322129216
refs/heads/main
2023-03-07T11:51:14.906152
2021-02-22T15:32:16
2021-02-22T15:32:16
341,245,744
0
0
null
null
null
null
UTF-8
Python
false
false
9,363
py
# coding=utf8 import os from flask import Flask, jsonify, send_file from flask import request from flask_jsonpify import jsonify # для JSONP from flask.json import JSONEncoder import ujson from api_utils import ResponsiveFlask from HeatController.Controller8036 import * import config import datetime import calendar class CustomJSONEncoder(JSONEncoder): ''' Преопределение метода для изменения преобразования даты-времени в нужный формат ''' def default(self, obj): if isinstance(obj, datetime.datetime): return str(obj) return JSONEncoder.default(self, obj) class ServerAPI(object): def __init__(self): self.DEBUG = False print "ServerAPI server\n" self.dbconnect = db.Database() self.ctl8036 = Controller8036() #self.app = ResponsiveFlask(__name__) self.app = Flask(__name__) self.app.json_encoder = CustomJSONEncoder self.app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False return None def json_output(self, *args, **kwargs): """ Создание JSON строки из data. Производится контроль ошибок по флагу error Если есть ошибка, то будет сформировано сообщение из error_message :param data: :param error: :param error_message: :return: """ if not kwargs['error']: # return jsonify(kwargs['result']) return jsonify({"result": kwargs['result'], "error": False, "error_message": kwargs['error_message']}), 200 else: return jsonify({"error": True, "error_message": kwargs['error_message']}), 404 def start(self): @self.app.route('/') def index(): return jsonify(api_version=1, user='user1', datatime=str(datetime.datetime.now().strftime('%H:%M:%S %d-%m-%Y')), date=str(datetime.datetime.now().strftime('%d-%m-%Y')), time=str(datetime.datetime.now().strftime('%H:%M:%S')) ) @self.app.route('/help') def get_help(): result = '''<h2>API methods</h2> <ul> <li>/get/temp/ - get temperature online on JSON format</li> <li>/get/tempraw/ - get temperature online on raw format</li> </ul>''' return {'message': result} @self.app.route('/get/temp/') def get_temp(): return self.json_output(**self.ctl8036.GetSensorsTemperature()) @self.app.route('/get/temp/history/<int:sensor_link_id>') def get_temp_history(sensor_link_id): return self.json_output(**self.ctl8036.get_sensor_hostory(sensor_link_id)) @self.app.route('/get/tempraw/') def get_tempraw(): return self.json_output(**self.ctl8036.GetTemperatureCurrent()) @self.app.route('/set/timesync/') def set_temesync(): return self.json_output(**self.ctl8036.timedate_sync()) @self.app.route('/get/program_raw/') def get_programm_raw(): return self.json_output(**self.ctl8036.get_program_raw()) @self.app.route('/get/program_json/') def get_programm_json(): return self.json_output(**self.ctl8036.get_program_json()) @self.app.route('/get/actuator_status/') def get_actuators_status_json(): return self.json_output(**self.ctl8036.get_actuators_status_json()) @self.app.route('/get/test/') def get_test(): return jsonify({'now': datetime.datetime.now()}) @self.app.route('/favicon.ico') def favicon(): # mimetype='image/vnd.microsoft.icon' #return 
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAgY0hSTQAAeiYAAICEAAD6AAAAgOgAAHUwAADqYAAAOpgAABdwnLpRPAAAAIFQTFRFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////basLdwAAACp0Uk5TAAgEHQMYbYWESXFmcywiYnuGamt0QyafblMHb1kLCmg8AjZ2I2EQciFgf8bHLwAAAAFiS0dEKlO+1J4AAAAJcEhZcwAAAEgAAABIAEbJaz4AAACLSURBVBgZBcELQoJQEADAkeengJYiIrXC76Lc/4LNAAAAWK0AqEpZb7bbDYCye3mtm6Z9A4ju/aPv+8/haw2M3/v6cDwe6p/fP5hO++F84XIe+mbCdL2NwHi7TnDvALo7iDRnZs4yQKR4PJ+PkAEixcISMkCkSDJkgLJb2iTbZVdAVSJm5ohSAQDgH8c2Ci4yRvReAAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDE2LTA5LTE2VDA4OjI4OjI3KzAwOjAwlMYpngAAACV0RVh0ZGF0ZTptb2RpZnkAMjAxNi0wOS0xNlQwODoyODoyNyswMDowMOWbkSIAAABGdEVYdHNvZnR3YXJlAEltYWdlTWFnaWNrIDYuNy44LTkgMjAxNC0wNS0xMiBRMTYgaHR0cDovL3d3dy5pbWFnZW1hZ2ljay5vcmfchu0AAAAAGHRFWHRUaHVtYjo6RG9jdW1lbnQ6OlBhZ2VzADGn/7svAAAAGHRFWHRUaHVtYjo6SW1hZ2U6OmhlaWdodAAxOTIPAHKFAAAAF3RFWHRUaHVtYjo6SW1hZ2U6OldpZHRoADE5MtOsIQgAAAAZdEVYdFRodW1iOjpNaW1ldHlwZQBpbWFnZS9wbmc/slZOAAAAF3RFWHRUaHVtYjo6TVRpbWUAMTQ3NDAxNDUwN/jek0AAAAAPdEVYdFRodW1iOjpTaXplADBCQpSiPuwAAABWdEVYdFRodW1iOjpVUkkAZmlsZTovLy9tbnRsb2cvZmF2aWNvbnMvMjAxNi0wOS0xNi9jY2EzODcyMTQ3Mjc5YTVmYTVmMDVlNDJiYzA4ZDI0NC5pY28ucG5nM8/R6gAAAABJRU5ErkJggg==", 200, {'Content-Type': 'image/vnd.microsoft.icon'} #data = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAgY0hSTQAAeiYAAICEAAD6AAAAgOgAAHUwAADqYAAAOpgAABdwnLpRPAAAAIFQTFRFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////basLdwAAACp0Uk5TAAgEHQMYbYWESXFmcywiYnuGamt0QyafblMHb1kLCmg8AjZ2I2EQciFgf8bHLwAAAAFiS0dEKlO+1J4AAAAJcEhZcwAAAEgAAABIAEbJaz4AAACLSURBVBgZBcELQoJQEADAkeengJYiIrXC76Lc/4LNAAAAWK0AqEpZb7bbDYCye3mtm6Z9A4ju/aPv+8/haw2M3/v6cDwe6p/fP5hO++F84XIe+mbCdL2NwHi7TnDvALo7iDRnZs4yQKR4PJ+PkAEixcISMkCkSDJkgLJb2iTbZVdAVSJm5ohSAQDgH8c2Ci4yRvReAAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDE2LTA5LTE2VDA4OjI4OjI3KzAwOjAwlMYpngAAACV0RVh0ZGF0ZTptb2RpZnkAMjAxNi0wOS0xNlQwODoyODoyNyswMDowMOWbkSIAAABGdEVYdHNvZnR3YXJlAEltYWdlTWFnaWNrIDYuNy44LTkgMjAxNC0wNS0xMiBRMTYgaHR0cDovL3d3dy5pbWFnZW1hZ2ljay5vcmfchu0AAAAAGHRFWHRUaHVtYjo6RG9jdW1lbnQ6OlBhZ2VzADGn/7svAAAAGHRFWHRUaHVtYjo6SW1hZ2U6OmhlaWdodAAxOTIPAHKFAAAAF3RFWHRUaHVtYjo6SW1hZ2U6OldpZHRoADE5MtOsIQgAAAAZdEVYdFRodW1iOjpNaW1ldHlwZQBpbWFnZS9wbmc/slZOAAAAF3RFWHRUaHVtYjo6TVRpbWUAMTQ3NDAxNDUwN/jek0AAAAAPdEVYdFRodW1iOjpTaXplADBCQpSiPuwAAABWdEVYdFRodW1iOjpVUkkAZmlsZTovLy9tbnRsb2cvZmF2aWNvbnMvMjAxNi0wOS0xNi9jY2EzODcyMTQ3Mjc5YTVmYTVmMDVlNDJiYzA4ZDI0NC5pY28ucG5nM8/R6gAAAABJRU5ErkJggg==" #data = 
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAgY0hSTQAAeiYAAICEAAD6AAAAgOgAAHUwAADqYAAAOpgAABdwnLpRPAAAAIFQTFRFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////basLdwAAACp0Uk5TAAgEHQMYbYWESXFmcywiYnuGamt0QyafblMHb1kLCmg8AjZ2I2EQciFgf8bHLwAAAAFiS0dEKlO+1J4AAAAJcEhZcwAAAEgAAABIAEbJaz4AAACLSURBVBgZBcELQoJQEADAkeengJYiIrXC76Lc/4LNAAAAWK0AqEpZb7bbDYCye3mtm6Z9A4ju/aPv+8/haw2M3/v6cDwe6p/fP5hO++F84XIe+mbCdL2NwHi7TnDvALo7iDRnZs4yQKR4PJ+PkAEixcISMkCkSDJkgLJb2iTbZVdAVSJm5ohSAQDgH8c2Ci4yRvReAAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDE2LTA5LTE2VDA4OjI4OjI3KzAwOjAwlMYpngAAACV0RVh0ZGF0ZTptb2RpZnkAMjAxNi0wOS0xNlQwODoyODoyNyswMDowMOWbkSIAAABGdEVYdHNvZnR3YXJlAEltYWdlTWFnaWNrIDYuNy44LTkgMjAxNC0wNS0xMiBRMTYgaHR0cDovL3d3dy5pbWFnZW1hZ2ljay5vcmfchu0AAAAAGHRFWHRUaHVtYjo6RG9jdW1lbnQ6OlBhZ2VzADGn/7svAAAAGHRFWHRUaHVtYjo6SW1hZ2U6OmhlaWdodAAxOTIPAHKFAAAAF3RFWHRUaHVtYjo6SW1hZ2U6OldpZHRoADE5MtOsIQgAAAAZdEVYdFRodW1iOjpNaW1ldHlwZQBpbWFnZS9wbmc/slZOAAAAF3RFWHRUaHVtYjo6TVRpbWUAMTQ3NDAxNDUwN/jek0AAAAAPdEVYdFRodW1iOjpTaXplADBCQpSiPuwAAABWdEVYdFRodW1iOjpVUkkAZmlsZTovLy9tbnRsb2cvZmF2aWNvbnMvMjAxNi0wOS0xNi9jY2EzODcyMTQ3Mjc5YTVmYTVmMDVlNDJiYzA4ZDI0NC5pY28ucG5nM8/R6gAAAABJRU5ErkJggg==' #return send_file(data, mimetype='image/vnd.microsoft.icon',) #return Response(stream_with_context(data), mimetype='image/vnd.microsoft.icon') #return send_from_directory(os.path.join(self.app.root_path, 'static'),'htdocs/favicon/home-outline.ico/favicon.ico',mimetype='image/vnd.microsoft.icon') return send_file('/root/8036/htdocs/favicon/home-outline.ico/favicon.ico', mimetype='image/vnd.microsoft.icon', ) @self.app.errorhandler(404) def page_not_found(error): return {'error': 'This API method does not exist'}, 404 # Running web server #if __name__ == '__main__': if __name__ == 'ServerAPI.ServerAPI': print "API server listen on port 5000 ..." # global DEBUG # if self.DEBUG: # self.app.debug = True #self.app.run(host=config.http_listen_ip, port=config.http_listen_port, debug=config.debug_enable, threaded=False) self.app.run(host=config.http_listen_ip, port=config.http_listen_port, debug=False, threaded=False) def stop(self): func = request.environ.get('werkzeug.server.shutdown') if func is None: raise RuntimeError('Not running with the Werkzeug Server') func()
[ "sochkasov@gmail.com" ]
sochkasov@gmail.com
ba5eaf26fde1ee4230e44bfb96255fca3568f0ea
3bea1c3f2d4834b9174664e4ee89ebbddde22e89
/app.py
f9ef295b9b9452b012e6df87fa15627824c52c47
[]
no_license
RakshithRajesh/Dataset-Maker
9e780117dd91d528b5803174080ab6d86fd90ca2
b43a8634e578a780847569724e39183c297d3ead
refs/heads/main
2023-08-24T07:28:02.235951
2021-10-02T14:20:21
2021-10-02T14:20:21
412,806,631
1
0
null
null
null
null
UTF-8
Python
false
false
1,643
py
from requests_html import HTMLSession
from random import randint
import time
import os

item = "car"

s = HTMLSession()
r = s.get(f"https://www.google.com/search?q={item}&hl=en&tbm=isch")
r.html.render(timeout=10, scrolldown=2500)
time.sleep(4)

images = r.html.find('img[jsname="Q4LuWd"]')
image_url_list = []

try:
    for image in images:
        if "data" not in image.attrs["src"]:
            image_url_list.append(image.attrs["src"])
except:
    pass

for i, link in enumerate(image_url_list):
    response = s.get(link)
    newpath = f"{item}"
    if not os.path.exists(newpath):
        os.makedirs(newpath)
    with open(f"{newpath}/{i}.png", "wb") as w:
        w.write(response.content)
    print(link)

print(len(image_url_list))

next_link = r.html.find('a[jslog="11106"]')
for link in next_link:
    if link:
        if len(image_url_list) < 1001:
            response = s.get(f"https://www.google.com/{link.attrs['href']}")
            r.html.render(timeout=10, scrolldown=2500)
            time.sleep(4)
            images = r.html.find('img[jsname="Q4LuWd"]')
            try:
                for image in images:
                    if "data" not in image.attrs["src"]:
                        image_url_list.append(image.attrs["src"])
            except:
                pass
            for link in image_url_list:
                response = s.get(link)
                newpath = f"{item}"
                with open(f"{newpath}/{randint(10,10000)}.png", "wb") as w:
                    w.write(response.content)
                print(link)
            print(len(image_url_list))
        else:
            break
[ "noreply@github.com" ]
RakshithRajesh.noreply@github.com
bf6ff385d4a25e401c65c0c285afab951c5bc4de
6222e729592de24344e30a9e2535e2737d587dfe
/2. Market Data/beta_hedging.py
ee229644320206fafa6d9a5c77108e1cbdf70bc0
[ "Apache-2.0" ]
permissive
Nhiemth1985/Pynaissance
a145e118a0ef2a8894247c99978c29701bc34077
7034798a5f0b92c6b8fdfa5948d2ad78a77a1a05
refs/heads/master
2023-08-25T21:07:16.865197
2021-10-19T15:29:44
2021-10-19T15:29:44
null
0
0
null
null
null
null
UTF-8
Python
false
false
354
py
# Basic Setup: Import Libraries
import numpy as np
from statsmodels import regression
import statsmodels.api as sm
import matplotlib.pyplot as plt
import math

start = '2018-01-01'
end = '2019-01-01'

asset = get_pricing('AMD', fields='price', start_date=start, end_date=end)
benchmark = get_pricing('SPY', fields='price', start_date=start, end_date=end)
[ "noreply@github.com" ]
Nhiemth1985.noreply@github.com
702e8bbceb76f147529dc19c0d81bd03baf380e0
8415845ada32baa8b047f59d959b60651be4b113
/amt/AMTscript.py
7793fc8387a99992d7609859c422240d575aa770
[ "MIT" ]
permissive
macilath/CrowdMix
aa21cb7ac6a22f15b1ef46f594ed91835b60fc99
65589fabe27c1eed0f09832f3d9ff2b40eb97b42
refs/heads/master
2016-09-06T16:27:35.554171
2014-03-31T01:43:18
2014-03-31T01:43:18
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,706
py
# This is the main AMT script that should be executed and will do the following: # * Create a HIT # * Wait for the HIT to become reviewable # * Process assignments once the HIT is reviewable # * Check whether their input code is one that we gave them. # * If yes, pay them, if not, reject them. # NOTE: Should this script close before all assignments are reviewed, please run 'AMTpay.py' # NOTE: Fill in your AWS keys in 'ACCESS_KEY' and 'SECRET_KEY' from boto.mturk.connection import MTurkConnection, HIT from boto.mturk.question import SimpleField,QuestionContent,Question,QuestionForm,Overview,AnswerSpecification,SelectionAnswer,FormattedContent,FreeTextAnswer import time ACCESS_ID = '' SECRET_KEY = '' HOST = 'mechanicalturk.amazonaws.com' # this mtc is used for creating the HIT mtc = MTurkConnection(aws_access_key_id=ACCESS_ID, aws_secret_access_key=SECRET_KEY, host=HOST) title = 'CrowdMix: Remix a Classical Composition' description = ('Help remix a classical music composition ' 'by choosing the next sound bits! Simple, easy, and fast!') keywords = 'music, create, easy, fast' max_assignments = 50 # acceptable codes that will ge the turker paid payCodes = ['CG6H5', 'X38T1', 'S1W59', 'D2K9K', 'DCURP', 'KJHCY', 'KSSIZ', 'YYLMB', '47NQK', 'WILIM'] #-------------- WAIT FUNCTION ---------------------------- # wait timer function since time.sleep() was giving issues def wait(time_lapse): time_start = time.time() time_end = (time_start + time_lapse) while time_end > time.time(): pass # this mtc is used for data retrieval mtc2 = MTurkConnection(aws_access_key_id=ACCESS_ID, aws_secret_access_key=SECRET_KEY, host=HOST) #--------------- GET ALL REVIEWABLE HITS FUNCTION ---------- def get_all_reviewable_hits(mtc2): page_size = 50 hits = mtc2.get_reviewable_hits(page_size=page_size) print "Total results to fetch %s " % hits.TotalNumResults print "Request hits page %i" % 1 total_pages = float(hits.TotalNumResults)/page_size int_total= int(total_pages) if(total_pages-int_total>0): total_pages = int_total+1 else: total_pages = int_total pn = 1 while pn < total_pages: pn = pn + 1 print "Request hits page %i" % pn temp_hits = mtc2.get_reviewable_hits(page_size=page_size,page_number=pn) hits.extend(temp_hits) return hits #--------------- BUILD OVERVIEW ------------------- overview = Overview() overview.append_field('Title', 'CrowdMix: Remix a Classical Composition') overview.append(FormattedContent('<a target="_blank"' ' href="http://allekant.com/cgi-bin/welcome.py">' ' CrowdMix Homepage</a>')) overview.append(FormattedContent('Please visit the link above in order to complete this HIT.\n' 'When completed, you will be given a code to input below.')) overview.append(FormattedContent('On the webpage linked above, you will be given five random ' 'sound bits to listen to. Then you will choose two clips, using ' 'the radio buttons, that will be combined and added with other ' 'clips to make a new song')) overview.append(FormattedContent('When you have selected two clips using the radio buttons, ' 'click the Load My New Choice button to confirm your selection.')) overview.append(FormattedContent('When you are satisfied with your choices and you have ' 'already clicked the Load My New Choice button, click the ' 'Submit My Decision and Get My Code! button.')) overview.append(FormattedContent('After clicking the Submit button, you will be given a code ' 'to input in to the box below. 
Once you have put the code in ' 'box, submit the hit and wait for approval.')) #--------------- BUILD QUESTION 1 ------------------- qc1 = QuestionContent() qc1.append_field('Title','Enter your code in the box below.') fta1 = FreeTextAnswer(); q1 = Question(identifier='code', content=qc1, answer_spec=AnswerSpecification(fta1), is_required=True) #--------------- BUILD THE QUESTION FORM ------------------- question_form = QuestionForm() question_form.append(overview) question_form.append(q1) #--------------- CREATE THE HIT ------------------- mtc.create_hit(questions=question_form, max_assignments = max_assignments, title = title, description = description, keywords = keywords, duration = 60*5, reward = 0.25) #--------------- WAIT FOR ASSIGNMENTS TO COMPLETE ---------- #-------------------- AND REVIEW ASSIGNMENTS --------------- hits = get_all_reviewable_hits(mtc2) hitReviewed = False; # this busy loops until we have processed the one reviewable HIT while True: if not hits: print "Waiting for reviewable hits..." hits = get_all_reviewable_hits(mtc2) else: for hit in hits: # for every hit that's reviewable, review turker answers assignments = mtc2.get_assignments(hit.HITId) for assignment in assignments: # get individual turker assignments print "Answers of the worker %s" % assignment.WorkerId for question_form_answer in assignment.answers[0]: for key in question_form_answer.fields: # get individual turker answers to assignments print "%s" % (key) if key.upper() in payCodes: # if they used the right code, approve/pay them print "%s: Accepted and paid!" % assignment.WorkerId mtc2.approve_assignment(assignment.AssignmentId) else: # if they used the wrong code, reject them print "%s: Rejected and not paid!" % assignment.WorkerId mtc2.reject_assignment(assignment.AssignmentId, feedback = 'Invalid code.') print "--------------------" # the hit stays enabled in case a turker is rejected, however they should have been approved # this should hopefully never happen ##mtc2.disable_hit(hit.HITId) hitReviewed = True; if hitReviewed: # since I know that I only submit one HIT, I quit after one HIT is reviewed break; else: # wait 30 seconds so that Amazon does not get mad wait(30) print "All assignments have been reviewed!\n" print "Program has been terminated!"
[ "clayton.crawford@tamu.edu" ]
clayton.crawford@tamu.edu
0f3cc4a2087d8125cc761a1644c51c12e6c814d4
d838bed08a00114c92b73982a74d96c15166a49e
/docs/data/learn/Bioinformatics/output/ch6_code/src/Stepik.6.9.CodeChallenge.2BreakDistance.py
a9ce5254b6d1201e2c2202e7b13a59eeda40ae42
[]
no_license
offbynull/offbynull.github.io
4911f53d77f6c59e7a453ee271b1e04e613862bc
754a85f43159738b89dd2bde1ad6ba0d75f34b98
refs/heads/master
2023-07-04T00:39:50.013571
2023-06-17T20:27:05
2023-06-17T23:27:00
308,482,936
1
0
null
null
null
null
UTF-8
Python
false
false
575
py
from BreakpointGraph import BreakpointGraph

with open('/home/user/Downloads/dataset_240324_4.txt', mode='r', encoding='utf-8') as f:
    data = f.read()
lines = data.split('\n')

p_list1 = [[int(x) for x in s.split(' ')] for s in lines[0][1:-1].split(')(')]
p_list2 = [[int(x) for x in s.split(' ')] for s in lines[1][1:-1].split(')(')]

bg = BreakpointGraph(p_list1, p_list2)
cycles = bg.get_red_blue_cycles()

block_count = len(bg.node_to_blue_edges) // 2  # number of synteny blocks is number of nodes / 2
cycle_count = len(cycles)

print(f'{block_count - cycle_count}')
[ "offbynull@gmail.com" ]
offbynull@gmail.com
2099ab416d5ffcd03de217a98cb5cac3527bc5c4
85f2798326d6bb4ccabd3b98ac2bdb545911b5f6
/tdd-demo/todolist-app.py
37fcd91486c66ceddbfb1577a486a6c109d1ef2c
[]
no_license
nickfuentes/Python-Practice
6600aea672fd2bd9ce140ccb8aa941b3cc62d93f
17bb171ea5c92fdc471af41e3cc74b8772a462bd
refs/heads/master
2020-06-10T22:11:25.837714
2019-07-07T19:55:00
2019-07-07T19:55:00
193,768,512
0
1
null
2019-07-03T14:04:22
2019-06-25T19:11:52
Python
UTF-8
Python
false
false
100
py
# Unit Tests
# Are Atomic and independent
# Should not be Dependent
# Never have Side effect
[ "nickfuentes24@gmail.com" ]
nickfuentes24@gmail.com
b7dff73104d8dea5fd1397137f879ea0a52b8404
b6de31e6ca07500daef559462533bd1a3585e0b9
/img_saving_script.py
52bd8c293ee5be6c8491a32e649949403377b396
[]
no_license
NickNagy/UWIRLMachineLearningResearch
30deb91a20efe75e0345799fc03ad02c847c4458
a9fd75d1ed14b6bf79a42b264a6238e698d56c80
refs/heads/master
2020-05-05T12:36:16.394862
2019-04-07T23:41:17
2019-04-07T23:41:17
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,275
py
""" Quick Python script for saving .png files from the crop_imag.mat images. """ import numpy as np from scipy import io from scipy.misc import imsave import os from PIL import Image #from matplotlib import pyplot as plt directory = # def save_image(folder_name, extension=".jpg"): try: img = np.array((io.loadmat('crop_image.mat', appendmat=False))['dxImage']['img'][0][0]) img_compressed = (img*255.0/np.max(img)).astype('uint8') rgb_img = np.asarray(Image.fromarray(img_compressed).convert('RGB')) imsave(folder_name + extension, rgb_img) except FileNotFoundError: try: #print("Running...") img = np.array((io.loadmat('crop_image_fracturemask.mat', appendmat=False))['dxImage']['img'][0][0]) img_compressed = (img * 255.0 / np.max(img)).astype('uint8') rgb_img = np.asarray(Image.fromarray(img_compressed).convert('RGB')) imsave(folder_name + extension, rgb_img) except FileNotFoundError: print("Could not locate mat file in folder " + folder_name) for subdir, dirs, files in os.walk(directory): #print(dirs) for folder in dirs: os.chdir(directory + "\\" + folder) #print(folder) save_image(folder, ".png")
[ "noreply@github.com" ]
NickNagy.noreply@github.com
e2fd6628e5ba3d7cd17f41118334deb2556a3926
ae13b9d10b738f1365977741c45e9e6959502ba5
/employerapp/views.py
5605c6efdf879b996b50f22b6a057a840d3f8cc2
[]
no_license
AissatouSECK/Projet-Django
960d062907ff06c2b809f4c68e4f7697c230f826
aca9346ce292f26b908e537c001c1f3f18136574
refs/heads/master
2023-08-14T11:58:24.842259
2021-09-16T21:51:07
2021-09-16T21:51:07
407,295,517
0
0
null
null
null
null
UTF-8
Python
false
false
1,301
py
from django.shortcuts import render, redirect
from .models import Employer, Departement
from .forms import Form_employer

# To create employee
def emp(request):
    if request.method == "POST":
        form = Form_employer(request.POST)
        if form.is_valid():
            try:
                form.save()
                return redirect("/showemp")
            except:
                pass
    else:
        form = Form_employer()
    return render(request, "index.html", {'form':form})

# To show employee details
def showemp(request):
    employees = Employer.objects.all()
    return render(request, "main.html", {'employees':employees})

# To delete employee details
def deleteEmp(request, pk):
    employee = Employer.objects.get(pk=pk)
    employee.delete()
    return redirect("/showemp")

# To edit employee details
def editemp(request, pk):
    employee = Employer.objects.get(pk=pk)
    return render(request, "edit.html", {'employee':employee})

# To update employee details
def updateEmp(request, pk):
    employee = Employer.objects.get(pk=pk)
    form = Form_employer(request.POST, instance= employee)
    if form.is_valid():
        form.save()
        return redirect("/showemp")
    return render(request, "main.html", {'employee': employee})
[ "sstseck@gmail.com" ]
sstseck@gmail.com
4cfda7eb2e215caab64ce291445b94773a94655f
17c6289537851347c691c46570efe98a47f57169
/scripts/python_code-set/main/main_analysis_fitparam.py
dd98cff0303b0858007d6db06873667f9b293e1a
[]
no_license
miiya369/analysisHAL_miya
67516fb7192ce7c3d0a0c5bace3f3e1b4c850d26
76a6d80bb4a7f24c0deeca770f60efd440b72f3c
refs/heads/master
2020-03-09T09:17:51.926630
2018-10-04T10:44:45
2018-10-04T10:44:45
99,018,105
0
0
null
null
null
null
UTF-8
Python
false
false
2,786
py
#!/usr/bin/python
# -*- coding: utf-8 -*-

from __future__ import print_function

import sys, os
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../lib")

import numpy as np
import time

### ================== Global Parameters Init. ================= ###
ifname = None
r_min = 0.001
r_del = 0.01
r_max = 2.5
### =========================== Main =========================== ###
def main():
    from common.misc import frange
    from common.statistics import make_mean_err
    from fitting.io_params import input_params
    from fitting.fitfunc_type import set_fitfunc_from_fname

    func_name, params = input_params(ifname)
    if (func_name is params is None):
        return -1

    Nconf = len(params[:,0])
    Nparam = len(params[0,:])
    fit_func = set_fitfunc_from_fname(func_name)

    for r in frange(r_min, r_max, r_del):
        print("%lf %1.16e %1.16e" %
              (r, *make_mean_err(np.array([fit_func(r,*params[iconf,:]) for iconf in range(Nconf)]))))

    return 0
### ============================================================ ###

###### Functions for arguments
def usage(ARGV0):
    print("usage : python %s [ifile] {options}\n" % os.path.basename(ARGV0))
    print("options:")
    print("      --r_min [Minimum range (fm)]  Default =", r_min)
    print("      --r_del [Range division (fm)] Default =", r_del)
    print("      --r_max [Maximum range (fm)]  Default =", r_max)
    exit(1)

def check_args():
    print("# === Check Arguments ===")
    print("# ifile =", ifname)
    print("# r min =", r_min)
    print("# r del =", r_del)
    print("# r max =", r_max)
    print("# =======================")

def set_args(ARGC, ARGV):
    global ifname, r_min, r_del, r_max

    if (ARGV[1][0] == '-'):
        usage(ARGV[0])
    ifname = ARGV[1].strip()

    for i in range(2, ARGC):
        if (len(ARGV[i]) == 1):
            continue
        if (ARGV[i][0] == '-' and ARGV[i][1] == '-'):
            if (ARGV[i] == '--r_min'):
                r_min = float(ARGV[i+1])
            elif (ARGV[i] == '--r_del'):
                r_del = float(ARGV[i+1])
            elif (ARGV[i] == '--r_max'):
                r_max = float(ARGV[i+1])
            else:
                print("\nERROR: Invalid option '%s'\n" % ARGV[i])
                usage(ARGV[0])

    check_args()
### ============================================================ ###

### ============================================================ ###
if __name__ == "__main__":
    argv = sys.argv; argc = len(argv)

    if (argc == 1):
        usage(argv[0])

    set_args(argc, argv)

    t_start = time.time()
    if (main() != 0):
        exit("ERROR EXIT.")
    print("#\n# Elapsed time [s] = %d" % (time.time() - t_start))
[ "miiya369@gmail.com" ]
miiya369@gmail.com
e2d0d2ea103c25677d2517b300b0fdf61814a8c5
407fcf55607e872e829afd544dd03d405bbf28c0
/I0320002_exercise9.5.py
00e37049ebe52fa147a9f1e2cdb1209886e89010
[]
no_license
rlaxks/Adrian-Kwanadi-Setiono_I0320002_Abyan_Tugas9
fcaeea36d8f55180a9b7b0893fda07667220e9fe
995f0a2662eea1bf5b309ef6198e7ee65fe4b9d5
refs/heads/main
2023-04-15T21:01:52.643494
2021-04-30T12:57:52
2021-04-30T12:57:52
361,767,179
0
0
null
null
null
null
UTF-8
Python
false
false
196
py
A = [ [ [10,20,30], [40,50,60] ], [ [11,21,31], [41,51,61] ] ]

# access element 10
A[0][0][0]

# access element 50
A[0][1][1]
[ "adrian.kwanadi@gmail.com" ]
adrian.kwanadi@gmail.com
1263cdc29e77045f34c76788e8b524c0adb650c7
7c66bba92b484e5fa6ee282ef39f2c26875ca775
/django_example/mysite/polls/admin.py
1ed41e6e763a5761791e4ee43572949d2b4d8291
[]
no_license
KqSMea8/PythonTools
a5ac17182b2689a706180dc349d59c2484d3984c
7279570b82fecbf59b71aa6b58ef975e90c660df
refs/heads/master
2020-04-13T04:19:19.209243
2018-12-24T05:13:12
2018-12-24T05:13:12
null
0
0
null
null
null
null
UTF-8
Python
false
false
723
py
from django.contrib import admin
from .models import Question, Choice

# Register your models here.
class ChoiceInline(admin.TabularInline):
    model = Choice
    extra = 3

# admin.site.register(Question)
class QuestionAdmin(admin.ModelAdmin):
    # fields = ['pub_date', 'question_text']
    fieldsets = [
        (None, {'fields': ['question_text']}),
        ('Date information', {'fields': ['pub_date']})
    ]
    inlines = [ChoiceInline]
    list_display = ('question_text', 'pub_date', 'was_published_recently')
    list_filter = ['pub_date']
    search_fields = ['question_text']
    date_hierarchy = 'pub_date'
    list_per_page = 5

admin.site.register(Question, QuestionAdmin)
# admin.site.register(Choice)
[ "xinluomed_yuxuecheng@git.cloud.tencent.com" ]
xinluomed_yuxuecheng@git.cloud.tencent.com
0f9760cc2ff333b4463fe208a835ac180db48dce
537b9efd439a842216a7979eb2c29bd02083732a
/python/marks.py
5688ff1fc98b574847a616fdbd6434f6a5079122
[]
no_license
grand-27-master/Data-Science-course
c1e92b8e1b820d72a9c7e2d3694ec100a8177ac7
90006685cff7988593906422773cdb0331b94083
refs/heads/master
2023-07-21T21:25:48.687018
2021-09-03T10:09:16
2021-09-03T10:09:16
394,889,532
4
0
null
null
null
null
UTF-8
Python
false
false
262
py
p=int(input("enter marks of physics="))
m=int(input("enter marks of maths="))
c=int(input("enter marks of chemistry="))
avg_marks=(p+m+c)/3
print("avg marks=",avg_marks)
if avg_marks>98:
    print("you have been awarded the scholarship")
else:print("sorry")
[ "gajjarv2001@gmail.com" ]
gajjarv2001@gmail.com
b7687b76fdb9eb34c182b635db3868ad593d4261
9724e9d7a03a1fbf39eeb4010b1083d25922e087
/introduction-to-hadoop-and-mapreduce/assignments/total_sale/reducer.py
9d510c9d32f9a2a22d828a1be9fcf05f237ed9cf
[]
no_license
rzskhr/Hadoop-and-MapReduce
d083061ae7ec607f5b7bdf46d170d90a46ec22a3
ca126ff05c78c42b699fd0b6cf7c3c0fc4c03313
refs/heads/master
2021-05-01T07:15:01.456532
2018-03-18T01:47:43
2018-03-18T01:47:43
121,152,389
1
0
null
null
null
null
UTF-8
Python
false
false
297
py
#!/usr/bin/python

import sys

count = 0
total = 0

for line in sys.stdin:
    data_mapped = line.strip().split("\t")
    if len(data_mapped) != 2:
        continue

    thisKey, thisSale = data_mapped
    count += 1
    total += float(thisSale)

print "Total:\t", total, "\nCount:\t", count
[ "rzskhr@outlook.com" ]
rzskhr@outlook.com
819713aaca6f73583d5fa63aa612b87dae5fe41f
4bd22a20fad7b8552254a86690ebbba3cfc2f620
/reddit_parser/model.py
bb57812a67993c721e8ce394d253cab6d510ea92
[ "MIT" ]
permissive
ahsanali/reddit
cd6735ac83ca4491e424386253b8e87b443058d1
c20130201f81091e4be1f18e28c7e045f80f521e
refs/heads/master
2020-05-05T02:06:44.466989
2013-08-05T08:44:39
2013-08-05T08:44:39
null
0
0
null
null
null
null
UTF-8
Python
false
false
7,591
py
# -*- coding: utf-8 -*- from sqlalchemy import Column, ForeignKey, not_,types from sqlalchemy.ext.mutable import Mutable from werkzeug import generate_password_hash, check_password_hash from flask.ext.login import UserMixin from sqlalchemy.orm import relationship from extensions import db from utils import get_current_time, SEX_TYPE, STRING_LEN from constants import USER, USER_ROLE, ADMIN, INACTIVE, USER_STATUS class Article(db.Model): __tablename__ = 'articles' # title = Column(db.Text, nullable=False) id = Column(db.Text, primary_key=True) # author = Column(db.Text, nullable=False) # num_comments = Column(db.Integer,default = 0) # ups = Column(db.Integer,default = 0) # downs = Column(db.Integer,default = 0) # subreddit_id = Column(db.Text, nullable=False) kind = Column(db.Text, nullable=False) data = Column(db.Text, nullable=False) def save(self): db.session.add(self) db.session.commit() class Comment(db.Model): __tablename__ = 'comments' id = Column(db.Text, primary_key=True) kind = Column(db.Text, nullable=False) data = Column(db.Text, nullable=False) reddit_id = Column(db.Text, db.ForeignKey("articles.id")) def save(self): db.session.add(self) db.session.commit() class DenormalizedText(Mutable, types.TypeDecorator): """ Stores denormalized primary keys that can be accessed as a set. :param coerce: coercion function that ensures correct type is returned :param separator: separator character """ impl = types.Text def __init__(self, coerce=int, separator=" ", **kwargs): self.coerce = coerce self.separator = separator super(DenormalizedText, self).__init__(**kwargs) def process_bind_param(self, value, dialect): if value is not None: items = [str(item).strip() for item in value] value = self.separator.join(item for item in items if item) return value def process_result_value(self, value, dialect): if not value: return set() return set(self.coerce(item) for item in value.split(self.separator)) def copy_value(self, value): return set(value) class UserDetail(db.Model): __tablename__ = 'user_details' id = Column(db.Integer, primary_key=True) age = Column(db.Integer) phone = Column(db.String(STRING_LEN)) url = Column(db.String(STRING_LEN)) deposit = Column(db.Numeric) location = Column(db.String(STRING_LEN)) bio = Column(db.String(STRING_LEN)) sex_code = db.Column(db.Integer) @property def sex(self): return SEX_TYPE.get(self.sex_code) created_time = Column(db.DateTime, default=get_current_time) class User(db.Model, UserMixin): __tablename__ = 'users' id = Column(db.Integer, primary_key=True) name = Column(db.String(STRING_LEN), nullable=False, unique=True) email = Column(db.String(STRING_LEN), nullable=False, unique=True) openid = Column(db.String(STRING_LEN), unique=True) activation_key = Column(db.String(STRING_LEN)) created_time = Column(db.DateTime, default=get_current_time) avatar = Column(db.String(STRING_LEN)) _password = Column('password', db.String(STRING_LEN), nullable=False) def _get_password(self): return self._password def _set_password(self, password): self._password = generate_password_hash(password) # Hide password encryption by exposing password field only. 
password = db.synonym('_password', descriptor=property(_get_password, _set_password)) def check_password(self, password): if self.password is None: return False return check_password_hash(self.password, password) def reset_password(self): self.activation_key = str(uuid4()) db.session.add(self) db.session.commit() def change_password(self): self.password = self.password.data self.activation_key = None db.session.add(self) db.session.commit() # ================================================================ # One-to-many relationship between users and roles. role_code = Column(db.SmallInteger, default=USER) @property def role(self): return USER_ROLE[self.role_code] def is_admin(self): return self.role_code == ADMIN # ================================================================ # One-to-many relationship between users and user_statuses. status_code = Column(db.SmallInteger, default=INACTIVE) @property def status(self): return USER_STATUS[self.status_code] # ================================================================ # One-to-one (uselist=False) relationship between users and user_details. user_detail_id = Column(db.Integer, db.ForeignKey("user_details.id")) user_detail = db.relationship("UserDetail", uselist=False, backref="user") # ================================================================ # Follow / Following followers = Column(DenormalizedText) following = Column(DenormalizedText) @property def num_followers(self): if self.followers: return len(self.followers) return 0 @property def num_following(self): return len(self.following) def follow(self, user): user.followers.add(self.id) self.following.add(user.id) user.followers=list(user.followers) self.following=list(self.following) # user.followers= # db.session.add(self) # db.session.add(user) # print "1.0" db.session.commit() def unfollow(self, user): if self.id in user.followers: print "1.0:%s"%user.followers user.followers.remove(self.id) user.followers=list(user.followers) print "2.0:%s"%user.followers db.session.add(user) if user.id in self.following: self.following.remove(user.id) self.following=list(self.following) db.session.add(self) db.session.commit() def get_following_query(self): return User.query.filter(User.id.in_(self.following or set())) def get_followers_query(self): return User.query.filter(User.id.in_(self.followers or set())) def is_following(self,follower): return follower.id in self.following and self.id in follower.followers # ================================================================ # Class methods @classmethod def authenticate(cls, login, password): user = cls.query.filter(db.or_(User.name == login, User.email == login)).first() if user: authenticated = user.check_password(password) else: authenticated = False return user, authenticated @classmethod def search(cls, keywords): criteria = [] for keyword in keywords.split(): keyword = '%' + keyword + '%' criteria.append(db.or_( User.name.ilike(keyword), User.email.ilike(keyword), )) q = reduce(db.and_, criteria) return cls.query.filter(q) @classmethod def get_by_id(cls, user_id): return cls.query.filter_by(id=user_id).first_or_404() def check_name(self, name): return User.query.filter(db.and_(User.name == name, User.email != self.id)).count() == 0
[ "sn.ahsanali@gmail.com" ]
sn.ahsanali@gmail.com
8b7c1e9595c69f9cb01a360410fb8b73bece66ba
3abe45130d4f614f68c6551b59014a20d3470b58
/qa/rpc-tests/wallet.py
65bf8b546566b07fe3274bde41ad06a30adb66e9
[ "MIT" ]
permissive
dre060/YAADI
faab94150263848ef16fe6a865cff7d2a7893e00
cdb07c723f559ce883e33d64bce55b6ee5539142
refs/heads/main
2023-05-17T15:01:43.672809
2021-06-06T04:23:41
2021-06-06T04:23:41
374,243,648
0
0
null
null
null
null
UTF-8
Python
false
false
3,942
py
#!/usr/bin/env python2 # Copyright (c) 2014 The Bitcoin Core developers # Distributed under the MIT/X11 software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Exercise the wallet. Ported from wallet.sh. # Does the following: # a) creates 3 nodes, with an empty chain (no blocks). # b) node0 mines a block # c) node1 mines 32 blocks, so now node 0 has 60001eca, node 1 has 4250eca, node2 has none. # d) node0 sends 601 yaadi to node2, in two transactions (301 yaadi, then 300 yaadi). # e) node0 mines a block, collects the fee on the second transaction # f) node1 mines 16 blocks, to mature node0's just-mined block # g) check that node0 has 100-21, node2 has 21 # h) node0 should now have 2 unspent outputs; send these to node2 via raw tx broadcast by node1 # i) have node1 mine a block # j) check balances - node0 should have 0, node2 should have 100 # from test_framework import BitcoinTestFramework from util import * class WalletTest (BitcoinTestFramework): def setup_chain(self): print("Initializing test directory "+self.options.tmpdir) initialize_chain_clean(self.options.tmpdir, 3) def setup_network(self, split=False): self.nodes = start_nodes(3, self.options.tmpdir) connect_nodes_bi(self.nodes,0,1) connect_nodes_bi(self.nodes,1,2) connect_nodes_bi(self.nodes,0,2) self.is_network_split=False self.sync_all() def run_test (self): print "Mining blocks..." self.nodes[0].setgenerate(True, 1) self.sync_all() self.nodes[1].setgenerate(True, 32) self.sync_all() assert_equal(self.nodes[0].getbalance(), 60001) assert_equal(self.nodes[1].getbalance(), 4250) assert_equal(self.nodes[2].getbalance(), 0) # Send 601 BTC from 0 to 2 using sendtoaddress call. # Second transaction will be child of first, and will require a fee self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 351) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 350) # Have node0 mine a block, thus he will collect his own fee. self.nodes[0].setgenerate(True, 1) self.sync_all() # Have node1 generate 100 blocks (so node0 can recover the fee) self.nodes[1].setgenerate(True, 16) self.sync_all() # node0 should end up with 100 btc in block rewards plus fees, but # minus the 21 plus fees sent to node2 assert_greater_than(self.nodes[0].getbalance(), 59549) assert_equal(self.nodes[2].getbalance(), 701) # Node0 should have two unspent outputs. # Create a couple of transactions to send them to node2, submit them through # node1, and make sure both node0 and node2 pick them up properly: node0utxos = self.nodes[0].listunspent(1) assert_equal(len(node0utxos), 2) # create both transactions txns_to_send = [] for utxo in node0utxos: inputs = [] outputs = {} inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]}) outputs[self.nodes[2].getnewaddress("from1")] = utxo["amount"] raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx)) # Have node 1 (miner) send the transactions self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True) self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True) # Have node1 mine a block to confirm transactions: self.nodes[1].setgenerate(True, 1) self.sync_all() assert_equal(self.nodes[0].getbalance(), 0) assert_greater_than(self.nodes[2].getbalance(), 60250) assert_greater_than(self.nodes[2].getbalance("from1"), 59549) if __name__ == '__main__': WalletTest ().main ()
[ "ipedrero84@gmail.com" ]
ipedrero84@gmail.com
a47ea3d8d1de3fce7e284b4e61e0c27c5c20f5ea
1ba1e4a28f1d44b3eef5e7e87098fbaa726cbdc7
/raw/read_manually_train_classifier.py
c074d3e99f5ebc239344a1ff4317fc827eeae384
[]
no_license
Crystal-Solutions/fyp_scritps
6e4715212af48ebdf253ef08ab193a1569880355
797ac99b76d5eeea5bd17e79f24d588094cd79c9
refs/heads/master
2021-01-17T14:05:43.036576
2018-02-14T01:04:37
2018-02-14T01:04:37
83,446,900
0
0
null
null
null
null
UTF-8
Python
false
false
2,851
py
# -*- coding: utf-8 -*- """ Created on Mon May 8 15:19:03 2017 Reads files from /boi_pos_data and write the into @author: Janaka """ SOURCE_DIR = '../boi_pos_data/' import os import nltk def map_to_tagged_sentences(fileContent): fileContent = fileContent.strip("\n") return [[tuple(line.split()) for line in sentence.split("\n")] for sentence in fileContent.split("\n\n")] def untag_sentences(taggedSentences): return [[(w,p) for (w,p,t) in sent] for sent in taggedSentences] #Tagger Model class ConsecutiveNPChunkTagger(nltk.TaggerI): def __init__(self, train_sents): train_set = [] for tagged_sent in train_sents: untagged_sent = nltk.tag.untag(tagged_sent) history = [] for i, (word, tag) in enumerate(tagged_sent): featureset = npchunk_features(untagged_sent, i, history) train_set.append( (featureset, tag) ) history.append(tag) self.classifier = nltk.MaxentClassifier.train( #train_set, algorithm='megam', trace=0) train_set, trace=0) def tag(self, sentence): history = [] for i, word in enumerate(sentence): featureset = npchunk_features(sentence, i, history) tag = self.classifier.classify(featureset) history.append(tag) return zip(sentence, history) class ConsecutiveNPChunker(nltk.ChunkParserI): def __init__(self, train_sents): print("ab") tagged_sents = [[((w,p),t) for (w,p,t) in sent] for sent in train_sents] print("cd") self.tagger = ConsecutiveNPChunkTagger(tagged_sents) def parse(self, sentence): tagged_sents = self.tagger.tag(sentence) conlltags = [(w,t,c) for ((w,t),c) in tagged_sents] return nltk.chunk.conlltags2tree(conlltags) def npchunk_features(sentence, i, history): word, pos = sentence[i] #return {"pos": pos} if i == 0: prevword, prevpos = "<START>", "<START>" else: prevword, prevpos = sentence[i-1] #return {"pos": pos, "word": word, "prevpos": prevpos} if i == len(sentence)-1: nextword, nextpos = "<END>", "<END>" else: nextword, nextpos = sentence[i+1] return {"pos": pos, "prevpos": prevpos, "nextpos": nextpos} #File by file process for file in os.listdir(SOURCE_DIR): if file.endswith(".txt"): filePath = os.path.join(SOURCE_DIR, file) print("Processing: "+filePath) f = open(filePath) taggedSentences = map_to_tagged_sentences(f.read()) untaggedSentences = untag_sentences(taggedSentences) chunker = ConsecutiveNPChunker(taggedSentences) parsed = chunker.parse(untaggedSentences[0]) break print(file)
[ "bjchathuranga@gmail.com" ]
bjchathuranga@gmail.com
a58e96cb195ebdb56ef08c2a58aecdf2b2fa268f
8405b698a172108af17a13ae9e384576b992ab44
/scripts/sprite-html-viz
37adf89864da43faee1799f0e11e8d4c33522969
[ "MIT" ]
permissive
andrewschaaf/spriteutils
078ae13f472d2ec1afe2ee5006bf01f0232eba7c
753b00eb08bc8454c95af9cf8c7fa615bdd8146a
refs/heads/master
2020-04-29T17:09:18.373551
2010-11-16T16:42:50
2010-11-16T16:42:50
null
0
0
null
null
null
null
UTF-8
Python
false
false
344
#!/usr/bin/env python

#### Add impl to PYTHONPATH
import os, sys
def parentOf(path, n=1):
    return '/'.join(path.rstrip('/').split('/')[:-n])
REPO = parentOf(os.path.abspath(__file__), n=2)
sys.path.append('%s/impl' % REPO)

#### Main
if __name__ == '__main__':
    from spriteutils import main, spriteHtmlViz
    main(spriteHtmlViz)
[ "andrew@andrewschaaf.com" ]
andrew@andrewschaaf.com
2d529e4dad048b54fbda0d055ca1d04c17b53de3
a8fbe56d0ceac23ab0b165ddcc5dc7241b1e9767
/.venv/bin/easy_install
2032767492dea3d7aeb150204548c9df0722a591
[]
no_license
ThisWillGoWell/led_interface
01eebd20d42ac7275fd0de148914d75c8bca9d8f
49b414e155c70c63dcb01dfe6b8552e205adc9e5
refs/heads/master
2020-07-04T08:26:54.712480
2019-08-13T20:48:53
2019-08-13T20:48:53
202,221,536
0
0
null
null
null
null
UTF-8
Python
false
false
281
#!/Users/wggowell/workspace/mcu/wall_controller/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
[ "wggowell@justin.tv" ]
wggowell@justin.tv
35c792e078f9037cf38a3a3bd992d3b7bee00e0d
de17634e6b149d5828c1c78f7f5f5e1f6c17c4d0
/nnvm/amalgamation/amalgamation.py
310daa9d68e0e2cd33876364a3e4533f23cc45b5
[ "Apache-2.0", "BSD-2-Clause", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
starimpact/mxnet_v1.0.0
e135cc9e4c2711314d03cf1281a72b755f53144e
fcd6f7398ef811c3f8b01e7c9c16fb25c8d202bd
refs/heads/bv1.0.0
2022-11-10T09:09:11.966942
2018-07-13T04:59:30
2018-07-13T04:59:30
120,399,107
8
4
Apache-2.0
2022-11-02T20:24:32
2018-02-06T03:54:35
C++
UTF-8
Python
false
false
2,628
py
import sys import os.path, re, StringIO blacklist = [ 'Windows.h', 'mach/clock.h', 'mach/mach.h', 'malloc.h', 'glog/logging.h', 'io/azure_filesys.h', 'io/hdfs_filesys.h', 'io/s3_filesys.h', 'sys/stat.h', 'sys/types.h', 'omp.h', 'execinfo.h', 'packet/sse-inl.h' ] def get_sources(def_file): sources = [] files = [] visited = set() mxnet_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir)) for line in open(def_file): files = files + line.strip().split(' ') for f in files: f = f.strip() if not f or f.endswith('.o:') or f == '\\': continue fn = os.path.relpath(f) if os.path.abspath(f).startswith(mxnet_path) and fn not in visited: sources.append(fn) visited.add(fn) return sources sources = get_sources(sys.argv[1]) def find_source(name, start): candidates = [] for x in sources: if x == name or x.endswith('/' + name): candidates.append(x) if not candidates: return '' if len(candidates) == 1: return candidates[0] for x in candidates: if x.split('/')[1] == start.split('/')[1]: return x return '' re1 = re.compile('<([./a-zA-Z0-9_-]*)>') re2 = re.compile('"([./a-zA-Z0-9_-]*)"') sysheaders = [] history = set([]) out = StringIO.StringIO() def expand(x, pending): if x in history and x not in ['mshadow/mshadow/expr_scalar-inl.h']: # MULTIPLE includes return if x in pending: #print 'loop found: %s in ' % x, pending return print >>out, "//===== EXPANDING: %s =====\n" %x for line in open(x): if line.find('#include') < 0: out.write(line) continue if line.strip().find('#include') > 0: print line continue m = re1.search(line) if not m: m = re2.search(line) if not m: print line + ' not found' continue h = m.groups()[0].strip('./') source = find_source(h, x) if not source: if (h not in blacklist and h not in sysheaders and 'mkl' not in h and 'nnpack' not in h): sysheaders.append(h) else: expand(source, pending + [x]) print >>out, "//===== EXPANDED: %s =====\n" %x history.add(x) expand(sys.argv[2], []) f = open(sys.argv[3], 'wb') for k in sorted(sysheaders): print >>f, "#include <%s>" % k print >>f, '' print >>f, out.getvalue() for x in sources: if x not in history and not x.endswith('.o'): print 'Not processed:', x
[ "mingzhang@deepglint.com" ]
mingzhang@deepglint.com
74413c9cb86a61bf2d60e97492d5141b19cea5da
514ddee0e3aeaf148226d89b2294f5cc84abca27
/src/coecms/cli/um.py
d96e211b69026008377d00f527e053f060459c7a
[ "Apache-2.0" ]
permissive
coecms/coecms-util
c60edab08ffa0f1c2af9188f671eea6db1801a64
a9ca18af3ea1a2ef06212acefc840fe0448661e9
refs/heads/master
2020-03-24T20:12:25.769470
2019-05-14T06:47:45
2019-05-14T06:47:45
142,965,222
2
2
Apache-2.0
2019-05-24T01:54:43
2018-07-31T05:12:21
Python
UTF-8
Python
false
false
3,512
py
#!/usr/bin/env python # # Copyright 2019 Scott Wales # # Author: Scott Wales <scott.wales@unimelb.edu.au> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .main import cli from ..grid import UMGrid from ..regrid import regrid, esmf_generate_weights from ..um.create_ancillary import create_surface_ancillary import click import pandas import mule import iris import xarray from dask.diagnostics import ProgressBar import dask.distributed import matplotlib.pyplot as plt @cli.group() def um(): """ Tools for working with the Unified Model """ pass @um.group() def ancil(): """ Tools for working with ancil files """ pass def validate_date(ctx, param, value): """ Ensures an argument is a valid date """ try: return pandas.to_datetime(value, utc=True, dayfirst=True) except ValueError: raise click.BadParameter(f'unable to parse "{value}" as a date') def validate_um_ancil(ctx, param, value): """ Ensures an argument is a UM file """ try: return mule.AncilFile.from_file(value) except: raise click.BadParameter(f'"{value}" does not seem to be a UM ancil file') @ancil.command() @click.option('--start-date', callback=validate_date, required=True) @click.option('--end-date', callback=validate_date, required=True) @click.option('--target-mask', type=click.Path(exists=True, dir_okay=False)) @click.option('--output', required=True, type=click.Path(writable=True, dir_okay=False)) def era_sst(start_date, end_date, target_mask, output): """ Create ancil files from ERA reanalysis data """ um_grid = UMGrid.from_mask(target_mask) file_start = start_date - pandas.offsets.MonthBegin() file_end = end_date + pandas.offsets.MonthEnd() file_a = pandas.date_range(file_start,file_end,freq='MS') file_b = file_a + pandas.offsets.MonthEnd() dates = [f'{a.strftime("%Y%m%d")}_{b.strftime("%Y%m%d")}' for a,b in zip(file_a, file_b)] # Read and slice the source data tos = xarray.open_mfdataset(['/g/data1a/ub4/erai/netcdf/6hr/ocean/' 'oper_an_sfc/v01/tos/' 'tos_6hrs_ERAI_historical_an-sfc_'+d+'.nc' for d in dates], chunks={'time': 1,}) sic = xarray.open_mfdataset(['/g/data1a/ub4/erai/netcdf/6hr/seaIce/' 'oper_an_sfc/v01/sic/' 'sic_6hrs_ERAI_historical_an-sfc_'+d+'.nc' for d in dates], chunks={'time': 1,}) ds = xarray.Dataset({'tos': tos.tos, 'sic': sic.sic}) ds = ds.sel(time=slice(start_date, end_date)) weights = esmf_generate_weights(tos.tos.isel(time=0), um_grid, method='patch') newds = regrid(ds, weights=weights) print(newds) ancil = create_surface_ancillary(newds, {'tos': 24, 'sic': 31}) ancil.to_file(output)
[ "noreply@github.com" ]
coecms.noreply@github.com
93ce51a7f3ee25a6642f935e6ea6f88806f2e41b
e1c4f89bb2506d2f812fbff7a46c3ac367be17fc
/Collections集合模块.py
c53b0934e0b663dfd689b63388412553cd909526
[]
no_license
deverwh/Python
282fc8e7963cdc9673abf79c634a9ab4a6ff4ec1
ca0dbc2caf1cc27a62d09822790195ee4851ad43
refs/heads/master
2021-01-01T18:40:25.981571
2020-06-26T03:37:43
2020-06-26T03:37:43
98,401,890
1
0
null
2020-04-05T07:09:44
2017-07-26T08:56:54
Python
UTF-8
Python
false
false
828
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# namedtuple() creates a lightweight tuple subclass with named fields
from collections import namedtuple

Point = namedtuple('Point', ['x', 'y'])
p = Point(101, 2)
print p.x
print p.y

# deque: a double-ended queue with fast appends on both ends
from collections import deque

q = deque(['a', 'b', 'c'])
q.append('x')
q.appendleft('y')
print q

# defaultdict: returns a default value when a key is missing
from collections import defaultdict

d = defaultdict(lambda : 'N/A')
d['key1'] = 'abc'
print d['key1']
print d['key2']

# OrderedDict: a dict that remembers the order keys were inserted
from collections import OrderedDict

od = OrderedDict()
od['oz'] = 2
od['oc'] = 3
od['oa'] = 1
print od.keys()

# Counter: a simple counter
from collections import Counter

c = Counter()
for ch in 'programming':
    c[ch] += 1
print c
[ "hey-xiaohao@163.com" ]
hey-xiaohao@163.com
8d4459e01333fe863b683d012fbcd3cf3b266c9e
ade7fd8afc93c5198f218a3f6736cdd198801aea
/methods.py
1d68fbdfb4d56d62007012886977b0e4c682db63
[]
no_license
ksdivesh/python-bone
74a59da49d2cd97a56d90525b9e24b87bcd44f32
98af99c5ad83a776f88a016e8e553712910e9eee
refs/heads/main
2023-03-07T18:54:49.419974
2021-02-19T07:18:50
2021-02-19T07:18:50
339,679,959
0
0
null
null
null
null
UTF-8
Python
false
false
746
py
# def func1(): # print("Function 1 running") # def func2(): # pass # def func3(): # print('this is function 3') # return "A" # val = func3() # print(val) # func1() # print(func3()) ''' Python: Functional Prograaming, Object Oriented Programming Java : Object Oriented only class MyClass{ func1(){ ..... } func2(){ .... } } MyClass classA = new MyClass() classA.func1(); ''' def mul(a, b): return str(a*b) def sum(a, b): return a + b a = 10 b = 20 val = int(mul(a, b)) + sum(a, b) # def sum1(a, b): # print(a+b) # val = sum(20, 20) + sum(10, 10) # print(val) # print(sum(10, 20)) # val = sum(10, 20) # print(val)
[ "69233185+devopsit51@users.noreply.github.com" ]
69233185+devopsit51@users.noreply.github.com
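The comment block in methods.py above contrasts Python, which allows both free functions and classes, with Java, where everything must live inside a class, and sketches the Java side in pseudo-code. For concreteness, here is a small, purely illustrative Python sketch of the same two helpers wrapped in a class; the class name and the usage at the bottom are invented for this example and are not part of the original exercise.

class Calculator:
    """Object-oriented wrapper around the same operations methods.py
    defines as free functions (names here are illustrative only)."""

    def mul(self, a, b):
        # multiply and return the result as a string, like the original mul()
        return str(a * b)

    def sum(self, a, b):
        return a + b


calc = Calculator()
# 10 * 20 = 200 (string converted back to int) plus 10 + 20 = 30, prints 230
print(int(calc.mul(10, 20)) + calc.sum(10, 20))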
d0341b5b76435c5b945f4765e242e3f78364c178
5b4312ddc24f29538dce0444b7be81e17191c005
/autoware.ai/1.12.0_cuda/build/op_local_planner/catkin_generated/generate_cached_setup.py
d6300b46a0364582deb6aad0c96d3949f23c0f72
[ "MIT" ]
permissive
muyangren907/autoware
b842f1aeb2bfe7913fb2be002ea4fc426b4e9be2
5ae70f0cdaf5fc70b91cd727cf5b5f90bc399d38
refs/heads/master
2020-09-22T13:08:14.237380
2019-12-03T07:12:49
2019-12-03T07:12:49
225,167,473
0
0
null
null
null
null
UTF-8
Python
false
false
2,662
py
# -*- coding: utf-8 -*- from __future__ import print_function import argparse import os import stat import sys # find the import for catkin's python package - either from source space or from an installed underlay if os.path.exists(os.path.join('/opt/ros/melodic/share/catkin/cmake', 'catkinConfig.cmake.in')): sys.path.insert(0, os.path.join('/opt/ros/melodic/share/catkin/cmake', '..', 'python')) try: from catkin.environment_cache import generate_environment_script except ImportError: # search for catkin package in all workspaces and prepend to path for workspace in "/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/op_ros_helpers;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/op_simu;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/op_planner;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/op_utility;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/waypoint_follower;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/vector_map_server;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/map_file;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/vector_map;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/autoware_health_checker;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/amathutils_lib;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/vector_map_msgs;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/tablet_socket_msgs;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/autoware_system_msgs;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/autoware_msgs;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/autoware_config_msgs;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/autoware_can_msgs;/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/install/autoware_build_flags;/opt/ros/melodic".split(';'): python_path = os.path.join(workspace, 'lib/python2.7/dist-packages') if os.path.isdir(os.path.join(python_path, 'catkin')): sys.path.insert(0, python_path) break from catkin.environment_cache import generate_environment_script code = generate_environment_script('/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/build/op_local_planner/devel/env.sh') output_filename = '/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/build/op_local_planner/catkin_generated/setup_cached.sh' with open(output_filename, 'w') as f: #print('Generate script for cached setup "%s"' % output_filename) f.write('\n'.join(code)) mode = os.stat(output_filename).st_mode os.chmod(output_filename, mode | stat.S_IXUSR)
[ "907097904@qq.com" ]
907097904@qq.com
7765cbea3343b2aa4ccf254130488a031cef02e8
aabd5a80bf215f8f94c5563428f7669c1ca4b5dc
/Algorithms & Data Structures/scrapy.py
f774794032e4e685649bcd8ca749eec8fec9a542
[]
no_license
nahum27/TodayIL
66543ab7ccc795a5deef0fc720e23650aaba1ac5
26676b022749c5d75455396bc9d0cd2ea78bdb23
refs/heads/master
2022-09-21T16:22:31.536788
2020-06-02T17:50:04
2020-06-02T17:50:04
222,728,658
0
0
null
null
null
null
UTF-8
Python
false
false
330
py
# -*- coding: utf-8 -*-
"""
Created on Wed May 27 03:05:28 2020

@author: Geo
"""

import dryscrape

# headless JavaScript session (created but not used further in this script)
sess = dryscrape.Session(base_url = 'http://google.com')

from requests_html import HTMLSession

session = HTMLSession()
r = session.get("https://news.naver.com/")
r.html.render()   # execute the page's JavaScript before inspecting it
r.close()         # close the response once rendering is done

# attributes inspected interactively in the original console session
r.headers
r.text
r.url
r.request
[ "nahum27@naver.com" ]
nahum27@naver.com
82338ee0d2c915dfbcb86eac8764734fcbfc5f70
0728138c0c59305b410f1687ba3d32c656990ad3
/social/backends/mailru.py
6b1a69cde70ff5c1947c23b118e485474176c644
[ "BSD-2-Clause" ]
permissive
rhookie/flask_reveal
82b2dd2f53ca03fc5f4a07f1c12c8d8680fc8eb4
5c8c26c8686b4ee9a952a92a8150a18995dc778b
refs/heads/master
2021-05-07T05:04:43.887058
2017-10-10T16:52:49
2017-10-10T16:52:49
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,812
py
""" Mail.ru OAuth2 support Take a look to http://api.mail.ru/docs/guides/oauth/ You need to register OAuth site here: http://api.mail.ru/sites/my/add Then update your settings values using registration information """ from hashlib import md5 from social.p3 import unquote from social.backends.oauth import BaseOAuth2 class MailruOAuth2(BaseOAuth2): """Mail.ru authentication backend""" name = 'mailru-oauth2' ID_KEY = 'uid' AUTHORIZATION_URL = 'https://connect.mail.ru/oauth/authorize' ACCESS_TOKEN_URL = 'https://connect.mail.ru/oauth/token' ACCESS_TOKEN_METHOD = 'POST' EXTRA_DATA = [('refresh_token', 'refresh_token'), ('expires_in', 'expires')] def get_user_details(self, response): """Return user details from Mail.ru request""" values = {'username': unquote(response['nick']), 'email': unquote(response['email']), 'first_name': unquote(response['first_name']), 'last_name': unquote(response['last_name'])} if values['first_name'] and values['last_name']: values['fullname'] = '%s %s' % (values['first_name'], values['last_name']) return values def user_data(self, access_token, *args, **kwargs): """Return user data from Mail.ru REST API""" key, secret = self.get_key_and_secret() data = {'method': 'users.getInfo', 'session_key': access_token, 'app_id': key, 'secure': '1'} param_list = sorted(list(item + '=' + data[item] for item in data)) data['sig'] = md5(''.join(param_list) + secret).hexdigest() return self.get_json('http://www.appsmail.ru/platform/api', params=data)
[ "ciici123@hotmail.com" ]
ciici123@hotmail.com
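The docstring of the Mail.ru backend above says to register an OAuth site and then update your settings values with the registration information. As a rough sketch of what that usually means for a python-social-auth style Django project, the settings below follow the common SOCIAL_AUTH_<BACKEND>_KEY / _SECRET naming convention; the exact setting names and the placeholder values are assumptions, not something stated in this file, so check them against the installed library version.

# Hedged sketch of the settings the mailru docstring refers to.
# Setting names assume python-social-auth's usual convention of deriving
# SOCIAL_AUTH_<BACKEND_NAME>_KEY / _SECRET from the backend name
# ('mailru-oauth2'); verify before relying on them.
AUTHENTICATION_BACKENDS = (
    'social.backends.mailru.MailruOAuth2',      # the backend defined above
    'django.contrib.auth.backends.ModelBackend',
)

SOCIAL_AUTH_MAILRU_OAUTH2_KEY = 'app-id-from-api.mail.ru'         # placeholder
SOCIAL_AUTH_MAILRU_OAUTH2_SECRET = 'secret-key-from-api.mail.ru'  # placeholder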
75b238dae80c3e78d28c4d0ddf4ece15336d3a48
1046ba60f1c17f8ea19bb4ebc2092e6857a2db53
/sg2/sibyl/protocol/sibyl_server_udp_text_protocol.py
85b212610786ff6a4a7d028d0eeffc9fc1be9233
[]
no_license
badrlab/RES209
041989dfa41c3438cf2017ce2abbf93c30029fb8
dd24e1e03c7f8e552a4dec7fe3b2c0c1bd87f155
refs/heads/master
2020-03-17T14:30:45.723985
2018-05-16T14:10:33
2018-05-16T14:10:33
133,674,866
0
0
null
null
null
null
UTF-8
Python
false
false
2,242
py
# -*- coding: utf-8 -*- from twisted.internet.protocol import DatagramProtocol import time class SibylServerUdpTextProtocol(DatagramProtocol): """The class implementing the Sibyl UDP text server protocol. .. note:: You must not instantiate this class. This is done by the code called by the main function. .. note:: You have to implement this class. You may add any attribute and method that you see fit to this class. You must implement the following method (called by Twisted whenever it receives a datagram): :py:meth:`~sibyl.main.protocol.sibyl_server_udp_text_protocol.datagramReceived` See the corresponding documentation below. This class has the following attribute: .. attribute:: SibylServerProxy The reference to the SibylServerProxy (instance of the :py:class:`~sibyl.main.sibyl_server_proxy.SibylServerProxy` class). .. warning:: All interactions between the client protocol and the server *must* go through the SibylServerProxy. """ def __init__(self, sibylServerProxy): """The implementation of the UDP server text protocol. Args: sibylServerProxy: the instance of the server proxy. """ self.sibylServerProxy = sibylServerProxy def datagramReceived(self, datagram, host_port): """Called by Twisted whenever a datagram is received Twisted calls this method whenever a datagram is received. Args: datagram (bytes): the payload of the UPD packet; host_port (tuple): the source host and port number. .. warning:: You must implement this method. You must not change the parameters, as Twisted calls it. """ datagram = datagram.decode('utf-8') datagram = ((datagram.split(":"))[1]).split("\r")[0] respons = self.sibylServerProxy.generateResponse(datagram) respons = (str(time.time()) + ": " + str(respons) + "\r\n") self.transport.write(respons.encode('utf-8'), host_port) pass
[ "noreply@github.com" ]
badrlab.noreply@github.com
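The datagramReceived method above takes the text after the first ':' and before the first carriage return, asks the server proxy for a response, and writes back "<timestamp>: <response>\r\n" to the sender. As a quick way to see that round trip, here is a minimal standard-library client sketch; the host, port and question text are illustrative values, not taken from the project.

# Minimal sketch of a datagram that SibylServerUdpTextProtocol.datagramReceived
# above can parse: it splits the payload on ":" and "\r", so the text must look
# like "<timestamp>: <question>\r\n". Host/port and question are illustrative.
import socket
import time

HOST, PORT = "127.0.0.1", 12345   # assumed server address, not from the source

payload = "{}: What time is it?\r\n".format(time.time())

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(payload.encode("utf-8"), (HOST, PORT))
reply, _ = sock.recvfrom(4096)    # server answers "<timestamp>: <response>\r\n"
print(reply.decode("utf-8"))
sock.close()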
22fa40fba9d395c297590455ec753a8a0d34bc8b
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_204/ch47_2020_10_07_01_13_29_631324.py
b28612a06d4817f5f90967044590259cd8f9aa87
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
326
py
def estritamente_crescente(lista):
    # Walks through the list keeping only the elements that are strictly
    # greater than the last element kept, so the returned list is strictly
    # increasing. This reproduces the cases the exercise expects, e.g.
    # [1, 3, 2, 3, 4, 6, 5] -> [1, 3, 4, 6] and [10, 1, 2, 3] -> [10].
    resultado = []
    for elemento in lista:
        if not resultado or elemento > resultado[-1]:
            resultado.append(elemento)
    return resultado
[ "you@example.com" ]
you@example.com
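Assuming the general implementation of estritamente_crescente above is defined or imported in the same module, a quick sanity check can replay exactly the input/output pairs the exercise file originally hard-coded:

# Replays the exercise's original expected cases against the function above.
casos = [
    ([1, 3, 2, 3, 4, 6, 5], [1, 3, 4, 6]),
    ([10, 1, 2, 3], [10]),
    ([10, 15, 11, 12, 13, 14], [10, 15]),
    ([1, 1, 2, 2, 3, 3], [1, 2, 3]),
    ([], []),
]

for entrada, esperado in casos:
    assert estritamente_crescente(entrada) == esperado, (entrada, esperado)
print("all original cases pass")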
3d60407c7351483a74e2205e2dfa3dff29933d77
8203cb5b3086b5ecd71314c89655c15ecfec301b
/Python/Namecheap/reg.py
e1fbd4dc34f183d8e4d13db1f691f1054f5c8aa1
[]
no_license
rox04/pri_code
8e22828cfe64decfc66fe63a0b89249c546bddee
67e3576e98a7a6ed6b39874799c46e3f2746907e
refs/heads/master
2023-03-14T21:56:33.888550
2018-08-27T18:09:19
2018-08-27T18:09:19
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,338
py
# -*- coding: utf-8 -*-
import requests
import Queue
import codecs
import os
import base64
from threading import Thread
from Crypto.Cipher import AES

requests.packages.urllib3.disable_warnings()


def check(q):
    while True:
        try:
            c = q.get()
            user = c.split(':')[0]
            passw = c.split(':')[1]
            work = False
            proxy = {

            }
            s = requests.session()
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.97 Safari/537.36',
                'Accept-Encoding': 'gzip',
                'Accept': 'application/json, text/javascript, */*; q=0.01',
                'X-Requested-With': 'XMLHttpRequest'
            }
            r = s.get(
                'https://www.namecheap.com/Cart/ajax/DomainSelection.ashx?action=checkuser&username={0}'.format(user),
                verify=False,
                headers=headers,
                proxies=proxy
            )
            if 'UserExist' in r.text:
                print user, 'is registered!'
                f = open("registered.txt", "a")
                f.write('{0}\n'.format(c))
                f.close()
            else:
                print user, 'does not work!'
        except Exception, e:
            print e
            raw_input("Please Send Me The Error Message!")
        q.task_done()


def main():
    with codecs.open('tocheck.txt', 'r') as f:
        users = f.readlines()
    with codecs.open('regthreads.txt', 'r') as f:
        threads = f.read()
    queue = Queue.Queue()
    for _ in range(int(threads)):
        worker = Thread(target=check, args=(queue,))
        worker.start()
    for user in users:
        queue.put(user.strip())


if __name__ == '__main__':
    main()
[ "42748676+breitingerchris@users.noreply.github.com" ]
42748676+breitingerchris@users.noreply.github.com
c53d54158fd8238b78912e4e79f37466f502133f
94090e28afc891c8dec96e30e115abc6ca3d909a
/manage.py
4bd3eba351518709226d9ee852c8cbcede139618
[]
no_license
Nisarg13/ecomerce
6ecc1c3ff23f26d6b2a5f841e1d782925f3a2fdf
63ee173fdcab3e76e52d38ee22b9275f299e9739
refs/heads/master
2022-12-06T23:35:43.210015
2020-08-28T20:59:02
2020-08-28T20:59:02
281,759,626
7
0
null
null
null
null
UTF-8
Python
false
false
628
py
#!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys def main(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ecomerce.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
[ "nisargganatra13@gmail.com" ]
nisargganatra13@gmail.com
ea460830ab7db05ca6e58c78cde484f9cead52b2
d9ebfd9952fa5945e0450b5813ee103f124f4029
/ACCOUNT/admin.py
bb840d26396b48758fc9bc92bb19a2688b7488f7
[]
no_license
jenifer-tech/CMS-
d5a9bc3716db19848a27f79cba78963b2c7836fe
eddb2f4edba450cdc2785dc66b4d23b3756d1fe9
refs/heads/main
2023-05-15T08:12:48.312022
2021-06-08T11:22:16
2021-06-08T11:22:16
374,974,411
0
0
null
null
null
null
UTF-8
Python
false
false
143
py
from django.contrib import admin

from account.models import Account

admin.site.register(Account)
[ "noreply@github.com" ]
jenifer-tech.noreply@github.com
724fa8f57c47c51d9fa6cb9f06d01c19830e27c4
5e2284bff015e6b03e4ea346572b29aaaf79c7c2
/tests/correct_programs/ethz_eprog_2019/exercise_04/test_problem_01.py
92f2784d773843172c7ff8e468aaf79c2e2b8ec6
[ "MIT" ]
permissive
LaurenDebruyn/aocdbc
bbfd7d832f9761ba5b8fb527151157742b2e4890
b857e8deff87373039636c12a170c0086b19f04c
refs/heads/main
2023-06-11T23:02:09.825705
2021-07-05T09:26:23
2021-07-05T09:26:23
null
0
0
null
null
null
null
UTF-8
Python
false
false
871
py
import unittest from typing import List import icontract_hypothesis from icontract import require, ensure from correct_programs.ethz_eprog_2019.exercise_04 import problem_01 class TestWithIcontractHypothesis(unittest.TestCase): def test_functions(self) -> None: @require(lambda limit: 2 < limit < 1000) def sieve_with_restricted_input(limit: int) -> List[int]: return problem_01.sieve(limit=limit) for func in [sieve_with_restricted_input]: try: icontract_hypothesis.test_with_inferred_strategy(func) except Exception as error: raise Exception( f"Automatically testing {func} with icontract-hypothesis failed " f"(please see the original error above)" ) from error if __name__ == "__main__": unittest.main()
[ "noreply@github.com" ]
LaurenDebruyn.noreply@github.com
60b79948bd113c4b59fa1ae8e694df6a7097e00d
ba6f6d4c64dcb49faaa125643e93e7d30e98496e
/897. Increasing Order Search Tree.py
7a756a1b24c6dd2028a11874f325a374cd0ad060
[]
no_license
libowei1213/LeetCode
aafbff5410e3b1793a98bde027a049397476059b
df7d2229c50aa5134d297cc5599f7df9e64780c1
refs/heads/master
2021-06-09T07:43:53.242072
2021-04-09T11:14:17
2021-04-09T11:14:17
150,840,162
0
0
null
null
null
null
UTF-8
Python
false
false
1,145
py
# Definition for a binary tree node. class TreeNode(object): def __init__(self, x): self.val = x self.left = None self.right = None class Solution(object): def increasingBST(self, root): """ :type root: TreeNode :rtype: TreeNode """ if not root: return None newTree = TreeNode(0) tree = newTree stack = [] while stack or root: while root: stack.append(root) root = root.left if stack: root = stack.pop(-1) print(root.val) tree.right = TreeNode(root.val) tree = tree.right root = root.right return newTree.right if __name__ == '__main__': root = TreeNode(5) root.left = TreeNode(3) root.right = TreeNode(6) root.left.left = TreeNode(2) root.left.right = TreeNode(4) root.left.left.left = TreeNode(1) root.right.right = TreeNode(8) root.right.right.left = TreeNode(7) root.right.right.right = TreeNode(9) Solution().increasingBST(root)
[ "libowei123123@qq.com" ]
libowei123123@qq.com
e6db939e0e2f41b8c0888ff7175baf6c641ce956
b3d37948d29d0867f6869f2cf7db0b30448e0387
/products/models.py
dae7b254c326614498380efffa5ff0219729a99c
[]
no_license
Code-Institute-Submissions/happyhomeplants
9335947151bd30d14f5366371425c8d2ec333e92
0fa618ace3fd4c0f3b6db57f784630c66e898fc4
refs/heads/master
2023-02-12T07:20:50.961817
2021-01-14T23:11:54
2021-01-14T23:11:54
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,118
py
from django.db import models # Create your models here. class Category(models.Model): class Meta: verbose_name_plural = "Categories" name = models.CharField(max_length=254) friendly_name = models.CharField(max_length=254, null=True, blank=True) def __str__(self): return self.name def get_friendly_name(self): return self.friendly_name class Product(models.Model): category = models.ForeignKey( 'Category', null=True, blank=True, on_delete=models.SET_NULL) name = models.CharField(max_length=254) friendly_name = models.CharField(max_length=254, null=True, blank=True) alternate_name = models.CharField(max_length=254, blank=True) price = models.DecimalField(max_digits=6, decimal_places=2) description = models.TextField() image = models.ImageField(null=True, blank=True) image_url = models.URLField(max_length=1024, null=True, blank=True) height = models.CharField(max_length=4, null=True, blank=True) def __str__(self): return self.name def get_friendly_name(self): return self.friendly_name
[ "olivia.tatum1@hotmail.com" ]
olivia.tatum1@hotmail.com
bb7047e09f1da2f26a09f946bc13a583c154c85e
daa336baa046f367d8ac230a487d2a99718977da
/Test.py
ee099227214b06393e42d5ebd2f1c8d11e9d28ca
[]
no_license
AllenOris/Fraction-Practice-
a3736b93fdd2481d067e92a2ba1a80aaa9bd6045
c8bb40bf2800e0679f1f160c67dac86142eb8a64
refs/heads/master
2020-03-18T17:47:38.860054
2018-05-27T13:55:51
2018-05-27T13:55:51
null
0
0
null
null
null
null
UTF-8
Python
false
false
677
py
# -*- coding: utf-8 -*- """ Created on Fri May 25 20:31:02 2018 @author: ASUS """ import fraction as frac import os print(help(frac)) a=frac.make_frac(3,5) b=frac.make_frac(7,10) print("a=",end='') a.show_frac() print("b=",end='') b.show_frac() print() c=a+b print("a+b=",end='') c.show_frac() print() c=a+1 print("a+1=",end='') c.show_frac() print() c=a-b print("a-b=",end='') c.show_frac() print() c=a*b print("a*b=",end='') c.show_frac() print() c=a/b print("a/b=",end='') c.show_frac() print() c=1/a print("1/a=",end='') c.show_frac() print() c=a**3 print("a**3=",end='') c.show_frac() print() c=a**(-3) print("a**(-3)=",end='') c.show_frac() os.system("pause")
[ "34499426+AllenTaken@users.noreply.github.com" ]
34499426+AllenTaken@users.noreply.github.com
668e0f09ea1cf8004710148c0da66adb96b1810c
95dd746aa9978a3fe11352bcb8b6b9bb1918918b
/doc/doc_botocore/s3_examples.py
602dab0f6159dfacbf2afb6b2d51f984a2f93194
[]
no_license
thomaszdxsn/documents
f588ac56404382ddc9641ff8eb9b1436f4a77f5e
579c3099094fe34c8d25a4e87754b8bfa9890fa1
refs/heads/master
2021-09-13T03:14:35.311600
2018-04-24T11:41:33
2018-04-24T11:41:33
106,917,320
5
4
null
null
null
null
UTF-8
Python
false
false
1,826
py
import botocore.session import botocore.config cfg = botocore.config.Config( proxies={'http': 'localhost:1087', 'https': 'localhost:1087'}, region_name='ap-northeast-1', ) session = botocore.session.get_session() client = session.create_client('s3', config=cfg) def test_list_buckets(): result = client.list_buckets() return result def test_head_bucket(): result = client.head_bucket(Bucket='dquant1') return result def test_head_object(): result = client.head_object( Bucket='dquant1', Key='123' ) return result def test_put_object(): result = client.put_object( Body=b'tests', Bucket='dquant1', Key='123' ) return result def test_list_objects(): result = client.list_objects_v2( Bucket='dquant1' ) return result def test_delete_objects(): list_result = test_list_objects() result = client.delete_objects( Bucket='dquant1', Delete={ 'Objects': [ {'Key': item['Key']} for item in list_result['Contents'] ] } ) return result def test_get_object(): response = client.get_object( Bucket='dquant1', Key='bitfinex_depth/xmrusd/2018-03-19/part2.csv.gz' ) with open('test2.csv.gz', 'wb') as f: f.write(response['Body'].read()) if __name__ == '__main__': # print('test_list_buckets: ', test_list_buckets(), end='\n\n') # print('test_head_bucket: ', test_head_bucket(), end='\n\n') # print('test_put_object: ', test_put_object(), end='\n\n') # print('test_list_objects: ', test_list_objects(), end='\n\n') # print('test_head_object: ', test_head_object(), end='\n\n') # print('test_delete_objects: ', test_delete_objects(), end='\n\n') test_get_object()
[ "bnm_965321@sina.com" ]
bnm_965321@sina.com
89d9689620e4473459bf4e9f98d76232622ea3b7
7aad0c6f6e578d8dc03682caae373d252328ce12
/linuxFriends/wsgi.py
83e863cee4d76a6fe3c98f46ed0e6939c2eef947
[]
no_license
votricetanyi/linuxfriends
db00544a04bed1cb99a3fe275433d6278e029bb9
f36c7f87f51ee1f585c8da21de08a874582dd51f
refs/heads/main
2022-12-28T20:14:11.053726
2020-10-14T13:05:12
2020-10-14T13:05:12
304,015,872
0
0
null
null
null
null
UTF-8
Python
false
false
401
py
""" WSGI config for linuxFriends project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'linuxFriends.settings') application = get_wsgi_application()
[ "lug.limbe@gmail.com" ]
lug.limbe@gmail.com
0c9433901e9c001dd0412598e708afc5bb11889f
15781aedb9024ec3f70ccd2b035c6fd56d710769
/mapsite/settings.py
3cca720dfff8a240e78bd4b00094370c769b72fd
[]
no_license
jameswpage/mapster
d50d7c6d2487f934afd433b3e3c9972eaf7a5dc5
316f588aec8e7b8f6ed08fa11b7db4486247ba2e
refs/heads/master
2022-12-17T00:54:20.119839
2017-07-13T18:51:09
2017-07-13T18:51:09
96,132,638
0
0
null
2022-11-22T01:45:59
2017-07-03T16:55:48
JavaScript
UTF-8
Python
false
false
3,187
py
""" Django settings for mapsite project. Generated by 'django-admin startproject' using Django 1.11.1. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'g)#p7%%8b_3zly1^qj&o@0fjx-21oql0dohv@!3vdh1aqv-601' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'map.apps.MapConfig', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'mapsite.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'mapsite.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'America/New_York' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ #STATIC_ROOT = os.path.join(BASE_DIR, 'static') STATIC_URL = '/static/'
[ "jwp2126@columbia.edu" ]
jwp2126@columbia.edu
9b646f760eaca8fdbfbe0c56894dbf74c08f5264
9920f3b2ccc9abc3cd8b46c433bd49a8d8db22d2
/scripts/__init__.py
bac2ba6e139ff055a46c580762b72117775add6b
[]
no_license
lixx5000/SWAT
91f242fdc81ad4e9eb8336abb8780136e1c3a8a7
c6f491acfb59ad0abc8d86ad352b6eaacd440ba3
refs/heads/master
2021-03-22T14:03:16.105253
2019-07-01T12:05:06
2019-07-01T12:05:06
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,216
py
#! /usr/bin/env python # -*- coding: utf-8 -*- """ /***************************************************************************** PUT-SWAT Python Utility Tools for SWAT Preprocess, postprocess, and calibration ------------------- author : Liangjun Zhu copyright : (C) 2017 Lreis, IGSNRR, CAS email : zlj@lreis.ac.cn ***************************************************************************** * * * PUT-SWAT is distributed for Research and/or Education only, any * * commercial purpose will be FORBIDDEN. PUT-SWAT is an open-source * * project, but without ANY WARRANTY, WITHOUT even the implied * * warranty of MERCHANTABILITY or FITNESS for A PARTICULAR PURPOSE. * * See the GNU General Public License for more details. * * * ****************************************************************************/ """ __all__ = ["preprocess", "postprocess", "calibration", "nogit"]
[ "crazyzlj@gmail.com" ]
crazyzlj@gmail.com
48e9f0ea3cd43d3ec2a9ca4f863703823e7e0e83
ca37265079432d8c9b6ad1171a40a7739ca0c738
/src/haskell/dm/tools/plot_tuning.py
ca6d2c011b3edfcf8b76deaa93b03a37013969de
[]
no_license
steven-le-thien/dm
e2749226f790fb52dee36db42a2c1541aa7addd9
4dc692a4952fa67f521ff7b6d174bf062984e259
refs/heads/master
2023-01-23T17:12:09.211693
2020-12-04T13:21:40
2020-12-04T13:21:40
304,448,212
1
0
null
null
null
null
UTF-8
Python
false
false
571
py
import numpy as np import matplotlib.pyplot as plt thresh = np.array([0.01, 0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.75,0.8,0.825,0.85,0.9,1,1.1]) error = np.array([0.991960, 0.985930,0.968844,0.970854,0.958794,0.959799,0.958794,0.946734,0.950754,0.942714,0.944724,0.944724,0.959799,0.952764,0.953769]) error_inc = np.ones(len(thresh)) * 0.912563 plt.plot(thresh, error,thresh, error_inc) plt.xlabel('Threshold for deterministic dm') plt.ylabel('FN error') plt.legend(['deterministic dm', 'inc']) plt.savefig('dna_tuning.png', dpi=300, bbox_inches='tight') plt.show()
[ "thienle@dhcp-10-29-216-161.dyn.MIT.EDU" ]
thienle@dhcp-10-29-216-161.dyn.MIT.EDU
6b0e4fbae435a5e337adea336a5db3ee142e3dd8
3906aadf098f29cc6a7b11497b7ad8cd33c8c70f
/Project (Computer Networks)/gini-master/frontend/src/gbuilder/Network/gclient.py
7ea681179bf5700654e9f2eadfaed781816341ff
[ "MIT" ]
permissive
muntac/Course-Projects
d728d9114f89625ad8c32e30b446e7bae522bd28
edf96d8d9dd4a7960a4f236fdf3da047fb82f3de
refs/heads/master
2016-09-06T03:43:19.206628
2014-03-11T04:24:50
2014-03-11T04:24:50
17,427,462
1
0
null
null
null
null
UTF-8
Python
false
false
8,053
py
from PyQt4 import QtNetwork, QtCore import os, sys, time from Core.globals import environ, mainWidgets class Client(QtCore.QThread): def __init__(self, parent = None): QtCore.QThread.__init__(self) self.tcpSocket = QtNetwork.QTcpSocket(parent) self.connected = False self.leftovers = "" self.readlength = 0 self.connecting = False if not parent: return parent.connect(self.tcpSocket, QtCore.SIGNAL("readyRead()"), self.read) parent.connect(self.tcpSocket, QtCore.SIGNAL("connected()"), self.setConnected) parent.connect(self.tcpSocket, QtCore.SIGNAL("error(QAbstractSocket::SocketError)"), self.displayError) global client client = self def isReady(self): return self.tcpSocket.bytesToWrite() == 0 def connectTo(self, ip, port, attempts=1): connected = False tries = 0 self.connecting = True while not connected and tries != attempts: self.tcpSocket.abort() self.tcpSocket.connectToHost(ip, port) connected = self.tcpSocket.waitForConnected(1500) tries += 1 self.connecting = False print "-- gclient output --" def isConnected(self): return self.connected def setConnected(self): self.connected = True def displayError(self, socketError): if self.connecting: return main = mainWidgets["main"] if main.isRunning(): main.setRecovery(True) mainWidgets["log"].append("The connection was lost while a topology was running.\nYou can attempt to re-establish the connection by restarting the server. You can then press run to resume the previous running topology, or stop to stop it.") mainWidgets["canvas"].scene().pauseRefresh() if socketError == QtNetwork.QAbstractSocket.RemoteHostClosedError: print "Lost connection to server." elif socketError == QtNetwork.QAbstractSocket.HostNotFoundError: print "The host was not found. Please check the host name and port settings." elif socketError == QtNetwork.QAbstractSocket.ConnectionRefusedError: print "The connection was refused by the peer. Make sure the server is running," print "and check that the host name and port settings are correct." else: print "The following error occurred: %s." % self.tcpSocket.errorString() self.connected = False self.terminate() def read(self): instring = self.waitForMessage(str(self.tcpSocket.readAll())) if instring: self.process(instring) def waitForMessage(self, instring): instring = self.leftovers + instring if not self.readlength and instring.find(" ") == -1: self.leftovers = instring return else: if not self.readlength: length, buf = instring.split(" ", 1) self.readlength = int(length) else: buf = instring if len(buf) < self.readlength: self.leftovers = buf return else: self.leftovers = buf[self.readlength:] instring = buf[:self.readlength] self.readlength = 0 return instring def process(self, instring): if not instring: return args = "" instring = str(instring) index = instring.find(" ") if index != -1: commandType, args = instring.split(" ", 1) else: commandType = instring try: command = Command.create(commandType, args) command.execute() except Exception, inst: print type(inst) print inst.args print "invalid command" print commandType, args self.process(self.waitForMessage("")) def send(self, text): length = str(len(text)) self.tcpSocket.writeData(length + " " + text) def disconnect(self): self.tcpSocket.disconnectFromHost() def run(self): while not self.isConnected(): time.sleep(1) print "connected!" 
text = raw_input("gclient> ") while text != "exit": self.process(text) text = raw_input("gclient> ") self.disconnect() """ class ShellStarter(QtCore.QThread): def __init__(self, command): QtCore.QThread.__init__(self) self.command = str(command) self.started = -1 def startStatus(self): return self.started def run(self): self.started = 0 os.system(self.command) self.started = 1 """ class Callable: def __init__(self, anycallable): self.__call__ = anycallable class Command: def __init__(self, args): global client self.args = args self.client = client def isolateFilename(self, path): return path.split("/")[-1].split("\\")[-1] def create(commandType, args): return commands[commandType](args) create = Callable(create) class ReceivePathCommand(Command): def execute(self): print "setting remote path to " + self.args environ["remotepath"] = self.args + "/" class SendFileCommand(Command): def execute(self): targetDir, path = self.args.split(" ", 1) filename = self.isolateFilename(path) print "sending file " + filename infile = open(path, "rb") self.client.send("file " + targetDir + "/" + filename + " " + infile.read()) infile.close() class SendStartCommand(Command): def execute(self): filename = self.isolateFilename(self.args) print "sending start " + filename self.client.send("start " + filename) class SendStopCommand(Command): def execute(self): print "sending stop" self.client.send("stop") class SendKillCommand(Command): def execute(self): print "killing " + self.args self.client.send("kill " + self.args) class ReceiveDeviceStatusCommand(Command): def execute(self): scene = mainWidgets["canvas"].scene() tm = mainWidgets["tm"] device, pid, status = self.args.split(" ", 2) name = device if device.find("WAP") == 0: name = "Wireless_access_point_" + device.split("_")[-1] item = scene.findItem(name) if item is not None: item.setStatus(status) tm.update(device, pid, status) class ReceiveWirelessStatsCommand(Command): def execute(self): name, stats = self.args.split(" ", 1) scene = mainWidgets["canvas"].scene() scene.findItem(name).setWirelessStats(stats) class ReceiveRouterStatsCommand(Command): def execute(self): name, queue, size, rate = self.args.split(" ", 3) scene = mainWidgets["canvas"].scene() scene.findItem(name).setRouterStats(queue, size, rate) class ReceiveWiresharkCaptureCommand(Command): def execute(self): name, capture = self.args.split(" ", 1) outfile = environ["tmp"] + name + ".out" fd = open(outfile, "ab") fd.write(capture) fd.close() commands = \ { "start":SendStartCommand, "stop":SendStopCommand, "path":ReceivePathCommand, "file":SendFileCommand, "status":ReceiveDeviceStatusCommand, "kill":SendKillCommand, "wstats":ReceiveWirelessStatsCommand, "rstats":ReceiveRouterStatsCommand, "wshark":ReceiveWiresharkCaptureCommand } client = None if __name__ == "__main__": app = QtCore.QCoreApplication(sys.argv) client.connectTo("localhost", 9000) text = raw_input("gclient> ") while text: client.send(text) text = raw_input("gclient> ")
[ "muntasirc@ymail.com" ]
muntasirc@ymail.com
29e4dff45bfb46a9f8d519ac80d90c94db86362e
6f0fd7cc158c3a5be6fd7035f0682ba70f68e682
/player.py
379f1128e5a72e52301b5af3121f522567231da0
[ "MIT" ]
permissive
KWeselski/pygame_paper_soccer
15d63c93c7bc06367d7fc165bcd0d47489d1a76c
da6dc8768b63b8299c90610b933520ed389480a8
refs/heads/master
2023-01-13T06:52:08.866036
2020-11-07T15:55:31
2020-11-07T15:55:31
288,472,349
3
0
null
null
null
null
UTF-8
Python
false
false
201
py
class Player():
    """A paper-soccer player: display name, pitch colour, current score
    and a flag telling whether it is this player's turn."""

    def __init__(self, name, color, points=0, turn=False):
        self.name = name
        self.color = color
        self.turn = turn
        self.points = points
[ "weselski.kamil@gmail.com" ]
weselski.kamil@gmail.com
c36bbaecc135fbedb166bd0e40448f95378358d8
40b2a6fc0efdec3a20dacf403215b659aac0bdaf
/tests/async/test_pdf.py
8e414ad9f9c07c5ecb487a5750e611e069c3eac5
[ "Apache-2.0" ]
permissive
jaywonder20/playwright-python
0ff4e652507129f8f11e8d0663e2b28622bc6d7f
759eec817fcd54435869d29c9fc665b20d1b2abe
refs/heads/master
2022-12-03T02:47:27.928189
2020-08-08T15:41:42
2020-08-08T15:41:42
null
0
0
null
null
null
null
UTF-8
Python
false
false
916
py
# Copyright (c) Microsoft Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from pathlib import Path import pytest from playwright.async_api import Page @pytest.mark.only_browser("chromium") async def test_should_be_able_to_save_pdf_file(page: Page, server, tmpdir: Path): output_file = tmpdir / "foo.png" await page.pdf(path=str(output_file)) assert os.path.getsize(output_file) > 0
[ "noreply@github.com" ]
jaywonder20.noreply@github.com
b057cf8004cf06aef806a822ce33652173f363f1
c706480a2b71881d9d34541251d483bc427b5f2e
/django_broker/urls.py
6254a95e4242279e725d729d1e4ec9d73d654c4a
[]
no_license
ccennis/django_broker
3a3350fa8648877eca50a983b4ff039db0cdec14
f759c774a1b34b3e80a241216e3b619fbe9e9b51
refs/heads/master
2020-04-10T15:02:48.775968
2019-06-17T16:46:31
2019-06-17T16:46:31
161,095,269
1
0
null
null
null
null
UTF-8
Python
false
false
239
py
#!/usr/bin/env python from django.urls import include, path from . import views urlpatterns = [ path('broker/', include('reindex.urls')), path('broker/', include('rebuild.urls')), path('', views.hello_world, name='hello'), ]
[ "carolinecennis@gmail.com" ]
carolinecennis@gmail.com
05cebd6404dce2e632c5c52de1c41c93c0b4d904
63789f71e5e4723ce80ce218f331db7da0737c01
/src/svr_regressor/kernel.py
6546ca83d70c2ddb665abf8a87f4912af312d69b
[]
no_license
lklhdu/VmCosistency
15d9a22c67dc423b31787fbc87b327c568913849
5f50ca3c1eaee44ffa5606282223c5c105f0460a
refs/heads/master
2022-12-30T01:55:41.965880
2020-10-17T07:28:37
2020-10-17T07:28:37
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,380
py
from sklearn.svm import SVR
import pymysql
from pymysql.cursors import DictCursor
import numpy as np
import openpyxl
import math
from openpyxl.styles import PatternFill

list_regressor_data = []
db = pymysql.connect("localhost", "root", "me521..", "vmconsistency")
cursor = db.cursor(DictCursor)
cursor.execute('select * from kernel')
# the column names of the result set are available through cursor.description
# for col in cursor.description:
#     print(col)
kernel_recordings = cursor.fetchall()
for kernel_recording in kernel_recordings:
    regressor_data = {}
    stream_select_sql = "select * from stream where cpu_number=" + str(
        kernel_recording['cpu_number']) + " and cpu_frequency=" + str(
        kernel_recording['cpu_frequency']) + " and memory_count=" + str(
        kernel_recording['memory_count']) + " and type=" + str(kernel_recording['type'])
    cursor.execute(stream_select_sql)
    stream_select_result = cursor.fetchall()

    fio_read_select_sql = "select * from fio_read where cpu_number=" + str(
        kernel_recording['cpu_number']) + " and cpu_frequency=" + str(
        kernel_recording['cpu_frequency']) + " and memory_count=" + str(
        kernel_recording['memory_count']) + " and type=" + str(kernel_recording['type'])
    cursor.execute(fio_read_select_sql)
    fio_read_select_result = cursor.fetchall()

    fio_write_select_sql = "select * from fio_write where cpu_number=" + str(
        kernel_recording['cpu_number']) + " and cpu_frequency=" + str(
        kernel_recording['cpu_frequency']) + " and memory_count=" + str(
        kernel_recording['memory_count']) + " and type=" + str(kernel_recording['type'])
    cursor.execute(fio_write_select_sql)
    fio_write_select_result = cursor.fetchall()

    linpack_select_sql = "select * from linpack where cpu_number=" + str(
        kernel_recording['cpu_number']) + " and cpu_frequency=" + str(
        kernel_recording['cpu_frequency']) + " and memory_count=" + str(
        kernel_recording['memory_count']) + " and type=" + str(kernel_recording['type'])
    cursor.execute(linpack_select_sql)
    linpack_select_result = cursor.fetchall()

    pi5000_select_sql = "select * from pi5000 where cpu_number=" + str(
        kernel_recording['cpu_number']) + " and cpu_frequency=" + str(
        kernel_recording['cpu_frequency']) + " and memory_count=" + str(
        kernel_recording['memory_count']) + " and type=" + str(kernel_recording['type'])
    cursor.execute(pi5000_select_sql)
    pi5000_select_result = cursor.fetchall()

    if (len(stream_select_result) == 1 and len(linpack_select_result) == 1 and len(fio_read_select_result) == 1 and len(
            fio_write_select_result) == 1):
        regressor_data.update(stream_select_result[0])
        regressor_data.update(linpack_select_result[0])
        regressor_data.update(fio_read_select_result[0])
        regressor_data.update(fio_write_select_result[0])
        regressor_data.update(pi5000_select_result[0])
        regressor_data.update(kernel_recording)
        list_regressor_data.append(regressor_data)
        # print(regressor_data)

# the last attribute, kernel_run_time, is the regression target
attributes = ["type", "cpu_number", "memory_count", "triad", "real", "kernel_run_time"]
# attributes=[]
# for key in list_regressor_data[0]:
#     attributes.append(key)
# print(len(attributes))

train_data = []
train_data_target = []
test_data = []
test_data_target = []
for regressor_data in list_regressor_data:
    data = []
    for attribute in attributes:
        data.append(regressor_data[attribute])
    if data[0] == "6230" or data[0] == "8269":
        train_data.append(data)
        train_data_target.append(data[-1])
    else:
        test_data.append(data)
        test_data_target.append(data[-1])
print(len(train_data))
print(len(test_data))
np_train_data = np.array(train_data)
np_test_data = np.array(test_data)

clf = SVR()
# columns 1 .. len(attributes)-2 are the features; the last column is the
# target (kernel_run_time) and must not be fed to the regressor
clf.fit(np_train_data[:, 1:len(attributes) - 1], train_data_target)
predict_result = clf.predict(np_test_data[:, 1:len(attributes) - 1])

# poly_reg = PolynomialFeatures(degree=2)
# train_poly = poly_reg.fit_transform(np_train_data[:, 1:len(attributes)-1])
# lin_model = linear_model.LinearRegression()
# lin_model.fit(train_poly, train_data_target)
# test_poly = poly_reg.fit_transform(np_test_data[:, 1:len(attributes)-1])
# predict_result = lin_model.predict(test_poly)
# print(predict_result)

row = 1
col = 1
workbook = openpyxl.Workbook()
sheet = workbook.active
for column_name in attributes:
    sheet.cell(row, col, column_name)
    col += 1
sheet.cell(row, col, "predicted value")
col += 1
sheet.cell(row, col, "prediction error (%)")
col += 1
sheet.cell(row, col, "prediction error")
for index in range(0, len(test_data)):
    row += 1
    for col in range(0, len(test_data[index])):
        # print(test_data[index])
        sheet.cell(row, col + 1, test_data[index][col])
    col = len(test_data[index]) + 1
    sheet.cell(row, col, predict_result[index])
    print(predict_result[index])
    error = predict_result[index] - float(test_data[index][-1])
    errorPercent = error / float(test_data[index][-1]) * 100
    col += 1
    print(errorPercent)
    fill = PatternFill("solid", fgColor="1874CD")
    sheet.cell(row, col, errorPercent)
    col += 1
    sheet.cell(row, col, error)
    if math.fabs(errorPercent) > 5:
        sheet.cell(row, col).fill = fill
        sheet.cell(row, col - 1).fill = fill

workbook.save("kernel_data.xlsx")
[ "18338092415@163.com" ]
18338092415@163.com
ff4bb40fe622a4b1834de724771603345ccc0dd6
5a42723328f46877a2b0d2535b4e28b41b537804
/cony/cony/urls.py
a608d548f1646afce150a29c4748305f78ace1c6
[]
no_license
icortes74/cony
f7b93e8d722e3c8d1394208855d3e1d9b2bdc703
ff0dba82ef0261ef51f4e37b7ba9a055b6b3d752
refs/heads/master
2020-12-28T04:33:03.767781
2017-02-08T03:24:12
2017-02-08T03:24:12
68,744,218
0
0
null
null
null
null
UTF-8
Python
false
false
813
py
"""cony URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include,url from django.contrib import admin urlpatterns = [ url(r'^conyApp/',include('conyApp.urls')), url(r'^admin/', admin.site.urls), ]
[ "jfernandez@ifk.cl" ]
jfernandez@ifk.cl
af01e76bad2ad7b2ef20f9d099a60ade5e7a1dd2
c15f45103fe76fb0445bb72ec857d4ed5a6c6e5d
/Chapter.2/2.2.3.a.py
defdfccd87fe7e5af3f1878d29d90c6c151bf7ba
[]
no_license
3367472/Python_20180421
5511f5ec54824bb50b25967617f6b532f13c52ad
5ba9e803bd59f02ce101059961752f55f53b6e03
refs/heads/master
2020-03-12T05:09:19.162713
2019-01-08T09:01:42
2019-01-08T09:01:42
130,458,447
0
0
null
null
null
null
UTF-8
Python
false
false
73
py
# encoding: utf-8 print [1, 2, 3] + [4, 5, 6] print 'Hello, ' + 'world!'
[ "wangxu@zdlhcar.com" ]
wangxu@zdlhcar.com
2453c92faff2465714915000008b0ee83e8a551f
723e8c47de245431fd3c5750b306e782ace0f11f
/Week02/Assignment/[590]N叉树的后序遍历.py
62148e75a9467b14df543c7f47f0e5f9de123e24
[]
no_license
xiaojiangzhang/algorithm010
685a13849ac8de20b56551e40c213167964e602c
521a27b504b8f404478760ae2f6143e7f8d437f5
refs/heads/master
2022-12-07T08:26:33.125978
2020-08-22T14:55:43
2020-08-22T14:55:43
270,485,997
0
0
null
2020-06-08T01:27:54
2020-06-08T01:27:54
null
UTF-8
Python
false
false
1,074
py
# Given an N-ary tree, return the postorder traversal of its nodes' values.
#
# For example, given a 3-ary tree (figure omitted in the original problem
# statement), return its postorder traversal: [5,6,3,2,4,1].
#
# Note: the recursive solution is trivial; can you solve it iteratively?
# Related Topics: Tree
# leetcode submit region begin(Prohibit modification and deletion)
"""
# Definition for a Node.
class Node(object):
    def __init__(self, val=None, children=None):
        self.val = val
        self.children = children
"""


class Solution(object):
    def postorder(self, root):
        """
        :type root: Node
        :rtype: List[int]
        """
        if not root:
            return []
        stack_run = [root]
        result = []
        while stack_run:
            node = stack_run.pop()
            result.append(node.val)
            children = node.children
            for child in children:
                if child:
                    stack_run.append(child)
        # the stack visits nodes in reverse postorder, so flip at the end
        result.reverse()
        return result
# leetcode submit region end(Prohibit modification and deletion)
[ "xiaojiang_719@163.com" ]
xiaojiang_719@163.com
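The problem statement above remarks that the recursive solution is trivial and asks for an iterative one, which Solution.postorder provides. For comparison, here is a sketch of the recursive version; the small Node class is re-declared only so the snippet runs on its own, and the example tree is the 3-ary tree from the problem whose postorder is [5, 6, 3, 2, 4, 1].

# Recursive counterpart to the iterative Solution.postorder above, for
# comparison with the "recursion is easy" remark in the problem statement.
class Node(object):
    def __init__(self, val=None, children=None):
        self.val = val
        self.children = children or []


def postorder_recursive(root):
    if root is None:
        return []
    result = []
    for child in root.children:
        result.extend(postorder_recursive(child))
    result.append(root.val)
    return result


# The 3-ary example tree from the problem: 1 -> [3 -> [5, 6], 2, 4]
root = Node(1, [Node(3, [Node(5), Node(6)]), Node(2), Node(4)])
assert postorder_recursive(root) == [5, 6, 3, 2, 4, 1]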
3481a1316723d474670d7d4f15d0efea61e0bab3
7d096568677660790479d87c22b47aae838ef96b
/stubs/System/Runtime/InteropServices/__init___parts/LayoutKind.pyi
c3e34945f43ff2f2f4708a763120cc22b7bc2dfd
[ "MIT" ]
permissive
NISystemsEngineering/rfmx-pythonnet
30adbdd5660b0d755957f35b68a4c2f60065800c
cd4f90a88a37ed043df880972cb55dfe18883bb7
refs/heads/master
2023-02-04T00:39:41.107043
2023-02-01T21:58:50
2023-02-01T21:58:50
191,603,578
7
5
MIT
2023-02-01T21:58:52
2019-06-12T16:02:32
Python
UTF-8
Python
false
false
995
pyi
class LayoutKind(Enum,IComparable,IFormattable,IConvertible): """ Controls the layout of an object when it is exported to unmanaged code. enum LayoutKind,values: Auto (3),Explicit (2),Sequential (0) """ def __eq__(self,*args): """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """ pass def __format__(self,*args): """ __format__(formattable: IFormattable,format: str) -> str """ pass def __ge__(self,*args): pass def __gt__(self,*args): pass def __init__(self,*args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """ pass def __le__(self,*args): pass def __lt__(self,*args): pass def __ne__(self,*args): pass def __reduce_ex__(self,*args): pass def __str__(self,*args): pass Auto=None Explicit=None Sequential=None value__=None
[ "sean.moore@ni.com" ]
sean.moore@ni.com
ea9f146b1a66b0c18378d7f48eac3479b00d44ed
1121885b9f3dc2157e18f0445b83f2b85a3aed45
/page_xml_draw/gends/page.py
bf800b3de7a9967e86ba000f9e513157cd23a610
[ "Apache-2.0" ]
permissive
VRI-UFPR/page-xml-draw
b3ae1033a31ee75da3199fd4a47c8d6ccb70eb84
ecf3123b385c58286649ba5b5bddc2a9d834daf8
refs/heads/master
2023-07-20T12:59:57.402407
2021-05-01T18:06:14
2021-05-01T18:06:14
null
0
0
null
null
null
null
UTF-8
Python
false
false
712,183
py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Generated Thu Apr 22 13:32:04 2021 by generateDS.py version 2.38.6. # Python 3.7.10 (default, Feb 26 2021, 18:47:35) [GCC 7.3.0] # # Command line options: # ('-o', 'page_xml_draw/gends/page.py') # ('--user-methods', 'page_xml_draw/gends/user_methods.py') # # Command line arguments: # assets/schema/pagecontent.xsd # # Command line: # /home/sulzbals/anaconda3/envs/page-xml/bin/generateDS -o "page_xml_draw/gends/page.py" --user-methods="page_xml_draw/gends/user_methods.py" assets/schema/pagecontent.xsd # # Current working directory (os.getcwd()): # page_xml_draw # import sys try: ModulenotfoundExp_ = ModuleNotFoundError except NameError: ModulenotfoundExp_ = ImportError from six.moves import zip_longest import os import re as re_ import base64 import datetime as datetime_ import decimal as decimal_ try: from lxml import etree as etree_ except ModulenotfoundExp_ : from xml.etree import ElementTree as etree_ Validate_simpletypes_ = True SaveElementTreeNode = True if sys.version_info.major == 2: BaseStrType_ = basestring else: BaseStrType_ = str def parsexml_(infile, parser=None, **kwargs): if parser is None: # Use the lxml ElementTree compatible parser so that, e.g., # we ignore comments. try: parser = etree_.ETCompatXMLParser() except AttributeError: # fallback to xml.etree parser = etree_.XMLParser() try: if isinstance(infile, os.PathLike): infile = os.path.join(infile) except AttributeError: pass doc = etree_.parse(infile, parser=parser, **kwargs) return doc def parsexmlstring_(instring, parser=None, **kwargs): if parser is None: # Use the lxml ElementTree compatible parser so that, e.g., # we ignore comments. try: parser = etree_.ETCompatXMLParser() except AttributeError: # fallback to xml.etree parser = etree_.XMLParser() element = etree_.fromstring(instring, parser=parser, **kwargs) return element # # Namespace prefix definition table (and other attributes, too) # # The module generatedsnamespaces, if it is importable, must contain # a dictionary named GeneratedsNamespaceDefs. This Python dictionary # should map element type names (strings) to XML schema namespace prefix # definitions. The export method for any class for which there is # a namespace prefix definition, will export that definition in the # XML representation of that element. See the export method of # any generated element type class for an example of the use of this # table. # A sample table is: # # # File: generatedsnamespaces.py # # GenerateDSNamespaceDefs = { # "ElementtypeA": "http://www.xxx.com/namespaceA", # "ElementtypeB": "http://www.xxx.com/namespaceB", # } # # Additionally, the generatedsnamespaces module can contain a python # dictionary named GenerateDSNamespaceTypePrefixes that associates element # types with the namespace prefixes that are to be added to the # "xsi:type" attribute value. See the exportAttributes method of # any generated element type and the generation of "xsi:type" for an # example of the use of this table. 
# An example table: # # # File: generatedsnamespaces.py # # GenerateDSNamespaceTypePrefixes = { # "ElementtypeC": "aaa:", # "ElementtypeD": "bbb:", # } # try: from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ except ModulenotfoundExp_ : GenerateDSNamespaceDefs_ = {} try: from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ except ModulenotfoundExp_ : GenerateDSNamespaceTypePrefixes_ = {} # # You can replace the following class definition by defining an # importable module named "generatedscollector" containing a class # named "GdsCollector". See the default class definition below for # clues about the possible content of that class. # try: from generatedscollector import GdsCollector as GdsCollector_ except ModulenotfoundExp_ : class GdsCollector_(object): def __init__(self, messages=None): if messages is None: self.messages = [] else: self.messages = messages def add_message(self, msg): self.messages.append(msg) def get_messages(self): return self.messages def clear_messages(self): self.messages = [] def print_messages(self): for msg in self.messages: print("Warning: {}".format(msg)) def write_messages(self, outstream): for msg in self.messages: outstream.write("Warning: {}\n".format(msg)) # # The super-class for enum types # try: from enum import Enum except ModulenotfoundExp_ : Enum = object # # The root super-class for element type classes # # Calls to the methods in these classes are generated by generateDS.py. # You can replace these methods by re-implementing the following class # in a module named generatedssuper.py. try: from generatedssuper import GeneratedsSuper except ModulenotfoundExp_ as exp: class GeneratedsSuper(object): __hash__ = object.__hash__ tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') class _FixedOffsetTZ(datetime_.tzinfo): def __init__(self, offset, name): self.__offset = datetime_.timedelta(minutes=offset) self.__name = name def utcoffset(self, dt): return self.__offset def tzname(self, dt): return self.__name def dst(self, dt): return None def gds_format_string(self, input_data, input_name=''): return input_data def gds_parse_string(self, input_data, node=None, input_name=''): return input_data def gds_validate_string(self, input_data, node=None, input_name=''): if not input_data: return '' else: return input_data def gds_format_base64(self, input_data, input_name=''): return base64.b64encode(input_data) def gds_validate_base64(self, input_data, node=None, input_name=''): return input_data def gds_format_integer(self, input_data, input_name=''): return '%d' % input_data def gds_parse_integer(self, input_data, node=None, input_name=''): try: ival = int(input_data) except (TypeError, ValueError) as exp: raise_parse_error(node, 'Requires integer value: %s' % exp) return ival def gds_validate_integer(self, input_data, node=None, input_name=''): try: value = int(input_data) except (TypeError, ValueError): raise_parse_error(node, 'Requires integer value') return value def gds_format_integer_list(self, input_data, input_name=''): if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): input_data = [str(s) for s in input_data] return '%s' % ' '.join(input_data) def gds_validate_integer_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: try: int(value) except (TypeError, ValueError): raise_parse_error(node, 'Requires sequence of integer values') return values def gds_format_float(self, input_data, 
input_name=''): return ('%.15f' % input_data).rstrip('0') def gds_parse_float(self, input_data, node=None, input_name=''): try: fval_ = float(input_data) except (TypeError, ValueError) as exp: raise_parse_error(node, 'Requires float or double value: %s' % exp) return fval_ def gds_validate_float(self, input_data, node=None, input_name=''): try: value = float(input_data) except (TypeError, ValueError): raise_parse_error(node, 'Requires float value') return value def gds_format_float_list(self, input_data, input_name=''): if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): input_data = [str(s) for s in input_data] return '%s' % ' '.join(input_data) def gds_validate_float_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: try: float(value) except (TypeError, ValueError): raise_parse_error(node, 'Requires sequence of float values') return values def gds_format_decimal(self, input_data, input_name=''): return_value = '%s' % input_data if '.' in return_value: return_value = return_value.rstrip('0') if return_value.endswith('.'): return_value = return_value.rstrip('.') return return_value def gds_parse_decimal(self, input_data, node=None, input_name=''): try: decimal_value = decimal_.Decimal(input_data) except (TypeError, ValueError): raise_parse_error(node, 'Requires decimal value') return decimal_value def gds_validate_decimal(self, input_data, node=None, input_name=''): try: value = decimal_.Decimal(input_data) except (TypeError, ValueError): raise_parse_error(node, 'Requires decimal value') return value def gds_format_decimal_list(self, input_data, input_name=''): if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): input_data = [str(s) for s in input_data] return ' '.join([self.gds_format_decimal(item) for item in input_data]) def gds_validate_decimal_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: try: decimal_.Decimal(value) except (TypeError, ValueError): raise_parse_error(node, 'Requires sequence of decimal values') return values def gds_format_double(self, input_data, input_name=''): return '%s' % input_data def gds_parse_double(self, input_data, node=None, input_name=''): try: fval_ = float(input_data) except (TypeError, ValueError) as exp: raise_parse_error(node, 'Requires double or float value: %s' % exp) return fval_ def gds_validate_double(self, input_data, node=None, input_name=''): try: value = float(input_data) except (TypeError, ValueError): raise_parse_error(node, 'Requires double or float value') return value def gds_format_double_list(self, input_data, input_name=''): if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): input_data = [str(s) for s in input_data] return '%s' % ' '.join(input_data) def gds_validate_double_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: try: float(value) except (TypeError, ValueError): raise_parse_error( node, 'Requires sequence of double or float values') return values def gds_format_boolean(self, input_data, input_name=''): return ('%s' % input_data).lower() def gds_parse_boolean(self, input_data, node=None, input_name=''): if input_data in ('true', '1'): bval = True elif input_data in ('false', '0'): bval = False else: raise_parse_error(node, 'Requires boolean value') return bval def gds_validate_boolean(self, input_data, node=None, input_name=''): if input_data not in (True, 1, False, 0, ): raise_parse_error( node, 'Requires boolean 
value ' '(one of True, 1, False, 0)') return input_data def gds_format_boolean_list(self, input_data, input_name=''): if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_): input_data = [str(s) for s in input_data] return '%s' % ' '.join(input_data) def gds_validate_boolean_list( self, input_data, node=None, input_name=''): values = input_data.split() for value in values: value = self.gds_parse_boolean(value, node, input_name) if value not in (True, 1, False, 0, ): raise_parse_error( node, 'Requires sequence of boolean values ' '(one of True, 1, False, 0)') return values def gds_validate_datetime(self, input_data, node=None, input_name=''): return input_data def gds_format_datetime(self, input_data, input_name=''): if input_data.microsecond == 0: _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( input_data.year, input_data.month, input_data.day, input_data.hour, input_data.minute, input_data.second, ) else: _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( input_data.year, input_data.month, input_data.day, input_data.hour, input_data.minute, input_data.second, ('%f' % (float(input_data.microsecond) / 1000000))[2:], ) if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: _svalue += 'Z' else: if total_seconds < 0: _svalue += '-' total_seconds *= -1 else: _svalue += '+' hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) return _svalue @classmethod def gds_parse_datetime(cls, input_data): tz = None if input_data[-1] == 'Z': tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: tzoff_parts = results.group(2).split(':') tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) if results.group(1) == '-': tzoff *= -1 tz = GeneratedsSuper._FixedOffsetTZ( tzoff, results.group(0)) input_data = input_data[:-6] time_parts = input_data.split('.') if len(time_parts) > 1: micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) input_data = '%s.%s' % ( time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) dt = datetime_.datetime.strptime( input_data, '%Y-%m-%dT%H:%M:%S.%f') else: dt = datetime_.datetime.strptime( input_data, '%Y-%m-%dT%H:%M:%S') dt = dt.replace(tzinfo=tz) return dt def gds_validate_date(self, input_data, node=None, input_name=''): return input_data def gds_format_date(self, input_data, input_name=''): _svalue = '%04d-%02d-%02d' % ( input_data.year, input_data.month, input_data.day, ) try: if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: _svalue += 'Z' else: if total_seconds < 0: _svalue += '-' total_seconds *= -1 else: _svalue += '+' hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 _svalue += '{0:02d}:{1:02d}'.format( hours, minutes) except AttributeError: pass return _svalue @classmethod def gds_parse_date(cls, input_data): tz = None if input_data[-1] == 'Z': tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: tzoff_parts = results.group(2).split(':') tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) if results.group(1) == '-': tzoff *= -1 tz = GeneratedsSuper._FixedOffsetTZ( tzoff, results.group(0)) input_data = input_data[:-6] dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') dt = dt.replace(tzinfo=tz) return dt.date() def gds_validate_time(self, input_data, node=None, input_name=''): return input_data def gds_format_time(self, input_data, input_name=''): if input_data.microsecond == 0: _svalue = '%02d:%02d:%02d' % ( input_data.hour, input_data.minute, input_data.second, ) else: _svalue = '%02d:%02d:%02d.%s' % ( input_data.hour, input_data.minute, input_data.second, ('%f' % (float(input_data.microsecond) / 1000000))[2:], ) if input_data.tzinfo is not None: tzoff = input_data.tzinfo.utcoffset(input_data) if tzoff is not None: total_seconds = tzoff.seconds + (86400 * tzoff.days) if total_seconds == 0: _svalue += 'Z' else: if total_seconds < 0: _svalue += '-' total_seconds *= -1 else: _svalue += '+' hours = total_seconds // 3600 minutes = (total_seconds - (hours * 3600)) // 60 _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) return _svalue def gds_validate_simple_patterns(self, patterns, target): # pat is a list of lists of strings/patterns. # The target value must match at least one of the patterns # in order for the test to succeed. 
found1 = True for patterns1 in patterns: found2 = False for patterns2 in patterns1: mo = re_.search(patterns2, target) if mo is not None and len(mo.group(0)) == len(target): found2 = True break if not found2: found1 = False break return found1 @classmethod def gds_parse_time(cls, input_data): tz = None if input_data[-1] == 'Z': tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') input_data = input_data[:-1] else: results = GeneratedsSuper.tzoff_pattern.search(input_data) if results is not None: tzoff_parts = results.group(2).split(':') tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) if results.group(1) == '-': tzoff *= -1 tz = GeneratedsSuper._FixedOffsetTZ( tzoff, results.group(0)) input_data = input_data[:-6] if len(input_data.split('.')) > 1: dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') else: dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') dt = dt.replace(tzinfo=tz) return dt.time() def gds_check_cardinality_( self, value, input_name, min_occurs=0, max_occurs=1, required=None): if value is None: length = 0 elif isinstance(value, list): length = len(value) else: length = 1 if required is not None : if required and length < 1: self.gds_collector_.add_message( "Required value {}{} is missing".format( input_name, self.gds_get_node_lineno_())) if length < min_occurs: self.gds_collector_.add_message( "Number of values for {}{} is below " "the minimum allowed, " "expected at least {}, found {}".format( input_name, self.gds_get_node_lineno_(), min_occurs, length)) elif length > max_occurs: self.gds_collector_.add_message( "Number of values for {}{} is above " "the maximum allowed, " "expected at most {}, found {}".format( input_name, self.gds_get_node_lineno_(), max_occurs, length)) def gds_validate_builtin_ST_( self, validator, value, input_name, min_occurs=None, max_occurs=None, required=None): if value is not None: try: validator(value, input_name=input_name) except GDSParseError as parse_error: self.gds_collector_.add_message(str(parse_error)) def gds_validate_defined_ST_( self, validator, value, input_name, min_occurs=None, max_occurs=None, required=None): if value is not None: try: validator(value) except GDSParseError as parse_error: self.gds_collector_.add_message(str(parse_error)) def gds_str_lower(self, instring): return instring.lower() def get_path_(self, node): path_list = [] self.get_path_list_(node, path_list) path_list.reverse() path = '/'.join(path_list) return path Tag_strip_pattern_ = re_.compile(r'\{.*\}') def get_path_list_(self, node, path_list): if node is None: return tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) if tag: path_list.append(tag) self.get_path_list_(node.getparent(), path_list) def get_class_obj_(self, node, default_class=None): class_obj1 = default_class if 'xsi' in node.nsmap: classname = node.get('{%s}type' % node.nsmap['xsi']) if classname is not None: names = classname.split(':') if len(names) == 2: classname = names[1] class_obj2 = globals().get(classname) if class_obj2 is not None: class_obj1 = class_obj2 return class_obj1 def gds_build_any(self, node, type_name=None): # provide default value in case option --disable-xml is used. 
content = "" content = etree_.tostring(node, encoding="unicode") return content @classmethod def gds_reverse_node_mapping(cls, mapping): return dict(((v, k) for k, v in mapping.items())) @staticmethod def gds_encode(instring): if sys.version_info.major == 2: if ExternalEncoding: encoding = ExternalEncoding else: encoding = 'utf-8' return instring.encode(encoding) else: return instring @staticmethod def convert_unicode(instring): if isinstance(instring, str): result = quote_xml(instring) elif sys.version_info.major == 2 and isinstance(instring, unicode): result = quote_xml(instring).encode('utf8') else: result = GeneratedsSuper.gds_encode(str(instring)) return result def __eq__(self, other): def excl_select_objs_(obj): return (obj[0] != 'parent_object_' and obj[0] != 'gds_collector_') if type(self) != type(other): return False return all(x == y for x, y in zip_longest( filter(excl_select_objs_, self.__dict__.items()), filter(excl_select_objs_, other.__dict__.items()))) def __ne__(self, other): return not self.__eq__(other) # Django ETL transform hooks. def gds_djo_etl_transform(self): pass def gds_djo_etl_transform_db_obj(self, dbobj): pass # SQLAlchemy ETL transform hooks. def gds_sqa_etl_transform(self): return 0, None def gds_sqa_etl_transform_db_obj(self, dbobj): pass def gds_get_node_lineno_(self): if (hasattr(self, "gds_elementtree_node_") and self.gds_elementtree_node_ is not None): return ' near line {}'.format( self.gds_elementtree_node_.sourceline) else: return "" def getSubclassFromModule_(module, class_): '''Get the subclass of a class from a specific module.''' name = class_.__name__ + 'Sub' if hasattr(module, name): return getattr(module, name) else: return None # # If you have installed IPython you can uncomment and use the following. # IPython is available from http://ipython.scipy.org/. # ## from IPython.Shell import IPShellEmbed ## args = '' ## ipshell = IPShellEmbed(args, ## banner = 'Dropping into IPython', ## exit_msg = 'Leaving Interpreter, back to program.') # Then use the following line where and when you want to drop into the # IPython shell: # ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') # # Globals # ExternalEncoding = '' # Set this to false in order to deactivate during export, the use of # name space prefixes captured from the input document. UseCapturedNS_ = True CapturedNsmap_ = {} Tag_pattern_ = re_.compile(r'({.*})?(.*)') String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) # Change this to redirect the generated superclass module to use a # specific subclass module. CurrentSubclassModule_ = None # # Support/utility functions. # def showIndent(outfile, level, pretty_print=True): if pretty_print: for idx in range(level): outfile.write(' ') def quote_xml(inStr): "Escape markup chars, but do not modify CDATA sections." 
if not inStr: return '' s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) s2 = '' pos = 0 matchobjects = CDATA_pattern_.finditer(s1) for mo in matchobjects: s3 = s1[pos:mo.start()] s2 += quote_xml_aux(s3) s2 += s1[mo.start():mo.end()] pos = mo.end() s3 = s1[pos:] s2 += quote_xml_aux(s3) return s2 def quote_xml_aux(inStr): s1 = inStr.replace('&', '&amp;') s1 = s1.replace('<', '&lt;') s1 = s1.replace('>', '&gt;') return s1 def quote_attrib(inStr): s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) s1 = s1.replace('&', '&amp;') s1 = s1.replace('<', '&lt;') s1 = s1.replace('>', '&gt;') if '"' in s1: if "'" in s1: s1 = '"%s"' % s1.replace('"', "&quot;") else: s1 = "'%s'" % s1 else: s1 = '"%s"' % s1 return s1 def quote_python(inStr): s1 = inStr if s1.find("'") == -1: if s1.find('\n') == -1: return "'%s'" % s1 else: return "'''%s'''" % s1 else: if s1.find('"') != -1: s1 = s1.replace('"', '\\"') if s1.find('\n') == -1: return '"%s"' % s1 else: return '"""%s"""' % s1 def get_all_text_(node): if node.text is not None: text = node.text else: text = '' for child in node: if child.tail is not None: text += child.tail return text def find_attr_value_(attr_name, node): attrs = node.attrib attr_parts = attr_name.split(':') value = None if len(attr_parts) == 1: value = attrs.get(attr_name) elif len(attr_parts) == 2: prefix, name = attr_parts if prefix == 'xml': namespace = 'http://www.w3.org/XML/1998/namespace' else: namespace = node.nsmap.get(prefix) if namespace is not None: value = attrs.get('{%s}%s' % (namespace, name, )) return value def encode_str_2_3(instr): return instr class GDSParseError(Exception): pass def raise_parse_error(node, msg): if node is not None: msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) raise GDSParseError(msg) class MixedContainer: # Constants for category: CategoryNone = 0 CategoryText = 1 CategorySimple = 2 CategoryComplex = 3 # Constants for content_type: TypeNone = 0 TypeText = 1 TypeString = 2 TypeInteger = 3 TypeFloat = 4 TypeDecimal = 5 TypeDouble = 6 TypeBoolean = 7 TypeBase64 = 8 def __init__(self, category, content_type, name, value): self.category = category self.content_type = content_type self.name = name self.value = value def getCategory(self): return self.category def getContenttype(self, content_type): return self.content_type def getValue(self): return self.value def getName(self): return self.name def export(self, outfile, level, name, namespace, pretty_print=True): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. 
if self.value.strip(): outfile.write(self.value) elif self.category == MixedContainer.CategorySimple: self.exportSimple(outfile, level, name) else: # category == MixedContainer.CategoryComplex self.value.export( outfile, level, namespace, name_=name, pretty_print=pretty_print) def exportSimple(self, outfile, level, name): if self.content_type == MixedContainer.TypeString: outfile.write('<%s>%s</%s>' % ( self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeInteger or \ self.content_type == MixedContainer.TypeBoolean: outfile.write('<%s>%d</%s>' % ( self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeFloat or \ self.content_type == MixedContainer.TypeDecimal: outfile.write('<%s>%f</%s>' % ( self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeDouble: outfile.write('<%s>%g</%s>' % ( self.name, self.value, self.name)) elif self.content_type == MixedContainer.TypeBase64: outfile.write('<%s>%s</%s>' % ( self.name, base64.b64encode(self.value), self.name)) def to_etree(self, element, mapping_=None, nsmap_=None): if self.category == MixedContainer.CategoryText: # Prevent exporting empty content as empty lines. if self.value.strip(): if len(element) > 0: if element[-1].tail is None: element[-1].tail = self.value else: element[-1].tail += self.value else: if element.text is None: element.text = self.value else: element.text += self.value elif self.category == MixedContainer.CategorySimple: subelement = etree_.SubElement( element, '%s' % self.name) subelement.text = self.to_etree_simple() else: # category == MixedContainer.CategoryComplex self.value.to_etree(element) def to_etree_simple(self, mapping_=None, nsmap_=None): if self.content_type == MixedContainer.TypeString: text = self.value elif (self.content_type == MixedContainer.TypeInteger or self.content_type == MixedContainer.TypeBoolean): text = '%d' % self.value elif (self.content_type == MixedContainer.TypeFloat or self.content_type == MixedContainer.TypeDecimal): text = '%f' % self.value elif self.content_type == MixedContainer.TypeDouble: text = '%g' % self.value elif self.content_type == MixedContainer.TypeBase64: text = '%s' % base64.b64encode(self.value) return text def exportLiteral(self, outfile, level, name): if self.category == MixedContainer.CategoryText: showIndent(outfile, level) outfile.write( 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( self.category, self.content_type, self.name, self.value)) elif self.category == MixedContainer.CategorySimple: showIndent(outfile, level) outfile.write( 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( self.category, self.content_type, self.name, self.value)) else: # category == MixedContainer.CategoryComplex showIndent(outfile, level) outfile.write( 'model_.MixedContainer(%d, %d, "%s",\n' % ( self.category, self.content_type, self.name,)) self.value.exportLiteral(outfile, level + 1) showIndent(outfile, level) outfile.write(')\n') class MemberSpec_(object): def __init__(self, name='', data_type='', container=0, optional=0, child_attrs=None, choice=None): self.name = name self.data_type = data_type self.container = container self.child_attrs = child_attrs self.choice = choice self.optional = optional def set_name(self, name): self.name = name def get_name(self): return self.name def set_data_type(self, data_type): self.data_type = data_type def get_data_type_chain(self): return self.data_type def get_data_type(self): if isinstance(self.data_type, list): if len(self.data_type) > 0: return self.data_type[-1] 
else: return 'xs:string' else: return self.data_type def set_container(self, container): self.container = container def get_container(self): return self.container def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs def get_child_attrs(self): return self.child_attrs def set_choice(self, choice): self.choice = choice def get_choice(self): return self.choice def set_optional(self, optional): self.optional = optional def get_optional(self): return self.optional def _cast(typ, value): if typ is None or value is None: return value return typ(value) # # Data representation classes. # class AlignSimpleType(str, Enum): LEFT='left' CENTRE='centre' RIGHT='right' JUSTIFY='justify' class ChartTypeSimpleType(str, Enum): BAR='bar' LINE='line' PIE='pie' SCATTER='scatter' SURFACE='surface' OTHER='other' class ColourDepthSimpleType(str, Enum): BILEVEL='bilevel' GREYSCALE='greyscale' COLOUR='colour' OTHER='other' class ColourSimpleType(str, Enum): BLACK='black' BLUE='blue' BROWN='brown' CYAN='cyan' GREEN='green' GREY='grey' INDIGO='indigo' MAGENTA='magenta' ORANGE='orange' PINK='pink' RED='red' TURQUOISE='turquoise' VIOLET='violet' WHITE='white' YELLOW='yellow' OTHER='other' class GraphicsTypeSimpleType(str, Enum): LOGO='logo' LETTERHEAD='letterhead' DECORATION='decoration' FRAME='frame' HANDWRITTENANNOTATION='handwritten-annotation' STAMP='stamp' SIGNATURE='signature' BARCODE='barcode' PAPERGROW='paper-grow' PUNCHHOLE='punch-hole' OTHER='other' class GroupTypeSimpleType(str, Enum): PARAGRAPH='paragraph' LIST='list' LISTITEM='list-item' FIGURE='figure' ARTICLE='article' DIV='div' OTHER='other' class LanguageSimpleType(str, Enum): """ISO 639.x 2016-07-14""" ABKHAZ='Abkhaz' AFAR='Afar' AFRIKAANS='Afrikaans' AKAN='Akan' ALBANIAN='Albanian' AMHARIC='Amharic' ARABIC='Arabic' ARAGONESE='Aragonese' ARMENIAN='Armenian' ASSAMESE='Assamese' AVARIC='Avaric' AVESTAN='Avestan' AYMARA='Aymara' AZERBAIJANI='Azerbaijani' BAMBARA='Bambara' BASHKIR='Bashkir' BASQUE='Basque' BELARUSIAN='Belarusian' BENGALI='Bengali' BIHARI='Bihari' BISLAMA='Bislama' BOSNIAN='Bosnian' BRETON='Breton' BULGARIAN='Bulgarian' BURMESE='Burmese' CAMBODIAN='Cambodian' CANTONESE='Cantonese' CATALAN='Catalan' CHAMORRO='Chamorro' CHECHEN='Chechen' CHICHEWA='Chichewa' CHINESE='Chinese' CHUVASH='Chuvash' CORNISH='Cornish' CORSICAN='Corsican' CREE='Cree' CROATIAN='Croatian' CZECH='Czech' DANISH='Danish' DIVEHI='Divehi' DUTCH='Dutch' DZONGKHA='Dzongkha' ENGLISH='English' ESPERANTO='Esperanto' ESTONIAN='Estonian' EWE='Ewe' FAROESE='Faroese' FIJIAN='Fijian' FINNISH='Finnish' FRENCH='French' FULA='Fula' GAELIC='Gaelic' GALICIAN='Galician' GANDA='Ganda' GEORGIAN='Georgian' GERMAN='German' GREEK='Greek' GUARANÍ='Guaraní' GUJARATI='Gujarati' HAITIAN='Haitian' HAUSA='Hausa' HEBREW='Hebrew' HERERO='Herero' HINDI='Hindi' HIRI_MOTU='Hiri Motu' HUNGARIAN='Hungarian' ICELANDIC='Icelandic' IDO='Ido' IGBO='Igbo' INDONESIAN='Indonesian' INTERLINGUA='Interlingua' INTERLINGUE='Interlingue' INUKTITUT='Inuktitut' INUPIAQ='Inupiaq' IRISH='Irish' ITALIAN='Italian' JAPANESE='Japanese' JAVANESE='Javanese' KALAALLISUT='Kalaallisut' KANNADA='Kannada' KANURI='Kanuri' KASHMIRI='Kashmiri' KAZAKH='Kazakh' KHMER='Khmer' KIKUYU='Kikuyu' KINYARWANDA='Kinyarwanda' KIRUNDI='Kirundi' KOMI='Komi' KONGO='Kongo' KOREAN='Korean' KURDISH='Kurdish' KWANYAMA='Kwanyama' KYRGYZ='Kyrgyz' LAO='Lao' LATIN='Latin' LATVIAN='Latvian' LIMBURGISH='Limburgish' LINGALA='Lingala' LITHUANIAN='Lithuanian' LUBA_KATANGA='Luba-Katanga' LUXEMBOURGISH='Luxembourgish' MACEDONIAN='Macedonian' 
MALAGASY='Malagasy' MALAY='Malay' MALAYALAM='Malayalam' MALTESE='Maltese' MANX='Manx' MĀORI='Māori' MARATHI='Marathi' MARSHALLESE='Marshallese' MONGOLIAN='Mongolian' NAURU='Nauru' NAVAJO='Navajo' NDONGA='Ndonga' NEPALI='Nepali' NORTH_NDEBELE='North Ndebele' NORTHERN_SAMI='Northern Sami' NORWEGIAN='Norwegian' NORWEGIAN_BOKMÅL='Norwegian Bokmål' NORWEGIAN_NYNORSK='Norwegian Nynorsk' NUOSU='Nuosu' OCCITAN='Occitan' OJIBWE='Ojibwe' OLD_CHURCH_SLAVONIC='Old Church Slavonic' ORIYA='Oriya' OROMO='Oromo' OSSETIAN='Ossetian' PĀLI='Pāli' PANJABI='Panjabi' PASHTO='Pashto' PERSIAN='Persian' POLISH='Polish' PORTUGUESE='Portuguese' PUNJABI='Punjabi' QUECHUA='Quechua' ROMANIAN='Romanian' ROMANSH='Romansh' RUSSIAN='Russian' SAMOAN='Samoan' SANGO='Sango' SANSKRIT='Sanskrit' SARDINIAN='Sardinian' SERBIAN='Serbian' SHONA='Shona' SINDHI='Sindhi' SINHALA='Sinhala' SLOVAK='Slovak' SLOVENE='Slovene' SOMALI='Somali' SOUTH_NDEBELE='South Ndebele' SOUTHERN_SOTHO='Southern Sotho' SPANISH='Spanish' SUNDANESE='Sundanese' SWAHILI='Swahili' SWATI='Swati' SWEDISH='Swedish' TAGALOG='Tagalog' TAHITIAN='Tahitian' TAJIK='Tajik' TAMIL='Tamil' TATAR='Tatar' TELUGU='Telugu' THAI='Thai' TIBETAN='Tibetan' TIGRINYA='Tigrinya' TONGA='Tonga' TSONGA='Tsonga' TSWANA='Tswana' TURKISH='Turkish' TURKMEN='Turkmen' TWI='Twi' UIGHUR='Uighur' UKRAINIAN='Ukrainian' URDU='Urdu' UZBEK='Uzbek' VENDA='Venda' VIETNAMESE='Vietnamese' VOLAPÜK='Volapük' WALLOON='Walloon' WELSH='Welsh' WESTERN_FRISIAN='Western Frisian' WOLOF='Wolof' XHOSA='Xhosa' YIDDISH='Yiddish' YORUBA='Yoruba' ZHUANG='Zhuang' ZULU='Zulu' OTHER='other' class PageTypeSimpleType(str, Enum): FRONTCOVER='front-cover' BACKCOVER='back-cover' TITLE='title' TABLEOFCONTENTS='table-of-contents' INDEX='index' CONTENT='content' BLANK='blank' OTHER='other' class ProductionSimpleType(str, Enum): """Text production type""" PRINTED='printed' TYPEWRITTEN='typewritten' HANDWRITTENCURSIVE='handwritten-cursive' HANDWRITTENPRINTSCRIPT='handwritten-printscript' MEDIEVALMANUSCRIPT='medieval-manuscript' OTHER='other' class ReadingDirectionSimpleType(str, Enum): LEFTTORIGHT='left-to-right' RIGHTTOLEFT='right-to-left' TOPTOBOTTOM='top-to-bottom' BOTTOMTOTOP='bottom-to-top' class ScriptSimpleType(str, Enum): """iso15924 2016-07-14""" ADLM_ADLAM='Adlm - Adlam' AFAK_AFAKA='Afak - Afaka' AGHB_CAUCASIAN_ALBANIAN='Aghb - Caucasian Albanian' AHOM_AHOM_TAI_AHOM='Ahom - Ahom, Tai Ahom' ARAB_ARABIC='Arab - Arabic' ARAN_ARABIC_NASTALIQVARIANT='Aran - Arabic (Nastaliq variant)' ARMI_IMPERIAL_ARAMAIC='Armi - Imperial Aramaic' ARMN_ARMENIAN='Armn - Armenian' AVST_AVESTAN='Avst - Avestan' BALI_BALINESE='Bali - Balinese' BAMU_BAMUM='Bamu - Bamum' BASS_BASSA_VAH='Bass - Bassa Vah' BATK_BATAK='Batk - Batak' BENG_BENGALI='Beng - Bengali' BHKS_BHAIKSUKI='Bhks - Bhaiksuki' BLIS_BLISSYMBOLS='Blis - Blissymbols' BOPO_BOPOMOFO='Bopo - Bopomofo' BRAH_BRAHMI='Brah - Brahmi' BRAI_BRAILLE='Brai - Braille' BUGI_BUGINESE='Bugi - Buginese' BUHD_BUHID='Buhd - Buhid' CAKM_CHAKMA='Cakm - Chakma' CANS_UNIFIED_CANADIAN_ABORIGINAL_SYLLABICS='Cans - Unified Canadian Aboriginal Syllabics' CARI_CARIAN='Cari - Carian' CHAM_CHAM='Cham - Cham' CHER_CHEROKEE='Cher - Cherokee' CIRT_CIRTH='Cirt - Cirth' COPT_COPTIC='Copt - Coptic' CPRT_CYPRIOT='Cprt - Cypriot' CYRL_CYRILLIC='Cyrl - Cyrillic' CYRS_CYRILLIC_OLD_CHURCH_SLAVONICVARIANT='Cyrs - Cyrillic (Old Church Slavonic variant)' DEVA_DEVANAGARI_NAGARI='Deva - Devanagari (Nagari)' DSRT_DESERET_MORMON='Dsrt - Deseret (Mormon)' DUPL_DUPLOYANSHORTHAND_DUPLOYANSTENOGRAPHY='Dupl - Duployan shorthand, Duployan 
stenography' EGYD_EGYPTIANDEMOTIC='Egyd - Egyptian demotic' EGYH_EGYPTIANHIERATIC='Egyh - Egyptian hieratic' EGYP_EGYPTIANHIEROGLYPHS='Egyp - Egyptian hieroglyphs' ELBA_ELBASAN='Elba - Elbasan' ETHI_ETHIOPIC='Ethi - Ethiopic' GEOK_KHUTSURI_ASOMTAVRULIAND_NUSKHURI='Geok - Khutsuri (Asomtavruli and Nuskhuri)' GEOR_GEORGIAN_MKHEDRULI='Geor - Georgian (Mkhedruli)' GLAG_GLAGOLITIC='Glag - Glagolitic' GOTH_GOTHIC='Goth - Gothic' GRAN_GRANTHA='Gran - Grantha' GREK_GREEK='Grek - Greek' GUJR_GUJARATI='Gujr - Gujarati' GURU_GURMUKHI='Guru - Gurmukhi' HANB_HANWITH_BOPOMOFO='Hanb - Han with Bopomofo' HANG_HANGUL='Hang - Hangul' HANI_HAN_HANZI_KANJI_HANJA='Hani - Han (Hanzi, Kanji, Hanja)' HANO_HANUNOO_HANUNÓO='Hano - Hanunoo (Hanunóo)' HANS_HAN_SIMPLIFIEDVARIANT='Hans - Han (Simplified variant)' HANT_HAN_TRADITIONALVARIANT='Hant - Han (Traditional variant)' HATR_HATRAN='Hatr - Hatran' HEBR_HEBREW='Hebr - Hebrew' HIRA_HIRAGANA='Hira - Hiragana' HLUW_ANATOLIAN_HIEROGLYPHS='Hluw - Anatolian Hieroglyphs' HMNG_PAHAWH_HMONG='Hmng - Pahawh Hmong' HRKT_JAPANESESYLLABARIES='Hrkt - Japanese syllabaries' HUNG_OLD_HUNGARIAN_HUNGARIAN_RUNIC='Hung - Old Hungarian (Hungarian Runic)' INDS_INDUS_HARAPPAN='Inds - Indus (Harappan)' ITAL_OLD_ITALIC_ETRUSCAN_OSCANETC='Ital - Old Italic (Etruscan, Oscan etc.)' JAMO_JAMO='Jamo - Jamo' JAVA_JAVANESE='Java - Javanese' JPAN_JAPANESE='Jpan - Japanese' JURC_JURCHEN='Jurc - Jurchen' KALI_KAYAH_LI='Kali - Kayah Li' KANA_KATAKANA='Kana - Katakana' KHAR_KHAROSHTHI='Khar - Kharoshthi' KHMR_KHMER='Khmr - Khmer' KHOJ_KHOJKI='Khoj - Khojki' KITL_KHITANLARGESCRIPT='Kitl - Khitan large script' KITS_KHITANSMALLSCRIPT='Kits - Khitan small script' KNDA_KANNADA='Knda - Kannada' KORE_KOREANALIASFOR_HANGUL_HAN='Kore - Korean (alias for Hangul + Han)' KPEL_KPELLE='Kpel - Kpelle' KTHI_KAITHI='Kthi - Kaithi' LANA_TAI_THAM_LANNA='Lana - Tai Tham (Lanna)' LAOO_LAO='Laoo - Lao' LATF_LATIN_FRAKTURVARIANT='Latf - Latin (Fraktur variant)' LATG_LATIN_GAELICVARIANT='Latg - Latin (Gaelic variant)' LATN_LATIN='Latn - Latin' LEKE_LEKE='Leke - Leke' LEPC_LEPCHARÓNG='Lepc - Lepcha (Róng)' LIMB_LIMBU='Limb - Limbu' LINA_LINEARA='Lina - Linear A' LINB_LINEARB='Linb - Linear B' LISU_LISU_FRASER='Lisu - Lisu (Fraser)' LOMA_LOMA='Loma - Loma' LYCI_LYCIAN='Lyci - Lycian' LYDI_LYDIAN='Lydi - Lydian' MAHJ_MAHAJANI='Mahj - Mahajani' MAND_MANDAIC_MANDAEAN='Mand - Mandaic, Mandaean' MANI_MANICHAEAN='Mani - Manichaean' MARC_MARCHEN='Marc - Marchen' MAYA_MAYANHIEROGLYPHS='Maya - Mayan hieroglyphs' MEND_MENDE_KIKAKUI='Mend - Mende Kikakui' MERC_MEROITIC_CURSIVE='Merc - Meroitic Cursive' MERO_MEROITIC_HIEROGLYPHS='Mero - Meroitic Hieroglyphs' MLYM_MALAYALAM='Mlym - Malayalam' MODI_MODI_MOḌĪ='Modi - Modi, Moḍī' MONG_MONGOLIAN='Mong - Mongolian' MOON_MOON_MOONCODE_MOONSCRIPT_MOONTYPE='Moon - Moon (Moon code, Moon script, Moon type)' MROO_MRO_MRU='Mroo - Mro, Mru' MTEI_MEITEI_MAYEK_MEITHEI_MEETEI='Mtei - Meitei Mayek (Meithei, Meetei)' MULT_MULTANI='Mult - Multani' MYMR_MYANMAR_BURMESE='Mymr - Myanmar (Burmese)' NARB_OLD_NORTH_ARABIAN_ANCIENT_NORTH_ARABIAN='Narb - Old North Arabian (Ancient North Arabian)' NBAT_NABATAEAN='Nbat - Nabataean' NEWA_NEWA_NEWAR_NEWARI='Newa - Newa, Newar, Newari' NKGB_NAKHI_GEBA='Nkgb - Nakhi Geba' NKOON_KO='Nkoo - N’Ko' NSHUNÜSHU='Nshu - Nüshu' OGAM_OGHAM='Ogam - Ogham' OLCK_OL_CHIKI_OL_CEMET_OL_SANTALI='Olck - Ol Chiki (Ol Cemet’, Ol, Santali)' ORKH_OLD_TURKIC_ORKHON_RUNIC='Orkh - Old Turkic, Orkhon Runic' ORYA_ORIYA='Orya - Oriya' OSGE_OSAGE='Osge - Osage' OSMA_OSMANYA='Osma - Osmanya' 
PALM_PALMYRENE='Palm - Palmyrene' PAUC_PAU_CIN_HAU='Pauc - Pau Cin Hau' PERM_OLD_PERMIC='Perm - Old Permic' PHAG_PHAGSPA='Phag - Phags-pa' PHLI_INSCRIPTIONAL_PAHLAVI='Phli - Inscriptional Pahlavi' PHLP_PSALTER_PAHLAVI='Phlp - Psalter Pahlavi' PHLV_BOOK_PAHLAVI='Phlv - Book Pahlavi' PHNX_PHOENICIAN='Phnx - Phoenician' PIQD_KLINGONKLIP_IQA_D='Piqd - Klingon (KLI pIqaD)' PLRD_MIAO_POLLARD='Plrd - Miao (Pollard)' PRTI_INSCRIPTIONAL_PARTHIAN='Prti - Inscriptional Parthian' RJNG_REJANG_REDJANG_KAGANGA='Rjng - Rejang (Redjang, Kaganga)' RORO_RONGORONGO='Roro - Rongorongo' RUNR_RUNIC='Runr - Runic' SAMR_SAMARITAN='Samr - Samaritan' SARA_SARATI='Sara - Sarati' SARB_OLD_SOUTH_ARABIAN='Sarb - Old South Arabian' SAUR_SAURASHTRA='Saur - Saurashtra' SGNW_SIGN_WRITING='Sgnw - SignWriting' SHAW_SHAVIAN_SHAW='Shaw - Shavian (Shaw)' SHRD_SHARADAŚĀRADĀ='Shrd - Sharada, Śāradā' SIDD_SIDDHAM='Sidd - Siddham' SIND_KHUDAWADI_SINDHI='Sind - Khudawadi, Sindhi' SINH_SINHALA='Sinh - Sinhala' SORA_SORA_SOMPENG='Sora - Sora Sompeng' SUND_SUNDANESE='Sund - Sundanese' SYLO_SYLOTI_NAGRI='Sylo - Syloti Nagri' SYRC_SYRIAC='Syrc - Syriac' SYRE_SYRIAC_ESTRANGELOVARIANT='Syre - Syriac (Estrangelo variant)' SYRJ_SYRIAC_WESTERNVARIANT='Syrj - Syriac (Western variant)' SYRN_SYRIAC_EASTERNVARIANT='Syrn - Syriac (Eastern variant)' TAGB_TAGBANWA='Tagb - Tagbanwa' TAKR_TAKRI='Takr - Takri' TALE_TAI_LE='Tale - Tai Le' TALU_NEW_TAI_LUE='Talu - New Tai Lue' TAML_TAMIL='Taml - Tamil' TANG_TANGUT='Tang - Tangut' TAVT_TAI_VIET='Tavt - Tai Viet' TELU_TELUGU='Telu - Telugu' TENG_TENGWAR='Teng - Tengwar' TFNG_TIFINAGH_BERBER='Tfng - Tifinagh (Berber)' TGLG_TAGALOG_BAYBAYIN_ALIBATA='Tglg - Tagalog (Baybayin, Alibata)' THAA_THAANA='Thaa - Thaana' THAI_THAI='Thai - Thai' TIBT_TIBETAN='Tibt - Tibetan' TIRH_TIRHUTA='Tirh - Tirhuta' UGAR_UGARITIC='Ugar - Ugaritic' VAII_VAI='Vaii - Vai' VISP_VISIBLE_SPEECH='Visp - Visible Speech' WARA_WARANG_CITI_VARANG_KSHITI='Wara - Warang Citi (Varang Kshiti)' WOLE_WOLEAI='Wole - Woleai' XPEO_OLD_PERSIAN='Xpeo - Old Persian' XSUX_CUNEIFORM_SUMERO_AKKADIAN='Xsux - Cuneiform, Sumero-Akkadian' YIII_YI='Yiii - Yi' ZINH_CODEFORINHERITEDSCRIPT='Zinh - Code for inherited script' ZMTH_MATHEMATICALNOTATION='Zmth - Mathematical notation' ZSYE_SYMBOLS_EMOJIVARIANT='Zsye - Symbols (Emoji variant)' ZSYM_SYMBOLS='Zsym - Symbols' ZXXX_CODEFORUNWRITTENDOCUMENTS='Zxxx - Code for unwritten documents' ZYYY_CODEFORUNDETERMINEDSCRIPT='Zyyy - Code for undetermined script' ZZZZ_CODEFORUNCODEDSCRIPT='Zzzz - Code for uncoded script' OTHER='other' class TextDataTypeSimpleType(str, Enum): XSDDECIMAL='xsd:decimal' # Examples: "123.456", "+1234.456", "-1234.456", "-.456", "-456" XSDFLOAT='xsd:float' # Examples: "123.456", "+1234.456", "-1.2344e56", "-.45E-6", "INF", "-INF", "NaN" XSDINTEGER='xsd:integer' # Examples: "123456", "+00000012", "-1", "-456" XSDBOOLEAN='xsd:boolean' # Examples: "true", "false", "1", "0" XSDDATE='xsd:date' # Examples: "2001-10-26", "2001-10-26+02:00", "2001-10-26Z", "2001-10-26+00:00", "-2001-10-26", "-20000-04-01" XSDTIME='xsd:time' # Examples: "21:32:52", "21:32:52+02:00", "19:32:52Z", "19:32:52+00:00", "21:32:52.12679" XSDDATE_TIME='xsd:dateTime' # Examples: "2001-10-26T21:32:52", "2001-10-26T21:32:52+02:00", "2001-10-26T19:32:52Z", "2001-10-26T19:32:52+00:00", "-2001-10-26T21:32:52", "2001-10-26T21:32:52.12679" XSDSTRING='xsd:string' # Generic text string OTHER='other' # An XSD type that is not listed or a custom type (use dataTypeDetails attribute). 
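# ---------------------------------------------------------------------------
# A minimal usage sketch, kept as comments so it does not alter the module's
# behaviour. It assumes the module is importable as page_xml_draw.gends.page
# and that "example.xml" is a PAGE XML file whose root element is PcGts; the
# names used below (parsexml_, PcGtsType, factory, build, export,
# get_imageFilename) are all defined in this module.
#
#     from page_xml_draw.gends.page import parsexml_, PcGtsType
#
#     doc = parsexml_("example.xml")       # parse with lxml / ElementTree
#     root = doc.getroot()                 # the <PcGts> root element
#     pcgts = PcGtsType.factory()          # create an empty instance
#     pcgts.build(root)                    # populate it from the DOM tree
#     print(pcgts.get_imageFilename())     # Page/@imageFilename
#
#     import sys
#     pcgts.export(sys.stdout, 0, name_='PcGts')   # serialise back to XML
# ---------------------------------------------------------------------------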
class TextLineOrderSimpleType(str, Enum): TOPTOBOTTOM='top-to-bottom' BOTTOMTOTOP='bottom-to-top' LEFTTORIGHT='left-to-right' RIGHTTOLEFT='right-to-left' class TextTypeSimpleType(str, Enum): PARAGRAPH='paragraph' HEADING='heading' CAPTION='caption' HEADER='header' FOOTER='footer' PAGENUMBER='page-number' DROPCAPITAL='drop-capital' CREDIT='credit' FLOATING='floating' SIGNATUREMARK='signature-mark' CATCHWORD='catch-word' MARGINALIA='marginalia' FOOTNOTE='footnote' FOOTNOTECONTINUED='footnote-continued' ENDNOTE='endnote' TOCENTRY='TOC-entry' LISTLABEL='list-label' OTHER='other' class UnderlineStyleSimpleType(str, Enum): SINGLE_LINE='singleLine' DOUBLE_LINE='doubleLine' OTHER='other' class charTypeType(str, Enum): """Type of character represented by the grapheme, group, or non-printing character element.""" BASE='base' COMBINING='combining' class imageResolutionUnitType(str, Enum): """Specifies the unit of the resolution information referring to a standardised unit of measurement (pixels per inch, pixels per centimeter or other).""" PPI='PPI' PPCM='PPCM' OTHER='other' class typeType(str, Enum): """Type of metadata (e.g. author)""" AUTHOR='author' IMAGE_PROPERTIES='imageProperties' PROCESSING_STEP='processingStep' OTHER='other' class typeType1(str, Enum): LINK='link' JOIN='join' class typeType3(str, Enum): XSDSTRING='xsd:string' XSDINTEGER='xsd:integer' XSDBOOLEAN='xsd:boolean' XSDFLOAT='xsd:float' class PcGtsType(GeneratedsSuper): __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('pcGtsId', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('Metadata', 'MetadataType', 0, 0, {'name': 'Metadata', 'type': 'MetadataType'}, None), MemberSpec_('Page', 'PageType', 0, 0, {'name': 'Page', 'type': 'PageType'}, None), ] subclass = None superclass = None def __init__(self, pcGtsId=None, Metadata=None, Page=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.pcGtsId = _cast(None, pcGtsId) self.pcGtsId_nsprefix_ = None self.Metadata = Metadata self.Metadata_nsprefix_ = "pc" self.Page = Page self.Page_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, PcGtsType) if subclass is not None: return subclass(*args_, **kwargs_) if PcGtsType.subclass: return PcGtsType.subclass(*args_, **kwargs_) else: return PcGtsType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Metadata(self): return self.Metadata def set_Metadata(self, Metadata): self.Metadata = Metadata def get_Page(self): return self.Page def set_Page(self, Page): self.Page = Page def get_pcGtsId(self): return self.pcGtsId def set_pcGtsId(self, pcGtsId): self.pcGtsId = pcGtsId def hasContent_(self): if ( self.Metadata is not None or self.Page is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PcGtsType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('PcGtsType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'PcGtsType': name_ = self.original_tagname_ if UseCapturedNS_ and 
self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PcGtsType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PcGtsType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PcGtsType'): if self.pcGtsId is not None and 'pcGtsId' not in already_processed: already_processed.add('pcGtsId') outfile.write(' pcGtsId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pcGtsId), input_name='pcGtsId')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PcGtsType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.Metadata is not None: namespaceprefix_ = self.Metadata_nsprefix_ + ':' if (UseCapturedNS_ and self.Metadata_nsprefix_) else '' self.Metadata.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Metadata', pretty_print=pretty_print) if self.Page is not None: namespaceprefix_ = self.Page_nsprefix_ + ':' if (UseCapturedNS_ and self.Page_nsprefix_) else '' self.Page.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Page', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('pcGtsId', node) if value is not None and 'pcGtsId' not in already_processed: already_processed.add('pcGtsId') self.pcGtsId = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Metadata': obj_ = MetadataType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Metadata = obj_ obj_.original_tagname_ = 'Metadata' elif nodeName_ == 'Page': obj_ = PageType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Page = obj_ obj_.original_tagname_ = 'Page' def get_imageFilename(self): ''' Get image filename from root ''' return self.Page.imageFilename # end class PcGtsType class MetadataType(GeneratedsSuper): """External reference of any kind""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('externalRef', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('Creator', 'string', 0, 0, {'name': 'Creator', 'type': 'string'}, None), MemberSpec_('Created', 'dateTime', 0, 0, {'name': 'Created', 'type': 'dateTime'}, None), MemberSpec_('LastChange', 'dateTime', 0, 0, {'name': 'LastChange', 'type': 'dateTime'}, None), MemberSpec_('Comments', 'string', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Comments', 'type': 'string'}, None), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, 
{'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('MetadataItem', 'MetadataItemType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'MetadataItem', 'type': 'MetadataItemType'}, None), ] subclass = None superclass = None def __init__(self, externalRef=None, Creator=None, Created=None, LastChange=None, Comments=None, UserDefined=None, MetadataItem=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = None self.externalRef = _cast(None, externalRef) self.externalRef_nsprefix_ = None self.Creator = Creator self.Creator_nsprefix_ = None if isinstance(Created, BaseStrType_): initvalue_ = datetime_.datetime.strptime(Created, '%Y-%m-%dT%H:%M:%S') else: initvalue_ = Created self.Created = initvalue_ self.Created_nsprefix_ = None if isinstance(LastChange, BaseStrType_): initvalue_ = datetime_.datetime.strptime(LastChange, '%Y-%m-%dT%H:%M:%S') else: initvalue_ = LastChange self.LastChange = initvalue_ self.LastChange_nsprefix_ = None self.Comments = Comments self.Comments_nsprefix_ = None self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if MetadataItem is None: self.MetadataItem = [] else: self.MetadataItem = MetadataItem self.MetadataItem_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, MetadataType) if subclass is not None: return subclass(*args_, **kwargs_) if MetadataType.subclass: return MetadataType.subclass(*args_, **kwargs_) else: return MetadataType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Creator(self): return self.Creator def set_Creator(self, Creator): self.Creator = Creator def get_Created(self): return self.Created def set_Created(self, Created): self.Created = Created def get_LastChange(self): return self.LastChange def set_LastChange(self, LastChange): self.LastChange = LastChange def get_Comments(self): return self.Comments def set_Comments(self, Comments): self.Comments = Comments def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_MetadataItem(self): return self.MetadataItem def set_MetadataItem(self, MetadataItem): self.MetadataItem = MetadataItem def add_MetadataItem(self, value): self.MetadataItem.append(value) def insert_MetadataItem_at(self, index, value): self.MetadataItem.insert(index, value) def replace_MetadataItem_at(self, index, value): self.MetadataItem[index] = value def get_externalRef(self): return self.externalRef def set_externalRef(self, externalRef): self.externalRef = externalRef def hasContent_(self): if ( self.Creator is not None or self.Created is not None or self.LastChange is not None or self.Comments is not None or self.UserDefined is not None or self.MetadataItem ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15" xmlns:None="http://www.w3.org/2001/XMLSchema" ', name_='MetadataType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('MetadataType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if 
self.original_tagname_ is not None and name_ == 'MetadataType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MetadataType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MetadataType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MetadataType'): if self.externalRef is not None and 'externalRef' not in already_processed: already_processed.add('externalRef') outfile.write(' externalRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.externalRef), input_name='externalRef')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15" xmlns:None="http://www.w3.org/2001/XMLSchema" ', name_='MetadataType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.Creator is not None: namespaceprefix_ = self.Creator_nsprefix_ + ':' if (UseCapturedNS_ and self.Creator_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%sCreator>%s</%sCreator>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Creator), input_name='Creator')), namespaceprefix_ , eol_)) if self.Created is not None: namespaceprefix_ = self.Created_nsprefix_ + ':' if (UseCapturedNS_ and self.Created_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%sCreated>%s</%sCreated>%s' % (namespaceprefix_ , self.gds_format_datetime(self.Created, input_name='Created'), namespaceprefix_ , eol_)) if self.LastChange is not None: namespaceprefix_ = self.LastChange_nsprefix_ + ':' if (UseCapturedNS_ and self.LastChange_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%sLastChange>%s</%sLastChange>%s' % (namespaceprefix_ , self.gds_format_datetime(self.LastChange, input_name='LastChange'), namespaceprefix_ , eol_)) if self.Comments is not None: namespaceprefix_ = self.Comments_nsprefix_ + ':' if (UseCapturedNS_ and self.Comments_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%sComments>%s</%sComments>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Comments), input_name='Comments')), namespaceprefix_ , eol_)) if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for MetadataItem_ in self.MetadataItem: namespaceprefix_ = self.MetadataItem_nsprefix_ + ':' if (UseCapturedNS_ and self.MetadataItem_nsprefix_) else '' MetadataItem_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MetadataItem', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix 
self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('externalRef', node) if value is not None and 'externalRef' not in already_processed: already_processed.add('externalRef') self.externalRef = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Creator': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'Creator') value_ = self.gds_validate_string(value_, node, 'Creator') self.Creator = value_ self.Creator_nsprefix_ = child_.prefix elif nodeName_ == 'Created': sval_ = child_.text dval_ = self.gds_parse_datetime(sval_) self.Created = dval_ self.Created_nsprefix_ = child_.prefix elif nodeName_ == 'LastChange': sval_ = child_.text dval_ = self.gds_parse_datetime(sval_) self.LastChange = dval_ self.LastChange_nsprefix_ = child_.prefix elif nodeName_ == 'Comments': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'Comments') value_ = self.gds_validate_string(value_, node, 'Comments') self.Comments = value_ self.Comments_nsprefix_ = child_.prefix elif nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'MetadataItem': obj_ = MetadataItemType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.MetadataItem.append(obj_) obj_.original_tagname_ = 'MetadataItem' # end class MetadataType class MetadataItemType(GeneratedsSuper): """Type of metadata (e.g. author) E.g. imagePhotometricInterpretation E.g. 
RGB""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('type_', 'typeType', 0, 1, {'use': 'optional'}), MemberSpec_('name', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('value', 'string', 0, 0, {'use': 'required'}), MemberSpec_('date', 'dateTime', 0, 1, {'use': 'optional'}), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), ] subclass = None superclass = None def __init__(self, type_=None, name=None, value=None, date=None, Labels=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.name = _cast(None, name) self.name_nsprefix_ = None self.value = _cast(None, value) self.value_nsprefix_ = None if isinstance(date, BaseStrType_): initvalue_ = datetime_.datetime.strptime(date, '%Y-%m-%dT%H:%M:%S') else: initvalue_ = date self.date = initvalue_ if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, MetadataItemType) if subclass is not None: return subclass(*args_, **kwargs_) if MetadataItemType.subclass: return MetadataItemType.subclass(*args_, **kwargs_) else: return MetadataItemType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_name(self): return self.name def set_name(self, name): self.name = name def get_value(self): return self.value def set_value(self, value): self.value = value def get_date(self): return self.date def set_date(self, date): self.date = date def validate_typeType(self, value): # Validate type typeType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['author', 'imageProperties', 'processingStep', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on typeType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.Labels ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MetadataItemType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('MetadataItemType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'MetadataItemType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MetadataItemType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MetadataItemType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MetadataItemType'): if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) if self.date is not None and 'date' not in already_processed: already_processed.add('date') outfile.write(' date="%s"' % self.gds_format_datetime(self.date, input_name='date')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MetadataItemType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for 
child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_typeType(self.type_) # validate type typeType value = find_attr_value_('name', node) if value is not None and 'name' not in already_processed: already_processed.add('name') self.name = value value = find_attr_value_('value', node) if value is not None and 'value' not in already_processed: already_processed.add('value') self.value = value value = find_attr_value_('date', node) if value is not None and 'date' not in already_processed: already_processed.add('date') try: self.date = self.gds_parse_datetime(value) except ValueError as exp: raise ValueError('Bad date-time attribute (date): %s' % exp) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' # end class MetadataItemType class LabelsType(GeneratedsSuper): """Reference to external model / ontology / schema E.g. an RDF resource identifier (to be used as subject or object of an RDF triple) Prefix for all labels (e.g. first part of an URI)""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('externalModel', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('externalId', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('prefix', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('Label', 'LabelType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Label', 'type': 'LabelType'}, None), ] subclass = None superclass = None def __init__(self, externalModel=None, externalId=None, prefix=None, comments=None, Label=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.externalModel = _cast(None, externalModel) self.externalModel_nsprefix_ = None self.externalId = _cast(None, externalId) self.externalId_nsprefix_ = None self.prefix = _cast(None, prefix) self.prefix_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None if Label is None: self.Label = [] else: self.Label = Label self.Label_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, LabelsType) if subclass is not None: return subclass(*args_, **kwargs_) if LabelsType.subclass: return LabelsType.subclass(*args_, **kwargs_) else: return LabelsType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Label(self): return self.Label def set_Label(self, Label): self.Label = Label def add_Label(self, value): self.Label.append(value) def insert_Label_at(self, index, value): self.Label.insert(index, value) def replace_Label_at(self, index, value): self.Label[index] = value def get_externalModel(self): return self.externalModel def set_externalModel(self, externalModel): self.externalModel = externalModel def 
get_externalId(self): return self.externalId def set_externalId(self, externalId): self.externalId = externalId def get_prefix(self): return self.prefix def set_prefix(self, prefix): self.prefix = prefix def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def hasContent_(self): if ( self.Label ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LabelsType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('LabelsType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'LabelsType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LabelsType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LabelsType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LabelsType'): if self.externalModel is not None and 'externalModel' not in already_processed: already_processed.add('externalModel') outfile.write(' externalModel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.externalModel), input_name='externalModel')), )) if self.externalId is not None and 'externalId' not in already_processed: already_processed.add('externalId') outfile.write(' externalId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.externalId), input_name='externalId')), )) if self.prefix is not None and 'prefix' not in already_processed: already_processed.add('prefix') outfile.write(' prefix=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.prefix), input_name='prefix')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LabelsType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for Label_ in self.Label: namespaceprefix_ = self.Label_nsprefix_ + ':' if (UseCapturedNS_ and self.Label_nsprefix_) else '' Label_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Label', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = 
find_attr_value_('externalModel', node) if value is not None and 'externalModel' not in already_processed: already_processed.add('externalModel') self.externalModel = value value = find_attr_value_('externalId', node) if value is not None and 'externalId' not in already_processed: already_processed.add('externalId') self.externalId = value value = find_attr_value_('prefix', node) if value is not None and 'prefix' not in already_processed: already_processed.add('prefix') self.prefix = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Label': obj_ = LabelType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Label.append(obj_) obj_.original_tagname_ = 'Label' # end class LabelsType class LabelType(GeneratedsSuper): """Semantic label The label / tag (e.g. 'person'). Can be an RDF resource identifier (e.g. object of an RDF triple). Additional information on the label (e.g. 'YYYY-mm-dd' for a date label). Can be used as predicate of an RDF triple.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('value', 'string', 0, 0, {'use': 'required'}), MemberSpec_('type_', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), ] subclass = None superclass = None def __init__(self, value=None, type_=None, comments=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.value = _cast(None, value) self.value_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, LabelType) if subclass is not None: return subclass(*args_, **kwargs_) if LabelType.subclass: return LabelType.subclass(*args_, **kwargs_) else: return LabelType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_value(self): return self.value def set_value(self, value): self.value = value def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LabelType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('LabelType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'LabelType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LabelType') if 
self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LabelType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LabelType'): if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LabelType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('value', node) if value is not None and 'value' not in already_processed: already_processed.add('value') self.value = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class LabelType class PageType(GeneratedsSuper): """Contains the image file name including the file extension. Specifies the width of the image.Specifies the height of the image.Specifies the image resolution in width.Specifies the image resolution in height. Specifies the unit of the resolution information referring to a standardised unit of measurement (pixels per inch, pixels per centimeter or other). For generic use The angle the rectangle encapsulating the page (or its Border) has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). (The rotated image can be further referenced via “AlternativeImage”.) Range: -179.999,180 The type of the page within the document (e.g. cover page). The primary language used in the page (lower-level definitions override the page-level definition). The secondary language used in the page (lower-level definitions override the page-level definition). The primary script used in the page (lower-level definitions override the page-level definition). The secondary script used in the page (lower-level definitions override the page-level definition). 
The direction in which text within lines should be read (order of words and characters), in addition to “textLineOrder” (lower-level definitions override the page-level definition). The order of text lines within a block, in addition to “readingDirection” (lower-level definitions override the page-level definition). Confidence value for whole page (between 0 and 1)""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('imageFilename', 'string', 0, 0, {'use': 'required'}), MemberSpec_('imageWidth', 'int', 0, 0, {'use': 'required'}), MemberSpec_('imageHeight', 'int', 0, 0, {'use': 'required'}), MemberSpec_('imageXResolution', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('imageYResolution', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('imageResolutionUnit', 'imageResolutionUnitType', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'pc:PageTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('primaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('secondaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('primaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('secondaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('readingDirection', 'pc:ReadingDirectionSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('textLineOrder', 'pc:TextLineOrderSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None), MemberSpec_('Border', 'BorderType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Border', 'type': 'BorderType'}, None), MemberSpec_('PrintSpace', 'PrintSpaceType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'PrintSpace', 'type': 'PrintSpaceType'}, None), MemberSpec_('ReadingOrder', 'ReadingOrderType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'ReadingOrder', 'type': 'ReadingOrderType'}, None), MemberSpec_('Layers', 'LayersType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Layers', 'type': 'LayersType'}, None), MemberSpec_('Relations', 'RelationsType', 0, 1, {'minOccurs': '0', 'name': 'Relations', 'type': 'RelationsType'}, None), MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), MemberSpec_('TextRegion', 'TextRegionType', 1, 1, {'name': 'TextRegion', 'type': 'TextRegionType'}, 1), MemberSpec_('ImageRegion', 'ImageRegionType', 1, 1, {'name': 'ImageRegion', 'type': 'ImageRegionType'}, 1), MemberSpec_('LineDrawingRegion', 'LineDrawingRegionType', 1, 1, {'name': 'LineDrawingRegion', 'type': 'LineDrawingRegionType'}, 1), MemberSpec_('GraphicRegion', 'GraphicRegionType', 1, 1, {'name': 'GraphicRegion', 'type': 'GraphicRegionType'}, 1), MemberSpec_('TableRegion', 'TableRegionType', 1, 1, {'name': 'TableRegion', 'type': 'TableRegionType'}, 1), MemberSpec_('ChartRegion', 'ChartRegionType', 1, 1, {'name': 'ChartRegion', 'type': 
'ChartRegionType'}, 1), MemberSpec_('MapRegion', 'MapRegionType', 1, 1, {'name': 'MapRegion', 'type': 'MapRegionType'}, 1), MemberSpec_('SeparatorRegion', 'SeparatorRegionType', 1, 1, {'name': 'SeparatorRegion', 'type': 'SeparatorRegionType'}, 1), MemberSpec_('MathsRegion', 'MathsRegionType', 1, 1, {'name': 'MathsRegion', 'type': 'MathsRegionType'}, 1), MemberSpec_('ChemRegion', 'ChemRegionType', 1, 1, {'name': 'ChemRegion', 'type': 'ChemRegionType'}, 1), MemberSpec_('MusicRegion', 'MusicRegionType', 1, 1, {'name': 'MusicRegion', 'type': 'MusicRegionType'}, 1), MemberSpec_('AdvertRegion', 'AdvertRegionType', 1, 1, {'name': 'AdvertRegion', 'type': 'AdvertRegionType'}, 1), MemberSpec_('NoiseRegion', 'NoiseRegionType', 1, 1, {'name': 'NoiseRegion', 'type': 'NoiseRegionType'}, 1), MemberSpec_('UnknownRegion', 'UnknownRegionType', 1, 1, {'name': 'UnknownRegion', 'type': 'UnknownRegionType'}, 1), MemberSpec_('CustomRegion', 'CustomRegionType', 1, 1, {'name': 'CustomRegion', 'type': 'CustomRegionType'}, 1), ] subclass = None superclass = None def __init__(self, imageFilename=None, imageWidth=None, imageHeight=None, imageXResolution=None, imageYResolution=None, imageResolutionUnit=None, custom=None, orientation=None, type_=None, primaryLanguage=None, secondaryLanguage=None, primaryScript=None, secondaryScript=None, readingDirection=None, textLineOrder=None, conf=None, AlternativeImage=None, Border=None, PrintSpace=None, ReadingOrder=None, Layers=None, Relations=None, TextStyle=None, UserDefined=None, Labels=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, MapRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.imageFilename = _cast(None, imageFilename) self.imageFilename_nsprefix_ = None self.imageWidth = _cast(int, imageWidth) self.imageWidth_nsprefix_ = None self.imageHeight = _cast(int, imageHeight) self.imageHeight_nsprefix_ = None self.imageXResolution = _cast(float, imageXResolution) self.imageXResolution_nsprefix_ = None self.imageYResolution = _cast(float, imageYResolution) self.imageYResolution_nsprefix_ = None self.imageResolutionUnit = _cast(None, imageResolutionUnit) self.imageResolutionUnit_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.primaryLanguage = _cast(None, primaryLanguage) self.primaryLanguage_nsprefix_ = None self.secondaryLanguage = _cast(None, secondaryLanguage) self.secondaryLanguage_nsprefix_ = None self.primaryScript = _cast(None, primaryScript) self.primaryScript_nsprefix_ = None self.secondaryScript = _cast(None, secondaryScript) self.secondaryScript_nsprefix_ = None self.readingDirection = _cast(None, readingDirection) self.readingDirection_nsprefix_ = None self.textLineOrder = _cast(None, textLineOrder) self.textLineOrder_nsprefix_ = None self.conf = _cast(float, conf) self.conf_nsprefix_ = None if AlternativeImage is None: self.AlternativeImage = [] else: self.AlternativeImage = AlternativeImage self.AlternativeImage_nsprefix_ = "pc" self.Border = Border self.Border_nsprefix_ = "pc" self.PrintSpace = 
PrintSpace self.PrintSpace_nsprefix_ = "pc" self.ReadingOrder = ReadingOrder self.ReadingOrder_nsprefix_ = "pc" self.Layers = Layers self.Layers_nsprefix_ = "pc" self.Relations = Relations self.Relations_nsprefix_ = "pc" self.TextStyle = TextStyle self.TextStyle_nsprefix_ = "pc" self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" if TextRegion is None: self.TextRegion = [] else: self.TextRegion = TextRegion self.TextRegion_nsprefix_ = "pc" if ImageRegion is None: self.ImageRegion = [] else: self.ImageRegion = ImageRegion self.ImageRegion_nsprefix_ = "pc" if LineDrawingRegion is None: self.LineDrawingRegion = [] else: self.LineDrawingRegion = LineDrawingRegion self.LineDrawingRegion_nsprefix_ = "pc" if GraphicRegion is None: self.GraphicRegion = [] else: self.GraphicRegion = GraphicRegion self.GraphicRegion_nsprefix_ = "pc" if TableRegion is None: self.TableRegion = [] else: self.TableRegion = TableRegion self.TableRegion_nsprefix_ = "pc" if ChartRegion is None: self.ChartRegion = [] else: self.ChartRegion = ChartRegion self.ChartRegion_nsprefix_ = "pc" if MapRegion is None: self.MapRegion = [] else: self.MapRegion = MapRegion self.MapRegion_nsprefix_ = "pc" if SeparatorRegion is None: self.SeparatorRegion = [] else: self.SeparatorRegion = SeparatorRegion self.SeparatorRegion_nsprefix_ = "pc" if MathsRegion is None: self.MathsRegion = [] else: self.MathsRegion = MathsRegion self.MathsRegion_nsprefix_ = "pc" if ChemRegion is None: self.ChemRegion = [] else: self.ChemRegion = ChemRegion self.ChemRegion_nsprefix_ = "pc" if MusicRegion is None: self.MusicRegion = [] else: self.MusicRegion = MusicRegion self.MusicRegion_nsprefix_ = "pc" if AdvertRegion is None: self.AdvertRegion = [] else: self.AdvertRegion = AdvertRegion self.AdvertRegion_nsprefix_ = "pc" if NoiseRegion is None: self.NoiseRegion = [] else: self.NoiseRegion = NoiseRegion self.NoiseRegion_nsprefix_ = "pc" if UnknownRegion is None: self.UnknownRegion = [] else: self.UnknownRegion = UnknownRegion self.UnknownRegion_nsprefix_ = "pc" if CustomRegion is None: self.CustomRegion = [] else: self.CustomRegion = CustomRegion self.CustomRegion_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, PageType) if subclass is not None: return subclass(*args_, **kwargs_) if PageType.subclass: return PageType.subclass(*args_, **kwargs_) else: return PageType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_AlternativeImage(self): return self.AlternativeImage def set_AlternativeImage(self, AlternativeImage): self.AlternativeImage = AlternativeImage def add_AlternativeImage(self, value): self.AlternativeImage.append(value) def insert_AlternativeImage_at(self, index, value): self.AlternativeImage.insert(index, value) def replace_AlternativeImage_at(self, index, value): self.AlternativeImage[index] = value def get_Border(self): return self.Border def set_Border(self, Border): self.Border = Border def get_PrintSpace(self): return self.PrintSpace def set_PrintSpace(self, PrintSpace): self.PrintSpace = PrintSpace def get_ReadingOrder(self): return self.ReadingOrder def set_ReadingOrder(self, ReadingOrder): self.ReadingOrder = ReadingOrder def get_Layers(self): return self.Layers def set_Layers(self, Layers): self.Layers = Layers def 
get_Relations(self): return self.Relations def set_Relations(self, Relations): self.Relations = Relations def get_TextStyle(self): return self.TextStyle def set_TextStyle(self, TextStyle): self.TextStyle = TextStyle def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_TextRegion(self): return self.TextRegion def set_TextRegion(self, TextRegion): self.TextRegion = TextRegion def add_TextRegion(self, value): self.TextRegion.append(value) def insert_TextRegion_at(self, index, value): self.TextRegion.insert(index, value) def replace_TextRegion_at(self, index, value): self.TextRegion[index] = value def get_ImageRegion(self): return self.ImageRegion def set_ImageRegion(self, ImageRegion): self.ImageRegion = ImageRegion def add_ImageRegion(self, value): self.ImageRegion.append(value) def insert_ImageRegion_at(self, index, value): self.ImageRegion.insert(index, value) def replace_ImageRegion_at(self, index, value): self.ImageRegion[index] = value def get_LineDrawingRegion(self): return self.LineDrawingRegion def set_LineDrawingRegion(self, LineDrawingRegion): self.LineDrawingRegion = LineDrawingRegion def add_LineDrawingRegion(self, value): self.LineDrawingRegion.append(value) def insert_LineDrawingRegion_at(self, index, value): self.LineDrawingRegion.insert(index, value) def replace_LineDrawingRegion_at(self, index, value): self.LineDrawingRegion[index] = value def get_GraphicRegion(self): return self.GraphicRegion def set_GraphicRegion(self, GraphicRegion): self.GraphicRegion = GraphicRegion def add_GraphicRegion(self, value): self.GraphicRegion.append(value) def insert_GraphicRegion_at(self, index, value): self.GraphicRegion.insert(index, value) def replace_GraphicRegion_at(self, index, value): self.GraphicRegion[index] = value def get_TableRegion(self): return self.TableRegion def set_TableRegion(self, TableRegion): self.TableRegion = TableRegion def add_TableRegion(self, value): self.TableRegion.append(value) def insert_TableRegion_at(self, index, value): self.TableRegion.insert(index, value) def replace_TableRegion_at(self, index, value): self.TableRegion[index] = value def get_ChartRegion(self): return self.ChartRegion def set_ChartRegion(self, ChartRegion): self.ChartRegion = ChartRegion def add_ChartRegion(self, value): self.ChartRegion.append(value) def insert_ChartRegion_at(self, index, value): self.ChartRegion.insert(index, value) def replace_ChartRegion_at(self, index, value): self.ChartRegion[index] = value def get_MapRegion(self): return self.MapRegion def set_MapRegion(self, MapRegion): self.MapRegion = MapRegion def add_MapRegion(self, value): self.MapRegion.append(value) def insert_MapRegion_at(self, index, value): self.MapRegion.insert(index, value) def replace_MapRegion_at(self, index, value): self.MapRegion[index] = value def get_SeparatorRegion(self): return self.SeparatorRegion def set_SeparatorRegion(self, SeparatorRegion): self.SeparatorRegion = SeparatorRegion def add_SeparatorRegion(self, value): self.SeparatorRegion.append(value) def insert_SeparatorRegion_at(self, index, value): self.SeparatorRegion.insert(index, value) def replace_SeparatorRegion_at(self, index, value): self.SeparatorRegion[index] = value 
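    # The repeated-region accessors below (get_X / set_X / add_X / insert_X_at /
    # replace_X_at) all follow the same generated pattern and operate on the
    # corresponding Python lists.  Illustrative sketch only -- the file name and
    # region ids here are made up, and TextRegionType's full constructor is
    # defined elsewhere in this module:
    #
    #   page = PageType(imageFilename='0001.tif', imageWidth=2000, imageHeight=3000)
    #   page.add_TextRegion(TextRegionType(id='r1'))
    #   page.replace_TextRegion_at(0, TextRegionType(id='r1-corrected'))
    #   import sys; page.export(sys.stdout, 0, name_='Page')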
def get_MathsRegion(self): return self.MathsRegion def set_MathsRegion(self, MathsRegion): self.MathsRegion = MathsRegion def add_MathsRegion(self, value): self.MathsRegion.append(value) def insert_MathsRegion_at(self, index, value): self.MathsRegion.insert(index, value) def replace_MathsRegion_at(self, index, value): self.MathsRegion[index] = value def get_ChemRegion(self): return self.ChemRegion def set_ChemRegion(self, ChemRegion): self.ChemRegion = ChemRegion def add_ChemRegion(self, value): self.ChemRegion.append(value) def insert_ChemRegion_at(self, index, value): self.ChemRegion.insert(index, value) def replace_ChemRegion_at(self, index, value): self.ChemRegion[index] = value def get_MusicRegion(self): return self.MusicRegion def set_MusicRegion(self, MusicRegion): self.MusicRegion = MusicRegion def add_MusicRegion(self, value): self.MusicRegion.append(value) def insert_MusicRegion_at(self, index, value): self.MusicRegion.insert(index, value) def replace_MusicRegion_at(self, index, value): self.MusicRegion[index] = value def get_AdvertRegion(self): return self.AdvertRegion def set_AdvertRegion(self, AdvertRegion): self.AdvertRegion = AdvertRegion def add_AdvertRegion(self, value): self.AdvertRegion.append(value) def insert_AdvertRegion_at(self, index, value): self.AdvertRegion.insert(index, value) def replace_AdvertRegion_at(self, index, value): self.AdvertRegion[index] = value def get_NoiseRegion(self): return self.NoiseRegion def set_NoiseRegion(self, NoiseRegion): self.NoiseRegion = NoiseRegion def add_NoiseRegion(self, value): self.NoiseRegion.append(value) def insert_NoiseRegion_at(self, index, value): self.NoiseRegion.insert(index, value) def replace_NoiseRegion_at(self, index, value): self.NoiseRegion[index] = value def get_UnknownRegion(self): return self.UnknownRegion def set_UnknownRegion(self, UnknownRegion): self.UnknownRegion = UnknownRegion def add_UnknownRegion(self, value): self.UnknownRegion.append(value) def insert_UnknownRegion_at(self, index, value): self.UnknownRegion.insert(index, value) def replace_UnknownRegion_at(self, index, value): self.UnknownRegion[index] = value def get_CustomRegion(self): return self.CustomRegion def set_CustomRegion(self, CustomRegion): self.CustomRegion = CustomRegion def add_CustomRegion(self, value): self.CustomRegion.append(value) def insert_CustomRegion_at(self, index, value): self.CustomRegion.insert(index, value) def replace_CustomRegion_at(self, index, value): self.CustomRegion[index] = value def get_imageFilename(self): return self.imageFilename def set_imageFilename(self, imageFilename): self.imageFilename = imageFilename def get_imageWidth(self): return self.imageWidth def set_imageWidth(self, imageWidth): self.imageWidth = imageWidth def get_imageHeight(self): return self.imageHeight def set_imageHeight(self, imageHeight): self.imageHeight = imageHeight def get_imageXResolution(self): return self.imageXResolution def set_imageXResolution(self, imageXResolution): self.imageXResolution = imageXResolution def get_imageYResolution(self): return self.imageYResolution def set_imageYResolution(self, imageYResolution): self.imageYResolution = imageYResolution def get_imageResolutionUnit(self): return self.imageResolutionUnit def set_imageResolutionUnit(self, imageResolutionUnit): self.imageResolutionUnit = imageResolutionUnit def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = 
        orientation
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_primaryLanguage(self):
        return self.primaryLanguage
    def set_primaryLanguage(self, primaryLanguage):
        self.primaryLanguage = primaryLanguage
    def get_secondaryLanguage(self):
        return self.secondaryLanguage
    def set_secondaryLanguage(self, secondaryLanguage):
        self.secondaryLanguage = secondaryLanguage
    def get_primaryScript(self):
        return self.primaryScript
    def set_primaryScript(self, primaryScript):
        self.primaryScript = primaryScript
    def get_secondaryScript(self):
        return self.secondaryScript
    def set_secondaryScript(self, secondaryScript):
        self.secondaryScript = secondaryScript
    def get_readingDirection(self):
        return self.readingDirection
    def set_readingDirection(self, readingDirection):
        self.readingDirection = readingDirection
    def get_textLineOrder(self):
        return self.textLineOrder
    def set_textLineOrder(self, textLineOrder):
        self.textLineOrder = textLineOrder
    def get_conf(self):
        return self.conf
    def set_conf(self, conf):
        self.conf = conf
    def validate_imageResolutionUnitType(self, value):
        # Validate type imageResolutionUnitType, a restriction on string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['PPI', 'PPCM', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on imageResolutionUnitType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
    def validate_PageTypeSimpleType(self, value):
        # Validate type pc:PageTypeSimpleType, a restriction on string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['front-cover', 'back-cover', 'title', 'table-of-contents', 'index', 'content', 'blank', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on PageTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
    def validate_LanguageSimpleType(self, value):
        # Validate type pc:LanguageSimpleType, a restriction on string.
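        # As with the other generated validators, problems are only reported
        # through self.gds_collector_ (and only when Validate_simpletypes_ is
        # enabled); nothing is raised and the attribute value is kept as-is.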
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Abkhaz', 'Afar', 'Afrikaans', 'Akan', 'Albanian', 'Amharic', 'Arabic', 'Aragonese', 'Armenian', 'Assamese', 'Avaric', 'Avestan', 'Aymara', 'Azerbaijani', 'Bambara', 'Bashkir', 'Basque', 'Belarusian', 'Bengali', 'Bihari', 'Bislama', 'Bosnian', 'Breton', 'Bulgarian', 'Burmese', 'Cambodian', 'Cantonese', 'Catalan', 'Chamorro', 'Chechen', 'Chichewa', 'Chinese', 'Chuvash', 'Cornish', 'Corsican', 'Cree', 'Croatian', 'Czech', 'Danish', 'Divehi', 'Dutch', 'Dzongkha', 'English', 'Esperanto', 'Estonian', 'Ewe', 'Faroese', 'Fijian', 'Finnish', 'French', 'Fula', 'Gaelic', 'Galician', 'Ganda', 'Georgian', 'German', 'Greek', 'Guaraní', 'Gujarati', 'Haitian', 'Hausa', 'Hebrew', 'Herero', 'Hindi', 'Hiri Motu', 'Hungarian', 'Icelandic', 'Ido', 'Igbo', 'Indonesian', 'Interlingua', 'Interlingue', 'Inuktitut', 'Inupiaq', 'Irish', 'Italian', 'Japanese', 'Javanese', 'Kalaallisut', 'Kannada', 'Kanuri', 'Kashmiri', 'Kazakh', 'Khmer', 'Kikuyu', 'Kinyarwanda', 'Kirundi', 'Komi', 'Kongo', 'Korean', 'Kurdish', 'Kwanyama', 'Kyrgyz', 'Lao', 'Latin', 'Latvian', 'Limburgish', 'Lingala', 'Lithuanian', 'Luba-Katanga', 'Luxembourgish', 'Macedonian', 'Malagasy', 'Malay', 'Malayalam', 'Maltese', 'Manx', 'Māori', 'Marathi', 'Marshallese', 'Mongolian', 'Nauru', 'Navajo', 'Ndonga', 'Nepali', 'North Ndebele', 'Northern Sami', 'Norwegian', 'Norwegian Bokmål', 'Norwegian Nynorsk', 'Nuosu', 'Occitan', 'Ojibwe', 'Old Church Slavonic', 'Oriya', 'Oromo', 'Ossetian', 'Pāli', 'Panjabi', 'Pashto', 'Persian', 'Polish', 'Portuguese', 'Punjabi', 'Quechua', 'Romanian', 'Romansh', 'Russian', 'Samoan', 'Sango', 'Sanskrit', 'Sardinian', 'Serbian', 'Shona', 'Sindhi', 'Sinhala', 'Slovak', 'Slovene', 'Somali', 'South Ndebele', 'Southern Sotho', 'Spanish', 'Sundanese', 'Swahili', 'Swati', 'Swedish', 'Tagalog', 'Tahitian', 'Tajik', 'Tamil', 'Tatar', 'Telugu', 'Thai', 'Tibetan', 'Tigrinya', 'Tonga', 'Tsonga', 'Tswana', 'Turkish', 'Turkmen', 'Twi', 'Uighur', 'Ukrainian', 'Urdu', 'Uzbek', 'Venda', 'Vietnamese', 'Volapük', 'Walloon', 'Welsh', 'Western Frisian', 'Wolof', 'Xhosa', 'Yiddish', 'Yoruba', 'Zhuang', 'Zulu', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LanguageSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ScriptSimpleType(self, value): # Validate type pc:ScriptSimpleType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha (Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 
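            # (script entries continue below; they appear to follow the
            # ISO 15924 "Code - Name" convention)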
'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ReadingDirectionSimpleType(self, value): # Validate type pc:ReadingDirectionSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['left-to-right', 'right-to-left', 'top-to-bottom', 'bottom-to-top'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReadingDirectionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_TextLineOrderSimpleType(self, value): # Validate type pc:TextLineOrderSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['top-to-bottom', 'bottom-to-top', 'left-to-right', 'right-to-left'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on TextLineOrderSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ConfSimpleType(self, value): # Validate type pc:ConfSimpleType, a restriction on float. 
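        # conf is an xsd:float restricted to the inclusive range 0..1; values
        # outside that range are reported via gds_collector_ below rather than
        # rejected.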
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, float): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) return False if value < 0: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False if value > 1: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False def hasContent_(self): if ( self.AlternativeImage or self.Border is not None or self.PrintSpace is not None or self.ReadingOrder is not None or self.Layers is not None or self.Relations is not None or self.TextStyle is not None or self.UserDefined is not None or self.Labels or self.TextRegion or self.ImageRegion or self.LineDrawingRegion or self.GraphicRegion or self.TableRegion or self.ChartRegion or self.MapRegion or self.SeparatorRegion or self.MathsRegion or self.ChemRegion or self.MusicRegion or self.AdvertRegion or self.NoiseRegion or self.UnknownRegion or self.CustomRegion ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PageType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('PageType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'PageType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PageType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PageType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PageType'): if self.imageFilename is not None and 'imageFilename' not in already_processed: already_processed.add('imageFilename') outfile.write(' imageFilename=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.imageFilename), input_name='imageFilename')), )) if self.imageWidth is not None and 'imageWidth' not in already_processed: already_processed.add('imageWidth') outfile.write(' imageWidth="%s"' % self.gds_format_integer(self.imageWidth, input_name='imageWidth')) if self.imageHeight is not None and 'imageHeight' not in already_processed: already_processed.add('imageHeight') outfile.write(' imageHeight="%s"' % self.gds_format_integer(self.imageHeight, input_name='imageHeight')) if self.imageXResolution is not None and 'imageXResolution' not in already_processed: already_processed.add('imageXResolution') outfile.write(' imageXResolution="%s"' % self.gds_format_float(self.imageXResolution, input_name='imageXResolution')) if 
self.imageYResolution is not None and 'imageYResolution' not in already_processed: already_processed.add('imageYResolution') outfile.write(' imageYResolution="%s"' % self.gds_format_float(self.imageYResolution, input_name='imageYResolution')) if self.imageResolutionUnit is not None and 'imageResolutionUnit' not in already_processed: already_processed.add('imageResolutionUnit') outfile.write(' imageResolutionUnit=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.imageResolutionUnit), input_name='imageResolutionUnit')), )) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.primaryLanguage is not None and 'primaryLanguage' not in already_processed: already_processed.add('primaryLanguage') outfile.write(' primaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryLanguage), input_name='primaryLanguage')), )) if self.secondaryLanguage is not None and 'secondaryLanguage' not in already_processed: already_processed.add('secondaryLanguage') outfile.write(' secondaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryLanguage), input_name='secondaryLanguage')), )) if self.primaryScript is not None and 'primaryScript' not in already_processed: already_processed.add('primaryScript') outfile.write(' primaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryScript), input_name='primaryScript')), )) if self.secondaryScript is not None and 'secondaryScript' not in already_processed: already_processed.add('secondaryScript') outfile.write(' secondaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryScript), input_name='secondaryScript')), )) if self.readingDirection is not None and 'readingDirection' not in already_processed: already_processed.add('readingDirection') outfile.write(' readingDirection=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.readingDirection), input_name='readingDirection')), )) if self.textLineOrder is not None and 'textLineOrder' not in already_processed: already_processed.add('textLineOrder') outfile.write(' textLineOrder=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.textLineOrder), input_name='textLineOrder')), )) if self.conf is not None and 'conf' not in already_processed: already_processed.add('conf') outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PageType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for AlternativeImage_ in self.AlternativeImage: namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else '' AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', 
pretty_print=pretty_print) if self.Border is not None: namespaceprefix_ = self.Border_nsprefix_ + ':' if (UseCapturedNS_ and self.Border_nsprefix_) else '' self.Border.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Border', pretty_print=pretty_print) if self.PrintSpace is not None: namespaceprefix_ = self.PrintSpace_nsprefix_ + ':' if (UseCapturedNS_ and self.PrintSpace_nsprefix_) else '' self.PrintSpace.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PrintSpace', pretty_print=pretty_print) if self.ReadingOrder is not None: namespaceprefix_ = self.ReadingOrder_nsprefix_ + ':' if (UseCapturedNS_ and self.ReadingOrder_nsprefix_) else '' self.ReadingOrder.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ReadingOrder', pretty_print=pretty_print) if self.Layers is not None: namespaceprefix_ = self.Layers_nsprefix_ + ':' if (UseCapturedNS_ and self.Layers_nsprefix_) else '' self.Layers.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Layers', pretty_print=pretty_print) if self.Relations is not None: namespaceprefix_ = self.Relations_nsprefix_ + ':' if (UseCapturedNS_ and self.Relations_nsprefix_) else '' self.Relations.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Relations', pretty_print=pretty_print) if self.TextStyle is not None: namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else '' self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print) if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) for TextRegion_ in self.TextRegion: namespaceprefix_ = self.TextRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.TextRegion_nsprefix_) else '' TextRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextRegion', pretty_print=pretty_print) for ImageRegion_ in self.ImageRegion: namespaceprefix_ = self.ImageRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageRegion_nsprefix_) else '' ImageRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageRegion', pretty_print=pretty_print) for LineDrawingRegion_ in self.LineDrawingRegion: namespaceprefix_ = self.LineDrawingRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.LineDrawingRegion_nsprefix_) else '' LineDrawingRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LineDrawingRegion', pretty_print=pretty_print) for GraphicRegion_ in self.GraphicRegion: namespaceprefix_ = self.GraphicRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.GraphicRegion_nsprefix_) else '' GraphicRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GraphicRegion', pretty_print=pretty_print) for TableRegion_ in self.TableRegion: namespaceprefix_ = self.TableRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.TableRegion_nsprefix_) else '' TableRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TableRegion', pretty_print=pretty_print) for ChartRegion_ in self.ChartRegion: namespaceprefix_ = self.ChartRegion_nsprefix_ + ':' if 
(UseCapturedNS_ and self.ChartRegion_nsprefix_) else '' ChartRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChartRegion', pretty_print=pretty_print) for MapRegion_ in self.MapRegion: namespaceprefix_ = self.MapRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MapRegion_nsprefix_) else '' MapRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MapRegion', pretty_print=pretty_print) for SeparatorRegion_ in self.SeparatorRegion: namespaceprefix_ = self.SeparatorRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.SeparatorRegion_nsprefix_) else '' SeparatorRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SeparatorRegion', pretty_print=pretty_print) for MathsRegion_ in self.MathsRegion: namespaceprefix_ = self.MathsRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MathsRegion_nsprefix_) else '' MathsRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MathsRegion', pretty_print=pretty_print) for ChemRegion_ in self.ChemRegion: namespaceprefix_ = self.ChemRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ChemRegion_nsprefix_) else '' ChemRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChemRegion', pretty_print=pretty_print) for MusicRegion_ in self.MusicRegion: namespaceprefix_ = self.MusicRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MusicRegion_nsprefix_) else '' MusicRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MusicRegion', pretty_print=pretty_print) for AdvertRegion_ in self.AdvertRegion: namespaceprefix_ = self.AdvertRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.AdvertRegion_nsprefix_) else '' AdvertRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AdvertRegion', pretty_print=pretty_print) for NoiseRegion_ in self.NoiseRegion: namespaceprefix_ = self.NoiseRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.NoiseRegion_nsprefix_) else '' NoiseRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NoiseRegion', pretty_print=pretty_print) for UnknownRegion_ in self.UnknownRegion: namespaceprefix_ = self.UnknownRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.UnknownRegion_nsprefix_) else '' UnknownRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnknownRegion', pretty_print=pretty_print) for CustomRegion_ in self.CustomRegion: namespaceprefix_ = self.CustomRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomRegion_nsprefix_) else '' CustomRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CustomRegion', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('imageFilename', node) if value is not None and 'imageFilename' not in already_processed: already_processed.add('imageFilename') self.imageFilename = value value = find_attr_value_('imageWidth', node) if value is not None and 'imageWidth' not in already_processed: already_processed.add('imageWidth') self.imageWidth = self.gds_parse_integer(value, node, 'imageWidth') value = find_attr_value_('imageHeight', node) if value is 
not None and 'imageHeight' not in already_processed: already_processed.add('imageHeight') self.imageHeight = self.gds_parse_integer(value, node, 'imageHeight') value = find_attr_value_('imageXResolution', node) if value is not None and 'imageXResolution' not in already_processed: already_processed.add('imageXResolution') value = self.gds_parse_float(value, node, 'imageXResolution') self.imageXResolution = value value = find_attr_value_('imageYResolution', node) if value is not None and 'imageYResolution' not in already_processed: already_processed.add('imageYResolution') value = self.gds_parse_float(value, node, 'imageYResolution') self.imageYResolution = value value = find_attr_value_('imageResolutionUnit', node) if value is not None and 'imageResolutionUnit' not in already_processed: already_processed.add('imageResolutionUnit') self.imageResolutionUnit = value self.validate_imageResolutionUnitType(self.imageResolutionUnit) # validate type imageResolutionUnitType value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_PageTypeSimpleType(self.type_) # validate type PageTypeSimpleType value = find_attr_value_('primaryLanguage', node) if value is not None and 'primaryLanguage' not in already_processed: already_processed.add('primaryLanguage') self.primaryLanguage = value self.validate_LanguageSimpleType(self.primaryLanguage) # validate type LanguageSimpleType value = find_attr_value_('secondaryLanguage', node) if value is not None and 'secondaryLanguage' not in already_processed: already_processed.add('secondaryLanguage') self.secondaryLanguage = value self.validate_LanguageSimpleType(self.secondaryLanguage) # validate type LanguageSimpleType value = find_attr_value_('primaryScript', node) if value is not None and 'primaryScript' not in already_processed: already_processed.add('primaryScript') self.primaryScript = value self.validate_ScriptSimpleType(self.primaryScript) # validate type ScriptSimpleType value = find_attr_value_('secondaryScript', node) if value is not None and 'secondaryScript' not in already_processed: already_processed.add('secondaryScript') self.secondaryScript = value self.validate_ScriptSimpleType(self.secondaryScript) # validate type ScriptSimpleType value = find_attr_value_('readingDirection', node) if value is not None and 'readingDirection' not in already_processed: already_processed.add('readingDirection') self.readingDirection = value self.validate_ReadingDirectionSimpleType(self.readingDirection) # validate type ReadingDirectionSimpleType value = find_attr_value_('textLineOrder', node) if value is not None and 'textLineOrder' not in already_processed: already_processed.add('textLineOrder') self.textLineOrder = value self.validate_TextLineOrderSimpleType(self.textLineOrder) # validate type TextLineOrderSimpleType value = find_attr_value_('conf', node) if value is not None and 'conf' not in already_processed: already_processed.add('conf') value = self.gds_parse_float(value, node, 'conf') self.conf = value self.validate_ConfSimpleType(self.conf) # validate type ConfSimpleType def 
buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'AlternativeImage': obj_ = AlternativeImageType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.AlternativeImage.append(obj_) obj_.original_tagname_ = 'AlternativeImage' elif nodeName_ == 'Border': obj_ = BorderType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Border = obj_ obj_.original_tagname_ = 'Border' elif nodeName_ == 'PrintSpace': obj_ = PrintSpaceType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.PrintSpace = obj_ obj_.original_tagname_ = 'PrintSpace' elif nodeName_ == 'ReadingOrder': obj_ = ReadingOrderType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.ReadingOrder = obj_ obj_.original_tagname_ = 'ReadingOrder' elif nodeName_ == 'Layers': obj_ = LayersType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Layers = obj_ obj_.original_tagname_ = 'Layers' elif nodeName_ == 'Relations': obj_ = RelationsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Relations = obj_ obj_.original_tagname_ = 'Relations' elif nodeName_ == 'TextStyle': obj_ = TextStyleType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextStyle = obj_ obj_.original_tagname_ = 'TextStyle' elif nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' elif nodeName_ == 'TextRegion': obj_ = TextRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextRegion.append(obj_) obj_.original_tagname_ = 'TextRegion' elif nodeName_ == 'ImageRegion': obj_ = ImageRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.ImageRegion.append(obj_) obj_.original_tagname_ = 'ImageRegion' elif nodeName_ == 'LineDrawingRegion': obj_ = LineDrawingRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.LineDrawingRegion.append(obj_) obj_.original_tagname_ = 'LineDrawingRegion' elif nodeName_ == 'GraphicRegion': obj_ = GraphicRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.GraphicRegion.append(obj_) obj_.original_tagname_ = 'GraphicRegion' elif nodeName_ == 'TableRegion': obj_ = TableRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TableRegion.append(obj_) obj_.original_tagname_ = 'TableRegion' elif nodeName_ == 'ChartRegion': obj_ = ChartRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.ChartRegion.append(obj_) obj_.original_tagname_ = 'ChartRegion' elif nodeName_ == 'MapRegion': obj_ = MapRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.MapRegion.append(obj_) obj_.original_tagname_ = 'MapRegion' elif nodeName_ == 'SeparatorRegion': obj_ = SeparatorRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.SeparatorRegion.append(obj_) obj_.original_tagname_ = 'SeparatorRegion' elif nodeName_ == 'MathsRegion': obj_ = 
MathsRegionType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.MathsRegion.append(obj_)
            obj_.original_tagname_ = 'MathsRegion'
        elif nodeName_ == 'ChemRegion':
            obj_ = ChemRegionType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.ChemRegion.append(obj_)
            obj_.original_tagname_ = 'ChemRegion'
        elif nodeName_ == 'MusicRegion':
            obj_ = MusicRegionType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.MusicRegion.append(obj_)
            obj_.original_tagname_ = 'MusicRegion'
        elif nodeName_ == 'AdvertRegion':
            obj_ = AdvertRegionType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.AdvertRegion.append(obj_)
            obj_.original_tagname_ = 'AdvertRegion'
        elif nodeName_ == 'NoiseRegion':
            obj_ = NoiseRegionType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.NoiseRegion.append(obj_)
            obj_.original_tagname_ = 'NoiseRegion'
        elif nodeName_ == 'UnknownRegion':
            obj_ = UnknownRegionType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UnknownRegion.append(obj_)
            obj_.original_tagname_ = 'UnknownRegion'
        elif nodeName_ == 'CustomRegion':
            obj_ = CustomRegionType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.CustomRegion.append(obj_)
            obj_.original_tagname_ = 'CustomRegion'

    def get_polygon(self):
        ''' Get polygon from Page element (whole image) '''
        x0y0 = [0, 0]
        x1y0 = [self.imageWidth - 1, 0]
        x1y1 = [self.imageWidth - 1, self.imageHeight - 1]
        x0y1 = [0, self.imageHeight - 1]
        return [x0y0, x1y0, x1y1, x0y1, x0y0]

    def get_polygon_string(self):
        ''' Get polygon string from Page element (whole image) '''
        x0y0 = '0,0'
        x1y0 = str(self.imageWidth - 1) + ',0'
        x1y1 = str(self.imageWidth - 1) + ',' + str(self.imageHeight - 1)
        x0y1 = '0,' + str(self.imageHeight - 1)
        return ','.join([x0y0, x1y0, x1y1, x0y1, x0y0])
# end class PageType


class CoordsType(GeneratedsSuper):
    """Polygon outline of the element as a path of points. No points may lie
    outside the outline of its parent, which in the case of Border is the
    bounding rectangle of the root image. Paths are closed by convention,
    i.e. the last point logically connects with the first (and at least 3
    points are required to span an area). Paths must be planar (i.e. must
    not self-intersect).
Confidence value (between 0 and 1)""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('points', 'pc:PointsType', 0, 0, {'use': 'required'}), MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}), ] subclass = None superclass = None def __init__(self, points=None, conf=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.points = _cast(None, points) self.points_nsprefix_ = None self.conf = _cast(float, conf) self.conf_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, CoordsType) if subclass is not None: return subclass(*args_, **kwargs_) if CoordsType.subclass: return CoordsType.subclass(*args_, **kwargs_) else: return CoordsType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_points(self): return self.points def set_points(self, points): self.points = points def get_conf(self): return self.conf def set_conf(self, conf): self.conf = conf def validate_PointsType(self, value): # Validate type pc:PointsType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False if not self.gds_validate_simple_patterns( self.validate_PointsType_patterns_, value): self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_PointsType_patterns_, )) validate_PointsType_patterns_ = [['^(([0-9]+,[0-9]+ )+([0-9]+,[0-9]+))$']] def validate_ConfSimpleType(self, value): # Validate type pc:ConfSimpleType, a restriction on float. 
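        # Editor's note (hedged): per the schema, ConfSimpleType is a float
        # confidence restricted to the closed interval [0, 1]. The checks in
        # the body below only report violations through gds_collector_ when a
        # collector is attached; they do not clamp or reject the value.
        # A minimal illustrative use, with a hypothetical `coords` instance:
        #
        #     coords = CoordsType(points='0,0 10,0 10,5 0,5', conf=0.87)
        #     coords.validate_ConfSimpleType(coords.get_conf())  # 0.87 is in range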
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, float): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) return False if value < 0: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False if value > 1: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='CoordsType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('CoordsType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'CoordsType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CoordsType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CoordsType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CoordsType'): if self.points is not None and 'points' not in already_processed: already_processed.add('points') outfile.write(' points=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.points), input_name='points')), )) if self.conf is not None and 'conf' not in already_processed: already_processed.add('conf') outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='CoordsType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('points', node) if value is not None and 'points' not in already_processed: already_processed.add('points') self.points = value self.validate_PointsType(self.points) # validate type PointsType value = find_attr_value_('conf', node) if value is not None and 'conf' not in already_processed: already_processed.add('conf') value = self.gds_parse_float(value, node, 'conf') self.conf = value self.validate_ConfSimpleType(self.conf) # 
validate type ConfSimpleType def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class CoordsType class TextLineType(GeneratedsSuper): """Overrides primaryLanguage attribute of parent text region The primary script used in the text line The secondary script used in the text line The direction in which text within the line should be read (order of words and characters). Overrides the production attribute of the parent text region For generic use Position (order number) of this text line within the parent text region.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('primaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('primaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('secondaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('readingDirection', 'pc:ReadingDirectionSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('production', 'pc:ProductionSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('index', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None), MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None), MemberSpec_('Baseline', 'BaselineType', 0, 1, {'minOccurs': '0', 'name': 'Baseline', 'type': 'BaselineType'}, None), MemberSpec_('Word', 'WordType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Word', 'type': 'WordType'}, None), MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None), MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), ] subclass = None superclass = None def __init__(self, id=None, primaryLanguage=None, primaryScript=None, secondaryScript=None, readingDirection=None, production=None, custom=None, comments=None, index=None, AlternativeImage=None, Coords=None, Baseline=None, Word=None, TextEquiv=None, TextStyle=None, UserDefined=None, Labels=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.primaryLanguage = _cast(None, primaryLanguage) self.primaryLanguage_nsprefix_ = None self.primaryScript = _cast(None, primaryScript) self.primaryScript_nsprefix_ = None self.secondaryScript = _cast(None, secondaryScript) self.secondaryScript_nsprefix_ = None self.readingDirection = _cast(None, readingDirection) self.readingDirection_nsprefix_ = None self.production = _cast(None, production) self.production_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None self.index = _cast(int, index) 
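        # Editor's note (hedged sketch): a TextLineType aggregates a required
        # Coords outline plus optional Baseline, Word and TextEquiv children.
        # Building one programmatically might look like the following; the
        # object names are illustrative only, and TextEquivType/BaselineType
        # are assumed to accept these keywords as defined elsewhere in this
        # module:
        #
        #     line = TextLineType(id='l001',
        #                         Coords=CoordsType(points='0,0 100,0 100,20 0,20'))
        #     line.set_Baseline(BaselineType(points='0,18 100,18'))
        #     line.add_TextEquiv(TextEquivType(Unicode='example text'))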
self.index_nsprefix_ = None if AlternativeImage is None: self.AlternativeImage = [] else: self.AlternativeImage = AlternativeImage self.AlternativeImage_nsprefix_ = "pc" self.Coords = Coords self.Coords_nsprefix_ = "pc" self.Baseline = Baseline self.Baseline_nsprefix_ = "pc" if Word is None: self.Word = [] else: self.Word = Word self.Word_nsprefix_ = "pc" if TextEquiv is None: self.TextEquiv = [] else: self.TextEquiv = TextEquiv self.TextEquiv_nsprefix_ = "pc" self.TextStyle = TextStyle self.TextStyle_nsprefix_ = "pc" self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, TextLineType) if subclass is not None: return subclass(*args_, **kwargs_) if TextLineType.subclass: return TextLineType.subclass(*args_, **kwargs_) else: return TextLineType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_AlternativeImage(self): return self.AlternativeImage def set_AlternativeImage(self, AlternativeImage): self.AlternativeImage = AlternativeImage def add_AlternativeImage(self, value): self.AlternativeImage.append(value) def insert_AlternativeImage_at(self, index, value): self.AlternativeImage.insert(index, value) def replace_AlternativeImage_at(self, index, value): self.AlternativeImage[index] = value def get_Coords(self): return self.Coords def set_Coords(self, Coords): self.Coords = Coords def get_Baseline(self): return self.Baseline def set_Baseline(self, Baseline): self.Baseline = Baseline def get_Word(self): return self.Word def set_Word(self, Word): self.Word = Word def add_Word(self, value): self.Word.append(value) def insert_Word_at(self, index, value): self.Word.insert(index, value) def replace_Word_at(self, index, value): self.Word[index] = value def get_TextEquiv(self): return self.TextEquiv def set_TextEquiv(self, TextEquiv): self.TextEquiv = TextEquiv def add_TextEquiv(self, value): self.TextEquiv.append(value) def insert_TextEquiv_at(self, index, value): self.TextEquiv.insert(index, value) def replace_TextEquiv_at(self, index, value): self.TextEquiv[index] = value def get_TextStyle(self): return self.TextStyle def set_TextStyle(self, TextStyle): self.TextStyle = TextStyle def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_primaryLanguage(self): return self.primaryLanguage def set_primaryLanguage(self, primaryLanguage): self.primaryLanguage = primaryLanguage def get_primaryScript(self): return self.primaryScript def set_primaryScript(self, primaryScript): self.primaryScript = primaryScript def get_secondaryScript(self): return self.secondaryScript def set_secondaryScript(self, secondaryScript): self.secondaryScript = secondaryScript def get_readingDirection(self): return self.readingDirection def set_readingDirection(self, readingDirection): self.readingDirection = readingDirection def get_production(self): 
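        # Editor's note (hedged): generateDS emits paired get_*/set_* accessors
        # for every member; list-valued members additionally get add_*,
        # insert_*_at and replace_*_at helpers, as seen above for Word,
        # TextEquiv and Labels. Typical read access, assuming a parsed
        # hypothetical `line` instance:
        #
        #     for word in line.get_Word():
        #         if word.get_TextEquiv():
        #             print(word.get_TextEquiv()[0].get_Unicode())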
return self.production def set_production(self, production): self.production = production def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def get_index(self): return self.index def set_index(self, index): self.index = index def validate_LanguageSimpleType(self, value): # Validate type pc:LanguageSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Abkhaz', 'Afar', 'Afrikaans', 'Akan', 'Albanian', 'Amharic', 'Arabic', 'Aragonese', 'Armenian', 'Assamese', 'Avaric', 'Avestan', 'Aymara', 'Azerbaijani', 'Bambara', 'Bashkir', 'Basque', 'Belarusian', 'Bengali', 'Bihari', 'Bislama', 'Bosnian', 'Breton', 'Bulgarian', 'Burmese', 'Cambodian', 'Cantonese', 'Catalan', 'Chamorro', 'Chechen', 'Chichewa', 'Chinese', 'Chuvash', 'Cornish', 'Corsican', 'Cree', 'Croatian', 'Czech', 'Danish', 'Divehi', 'Dutch', 'Dzongkha', 'English', 'Esperanto', 'Estonian', 'Ewe', 'Faroese', 'Fijian', 'Finnish', 'French', 'Fula', 'Gaelic', 'Galician', 'Ganda', 'Georgian', 'German', 'Greek', 'Guaraní', 'Gujarati', 'Haitian', 'Hausa', 'Hebrew', 'Herero', 'Hindi', 'Hiri Motu', 'Hungarian', 'Icelandic', 'Ido', 'Igbo', 'Indonesian', 'Interlingua', 'Interlingue', 'Inuktitut', 'Inupiaq', 'Irish', 'Italian', 'Japanese', 'Javanese', 'Kalaallisut', 'Kannada', 'Kanuri', 'Kashmiri', 'Kazakh', 'Khmer', 'Kikuyu', 'Kinyarwanda', 'Kirundi', 'Komi', 'Kongo', 'Korean', 'Kurdish', 'Kwanyama', 'Kyrgyz', 'Lao', 'Latin', 'Latvian', 'Limburgish', 'Lingala', 'Lithuanian', 'Luba-Katanga', 'Luxembourgish', 'Macedonian', 'Malagasy', 'Malay', 'Malayalam', 'Maltese', 'Manx', 'Māori', 'Marathi', 'Marshallese', 'Mongolian', 'Nauru', 'Navajo', 'Ndonga', 'Nepali', 'North Ndebele', 'Northern Sami', 'Norwegian', 'Norwegian Bokmål', 'Norwegian Nynorsk', 'Nuosu', 'Occitan', 'Ojibwe', 'Old Church Slavonic', 'Oriya', 'Oromo', 'Ossetian', 'Pāli', 'Panjabi', 'Pashto', 'Persian', 'Polish', 'Portuguese', 'Punjabi', 'Quechua', 'Romanian', 'Romansh', 'Russian', 'Samoan', 'Sango', 'Sanskrit', 'Sardinian', 'Serbian', 'Shona', 'Sindhi', 'Sinhala', 'Slovak', 'Slovene', 'Somali', 'South Ndebele', 'Southern Sotho', 'Spanish', 'Sundanese', 'Swahili', 'Swati', 'Swedish', 'Tagalog', 'Tahitian', 'Tajik', 'Tamil', 'Tatar', 'Telugu', 'Thai', 'Tibetan', 'Tigrinya', 'Tonga', 'Tsonga', 'Tswana', 'Turkish', 'Turkmen', 'Twi', 'Uighur', 'Ukrainian', 'Urdu', 'Uzbek', 'Venda', 'Vietnamese', 'Volapük', 'Walloon', 'Welsh', 'Western Frisian', 'Wolof', 'Xhosa', 'Yiddish', 'Yoruba', 'Zhuang', 'Zulu', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LanguageSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ScriptSimpleType(self, value): # Validate type pc:ScriptSimpleType, a restriction on string. 
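        # Editor's note (hedged): the enumeration below encodes scripts as
        # 'Code - Name' pairs largely following ISO 15924 (e.g. 'Latn - Latin',
        # 'Grek - Greek'), plus the catch-all 'other'. The full string must
        # match, so, illustratively, 'Latn - Latin' passes while a bare 'Latn'
        # would be reported by validate_ScriptSimpleType when a collector is
        # attached:
        #
        #     line.set_primaryScript('Latn - Latin')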
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha (Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 
'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ReadingDirectionSimpleType(self, value): # Validate type pc:ReadingDirectionSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['left-to-right', 'right-to-left', 'top-to-bottom', 'bottom-to-top'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReadingDirectionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ProductionSimpleType(self, value): # Validate type pc:ProductionSimpleType, a restriction on string. 
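        # Editor's note (hedged): 'production' describes how the text was
        # produced and, per the class docstring, overrides the attribute of the
        # parent text region. Only the six strings enumerated below are
        # allowed. Illustrative use on a hypothetical handwritten line:
        #
        #     line.set_production('handwritten-cursive')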
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['printed', 'typewritten', 'handwritten-cursive', 'handwritten-printscript', 'medieval-manuscript', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ProductionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.AlternativeImage or self.Coords is not None or self.Baseline is not None or self.Word or self.TextEquiv or self.TextStyle is not None or self.UserDefined is not None or self.Labels ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextLineType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextLineType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'TextLineType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextLineType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TextLineType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextLineType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.primaryLanguage is not None and 'primaryLanguage' not in already_processed: already_processed.add('primaryLanguage') outfile.write(' primaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryLanguage), input_name='primaryLanguage')), )) if self.primaryScript is not None and 'primaryScript' not in already_processed: already_processed.add('primaryScript') outfile.write(' primaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryScript), input_name='primaryScript')), )) if self.secondaryScript is not None and 'secondaryScript' not in already_processed: already_processed.add('secondaryScript') outfile.write(' secondaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryScript), input_name='secondaryScript')), )) if self.readingDirection is not None and 'readingDirection' not in already_processed: already_processed.add('readingDirection') outfile.write(' readingDirection=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.readingDirection), input_name='readingDirection')), )) if self.production is not None and 'production' not in already_processed: 
already_processed.add('production') outfile.write(' production=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.production), input_name='production')), )) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) if self.index is not None and 'index' not in already_processed: already_processed.add('index') outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextLineType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for AlternativeImage_ in self.AlternativeImage: namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else '' AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print) if self.Coords is not None: namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else '' self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print) if self.Baseline is not None: namespaceprefix_ = self.Baseline_nsprefix_ + ':' if (UseCapturedNS_ and self.Baseline_nsprefix_) else '' self.Baseline.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Baseline', pretty_print=pretty_print) for Word_ in self.Word: namespaceprefix_ = self.Word_nsprefix_ + ':' if (UseCapturedNS_ and self.Word_nsprefix_) else '' Word_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Word', pretty_print=pretty_print) for TextEquiv_ in self.TextEquiv: namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else '' TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print) if self.TextStyle is not None: namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else '' self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print) if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, 
node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('primaryLanguage', node) if value is not None and 'primaryLanguage' not in already_processed: already_processed.add('primaryLanguage') self.primaryLanguage = value self.validate_LanguageSimpleType(self.primaryLanguage) # validate type LanguageSimpleType value = find_attr_value_('primaryScript', node) if value is not None and 'primaryScript' not in already_processed: already_processed.add('primaryScript') self.primaryScript = value self.validate_ScriptSimpleType(self.primaryScript) # validate type ScriptSimpleType value = find_attr_value_('secondaryScript', node) if value is not None and 'secondaryScript' not in already_processed: already_processed.add('secondaryScript') self.secondaryScript = value self.validate_ScriptSimpleType(self.secondaryScript) # validate type ScriptSimpleType value = find_attr_value_('readingDirection', node) if value is not None and 'readingDirection' not in already_processed: already_processed.add('readingDirection') self.readingDirection = value self.validate_ReadingDirectionSimpleType(self.readingDirection) # validate type ReadingDirectionSimpleType value = find_attr_value_('production', node) if value is not None and 'production' not in already_processed: already_processed.add('production') self.production = value self.validate_ProductionSimpleType(self.production) # validate type ProductionSimpleType value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value value = find_attr_value_('index', node) if value is not None and 'index' not in already_processed: already_processed.add('index') self.index = self.gds_parse_integer(value, node, 'index') def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'AlternativeImage': obj_ = AlternativeImageType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.AlternativeImage.append(obj_) obj_.original_tagname_ = 'AlternativeImage' elif nodeName_ == 'Coords': obj_ = CoordsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Coords = obj_ obj_.original_tagname_ = 'Coords' elif nodeName_ == 'Baseline': obj_ = BaselineType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Baseline = obj_ obj_.original_tagname_ = 'Baseline' elif nodeName_ == 'Word': obj_ = WordType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Word.append(obj_) obj_.original_tagname_ = 'Word' elif nodeName_ == 'TextEquiv': obj_ = TextEquivType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextEquiv.append(obj_) obj_.original_tagname_ = 'TextEquiv' elif nodeName_ == 'TextStyle': obj_ = TextStyleType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextStyle = obj_ obj_.original_tagname_ = 'TextStyle' elif nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = 
LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' def get_polygon(self): ''' Get polygon from element which is parent of a Coords element ''' points = [point for point in self.Coords.points.split(' ')] return [[int(coord) for coord in point.split(',')] for point in points] def get_polygon_string(self): ''' Get polygon string from element which is parent of a Coords element ''' return self.Coords.points.replace(' ', ',') # end class TextLineType class WordType(GeneratedsSuper): """Overrides primaryLanguage attribute of parent line and/or text region The primary script used in the word The secondary script used in the word The direction in which text within the word should be read (order of characters). Overrides the production attribute of the parent text line and/or text region. For generic use""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('language', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('primaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('secondaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('readingDirection', 'pc:ReadingDirectionSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('production', 'pc:ProductionSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None), MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None), MemberSpec_('Glyph', 'GlyphType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Glyph', 'type': 'GlyphType'}, None), MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None), MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), ] subclass = None superclass = None def __init__(self, id=None, language=None, primaryScript=None, secondaryScript=None, readingDirection=None, production=None, custom=None, comments=None, AlternativeImage=None, Coords=None, Glyph=None, TextEquiv=None, TextStyle=None, UserDefined=None, Labels=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.language = _cast(None, language) self.language_nsprefix_ = None self.primaryScript = _cast(None, primaryScript) self.primaryScript_nsprefix_ = None self.secondaryScript = _cast(None, secondaryScript) self.secondaryScript_nsprefix_ = None self.readingDirection = _cast(None, readingDirection) self.readingDirection_nsprefix_ = None self.production = _cast(None, production) self.production_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = 
_cast(None, comments) self.comments_nsprefix_ = None if AlternativeImage is None: self.AlternativeImage = [] else: self.AlternativeImage = AlternativeImage self.AlternativeImage_nsprefix_ = "pc" self.Coords = Coords self.Coords_nsprefix_ = "pc" if Glyph is None: self.Glyph = [] else: self.Glyph = Glyph self.Glyph_nsprefix_ = "pc" if TextEquiv is None: self.TextEquiv = [] else: self.TextEquiv = TextEquiv self.TextEquiv_nsprefix_ = "pc" self.TextStyle = TextStyle self.TextStyle_nsprefix_ = "pc" self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, WordType) if subclass is not None: return subclass(*args_, **kwargs_) if WordType.subclass: return WordType.subclass(*args_, **kwargs_) else: return WordType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_AlternativeImage(self): return self.AlternativeImage def set_AlternativeImage(self, AlternativeImage): self.AlternativeImage = AlternativeImage def add_AlternativeImage(self, value): self.AlternativeImage.append(value) def insert_AlternativeImage_at(self, index, value): self.AlternativeImage.insert(index, value) def replace_AlternativeImage_at(self, index, value): self.AlternativeImage[index] = value def get_Coords(self): return self.Coords def set_Coords(self, Coords): self.Coords = Coords def get_Glyph(self): return self.Glyph def set_Glyph(self, Glyph): self.Glyph = Glyph def add_Glyph(self, value): self.Glyph.append(value) def insert_Glyph_at(self, index, value): self.Glyph.insert(index, value) def replace_Glyph_at(self, index, value): self.Glyph[index] = value def get_TextEquiv(self): return self.TextEquiv def set_TextEquiv(self, TextEquiv): self.TextEquiv = TextEquiv def add_TextEquiv(self, value): self.TextEquiv.append(value) def insert_TextEquiv_at(self, index, value): self.TextEquiv.insert(index, value) def replace_TextEquiv_at(self, index, value): self.TextEquiv[index] = value def get_TextStyle(self): return self.TextStyle def set_TextStyle(self, TextStyle): self.TextStyle = TextStyle def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_language(self): return self.language def set_language(self, language): self.language = language def get_primaryScript(self): return self.primaryScript def set_primaryScript(self, primaryScript): self.primaryScript = primaryScript def get_secondaryScript(self): return self.secondaryScript def set_secondaryScript(self, secondaryScript): self.secondaryScript = secondaryScript def get_readingDirection(self): return self.readingDirection def set_readingDirection(self, readingDirection): self.readingDirection = readingDirection def get_production(self): return self.production def set_production(self, production): self.production = production def get_custom(self): return self.custom def set_custom(self, custom): self.custom = 
custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def validate_LanguageSimpleType(self, value): # Validate type pc:LanguageSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Abkhaz', 'Afar', 'Afrikaans', 'Akan', 'Albanian', 'Amharic', 'Arabic', 'Aragonese', 'Armenian', 'Assamese', 'Avaric', 'Avestan', 'Aymara', 'Azerbaijani', 'Bambara', 'Bashkir', 'Basque', 'Belarusian', 'Bengali', 'Bihari', 'Bislama', 'Bosnian', 'Breton', 'Bulgarian', 'Burmese', 'Cambodian', 'Cantonese', 'Catalan', 'Chamorro', 'Chechen', 'Chichewa', 'Chinese', 'Chuvash', 'Cornish', 'Corsican', 'Cree', 'Croatian', 'Czech', 'Danish', 'Divehi', 'Dutch', 'Dzongkha', 'English', 'Esperanto', 'Estonian', 'Ewe', 'Faroese', 'Fijian', 'Finnish', 'French', 'Fula', 'Gaelic', 'Galician', 'Ganda', 'Georgian', 'German', 'Greek', 'Guaraní', 'Gujarati', 'Haitian', 'Hausa', 'Hebrew', 'Herero', 'Hindi', 'Hiri Motu', 'Hungarian', 'Icelandic', 'Ido', 'Igbo', 'Indonesian', 'Interlingua', 'Interlingue', 'Inuktitut', 'Inupiaq', 'Irish', 'Italian', 'Japanese', 'Javanese', 'Kalaallisut', 'Kannada', 'Kanuri', 'Kashmiri', 'Kazakh', 'Khmer', 'Kikuyu', 'Kinyarwanda', 'Kirundi', 'Komi', 'Kongo', 'Korean', 'Kurdish', 'Kwanyama', 'Kyrgyz', 'Lao', 'Latin', 'Latvian', 'Limburgish', 'Lingala', 'Lithuanian', 'Luba-Katanga', 'Luxembourgish', 'Macedonian', 'Malagasy', 'Malay', 'Malayalam', 'Maltese', 'Manx', 'Māori', 'Marathi', 'Marshallese', 'Mongolian', 'Nauru', 'Navajo', 'Ndonga', 'Nepali', 'North Ndebele', 'Northern Sami', 'Norwegian', 'Norwegian Bokmål', 'Norwegian Nynorsk', 'Nuosu', 'Occitan', 'Ojibwe', 'Old Church Slavonic', 'Oriya', 'Oromo', 'Ossetian', 'Pāli', 'Panjabi', 'Pashto', 'Persian', 'Polish', 'Portuguese', 'Punjabi', 'Quechua', 'Romanian', 'Romansh', 'Russian', 'Samoan', 'Sango', 'Sanskrit', 'Sardinian', 'Serbian', 'Shona', 'Sindhi', 'Sinhala', 'Slovak', 'Slovene', 'Somali', 'South Ndebele', 'Southern Sotho', 'Spanish', 'Sundanese', 'Swahili', 'Swati', 'Swedish', 'Tagalog', 'Tahitian', 'Tajik', 'Tamil', 'Tatar', 'Telugu', 'Thai', 'Tibetan', 'Tigrinya', 'Tonga', 'Tsonga', 'Tswana', 'Turkish', 'Turkmen', 'Twi', 'Uighur', 'Ukrainian', 'Urdu', 'Uzbek', 'Venda', 'Vietnamese', 'Volapük', 'Walloon', 'Welsh', 'Western Frisian', 'Wolof', 'Xhosa', 'Yiddish', 'Yoruba', 'Zhuang', 'Zulu', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LanguageSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ScriptSimpleType(self, value): # Validate type pc:ScriptSimpleType, a restriction on string. 
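        # Editor's note (hedged): WordType carries `language` (rather than
        # `primaryLanguage`) and its script/readingDirection attributes
        # override the enclosing text line and region, which allows
        # mixed-script lines. Illustrative sketch with hypothetical objects:
        #
        #     word = WordType(id='w001', language='Greek',
        #                     primaryScript='Grek - Greek',
        #                     Coords=CoordsType(points='0,0 30,0 30,20 0,20'))
        #     line.add_Word(word)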
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha (Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 
'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ReadingDirectionSimpleType(self, value): # Validate type pc:ReadingDirectionSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['left-to-right', 'right-to-left', 'top-to-bottom', 'bottom-to-top'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReadingDirectionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ProductionSimpleType(self, value): # Validate type pc:ProductionSimpleType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['printed', 'typewritten', 'handwritten-cursive', 'handwritten-printscript', 'medieval-manuscript', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ProductionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.AlternativeImage or self.Coords is not None or self.Glyph or self.TextEquiv or self.TextStyle is not None or self.UserDefined is not None or self.Labels ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='WordType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('WordType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'WordType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='WordType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='WordType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='WordType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.language is not None and 'language' not in already_processed: already_processed.add('language') outfile.write(' language=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.language), input_name='language')), )) if self.primaryScript is not None and 'primaryScript' not in already_processed: already_processed.add('primaryScript') outfile.write(' primaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryScript), input_name='primaryScript')), )) if self.secondaryScript is not None and 'secondaryScript' not in already_processed: already_processed.add('secondaryScript') outfile.write(' secondaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryScript), input_name='secondaryScript')), )) if self.readingDirection is not None and 'readingDirection' not in already_processed: already_processed.add('readingDirection') outfile.write(' readingDirection=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.readingDirection), input_name='readingDirection')), )) if self.production is not None and 'production' not in already_processed: already_processed.add('production') outfile.write(' production=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.production), input_name='production')), )) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='WordType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for AlternativeImage_ in self.AlternativeImage: namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else '' AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print) if self.Coords is not None: namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else '' self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print) for Glyph_ in self.Glyph: namespaceprefix_ = self.Glyph_nsprefix_ + ':' if (UseCapturedNS_ and self.Glyph_nsprefix_) else '' Glyph_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Glyph', pretty_print=pretty_print) for TextEquiv_ in self.TextEquiv: namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else '' TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print) if self.TextStyle is not None: namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else '' self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print) if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('language', node) if value is not None and 'language' not in already_processed: already_processed.add('language') self.language = value self.validate_LanguageSimpleType(self.language) # validate type LanguageSimpleType value = find_attr_value_('primaryScript', node) if value is not None and 'primaryScript' not 
in already_processed: already_processed.add('primaryScript') self.primaryScript = value self.validate_ScriptSimpleType(self.primaryScript) # validate type ScriptSimpleType value = find_attr_value_('secondaryScript', node) if value is not None and 'secondaryScript' not in already_processed: already_processed.add('secondaryScript') self.secondaryScript = value self.validate_ScriptSimpleType(self.secondaryScript) # validate type ScriptSimpleType value = find_attr_value_('readingDirection', node) if value is not None and 'readingDirection' not in already_processed: already_processed.add('readingDirection') self.readingDirection = value self.validate_ReadingDirectionSimpleType(self.readingDirection) # validate type ReadingDirectionSimpleType value = find_attr_value_('production', node) if value is not None and 'production' not in already_processed: already_processed.add('production') self.production = value self.validate_ProductionSimpleType(self.production) # validate type ProductionSimpleType value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'AlternativeImage': obj_ = AlternativeImageType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.AlternativeImage.append(obj_) obj_.original_tagname_ = 'AlternativeImage' elif nodeName_ == 'Coords': obj_ = CoordsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Coords = obj_ obj_.original_tagname_ = 'Coords' elif nodeName_ == 'Glyph': obj_ = GlyphType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Glyph.append(obj_) obj_.original_tagname_ = 'Glyph' elif nodeName_ == 'TextEquiv': obj_ = TextEquivType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextEquiv.append(obj_) obj_.original_tagname_ = 'TextEquiv' elif nodeName_ == 'TextStyle': obj_ = TextStyleType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextStyle = obj_ obj_.original_tagname_ = 'TextStyle' elif nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' def get_polygon(self): ''' Get polygon from element which is parent of a Coords element ''' points = [point for point in self.Coords.points.split(' ')] return [[int(coord) for coord in point.split(',')] for point in points] def get_polygon_string(self): ''' Get polygon string from element which is parent of a Coords element ''' return self.Coords.points.replace(' ', ',') # end class WordType class GlyphType(GeneratedsSuper): """The script used for the glyph Overrides the production attribute of the parent word / text line / text region. 
For generic use""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('ligature', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('symbol', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('script', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('production', 'pc:ProductionSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None), MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None), MemberSpec_('Graphemes', 'GraphemesType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Graphemes', 'type': 'GraphemesType'}, None), MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None), MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), ] subclass = None superclass = None def __init__(self, id=None, ligature=None, symbol=None, script=None, production=None, custom=None, comments=None, AlternativeImage=None, Coords=None, Graphemes=None, TextEquiv=None, TextStyle=None, UserDefined=None, Labels=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.ligature = _cast(bool, ligature) self.ligature_nsprefix_ = None self.symbol = _cast(bool, symbol) self.symbol_nsprefix_ = None self.script = _cast(None, script) self.script_nsprefix_ = None self.production = _cast(None, production) self.production_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None if AlternativeImage is None: self.AlternativeImage = [] else: self.AlternativeImage = AlternativeImage self.AlternativeImage_nsprefix_ = "pc" self.Coords = Coords self.Coords_nsprefix_ = "pc" self.Graphemes = Graphemes self.Graphemes_nsprefix_ = "pc" if TextEquiv is None: self.TextEquiv = [] else: self.TextEquiv = TextEquiv self.TextEquiv_nsprefix_ = "pc" self.TextStyle = TextStyle self.TextStyle_nsprefix_ = "pc" self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, GlyphType) if subclass is not None: return subclass(*args_, **kwargs_) if GlyphType.subclass: return GlyphType.subclass(*args_, **kwargs_) else: return GlyphType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_AlternativeImage(self): return self.AlternativeImage def set_AlternativeImage(self, 
AlternativeImage): self.AlternativeImage = AlternativeImage def add_AlternativeImage(self, value): self.AlternativeImage.append(value) def insert_AlternativeImage_at(self, index, value): self.AlternativeImage.insert(index, value) def replace_AlternativeImage_at(self, index, value): self.AlternativeImage[index] = value def get_Coords(self): return self.Coords def set_Coords(self, Coords): self.Coords = Coords def get_Graphemes(self): return self.Graphemes def set_Graphemes(self, Graphemes): self.Graphemes = Graphemes def get_TextEquiv(self): return self.TextEquiv def set_TextEquiv(self, TextEquiv): self.TextEquiv = TextEquiv def add_TextEquiv(self, value): self.TextEquiv.append(value) def insert_TextEquiv_at(self, index, value): self.TextEquiv.insert(index, value) def replace_TextEquiv_at(self, index, value): self.TextEquiv[index] = value def get_TextStyle(self): return self.TextStyle def set_TextStyle(self, TextStyle): self.TextStyle = TextStyle def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_ligature(self): return self.ligature def set_ligature(self, ligature): self.ligature = ligature def get_symbol(self): return self.symbol def set_symbol(self, symbol): self.symbol = symbol def get_script(self): return self.script def set_script(self, script): self.script = script def get_production(self): return self.production def set_production(self, production): self.production = production def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def validate_ScriptSimpleType(self, value): # Validate type pc:ScriptSimpleType, a restriction on string. 
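# Note on the ScriptSimpleType enumeration spelled out below: PAGE expects the full
# label, i.e. the ISO 15924 code plus its description ('Latn - Latin', 'Grek - Greek'),
# not the bare four-letter code.  Illustrative sketch ('g1' is a made-up id); the
# mismatch is only reported if a collector object is attached:
# >>> glyph = GlyphType(id='g1', script='Latn')
# >>> glyph.validate_ScriptSimpleType(glyph.script)   # 'Latn' alone fails the enumeration check
# >>> glyph.set_script('Latn - Latin')                # matches the enumeration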
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha (Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 
'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ProductionSimpleType(self, value): # Validate type pc:ProductionSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['printed', 'typewritten', 'handwritten-cursive', 'handwritten-printscript', 'medieval-manuscript', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ProductionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.AlternativeImage or self.Coords is not None or self.Graphemes is not None or self.TextEquiv or self.TextStyle is not None or self.UserDefined is not None or self.Labels ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GlyphType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GlyphType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'GlyphType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GlyphType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GlyphType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GlyphType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.ligature is not None and 'ligature' not in already_processed: already_processed.add('ligature') outfile.write(' ligature="%s"' % self.gds_format_boolean(self.ligature, input_name='ligature')) if self.symbol is not None and 'symbol' not in already_processed: already_processed.add('symbol') outfile.write(' symbol="%s"' % self.gds_format_boolean(self.symbol, input_name='symbol')) if self.script is not None and 'script' not in already_processed: already_processed.add('script') outfile.write(' script=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.script), input_name='script')), )) if self.production is not None and 'production' not in already_processed: already_processed.add('production') outfile.write(' production=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.production), input_name='production')), )) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GlyphType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for AlternativeImage_ in self.AlternativeImage: namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else '' AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print) if self.Coords is not None: namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else '' self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print) if self.Graphemes is not None: namespaceprefix_ = self.Graphemes_nsprefix_ + ':' if (UseCapturedNS_ and self.Graphemes_nsprefix_) else '' self.Graphemes.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Graphemes', pretty_print=pretty_print) for TextEquiv_ in self.TextEquiv: namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else '' TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print) if self.TextStyle is not None: namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else '' self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print) if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and 
self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('ligature', node) if value is not None and 'ligature' not in already_processed: already_processed.add('ligature') if value in ('true', '1'): self.ligature = True elif value in ('false', '0'): self.ligature = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('symbol', node) if value is not None and 'symbol' not in already_processed: already_processed.add('symbol') if value in ('true', '1'): self.symbol = True elif value in ('false', '0'): self.symbol = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('script', node) if value is not None and 'script' not in already_processed: already_processed.add('script') self.script = value self.validate_ScriptSimpleType(self.script) # validate type ScriptSimpleType value = find_attr_value_('production', node) if value is not None and 'production' not in already_processed: already_processed.add('production') self.production = value self.validate_ProductionSimpleType(self.production) # validate type ProductionSimpleType value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'AlternativeImage': obj_ = AlternativeImageType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.AlternativeImage.append(obj_) obj_.original_tagname_ = 'AlternativeImage' elif nodeName_ == 'Coords': obj_ = CoordsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Coords = obj_ obj_.original_tagname_ = 'Coords' elif nodeName_ == 'Graphemes': obj_ = GraphemesType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Graphemes = obj_ obj_.original_tagname_ = 'Graphemes' elif nodeName_ == 'TextEquiv': obj_ = TextEquivType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextEquiv.append(obj_) obj_.original_tagname_ = 'TextEquiv' elif nodeName_ == 'TextStyle': obj_ = TextStyleType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextStyle = obj_ obj_.original_tagname_ = 'TextStyle' elif nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) 
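# The Coords convenience helpers defined just below (get_polygon / get_polygon_string)
# re-parse the PAGE points string: space-separated pairs of comma-separated integers.
# Illustrative sketch with a made-up triangle, assuming CoordsType accepts its points
# attribute as a constructor keyword like the other generated types:
# >>> glyph.Coords = CoordsType(points='0,0 10,0 10,5')
# >>> glyph.get_polygon()
# [[0, 0], [10, 0], [10, 5]]
# >>> glyph.get_polygon_string()
# '0,0,10,0,10,5'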
obj_.original_tagname_ = 'Labels' def get_polygon(self): ''' Get polygon from element which is parent of a Coords element ''' points = [point for point in self.Coords.points.split(' ')] return [[int(coord) for coord in point.split(',')] for point in points] def get_polygon_string(self): ''' Get polygon string from element which is parent of a Coords element ''' return self.Coords.points.replace(' ', ',') # end class GlyphType class TextEquivType(GeneratedsSuper): """Used for sort order in case multiple TextEquivs are defined. The text content with the lowest index should be interpreted as the main text content. OCR confidence value (between 0 and 1) Type of text content (is it free text or a number, for instance). This is only a descriptive attribute, the text type is not checked during XML validation. Refinement for dataType attribute. Can be a regular expression, for instance.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('index', 'indexType', 0, 1, {'use': 'optional'}), MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('dataType', 'pc:TextDataTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('dataTypeDetails', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('PlainText', 'string', 0, 1, {'minOccurs': '0', 'name': 'PlainText', 'type': 'string'}, None), MemberSpec_('Unicode', 'string', 0, 0, {'name': 'Unicode', 'type': 'string'}, None), ] subclass = None superclass = None def __init__(self, index=None, conf=None, dataType=None, dataTypeDetails=None, comments=None, PlainText=None, Unicode=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = None self.index = _cast(int, index) self.index_nsprefix_ = None self.conf = _cast(float, conf) self.conf_nsprefix_ = None self.dataType = _cast(None, dataType) self.dataType_nsprefix_ = None self.dataTypeDetails = _cast(None, dataTypeDetails) self.dataTypeDetails_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None self.PlainText = PlainText self.PlainText_nsprefix_ = None self.Unicode = Unicode self.Unicode_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, TextEquivType) if subclass is not None: return subclass(*args_, **kwargs_) if TextEquivType.subclass: return TextEquivType.subclass(*args_, **kwargs_) else: return TextEquivType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_PlainText(self): return self.PlainText def set_PlainText(self, PlainText): self.PlainText = PlainText def get_Unicode(self): return self.Unicode def set_Unicode(self, Unicode): self.Unicode = Unicode def get_index(self): return self.index def set_index(self, index): self.index = index def get_conf(self): return self.conf def set_conf(self, conf): self.conf = conf def get_dataType(self): return self.dataType def set_dataType(self, dataType): self.dataType = dataType def get_dataTypeDetails(self): return self.dataTypeDetails def set_dataTypeDetails(self, dataTypeDetails): self.dataTypeDetails = dataTypeDetails def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def validate_indexType(self, value): # 
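# Illustrative sketch of how TextEquiv alternatives are typically filled in: the entry
# with the lowest index is the main reading, conf is an OCR confidence in [0, 1], and
# a WordType or GlyphType holds them as a list (all literal values below are made up):
# >>> best = TextEquivType(index=0, conf=0.93, Unicode='Berlin')
# >>> alt = TextEquivType(index=1, conf=0.41)
# >>> alt.set_Unicode('Bergen')
# >>> glyph.set_TextEquiv([best, alt])   # or glyph.add_TextEquiv(...) one at a time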
Validate type indexType, a restriction on integer. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, int): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) return False if value < 0: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on indexType' % {"value": value, "lineno": lineno} ) result = False def validate_ConfSimpleType(self, value): # Validate type pc:ConfSimpleType, a restriction on float. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, float): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) return False if value < 0: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False if value > 1: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False def validate_TextDataTypeSimpleType(self, value): # Validate type pc:TextDataTypeSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['xsd:decimal', 'xsd:float', 'xsd:integer', 'xsd:boolean', 'xsd:date', 'xsd:time', 'xsd:dateTime', 'xsd:string', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on TextDataTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.PlainText is not None or self.Unicode is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15" xmlns:None="http://www.w3.org/2001/XMLSchema" ', name_='TextEquivType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextEquivType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'TextEquivType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextEquivType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TextEquivType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, 
name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextEquivType'): if self.index is not None and 'index' not in already_processed: already_processed.add('index') outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index')) if self.conf is not None and 'conf' not in already_processed: already_processed.add('conf') outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf')) if self.dataType is not None and 'dataType' not in already_processed: already_processed.add('dataType') outfile.write(' dataType=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dataType), input_name='dataType')), )) if self.dataTypeDetails is not None and 'dataTypeDetails' not in already_processed: already_processed.add('dataTypeDetails') outfile.write(' dataTypeDetails=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dataTypeDetails), input_name='dataTypeDetails')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15" xmlns:None="http://www.w3.org/2001/XMLSchema" ', name_='TextEquivType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.PlainText is not None: namespaceprefix_ = self.PlainText_nsprefix_ + ':' if (UseCapturedNS_ and self.PlainText_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%sPlainText>%s</%sPlainText>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PlainText), input_name='PlainText')), namespaceprefix_ , eol_)) if self.Unicode is not None: namespaceprefix_ = self.Unicode_nsprefix_ + ':' if (UseCapturedNS_ and self.Unicode_nsprefix_) else '' showIndent(outfile, level, pretty_print) outfile.write('<%sUnicode>%s</%sUnicode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Unicode), input_name='Unicode')), namespaceprefix_ , eol_)) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('index', node) if value is not None and 'index' not in already_processed: already_processed.add('index') self.index = self.gds_parse_integer(value, node, 'index') self.validate_indexType(self.index) # validate type indexType value = find_attr_value_('conf', node) if value is not None and 'conf' not in already_processed: already_processed.add('conf') value = self.gds_parse_float(value, node, 'conf') self.conf = value self.validate_ConfSimpleType(self.conf) # validate type ConfSimpleType value = find_attr_value_('dataType', node) if value is not None and 'dataType' not in already_processed: already_processed.add('dataType') self.dataType = value self.validate_TextDataTypeSimpleType(self.dataType) # validate type TextDataTypeSimpleType value = 
find_attr_value_('dataTypeDetails', node) if value is not None and 'dataTypeDetails' not in already_processed: already_processed.add('dataTypeDetails') self.dataTypeDetails = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'PlainText': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'PlainText') value_ = self.gds_validate_string(value_, node, 'PlainText') self.PlainText = value_ self.PlainText_nsprefix_ = child_.prefix elif nodeName_ == 'Unicode': value_ = child_.text value_ = self.gds_parse_string(value_, node, 'Unicode') value_ = self.gds_validate_string(value_, node, 'Unicode') self.Unicode = value_ self.Unicode_nsprefix_ = child_.prefix # end class TextEquivType class GridType(GeneratedsSuper): """Matrix of grid points defining the table grid on the page.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('GridPoints', 'GridPointsType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '2', 'name': 'GridPoints', 'type': 'GridPointsType'}, None), ] subclass = None superclass = None def __init__(self, GridPoints=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" if GridPoints is None: self.GridPoints = [] else: self.GridPoints = GridPoints self.GridPoints_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, GridType) if subclass is not None: return subclass(*args_, **kwargs_) if GridType.subclass: return GridType.subclass(*args_, **kwargs_) else: return GridType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_GridPoints(self): return self.GridPoints def set_GridPoints(self, GridPoints): self.GridPoints = GridPoints def add_GridPoints(self, value): self.GridPoints.append(value) def insert_GridPoints_at(self, index, value): self.GridPoints.insert(index, value) def replace_GridPoints_at(self, index, value): self.GridPoints[index] = value def hasContent_(self): if ( self.GridPoints ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GridType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GridType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'GridType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GridType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GridType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, 
eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GridType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GridType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for GridPoints_ in self.GridPoints: namespaceprefix_ = self.GridPoints_nsprefix_ + ':' if (UseCapturedNS_ and self.GridPoints_nsprefix_) else '' GridPoints_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GridPoints', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'GridPoints': obj_ = GridPointsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.GridPoints.append(obj_) obj_.original_tagname_ = 'GridPoints' # end class GridType class GridPointsType(GeneratedsSuper): """Points with x,y coordinates. The grid row index""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('index', 'int', 0, 0, {'use': 'required'}), MemberSpec_('points', 'pc:PointsType', 0, 0, {'use': 'required'}), ] subclass = None superclass = None def __init__(self, index=None, points=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.index = _cast(int, index) self.index_nsprefix_ = None self.points = _cast(None, points) self.points_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, GridPointsType) if subclass is not None: return subclass(*args_, **kwargs_) if GridPointsType.subclass: return GridPointsType.subclass(*args_, **kwargs_) else: return GridPointsType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_index(self): return self.index def set_index(self, index): self.index = index def get_points(self): return self.points def set_points(self, points): self.points = points def validate_PointsType(self, value): # Validate type pc:PointsType, a restriction on string. 
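# points follows the shared PAGE PointsType pattern checked below: at least two
# non-negative integer x,y pairs, a comma inside each pair, single spaces between
# pairs and no trailing space.  A GridType needs at least two such GridPoints rows
# (minOccurs '2' in the member spec above).  Illustrative sketch with made-up values:
# >>> row = GridPointsType(index=0, points='0,10 50,10 100,12')    # matches the pattern
# >>> bad = GridPointsType(index=1, points='0,10, 50,10')          # stray comma: rejected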
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False if not self.gds_validate_simple_patterns( self.validate_PointsType_patterns_, value): self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_PointsType_patterns_, )) validate_PointsType_patterns_ = [['^(([0-9]+,[0-9]+ )+([0-9]+,[0-9]+))$']] def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GridPointsType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GridPointsType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'GridPointsType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GridPointsType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GridPointsType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GridPointsType'): if self.index is not None and 'index' not in already_processed: already_processed.add('index') outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index')) if self.points is not None and 'points' not in already_processed: already_processed.add('points') outfile.write(' points=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.points), input_name='points')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GridPointsType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('index', node) if value is not None and 'index' not in already_processed: already_processed.add('index') self.index = self.gds_parse_integer(value, node, 'index') value = find_attr_value_('points', node) if value is not None and 'points' not in already_processed: already_processed.add('points') self.points = value self.validate_PointsType(self.points) # validate type PointsType def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class GridPointsType class 
PrintSpaceType(GeneratedsSuper): """Determines the effective area on the paper of a printed page. Its size is equal for all pages of a book (exceptions: titlepage, multipage pictures). It contains all living elements (except marginals) like body type, footnotes, headings, running titles. It does not contain pagenumber (if not part of running title), marginals, signature mark, preview words.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None), ] subclass = None superclass = None def __init__(self, Coords=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.Coords = Coords self.Coords_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, PrintSpaceType) if subclass is not None: return subclass(*args_, **kwargs_) if PrintSpaceType.subclass: return PrintSpaceType.subclass(*args_, **kwargs_) else: return PrintSpaceType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Coords(self): return self.Coords def set_Coords(self, Coords): self.Coords = Coords def hasContent_(self): if ( self.Coords is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PrintSpaceType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('PrintSpaceType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'PrintSpaceType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PrintSpaceType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PrintSpaceType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PrintSpaceType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PrintSpaceType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.Coords is not None: namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else '' self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: 
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Coords': obj_ = CoordsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Coords = obj_ obj_.original_tagname_ = 'Coords' def get_polygon(self): ''' Get polygon from element which is parent of a Coords element ''' points = [point for point in self.Coords.points.split(' ')] return [[int(coord) for coord in point.split(',')] for point in points] def get_polygon_string(self): ''' Get polygon string from element which is parent of a Coords element ''' return self.Coords.points.replace(' ', ',') # end class PrintSpaceType class ReadingOrderType(GeneratedsSuper): """Definition of the reading order within the page. To express a reading order between elements they have to be included in an OrderedGroup. Groups may contain further groups. Confidence value (between 0 and 1)""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('OrderedGroup', 'OrderedGroupType', 0, 0, {'name': 'OrderedGroup', 'type': 'OrderedGroupType'}, 2), MemberSpec_('UnorderedGroup', 'UnorderedGroupType', 0, 0, {'name': 'UnorderedGroup', 'type': 'UnorderedGroupType'}, 2), ] subclass = None superclass = None def __init__(self, conf=None, OrderedGroup=None, UnorderedGroup=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.conf = _cast(float, conf) self.conf_nsprefix_ = None self.OrderedGroup = OrderedGroup self.OrderedGroup_nsprefix_ = "pc" self.UnorderedGroup = UnorderedGroup self.UnorderedGroup_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, ReadingOrderType) if subclass is not None: return subclass(*args_, **kwargs_) if ReadingOrderType.subclass: return ReadingOrderType.subclass(*args_, **kwargs_) else: return ReadingOrderType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_OrderedGroup(self): return self.OrderedGroup def set_OrderedGroup(self, OrderedGroup): self.OrderedGroup = OrderedGroup def get_UnorderedGroup(self): return self.UnorderedGroup def set_UnorderedGroup(self, UnorderedGroup): self.UnorderedGroup = UnorderedGroup def get_conf(self): return self.conf def set_conf(self, conf): self.conf = conf def validate_ConfSimpleType(self, value): # Validate type pc:ConfSimpleType, a restriction on float. 
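# conf is a pc:ConfSimpleType, i.e. a float restricted to [0, 1] by the checks below.
# Note the isinstance(value, float) guard: a plain int such as 1 is reported as the
# wrong base type, so pass 1.0 when setting it in code (XML input is parsed to float
# by buildAttributes anyway).  Illustrative sketch:
# >>> order = ReadingOrderType(conf=0.75)
# >>> order.validate_ConfSimpleType(1.2)   # with a collector attached, logs a maxInclusive violation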
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, float): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) return False if value < 0: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False if value > 1: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False def hasContent_(self): if ( self.OrderedGroup is not None or self.UnorderedGroup is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ReadingOrderType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReadingOrderType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'ReadingOrderType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReadingOrderType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReadingOrderType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReadingOrderType'): if self.conf is not None and 'conf' not in already_processed: already_processed.add('conf') outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ReadingOrderType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.OrderedGroup is not None: namespaceprefix_ = self.OrderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroup_nsprefix_) else '' self.OrderedGroup.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroup', pretty_print=pretty_print) if self.UnorderedGroup is not None: namespaceprefix_ = self.UnorderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroup_nsprefix_) else '' self.UnorderedGroup.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroup', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) 
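# ---------------------------------------------------------------------------
# Illustrative sketch (client code, not part of the generated bindings):
# assembling a reading order from the classes defined in this module and
# serialising it with their export() method. The module name
# "page_generateds" is an assumption -- substitute whatever name this
# generated file is imported under in your project.
# ---------------------------------------------------------------------------
import io

from page_generateds import (  # assumed module name
    OrderedGroupType,
    ReadingOrderType,
    RegionRefIndexedType,
)


def build_reading_order():
    # RegionRefIndexed children carry an explicit reading-order index.
    group = OrderedGroupType(id="ro_g0", caption="page body")
    group.add_RegionRefIndexed(RegionRefIndexedType(index=0, regionRef="r_heading"))
    group.add_RegionRefIndexed(RegionRefIndexedType(index=1, regionRef="r_paragraph"))
    # conf is restricted by ConfSimpleType to the closed interval [0, 1].
    return ReadingOrderType(conf=0.95, OrderedGroup=group)


out = io.StringIO()
build_reading_order().export(out, 0, name_="ReadingOrder")
print(out.getvalue())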
return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('conf', node) if value is not None and 'conf' not in already_processed: already_processed.add('conf') value = self.gds_parse_float(value, node, 'conf') self.conf = value self.validate_ConfSimpleType(self.conf) # validate type ConfSimpleType def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'OrderedGroup': obj_ = OrderedGroupType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.OrderedGroup = obj_ obj_.original_tagname_ = 'OrderedGroup' elif nodeName_ == 'UnorderedGroup': obj_ = UnorderedGroupType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UnorderedGroup = obj_ obj_.original_tagname_ = 'UnorderedGroup' # end class ReadingOrderType class RegionRefIndexedType(GeneratedsSuper): """Numbered regionPosition (order number) of this item within the current hierarchy level.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('index', 'int', 0, 0, {'use': 'required'}), MemberSpec_('regionRef', 'string', 0, 0, {'use': 'required'}), ] subclass = None superclass = None def __init__(self, index=None, regionRef=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.index = _cast(int, index) self.index_nsprefix_ = None self.regionRef = _cast(None, regionRef) self.regionRef_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, RegionRefIndexedType) if subclass is not None: return subclass(*args_, **kwargs_) if RegionRefIndexedType.subclass: return RegionRefIndexedType.subclass(*args_, **kwargs_) else: return RegionRefIndexedType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_index(self): return self.index def set_index(self, index): self.index = index def get_regionRef(self): return self.regionRef def set_regionRef(self, regionRef): self.regionRef = regionRef def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionRefIndexedType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('RegionRefIndexedType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'RegionRefIndexedType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RegionRefIndexedType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RegionRefIndexedType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, 
namespaceprefix_='', name_='RegionRefIndexedType'): if self.index is not None and 'index' not in already_processed: already_processed.add('index') outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index')) if self.regionRef is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionRefIndexedType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('index', node) if value is not None and 'index' not in already_processed: already_processed.add('index') self.index = self.gds_parse_integer(value, node, 'index') value = find_attr_value_('regionRef', node) if value is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') self.regionRef = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class RegionRefIndexedType class OrderedGroupIndexedType(GeneratedsSuper): """Indexed group containing ordered elements Optional link to a parent region of nested regions. The parent region doubles as reading order group. Only the nested regions should be allowed as group members. Position (order number) of this item within the current hierarchy level. Is this group a continuation of another group (from previous column or page, for example)? 
For generic use""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('regionRef', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('index', 'int', 0, 0, {'use': 'required'}), MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'pc:GroupTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), MemberSpec_('RegionRefIndexed', 'RegionRefIndexedType', 1, 0, {'name': 'RegionRefIndexed', 'type': 'RegionRefIndexedType'}, 3), MemberSpec_('OrderedGroupIndexed', 'OrderedGroupIndexedType', 1, 0, {'name': 'OrderedGroupIndexed', 'type': 'OrderedGroupIndexedType'}, 3), MemberSpec_('UnorderedGroupIndexed', 'UnorderedGroupIndexedType', 1, 0, {'name': 'UnorderedGroupIndexed', 'type': 'UnorderedGroupIndexedType'}, 3), ] subclass = None superclass = None def __init__(self, id=None, regionRef=None, index=None, caption=None, type_=None, continuation=None, custom=None, comments=None, UserDefined=None, Labels=None, RegionRefIndexed=None, OrderedGroupIndexed=None, UnorderedGroupIndexed=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.regionRef = _cast(None, regionRef) self.regionRef_nsprefix_ = None self.index = _cast(int, index) self.index_nsprefix_ = None self.caption = _cast(None, caption) self.caption_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.continuation = _cast(bool, continuation) self.continuation_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" if RegionRefIndexed is None: self.RegionRefIndexed = [] else: self.RegionRefIndexed = RegionRefIndexed self.RegionRefIndexed_nsprefix_ = "pc" if OrderedGroupIndexed is None: self.OrderedGroupIndexed = [] else: self.OrderedGroupIndexed = OrderedGroupIndexed self.OrderedGroupIndexed_nsprefix_ = "pc" if UnorderedGroupIndexed is None: self.UnorderedGroupIndexed = [] else: self.UnorderedGroupIndexed = UnorderedGroupIndexed self.UnorderedGroupIndexed_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, OrderedGroupIndexedType) if subclass is not None: return subclass(*args_, **kwargs_) if OrderedGroupIndexedType.subclass: return OrderedGroupIndexedType.subclass(*args_, **kwargs_) else: return OrderedGroupIndexedType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def 
get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_RegionRefIndexed(self): return self.RegionRefIndexed def set_RegionRefIndexed(self, RegionRefIndexed): self.RegionRefIndexed = RegionRefIndexed def add_RegionRefIndexed(self, value): self.RegionRefIndexed.append(value) def insert_RegionRefIndexed_at(self, index, value): self.RegionRefIndexed.insert(index, value) def replace_RegionRefIndexed_at(self, index, value): self.RegionRefIndexed[index] = value def get_OrderedGroupIndexed(self): return self.OrderedGroupIndexed def set_OrderedGroupIndexed(self, OrderedGroupIndexed): self.OrderedGroupIndexed = OrderedGroupIndexed def add_OrderedGroupIndexed(self, value): self.OrderedGroupIndexed.append(value) def insert_OrderedGroupIndexed_at(self, index, value): self.OrderedGroupIndexed.insert(index, value) def replace_OrderedGroupIndexed_at(self, index, value): self.OrderedGroupIndexed[index] = value def get_UnorderedGroupIndexed(self): return self.UnorderedGroupIndexed def set_UnorderedGroupIndexed(self, UnorderedGroupIndexed): self.UnorderedGroupIndexed = UnorderedGroupIndexed def add_UnorderedGroupIndexed(self, value): self.UnorderedGroupIndexed.append(value) def insert_UnorderedGroupIndexed_at(self, index, value): self.UnorderedGroupIndexed.insert(index, value) def replace_UnorderedGroupIndexed_at(self, index, value): self.UnorderedGroupIndexed[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_regionRef(self): return self.regionRef def set_regionRef(self, regionRef): self.regionRef = regionRef def get_index(self): return self.index def set_index(self, index): self.index = index def get_caption(self): return self.caption def set_caption(self, caption): self.caption = caption def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_continuation(self): return self.continuation def set_continuation(self, continuation): self.continuation = continuation def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def validate_GroupTypeSimpleType(self, value): # Validate type pc:GroupTypeSimpleType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['paragraph', 'list', 'list-item', 'figure', 'article', 'div', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GroupTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.UserDefined is not None or self.Labels or self.RegionRefIndexed or self.OrderedGroupIndexed or self.UnorderedGroupIndexed ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='OrderedGroupIndexedType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('OrderedGroupIndexedType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'OrderedGroupIndexedType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OrderedGroupIndexedType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OrderedGroupIndexedType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OrderedGroupIndexedType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.regionRef is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), )) if self.index is not None and 'index' not in already_processed: already_processed.add('index') outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index')) if self.caption is not None and 'caption' not in already_processed: already_processed.add('caption') outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.continuation is not None and 'continuation' not in already_processed: already_processed.add('continuation') outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation')) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' 
custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='OrderedGroupIndexedType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) for RegionRefIndexed_ in self.RegionRefIndexed: namespaceprefix_ = self.RegionRefIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRefIndexed_nsprefix_) else '' RegionRefIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRefIndexed', pretty_print=pretty_print) for OrderedGroupIndexed_ in self.OrderedGroupIndexed: namespaceprefix_ = self.OrderedGroupIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroupIndexed_nsprefix_) else '' OrderedGroupIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroupIndexed', pretty_print=pretty_print) for UnorderedGroupIndexed_ in self.UnorderedGroupIndexed: namespaceprefix_ = self.UnorderedGroupIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroupIndexed_nsprefix_) else '' UnorderedGroupIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroupIndexed', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('regionRef', node) if value is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') self.regionRef = value value = find_attr_value_('index', node) if value is not None and 'index' not in already_processed: already_processed.add('index') self.index = self.gds_parse_integer(value, node, 'index') value = find_attr_value_('caption', node) if value is not None and 'caption' not in already_processed: already_processed.add('caption') self.caption = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_GroupTypeSimpleType(self.type_) # validate type GroupTypeSimpleType value = find_attr_value_('continuation', node) if value is not None and 'continuation' not in already_processed: 
already_processed.add('continuation') if value in ('true', '1'): self.continuation = True elif value in ('false', '0'): self.continuation = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' elif nodeName_ == 'RegionRefIndexed': obj_ = RegionRefIndexedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.RegionRefIndexed.append(obj_) obj_.original_tagname_ = 'RegionRefIndexed' elif nodeName_ == 'OrderedGroupIndexed': obj_ = OrderedGroupIndexedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.OrderedGroupIndexed.append(obj_) obj_.original_tagname_ = 'OrderedGroupIndexed' elif nodeName_ == 'UnorderedGroupIndexed': obj_ = UnorderedGroupIndexedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UnorderedGroupIndexed.append(obj_) obj_.original_tagname_ = 'UnorderedGroupIndexed' # end class OrderedGroupIndexedType class UnorderedGroupIndexedType(GeneratedsSuper): """Indexed group containing unordered elements Optional link to a parent region of nested regions. The parent region doubles as reading order group. Only the nested regions should be allowed as group members. Position (order number) of this item within the current hierarchy level. Is this group a continuation of another group (from previous column or page, for example)? 
For generic use""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('regionRef', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('index', 'int', 0, 0, {'use': 'required'}), MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'pc:GroupTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), MemberSpec_('RegionRef', 'RegionRefType', 1, 0, {'name': 'RegionRef', 'type': 'RegionRefType'}, 4), MemberSpec_('OrderedGroup', 'OrderedGroupType', 1, 0, {'name': 'OrderedGroup', 'type': 'OrderedGroupType'}, 4), MemberSpec_('UnorderedGroup', 'UnorderedGroupType', 1, 0, {'name': 'UnorderedGroup', 'type': 'UnorderedGroupType'}, 4), ] subclass = None superclass = None def __init__(self, id=None, regionRef=None, index=None, caption=None, type_=None, continuation=None, custom=None, comments=None, UserDefined=None, Labels=None, RegionRef=None, OrderedGroup=None, UnorderedGroup=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.regionRef = _cast(None, regionRef) self.regionRef_nsprefix_ = None self.index = _cast(int, index) self.index_nsprefix_ = None self.caption = _cast(None, caption) self.caption_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.continuation = _cast(bool, continuation) self.continuation_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" if RegionRef is None: self.RegionRef = [] else: self.RegionRef = RegionRef self.RegionRef_nsprefix_ = "pc" if OrderedGroup is None: self.OrderedGroup = [] else: self.OrderedGroup = OrderedGroup self.OrderedGroup_nsprefix_ = "pc" if UnorderedGroup is None: self.UnorderedGroup = [] else: self.UnorderedGroup = UnorderedGroup self.UnorderedGroup_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, UnorderedGroupIndexedType) if subclass is not None: return subclass(*args_, **kwargs_) if UnorderedGroupIndexedType.subclass: return UnorderedGroupIndexedType.subclass(*args_, **kwargs_) else: return UnorderedGroupIndexedType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, 
value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_RegionRef(self): return self.RegionRef def set_RegionRef(self, RegionRef): self.RegionRef = RegionRef def add_RegionRef(self, value): self.RegionRef.append(value) def insert_RegionRef_at(self, index, value): self.RegionRef.insert(index, value) def replace_RegionRef_at(self, index, value): self.RegionRef[index] = value def get_OrderedGroup(self): return self.OrderedGroup def set_OrderedGroup(self, OrderedGroup): self.OrderedGroup = OrderedGroup def add_OrderedGroup(self, value): self.OrderedGroup.append(value) def insert_OrderedGroup_at(self, index, value): self.OrderedGroup.insert(index, value) def replace_OrderedGroup_at(self, index, value): self.OrderedGroup[index] = value def get_UnorderedGroup(self): return self.UnorderedGroup def set_UnorderedGroup(self, UnorderedGroup): self.UnorderedGroup = UnorderedGroup def add_UnorderedGroup(self, value): self.UnorderedGroup.append(value) def insert_UnorderedGroup_at(self, index, value): self.UnorderedGroup.insert(index, value) def replace_UnorderedGroup_at(self, index, value): self.UnorderedGroup[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_regionRef(self): return self.regionRef def set_regionRef(self, regionRef): self.regionRef = regionRef def get_index(self): return self.index def set_index(self, index): self.index = index def get_caption(self): return self.caption def set_caption(self, caption): self.caption = caption def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_continuation(self): return self.continuation def set_continuation(self, continuation): self.continuation = continuation def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def validate_GroupTypeSimpleType(self, value): # Validate type pc:GroupTypeSimpleType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['paragraph', 'list', 'list-item', 'figure', 'article', 'div', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GroupTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.UserDefined is not None or self.Labels or self.RegionRef or self.OrderedGroup or self.UnorderedGroup ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnorderedGroupIndexedType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('UnorderedGroupIndexedType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'UnorderedGroupIndexedType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnorderedGroupIndexedType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnorderedGroupIndexedType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnorderedGroupIndexedType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.regionRef is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), )) if self.index is not None and 'index' not in already_processed: already_processed.add('index') outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index')) if self.caption is not None and 'caption' not in already_processed: already_processed.add('caption') outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.continuation is not None and 'continuation' not in already_processed: already_processed.add('continuation') outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation')) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' 
custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnorderedGroupIndexedType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) for RegionRef_ in self.RegionRef: namespaceprefix_ = self.RegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRef_nsprefix_) else '' RegionRef_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRef', pretty_print=pretty_print) for OrderedGroup_ in self.OrderedGroup: namespaceprefix_ = self.OrderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroup_nsprefix_) else '' OrderedGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroup', pretty_print=pretty_print) for UnorderedGroup_ in self.UnorderedGroup: namespaceprefix_ = self.UnorderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroup_nsprefix_) else '' UnorderedGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroup', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('regionRef', node) if value is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') self.regionRef = value value = find_attr_value_('index', node) if value is not None and 'index' not in already_processed: already_processed.add('index') self.index = self.gds_parse_integer(value, node, 'index') value = find_attr_value_('caption', node) if value is not None and 'caption' not in already_processed: already_processed.add('caption') self.caption = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_GroupTypeSimpleType(self.type_) # validate type GroupTypeSimpleType value = find_attr_value_('continuation', node) if value is not None and 'continuation' not in already_processed: already_processed.add('continuation') if value in ('true', '1'): self.continuation = True elif value in ('false', '0'): self.continuation = False else: 
raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' elif nodeName_ == 'RegionRef': obj_ = RegionRefType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.RegionRef.append(obj_) obj_.original_tagname_ = 'RegionRef' elif nodeName_ == 'OrderedGroup': obj_ = OrderedGroupType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.OrderedGroup.append(obj_) obj_.original_tagname_ = 'OrderedGroup' elif nodeName_ == 'UnorderedGroup': obj_ = UnorderedGroupType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UnorderedGroup.append(obj_) obj_.original_tagname_ = 'UnorderedGroup' # end class UnorderedGroupIndexedType class RegionRefType(GeneratedsSuper): __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('regionRef', 'string', 0, 0, {'use': 'required'}), ] subclass = None superclass = None def __init__(self, regionRef=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.regionRef = _cast(None, regionRef) self.regionRef_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, RegionRefType) if subclass is not None: return subclass(*args_, **kwargs_) if RegionRefType.subclass: return RegionRefType.subclass(*args_, **kwargs_) else: return RegionRefType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_regionRef(self): return self.regionRef def set_regionRef(self, regionRef): self.regionRef = regionRef def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionRefType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('RegionRefType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'RegionRefType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RegionRefType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='RegionRefType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RegionRefType'): if self.regionRef is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionRefType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('regionRef', node) if value is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') self.regionRef = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class RegionRefType class OrderedGroupType(GeneratedsSuper): """Numbered group (contains ordered elements) Optional link to a parent region of nested regions. The parent region doubles as reading order group. Only the nested regions should be allowed as group members. Is this group a continuation of another group (from previous column or page, for example)? 
For generic use""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('regionRef', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'pc:GroupTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), MemberSpec_('RegionRefIndexed', 'RegionRefIndexedType', 1, 0, {'name': 'RegionRefIndexed', 'type': 'RegionRefIndexedType'}, 5), MemberSpec_('OrderedGroupIndexed', 'OrderedGroupIndexedType', 1, 0, {'name': 'OrderedGroupIndexed', 'type': 'OrderedGroupIndexedType'}, 5), MemberSpec_('UnorderedGroupIndexed', 'UnorderedGroupIndexedType', 1, 0, {'name': 'UnorderedGroupIndexed', 'type': 'UnorderedGroupIndexedType'}, 5), ] subclass = None superclass = None def __init__(self, id=None, regionRef=None, caption=None, type_=None, continuation=None, custom=None, comments=None, UserDefined=None, Labels=None, RegionRefIndexed=None, OrderedGroupIndexed=None, UnorderedGroupIndexed=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.regionRef = _cast(None, regionRef) self.regionRef_nsprefix_ = None self.caption = _cast(None, caption) self.caption_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.continuation = _cast(bool, continuation) self.continuation_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" if RegionRefIndexed is None: self.RegionRefIndexed = [] else: self.RegionRefIndexed = RegionRefIndexed self.RegionRefIndexed_nsprefix_ = "pc" if OrderedGroupIndexed is None: self.OrderedGroupIndexed = [] else: self.OrderedGroupIndexed = OrderedGroupIndexed self.OrderedGroupIndexed_nsprefix_ = "pc" if UnorderedGroupIndexed is None: self.UnorderedGroupIndexed = [] else: self.UnorderedGroupIndexed = UnorderedGroupIndexed self.UnorderedGroupIndexed_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, OrderedGroupType) if subclass is not None: return subclass(*args_, **kwargs_) if OrderedGroupType.subclass: return OrderedGroupType.subclass(*args_, **kwargs_) else: return OrderedGroupType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def 
insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_RegionRefIndexed(self): return self.RegionRefIndexed def set_RegionRefIndexed(self, RegionRefIndexed): self.RegionRefIndexed = RegionRefIndexed def add_RegionRefIndexed(self, value): self.RegionRefIndexed.append(value) def insert_RegionRefIndexed_at(self, index, value): self.RegionRefIndexed.insert(index, value) def replace_RegionRefIndexed_at(self, index, value): self.RegionRefIndexed[index] = value def get_OrderedGroupIndexed(self): return self.OrderedGroupIndexed def set_OrderedGroupIndexed(self, OrderedGroupIndexed): self.OrderedGroupIndexed = OrderedGroupIndexed def add_OrderedGroupIndexed(self, value): self.OrderedGroupIndexed.append(value) def insert_OrderedGroupIndexed_at(self, index, value): self.OrderedGroupIndexed.insert(index, value) def replace_OrderedGroupIndexed_at(self, index, value): self.OrderedGroupIndexed[index] = value def get_UnorderedGroupIndexed(self): return self.UnorderedGroupIndexed def set_UnorderedGroupIndexed(self, UnorderedGroupIndexed): self.UnorderedGroupIndexed = UnorderedGroupIndexed def add_UnorderedGroupIndexed(self, value): self.UnorderedGroupIndexed.append(value) def insert_UnorderedGroupIndexed_at(self, index, value): self.UnorderedGroupIndexed.insert(index, value) def replace_UnorderedGroupIndexed_at(self, index, value): self.UnorderedGroupIndexed[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_regionRef(self): return self.regionRef def set_regionRef(self, regionRef): self.regionRef = regionRef def get_caption(self): return self.caption def set_caption(self, caption): self.caption = caption def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_continuation(self): return self.continuation def set_continuation(self, continuation): self.continuation = continuation def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def validate_GroupTypeSimpleType(self, value): # Validate type pc:GroupTypeSimpleType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['paragraph', 'list', 'list-item', 'figure', 'article', 'div', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GroupTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.UserDefined is not None or self.Labels or self.RegionRefIndexed or self.OrderedGroupIndexed or self.UnorderedGroupIndexed ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='OrderedGroupType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('OrderedGroupType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'OrderedGroupType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OrderedGroupType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OrderedGroupType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OrderedGroupType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.regionRef is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), )) if self.caption is not None and 'caption' not in already_processed: already_processed.add('caption') outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.continuation is not None and 'continuation' not in already_processed: already_processed.add('continuation') outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation')) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') 
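# ---------------------------------------------------------------------------
# Illustrative sketch (client code, not part of the generated bindings):
# flattening a reading-order group built or parsed with these classes into a
# plain list of regionRef ids. Indexed children are ordered by their index
# attribute; unordered members keep document order. Assumes the objects come
# from this module's Ordered*/Unordered* group types.
# ---------------------------------------------------------------------------
def flatten_ordered_group(group):
    """Return regionRef ids of an OrderedGroup(Indexed)Type in reading order."""
    entries = []
    entries += [(r.get_index(), [r.get_regionRef()]) for r in group.get_RegionRefIndexed()]
    entries += [(g.get_index(), flatten_ordered_group(g)) for g in group.get_OrderedGroupIndexed()]
    entries += [(g.get_index(), flatten_unordered_group(g)) for g in group.get_UnorderedGroupIndexed()]
    refs = []
    for _, nested_refs in sorted(entries, key=lambda entry: entry[0]):
        refs.extend(nested_refs)
    return refs


def flatten_unordered_group(group):
    """Return regionRef ids of an UnorderedGroup(Indexed)Type in document order."""
    refs = [r.get_regionRef() for r in group.get_RegionRef()]
    for nested in group.get_OrderedGroup():
        refs.extend(flatten_ordered_group(nested))
    for nested in group.get_UnorderedGroup():
        refs.extend(flatten_unordered_group(nested))
    return refs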
outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='OrderedGroupType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) for RegionRefIndexed_ in self.RegionRefIndexed: namespaceprefix_ = self.RegionRefIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRefIndexed_nsprefix_) else '' RegionRefIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRefIndexed', pretty_print=pretty_print) for OrderedGroupIndexed_ in self.OrderedGroupIndexed: namespaceprefix_ = self.OrderedGroupIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroupIndexed_nsprefix_) else '' OrderedGroupIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroupIndexed', pretty_print=pretty_print) for UnorderedGroupIndexed_ in self.UnorderedGroupIndexed: namespaceprefix_ = self.UnorderedGroupIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroupIndexed_nsprefix_) else '' UnorderedGroupIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroupIndexed', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('regionRef', node) if value is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') self.regionRef = value value = find_attr_value_('caption', node) if value is not None and 'caption' not in already_processed: already_processed.add('caption') self.caption = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_GroupTypeSimpleType(self.type_) # validate type GroupTypeSimpleType value = find_attr_value_('continuation', node) if value is not None and 'continuation' not in already_processed: already_processed.add('continuation') if value in ('true', '1'): self.continuation = True elif value in ('false', '0'): self.continuation = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' 
not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' elif nodeName_ == 'RegionRefIndexed': obj_ = RegionRefIndexedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.RegionRefIndexed.append(obj_) obj_.original_tagname_ = 'RegionRefIndexed' elif nodeName_ == 'OrderedGroupIndexed': obj_ = OrderedGroupIndexedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.OrderedGroupIndexed.append(obj_) obj_.original_tagname_ = 'OrderedGroupIndexed' elif nodeName_ == 'UnorderedGroupIndexed': obj_ = UnorderedGroupIndexedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UnorderedGroupIndexed.append(obj_) obj_.original_tagname_ = 'UnorderedGroupIndexed' # end class OrderedGroupType class UnorderedGroupType(GeneratedsSuper): """Numbered group (contains unordered elements) Optional link to a parent region of nested regions. The parent region doubles as reading order group. Only the nested regions should be allowed as group members. Is this group a continuation of another group (from previous column or page, for example)? For generic use""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('regionRef', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'pc:GroupTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), MemberSpec_('RegionRef', 'RegionRefType', 1, 0, {'name': 'RegionRef', 'type': 'RegionRefType'}, 6), MemberSpec_('OrderedGroup', 'OrderedGroupType', 1, 0, {'name': 'OrderedGroup', 'type': 'OrderedGroupType'}, 6), MemberSpec_('UnorderedGroup', 'UnorderedGroupType', 1, 0, {'name': 'UnorderedGroup', 'type': 'UnorderedGroupType'}, 6), ] subclass = None superclass = None def __init__(self, id=None, regionRef=None, caption=None, type_=None, continuation=None, custom=None, comments=None, UserDefined=None, Labels=None, RegionRef=None, OrderedGroup=None, UnorderedGroup=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.regionRef = _cast(None, regionRef) self.regionRef_nsprefix_ = None self.caption = _cast(None, caption) self.caption_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.continuation = _cast(bool, continuation) self.continuation_nsprefix_ = None 
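# ---------------------------------------------------------------------------
# Illustrative sketch (client code, not part of the generated bindings):
# the class-level "subclass" hook consulted by factory(). Once a subclass is
# registered, factory() -- and therefore the buildChildren() calls that go
# through it while parsing -- instantiate the subclass, so extra behaviour is
# picked up transparently. MyUnorderedGroup and the import name are made up
# for the example, and the sketch assumes the module-level
# CurrentSubclassModule_ is left at its default of None.
# ---------------------------------------------------------------------------
from page_generateds import UnorderedGroupType  # assumed module name


class MyUnorderedGroup(UnorderedGroupType):
    def member_region_ids(self):
        # RegionRef children only hold a regionRef attribute pointing at a region id.
        return [ref.get_regionRef() for ref in self.get_RegionRef()]


UnorderedGroupType.subclass = MyUnorderedGroup
group = UnorderedGroupType.factory(id="ug_0001")
assert isinstance(group, MyUnorderedGroup)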
self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" if RegionRef is None: self.RegionRef = [] else: self.RegionRef = RegionRef self.RegionRef_nsprefix_ = "pc" if OrderedGroup is None: self.OrderedGroup = [] else: self.OrderedGroup = OrderedGroup self.OrderedGroup_nsprefix_ = "pc" if UnorderedGroup is None: self.UnorderedGroup = [] else: self.UnorderedGroup = UnorderedGroup self.UnorderedGroup_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, UnorderedGroupType) if subclass is not None: return subclass(*args_, **kwargs_) if UnorderedGroupType.subclass: return UnorderedGroupType.subclass(*args_, **kwargs_) else: return UnorderedGroupType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_RegionRef(self): return self.RegionRef def set_RegionRef(self, RegionRef): self.RegionRef = RegionRef def add_RegionRef(self, value): self.RegionRef.append(value) def insert_RegionRef_at(self, index, value): self.RegionRef.insert(index, value) def replace_RegionRef_at(self, index, value): self.RegionRef[index] = value def get_OrderedGroup(self): return self.OrderedGroup def set_OrderedGroup(self, OrderedGroup): self.OrderedGroup = OrderedGroup def add_OrderedGroup(self, value): self.OrderedGroup.append(value) def insert_OrderedGroup_at(self, index, value): self.OrderedGroup.insert(index, value) def replace_OrderedGroup_at(self, index, value): self.OrderedGroup[index] = value def get_UnorderedGroup(self): return self.UnorderedGroup def set_UnorderedGroup(self, UnorderedGroup): self.UnorderedGroup = UnorderedGroup def add_UnorderedGroup(self, value): self.UnorderedGroup.append(value) def insert_UnorderedGroup_at(self, index, value): self.UnorderedGroup.insert(index, value) def replace_UnorderedGroup_at(self, index, value): self.UnorderedGroup[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_regionRef(self): return self.regionRef def set_regionRef(self, regionRef): self.regionRef = regionRef def get_caption(self): return self.caption def set_caption(self, caption): self.caption = caption def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_continuation(self): return self.continuation def set_continuation(self, continuation): self.continuation = continuation def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def validate_GroupTypeSimpleType(self, value): # Validate type pc:GroupTypeSimpleType, a restriction on string. 
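        # Allowed group types per the 2019-07-15 PAGE schema enumeration below:
        # paragraph, list, list-item, figure, article, div, other.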
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['paragraph', 'list', 'list-item', 'figure', 'article', 'div', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GroupTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.UserDefined is not None or self.Labels or self.RegionRef or self.OrderedGroup or self.UnorderedGroup ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnorderedGroupType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('UnorderedGroupType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'UnorderedGroupType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnorderedGroupType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnorderedGroupType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnorderedGroupType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.regionRef is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), )) if self.caption is not None and 'caption' not in already_processed: already_processed.add('caption') outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.continuation is not None and 'continuation' not in already_processed: already_processed.add('continuation') outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation')) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' 
comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnorderedGroupType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) for RegionRef_ in self.RegionRef: namespaceprefix_ = self.RegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRef_nsprefix_) else '' RegionRef_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRef', pretty_print=pretty_print) for OrderedGroup_ in self.OrderedGroup: namespaceprefix_ = self.OrderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroup_nsprefix_) else '' OrderedGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroup', pretty_print=pretty_print) for UnorderedGroup_ in self.UnorderedGroup: namespaceprefix_ = self.UnorderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroup_nsprefix_) else '' UnorderedGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroup', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('regionRef', node) if value is not None and 'regionRef' not in already_processed: already_processed.add('regionRef') self.regionRef = value value = find_attr_value_('caption', node) if value is not None and 'caption' not in already_processed: already_processed.add('caption') self.caption = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_GroupTypeSimpleType(self.type_) # validate type GroupTypeSimpleType value = find_attr_value_('continuation', node) if value is not None and 'continuation' not in already_processed: already_processed.add('continuation') if value in ('true', '1'): self.continuation = True elif value in ('false', '0'): self.continuation = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, 
fromsubclass_=False, gds_collector_=None): if nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' elif nodeName_ == 'RegionRef': obj_ = RegionRefType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.RegionRef.append(obj_) obj_.original_tagname_ = 'RegionRef' elif nodeName_ == 'OrderedGroup': obj_ = OrderedGroupType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.OrderedGroup.append(obj_) obj_.original_tagname_ = 'OrderedGroup' elif nodeName_ == 'UnorderedGroup': obj_ = UnorderedGroupType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UnorderedGroup.append(obj_) obj_.original_tagname_ = 'UnorderedGroup' # end class UnorderedGroupType class BorderType(GeneratedsSuper): """Border of the actual page (if the scanned image contains parts not belonging to the page).""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None), ] subclass = None superclass = None def __init__(self, Coords=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.Coords = Coords self.Coords_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, BorderType) if subclass is not None: return subclass(*args_, **kwargs_) if BorderType.subclass: return BorderType.subclass(*args_, **kwargs_) else: return BorderType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Coords(self): return self.Coords def set_Coords(self, Coords): self.Coords = Coords def hasContent_(self): if ( self.Coords is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='BorderType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('BorderType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'BorderType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BorderType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BorderType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BorderType'): pass def 
exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='BorderType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.Coords is not None: namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else '' self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Coords': obj_ = CoordsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Coords = obj_ obj_.original_tagname_ = 'Coords' def get_polygon(self): ''' Get polygon from element which is parent of a Coords element ''' points = [point for point in self.Coords.points.split(' ')] return [[int(coord) for coord in point.split(',')] for point in points] def get_polygon_string(self): ''' Get polygon string from element which is parent of a Coords element ''' return self.Coords.points.replace(' ', ',') # end class BorderType class LayersType(GeneratedsSuper): """Can be used to express the z-index of overlapping regions. An element with a greater z-index is always in front of another element with lower z-index.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('Layer', 'LayerType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'Layer', 'type': 'LayerType'}, None), ] subclass = None superclass = None def __init__(self, Layer=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" if Layer is None: self.Layer = [] else: self.Layer = Layer self.Layer_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, LayersType) if subclass is not None: return subclass(*args_, **kwargs_) if LayersType.subclass: return LayersType.subclass(*args_, **kwargs_) else: return LayersType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Layer(self): return self.Layer def set_Layer(self, Layer): self.Layer = Layer def add_Layer(self, value): self.Layer.append(value) def insert_Layer_at(self, index, value): self.Layer.insert(index, value) def replace_Layer_at(self, index, value): self.Layer[index] = value def hasContent_(self): if ( self.Layer ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LayersType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('LayersType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ 
= '' if self.original_tagname_ is not None and name_ == 'LayersType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LayersType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LayersType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LayersType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LayersType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for Layer_ in self.Layer: namespaceprefix_ = self.Layer_nsprefix_ + ':' if (UseCapturedNS_ and self.Layer_nsprefix_) else '' Layer_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Layer', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Layer': obj_ = LayerType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Layer.append(obj_) obj_.original_tagname_ = 'Layer' # end class LayersType class LayerType(GeneratedsSuper): __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('zIndex', 'int', 0, 0, {'use': 'required'}), MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('RegionRef', 'RegionRefType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'RegionRef', 'type': 'RegionRefType'}, None), ] subclass = None superclass = None def __init__(self, id=None, zIndex=None, caption=None, RegionRef=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.zIndex = _cast(int, zIndex) self.zIndex_nsprefix_ = None self.caption = _cast(None, caption) self.caption_nsprefix_ = None if RegionRef is None: self.RegionRef = [] else: self.RegionRef = RegionRef self.RegionRef_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, LayerType) if subclass is not None: return subclass(*args_, **kwargs_) if LayerType.subclass: return LayerType.subclass(*args_, **kwargs_) else: return LayerType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def 
set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_RegionRef(self): return self.RegionRef def set_RegionRef(self, RegionRef): self.RegionRef = RegionRef def add_RegionRef(self, value): self.RegionRef.append(value) def insert_RegionRef_at(self, index, value): self.RegionRef.insert(index, value) def replace_RegionRef_at(self, index, value): self.RegionRef[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_zIndex(self): return self.zIndex def set_zIndex(self, zIndex): self.zIndex = zIndex def get_caption(self): return self.caption def set_caption(self, caption): self.caption = caption def hasContent_(self): if ( self.RegionRef ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LayerType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('LayerType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'LayerType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LayerType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LayerType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LayerType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.zIndex is not None and 'zIndex' not in already_processed: already_processed.add('zIndex') outfile.write(' zIndex="%s"' % self.gds_format_integer(self.zIndex, input_name='zIndex')) if self.caption is not None and 'caption' not in already_processed: already_processed.add('caption') outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LayerType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for RegionRef_ in self.RegionRef: namespaceprefix_ = self.RegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRef_nsprefix_) else '' RegionRef_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRef', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = 
find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('zIndex', node) if value is not None and 'zIndex' not in already_processed: already_processed.add('zIndex') self.zIndex = self.gds_parse_integer(value, node, 'zIndex') value = find_attr_value_('caption', node) if value is not None and 'caption' not in already_processed: already_processed.add('caption') self.caption = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'RegionRef': obj_ = RegionRefType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.RegionRef.append(obj_) obj_.original_tagname_ = 'RegionRef' # end class LayerType class BaselineType(GeneratedsSuper): """Confidence value (between 0 and 1)""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('points', 'pc:PointsType', 0, 0, {'use': 'required'}), MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}), ] subclass = None superclass = None def __init__(self, points=None, conf=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.points = _cast(None, points) self.points_nsprefix_ = None self.conf = _cast(float, conf) self.conf_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, BaselineType) if subclass is not None: return subclass(*args_, **kwargs_) if BaselineType.subclass: return BaselineType.subclass(*args_, **kwargs_) else: return BaselineType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_points(self): return self.points def set_points(self, points): self.points = points def get_conf(self): return self.conf def set_conf(self, conf): self.conf = conf def validate_PointsType(self, value): # Validate type pc:PointsType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False if not self.gds_validate_simple_patterns( self.validate_PointsType_patterns_, value): self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_PointsType_patterns_, )) validate_PointsType_patterns_ = [['^(([0-9]+,[0-9]+ )+([0-9]+,[0-9]+))$']] def validate_ConfSimpleType(self, value): # Validate type pc:ConfSimpleType, a restriction on float. 
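        # conf is a confidence value; the schema restricts it to the closed interval [0, 1],
        # which the min/max checks below enforce.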
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, float):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, })
                return False
            if value < 0:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
                result = False
            if value > 1:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
                result = False
    def hasContent_(self):
        if (

        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='BaselineType', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaselineType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'BaselineType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaselineType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaselineType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaselineType'):
        if self.points is not None and 'points' not in already_processed:
            already_processed.add('points')
            outfile.write(' points=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.points), input_name='points')), ))
        if self.conf is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='BaselineType', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node, gds_collector_=None):
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('points', node)
        if value is not None and 'points' not in already_processed:
            already_processed.add('points')
            self.points = value
            self.validate_PointsType(self.points)    # validate type PointsType
        value = find_attr_value_('conf', node)
        if value is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            value = self.gds_parse_float(value, node, 'conf')
            self.conf = value
self.validate_ConfSimpleType(self.conf) # validate type ConfSimpleType def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class BaselineType class RelationsType(GeneratedsSuper): """Container for one-to-one relations between layout objects (for example: DropCap - paragraph, caption - image).""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('Relation', 'RelationType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'Relation', 'type': 'RelationType'}, None), ] subclass = None superclass = None def __init__(self, Relation=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" if Relation is None: self.Relation = [] else: self.Relation = Relation self.Relation_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, RelationsType) if subclass is not None: return subclass(*args_, **kwargs_) if RelationsType.subclass: return RelationsType.subclass(*args_, **kwargs_) else: return RelationsType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Relation(self): return self.Relation def set_Relation(self, Relation): self.Relation = Relation def add_Relation(self, value): self.Relation.append(value) def insert_Relation_at(self, index, value): self.Relation.insert(index, value) def replace_Relation_at(self, index, value): self.Relation[index] = value def hasContent_(self): if ( self.Relation ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RelationsType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('RelationsType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'RelationsType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RelationsType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RelationsType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RelationsType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RelationsType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for Relation_ in self.Relation: namespaceprefix_ = self.Relation_nsprefix_ + ':' if (UseCapturedNS_ and self.Relation_nsprefix_) else '' Relation_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Relation', pretty_print=pretty_print) def 
build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Relation': obj_ = RelationType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Relation.append(obj_) obj_.original_tagname_ = 'Relation' # end class RelationsType class RelationType(GeneratedsSuper): """One-to-one relation between to layout object. Use 'link' for loose relations and 'join' for strong relations (where something is fragmented for instance). Examples for 'link': caption - image floating - paragraph paragraph - paragraph (when a paragraph is split across columns and the last word of the first paragraph DOES NOT continue in the second paragraph) drop-cap - paragraph (when the drop-cap is a whole word) Examples for 'join': word - word (separated word at the end of a line) drop-cap - paragraph (when the drop-cap is not a whole word) paragraph - paragraph (when a pragraph is split across columns and the last word of the first paragraph DOES continue in the second paragraph) For generic use""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('type_', 'typeType1', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), MemberSpec_('SourceRegionRef', 'RegionRefType', 0, 0, {'maxOccurs': '1', 'minOccurs': '1', 'name': 'SourceRegionRef', 'type': 'RegionRefType'}, None), MemberSpec_('TargetRegionRef', 'RegionRefType', 0, 0, {'maxOccurs': '1', 'minOccurs': '1', 'name': 'TargetRegionRef', 'type': 'RegionRefType'}, None), ] subclass = None superclass = None def __init__(self, id=None, type_=None, custom=None, comments=None, Labels=None, SourceRegionRef=None, TargetRegionRef=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" self.SourceRegionRef = SourceRegionRef self.SourceRegionRef_nsprefix_ = "pc" self.TargetRegionRef = TargetRegionRef self.TargetRegionRef_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, RelationType) if subclass is not None: return subclass(*args_, **kwargs_) if RelationType.subclass: return RelationType.subclass(*args_, **kwargs_) else: return RelationType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def 
set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_SourceRegionRef(self): return self.SourceRegionRef def set_SourceRegionRef(self, SourceRegionRef): self.SourceRegionRef = SourceRegionRef def get_TargetRegionRef(self): return self.TargetRegionRef def set_TargetRegionRef(self, TargetRegionRef): self.TargetRegionRef = TargetRegionRef def get_id(self): return self.id def set_id(self, id): self.id = id def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def validate_typeType1(self, value): # Validate type typeType1, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['link', 'join'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on typeType1' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.Labels or self.SourceRegionRef is not None or self.TargetRegionRef is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RelationType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('RelationType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'RelationType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RelationType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RelationType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RelationType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.custom is not None and 'custom' not in already_processed: 
already_processed.add('custom') outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RelationType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) if self.SourceRegionRef is not None: namespaceprefix_ = self.SourceRegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.SourceRegionRef_nsprefix_) else '' self.SourceRegionRef.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SourceRegionRef', pretty_print=pretty_print) if self.TargetRegionRef is not None: namespaceprefix_ = self.TargetRegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.TargetRegionRef_nsprefix_) else '' self.TargetRegionRef.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TargetRegionRef', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_typeType1(self.type_) # validate type typeType1 value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' elif nodeName_ == 'SourceRegionRef': obj_ = RegionRefType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.SourceRegionRef = obj_ obj_.original_tagname_ = 'SourceRegionRef' elif nodeName_ == 'TargetRegionRef': obj_ = RegionRefType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TargetRegionRef = obj_ obj_.original_tagname_ = 'TargetRegionRef' # end class RelationType class TextStyleType(GeneratedsSuper): """Monospace (fixed-pitch, non-proportional) or proportional font. For instance: Arial, Times New Roman. Add more information if necessary (e.g. blackletter, antiqua). Serif or sans-serif typeface. 
The size of the characters in points. The x-height or corpus size refers to the distance between the baseline and the mean line of lower-case letters in a typeface. The unit is assumed to be pixels. The degree of space (in points) between the characters in a string of text. Text colour in RGB encoded format (red value) + (256 x green value) + (65536 x blue value). Background colour Background colour in RGB encoded format (red value) + (256 x green value) + (65536 x blue value). Specifies whether the colour of the text appears reversed against a background colour. Line style details if "underlined" is TRUE""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('fontFamily', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('serif', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('monospace', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('fontSize', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('xHeight', 'integer', 0, 1, {'use': 'optional'}), MemberSpec_('kerning', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('textColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('textColourRgb', 'integer', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('bgColourRgb', 'integer', 0, 1, {'use': 'optional'}), MemberSpec_('reverseVideo', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('bold', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('italic', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('underlined', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('underlineStyle', 'pc:UnderlineStyleSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('subscript', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('superscript', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('strikethrough', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('smallCaps', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('letterSpaced', 'boolean', 0, 1, {'use': 'optional'}), ] subclass = None superclass = None def __init__(self, fontFamily=None, serif=None, monospace=None, fontSize=None, xHeight=None, kerning=None, textColour=None, textColourRgb=None, bgColour=None, bgColourRgb=None, reverseVideo=None, bold=None, italic=None, underlined=None, underlineStyle=None, subscript=None, superscript=None, strikethrough=None, smallCaps=None, letterSpaced=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.fontFamily = _cast(None, fontFamily) self.fontFamily_nsprefix_ = None self.serif = _cast(bool, serif) self.serif_nsprefix_ = None self.monospace = _cast(bool, monospace) self.monospace_nsprefix_ = None self.fontSize = _cast(float, fontSize) self.fontSize_nsprefix_ = None self.xHeight = _cast(int, xHeight) self.xHeight_nsprefix_ = None self.kerning = _cast(int, kerning) self.kerning_nsprefix_ = None self.textColour = _cast(None, textColour) self.textColour_nsprefix_ = None self.textColourRgb = _cast(int, textColourRgb) self.textColourRgb_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None self.bgColourRgb = _cast(int, bgColourRgb) self.bgColourRgb_nsprefix_ = None self.reverseVideo = _cast(bool, reverseVideo) self.reverseVideo_nsprefix_ = None self.bold = _cast(bool, bold) self.bold_nsprefix_ = None self.italic = _cast(bool, italic) self.italic_nsprefix_ = None self.underlined = _cast(bool, underlined) 
self.underlined_nsprefix_ = None self.underlineStyle = _cast(None, underlineStyle) self.underlineStyle_nsprefix_ = None self.subscript = _cast(bool, subscript) self.subscript_nsprefix_ = None self.superscript = _cast(bool, superscript) self.superscript_nsprefix_ = None self.strikethrough = _cast(bool, strikethrough) self.strikethrough_nsprefix_ = None self.smallCaps = _cast(bool, smallCaps) self.smallCaps_nsprefix_ = None self.letterSpaced = _cast(bool, letterSpaced) self.letterSpaced_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, TextStyleType) if subclass is not None: return subclass(*args_, **kwargs_) if TextStyleType.subclass: return TextStyleType.subclass(*args_, **kwargs_) else: return TextStyleType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_fontFamily(self): return self.fontFamily def set_fontFamily(self, fontFamily): self.fontFamily = fontFamily def get_serif(self): return self.serif def set_serif(self, serif): self.serif = serif def get_monospace(self): return self.monospace def set_monospace(self, monospace): self.monospace = monospace def get_fontSize(self): return self.fontSize def set_fontSize(self, fontSize): self.fontSize = fontSize def get_xHeight(self): return self.xHeight def set_xHeight(self, xHeight): self.xHeight = xHeight def get_kerning(self): return self.kerning def set_kerning(self, kerning): self.kerning = kerning def get_textColour(self): return self.textColour def set_textColour(self, textColour): self.textColour = textColour def get_textColourRgb(self): return self.textColourRgb def set_textColourRgb(self, textColourRgb): self.textColourRgb = textColourRgb def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def get_bgColourRgb(self): return self.bgColourRgb def set_bgColourRgb(self, bgColourRgb): self.bgColourRgb = bgColourRgb def get_reverseVideo(self): return self.reverseVideo def set_reverseVideo(self, reverseVideo): self.reverseVideo = reverseVideo def get_bold(self): return self.bold def set_bold(self, bold): self.bold = bold def get_italic(self): return self.italic def set_italic(self, italic): self.italic = italic def get_underlined(self): return self.underlined def set_underlined(self, underlined): self.underlined = underlined def get_underlineStyle(self): return self.underlineStyle def set_underlineStyle(self, underlineStyle): self.underlineStyle = underlineStyle def get_subscript(self): return self.subscript def set_subscript(self, subscript): self.subscript = subscript def get_superscript(self): return self.superscript def set_superscript(self, superscript): self.superscript = superscript def get_strikethrough(self): return self.strikethrough def set_strikethrough(self, strikethrough): self.strikethrough = strikethrough def get_smallCaps(self): return self.smallCaps def set_smallCaps(self, smallCaps): self.smallCaps = smallCaps def get_letterSpaced(self): return self.letterSpaced def set_letterSpaced(self, letterSpaced): self.letterSpaced = letterSpaced def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. 
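        # Allowed colour names per the enumeration below: black, blue, brown, cyan, green, grey,
        # indigo, magenta, orange, pink, red, turquoise, violet, white, yellow, other.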
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_UnderlineStyleSimpleType(self, value): # Validate type pc:UnderlineStyleSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['singleLine', 'doubleLine', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on UnderlineStyleSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextStyleType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextStyleType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'TextStyleType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextStyleType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TextStyleType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextStyleType'): if self.fontFamily is not None and 'fontFamily' not in already_processed: already_processed.add('fontFamily') outfile.write(' fontFamily=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.fontFamily), input_name='fontFamily')), )) if self.serif is not None and 'serif' not in already_processed: already_processed.add('serif') outfile.write(' serif="%s"' % self.gds_format_boolean(self.serif, input_name='serif')) if self.monospace is not None and 'monospace' not in already_processed: already_processed.add('monospace') outfile.write(' monospace="%s"' % self.gds_format_boolean(self.monospace, input_name='monospace')) if self.fontSize is not None and 'fontSize' not in already_processed: already_processed.add('fontSize') outfile.write(' fontSize="%s"' % 
self.gds_format_float(self.fontSize, input_name='fontSize')) if self.xHeight is not None and 'xHeight' not in already_processed: already_processed.add('xHeight') outfile.write(' xHeight="%s"' % self.gds_format_integer(self.xHeight, input_name='xHeight')) if self.kerning is not None and 'kerning' not in already_processed: already_processed.add('kerning') outfile.write(' kerning="%s"' % self.gds_format_integer(self.kerning, input_name='kerning')) if self.textColour is not None and 'textColour' not in already_processed: already_processed.add('textColour') outfile.write(' textColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.textColour), input_name='textColour')), )) if self.textColourRgb is not None and 'textColourRgb' not in already_processed: already_processed.add('textColourRgb') outfile.write(' textColourRgb="%s"' % self.gds_format_integer(self.textColourRgb, input_name='textColourRgb')) if self.bgColour is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) if self.bgColourRgb is not None and 'bgColourRgb' not in already_processed: already_processed.add('bgColourRgb') outfile.write(' bgColourRgb="%s"' % self.gds_format_integer(self.bgColourRgb, input_name='bgColourRgb')) if self.reverseVideo is not None and 'reverseVideo' not in already_processed: already_processed.add('reverseVideo') outfile.write(' reverseVideo="%s"' % self.gds_format_boolean(self.reverseVideo, input_name='reverseVideo')) if self.bold is not None and 'bold' not in already_processed: already_processed.add('bold') outfile.write(' bold="%s"' % self.gds_format_boolean(self.bold, input_name='bold')) if self.italic is not None and 'italic' not in already_processed: already_processed.add('italic') outfile.write(' italic="%s"' % self.gds_format_boolean(self.italic, input_name='italic')) if self.underlined is not None and 'underlined' not in already_processed: already_processed.add('underlined') outfile.write(' underlined="%s"' % self.gds_format_boolean(self.underlined, input_name='underlined')) if self.underlineStyle is not None and 'underlineStyle' not in already_processed: already_processed.add('underlineStyle') outfile.write(' underlineStyle=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.underlineStyle), input_name='underlineStyle')), )) if self.subscript is not None and 'subscript' not in already_processed: already_processed.add('subscript') outfile.write(' subscript="%s"' % self.gds_format_boolean(self.subscript, input_name='subscript')) if self.superscript is not None and 'superscript' not in already_processed: already_processed.add('superscript') outfile.write(' superscript="%s"' % self.gds_format_boolean(self.superscript, input_name='superscript')) if self.strikethrough is not None and 'strikethrough' not in already_processed: already_processed.add('strikethrough') outfile.write(' strikethrough="%s"' % self.gds_format_boolean(self.strikethrough, input_name='strikethrough')) if self.smallCaps is not None and 'smallCaps' not in already_processed: already_processed.add('smallCaps') outfile.write(' smallCaps="%s"' % self.gds_format_boolean(self.smallCaps, input_name='smallCaps')) if self.letterSpaced is not None and 'letterSpaced' not in already_processed: already_processed.add('letterSpaced') outfile.write(' letterSpaced="%s"' % self.gds_format_boolean(self.letterSpaced, input_name='letterSpaced')) def exportChildren(self, 
outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextStyleType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('fontFamily', node) if value is not None and 'fontFamily' not in already_processed: already_processed.add('fontFamily') self.fontFamily = value value = find_attr_value_('serif', node) if value is not None and 'serif' not in already_processed: already_processed.add('serif') if value in ('true', '1'): self.serif = True elif value in ('false', '0'): self.serif = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('monospace', node) if value is not None and 'monospace' not in already_processed: already_processed.add('monospace') if value in ('true', '1'): self.monospace = True elif value in ('false', '0'): self.monospace = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('fontSize', node) if value is not None and 'fontSize' not in already_processed: already_processed.add('fontSize') value = self.gds_parse_float(value, node, 'fontSize') self.fontSize = value value = find_attr_value_('xHeight', node) if value is not None and 'xHeight' not in already_processed: already_processed.add('xHeight') self.xHeight = self.gds_parse_integer(value, node, 'xHeight') value = find_attr_value_('kerning', node) if value is not None and 'kerning' not in already_processed: already_processed.add('kerning') self.kerning = self.gds_parse_integer(value, node, 'kerning') value = find_attr_value_('textColour', node) if value is not None and 'textColour' not in already_processed: already_processed.add('textColour') self.textColour = value self.validate_ColourSimpleType(self.textColour) # validate type ColourSimpleType value = find_attr_value_('textColourRgb', node) if value is not None and 'textColourRgb' not in already_processed: already_processed.add('textColourRgb') self.textColourRgb = self.gds_parse_integer(value, node, 'textColourRgb') value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType value = find_attr_value_('bgColourRgb', node) if value is not None and 'bgColourRgb' not in already_processed: already_processed.add('bgColourRgb') self.bgColourRgb = self.gds_parse_integer(value, node, 'bgColourRgb') value = find_attr_value_('reverseVideo', node) if value is not None and 'reverseVideo' not in already_processed: already_processed.add('reverseVideo') if value in ('true', '1'): self.reverseVideo = True elif value in ('false', '0'): self.reverseVideo = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('bold', node) if value is not None and 'bold' not in already_processed: already_processed.add('bold') if value in ('true', '1'): self.bold = True elif value in ('false', '0'): self.bold = False else: raise_parse_error(node, 'Bad boolean attribute') 
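        # Note: boolean attributes are parsed from the XSD lexical forms only --
        # 'true'/'1' map to True and 'false'/'0' map to False; any other value
        # raises a parse error. The same pattern applies to every boolean
        # attribute handled in this method.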
value = find_attr_value_('italic', node) if value is not None and 'italic' not in already_processed: already_processed.add('italic') if value in ('true', '1'): self.italic = True elif value in ('false', '0'): self.italic = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('underlined', node) if value is not None and 'underlined' not in already_processed: already_processed.add('underlined') if value in ('true', '1'): self.underlined = True elif value in ('false', '0'): self.underlined = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('underlineStyle', node) if value is not None and 'underlineStyle' not in already_processed: already_processed.add('underlineStyle') self.underlineStyle = value self.validate_UnderlineStyleSimpleType(self.underlineStyle) # validate type UnderlineStyleSimpleType value = find_attr_value_('subscript', node) if value is not None and 'subscript' not in already_processed: already_processed.add('subscript') if value in ('true', '1'): self.subscript = True elif value in ('false', '0'): self.subscript = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('superscript', node) if value is not None and 'superscript' not in already_processed: already_processed.add('superscript') if value in ('true', '1'): self.superscript = True elif value in ('false', '0'): self.superscript = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('strikethrough', node) if value is not None and 'strikethrough' not in already_processed: already_processed.add('strikethrough') if value in ('true', '1'): self.strikethrough = True elif value in ('false', '0'): self.strikethrough = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('smallCaps', node) if value is not None and 'smallCaps' not in already_processed: already_processed.add('smallCaps') if value in ('true', '1'): self.smallCaps = True elif value in ('false', '0'): self.smallCaps = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('letterSpaced', node) if value is not None and 'letterSpaced' not in already_processed: already_processed.add('letterSpaced') if value in ('true', '1'): self.letterSpaced = True elif value in ('false', '0'): self.letterSpaced = False else: raise_parse_error(node, 'Bad boolean attribute') def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class TextStyleType class RegionType(GeneratedsSuper): """For generic use Is this region a continuation of another region (in previous column or page, for example)?""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None), MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None), MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None), MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None), 
MemberSpec_('Roles', 'RolesType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Roles', 'type': 'RolesType'}, None), MemberSpec_('TextRegion', 'TextRegionType', 1, 1, {'name': 'TextRegion', 'type': 'TextRegionType'}, 7), MemberSpec_('ImageRegion', 'ImageRegionType', 1, 1, {'name': 'ImageRegion', 'type': 'ImageRegionType'}, 7), MemberSpec_('LineDrawingRegion', 'LineDrawingRegionType', 1, 1, {'name': 'LineDrawingRegion', 'type': 'LineDrawingRegionType'}, 7), MemberSpec_('GraphicRegion', 'GraphicRegionType', 1, 1, {'name': 'GraphicRegion', 'type': 'GraphicRegionType'}, 7), MemberSpec_('TableRegion', 'TableRegionType', 1, 1, {'name': 'TableRegion', 'type': 'TableRegionType'}, 7), MemberSpec_('ChartRegion', 'ChartRegionType', 1, 1, {'name': 'ChartRegion', 'type': 'ChartRegionType'}, 7), MemberSpec_('SeparatorRegion', 'SeparatorRegionType', 1, 1, {'name': 'SeparatorRegion', 'type': 'SeparatorRegionType'}, 7), MemberSpec_('MathsRegion', 'MathsRegionType', 1, 1, {'name': 'MathsRegion', 'type': 'MathsRegionType'}, 7), MemberSpec_('ChemRegion', 'ChemRegionType', 1, 1, {'name': 'ChemRegion', 'type': 'ChemRegionType'}, 7), MemberSpec_('MusicRegion', 'MusicRegionType', 1, 1, {'name': 'MusicRegion', 'type': 'MusicRegionType'}, 7), MemberSpec_('AdvertRegion', 'AdvertRegionType', 1, 1, {'name': 'AdvertRegion', 'type': 'AdvertRegionType'}, 7), MemberSpec_('NoiseRegion', 'NoiseRegionType', 1, 1, {'name': 'NoiseRegion', 'type': 'NoiseRegionType'}, 7), MemberSpec_('UnknownRegion', 'UnknownRegionType', 1, 1, {'name': 'UnknownRegion', 'type': 'UnknownRegionType'}, 7), MemberSpec_('CustomRegion', 'CustomRegionType', 1, 1, {'name': 'CustomRegion', 'type': 'CustomRegionType'}, 7), ] subclass = None superclass = None def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, extensiontype_=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None self.continuation = _cast(bool, continuation) self.continuation_nsprefix_ = None if AlternativeImage is None: self.AlternativeImage = [] else: self.AlternativeImage = AlternativeImage self.AlternativeImage_nsprefix_ = "pc" self.Coords = Coords self.Coords_nsprefix_ = "pc" self.UserDefined = UserDefined self.UserDefined_nsprefix_ = "pc" if Labels is None: self.Labels = [] else: self.Labels = Labels self.Labels_nsprefix_ = "pc" self.Roles = Roles self.Roles_nsprefix_ = "pc" if TextRegion is None: self.TextRegion = [] else: self.TextRegion = TextRegion self.TextRegion_nsprefix_ = "pc" if ImageRegion is None: self.ImageRegion = [] else: self.ImageRegion = ImageRegion self.ImageRegion_nsprefix_ = "pc" if LineDrawingRegion is None: self.LineDrawingRegion = [] else: self.LineDrawingRegion = LineDrawingRegion self.LineDrawingRegion_nsprefix_ = "pc" if GraphicRegion is None: self.GraphicRegion = [] else: self.GraphicRegion = GraphicRegion self.GraphicRegion_nsprefix_ = "pc" if TableRegion 
is None: self.TableRegion = [] else: self.TableRegion = TableRegion self.TableRegion_nsprefix_ = "pc" if ChartRegion is None: self.ChartRegion = [] else: self.ChartRegion = ChartRegion self.ChartRegion_nsprefix_ = "pc" if SeparatorRegion is None: self.SeparatorRegion = [] else: self.SeparatorRegion = SeparatorRegion self.SeparatorRegion_nsprefix_ = "pc" if MathsRegion is None: self.MathsRegion = [] else: self.MathsRegion = MathsRegion self.MathsRegion_nsprefix_ = "pc" if ChemRegion is None: self.ChemRegion = [] else: self.ChemRegion = ChemRegion self.ChemRegion_nsprefix_ = "pc" if MusicRegion is None: self.MusicRegion = [] else: self.MusicRegion = MusicRegion self.MusicRegion_nsprefix_ = "pc" if AdvertRegion is None: self.AdvertRegion = [] else: self.AdvertRegion = AdvertRegion self.AdvertRegion_nsprefix_ = "pc" if NoiseRegion is None: self.NoiseRegion = [] else: self.NoiseRegion = NoiseRegion self.NoiseRegion_nsprefix_ = "pc" if UnknownRegion is None: self.UnknownRegion = [] else: self.UnknownRegion = UnknownRegion self.UnknownRegion_nsprefix_ = "pc" if CustomRegion is None: self.CustomRegion = [] else: self.CustomRegion = CustomRegion self.CustomRegion_nsprefix_ = "pc" self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, RegionType) if subclass is not None: return subclass(*args_, **kwargs_) if RegionType.subclass: return RegionType.subclass(*args_, **kwargs_) else: return RegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_AlternativeImage(self): return self.AlternativeImage def set_AlternativeImage(self, AlternativeImage): self.AlternativeImage = AlternativeImage def add_AlternativeImage(self, value): self.AlternativeImage.append(value) def insert_AlternativeImage_at(self, index, value): self.AlternativeImage.insert(index, value) def replace_AlternativeImage_at(self, index, value): self.AlternativeImage[index] = value def get_Coords(self): return self.Coords def set_Coords(self, Coords): self.Coords = Coords def get_UserDefined(self): return self.UserDefined def set_UserDefined(self, UserDefined): self.UserDefined = UserDefined def get_Labels(self): return self.Labels def set_Labels(self, Labels): self.Labels = Labels def add_Labels(self, value): self.Labels.append(value) def insert_Labels_at(self, index, value): self.Labels.insert(index, value) def replace_Labels_at(self, index, value): self.Labels[index] = value def get_Roles(self): return self.Roles def set_Roles(self, Roles): self.Roles = Roles def get_TextRegion(self): return self.TextRegion def set_TextRegion(self, TextRegion): self.TextRegion = TextRegion def add_TextRegion(self, value): self.TextRegion.append(value) def insert_TextRegion_at(self, index, value): self.TextRegion.insert(index, value) def replace_TextRegion_at(self, index, value): self.TextRegion[index] = value def get_ImageRegion(self): return self.ImageRegion def set_ImageRegion(self, ImageRegion): self.ImageRegion = ImageRegion def add_ImageRegion(self, value): self.ImageRegion.append(value) def insert_ImageRegion_at(self, index, value): self.ImageRegion.insert(index, value) def replace_ImageRegion_at(self, index, value): self.ImageRegion[index] = value def get_LineDrawingRegion(self): return self.LineDrawingRegion def set_LineDrawingRegion(self, LineDrawingRegion): self.LineDrawingRegion = LineDrawingRegion def 
add_LineDrawingRegion(self, value): self.LineDrawingRegion.append(value) def insert_LineDrawingRegion_at(self, index, value): self.LineDrawingRegion.insert(index, value) def replace_LineDrawingRegion_at(self, index, value): self.LineDrawingRegion[index] = value def get_GraphicRegion(self): return self.GraphicRegion def set_GraphicRegion(self, GraphicRegion): self.GraphicRegion = GraphicRegion def add_GraphicRegion(self, value): self.GraphicRegion.append(value) def insert_GraphicRegion_at(self, index, value): self.GraphicRegion.insert(index, value) def replace_GraphicRegion_at(self, index, value): self.GraphicRegion[index] = value def get_TableRegion(self): return self.TableRegion def set_TableRegion(self, TableRegion): self.TableRegion = TableRegion def add_TableRegion(self, value): self.TableRegion.append(value) def insert_TableRegion_at(self, index, value): self.TableRegion.insert(index, value) def replace_TableRegion_at(self, index, value): self.TableRegion[index] = value def get_ChartRegion(self): return self.ChartRegion def set_ChartRegion(self, ChartRegion): self.ChartRegion = ChartRegion def add_ChartRegion(self, value): self.ChartRegion.append(value) def insert_ChartRegion_at(self, index, value): self.ChartRegion.insert(index, value) def replace_ChartRegion_at(self, index, value): self.ChartRegion[index] = value def get_SeparatorRegion(self): return self.SeparatorRegion def set_SeparatorRegion(self, SeparatorRegion): self.SeparatorRegion = SeparatorRegion def add_SeparatorRegion(self, value): self.SeparatorRegion.append(value) def insert_SeparatorRegion_at(self, index, value): self.SeparatorRegion.insert(index, value) def replace_SeparatorRegion_at(self, index, value): self.SeparatorRegion[index] = value def get_MathsRegion(self): return self.MathsRegion def set_MathsRegion(self, MathsRegion): self.MathsRegion = MathsRegion def add_MathsRegion(self, value): self.MathsRegion.append(value) def insert_MathsRegion_at(self, index, value): self.MathsRegion.insert(index, value) def replace_MathsRegion_at(self, index, value): self.MathsRegion[index] = value def get_ChemRegion(self): return self.ChemRegion def set_ChemRegion(self, ChemRegion): self.ChemRegion = ChemRegion def add_ChemRegion(self, value): self.ChemRegion.append(value) def insert_ChemRegion_at(self, index, value): self.ChemRegion.insert(index, value) def replace_ChemRegion_at(self, index, value): self.ChemRegion[index] = value def get_MusicRegion(self): return self.MusicRegion def set_MusicRegion(self, MusicRegion): self.MusicRegion = MusicRegion def add_MusicRegion(self, value): self.MusicRegion.append(value) def insert_MusicRegion_at(self, index, value): self.MusicRegion.insert(index, value) def replace_MusicRegion_at(self, index, value): self.MusicRegion[index] = value def get_AdvertRegion(self): return self.AdvertRegion def set_AdvertRegion(self, AdvertRegion): self.AdvertRegion = AdvertRegion def add_AdvertRegion(self, value): self.AdvertRegion.append(value) def insert_AdvertRegion_at(self, index, value): self.AdvertRegion.insert(index, value) def replace_AdvertRegion_at(self, index, value): self.AdvertRegion[index] = value def get_NoiseRegion(self): return self.NoiseRegion def set_NoiseRegion(self, NoiseRegion): self.NoiseRegion = NoiseRegion def add_NoiseRegion(self, value): self.NoiseRegion.append(value) def insert_NoiseRegion_at(self, index, value): self.NoiseRegion.insert(index, value) def replace_NoiseRegion_at(self, index, value): self.NoiseRegion[index] = value def get_UnknownRegion(self): return 
self.UnknownRegion def set_UnknownRegion(self, UnknownRegion): self.UnknownRegion = UnknownRegion def add_UnknownRegion(self, value): self.UnknownRegion.append(value) def insert_UnknownRegion_at(self, index, value): self.UnknownRegion.insert(index, value) def replace_UnknownRegion_at(self, index, value): self.UnknownRegion[index] = value def get_CustomRegion(self): return self.CustomRegion def set_CustomRegion(self, CustomRegion): self.CustomRegion = CustomRegion def add_CustomRegion(self, value): self.CustomRegion.append(value) def insert_CustomRegion_at(self, index, value): self.CustomRegion.insert(index, value) def replace_CustomRegion_at(self, index, value): self.CustomRegion[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def get_continuation(self): return self.continuation def set_continuation(self, continuation): self.continuation = continuation def get_extensiontype_(self): return self.extensiontype_ def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def hasContent_(self): if ( self.AlternativeImage or self.Coords is not None or self.UserDefined is not None or self.Labels or self.Roles is not None or self.TextRegion or self.ImageRegion or self.LineDrawingRegion or self.GraphicRegion or self.TableRegion or self.ChartRegion or self.SeparatorRegion or self.MathsRegion or self.ChemRegion or self.MusicRegion or self.AdvertRegion or self.NoiseRegion or self.UnknownRegion or self.CustomRegion ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('RegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'RegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RegionType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), 
input_name='comments')), )) if self.continuation is not None and 'continuation' not in already_processed: already_processed.add('continuation') outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation')) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') if ":" not in self.extensiontype_: imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) else: outfile.write(' xsi:type="%s"' % self.extensiontype_) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for AlternativeImage_ in self.AlternativeImage: namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else '' AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print) if self.Coords is not None: namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else '' self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print) if self.UserDefined is not None: namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else '' self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print) for Labels_ in self.Labels: namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else '' Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print) if self.Roles is not None: namespaceprefix_ = self.Roles_nsprefix_ + ':' if (UseCapturedNS_ and self.Roles_nsprefix_) else '' self.Roles.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Roles', pretty_print=pretty_print) for TextRegion_ in self.TextRegion: namespaceprefix_ = self.TextRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.TextRegion_nsprefix_) else '' TextRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextRegion', pretty_print=pretty_print) for ImageRegion_ in self.ImageRegion: namespaceprefix_ = self.ImageRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageRegion_nsprefix_) else '' ImageRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageRegion', pretty_print=pretty_print) for LineDrawingRegion_ in self.LineDrawingRegion: namespaceprefix_ = self.LineDrawingRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.LineDrawingRegion_nsprefix_) else '' LineDrawingRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LineDrawingRegion', pretty_print=pretty_print) for GraphicRegion_ in self.GraphicRegion: namespaceprefix_ = self.GraphicRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.GraphicRegion_nsprefix_) else '' GraphicRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GraphicRegion', pretty_print=pretty_print) for TableRegion_ in self.TableRegion: namespaceprefix_ = self.TableRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.TableRegion_nsprefix_) else '' TableRegion_.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='TableRegion', pretty_print=pretty_print) for ChartRegion_ in self.ChartRegion: namespaceprefix_ = self.ChartRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ChartRegion_nsprefix_) else '' ChartRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChartRegion', pretty_print=pretty_print) for SeparatorRegion_ in self.SeparatorRegion: namespaceprefix_ = self.SeparatorRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.SeparatorRegion_nsprefix_) else '' SeparatorRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SeparatorRegion', pretty_print=pretty_print) for MathsRegion_ in self.MathsRegion: namespaceprefix_ = self.MathsRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MathsRegion_nsprefix_) else '' MathsRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MathsRegion', pretty_print=pretty_print) for ChemRegion_ in self.ChemRegion: namespaceprefix_ = self.ChemRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ChemRegion_nsprefix_) else '' ChemRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChemRegion', pretty_print=pretty_print) for MusicRegion_ in self.MusicRegion: namespaceprefix_ = self.MusicRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MusicRegion_nsprefix_) else '' MusicRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MusicRegion', pretty_print=pretty_print) for AdvertRegion_ in self.AdvertRegion: namespaceprefix_ = self.AdvertRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.AdvertRegion_nsprefix_) else '' AdvertRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AdvertRegion', pretty_print=pretty_print) for NoiseRegion_ in self.NoiseRegion: namespaceprefix_ = self.NoiseRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.NoiseRegion_nsprefix_) else '' NoiseRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NoiseRegion', pretty_print=pretty_print) for UnknownRegion_ in self.UnknownRegion: namespaceprefix_ = self.UnknownRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.UnknownRegion_nsprefix_) else '' UnknownRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnknownRegion', pretty_print=pretty_print) for CustomRegion_ in self.CustomRegion: namespaceprefix_ = self.CustomRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomRegion_nsprefix_) else '' CustomRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CustomRegion', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value value = find_attr_value_('continuation', node) if value is not None and 
'continuation' not in already_processed: already_processed.add('continuation') if value in ('true', '1'): self.continuation = True elif value in ('false', '0'): self.continuation = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'AlternativeImage': obj_ = AlternativeImageType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.AlternativeImage.append(obj_) obj_.original_tagname_ = 'AlternativeImage' elif nodeName_ == 'Coords': obj_ = CoordsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Coords = obj_ obj_.original_tagname_ = 'Coords' elif nodeName_ == 'UserDefined': obj_ = UserDefinedType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserDefined = obj_ obj_.original_tagname_ = 'UserDefined' elif nodeName_ == 'Labels': obj_ = LabelsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Labels.append(obj_) obj_.original_tagname_ = 'Labels' elif nodeName_ == 'Roles': obj_ = RolesType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Roles = obj_ obj_.original_tagname_ = 'Roles' elif nodeName_ == 'TextRegion': obj_ = TextRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextRegion.append(obj_) obj_.original_tagname_ = 'TextRegion' elif nodeName_ == 'ImageRegion': obj_ = ImageRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.ImageRegion.append(obj_) obj_.original_tagname_ = 'ImageRegion' elif nodeName_ == 'LineDrawingRegion': obj_ = LineDrawingRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.LineDrawingRegion.append(obj_) obj_.original_tagname_ = 'LineDrawingRegion' elif nodeName_ == 'GraphicRegion': obj_ = GraphicRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.GraphicRegion.append(obj_) obj_.original_tagname_ = 'GraphicRegion' elif nodeName_ == 'TableRegion': obj_ = TableRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TableRegion.append(obj_) obj_.original_tagname_ = 'TableRegion' elif nodeName_ == 'ChartRegion': obj_ = ChartRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.ChartRegion.append(obj_) obj_.original_tagname_ = 'ChartRegion' elif nodeName_ == 'SeparatorRegion': obj_ = SeparatorRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.SeparatorRegion.append(obj_) obj_.original_tagname_ = 'SeparatorRegion' elif nodeName_ == 'MathsRegion': obj_ = MathsRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.MathsRegion.append(obj_) obj_.original_tagname_ = 'MathsRegion' elif nodeName_ == 'ChemRegion': obj_ = ChemRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.ChemRegion.append(obj_) obj_.original_tagname_ = 'ChemRegion' elif nodeName_ == 'MusicRegion': obj_ = MusicRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.MusicRegion.append(obj_) obj_.original_tagname_ = 'MusicRegion' elif nodeName_ == 
'AdvertRegion': obj_ = AdvertRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.AdvertRegion.append(obj_) obj_.original_tagname_ = 'AdvertRegion' elif nodeName_ == 'NoiseRegion': obj_ = NoiseRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.NoiseRegion.append(obj_) obj_.original_tagname_ = 'NoiseRegion' elif nodeName_ == 'UnknownRegion': obj_ = UnknownRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UnknownRegion.append(obj_) obj_.original_tagname_ = 'UnknownRegion' elif nodeName_ == 'CustomRegion': obj_ = CustomRegionType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.CustomRegion.append(obj_) obj_.original_tagname_ = 'CustomRegion' def get_polygon(self): ''' Get polygon from element which is parent of a Coords element ''' points = [point for point in self.Coords.points.split(' ')] return [[int(coord) for coord in point.split(',')] for point in points] def get_polygon_string(self): ''' Get polygon string from element which is parent of a Coords element ''' return self.Coords.points.replace(' ', ',') # end class RegionType class AlternativeImageType(GeneratedsSuper): """Confidence value (between 0 and 1)""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('filename', 'string', 0, 0, {'use': 'required'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}), ] subclass = None superclass = None def __init__(self, filename=None, comments=None, conf=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.filename = _cast(None, filename) self.filename_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None self.conf = _cast(float, conf) self.conf_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, AlternativeImageType) if subclass is not None: return subclass(*args_, **kwargs_) if AlternativeImageType.subclass: return AlternativeImageType.subclass(*args_, **kwargs_) else: return AlternativeImageType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_filename(self): return self.filename def set_filename(self, filename): self.filename = filename def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def get_conf(self): return self.conf def set_conf(self, conf): self.conf = conf def validate_ConfSimpleType(self, value): # Validate type pc:ConfSimpleType, a restriction on float. 
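        # ConfSimpleType restricts xsd:float to the closed interval [0, 1];
        # out-of-range values are reported to gds_collector_ only when
        # Validate_simpletypes_ is enabled and a collector is attached.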
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, float): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, }) return False if value < 0: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False if value > 1: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} ) result = False def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='AlternativeImageType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlternativeImageType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'AlternativeImageType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlternativeImageType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlternativeImageType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlternativeImageType'): if self.filename is not None and 'filename' not in already_processed: already_processed.add('filename') outfile.write(' filename=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.filename), input_name='filename')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) if self.conf is not None and 'conf' not in already_processed: already_processed.add('conf') outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='AlternativeImageType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('filename', node) if value is not None and 'filename' not in already_processed: already_processed.add('filename') 
self.filename = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value value = find_attr_value_('conf', node) if value is not None and 'conf' not in already_processed: already_processed.add('conf') value = self.gds_parse_float(value, node, 'conf') self.conf = value self.validate_ConfSimpleType(self.conf) # validate type ConfSimpleType def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class AlternativeImageType class GraphemesType(GeneratedsSuper): """Container for graphemes, grapheme groups and non-printing characters.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('Grapheme', 'GraphemeType', 1, 0, {'name': 'Grapheme', 'type': 'GraphemeType'}, 8), MemberSpec_('NonPrintingChar', 'NonPrintingCharType', 1, 0, {'name': 'NonPrintingChar', 'type': 'NonPrintingCharType'}, 8), MemberSpec_('GraphemeGroup', 'GraphemeGroupType', 1, 0, {'name': 'GraphemeGroup', 'type': 'GraphemeGroupType'}, 8), ] subclass = None superclass = None def __init__(self, Grapheme=None, NonPrintingChar=None, GraphemeGroup=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" if Grapheme is None: self.Grapheme = [] else: self.Grapheme = Grapheme self.Grapheme_nsprefix_ = "pc" if NonPrintingChar is None: self.NonPrintingChar = [] else: self.NonPrintingChar = NonPrintingChar self.NonPrintingChar_nsprefix_ = "pc" if GraphemeGroup is None: self.GraphemeGroup = [] else: self.GraphemeGroup = GraphemeGroup self.GraphemeGroup_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, GraphemesType) if subclass is not None: return subclass(*args_, **kwargs_) if GraphemesType.subclass: return GraphemesType.subclass(*args_, **kwargs_) else: return GraphemesType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Grapheme(self): return self.Grapheme def set_Grapheme(self, Grapheme): self.Grapheme = Grapheme def add_Grapheme(self, value): self.Grapheme.append(value) def insert_Grapheme_at(self, index, value): self.Grapheme.insert(index, value) def replace_Grapheme_at(self, index, value): self.Grapheme[index] = value def get_NonPrintingChar(self): return self.NonPrintingChar def set_NonPrintingChar(self, NonPrintingChar): self.NonPrintingChar = NonPrintingChar def add_NonPrintingChar(self, value): self.NonPrintingChar.append(value) def insert_NonPrintingChar_at(self, index, value): self.NonPrintingChar.insert(index, value) def replace_NonPrintingChar_at(self, index, value): self.NonPrintingChar[index] = value def get_GraphemeGroup(self): return self.GraphemeGroup def set_GraphemeGroup(self, GraphemeGroup): self.GraphemeGroup = GraphemeGroup def add_GraphemeGroup(self, value): self.GraphemeGroup.append(value) def insert_GraphemeGroup_at(self, index, value): self.GraphemeGroup.insert(index, value) def replace_GraphemeGroup_at(self, index, value): self.GraphemeGroup[index] = value def hasContent_(self): if ( self.Grapheme or self.NonPrintingChar or self.GraphemeGroup ): return True else: return False def export(self, outfile, level, namespaceprefix_='', 
namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemesType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphemesType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'GraphemesType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemesType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphemesType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphemesType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemesType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for Grapheme_ in self.Grapheme: namespaceprefix_ = self.Grapheme_nsprefix_ + ':' if (UseCapturedNS_ and self.Grapheme_nsprefix_) else '' Grapheme_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Grapheme', pretty_print=pretty_print) for NonPrintingChar_ in self.NonPrintingChar: namespaceprefix_ = self.NonPrintingChar_nsprefix_ + ':' if (UseCapturedNS_ and self.NonPrintingChar_nsprefix_) else '' NonPrintingChar_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonPrintingChar', pretty_print=pretty_print) for GraphemeGroup_ in self.GraphemeGroup: namespaceprefix_ = self.GraphemeGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.GraphemeGroup_nsprefix_) else '' GraphemeGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GraphemeGroup', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Grapheme': obj_ = GraphemeType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Grapheme.append(obj_) obj_.original_tagname_ = 'Grapheme' elif nodeName_ == 'NonPrintingChar': obj_ = NonPrintingCharType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.NonPrintingChar.append(obj_) obj_.original_tagname_ = 'NonPrintingChar' elif nodeName_ == 'GraphemeGroup': obj_ = GraphemeGroupType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.GraphemeGroup.append(obj_) obj_.original_tagname_ = 'GraphemeGroup' # end class GraphemesType class GraphemeBaseType(GeneratedsSuper): """Base type for 
graphemes, grapheme groups and non-printing characters. Order index of grapheme, group, or non-printing character within the parent container (graphemes or glyph or grapheme group). Type of character represented by the grapheme, group, or non-printing character element. For generic useFor generic use""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('id', 'string', 0, 0, {'use': 'required'}), MemberSpec_('index', 'indexType2', 0, 0, {'use': 'required'}), MemberSpec_('ligature', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('charType', 'charTypeType', 0, 1, {'use': 'optional'}), MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None), ] subclass = None superclass = None def __init__(self, id=None, index=None, ligature=None, charType=None, custom=None, comments=None, TextEquiv=None, extensiontype_=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.id = _cast(None, id) self.id_nsprefix_ = None self.index = _cast(int, index) self.index_nsprefix_ = None self.ligature = _cast(bool, ligature) self.ligature_nsprefix_ = None self.charType = _cast(None, charType) self.charType_nsprefix_ = None self.custom = _cast(None, custom) self.custom_nsprefix_ = None self.comments = _cast(None, comments) self.comments_nsprefix_ = None if TextEquiv is None: self.TextEquiv = [] else: self.TextEquiv = TextEquiv self.TextEquiv_nsprefix_ = "pc" self.extensiontype_ = extensiontype_ def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, GraphemeBaseType) if subclass is not None: return subclass(*args_, **kwargs_) if GraphemeBaseType.subclass: return GraphemeBaseType.subclass(*args_, **kwargs_) else: return GraphemeBaseType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_TextEquiv(self): return self.TextEquiv def set_TextEquiv(self, TextEquiv): self.TextEquiv = TextEquiv def add_TextEquiv(self, value): self.TextEquiv.append(value) def insert_TextEquiv_at(self, index, value): self.TextEquiv.insert(index, value) def replace_TextEquiv_at(self, index, value): self.TextEquiv[index] = value def get_id(self): return self.id def set_id(self, id): self.id = id def get_index(self): return self.index def set_index(self, index): self.index = index def get_ligature(self): return self.ligature def set_ligature(self, ligature): self.ligature = ligature def get_charType(self): return self.charType def set_charType(self, charType): self.charType = charType def get_custom(self): return self.custom def set_custom(self, custom): self.custom = custom def get_comments(self): return self.comments def set_comments(self, comments): self.comments = comments def get_extensiontype_(self): return self.extensiontype_ def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_ def validate_indexType2(self, value): # Validate type indexType2, a restriction on int. 
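        # indexType2 restricts xsd:int to non-negative values (minInclusive 0);
        # violations are reported to gds_collector_ only when validation is
        # enabled and a collector is attached.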
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, int): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, }) return False if value < 0: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on indexType2' % {"value": value, "lineno": lineno} ) result = False def validate_charTypeType(self, value): # Validate type charTypeType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['base', 'combining'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on charTypeType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.TextEquiv ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeBaseType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphemeBaseType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'GraphemeBaseType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeBaseType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphemeBaseType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphemeBaseType'): if self.id is not None and 'id' not in already_processed: already_processed.add('id') outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), )) if self.index is not None and 'index' not in already_processed: already_processed.add('index') outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index')) if self.ligature is not None and 'ligature' not in already_processed: already_processed.add('ligature') outfile.write(' ligature="%s"' % self.gds_format_boolean(self.ligature, input_name='ligature')) if self.charType is not None and 'charType' not in already_processed: already_processed.add('charType') outfile.write(' charType=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.charType), input_name='charType')), )) if self.custom is not None and 'custom' not in already_processed: already_processed.add('custom') outfile.write(' custom=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), )) if self.comments is not None and 'comments' not in already_processed: already_processed.add('comments') outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), )) if self.extensiontype_ is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"') if ":" not in self.extensiontype_: imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '') outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_)) else: outfile.write(' xsi:type="%s"' % self.extensiontype_) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeBaseType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for TextEquiv_ in self.TextEquiv: namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else '' TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('id', node) if value is not None and 'id' not in already_processed: already_processed.add('id') self.id = value value = find_attr_value_('index', node) if value is not None and 'index' not in already_processed: already_processed.add('index') self.index = self.gds_parse_integer(value, node, 'index') self.validate_indexType2(self.index) # validate type indexType2 value = find_attr_value_('ligature', node) if value is not None and 'ligature' not in already_processed: already_processed.add('ligature') if value in ('true', '1'): self.ligature = True elif value in ('false', '0'): self.ligature = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('charType', node) if value is not None and 'charType' not in already_processed: already_processed.add('charType') self.charType = value self.validate_charTypeType(self.charType) # validate type charTypeType value = find_attr_value_('custom', node) if value is not None and 'custom' not in already_processed: already_processed.add('custom') self.custom = value value = find_attr_value_('comments', node) if value is not None and 'comments' not in already_processed: already_processed.add('comments') self.comments = value value = find_attr_value_('xsi:type', node) if value is not None and 'xsi:type' not in already_processed: already_processed.add('xsi:type') self.extensiontype_ = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'TextEquiv': obj_ = TextEquivType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextEquiv.append(obj_) obj_.original_tagname_ = 'TextEquiv' # end class GraphemeBaseType class GraphemeType(GraphemeBaseType): """Represents a sub-element of a 
glyph. Smallest graphical unit that can be assigned a Unicode code point.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None), ] subclass = None superclass = GraphemeBaseType def __init__(self, id=None, index=None, ligature=None, charType=None, custom=None, comments=None, TextEquiv=None, Coords=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("GraphemeType"), self).__init__(id, index, ligature, charType, custom, comments, TextEquiv, **kwargs_) self.Coords = Coords self.Coords_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, GraphemeType) if subclass is not None: return subclass(*args_, **kwargs_) if GraphemeType.subclass: return GraphemeType.subclass(*args_, **kwargs_) else: return GraphemeType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Coords(self): return self.Coords def set_Coords(self, Coords): self.Coords = Coords def hasContent_(self): if ( self.Coords is not None or super(GraphemeType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphemeType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'GraphemeType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphemeType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphemeType'): super(GraphemeType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeType') def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeType', fromsubclass_=False, pretty_print=True): super(GraphemeType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.Coords is not None: namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else '' self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if 
SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): super(GraphemeType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Coords': obj_ = CoordsType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Coords = obj_ obj_.original_tagname_ = 'Coords' super(GraphemeType, self).buildChildren(child_, node, nodeName_, True) def get_polygon(self): ''' Get polygon from element which is parent of a Coords element ''' points = [point for point in self.Coords.points.split(' ')] return [[int(coord) for coord in point.split(',')] for point in points] def get_polygon_string(self): ''' Get polygon string from element which is parent of a Coords element ''' return self.Coords.points.replace(' ', ',') # end class GraphemeType class NonPrintingCharType(GraphemeBaseType): """A glyph component without visual representation but with Unicode code point. Non-visual / non-printing / control character. Part of grapheme container (of glyph) or grapheme sub group.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = GraphemeBaseType def __init__(self, id=None, index=None, ligature=None, charType=None, custom=None, comments=None, TextEquiv=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("NonPrintingCharType"), self).__init__(id, index, ligature, charType, custom, comments, TextEquiv, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, NonPrintingCharType) if subclass is not None: return subclass(*args_, **kwargs_) if NonPrintingCharType.subclass: return NonPrintingCharType.subclass(*args_, **kwargs_) else: return NonPrintingCharType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def hasContent_(self): if ( super(NonPrintingCharType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='NonPrintingCharType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('NonPrintingCharType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'NonPrintingCharType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NonPrintingCharType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, 
namespacedef_, name_='NonPrintingCharType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NonPrintingCharType'): super(NonPrintingCharType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NonPrintingCharType') def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='NonPrintingCharType', fromsubclass_=False, pretty_print=True): super(NonPrintingCharType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): super(NonPrintingCharType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(NonPrintingCharType, self).buildChildren(child_, node, nodeName_, True) pass # end class NonPrintingCharType class GraphemeGroupType(GraphemeBaseType): __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('Grapheme', 'GraphemeType', 1, 1, {'name': 'Grapheme', 'type': 'GraphemeType'}, 9), MemberSpec_('NonPrintingChar', 'NonPrintingCharType', 1, 1, {'name': 'NonPrintingChar', 'type': 'NonPrintingCharType'}, 9), ] subclass = None superclass = GraphemeBaseType def __init__(self, id=None, index=None, ligature=None, charType=None, custom=None, comments=None, TextEquiv=None, Grapheme=None, NonPrintingChar=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("GraphemeGroupType"), self).__init__(id, index, ligature, charType, custom, comments, TextEquiv, **kwargs_) if Grapheme is None: self.Grapheme = [] else: self.Grapheme = Grapheme self.Grapheme_nsprefix_ = "pc" if NonPrintingChar is None: self.NonPrintingChar = [] else: self.NonPrintingChar = NonPrintingChar self.NonPrintingChar_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, GraphemeGroupType) if subclass is not None: return subclass(*args_, **kwargs_) if GraphemeGroupType.subclass: return GraphemeGroupType.subclass(*args_, **kwargs_) else: return GraphemeGroupType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Grapheme(self): return self.Grapheme def set_Grapheme(self, Grapheme): self.Grapheme = Grapheme def add_Grapheme(self, value): self.Grapheme.append(value) def insert_Grapheme_at(self, index, value): self.Grapheme.insert(index, value) def replace_Grapheme_at(self, index, value): self.Grapheme[index] = value def get_NonPrintingChar(self): return self.NonPrintingChar def 
set_NonPrintingChar(self, NonPrintingChar): self.NonPrintingChar = NonPrintingChar def add_NonPrintingChar(self, value): self.NonPrintingChar.append(value) def insert_NonPrintingChar_at(self, index, value): self.NonPrintingChar.insert(index, value) def replace_NonPrintingChar_at(self, index, value): self.NonPrintingChar[index] = value def hasContent_(self): if ( self.Grapheme or self.NonPrintingChar or super(GraphemeGroupType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeGroupType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphemeGroupType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'GraphemeGroupType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeGroupType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphemeGroupType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphemeGroupType'): super(GraphemeGroupType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeGroupType') def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeGroupType', fromsubclass_=False, pretty_print=True): super(GraphemeGroupType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for Grapheme_ in self.Grapheme: namespaceprefix_ = self.Grapheme_nsprefix_ + ':' if (UseCapturedNS_ and self.Grapheme_nsprefix_) else '' Grapheme_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Grapheme', pretty_print=pretty_print) for NonPrintingChar_ in self.NonPrintingChar: namespaceprefix_ = self.NonPrintingChar_nsprefix_ + ':' if (UseCapturedNS_ and self.NonPrintingChar_nsprefix_) else '' NonPrintingChar_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonPrintingChar', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): super(GraphemeGroupType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Grapheme': obj_ = GraphemeType.factory(parent_object_=self) 
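# --- Illustrative usage sketch (editor's addition, not part of the generated module) ---
# Assuming the generateDS-style API above (and that CoordsType accepts a `points`
# keyword), a grapheme group could be assembled and a child polygon read back roughly
# as follows; `g`, `group` and the coordinate values are hypothetical examples.
# CoordsType stores points as space-separated "x,y" pairs, which is exactly what
# GraphemeType.get_polygon above splits apart:
#
#   g = GraphemeType(id="g0001", index=0,
#                    Coords=CoordsType(points="0,0 10,0 10,20 0,20"))
#   group = GraphemeGroupType(id="gg0001", index=0)
#   group.add_Grapheme(g)
#   print(g.get_polygon())   # -> [[0, 0], [10, 0], [10, 20], [0, 20]]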
obj_.build(child_, gds_collector_=gds_collector_) self.Grapheme.append(obj_) obj_.original_tagname_ = 'Grapheme' elif nodeName_ == 'NonPrintingChar': obj_ = NonPrintingCharType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.NonPrintingChar.append(obj_) obj_.original_tagname_ = 'NonPrintingChar' super(GraphemeGroupType, self).buildChildren(child_, node, nodeName_, True) # end class GraphemeGroupType class UserDefinedType(GeneratedsSuper): """Container for user-defined attributes""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('UserAttribute', 'UserAttributeType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'UserAttribute', 'type': 'UserAttributeType'}, None), ] subclass = None superclass = None def __init__(self, UserAttribute=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" if UserAttribute is None: self.UserAttribute = [] else: self.UserAttribute = UserAttribute self.UserAttribute_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, UserDefinedType) if subclass is not None: return subclass(*args_, **kwargs_) if UserDefinedType.subclass: return UserDefinedType.subclass(*args_, **kwargs_) else: return UserDefinedType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_UserAttribute(self): return self.UserAttribute def set_UserAttribute(self, UserAttribute): self.UserAttribute = UserAttribute def add_UserAttribute(self, value): self.UserAttribute.append(value) def insert_UserAttribute_at(self, index, value): self.UserAttribute.insert(index, value) def replace_UserAttribute_at(self, index, value): self.UserAttribute[index] = value def hasContent_(self): if ( self.UserAttribute ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UserDefinedType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('UserDefinedType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'UserDefinedType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UserDefinedType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UserDefinedType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UserDefinedType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UserDefinedType', fromsubclass_=False, 
pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' for UserAttribute_ in self.UserAttribute: namespaceprefix_ = self.UserAttribute_nsprefix_ + ':' if (UseCapturedNS_ and self.UserAttribute_nsprefix_) else '' UserAttribute_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserAttribute', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'UserAttribute': obj_ = UserAttributeType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.UserAttribute.append(obj_) obj_.original_tagname_ = 'UserAttribute' # end class UserDefinedType class UserAttributeType(GeneratedsSuper): """Structured custom data defined by name, type and value.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('name', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('description', 'string', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'typeType3', 0, 1, {'use': 'optional'}), MemberSpec_('value', 'string', 0, 1, {'use': 'optional'}), ] subclass = None superclass = None def __init__(self, name=None, description=None, type_=None, value=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.name = _cast(None, name) self.name_nsprefix_ = None self.description = _cast(None, description) self.description_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.value = _cast(None, value) self.value_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, UserAttributeType) if subclass is not None: return subclass(*args_, **kwargs_) if UserAttributeType.subclass: return UserAttributeType.subclass(*args_, **kwargs_) else: return UserAttributeType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_name(self): return self.name def set_name(self, name): self.name = name def get_description(self): return self.description def set_description(self, description): self.description = description def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_value(self): return self.value def set_value(self, value): self.value = value def validate_typeType3(self, value): # Validate type typeType3, a restriction on string. 
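# --- Illustrative usage sketch (editor's addition, not part of the generated module) ---
# A UserDefinedType is simply a container of UserAttributeType entries.  Assuming the
# constructors above, custom metadata could be attached to any element that carries a
# UserDefined child; the attribute name and value below are hypothetical:
#
#   attr = UserAttributeType(name="source", type_="xsd:string", value="scanner-42")
#   user_defined = UserDefinedType(UserAttribute=[attr])
#
# Note that the XML attribute `type` maps to the Python-side `type_`, and the validator
# below restricts it to 'xsd:string', 'xsd:integer', 'xsd:boolean' or 'xsd:float'.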
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['xsd:string', 'xsd:integer', 'xsd:boolean', 'xsd:float'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on typeType3' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UserAttributeType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('UserAttributeType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'UserAttributeType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UserAttributeType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UserAttributeType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UserAttributeType'): if self.name is not None and 'name' not in already_processed: already_processed.add('name') outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), )) if self.description is not None and 'description' not in already_processed: already_processed.add('description') outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.value is not None and 'value' not in already_processed: already_processed.add('value') outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UserAttributeType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('name', 
node) if value is not None and 'name' not in already_processed: already_processed.add('name') self.name = value value = find_attr_value_('description', node) if value is not None and 'description' not in already_processed: already_processed.add('description') self.description = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_typeType3(self.type_) # validate type typeType3 value = find_attr_value_('value', node) if value is not None and 'value' not in already_processed: already_processed.add('value') self.value = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class UserAttributeType class TableCellRoleType(GeneratedsSuper): """Cell position in table starting with row 0Cell position in table starting with column 0Number of rows the cell spans (optional; default is 1)Number of columns the cell spans (optional; default is 1) Is the cell a column or row header?""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('rowIndex', 'int', 0, 0, {'use': 'required'}), MemberSpec_('columnIndex', 'int', 0, 0, {'use': 'required'}), MemberSpec_('rowSpan', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('colSpan', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('header', 'boolean', 0, 1, {'use': 'optional'}), ] subclass = None superclass = None def __init__(self, rowIndex=None, columnIndex=None, rowSpan=None, colSpan=None, header=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.rowIndex = _cast(int, rowIndex) self.rowIndex_nsprefix_ = None self.columnIndex = _cast(int, columnIndex) self.columnIndex_nsprefix_ = None self.rowSpan = _cast(int, rowSpan) self.rowSpan_nsprefix_ = None self.colSpan = _cast(int, colSpan) self.colSpan_nsprefix_ = None self.header = _cast(bool, header) self.header_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, TableCellRoleType) if subclass is not None: return subclass(*args_, **kwargs_) if TableCellRoleType.subclass: return TableCellRoleType.subclass(*args_, **kwargs_) else: return TableCellRoleType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_rowIndex(self): return self.rowIndex def set_rowIndex(self, rowIndex): self.rowIndex = rowIndex def get_columnIndex(self): return self.columnIndex def set_columnIndex(self, columnIndex): self.columnIndex = columnIndex def get_rowSpan(self): return self.rowSpan def set_rowSpan(self, rowSpan): self.rowSpan = rowSpan def get_colSpan(self): return self.colSpan def set_colSpan(self, colSpan): self.colSpan = colSpan def get_header(self): return self.header def set_header(self, header): self.header = header def hasContent_(self): if ( ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TableCellRoleType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TableCellRoleType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is 
not None and name_ == 'TableCellRoleType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TableCellRoleType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TableCellRoleType', pretty_print=pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TableCellRoleType'): if self.rowIndex is not None and 'rowIndex' not in already_processed: already_processed.add('rowIndex') outfile.write(' rowIndex="%s"' % self.gds_format_integer(self.rowIndex, input_name='rowIndex')) if self.columnIndex is not None and 'columnIndex' not in already_processed: already_processed.add('columnIndex') outfile.write(' columnIndex="%s"' % self.gds_format_integer(self.columnIndex, input_name='columnIndex')) if self.rowSpan is not None and 'rowSpan' not in already_processed: already_processed.add('rowSpan') outfile.write(' rowSpan="%s"' % self.gds_format_integer(self.rowSpan, input_name='rowSpan')) if self.colSpan is not None and 'colSpan' not in already_processed: already_processed.add('colSpan') outfile.write(' colSpan="%s"' % self.gds_format_integer(self.colSpan, input_name='colSpan')) if self.header is not None and 'header' not in already_processed: already_processed.add('header') outfile.write(' header="%s"' % self.gds_format_boolean(self.header, input_name='header')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TableCellRoleType', fromsubclass_=False, pretty_print=True): pass def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('rowIndex', node) if value is not None and 'rowIndex' not in already_processed: already_processed.add('rowIndex') self.rowIndex = self.gds_parse_integer(value, node, 'rowIndex') value = find_attr_value_('columnIndex', node) if value is not None and 'columnIndex' not in already_processed: already_processed.add('columnIndex') self.columnIndex = self.gds_parse_integer(value, node, 'columnIndex') value = find_attr_value_('rowSpan', node) if value is not None and 'rowSpan' not in already_processed: already_processed.add('rowSpan') self.rowSpan = self.gds_parse_integer(value, node, 'rowSpan') value = find_attr_value_('colSpan', node) if value is not None and 'colSpan' not in already_processed: already_processed.add('colSpan') self.colSpan = self.gds_parse_integer(value, node, 'colSpan') value = find_attr_value_('header', node) if value is not None and 'header' not in already_processed: already_processed.add('header') if value in ('true', '1'): self.header = True elif value in ('false', '0'): self.header = 
False else: raise_parse_error(node, 'Bad boolean attribute') def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): pass # end class TableCellRoleType class RolesType(GeneratedsSuper): __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('TableCellRole', 'TableCellRoleType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'TableCellRole', 'type': 'TableCellRoleType'}, None), ] subclass = None superclass = None def __init__(self, TableCellRole=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" self.TableCellRole = TableCellRole self.TableCellRole_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, RolesType) if subclass is not None: return subclass(*args_, **kwargs_) if RolesType.subclass: return RolesType.subclass(*args_, **kwargs_) else: return RolesType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_TableCellRole(self): return self.TableCellRole def set_TableCellRole(self, TableCellRole): self.TableCellRole = TableCellRole def hasContent_(self): if ( self.TableCellRole is not None ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RolesType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('RolesType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'RolesType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RolesType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RolesType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RolesType'): pass def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RolesType', fromsubclass_=False, pretty_print=True): if pretty_print: eol_ = '\n' else: eol_ = '' if self.TableCellRole is not None: namespaceprefix_ = self.TableCellRole_nsprefix_ + ':' if (UseCapturedNS_ and self.TableCellRole_nsprefix_) else '' self.TableCellRole.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TableCellRole', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = 
Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): pass def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'TableCellRole': obj_ = TableCellRoleType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TableCellRole = obj_ obj_.original_tagname_ = 'TableCellRole' # end class RolesType class CustomRegionType(RegionType): """Regions containing content that is not covered by the default types (text, graphic, image, line drawing, chart, table, separator, maths, map, music, chem, advert, noise, unknown). Information on the type of content represented by this region""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('type_', 'string', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, type_=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("CustomRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.type_ = _cast(None, type_) self.type__nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, CustomRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if CustomRegionType.subclass: return CustomRegionType.subclass(*args_, **kwargs_) else: return CustomRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def hasContent_(self): if ( super(CustomRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='CustomRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('CustomRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'CustomRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CustomRegionType') if self.hasContent_(): outfile.write('>%s' % 
(eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CustomRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CustomRegionType'): super(CustomRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CustomRegionType') if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='CustomRegionType', fromsubclass_=False, pretty_print=True): super(CustomRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value super(CustomRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(CustomRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class CustomRegionType class UnknownRegionType(RegionType): """To be used if the region type cannot be ascertained.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("UnknownRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, UnknownRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if UnknownRegionType.subclass: return UnknownRegionType.subclass(*args_, **kwargs_) else: return UnknownRegionType(*args_, **kwargs_) factory = staticmethod(factory) def 
get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def hasContent_(self): if ( super(UnknownRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnknownRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('UnknownRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'UnknownRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnknownRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnknownRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnknownRegionType'): super(UnknownRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnknownRegionType') def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnknownRegionType', fromsubclass_=False, pretty_print=True): super(UnknownRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): super(UnknownRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(UnknownRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class UnknownRegionType class NoiseRegionType(RegionType): """Noise regions are regions where no real data lies, only false data created by artifacts on the document or scanner noise.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ 
= kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("NoiseRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, NoiseRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if NoiseRegionType.subclass: return NoiseRegionType.subclass(*args_, **kwargs_) else: return NoiseRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def hasContent_(self): if ( super(NoiseRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='NoiseRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('NoiseRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'NoiseRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NoiseRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NoiseRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NoiseRegionType'): super(NoiseRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NoiseRegionType') def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='NoiseRegionType', fromsubclass_=False, pretty_print=True): super(NoiseRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): super(NoiseRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(NoiseRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class NoiseRegionType class AdvertRegionType(RegionType): """Regions containing advertisements. 
The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). Range: -179.999,180 The background colour of the region""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, bgColour=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("AdvertRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, AdvertRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if AdvertRegionType.subclass: return AdvertRegionType.subclass(*args_, **kwargs_) else: return AdvertRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. 
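# --- Illustrative usage sketch (editor's addition, not part of the generated module) ---
# AdvertRegionType adds `orientation` (degrees of clockwise skew correction, range
# -179.999 to 180) and `bgColour` on top of the generic RegionType.  Assuming the API
# above, a region could be created like this; the id and colour are hypothetical, and a
# real document would normally also carry a Coords child:
#
#   advert = AdvertRegionType(id="r0007", orientation=-1.5, bgColour="white")
#
# The validator below only accepts the named colours of ColourSimpleType ('black',
# 'blue', ..., 'other'); any other value is reported through gds_collector_.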
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(AdvertRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='AdvertRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('AdvertRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'AdvertRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdvertRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AdvertRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AdvertRegionType'): super(AdvertRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdvertRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.bgColour is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='AdvertRegionType', fromsubclass_=False, pretty_print=True): super(AdvertRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', 
node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType super(AdvertRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(AdvertRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class AdvertRegionType class MusicRegionType(RegionType): """Regions containing musical notations. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). Range: -179.999,180 The background colour of the region""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, bgColour=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("MusicRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, MusicRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if MusicRegionType.subclass: return MusicRegionType.subclass(*args_, **kwargs_) else: return MusicRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. 
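# --- Illustrative usage sketch (editor's addition, not part of the generated module) ---
# Every generated class exposes the same extension hook: factory() first consults
# CurrentSubclassModule_ via getSubclassFromModule_ and then the class-level `subclass`
# attribute, so application code can substitute its own types when the parser builds the
# tree.  A minimal sketch, with MyMusicRegion as a hypothetical name:
#
#   class MyMusicRegion(MusicRegionType):
#       def describe(self):
#           return "music region %s" % self.id
#
#   MusicRegionType.subclass = MyMusicRegion
#   # Subsequent MusicRegionType.factory(...) calls now return MyMusicRegion instances.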
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(MusicRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MusicRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('MusicRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'MusicRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MusicRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MusicRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MusicRegionType'): super(MusicRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MusicRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.bgColour is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MusicRegionType', fromsubclass_=False, pretty_print=True): super(MusicRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if 
value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType super(MusicRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(MusicRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class MusicRegionType class MapRegionType(RegionType): """Regions containing maps. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). Range: -179.999,180""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("MapRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, MapRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if MapRegionType.subclass: return MapRegionType.subclass(*args_, **kwargs_) else: return MapRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def hasContent_(self): if ( super(MapRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MapRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('MapRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'MapRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' 
' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MapRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MapRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MapRegionType'): super(MapRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MapRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MapRegionType', fromsubclass_=False, pretty_print=True): super(MapRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value super(MapRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(MapRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class MapRegionType class ChemRegionType(RegionType): """Regions containing chemical formulas. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). 
Range: -179.999,180 The background colour of the region""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, bgColour=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("ChemRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, ChemRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if ChemRegionType.subclass: return ChemRegionType.subclass(*args_, **kwargs_) else: return ChemRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. 
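# Illustrative construction/serialisation sketch (not part of the generated
# bindings; the name `out` is an assumption for any writable text stream):
#   region = ChemRegionType(id='r_chem_1', orientation=0.0, bgColour='white')
#   region.export(out, 0, name_='ChemRegion')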
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(ChemRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ChemRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChemRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'ChemRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChemRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChemRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChemRegionType'): super(ChemRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChemRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.bgColour is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ChemRegionType', fromsubclass_=False, pretty_print=True): super(ChemRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if value is not 
None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType super(ChemRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(ChemRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class ChemRegionType class MathsRegionType(RegionType): """Regions containing equations and mathematical symbols should be marked as maths regions. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). Range: -179.999,180 The background colour of the region""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, bgColour=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("MathsRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, MathsRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if MathsRegionType.subclass: return MathsRegionType.subclass(*args_, **kwargs_) else: return MathsRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. 
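# This colour check is invoked from buildAttributes() right after the bgColour
# attribute is read from the XML node, so an invalid colour is recorded in the
# collector during parsing instead of aborting the build.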
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(MathsRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MathsRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('MathsRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'MathsRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MathsRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MathsRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MathsRegionType'): super(MathsRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MathsRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.bgColour is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MathsRegionType', fromsubclass_=False, pretty_print=True): super(MathsRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if 
value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType super(MathsRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(MathsRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class MathsRegionType class SeparatorRegionType(RegionType): """Separators are lines that lie between columns and paragraphs and can be used to logically separate different articles from each other. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). Range: -179.999,180 The colour of the separator""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('colour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, colour=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("SeparatorRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.colour = _cast(None, colour) self.colour_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, SeparatorRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if SeparatorRegionType.subclass: return SeparatorRegionType.subclass(*args_, **kwargs_) else: return SeparatorRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_colour(self): return self.colour def set_colour(self, colour): self.colour = colour def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. 
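# Round-trip sketch (illustrative only; `elem` is assumed to be an ElementTree
# node for a SeparatorRegion element and `out` a writable text stream):
#   sep = SeparatorRegionType.factory()
#   sep.build(elem)                        # fills orientation/colour via buildAttributes
#   sep.export(out, 1, name_='SeparatorRegion')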
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(SeparatorRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='SeparatorRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('SeparatorRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'SeparatorRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SeparatorRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SeparatorRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SeparatorRegionType'): super(SeparatorRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SeparatorRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.colour is not None and 'colour' not in already_processed: already_processed.add('colour') outfile.write(' colour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.colour), input_name='colour')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='SeparatorRegionType', fromsubclass_=False, pretty_print=True): super(SeparatorRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = 
find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('colour', node) if value is not None and 'colour' not in already_processed: already_processed.add('colour') self.colour = value self.validate_ColourSimpleType(self.colour) # validate type ColourSimpleType super(SeparatorRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(SeparatorRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class SeparatorRegionType class ChartRegionType(RegionType): """Regions containing charts or graphs of any type, should be marked as chart regions. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). Range: -179.999,180 The type of chart in the region An approximation of the number of colours used in the region The background colour of the region Specifies whether the region also contains text""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'pc:ChartTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('numColours', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, type_=None, numColours=None, bgColour=None, embText=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("ChartRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.numColours = _cast(int, numColours) self.numColours_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None self.embText = _cast(bool, embText) self.embText_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, ChartRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if ChartRegionType.subclass: return ChartRegionType.subclass(*args_, **kwargs_) else: return ChartRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = 
ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_numColours(self): return self.numColours def set_numColours(self, numColours): self.numColours = numColours def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def get_embText(self): return self.embText def set_embText(self, embText): self.embText = embText def validate_ChartTypeSimpleType(self, value): # Validate type pc:ChartTypeSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['bar', 'line', 'pie', 'scatter', 'surface', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ChartTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(ChartRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ChartRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChartRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'ChartRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChartRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChartRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChartRegionType'): super(ChartRegionType, 
self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChartRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.numColours is not None and 'numColours' not in already_processed: already_processed.add('numColours') outfile.write(' numColours="%s"' % self.gds_format_integer(self.numColours, input_name='numColours')) if self.bgColour is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) if self.embText is not None and 'embText' not in already_processed: already_processed.add('embText') outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ChartRegionType', fromsubclass_=False, pretty_print=True): super(ChartRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_ChartTypeSimpleType(self.type_) # validate type ChartTypeSimpleType value = find_attr_value_('numColours', node) if value is not None and 'numColours' not in already_processed: already_processed.add('numColours') self.numColours = self.gds_parse_integer(value, node, 'numColours') value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType value = find_attr_value_('embText', node) if value is not None and 'embText' not in already_processed: already_processed.add('embText') if value in ('true', '1'): self.embText = True elif value in ('false', '0'): self.embText = False else: raise_parse_error(node, 'Bad boolean attribute') super(ChartRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(ChartRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class ChartRegionType class TableRegionType(RegionType): """Tabular data in any form is represented with a 
table region. Rows and columns may or may not have separator lines; these lines are not separator regions. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). Range: -179.999,180 The number of rows present in the table The number of columns present in the table The colour of the lines used in the region The background colour of the region Specifies the presence of line separators Specifies whether the region also contains text""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('rows', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('columns', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('lineColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('lineSeparators', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('Grid', 'GridType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Grid', 'type': 'GridType'}, None), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, rows=None, columns=None, lineColour=None, bgColour=None, lineSeparators=None, embText=None, Grid=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("TableRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.rows = _cast(int, rows) self.rows_nsprefix_ = None self.columns = _cast(int, columns) self.columns_nsprefix_ = None self.lineColour = _cast(None, lineColour) self.lineColour_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None self.lineSeparators = _cast(bool, lineSeparators) self.lineSeparators_nsprefix_ = None self.embText = _cast(bool, embText) self.embText_nsprefix_ = None self.Grid = Grid self.Grid_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, TableRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if TableRegionType.subclass: return TableRegionType.subclass(*args_, **kwargs_) else: return TableRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_Grid(self): return self.Grid def set_Grid(self, Grid): self.Grid = Grid def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = 
orientation def get_rows(self): return self.rows def set_rows(self, rows): self.rows = rows def get_columns(self): return self.columns def set_columns(self, columns): self.columns = columns def get_lineColour(self): return self.lineColour def set_lineColour(self, lineColour): self.lineColour = lineColour def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def get_lineSeparators(self): return self.lineSeparators def set_lineSeparators(self, lineSeparators): self.lineSeparators = lineSeparators def get_embText(self): return self.embText def set_embText(self, embText): self.embText = embText def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.Grid is not None or super(TableRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TableRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TableRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'TableRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TableRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TableRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TableRegionType'): super(TableRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TableRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.rows is not None and 'rows' not in already_processed: already_processed.add('rows') outfile.write(' rows="%s"' % self.gds_format_integer(self.rows, input_name='rows')) if self.columns is not None and 'columns' not in already_processed: already_processed.add('columns') outfile.write(' columns="%s"' % 
self.gds_format_integer(self.columns, input_name='columns')) if self.lineColour is not None and 'lineColour' not in already_processed: already_processed.add('lineColour') outfile.write(' lineColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.lineColour), input_name='lineColour')), )) if self.bgColour is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) if self.lineSeparators is not None and 'lineSeparators' not in already_processed: already_processed.add('lineSeparators') outfile.write(' lineSeparators="%s"' % self.gds_format_boolean(self.lineSeparators, input_name='lineSeparators')) if self.embText is not None and 'embText' not in already_processed: already_processed.add('embText') outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TableRegionType', fromsubclass_=False, pretty_print=True): super(TableRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' if self.Grid is not None: namespaceprefix_ = self.Grid_nsprefix_ + ':' if (UseCapturedNS_ and self.Grid_nsprefix_) else '' self.Grid.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Grid', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('rows', node) if value is not None and 'rows' not in already_processed: already_processed.add('rows') self.rows = self.gds_parse_integer(value, node, 'rows') value = find_attr_value_('columns', node) if value is not None and 'columns' not in already_processed: already_processed.add('columns') self.columns = self.gds_parse_integer(value, node, 'columns') value = find_attr_value_('lineColour', node) if value is not None and 'lineColour' not in already_processed: already_processed.add('lineColour') self.lineColour = value self.validate_ColourSimpleType(self.lineColour) # validate type ColourSimpleType value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType value = find_attr_value_('lineSeparators', node) if value is not None and 'lineSeparators' not in already_processed: already_processed.add('lineSeparators') if value in ('true', '1'): self.lineSeparators = True elif value in ('false', '0'): self.lineSeparators = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('embText', node) if value is not 
None and 'embText' not in already_processed: already_processed.add('embText') if value in ('true', '1'): self.embText = True elif value in ('false', '0'): self.embText = False else: raise_parse_error(node, 'Bad boolean attribute') super(TableRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'Grid': obj_ = GridType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.Grid = obj_ obj_.original_tagname_ = 'Grid' super(TableRegionType, self).buildChildren(child_, node, nodeName_, True) # end class TableRegionType class GraphicRegionType(RegionType): """Regions containing simple graphics, such as a company logo, should be marked as graphic regions. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). Range: -179.999,180 The type of graphic in the region An approximation of the number of colours used in the region Specifies whether the region also contains text.""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'pc:GraphicsTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('numColours', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, type_=None, numColours=None, embText=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("GraphicRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.numColours = _cast(int, numColours) self.numColours_nsprefix_ = None self.embText = _cast(bool, embText) self.embText_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, GraphicRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if GraphicRegionType.subclass: return GraphicRegionType.subclass(*args_, **kwargs_) else: return GraphicRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def 
get_numColours(self): return self.numColours def set_numColours(self, numColours): self.numColours = numColours def get_embText(self): return self.embText def set_embText(self, embText): self.embText = embText def validate_GraphicsTypeSimpleType(self, value): # Validate type pc:GraphicsTypeSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['logo', 'letterhead', 'decoration', 'frame', 'handwritten-annotation', 'stamp', 'signature', 'barcode', 'paper-grow', 'punch-hole', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GraphicsTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(GraphicRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphicRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphicRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'GraphicRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphicRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphicRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphicRegionType'): super(GraphicRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphicRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.numColours is not None and 'numColours' not in already_processed: already_processed.add('numColours') outfile.write(' numColours="%s"' % self.gds_format_integer(self.numColours, input_name='numColours')) if self.embText is not None and 'embText' not in already_processed: already_processed.add('embText') outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText')) def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphicRegionType', fromsubclass_=False, pretty_print=True): super(GraphicRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_GraphicsTypeSimpleType(self.type_) # validate type GraphicsTypeSimpleType value = find_attr_value_('numColours', node) if value is not None and 'numColours' not in already_processed: already_processed.add('numColours') self.numColours = self.gds_parse_integer(value, node, 'numColours') value = find_attr_value_('embText', node) if value is not None and 'embText' not in already_processed: already_processed.add('embText') if value in ('true', '1'): self.embText = True elif value in ('false', '0'): self.embText = False else: raise_parse_error(node, 'Bad boolean attribute') super(GraphicRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(GraphicRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class GraphicRegionType class LineDrawingRegionType(RegionType): """A line drawing is a single colour illustration without solid areas. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). 
Range: -179.999,180 The pen (foreground) colour of the region The background colour of the region Specifies whether the region also contains text""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('penColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, penColour=None, bgColour=None, embText=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("LineDrawingRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.penColour = _cast(None, penColour) self.penColour_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None self.embText = _cast(bool, embText) self.embText_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, LineDrawingRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if LineDrawingRegionType.subclass: return LineDrawingRegionType.subclass(*args_, **kwargs_) else: return LineDrawingRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_penColour(self): return self.penColour def set_penColour(self, penColour): self.penColour = penColour def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def get_embText(self): return self.embText def set_embText(self, embText): self.embText = embText def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. 
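# penColour and bgColour in this class are both checked against the same
# ColourSimpleType enumeration; embText, by contrast, is parsed in
# buildAttributes() as a boolean that accepts only 'true'/'1'/'false'/'0'
# and otherwise raises a parse error.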
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(LineDrawingRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LineDrawingRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('LineDrawingRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'LineDrawingRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LineDrawingRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LineDrawingRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LineDrawingRegionType'): super(LineDrawingRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LineDrawingRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.penColour is not None and 'penColour' not in already_processed: already_processed.add('penColour') outfile.write(' penColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.penColour), input_name='penColour')), )) if self.bgColour is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) if self.embText is not None and 'embText' not in already_processed: already_processed.add('embText') outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LineDrawingRegionType', fromsubclass_=False, pretty_print=True): super(LineDrawingRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, 
gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('penColour', node) if value is not None and 'penColour' not in already_processed: already_processed.add('penColour') self.penColour = value self.validate_ColourSimpleType(self.penColour) # validate type ColourSimpleType value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType value = find_attr_value_('embText', node) if value is not None and 'embText' not in already_processed: already_processed.add('embText') if value in ('true', '1'): self.embText = True elif value in ('false', '0'): self.embText = False else: raise_parse_error(node, 'Bad boolean attribute') super(LineDrawingRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(LineDrawingRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class LineDrawingRegionType class ImageRegionType(RegionType): """An image is considered to be more intricate and complex than a graphic. These can be photos or drawings. The angle the rectangle encapsulating a region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). 
Range: -179.999,180 The colour bit depth required for the region The background colour of the region Specifies whether the region also contains text""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('colourDepth', 'pc:ColourDepthSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, colourDepth=None, bgColour=None, embText=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("ImageRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.colourDepth = _cast(None, colourDepth) self.colourDepth_nsprefix_ = None self.bgColour = _cast(None, bgColour) self.bgColour_nsprefix_ = None self.embText = _cast(bool, embText) self.embText_nsprefix_ = None def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, ImageRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if ImageRegionType.subclass: return ImageRegionType.subclass(*args_, **kwargs_) else: return ImageRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_colourDepth(self): return self.colourDepth def set_colourDepth(self, colourDepth): self.colourDepth = colourDepth def get_bgColour(self): return self.bgColour def set_bgColour(self, bgColour): self.bgColour = bgColour def get_embText(self): return self.embText def set_embText(self, embText): self.embText = embText def validate_ColourDepthSimpleType(self, value): # Validate type pc:ColourDepthSimpleType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['bilevel', 'greyscale', 'colour', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourDepthSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ColourSimpleType(self, value): # Validate type pc:ColourSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( super(ImageRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ImageRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'ImageRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageRegionType'): super(ImageRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.colourDepth is not None and 'colourDepth' not in already_processed: already_processed.add('colourDepth') outfile.write(' colourDepth=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.colourDepth), input_name='colourDepth')), )) if self.bgColour is not None and 'bgColour' not in already_processed: 
already_processed.add('bgColour') outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), )) if self.embText is not None and 'embText' not in already_processed: already_processed.add('embText') outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText')) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ImageRegionType', fromsubclass_=False, pretty_print=True): super(ImageRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('colourDepth', node) if value is not None and 'colourDepth' not in already_processed: already_processed.add('colourDepth') self.colourDepth = value self.validate_ColourDepthSimpleType(self.colourDepth) # validate type ColourDepthSimpleType value = find_attr_value_('bgColour', node) if value is not None and 'bgColour' not in already_processed: already_processed.add('bgColour') self.bgColour = value self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType value = find_attr_value_('embText', node) if value is not None and 'embText' not in already_processed: already_processed.add('embText') if value in ('true', '1'): self.embText = True elif value in ('false', '0'): self.embText = False else: raise_parse_error(node, 'Bad boolean attribute') super(ImageRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): super(ImageRegionType, self).buildChildren(child_, node, nodeName_, True) pass # end class ImageRegionType class TextRegionType(RegionType): """Pure text is represented as a text region. This includes drop capitals, but practically ornate text may be considered as a graphic. The angle the rectangle encapsulating the region has to be rotated in clockwise direction in order to correct the present skew (negative values indicate anti-clockwise rotation). (The rotated image can be further referenced via “AlternativeImage”.) Range: -179.999,180 The nature of the text in the region The degree of space in points between the lines of text (line spacing) The direction in which text within lines should be read (order of words and characters), in addition to “textLineOrder”. The order of text lines within the block, in addition to “readingDirection”. The angle the baseline of text within the region has to be rotated (relative to the rectangle encapsulating the region) in clockwise direction in order to correct the present skew, in addition to “orientation” (negative values indicate anti-clockwise rotation). 
Range: -179.999,180 Defines whether a region of text is indented or not Text align The primary language used in the region The secondary language used in the region The primary script used in the region The secondary script used in the region""" __hash__ = GeneratedsSuper.__hash__ member_data_items_ = [ MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('type_', 'pc:TextTypeSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('leading', 'int', 0, 1, {'use': 'optional'}), MemberSpec_('readingDirection', 'pc:ReadingDirectionSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('textLineOrder', 'pc:TextLineOrderSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('readingOrientation', 'float', 0, 1, {'use': 'optional'}), MemberSpec_('indented', 'boolean', 0, 1, {'use': 'optional'}), MemberSpec_('align', 'pc:AlignSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('primaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('secondaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('primaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('secondaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('production', 'pc:ProductionSimpleType', 0, 1, {'use': 'optional'}), MemberSpec_('TextLine', 'TextLineType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextLine', 'type': 'TextLineType'}, None), MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None), MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None), ] subclass = None superclass = RegionType def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, type_=None, leading=None, readingDirection=None, textLineOrder=None, readingOrientation=None, indented=None, align=None, primaryLanguage=None, secondaryLanguage=None, primaryScript=None, secondaryScript=None, production=None, TextLine=None, TextEquiv=None, TextStyle=None, gds_collector_=None, **kwargs_): self.gds_collector_ = gds_collector_ self.gds_elementtree_node_ = None self.original_tagname_ = None self.parent_object_ = kwargs_.get('parent_object_') self.ns_prefix_ = "pc" super(globals().get("TextRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_) self.orientation = _cast(float, orientation) self.orientation_nsprefix_ = None self.type_ = _cast(None, type_) self.type__nsprefix_ = None self.leading = _cast(int, leading) self.leading_nsprefix_ = None self.readingDirection = _cast(None, readingDirection) self.readingDirection_nsprefix_ = None self.textLineOrder = _cast(None, textLineOrder) self.textLineOrder_nsprefix_ = None self.readingOrientation = _cast(float, readingOrientation) self.readingOrientation_nsprefix_ = None self.indented = _cast(bool, 
indented) self.indented_nsprefix_ = None self.align = _cast(None, align) self.align_nsprefix_ = None self.primaryLanguage = _cast(None, primaryLanguage) self.primaryLanguage_nsprefix_ = None self.secondaryLanguage = _cast(None, secondaryLanguage) self.secondaryLanguage_nsprefix_ = None self.primaryScript = _cast(None, primaryScript) self.primaryScript_nsprefix_ = None self.secondaryScript = _cast(None, secondaryScript) self.secondaryScript_nsprefix_ = None self.production = _cast(None, production) self.production_nsprefix_ = None if TextLine is None: self.TextLine = [] else: self.TextLine = TextLine self.TextLine_nsprefix_ = "pc" if TextEquiv is None: self.TextEquiv = [] else: self.TextEquiv = TextEquiv self.TextEquiv_nsprefix_ = "pc" self.TextStyle = TextStyle self.TextStyle_nsprefix_ = "pc" def factory(*args_, **kwargs_): if CurrentSubclassModule_ is not None: subclass = getSubclassFromModule_( CurrentSubclassModule_, TextRegionType) if subclass is not None: return subclass(*args_, **kwargs_) if TextRegionType.subclass: return TextRegionType.subclass(*args_, **kwargs_) else: return TextRegionType(*args_, **kwargs_) factory = staticmethod(factory) def get_ns_prefix_(self): return self.ns_prefix_ def set_ns_prefix_(self, ns_prefix): self.ns_prefix_ = ns_prefix def get_TextLine(self): return self.TextLine def set_TextLine(self, TextLine): self.TextLine = TextLine def add_TextLine(self, value): self.TextLine.append(value) def insert_TextLine_at(self, index, value): self.TextLine.insert(index, value) def replace_TextLine_at(self, index, value): self.TextLine[index] = value def get_TextEquiv(self): return self.TextEquiv def set_TextEquiv(self, TextEquiv): self.TextEquiv = TextEquiv def add_TextEquiv(self, value): self.TextEquiv.append(value) def insert_TextEquiv_at(self, index, value): self.TextEquiv.insert(index, value) def replace_TextEquiv_at(self, index, value): self.TextEquiv[index] = value def get_TextStyle(self): return self.TextStyle def set_TextStyle(self, TextStyle): self.TextStyle = TextStyle def get_orientation(self): return self.orientation def set_orientation(self, orientation): self.orientation = orientation def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_leading(self): return self.leading def set_leading(self, leading): self.leading = leading def get_readingDirection(self): return self.readingDirection def set_readingDirection(self, readingDirection): self.readingDirection = readingDirection def get_textLineOrder(self): return self.textLineOrder def set_textLineOrder(self, textLineOrder): self.textLineOrder = textLineOrder def get_readingOrientation(self): return self.readingOrientation def set_readingOrientation(self, readingOrientation): self.readingOrientation = readingOrientation def get_indented(self): return self.indented def set_indented(self, indented): self.indented = indented def get_align(self): return self.align def set_align(self, align): self.align = align def get_primaryLanguage(self): return self.primaryLanguage def set_primaryLanguage(self, primaryLanguage): self.primaryLanguage = primaryLanguage def get_secondaryLanguage(self): return self.secondaryLanguage def set_secondaryLanguage(self, secondaryLanguage): self.secondaryLanguage = secondaryLanguage def get_primaryScript(self): return self.primaryScript def set_primaryScript(self, primaryScript): self.primaryScript = primaryScript def get_secondaryScript(self): return self.secondaryScript def set_secondaryScript(self, secondaryScript): self.secondaryScript = 
secondaryScript def get_production(self): return self.production def set_production(self, production): self.production = production def validate_TextTypeSimpleType(self, value): # Validate type pc:TextTypeSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['paragraph', 'heading', 'caption', 'header', 'footer', 'page-number', 'drop-capital', 'credit', 'floating', 'signature-mark', 'catch-word', 'marginalia', 'footnote', 'footnote-continued', 'endnote', 'TOC-entry', 'list-label', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on TextTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ReadingDirectionSimpleType(self, value): # Validate type pc:ReadingDirectionSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['left-to-right', 'right-to-left', 'top-to-bottom', 'bottom-to-top'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReadingDirectionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_TextLineOrderSimpleType(self, value): # Validate type pc:TextLineOrderSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['top-to-bottom', 'bottom-to-top', 'left-to-right', 'right-to-left'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on TextLineOrderSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_AlignSimpleType(self, value): # Validate type pc:AlignSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['left', 'centre', 'right', 'justify'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on AlignSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_LanguageSimpleType(self, value): # Validate type pc:LanguageSimpleType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Abkhaz', 'Afar', 'Afrikaans', 'Akan', 'Albanian', 'Amharic', 'Arabic', 'Aragonese', 'Armenian', 'Assamese', 'Avaric', 'Avestan', 'Aymara', 'Azerbaijani', 'Bambara', 'Bashkir', 'Basque', 'Belarusian', 'Bengali', 'Bihari', 'Bislama', 'Bosnian', 'Breton', 'Bulgarian', 'Burmese', 'Cambodian', 'Cantonese', 'Catalan', 'Chamorro', 'Chechen', 'Chichewa', 'Chinese', 'Chuvash', 'Cornish', 'Corsican', 'Cree', 'Croatian', 'Czech', 'Danish', 'Divehi', 'Dutch', 'Dzongkha', 'English', 'Esperanto', 'Estonian', 'Ewe', 'Faroese', 'Fijian', 'Finnish', 'French', 'Fula', 'Gaelic', 'Galician', 'Ganda', 'Georgian', 'German', 'Greek', 'Guaraní', 'Gujarati', 'Haitian', 'Hausa', 'Hebrew', 'Herero', 'Hindi', 'Hiri Motu', 'Hungarian', 'Icelandic', 'Ido', 'Igbo', 'Indonesian', 'Interlingua', 'Interlingue', 'Inuktitut', 'Inupiaq', 'Irish', 'Italian', 'Japanese', 'Javanese', 'Kalaallisut', 'Kannada', 'Kanuri', 'Kashmiri', 'Kazakh', 'Khmer', 'Kikuyu', 'Kinyarwanda', 'Kirundi', 'Komi', 'Kongo', 'Korean', 'Kurdish', 'Kwanyama', 'Kyrgyz', 'Lao', 'Latin', 'Latvian', 'Limburgish', 'Lingala', 'Lithuanian', 'Luba-Katanga', 'Luxembourgish', 'Macedonian', 'Malagasy', 'Malay', 'Malayalam', 'Maltese', 'Manx', 'Māori', 'Marathi', 'Marshallese', 'Mongolian', 'Nauru', 'Navajo', 'Ndonga', 'Nepali', 'North Ndebele', 'Northern Sami', 'Norwegian', 'Norwegian Bokmål', 'Norwegian Nynorsk', 'Nuosu', 'Occitan', 'Ojibwe', 'Old Church Slavonic', 'Oriya', 'Oromo', 'Ossetian', 'Pāli', 'Panjabi', 'Pashto', 'Persian', 'Polish', 'Portuguese', 'Punjabi', 'Quechua', 'Romanian', 'Romansh', 'Russian', 'Samoan', 'Sango', 'Sanskrit', 'Sardinian', 'Serbian', 'Shona', 'Sindhi', 'Sinhala', 'Slovak', 'Slovene', 'Somali', 'South Ndebele', 'Southern Sotho', 'Spanish', 'Sundanese', 'Swahili', 'Swati', 'Swedish', 'Tagalog', 'Tahitian', 'Tajik', 'Tamil', 'Tatar', 'Telugu', 'Thai', 'Tibetan', 'Tigrinya', 'Tonga', 'Tsonga', 'Tswana', 'Turkish', 'Turkmen', 'Twi', 'Uighur', 'Ukrainian', 'Urdu', 'Uzbek', 'Venda', 'Vietnamese', 'Volapük', 'Walloon', 'Welsh', 'Western Frisian', 'Wolof', 'Xhosa', 'Yiddish', 'Yoruba', 'Zhuang', 'Zulu', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LanguageSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ScriptSimpleType(self, value): # Validate type pc:ScriptSimpleType, a restriction on string. 
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha (Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 
'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def validate_ProductionSimpleType(self, value): # Validate type pc:ProductionSimpleType, a restriction on string. if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None: if not isinstance(value, str): lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, }) return False value = value enumerations = ['printed', 'typewritten', 'handwritten-cursive', 'handwritten-printscript', 'medieval-manuscript', 'other'] if value not in enumerations: lineno = self.gds_get_node_lineno_() self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ProductionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} ) result = False def hasContent_(self): if ( self.TextLine or self.TextEquiv or self.TextStyle is not None or super(TextRegionType, self).hasContent_() ): return True else: return False def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextRegionType', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextRegionType') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ if pretty_print: eol_ = '\n' else: eol_ = '' if self.original_tagname_ is not None and name_ == 'TextRegionType': name_ = self.original_tagname_ if UseCapturedNS_ and self.ns_prefix_: namespaceprefix_ = self.ns_prefix_ + ':' showIndent(outfile, level, pretty_print) outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = set() self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextRegionType') if self.hasContent_(): outfile.write('>%s' % (eol_, )) self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TextRegionType', pretty_print=pretty_print) showIndent(outfile, level, pretty_print) outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_)) else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, 
already_processed, namespaceprefix_='', name_='TextRegionType'): super(TextRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextRegionType') if self.orientation is not None and 'orientation' not in already_processed: already_processed.add('orientation') outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation')) if self.type_ is not None and 'type_' not in already_processed: already_processed.add('type_') outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), )) if self.leading is not None and 'leading' not in already_processed: already_processed.add('leading') outfile.write(' leading="%s"' % self.gds_format_integer(self.leading, input_name='leading')) if self.readingDirection is not None and 'readingDirection' not in already_processed: already_processed.add('readingDirection') outfile.write(' readingDirection=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.readingDirection), input_name='readingDirection')), )) if self.textLineOrder is not None and 'textLineOrder' not in already_processed: already_processed.add('textLineOrder') outfile.write(' textLineOrder=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.textLineOrder), input_name='textLineOrder')), )) if self.readingOrientation is not None and 'readingOrientation' not in already_processed: already_processed.add('readingOrientation') outfile.write(' readingOrientation="%s"' % self.gds_format_float(self.readingOrientation, input_name='readingOrientation')) if self.indented is not None and 'indented' not in already_processed: already_processed.add('indented') outfile.write(' indented="%s"' % self.gds_format_boolean(self.indented, input_name='indented')) if self.align is not None and 'align' not in already_processed: already_processed.add('align') outfile.write(' align=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.align), input_name='align')), )) if self.primaryLanguage is not None and 'primaryLanguage' not in already_processed: already_processed.add('primaryLanguage') outfile.write(' primaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryLanguage), input_name='primaryLanguage')), )) if self.secondaryLanguage is not None and 'secondaryLanguage' not in already_processed: already_processed.add('secondaryLanguage') outfile.write(' secondaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryLanguage), input_name='secondaryLanguage')), )) if self.primaryScript is not None and 'primaryScript' not in already_processed: already_processed.add('primaryScript') outfile.write(' primaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryScript), input_name='primaryScript')), )) if self.secondaryScript is not None and 'secondaryScript' not in already_processed: already_processed.add('secondaryScript') outfile.write(' secondaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryScript), input_name='secondaryScript')), )) if self.production is not None and 'production' not in already_processed: already_processed.add('production') outfile.write(' production=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.production), input_name='production')), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextRegionType', 
fromsubclass_=False, pretty_print=True): super(TextRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print) if pretty_print: eol_ = '\n' else: eol_ = '' for TextLine_ in self.TextLine: namespaceprefix_ = self.TextLine_nsprefix_ + ':' if (UseCapturedNS_ and self.TextLine_nsprefix_) else '' TextLine_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextLine', pretty_print=pretty_print) for TextEquiv_ in self.TextEquiv: namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else '' TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print) if self.TextStyle is not None: namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else '' self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print) def build(self, node, gds_collector_=None): self.gds_collector_ = gds_collector_ if SaveElementTreeNode: self.gds_elementtree_node_ = node already_processed = set() self.ns_prefix_ = node.prefix self.buildAttributes(node, node.attrib, already_processed) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_) return self def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('orientation', node) if value is not None and 'orientation' not in already_processed: already_processed.add('orientation') value = self.gds_parse_float(value, node, 'orientation') self.orientation = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.add('type') self.type_ = value self.validate_TextTypeSimpleType(self.type_) # validate type TextTypeSimpleType value = find_attr_value_('leading', node) if value is not None and 'leading' not in already_processed: already_processed.add('leading') self.leading = self.gds_parse_integer(value, node, 'leading') value = find_attr_value_('readingDirection', node) if value is not None and 'readingDirection' not in already_processed: already_processed.add('readingDirection') self.readingDirection = value self.validate_ReadingDirectionSimpleType(self.readingDirection) # validate type ReadingDirectionSimpleType value = find_attr_value_('textLineOrder', node) if value is not None and 'textLineOrder' not in already_processed: already_processed.add('textLineOrder') self.textLineOrder = value self.validate_TextLineOrderSimpleType(self.textLineOrder) # validate type TextLineOrderSimpleType value = find_attr_value_('readingOrientation', node) if value is not None and 'readingOrientation' not in already_processed: already_processed.add('readingOrientation') value = self.gds_parse_float(value, node, 'readingOrientation') self.readingOrientation = value value = find_attr_value_('indented', node) if value is not None and 'indented' not in already_processed: already_processed.add('indented') if value in ('true', '1'): self.indented = True elif value in ('false', '0'): self.indented = False else: raise_parse_error(node, 'Bad boolean attribute') value = find_attr_value_('align', node) if value is not None and 'align' not in already_processed: already_processed.add('align') self.align = value self.validate_AlignSimpleType(self.align) # validate type AlignSimpleType value = find_attr_value_('primaryLanguage', node) if value is not None and 
'primaryLanguage' not in already_processed: already_processed.add('primaryLanguage') self.primaryLanguage = value self.validate_LanguageSimpleType(self.primaryLanguage) # validate type LanguageSimpleType value = find_attr_value_('secondaryLanguage', node) if value is not None and 'secondaryLanguage' not in already_processed: already_processed.add('secondaryLanguage') self.secondaryLanguage = value self.validate_LanguageSimpleType(self.secondaryLanguage) # validate type LanguageSimpleType value = find_attr_value_('primaryScript', node) if value is not None and 'primaryScript' not in already_processed: already_processed.add('primaryScript') self.primaryScript = value self.validate_ScriptSimpleType(self.primaryScript) # validate type ScriptSimpleType value = find_attr_value_('secondaryScript', node) if value is not None and 'secondaryScript' not in already_processed: already_processed.add('secondaryScript') self.secondaryScript = value self.validate_ScriptSimpleType(self.secondaryScript) # validate type ScriptSimpleType value = find_attr_value_('production', node) if value is not None and 'production' not in already_processed: already_processed.add('production') self.production = value self.validate_ProductionSimpleType(self.production) # validate type ProductionSimpleType super(TextRegionType, self).buildAttributes(node, attrs, already_processed) def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None): if nodeName_ == 'TextLine': obj_ = TextLineType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextLine.append(obj_) obj_.original_tagname_ = 'TextLine' elif nodeName_ == 'TextEquiv': obj_ = TextEquivType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextEquiv.append(obj_) obj_.original_tagname_ = 'TextEquiv' elif nodeName_ == 'TextStyle': obj_ = TextStyleType.factory(parent_object_=self) obj_.build(child_, gds_collector_=gds_collector_) self.TextStyle = obj_ obj_.original_tagname_ = 'TextStyle' super(TextRegionType, self).buildChildren(child_, node, nodeName_, True) # end class TextRegionType GDSClassesMapping = { 'PcGts': PcGtsType, } USAGE_TEXT = """ Usage: python <Parser>.py [ -s ] <in_xml_file> """ def usage(): print(USAGE_TEXT) sys.exit(1) def get_root_tag(node): tag = Tag_pattern_.match(node.tag).groups()[-1] rootClass = GDSClassesMapping.get(tag) if rootClass is None: rootClass = globals().get(tag) return tag, rootClass def get_required_ns_prefix_defs(rootNode): '''Get all name space prefix definitions required in this XML doc. Return a dictionary of definitions and a char string of definitions. 
''' nsmap = { prefix: uri for node in rootNode.iter() for (prefix, uri) in node.nsmap.items() if prefix is not None } namespacedefs = ' '.join([ 'xmlns:{}="{}"'.format(prefix, uri) for prefix, uri in nsmap.items() ]) return nsmap, namespacedefs def parse(inFileName, silence=False, print_warnings=True): global CapturedNsmap_ gds_collector = GdsCollector_() parser = None doc = parsexml_(inFileName, parser) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'PcGtsType' rootClass = PcGtsType rootObj = rootClass.factory() rootObj.build(rootNode, gds_collector_=gds_collector) CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode) if not SaveElementTreeNode: doc = None rootNode = None if not silence: sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export( sys.stdout, 0, name_=rootTag, namespacedef_=namespacedefs, pretty_print=True) if print_warnings and len(gds_collector.get_messages()) > 0: separator = ('-' * 50) + '\n' sys.stderr.write(separator) sys.stderr.write('----- Warnings -- count: {} -----\n'.format( len(gds_collector.get_messages()), )) gds_collector.write_messages(sys.stderr) sys.stderr.write(separator) return rootObj def parseEtree(inFileName, silence=False, print_warnings=True, mapping=None, nsmap=None): parser = None doc = parsexml_(inFileName, parser) gds_collector = GdsCollector_() rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'PcGtsType' rootClass = PcGtsType rootObj = rootClass.factory() rootObj.build(rootNode, gds_collector_=gds_collector) # Enable Python to collect the space used by the DOM. if mapping is None: mapping = {} rootElement = rootObj.to_etree( None, name_=rootTag, mapping_=mapping, nsmap_=nsmap) reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) if not SaveElementTreeNode: doc = None rootNode = None if not silence: content = etree_.tostring( rootElement, pretty_print=True, xml_declaration=True, encoding="utf-8") sys.stdout.write(str(content)) sys.stdout.write('\n') if print_warnings and len(gds_collector.get_messages()) > 0: separator = ('-' * 50) + '\n' sys.stderr.write(separator) sys.stderr.write('----- Warnings -- count: {} -----\n'.format( len(gds_collector.get_messages()), )) gds_collector.write_messages(sys.stderr) sys.stderr.write(separator) return rootObj, rootElement, mapping, reverse_mapping def parseString(inString, silence=False, print_warnings=True): '''Parse a string, create the object tree, and export it. Arguments: - inString -- A string. This XML fragment should not start with an XML declaration containing an encoding. - silence -- A boolean. If False, export the object. Returns -- The root object in the tree. 
''' parser = None rootNode= parsexmlstring_(inString, parser) gds_collector = GdsCollector_() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'PcGtsType' rootClass = PcGtsType rootObj = rootClass.factory() rootObj.build(rootNode, gds_collector_=gds_collector) if not SaveElementTreeNode: rootNode = None if not silence: sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export( sys.stdout, 0, name_=rootTag, namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"') if print_warnings and len(gds_collector.get_messages()) > 0: separator = ('-' * 50) + '\n' sys.stderr.write(separator) sys.stderr.write('----- Warnings -- count: {} -----\n'.format( len(gds_collector.get_messages()), )) gds_collector.write_messages(sys.stderr) sys.stderr.write(separator) return rootObj def parseLiteral(inFileName, silence=False, print_warnings=True): parser = None doc = parsexml_(inFileName, parser) gds_collector = GdsCollector_() rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'PcGtsType' rootClass = PcGtsType rootObj = rootClass.factory() rootObj.build(rootNode, gds_collector_=gds_collector) # Enable Python to collect the space used by the DOM. if not SaveElementTreeNode: doc = None rootNode = None if not silence: sys.stdout.write('#from page import *\n\n') sys.stdout.write('import page as model_\n\n') sys.stdout.write('rootObj = model_.rootClass(\n') rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) sys.stdout.write(')\n') if print_warnings and len(gds_collector.get_messages()) > 0: separator = ('-' * 50) + '\n' sys.stderr.write(separator) sys.stderr.write('----- Warnings -- count: {} -----\n'.format( len(gds_collector.get_messages()), )) gds_collector.write_messages(sys.stderr) sys.stderr.write(separator) return rootObj def main(): args = sys.argv[1:] if len(args) == 1: parse(args[0]) else: usage() if __name__ == '__main__': #import pdb; pdb.set_trace() main() RenameMappings_ = { } # # Mapping of namespaces to types defined in them # and the file in which each is defined. # simpleTypes are marked "ST" and complexTypes "CT". 
NamespaceToDefMappings_ = {'http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15': [('ColourSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('ReadingDirectionSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('TextLineOrderSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('TextTypeSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('PageTypeSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('ConfSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('LanguageSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('ScriptSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('ColourDepthSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('GraphicsTypeSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('ChartTypeSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('PointsType', 'assets/schema/pagecontent.xsd', 'ST'), ('ProductionSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('AlignSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('GroupTypeSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('TextDataTypeSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('UnderlineStyleSimpleType', 'assets/schema/pagecontent.xsd', 'ST'), ('PcGtsType', 'assets/schema/pagecontent.xsd', 'CT'), ('MetadataType', 'assets/schema/pagecontent.xsd', 'CT'), ('MetadataItemType', 'assets/schema/pagecontent.xsd', 'CT'), ('LabelsType', 'assets/schema/pagecontent.xsd', 'CT'), ('LabelType', 'assets/schema/pagecontent.xsd', 'CT'), ('PageType', 'assets/schema/pagecontent.xsd', 'CT'), ('TextRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('CoordsType', 'assets/schema/pagecontent.xsd', 'CT'), ('TextLineType', 'assets/schema/pagecontent.xsd', 'CT'), ('WordType', 'assets/schema/pagecontent.xsd', 'CT'), ('GlyphType', 'assets/schema/pagecontent.xsd', 'CT'), ('TextEquivType', 'assets/schema/pagecontent.xsd', 'CT'), ('ImageRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('LineDrawingRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('GraphicRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('TableRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('GridType', 'assets/schema/pagecontent.xsd', 'CT'), ('GridPointsType', 'assets/schema/pagecontent.xsd', 'CT'), ('ChartRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('SeparatorRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('MathsRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('ChemRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('MapRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('MusicRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('AdvertRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('NoiseRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('UnknownRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('CustomRegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('PrintSpaceType', 'assets/schema/pagecontent.xsd', 'CT'), ('ReadingOrderType', 'assets/schema/pagecontent.xsd', 'CT'), ('RegionRefIndexedType', 'assets/schema/pagecontent.xsd', 'CT'), ('OrderedGroupIndexedType', 'assets/schema/pagecontent.xsd', 'CT'), ('UnorderedGroupIndexedType', 'assets/schema/pagecontent.xsd', 'CT'), ('RegionRefType', 'assets/schema/pagecontent.xsd', 'CT'), ('OrderedGroupType', 'assets/schema/pagecontent.xsd', 'CT'), ('UnorderedGroupType', 'assets/schema/pagecontent.xsd', 'CT'), ('BorderType', 'assets/schema/pagecontent.xsd', 'CT'), ('LayersType', 'assets/schema/pagecontent.xsd', 'CT'), ('LayerType', 'assets/schema/pagecontent.xsd', 'CT'), ('BaselineType', 'assets/schema/pagecontent.xsd', 'CT'), 
('RelationsType', 'assets/schema/pagecontent.xsd', 'CT'), ('RelationType', 'assets/schema/pagecontent.xsd', 'CT'), ('TextStyleType', 'assets/schema/pagecontent.xsd', 'CT'), ('RegionType', 'assets/schema/pagecontent.xsd', 'CT'), ('AlternativeImageType', 'assets/schema/pagecontent.xsd', 'CT'), ('GraphemesType', 'assets/schema/pagecontent.xsd', 'CT'), ('GraphemeBaseType', 'assets/schema/pagecontent.xsd', 'CT'), ('GraphemeType', 'assets/schema/pagecontent.xsd', 'CT'), ('NonPrintingCharType', 'assets/schema/pagecontent.xsd', 'CT'), ('GraphemeGroupType', 'assets/schema/pagecontent.xsd', 'CT'), ('UserDefinedType', 'assets/schema/pagecontent.xsd', 'CT'), ('UserAttributeType', 'assets/schema/pagecontent.xsd', 'CT'), ('TableCellRoleType', 'assets/schema/pagecontent.xsd', 'CT'), ('RolesType', 'assets/schema/pagecontent.xsd', 'CT')]} __all__ = [ "AdvertRegionType", "AlternativeImageType", "BaselineType", "BorderType", "ChartRegionType", "ChemRegionType", "CoordsType", "CustomRegionType", "GlyphType", "GraphemeBaseType", "GraphemeGroupType", "GraphemeType", "GraphemesType", "GraphicRegionType", "GridPointsType", "GridType", "ImageRegionType", "LabelType", "LabelsType", "LayerType", "LayersType", "LineDrawingRegionType", "MapRegionType", "MathsRegionType", "MetadataItemType", "MetadataType", "MusicRegionType", "NoiseRegionType", "NonPrintingCharType", "OrderedGroupIndexedType", "OrderedGroupType", "PageType", "PcGtsType", "PrintSpaceType", "ReadingOrderType", "RegionRefIndexedType", "RegionRefType", "RegionType", "RelationType", "RelationsType", "RolesType", "SeparatorRegionType", "TableCellRoleType", "TableRegionType", "TextEquivType", "TextLineType", "TextRegionType", "TextStyleType", "UnknownRegionType", "UnorderedGroupIndexedType", "UnorderedGroupType", "UserAttributeType", "UserDefinedType", "WordType" ]
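# ---------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the generated module).
# It shows how the parse() entry point defined above is typically used to
# read the text content of a PAGE XML document. get_Page(), get_id() and
# get_Unicode() follow the usual generateDS "get_<member>" accessor pattern;
# only TextRegionType's accessors are visible in this excerpt, so the other
# names are assumptions.
def _example_dump_text_regions(page_xml_path):
    pc_gts = parse(page_xml_path, silence=True)           # root PcGtsType object
    for region in pc_gts.get_Page().get_TextRegion():     # assumed accessors
        print(region.get_id(), region.get_type())
        for text_line in region.get_TextLine():
            for equiv in text_line.get_TextEquiv():
                print('    ', equiv.get_Unicode())         # assumed accessor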
[ "lucas@sulzbach.org" ]
lucas@sulzbach.org
a6bdb94809d7680329ff28eac373e0a783cffd6d
9f7f6b9d3eb1ec85136d16fa02987b412882c595
/examples/websocket_test.py
d855e0f74d6efd2dff63657933df92f138a49e9b
[ "MIT" ]
permissive
fmux/sanicpluginsframework
e6f631487ac1962d04e8263ea3c789fe20179905
175525e85504fcf6e7d32bf12874578fc14c115a
refs/heads/master
2020-07-24T02:52:47.370302
2019-09-11T10:02:45
2019-09-11T10:02:45
207,780,270
0
0
MIT
2019-09-11T09:59:43
2019-09-11T09:59:43
null
UTF-8
Python
false
false
811
py
import pickle

from sanic import Sanic
from spf import SanicPlugin, SanicPluginsFramework
from sanic.response import text
from logging import DEBUG


class MyPlugin(SanicPlugin):
    def __init__(self, *args, **kwargs):
        super(MyPlugin, self).__init__(*args, **kwargs)


instance = MyPlugin()


@instance.middleware(priority=6, with_context=True, attach_to="cleanup")
def mw1(request, context):
    context['test1'] = "test"
    print("Doing Cleanup!")


app = Sanic(__name__)
spf = SanicPluginsFramework(app)
assoc_reg = spf.register_plugin(instance)


@app.route('/')
def index(request):
    return text("hello world")


@app.websocket('/test1')
async def we_test(request, ws):
    print("hi")
    return


if __name__ == "__main__":
    app.run("127.0.0.1", port=8098, debug=True, auto_reload=False)
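# Hedged companion sketch (not part of the file above): one way to exercise the
# '/test1' websocket route while this app is running. The third-party
# "websockets" package is an assumption here; any websocket client would do.
#
#   import asyncio
#   import websockets
#
#   async def poke():
#       async with websockets.connect("ws://127.0.0.1:8098/test1") as ws:
#           pass  # the handler above only prints "hi" and returns
#
#   asyncio.get_event_loop().run_until_complete(poke())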
[ "ashleysommer@gmail.com" ]
ashleysommer@gmail.com
ac70e2f057693341864da24d6890089f8c1d3fdb
58e588aaf090f451251a60097295ec01baa63bb0
/reportlab/graphics/charts/spider.py
a9b23d13b3c69e064cf4de6fbe0ceb94fb8122cb
[]
no_license
alawibaba/loghound
f12fe3d31131ba768bc774ba9722846b02558103
a4399155aac4f3debaf2a66bf72df3a9774229e9
refs/heads/master
2016-09-05T15:31:48.635607
2010-02-01T02:15:35
2010-02-01T02:15:35
32,116,344
0
1
null
null
null
null
UTF-8
Python
false
false
15,784
py
#Copyright ReportLab Europe Ltd. 2000-2004 #see license.txt for license details #history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/charts/spider.py # spider chart, also known as radar chart __version__=''' $Id: spider.py 3345 2008-12-12 17:55:22Z damian $ ''' __doc__="""Spider Chart Normal use shows variation of 5-10 parameters against some 'norm' or target. When there is more than one series, place the series with the largest numbers first, as it will be overdrawn by each successive one. """ import copy from math import sin, cos, pi from reportlab.lib import colors from reportlab.lib.validators import isColor, isNumber, isListOfNumbersOrNone,\ isListOfNumbers, isColorOrNone, isString,\ isListOfStringsOrNone, OneOf, SequenceOf,\ isBoolean, isListOfColors, isNumberOrNone,\ isNoneOrListOfNoneOrStrings, isTextAnchor,\ isNoneOrListOfNoneOrNumbers, isBoxAnchor,\ isStringOrNone, isStringOrNone, EitherOr,\ isCallable from reportlab.lib.attrmap import * from reportlab.pdfgen.canvas import Canvas from reportlab.graphics.shapes import Group, Drawing, Line, Rect, Polygon, PolyLine, Ellipse, \ Wedge, String, STATE_DEFAULTS from reportlab.graphics.widgetbase import Widget, TypedPropertyCollection, PropHolder from reportlab.graphics.charts.areas import PlotArea from reportlab.graphics.charts.legends import _objStr from piecharts import WedgeLabel from reportlab.graphics.widgets.markers import makeMarker, uSymbol2Symbol, isSymbol class StrandProperty(PropHolder): _attrMap = AttrMap( strokeWidth = AttrMapValue(isNumber), fillColor = AttrMapValue(isColorOrNone), strokeColor = AttrMapValue(isColorOrNone), strokeDashArray = AttrMapValue(isListOfNumbersOrNone), symbol = AttrMapValue(EitherOr((isStringOrNone,isSymbol)), desc='Widget placed at data points.'), symbolSize= AttrMapValue(isNumber, desc='Symbol size.'), name = AttrMapValue(isStringOrNone, desc='Name of the strand.'), ) def __init__(self): self.strokeWidth = 1 self.fillColor = None self.strokeColor = STATE_DEFAULTS["strokeColor"] self.strokeDashArray = STATE_DEFAULTS["strokeDashArray"] self.symbol = None self.symbolSize = 5 self.name = None class SpokeProperty(PropHolder): _attrMap = AttrMap( strokeWidth = AttrMapValue(isNumber), fillColor = AttrMapValue(isColorOrNone), strokeColor = AttrMapValue(isColorOrNone), strokeDashArray = AttrMapValue(isListOfNumbersOrNone), labelRadius = AttrMapValue(isNumber), visible = AttrMapValue(isBoolean,desc="True if the spoke line is to be drawn"), ) def __init__(self,**kw): self.strokeWidth = 0.5 self.fillColor = None self.strokeColor = STATE_DEFAULTS["strokeColor"] self.strokeDashArray = STATE_DEFAULTS["strokeDashArray"] self.visible = 1 self.labelRadius = 1.05 class SpokeLabel(WedgeLabel): def __init__(self,**kw): WedgeLabel.__init__(self,**kw) if '_text' not in kw.keys(): self._text = '' class StrandLabel(SpokeLabel): _attrMap = AttrMap(BASE=SpokeLabel, format = AttrMapValue(EitherOr((isStringOrNone,isCallable)),"Format for the label"), dR = AttrMapValue(isNumberOrNone,"radial shift for label"), ) def __init__(self,**kw): self.format = '' self.dR = 0 SpokeLabel.__init__(self,**kw) def _setupLabel(labelClass, text, radius, cx, cy, angle, car, sar, sty): L = labelClass() L._text = text L.x = cx + radius*car L.y = cy + radius*sar L._pmv = angle*180/pi L.boxAnchor = sty.boxAnchor L.dx = sty.dx L.dy = sty.dy L.angle = sty.angle L.boxAnchor = sty.boxAnchor L.boxStrokeColor = sty.boxStrokeColor L.boxStrokeWidth = sty.boxStrokeWidth L.boxFillColor = sty.boxFillColor 
L.strokeColor = sty.strokeColor L.strokeWidth = sty.strokeWidth L.leading = sty.leading L.width = sty.width L.maxWidth = sty.maxWidth L.height = sty.height L.textAnchor = sty.textAnchor L.visible = sty.visible L.topPadding = sty.topPadding L.leftPadding = sty.leftPadding L.rightPadding = sty.rightPadding L.bottomPadding = sty.bottomPadding L.fontName = sty.fontName L.fontSize = sty.fontSize L.fillColor = sty.fillColor return L class SpiderChart(PlotArea): _attrMap = AttrMap(BASE=PlotArea, data = AttrMapValue(None, desc='Data to be plotted, list of (lists of) numbers.'), labels = AttrMapValue(isListOfStringsOrNone, desc="optional list of labels to use for each data point"), startAngle = AttrMapValue(isNumber, desc="angle of first slice; like the compass, 0 is due North"), direction = AttrMapValue( OneOf('clockwise', 'anticlockwise'), desc="'clockwise' or 'anticlockwise'"), strands = AttrMapValue(None, desc="collection of strand descriptor objects"), spokes = AttrMapValue(None, desc="collection of spoke descriptor objects"), strandLabels = AttrMapValue(None, desc="collection of strand label descriptor objects"), spokeLabels = AttrMapValue(None, desc="collection of spoke label descriptor objects"), ) def makeSwatchSample(self, rowNo, x, y, width, height): baseStyle = self.strands styleIdx = rowNo % len(baseStyle) style = baseStyle[styleIdx] strokeColor = getattr(style, 'strokeColor', getattr(baseStyle,'strokeColor',None)) fillColor = getattr(style, 'fillColor', getattr(baseStyle,'fillColor',None)) strokeDashArray = getattr(style, 'strokeDashArray', getattr(baseStyle,'strokeDashArray',None)) strokeWidth = getattr(style, 'strokeWidth', getattr(baseStyle, 'strokeWidth',0)) symbol = getattr(style, 'symbol', getattr(baseStyle, 'symbol',None)) ym = y+height/2.0 if fillColor is None and strokeColor is not None and strokeWidth>0: bg = Line(x,ym,x+width,ym,strokeWidth=strokeWidth,strokeColor=strokeColor, strokeDashArray=strokeDashArray) elif fillColor is not None: bg = Rect(x,y,width,height,strokeWidth=strokeWidth,strokeColor=strokeColor, strokeDashArray=strokeDashArray,fillColor=fillColor) else: bg = None if symbol: symbol = uSymbol2Symbol(symbol,x+width/2.,ym,color) if bg: g = Group() g.add(bg) g.add(symbol) return g return symbol or bg def getSeriesName(self,i,default=None): '''return series name i or default''' return _objStr(getattr(self.strands[i],'name',default)) def __init__(self): PlotArea.__init__(self) self.data = [[10,12,14,16,14,12], [6,8,10,12,9,11]] self.labels = None # or list of strings self.labels = ['a','b','c','d','e','f'] self.startAngle = 90 self.direction = "clockwise" self.strands = TypedPropertyCollection(StrandProperty) self.spokes = TypedPropertyCollection(SpokeProperty) self.spokeLabels = TypedPropertyCollection(SpokeLabel) self.spokeLabels._text = None self.strandLabels = TypedPropertyCollection(StrandLabel) self.x = 10 self.y = 10 self.width = 180 self.height = 180 def demo(self): d = Drawing(200, 200) d.add(SpiderChart()) return d def normalizeData(self, outer = 0.0): """Turns data into normalized ones where each datum is < 1.0, and 1.0 = maximum radius. Adds 10% at outside edge by default""" data = self.data assert min(map(min,data)) >=0, "Cannot do spider plots of negative numbers!" norm = max(map(max,data)) norm *= (1.0+outer) if norm<1e-9: norm = 1.0 self._norm = norm return [[e/norm for e in row] for row in data] def _innerDrawLabel(self, sty, radius, cx, cy, angle, car, sar, labelClass=StrandLabel): "Draw a label for a given item in the list." 
fmt = sty.format value = radius*self._norm if not fmt: text = None elif isinstance(fmt,str): if fmt == 'values': text = sty._text else: text = fmt % value elif callable(fmt): text = fmt(value) else: raise ValueError("Unknown formatter type %s, expected string or function" % fmt) if text: dR = sty.dR if dR: radius += dR/self._radius L = _setupLabel(labelClass, text, radius, cx, cy, angle, car, sar, sty) if dR<0: L._anti = 1 else: L = None return L def draw(self): # normalize slice data g = self.makeBackground() or Group() xradius = self.width/2.0 yradius = self.height/2.0 self._radius = radius = min(xradius, yradius) cx = self.x + xradius cy = self.y + yradius data = self.normalizeData() self._seriesCount = len(data) n = len(data[0]) #labels if self.labels is None: labels = [''] * n else: labels = self.labels #there's no point in raising errors for less than enough errors if #we silently create all for the extreme case of no labels. i = n-len(labels) if i>0: labels = labels + ['']*i S = [] STRANDS = [] STRANDAREAS = [] syms = [] labs = [] csa = [] angle = self.startAngle*pi/180 direction = self.direction == "clockwise" and -1 or 1 angleBetween = direction*(2 * pi)/float(n) spokes = self.spokes spokeLabels = self.spokeLabels for i in xrange(n): car = cos(angle)*radius sar = sin(angle)*radius csa.append((car,sar,angle)) si = self.spokes[i] if si.visible: spoke = Line(cx, cy, cx + car, cy + sar, strokeWidth = si.strokeWidth, strokeColor=si.strokeColor, strokeDashArray=si.strokeDashArray) S.append(spoke) sli = spokeLabels[i] text = sli._text if not text: text = labels[i] if text: S.append(_setupLabel(WedgeLabel, text, si.labelRadius, cx, cy, angle, car, sar, sli)) angle += angleBetween # now plot the polygons rowIdx = 0 strands = self.strands strandLabels = self.strandLabels for row in data: # series plot rsty = strands[rowIdx] points = [] car, sar = csa[-1][:2] r = row[-1] points.append(cx+car*r) points.append(cy+sar*r) for i in xrange(n): car, sar, angle = csa[i] r = row[i] points.append(cx+car*r) points.append(cy+sar*r) L = self._innerDrawLabel(strandLabels[(rowIdx,i)], r, cx, cy, angle, car, sar, labelClass=StrandLabel) if L: labs.append(L) sty = strands[(rowIdx,i)] uSymbol = sty.symbol # put in a marker, if it needs one if uSymbol: s_x = cx+car*r s_y = cy+sar*r s_fillColor = sty.fillColor s_strokeColor = sty.strokeColor s_strokeWidth = sty.strokeWidth s_angle = 0 s_size = sty.symbolSize if type(uSymbol) is type(''): symbol = makeMarker(uSymbol, size = s_size, x = s_x, y = s_y, fillColor = s_fillColor, strokeColor = s_strokeColor, strokeWidth = s_strokeWidth, angle = s_angle, ) else: symbol = uSymbol2Symbol(uSymbol,s_x,s_y,s_fillColor) for k,v in (('size', s_size), ('fillColor', s_fillColor), ('x', s_x), ('y', s_y), ('strokeColor',s_strokeColor), ('strokeWidth',s_strokeWidth), ('angle',s_angle),): if getattr(symbol,k,None) is None: try: setattr(symbol,k,v) except: pass syms.append(symbol) # make up the 'strand' if rsty.fillColor: strand = Polygon(points) strand.fillColor = rsty.fillColor strand.strokeColor = None strand.strokeWidth = 0 STRANDAREAS.append(strand) if rsty.strokeColor and rsty.strokeWidth: strand = PolyLine(points) strand.strokeColor = rsty.strokeColor strand.strokeWidth = rsty.strokeWidth strand.strokeDashArray = rsty.strokeDashArray STRANDS.append(strand) rowIdx += 1 map(g.add,STRANDAREAS+STRANDS+syms+S+labs) return g def sample1(): "Make a simple spider chart" d = Drawing(400, 400) sp = SpiderChart() sp.x = 50 sp.y = 50 sp.width = 300 sp.height = 300 sp.data = 
[[10,12,14,16,14,12], [6,8,10,12,9,15],[7,8,17,4,12,8]] sp.labels = ['a','b','c','d','e','f'] sp.strands[0].strokeColor = colors.cornsilk sp.strands[1].strokeColor = colors.cyan sp.strands[2].strokeColor = colors.palegreen sp.strands[0].fillColor = colors.cornsilk sp.strands[1].fillColor = colors.cyan sp.strands[2].fillColor = colors.palegreen sp.spokes.strokeDashArray = (2,2) d.add(sp) return d def sample2(): "Make a spider chart with markers, but no fill" d = Drawing(400, 400) sp = SpiderChart() sp.x = 50 sp.y = 50 sp.width = 300 sp.height = 300 sp.data = [[10,12,14,16,14,12], [6,8,10,12,9,15],[7,8,17,4,12,8]] sp.labels = ['U','V','W','X','Y','Z'] sp.strands.strokeWidth = 1 sp.strands[0].fillColor = colors.pink sp.strands[1].fillColor = colors.lightblue sp.strands[2].fillColor = colors.palegreen sp.strands[0].strokeColor = colors.red sp.strands[1].strokeColor = colors.blue sp.strands[2].strokeColor = colors.green sp.strands.symbol = "FilledDiamond" sp.strands[1].symbol = makeMarker("Circle") sp.strands[1].symbol.strokeWidth = 0.5 sp.strands[1].symbol.fillColor = colors.yellow sp.strands.symbolSize = 6 sp.strandLabels[0,3]._text = 'special' sp.strandLabels[0,1]._text = 'one' sp.strandLabels[0,0]._text = 'zero' sp.strandLabels[1,0]._text = 'Earth' sp.strandLabels[2,2]._text = 'Mars' sp.strandLabels.format = 'values' sp.strandLabels.dR = -5 d.add(sp) return d if __name__=='__main__': d = sample1() from reportlab.graphics.renderPDF import drawToFile drawToFile(d, 'spider.pdf') d = sample2() drawToFile(d, 'spider2.pdf')
[ "dan.lowe.wheeler@42e3ffb8-c440-11de-ba9a-9db95b2bc6c5" ]
dan.lowe.wheeler@42e3ffb8-c440-11de-ba9a-9db95b2bc6c5
5ac051603f345727cdb55cb9fe49450153592d95
3f8e34f0ccf59aae44acfc192fab476f1ae3bb74
/stor/types/blockchain_format/sub_epoch_summary.py
7a8e12c7689d70e5f4c5038d3d8538a65c59e37e
[ "Apache-2.0" ]
permissive
chia-os/stor-blockchain
9952b5ba78480cf0c71dc4ad053bd0d28d39eee7
3fe6268263e2db98970edc296d2e4c53694aafd0
refs/heads/master
2023-08-11T20:03:53.467778
2021-09-15T07:28:39
2021-09-15T07:28:39
null
0
0
null
null
null
null
UTF-8
Python
false
false
642
py
from dataclasses import dataclass
from typing import Optional

from stor.types.blockchain_format.sized_bytes import bytes32
from stor.util.ints import uint8, uint64
from stor.util.streamable import Streamable, streamable


@dataclass(frozen=True)
@streamable
class SubEpochSummary(Streamable):
    prev_subepoch_summary_hash: bytes32
    reward_chain_hash: bytes32  # hash of reward chain at end of last segment
    num_blocks_overflow: uint8  # How many more blocks than 384*(N-1)
    new_difficulty: Optional[uint64]  # Only once per epoch (diff adjustment)
    new_sub_slot_iters: Optional[uint64]  # Only once per epoch (diff adjustment)
[ "info@stor.network" ]
info@stor.network
84aa481771111981f7f48f85cd2805feb3da8a50
c4526313117430d4e279ef11b98070d60a820e07
/FeatureExtractors/feature_extractor.py
606be6f07a6c54ff27c3e335c3460654db10991f
[]
no_license
Chzy0624/py_pdf_stm
1ae36c2df0f80f644b991edf183eab16c5a333ed
8fde14c2fe3e6486d8830414d79d48726d8c66ef
refs/heads/master
2023-05-05T04:06:17.698359
2019-10-22T05:48:24
2019-10-22T05:48:24
null
0
0
null
null
null
null
UTF-8
Python
false
false
11,417
py
import sys import traceback from pprint import pprint from typing import List, Dict, Any from DataSheetParsers.DataSheet import DataSheet from PinManager import PinManager from TableExtractor import TableExtractor, Table from Utils import is_numeric, is_dict, remove_units, replace_i, merge def convert_type(name: str, value): if type(value) == str: value = value.replace(',', '') value = value.strip('\n ') if 'KB' in name.upper(): name = remove_units(name, 'kb') if is_numeric(value): value = int(value) if 'MB' in name.upper(): name = remove_units(name, 'mb') if is_numeric(value): value = int(value) * 1024 elif type(value) == int: value *= 1024 if 'MHZ' in name.upper(): name = remove_units(name, 'mhz') if is_numeric(value): value = int(value) if type(value) == str: if 'KB' in value: value = replace_i(value, 'kb', '') if is_numeric(value): value = int(value) elif type(value) == int: pass else: value += 'KB' return name, value if 'MB' in value: value = replace_i(value, 'mb', '') if is_numeric(value): value = int(value) * 1024 elif type(value) == int: value *= 1024 else: value += 'MB' return name, value if 'MHZ' in value.upper(): value = replace_i(value, 'MHz', '') if is_numeric(value): value = int(value) elif type(value) == int: pass else: value += 'MHz' return name, value # UNIFIED NAMES # int_values = ['Flash memory', 'RAM', 'UART', 'SPI', 'Total GPIOS','CPU Frequency'] # if name in int_values: if type(value) != int and is_numeric(value): if type(value) == str: if not (value.lower() == 'no' or value.lower() == 'yes'): try: value = int(value) except Exception as ex: print('Failed to convert {} {} to int\n{}'.format(name, value, ex)) return name, value class FeatureListExtractor: # This class is adapted to STM def fix_name(self, name): name = "".join([part[::-1] for part in name[::1][::-1].split('\n')]) return self.config['corrections'].get(name, name) def __init__(self, controller: str, datasheet: DataSheet, config) -> None: """ Class for comparing multiple STM32 controllers :type controller_list: list of stm controllers that you want to compare """ self.controller = controller self.config = config # type: Dict[str,Dict] self.datasheet = datasheet self.features_tables = [] # type: List[Table] self.features = {} # type: Dict[str,Dict] self.pin_data = {} # type: Dict[str, Dict[str, Any]] self.config_name = 'UNKNOWN CONTROLLER' self.mc_family = 'UNKNOWN' self.pin_manager = PinManager(self.pin_data,{}) self.post_init() def post_init(self): pass def process(self): self.extract_tables() self.extract_features() del self.features_tables self.extract_pinout() return self.features def extract_table(self, datasheet, page): print('Extracting table from {} page'.format(page + 1)) pdf_int = TableExtractor(str(datasheet.path)) try: table = pdf_int.parse_page(page) except Exception as ex: pass table = None return table def extract_tables(self): # OVERRIDE THIS FUNCTION FOR NEW CONTROLLER return def handle_feature(self, name, value): if '\u2013' in name: name = name.replace('\u2013', '-') if type(value) == str: if '\u2013' in value: value = value.replace('\u2013', '-') if '\n' in value: value = value.replace('\n', ' ') return [(name, value)] # Can be list of values and names def extract_features(self): controller_features_names = [] controller_features = {} feature_offset = 0 for table in self.features_tables: try: if not table.global_map: continue _, features_cell_span = table.get_cell_span(table.get_col(0)[0]) # EXTRACTING NAMES OF FEATURES if features_cell_span > 1: for row_id, row in 
table.global_map.items(): if row_id == 0: continue features = set(list(row.values())[:features_cell_span]) features = sorted(features, key=lambda cell: cell.center.x) texts = list(map(lambda cell: cell.clean_text, features)) controller_features_names.append(' '.join(texts)) else: texts = list(map(lambda cell: cell.clean_text, table.get_col(0)[1:])) controller_features_names.extend(texts) # EXTRACTING STM FEATURES current_stm_name = "" mcu_counter = {} name = 'ERROR' for col_id in range(features_cell_span, len(table.get_row(0))): features = table.get_col(col_id) for n, feature in enumerate(features): if n == 0: name = table.get_cell(col_id, 0).clean_text if name == current_stm_name: num = mcu_counter[current_stm_name] name += '-{}'.format(num) mcu_counter[current_stm_name] += 1 else: current_stm_name = name if not mcu_counter.get(current_stm_name, False): mcu_counter[current_stm_name] = 1 if not controller_features.get(name, False): controller_features[name] = {} continue feature_name = controller_features_names[feature_offset + n - 1] feature_value = feature.text for n, v in self.handle_feature(feature_name, feature_value): if n and v: n, v = convert_type(n, v) if controller_features[name].get(n, False): v = self.merge_features(controller_features[name].get(n), v) controller_features[name][n] = v else: controller_features[name][n] = v feature_offset = len(controller_features_names) except Exception as ex: sys.stderr.write("ERROR {}".format(ex)) traceback.print_exc() # FILL MISSING FIELDS for stm_name in controller_features.keys(): for stm_name2 in controller_features.keys(): if stm_name == stm_name2: continue if stm_name in stm_name2: for feature_name, value in controller_features[stm_name].items(): if controller_features[stm_name2].get(feature_name, False): continue else: controller_features[stm_name2][feature_name] = value self.features = controller_features return controller_features def extract_pinout(self): for package, pin_data in self.pin_data.items(): for mcu,mcu_features in self.features.items(): if package in mcu_features.get('PACKAGE',[]): if 'PINOUT' in self.features[mcu]: self.features[mcu]['PINOUT'][package]=pin_data else: self.features[mcu]['PINOUT'] = {package:pin_data} return self.pin_data def unify_names(self): unknown_names = {} for mc, features in self.features.items(): unknown_names[mc] = [] mc_features = self.features[mc].copy() mc_features = {k.upper(): v for k, v in mc_features.items()} for feature_name, features_value in features.items(): feature_name = feature_name.upper() if features_value: if self.config_name in self.config['unify']: unify_list = self.config['unify'][self.config_name] # type: Dict[str,str] unify_list = {k.upper(): v.upper() for k, v in unify_list.items()} known = True if feature_name not in unify_list: if feature_name not in unify_list.values(): known = False if feature_name not in unknown_names: unknown_names[mc].append(feature_name) if known: new_name = unify_list.get(feature_name, feature_name).upper() # in case name is already unified values = mc_features.pop(feature_name) new_name, values = convert_type(new_name, values) new_name = new_name.upper() if new_name in mc_features: mc_features[new_name] = self.merge_features(mc_features[new_name], values) else: mc_features[new_name] = values else: new_name = feature_name # in case name is already unified values = mc_features.pop(feature_name) new_name, values = convert_type(new_name, values) mc_features[new_name.upper()] = values else: unknown_names[mc].append(feature_name) self.features[mc] = 
mc_features for mc, features in unknown_names.items(): unknown_names = list(set(features)) if unknown_names: print('List of unknown features for', mc) print('Add correction if name is mangled') print('Or add unify for this feature') for unknown_feature in unknown_names: print('\t', unknown_feature) print('=' * 20) print() @staticmethod def merge_features(old, new): return merge(old, new) if __name__ == '__main__': datasheet = DataSheet(r"D:\PYTHON\py_pdf_stm\datasheets\stm32L\STM32L476.pdf") feature_extractor = FeatureListExtractor('STM32L476', datasheet, {}) feature_extractor.process() pprint(feature_extractor.features)
[ "med45c@gmail.com" ]
med45c@gmail.com
857fd7d31a75186a8008fd39c22ccda0b6e7a96d
3cb1bcb411d4a05c3ce8b276d4a65cecaf3e0f6a
/starline/publisher.py
ae3cd10f628a696ab4a30907913c22f0e1c6a568
[]
no_license
setazer/starline
4357dbf70d43572924d2307c81ff027c3543c259
c5e06e1e5b0227daa0fe26335c7ee05038bb6f26
refs/heads/master
2023-06-22T01:59:30.152240
2021-07-21T14:22:51
2021-07-21T14:22:51
378,434,618
0
0
null
null
null
null
UTF-8
Python
false
false
1,057
py
from channels import Channel
from interfaces import MessageInterface
from model import TelegramMessage, QueueItem
from queue_providers import QueueProvider
from storage_providers import StorageProvider


class Publisher:
    def __init__(self, queue_provider: QueueProvider, history_provider: StorageProvider,
                 channels: list[Channel], message_interface: MessageInterface):
        self.queue = queue_provider
        self.history = history_provider
        self.channels = channels
        self.output = message_interface

    def publish(self):
        queue_item: QueueItem = self.queue.get_item()
        queue_item.lock = True
        post = queue_item.post
        results = [channel.publish(post) for channel in self.channels if channel.enabled]
        if any(map(lambda r: r.success, results)):
            self.history.write(post)
            self.queue.remove(queue_item)
        else:
            # Russian message text: "Не удалось запостить" = "Failed to post"
            self.output.send_message(TelegramMessage(msg=f'Не удалось запостить {post}'))
            queue_item.lock = False
[ "we.are@setazer.us" ]
we.are@setazer.us
694644c5e927145b981cd47f470968232ae22de9
6c3f8a0f30759b792859f010e23154b45d429ed2
/prototypes/microservices/search_client.py
c046626c1965cffe7a9024ea1d32abeb06223a7b
[ "Apache-2.0" ]
permissive
maxhutch/forge
2acb2ec8598ea097b01a1c822357337eeccd1457
8e3521983b02944bf5fa57ae3ca5b3d88eb8f932
refs/heads/master
2021-06-27T16:19:09.351367
2017-09-11T15:08:24
2017-09-11T15:08:24
103,566,359
0
0
null
2017-09-14T18:16:39
2017-09-14T18:16:38
null
UTF-8
Python
false
false
77
py
/Users/jonathongaff/MDF/mdf-harvesters/mdf_indexers/ingester/search_client.py
[ "jgaff@uchicago.edu" ]
jgaff@uchicago.edu
549c77af99c9fb9e7af4ac9d3708ade1b4dbe720
3d35711600253ceda2601f61afaaddbebb0ec507
/Finite_Polygonal_Parametrized/V2-PointForce/RunNoDiag.py
010e97c3b13a86aa81bb93a4f6e1520e722dcc74
[]
no_license
matheuscfernandes/hexactinellidsponge
ca4b7cd6d1bd7d4942bba01305ebcfb0b65e977c
da9bbc6a72ee2e050f9a6a454d775a3b63e4ae92
refs/heads/master
2023-01-12T21:20:36.565182
2017-10-26T17:56:30
2017-10-26T17:56:30
92,839,896
0
0
null
null
null
null
UTF-8
Python
false
false
393
py
import numpy as np

oneDiag = False
twoDiag = False
fullDiag = False

execfile('AnalysisV1C.py')

for NUMBEROFSIDES in xrange(3, 21):
    for NUMBEROFSYSTEMSPERSIDE in xrange(1, 6):
        print "Running Job: ", (NUMBEROFSIDES, NUMBEROFSYSTEMSPERSIDE)
        FileWrite = open('NoDiag_Output.txt', 'a+')
        RunSimulation(NUMBEROFSIDES, 12, NUMBEROFSYSTEMSPERSIDE, FileWrite)
        FileWrite.close()
[ "matheuscfernandes@gmail.com" ]
matheuscfernandes@gmail.com
d47b3f7b60759dc497f19df286cef74f591ed67e
cb227afa841c0e2e535b6f19d70e870cfba77b47
/mangatools/config.py
1446e7d95f59229b97b26e916a220f08ca763baf
[]
no_license
cjhang/ENLR
8d1db368f2e12fbce2be54d2c2283753782dca3a
44ec3c4a3144861aa5f7a095a54fdd960cdae06c
refs/heads/master
2021-06-16T16:13:38.463704
2021-03-11T09:18:23
2021-03-11T09:18:23
178,145,347
2
2
null
null
null
null
UTF-8
Python
false
false
606
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os

ESP = 1e-8  # the error tolerance

release = 'MPL-8'
print("Data release: {}, configure: mangatools/config.py".format(release))

# Data release
if release == 'MPL-7':
    DRP_VERSION = 'v2_4_3'  # the default drp version
    DAP_VERSION = '2.2.1'
elif release == 'MPL-8':
    # MPL-8
    DRP_VERSION = 'v2_5_3'
    DAP_VERSION = '2.3.0'

PRODOCTS = 'HYB10'

## Data directory; you can directly change the specific path after importing this module
SAS = os.getenv('SAS_BASE_DIR', default=os.path.expanduser('~')+'/SAS')
print("Global SAS directory is {0}".format(SAS))
[ "chenjianhang2010@gmail.com" ]
chenjianhang2010@gmail.com
7e6634deadde5151e3032fc0bace2907e54744e0
d42b771f64bc2185a8c0dca0f5bcfa5a2e13c5ed
/_8percent/apps.py
c395ac4faf734c040815c756ab4daaf0c83650a0
[]
no_license
bgy1060/Daily_Project
4b38de59c09f5e3f82211a9860e1f32a8ef46b37
bcc955bddd9941f2bc54f7577c26c1ddc6b36a48
refs/heads/main
2023-05-15T17:26:56.858438
2021-06-17T05:59:10
2021-06-17T05:59:10
353,864,798
1
1
null
null
null
null
UTF-8
Python
false
false
93
py
from django.apps import AppConfig


class _8PercentConfig(AppConfig):
    name = '_8percent'
[ "40761315+bgy1060@users.noreply.github.com" ]
40761315+bgy1060@users.noreply.github.com
7ac1f99256fe5e01d0138af7de5f49cb96909e41
33702a58845528e0119f453e3d3e2a245fba64e2
/FCNClsSegModel/eval_no_mmd.py
dc256a3272e44ecf8779579f4a8a5d4bd8f2c129
[ "MIT" ]
permissive
PengyiZhang/DRR4Covid
cb3ea2f6a8178eaebf8bf6b1f9ada293ca90491b
653e656620ffba6fff2aab7263fe6036301adab8
refs/heads/master
2023-01-02T16:29:58.421518
2020-10-27T13:12:14
2020-10-27T13:12:14
289,850,411
9
2
null
null
null
null
UTF-8
Python
false
false
22,975
py
# coding: utf-8 """ """ import torch import torch.optim as optim import torch.nn as nn import os import time import copy import numpy as np import torch.nn.functional as F from tensorboardX import SummaryWriter from sklearn.metrics import confusion_matrix from sklearn.metrics import roc_auc_score, f1_score from visual_confuse_matrix import make_confusion_matrix from dataset import genDataset, genExtraForEvalDataset from model import SegClsModule from sklearn.metrics import cohen_kappa_score import argparse import logging import os import sys import torchvision.transforms as transforms import cv2 import numpy as np import math import random import yaml from pathlib import Path from loss import Weighted_Jaccard_loss from utils import dice_coef, probs2one_hot def setup_seed(seed): torch.manual_seed(seed) torch.cuda.manual_seed_all(seed) np.random.seed(seed) random.seed(seed) torch.backends.cudnn.deterministic = True def setup_logger(name, save_dir, distributed_rank, filename="log.txt"): """terminal and log file name: application information save_dir: log dir distributed_rank: only host 0 can generate log filename: log file name """ logger = logging.getLogger(name) logger.setLevel(logging.DEBUG) # don't log results for the non-master process if distributed_rank > 0: return logger ch = logging.StreamHandler(stream=sys.stdout) ch.setLevel(logging.DEBUG) formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s") ch.setFormatter(formatter) logger.addHandler(ch) if save_dir: fh = logging.FileHandler(os.path.join(save_dir, filename)) fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) logger.addHandler(fh) return logger def set_visible_gpu(gpu_idex): """ to control which gpu is visible for CUDA user set_visible_gpu(1) print(os.environ["CUDA_DEVICE_ORDER"]) print(os.environ["CUDA_VISIBLE_DEVICES"]) """ os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" os.environ["CUDA_VISIBLE_DEVICES"] = "{0}".format(gpu_idex) def get_results(val_labels, val_outs, val_probs, save_cf_png_dir, save_metric_dir): # first for probs AUC_score = roc_auc_score(val_labels, val_probs) F1_score = f1_score(val_labels, val_outs) CM = confusion_matrix(val_labels, val_outs) labels = ['True Neg','False Pos','False Neg','True Pos'] categories = ['0', '1'] make_confusion_matrix(CM, group_names=labels, categories=categories, cmap='Blues',save_dir=save_cf_png_dir) #make_confusion_matrix(CM, figsize=(8,6), cbar=False) TN = CM[0][0] FN = CM[1][0] TP = CM[1][1] FP = CM[0][1] # Sensitivity, hit rate, recall, or true positive rate TPR = TP/(TP+FN) # Specificity or true negative rate TNR = TN/(TN+FP) # Precision or positive predictive value PPV = TP/(TP+FP) # Negative predictive value NPV = TN/(TN+FN) # Fall out or false positive rate FPR = FP/(FP+TN) # False negative rate FNR = FN/(TP+FN) # False discovery rate FDR = FP/(TP+FP) # Overall accuracy ACC = (TP+TN)/(TP+FP+FN+TN) result_str = "Sensitivity=%.3f, Specificity=%.3f, PPV=%.3f, NPV=%.3f, FPR=%.3f, FNR=%.3f, FDR=%.3f, ACC=%.3f, AUC=%.3f, F1_score=%.3f\n" % (TPR, TNR, PPV, NPV, FPR, FNR, FDR, ACC, AUC_score, F1_score) save_dir = save_metric_dir with open(save_dir, "a+") as f: f.writelines([result_str]) return result_str def eval_model(model, dataloaders, log_dir="./log/", logger=None, opt=None): since = time.time() if False:#opt.do_seg: # eval lung segmentation logger.info("-"*8+"eval lung segmentation"+"-"*8) model.eval() all_dices = [] all_dices_au = [] for batch_idx, (inputs, labels) in enumerate(dataloaders["tgt_lung_seg_val"], 0): annotation = 
dataloaders["tgt_lung_seg_val"].dataset.annotations[batch_idx] img_dir = annotation.strip().split(',')[0] img_name = Path(img_dir).name inputs = inputs.to(device) # adjust labels labels[labels==opt.xray_mask_value_dict["lung"]] = 1 labels = labels[:,-1].to(device) labels = torch.stack([labels == c for c in range(2)], dim=1) with torch.set_grad_enabled(False): if opt.use_aux: _, _, seg_logits, _, seg_logits_au = model(inputs) else: _, _, seg_logits, _, _ = model(inputs) seg_probs = torch.softmax(seg_logits, dim=1) predicted_mask = probs2one_hot(seg_probs.detach()) # change the infection to Lung predicted_mask_lung = predicted_mask[:,:-1] predicted_mask_lung[:,-1] += predicted_mask[:,-1] dices = dice_coef(predicted_mask_lung, labels.detach().type_as(predicted_mask)).cpu().numpy() all_dices.append(dices) # [(B,C)] predicted_mask_lung = predicted_mask_lung.squeeze().cpu().numpy() # 3xwxh mask_inone = (np.zeros_like(predicted_mask_lung[0])+predicted_mask_lung[1]*255).astype(np.uint8) # save dir: save_dir = os.path.join(opt.logs, "tgt_lung_seg_val", "eval") # if not os.path.exists(save_dir): os.makedirs(save_dir) cv2.imwrite(os.path.join(save_dir, img_name), mask_inone) ###################################################au if opt.use_aux: seg_probs_au = torch.softmax(seg_logits_au, dim=1) predicted_mask_au = probs2one_hot(seg_probs_au.detach()) # change the infection to Lung predicted_mask_lung_au = predicted_mask_au[:,:-1] predicted_mask_lung_au[:,-1] += predicted_mask_au[:,-1] dices_au = dice_coef(predicted_mask_lung_au, labels.detach().type_as(predicted_mask_au)).cpu().numpy() all_dices_au.append(dices_au) # [(B,C)] predicted_mask_lung_au = predicted_mask_lung_au.squeeze().cpu().numpy() # 3xwxh mask_inone_au = (np.zeros_like(predicted_mask_lung_au[0])+predicted_mask_lung_au[1]*255).astype(np.uint8) # save dir: save_dir_au = os.path.join(opt.logs, "tgt_lung_seg_val_au", "eval") # if not os.path.exists(save_dir_au): os.makedirs(save_dir_au) cv2.imwrite(os.path.join(save_dir_au, img_name), mask_inone_au) avg_dice = np.mean(np.concatenate(all_dices, 0), 0) # logger.info("tgt_lung_seg_val:[%d/%d],dice0:%.03f,dice1:%.03f,dice:%.03f" % (batch_idx, len(dataloaders['tgt_lung_seg_val'].dataset)//inputs.shape[0], avg_dice[0], avg_dice[1], np.mean(np.concatenate(all_dices, 0)))) if opt.use_aux: avg_dice_au = np.mean(np.concatenate(all_dices_au, 0), 0) # logger.info("tgt_lung_seg_val_au:[%d/%d],dice0:%.03f,dice1:%.03f,dice:%.03f" % (batch_idx, len(dataloaders['tgt_lung_seg_val'].dataset)//inputs.shape[0], avg_dice_au[0], avg_dice_au[1], np.mean(np.concatenate(all_dices_au, 0)))) if True: # eval infection segmentation and cls logger.info("-"*8+"eval infection cls"+"-"*8) model.eval() val_gt = [] val_cls_pred = [] val_cls_probs = [] # for VOC val_seg_pred = [] val_seg_probs = [] # for VOC val_seg_probs_au = [] val_seg_pred_au = [] # for VOC for batch_idx, (inputs, labels) in enumerate(dataloaders["tgt_cls_val"], 0): inputs = inputs.to(device) # adjust label val_gt.append(labels.cpu().data.numpy()) with torch.set_grad_enabled(False): annotation = dataloaders["tgt_cls_val"].dataset.annotations[batch_idx] img_dir = annotation.strip().split(',')[0] img_name = Path(img_dir).name if opt.use_aux: cls_logits, _, seg_logits, _, seg_logits_au = model(inputs) else: cls_logits, _, seg_logits, _, _ = model(inputs) if opt.do_seg: seg_probs = torch.softmax(seg_logits, dim=1) val_seg_probs.append(seg_probs[:,-1:].detach().cpu().view(seg_probs.shape[0], 1, -1).max(-1)[0]) predicted_mask_onehot = 
probs2one_hot(seg_probs.detach()) # for save predicted_mask = predicted_mask_onehot.squeeze().cpu().numpy() # 3xwxh mask_inone = (np.zeros_like(predicted_mask[0])+predicted_mask[1]*128+predicted_mask[2]*255).astype(np.uint8) # save dir: save_dir = os.path.join(opt.logs, "tgt_cls_val", "eval") # if not os.path.exists(save_dir): os.makedirs(save_dir) cv2.imwrite(os.path.join(save_dir, img_name), mask_inone) # seg2cls preds_cls_seg = (predicted_mask_onehot[:,-1:].sum(-1).sum(-1) > 0).cpu().numpy().astype(np.uint8) val_seg_pred.append(preds_cls_seg) if opt.do_seg and opt.use_aux: seg_probs_au = torch.softmax(seg_logits_au, dim=1) val_seg_probs_au.append(seg_probs_au[:,-1:].detach().cpu().view(seg_probs_au.shape[0], 1, -1).max(-1)[0]) predicted_mask_onehot_au = probs2one_hot(seg_probs_au.detach()) # for save predicted_mask_au = predicted_mask_onehot_au.squeeze().cpu().numpy() # 3xwxh mask_inone_au = (np.zeros_like(predicted_mask_au[0])+predicted_mask_au[1]*128+predicted_mask_au[2]*255).astype(np.uint8) # save dir: save_dir_au = os.path.join(opt.logs, "tgt_cls_val_au", "eval") # if not os.path.exists(save_dir_au): os.makedirs(save_dir_au) cv2.imwrite(os.path.join(save_dir_au, img_name), mask_inone_au) # seg2cls preds_cls_seg_au = (predicted_mask_onehot_au[:,-1:].sum(-1).sum(-1) > 0).cpu().numpy().astype(np.uint8) val_seg_pred_au.append(preds_cls_seg_au) # cls #print(cls_logits) if opt.do_cls: probs_cls = torch.softmax(cls_logits, dim=1) val_cls_probs.append(probs_cls[...,1:].detach().cpu().numpy()) preds_cls = (probs_cls[...,1:] > 0.5).type(torch.long) val_cls_pred.append(preds_cls.cpu().data.numpy()) if not os.path.exists(os.path.join(opt.logs, "cf")): os.makedirs(os.path.join(opt.logs, "cf")) val_gt = np.concatenate(val_gt, axis=0) if opt.do_cls: val_cls_pred = np.concatenate(val_cls_pred, axis=0) val_cls_probs = np.concatenate(val_cls_probs, axis=0) save_cf_png_dir = os.path.join(opt.logs, "cf", "eval_cls_cf.png") save_metric_dir = os.path.join(opt.logs, "eval_metric_cls.txt") result_str = get_results(val_gt, val_cls_pred, val_cls_probs, save_cf_png_dir, save_metric_dir) logger.info("tgt_cls_val:[cls]: %s" % (result_str)) if opt.do_seg: val_seg_pred = np.concatenate(val_seg_pred, axis=0) val_seg_probs = np.concatenate(val_seg_probs, axis=0) # seg2cls save_cf_png_dir = os.path.join(opt.logs, "cf", "eval_seg_cf.png") save_metric_dir = os.path.join(opt.logs, "eval_metric_seg.txt") result_str = get_results(val_gt, val_seg_pred, val_seg_probs, save_cf_png_dir, save_metric_dir) logger.info("tgt_seg_val:[seg2cls]: %s" % (result_str)) if opt.do_seg and opt.use_aux: val_seg_pred_au = np.concatenate(val_seg_pred_au, axis=0) val_seg_probs_au = np.concatenate(val_seg_probs_au, axis=0) # seg2cls save_cf_png_dir_au = os.path.join(opt.logs, "cf", "eval_seg_au_cf.png") save_metric_dir_au = os.path.join(opt.logs, "eval_metric_seg_au.txt") result_str_au = get_results(val_gt, val_seg_pred_au, val_seg_probs_au, save_cf_png_dir_au, save_metric_dir_au) logger.info("tgt_seg_au_val:[seg2cls]: %s" % (result_str_au)) time_elapsed = time.time() - since logger.info("Eval complete in {:.0f}m {:.0f}s".format(time_elapsed // 60, time_elapsed % 60)) def extra_eval_model(model, dataloaders, log_dir="./log/", logger=None, opt=None): since = time.time() if True: # eval infection segmentation and cls logger.info("-"*8+"extra eval infection cls"+"-"*8) model.eval() val_gt = [] val_cls_pred = [] val_cls_probs = [] # for VOC val_seg_pred = [] val_seg_probs = [] # for VOC val_seg_probs_au = [] val_seg_pred_au = [] # for VOC for 
batch_idx, (inputs, labels) in enumerate(dataloaders["tgt_cls_extra_val"], 0): inputs = inputs.to(device) # adjust label val_gt.append(labels.cpu().data.numpy()) with torch.set_grad_enabled(False): annotation = dataloaders["tgt_cls_extra_val"].dataset.annotations[batch_idx] img_dir = annotation.strip().split(',')[0] img_name = Path(img_dir).name if opt.use_aux: cls_logits, _, seg_logits, _, seg_logits_au = model(inputs) else: cls_logits, _, seg_logits, _, _ = model(inputs) if opt.do_seg: seg_probs = torch.softmax(seg_logits, dim=1) val_seg_probs.append(seg_probs[:,-1:].detach().cpu().view(seg_probs.shape[0], 1, -1).max(-1)[0]) predicted_mask_onehot = probs2one_hot(seg_probs.detach()) # for save predicted_mask = predicted_mask_onehot.squeeze().cpu().numpy() # 3xwxh mask_inone = (np.zeros_like(predicted_mask[0])+predicted_mask[1]*128+predicted_mask[2]*255).astype(np.uint8) # save dir: save_dir = os.path.join(opt.logs, "tgt_cls_extra_val", "eval") # if not os.path.exists(save_dir): os.makedirs(save_dir) cv2.imwrite(os.path.join(save_dir, img_name), mask_inone) # seg2cls preds_cls_seg = (predicted_mask_onehot[:,-1:].sum(-1).sum(-1) > 0).cpu().numpy().astype(np.uint8) val_seg_pred.append(preds_cls_seg) if opt.do_seg and opt.use_aux: seg_probs_au = torch.softmax(seg_logits_au, dim=1) val_seg_probs_au.append(seg_probs_au[:,-1:].detach().cpu().view(seg_probs_au.shape[0], 1, -1).max(-1)[0]) predicted_mask_onehot_au = probs2one_hot(seg_probs_au.detach()) # for save predicted_mask_au = predicted_mask_onehot_au.squeeze().cpu().numpy() # 3xwxh mask_inone_au = (np.zeros_like(predicted_mask_au[0])+predicted_mask_au[1]*128+predicted_mask_au[2]*255).astype(np.uint8) # save dir: save_dir_au = os.path.join(opt.logs, "tgt_cls_extra_val_au", "eval") # if not os.path.exists(save_dir_au): os.makedirs(save_dir_au) cv2.imwrite(os.path.join(save_dir_au, img_name), mask_inone_au) # seg2cls preds_cls_seg_au = (predicted_mask_onehot_au[:,-1:].sum(-1).sum(-1) > 0).cpu().numpy().astype(np.uint8) val_seg_pred_au.append(preds_cls_seg_au) # cls #print(cls_logits) if opt.do_cls: probs_cls = torch.softmax(cls_logits, dim=1) val_cls_probs.append(probs_cls[...,1:].detach().cpu().numpy()) preds_cls = (probs_cls[...,1:] > 0.5).type(torch.long) val_cls_pred.append(preds_cls.cpu().data.numpy()) if not os.path.exists(os.path.join(opt.logs, "cf")): os.makedirs(os.path.join(opt.logs, "cf")) val_gt = np.concatenate(val_gt, axis=0) if opt.do_cls: val_cls_pred = np.concatenate(val_cls_pred, axis=0) val_cls_probs = np.concatenate(val_cls_probs, axis=0) save_cf_png_dir = os.path.join(opt.logs, "cf", "extra_eval_cls_cf.png") save_metric_dir = os.path.join(opt.logs, "extra_eval_metric_cls.txt") result_str = get_results(val_gt, val_cls_pred, val_cls_probs, save_cf_png_dir, save_metric_dir) logger.info("tgt_cls_extra_val:[cls]: %s" % (result_str)) if opt.do_seg: val_seg_pred = np.concatenate(val_seg_pred, axis=0) val_seg_probs = np.concatenate(val_seg_probs, axis=0) # seg2cls save_cf_png_dir = os.path.join(opt.logs, "cf", "extra_eval_seg_cf.png") save_metric_dir = os.path.join(opt.logs, "extra_eval_metric_seg.txt") result_str = get_results(val_gt, val_seg_pred, val_seg_probs, save_cf_png_dir, save_metric_dir) logger.info("tgt_seg_extra_val:[seg2cls]: %s" % (result_str)) if opt.do_seg and opt.use_aux: val_seg_pred_au = np.concatenate(val_seg_pred_au, axis=0) val_seg_probs_au = np.concatenate(val_seg_probs_au, axis=0) # seg2cls save_cf_png_dir_au = os.path.join(opt.logs, "cf", "extra_eval_seg_au_cf.png") save_metric_dir_au = 
os.path.join(opt.logs, "extra_eval_metric_seg_au.txt") result_str_au = get_results(val_gt, val_seg_pred_au, val_seg_probs_au, save_cf_png_dir_au, save_metric_dir_au) logger.info("tgt_seg_au_extra_val:[seg2cls]: %s" % (result_str_au)) time_elapsed = time.time() - since logger.info("Extra_Eval complete in {:.0f}m {:.0f}s".format(time_elapsed // 60, time_elapsed % 60)) def get_argument(): parser = argparse.ArgumentParser() parser.add_argument('--config', default="./cfgs/experiment.yaml", type=str) parser.add_argument('--setseed', default=2020, type=int) opt = parser.parse_args() with open(opt.config) as f: config = yaml.load(f) for k, v in config['common'].items(): setattr(opt, k, v) # repalce experiment opt.experiment = opt.experiment.replace("only", "seg") opt.seg_augment = True opt.cls_augment = True opt.do_cls_mmd = False opt.do_seg = True opt.do_cls = True opt.do_seg_mmd = False opt.eval_cls_times = 50 opt.eval_times = 50 opt.random_seed = opt.setseed selected_drr_datasets_indexes = np.array(opt.selected_drr_datasets_indexes+opt.selected_drr_datasets_indexes) #print(selected_drr_datasets_indexes) # # [[0, 0, 0], [1, 0, 0], [0, 0, 1], [1, 0, 1]] print(selected_drr_datasets_indexes[-1][-1]) selected_drr_datasets_indexes[2][-1] = 1 selected_drr_datasets_indexes[3][-1] = 1 opt.selected_drr_datasets_indexes = [list(_) for _ in list(selected_drr_datasets_indexes)] opt.logs = f"log/logs_experiment04_r{opt.setseed}" log_dir = "./{}/{}/".format(opt.logs, opt.experiment) if not os.path.exists(log_dir): os.makedirs(log_dir) opt.logs = log_dir return opt if __name__ == "__main__": opt = get_argument() os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" os.environ["CUDA_VISIBLE_DEVICES"] = "{}".format(opt.gpuid) setup_seed(opt.random_seed) assert opt.mode == 12, ("opt.mode is not supported in %s" % __file__) log_dir = opt.logs logger = setup_logger("{}".format(os.path.basename(__file__).split(".")[0]), save_dir=opt.logs, distributed_rank=0, filename="log_eval.txt") logger.info(opt) batch_size = opt.batch_size num_epochs = opt.num_epochs use_pretrained = True device_name = "cuda" if torch.cuda.is_available() else "cpu" device = torch.device(device_name) model_ft = SegClsModule(opt) train_dataset, tgt_cls_train_dataset, tgt_cls_val_dataset, tgt_lung_seg_val_dataset = genDataset(opt) tgt_cls_extra_val_dataset = genExtraForEvalDataset(opt) logger.info("-"*8+"train:"+"-"*8) logger.info(train_dataset.annotations) logger.info("-"*8+"tgt_cls_train:"+"-"*8) logger.info(tgt_cls_train_dataset.annotations) logger.info("-"*8+"tgt_cls_val:"+"-"*8) logger.info(tgt_cls_val_dataset.annotations) logger.info("-"*8+"tgt_cls_extra_val:"+"-"*8) logger.info(tgt_cls_extra_val_dataset.annotations) image_datasets = {'train': train_dataset, 'tgt_cls_train': tgt_cls_train_dataset, 'tgt_cls_val': tgt_cls_val_dataset, 'tgt_cls_extra_val': tgt_cls_extra_val_dataset, "tgt_lung_seg_val": tgt_lung_seg_val_dataset} shuffles = {"train": True,'tgt_cls_train': True, 'tgt_cls_val': False, 'tgt_cls_extra_val': False, "tgt_lung_seg_val": False} batch_sizes_dict = {"train": batch_size,'tgt_cls_train': batch_size, 'tgt_cls_val': 1, 'tgt_cls_extra_val': 1, "tgt_lung_seg_val": 1} drop_lasts = {"train": True,'tgt_cls_train': True, 'tgt_cls_val': False, 'tgt_cls_extra_val': False, "tgt_lung_seg_val": False} number_worker_dict = {"train": 4,'tgt_cls_train': 4, 'tgt_cls_val': 0, 'tgt_cls_extra_val': 0, "tgt_lung_seg_val": 0} dataloaders_dict = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=batch_sizes_dict[x], shuffle=shuffles[x], 
num_workers=number_worker_dict[x], drop_last=drop_lasts[x]) for x in ['train', 'tgt_cls_train', 'tgt_cls_val', 'tgt_cls_extra_val', "tgt_lung_seg_val"]} # Send the model to GPU weight_path = os.path.join(log_dir, "latest.pth") model_ft.load_state_dict(torch.load(weight_path)) model_ft = model_ft.to(device) model_ft.eval() eval_model(model_ft, dataloaders_dict, log_dir=log_dir, logger=logger, opt=opt) extra_eval_model(model_ft, dataloaders_dict, log_dir=log_dir, logger=logger, opt=opt)
[ "zhangpybit@gmail.com" ]
zhangpybit@gmail.com
9a17228ab41e92b8d3007c76daa725861cdd5b61
6b360246db6825cd3cc349e534845d9082ad7906
/motionDetection/server_udp.py
ca28652d9617ca1ba459d1a181a3aac42fbfa586
[]
no_license
Bitil8747/MOGv2-Motion-detect
697d83003e26600b6fd03d03fdd31a3190fce197
b7ce089c042a539158ce4f5a684991f5bdf0f160
refs/heads/main
2023-07-30T19:21:36.828410
2021-09-20T09:54:23
2021-09-20T09:54:23
408,377,509
0
0
null
null
null
null
UTF-8
Python
false
false
214
py
import socket

udp_ip = 'localhost'
udp_port = 7070

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((udp_ip, udp_port))

while True:
    data, addr = sock.recvfrom(1024)
    print(data)
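# --- Usage sketch: a minimal sender that the receive loop above would print,
# using only the standard-library socket API; kept as comments so the script's
# behaviour is unchanged.
#
#   import socket
#
#   client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   client.sendto(b'hello', ('localhost', 7070))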
[ "noreply@github.com" ]
Bitil8747.noreply@github.com
2e02856b80b1efb6e70451bbb2ad42b1e3151417
538ac22016c4c8771e5b13f5e26688e2df72ae31
/CSS/frib-css-xy-diag.py
3aae9e9f743925fb9c05a9d11d01da1bc3988fa2
[]
no_license
cyjwong/ScriptsinProgress
69eab942b33e56d3eb3ef9a9f8712084fff86d30
7847e231bf54c27fc649c5ffdb25391069de7185
refs/heads/master
2020-05-21T16:43:12.813876
2016-09-20T04:39:06
2016-09-20T04:39:06
60,614,955
0
0
null
null
null
null
UTF-8
Python
false
false
58,826
py
# Diagnostics for FRIB front-end simulations using the x-y slice model # Notes: # * Slice model is not intrinsically well adapted to multi-species # simulations so some diagnostics repeat (for clarity) what can be # generated within Warp with other methods. # * Model allows easy generalization to include diagnostic quantities not # in the usual Warp suite. ############################################################################## # Begin Inputs ############################################################################## # Diagnostic Parameters # Diagnostics are grouped into several classes: # - Particle: Snapshot plots of distribution function projections # - Field: Snapshot plots of self fields # - History: History plots on the evolution of moments and particle counts # accumulated as the simulation advances. # --- set max simulation step for diagnostic setup max_diag_step = 1.e10 # --- set history diagnostic and moment accumulations ds_diag = 1.*cm top.nhist = max(1,nint(ds_diag/wxy.ds)) # step interval for histories top.itmomnts[0:3] = [0,max_diag_step,top.nhist] # do loop ranges for moments # and status writes to tty # --- Plot limits for particle phase space plots. If lframe = true (default # false) diagnostics such as ppxxp for x-x' particle phase space will # use these ranges. # max/min x,y plot coordinates (m) # max/min x',y' plot coordinates (rad) #l_diag = r_p l_diag = 75*mm top.xplmax = l_diag top.xplmin = -l_diag top.yplmax = l_diag top.yplmin = -l_diag top.xpplmax = 75.*mr top.xpplmin = -top.xpplmax top.ypplmax = top.xpplmax top.ypplmin = -top.xpplmax # --- Color palette for phase-space plots (comment for default) # Search for .gp suffix files in the Warp scripts directory for possible # choices. Some useful ones include: # earth.gp (default) heat.gp (heat) # gray.gp (gray scale) rainbow.gp (rainbow) #palette("heat.gp") # --- Set a chop factor for particle phase space plots to avoid plotting # too many particles (large storage and features will obscure). Set # for approx 10 K particles per species plotted. chop_fraction = 10.e3/float(top.npmax) # Particle phase space diagnostics. # * The list diag_step_part contains all steps where diagnostics in # diag_part() are made. # * The list can contain repeated elements and need not be ordered. 
diag_part_z = array([ z_launch, d5p1_zs, (d5p1_zs+d5p1_zc)/2, d5p1_zc, (d5p1_zc+d5p1_ze)/2, d5p1_ze, valve_zc + 2*mm, q7t1p1_zc, q7t1_mid_12 + 2*mm, q7t1p2_zc, q7t1_mid_23 + 2*mm, q7t1p3_zc, (q7t2p1_zc + q7t1p3_zc)/2, q7t2p1_zc, (q7t2p1_zc + q7t2p2_zc)/2, q7t2p2_zc, (q7t2p2_zc + q7t2p3_zc)/2, q7t2p3_zc, d5p2_zs, (d5p2_zs+d5p2_zc)/2, d5p2_zc, (d5p2_zc+d5p2_ze)/2, d5p2_ze, z_adv]) diag_part_z_name = [ "Initial Launch", "D5 Dipole #1: z-start", "D5 Dipole #1: 1/4 of dipole length", "D5 Dipole #1: z-Center", "D5 Dipole #1: 3/4 of dipole length", "D5 Dipole #1: z-end", "after gate valve", "1st Q7 ESQ Triplet #1: z-Center", "after slits between Q7 #1 and #2", "1st Q7 ESQ Triplet #2: z-Center", "after slits between Q7 #2 and #3", "1st Q7 ESQ Triplet #3: z-Center", "Four-jaw collimator", "2nd Q7 ESQ Triplet #1: z-Center", "2nd Q7 ESQ Triplet between #1 and #2", "2nd Q7 ESQ Triplet #2: z-Center", "2nd Q7 ESQ Triplet between #2 and #3", "2nd Q7 ESQ Triplet #3: z-Center", "D5 Dipole #2: z-start", "D5 Dipole #2: 1/4 of dipole length", "D5 Dipole #2: z-Center", "D5 Dipole #2: 3/4 of dipole length", "D5 Dipole #2: z-end", "Final position" ] diag_part_step = nint((diag_part_z-z_launch)/wxy.ds) diag_part_z_names = {diag_part_step[i]:diag_part_z_name[i] for i in range(len(diag_part_step))} # Field diagnostics. # * The list diag_step_field containins all steps where # diagnostics in diag_field() are made. # * The list can contain repeated elements and need not be ordered. diag_field_z = array([ z_launch, d5p1_zc, z_adv ]) diag_field_z_name = [ "Initial Launch", "D5 Dipole #1: z-Center", "Final position" ] diag_field_step = nint((diag_field_z-z_launch)/wxy.ds) diag_field_z_names = {diag_field_step[i]:diag_field_z_name[i] for i in range(len(diag_field_step))} # History diagnostics. # * Can be made at intermediate stages of the # run as well as at the end. # * The list diag_step_hist contains all # steps where diagnostics in diag_hsit() are made. # * The list can contain repeated elements and need not be ordered. diag_hist_z = array([z_adv]) #array([gag_col_zs,z_adv]) diag_hist_step = nint((diag_hist_z-z_launch)/wxy.ds) ###################################################################################################### # End Inputs ###################################################################################################### # Diagnostic plot function of [B rho] vs Q/A for species. # * Should work correctly at any point in the simulation while the beam # accelerates. def plt_diag_bro(label=None): if label == None: label = " " brho_min = largepos brho_max = -largepos for ii in sp.keys(): s = sp[ii] js = s.js # weight = sum(s.sw*s.w) # total weight # vbeam = sum( (s.sw*s.w)*s.getvz() )/weight # avg axial velocity gammabeam = 1./sqrt(1.-(vbeam/clight)**2) # gamma from avg axial velocity brho = s.mass*gammabeam*vbeam/s.charge # rigidity # brho_min = min(brho,brho_min) brho_max = max(brho,brho_max) # plt(ii,sp_qovm[ii],brho,tosys=1,color=s.color) # [qovm_min,qovm_max] = [minnd(sp_qovm.values()),maxnd(sp_qovm.values())] qovm_pad = 0.1*(qovm_max - qovm_min) brho_pad = 0.1*(brho_max - brho_min) # limits(qovm_min-qovm_pad,qovm_max+qovm_pad,brho_min-brho_pad,brho_max+brho_pad) ptitles(label,"Q/A","[B rho] [Tesla-m]",) fma() # Potential profile plot diagnostic for potential along x-y axes # * Primarily for initial beam but should work at any point in simulation. 
def diag_plt_phi_ax(xmax=None,label=None): if xmax == None: xmax = max(w3d.xmesh.max(),w3d.ymesh.max()) ixmax = sum(where(w3d.xmesh < xmax, 1, 0)) iymax = sum(where(w3d.ymesh < xmax, 1, 0)) if label == None: label = "Beam Potential at y,x = 0 b,r" # ix_cen = sum(where(w3d.xmesh < 0., 1, 0)) iy_cen = sum(where(w3d.ymesh < 0., 1, 0)) phix = getphi(iy=iy_cen) phiy = getphi(ix=ix_cen) phimin = min(phix[ixmax],phiy[iymax]) # plg(phix,w3d.xmesh/mm) plg(phiy,w3d.ymesh/mm,color="red") ptitles(label,"x,y [mm]","phi [V]", ) limits(-xmax/mm,xmax/mm,phimin,'e') # Augmented History Diagnostics for xy Slice Model # * Some by species, some all species # * Flag variables with prefix hl_ for "history local" # --- History variable accumulation arrays hl_lenhist_max = 10000 # max accumulation points # hl_zbeam = fzeros(hl_lenhist_max) # z of beam at hl_ diagnostic accumulations (redundant with top.hzbeam) # hl_vbeam = fzeros([hl_lenhist_max,top.ns]) # axial beam velocity [m/s] hl_ekin = fzeros([hl_lenhist_max,top.ns]) # axial beam NR kinetic energy [eV] hl_brho = fzeros([hl_lenhist_max,top.ns]) # rigidity [B rho]_js [Tesla-m] # hl_xrms = fzeros([hl_lenhist_max,top.ns]) # rms radius sqrt( <x*x>_js ) hl_yrms = fzeros([hl_lenhist_max,top.ns]) # rms radius sqrt( <y*y>_js ) hl_rrms = fzeros([hl_lenhist_max,top.ns]) # rms radius sqrt( <r*r>_js ) # hl_xrmst = fzeros(hl_lenhist_max) # Total species measures of above hl_yrmst = fzeros(hl_lenhist_max) # hl_rrmst = fzeros(hl_lenhist_max) # # hl_spnum = fzeros([hl_lenhist_max,top.ns]) # number active simulation particles hl_spnumt = fzeros(hl_lenhist_max) # number active simulation particles (all species) # hl_ibeam_p = fzeros([hl_lenhist_max,top.ns]) # beam current (particle) hl_ibeam_e = fzeros([hl_lenhist_max,top.ns]) # beam current (electrical) hl_ibeam_pt = fzeros([hl_lenhist_max]) # total beam current (particle) hl_ibeam_et = fzeros([hl_lenhist_max]) # total beam current (electrical) # hl_lambda_p = fzeros([hl_lenhist_max,top.ns]) # line charge (particle) hl_lambda_e = fzeros([hl_lenhist_max,top.ns]) # line charge (electrical) # #hl_ptheta = fzeros([hl_lenhist_max,top.ns]) # canonical angular momentum <P_theta>_j (nonlinear appl field version) #hl_pth = fzeros([hl_lenhist_max,top.ns]) # <P_theta>_j in emittance units <P_theta>_j/(gamma_j*beta_j*m_j*c) #hl_pthn = fzeros([hl_lenhist_max,top.ns]) # <P_theta>_j in norm emittance units <P_theta>_j/(m_j*c) # #hl_ptheta_l = fzeros([hl_lenhist_max,top.ns]) # Same canonical angular momentum measures with #hl_pth_l = fzeros([hl_lenhist_max,top.ns]) # linear applied magnetic field approximation. 
#hl_pthn_l = fzeros([hl_lenhist_max,top.ns]) # (redundant with above for linear lattice) # hl_lz = fzeros([hl_lenhist_max,top.ns]) # mechanical angular momentum hl_krot = fzeros([hl_lenhist_max,top.ns]) # rotation wavenumber hl_lang = fzeros([hl_lenhist_max,top.ns]) # Larmor rotation angle (from initial zero value) # hl_epsx = fzeros([hl_lenhist_max,top.ns]) # rms x-emittance (usual version) hl_epsy = fzeros([hl_lenhist_max,top.ns]) # rms y-emittance (usual version) # hl_epsxn = fzeros([hl_lenhist_max,top.ns]) # rms normalized x-emittance (usual version) hl_epsyn = fzeros([hl_lenhist_max,top.ns]) # rms normalized y-emittance (usual version) # hl_epsr = fzeros([hl_lenhist_max,top.ns]) # rms radial emittance (envelope model version) hl_epsrn = fzeros([hl_lenhist_max,top.ns]) # rms normalized radial emittance (envelope model version) # hl_epspv = fzeros([hl_lenhist_max,top.ns]) # rms total phase volume emittance (envelope model sense) hl_epspvn = fzeros([hl_lenhist_max,top.ns]) # rms normalized total phase volume emittance (envelope model sense) # hl_temp = fzeros([hl_lenhist_max,top.ns]) # Effective transverse ion temperature measure [eV] # hl_Qperv = fzeros([hl_lenhist_max,top.ns]) # Generalized perveance Q_js for species: note matrix perv # Q_js,s calculable from this and line-charge densities [1] hl_neutf = fzeros([hl_lenhist_max,top.ns]) # Neutralization factor [1] hl_dz = top.nhist*wxy.ds # Axial step size between diagnostic accumulations # ---- Function to Fill Auxillary History Arrays # * Install after step in particle advance cycle @callfromafterstep def diag_hist_hl(): # check step in history accumulation cycle if top.it%top.nhist != 0: return hl_zbeam[top.jhist] = top.zbeam # z location of diagnostic accumulations # accumulate history diagnostics by species weightt_work = 0. xrmst_work = 0. yrmst_work = 0. rrmst_work = 0. 
for ii in sp.keys(): # --- species info and index js s = sp[ii] js = s.js # --- species weight: (real particle per macroparticle)/meter weight = sum(s.sw*s.w) # --- <v_z>_js, gamma_js and [B rho]_js calculated from result vbeam = sum( (s.sw*s.w)*s.getvz() )/weight gammabeam = 1./sqrt(1.-(vbeam/clight)**2) brho = s.mass*gammabeam*vbeam/s.charge hl_vbeam[top.jhist,js] = vbeam hl_brho[top.jhist,js] = brho # # --- species quantities for later use # --- avg_rsq = <r*r>_js r = s.getr() rsq = r*r rsq_wsum = sum( (s.sw*s.w)*rsq ) avg_rsq = rsq_wsum/weight # --- avg_xyp = <x*y'>_js and avg_yxp = <y*x'>_js avg_xyp = sum( (s.sw*s.w)*s.getx()*s.getyp() )/weight avg_yxp = sum( (s.sw*s.w)*s.gety()*s.getxp() )/weight # --- avg_xpy = <x*p_y>_js and avg_ypx = <y*p_x>_js # * Relativistically correct here avg_xpy = s.mass*sum( (s.sw*s.w)*s.getx()*s.getuy() )/weight avg_ypx = s.mass*sum( (s.sw*s.w)*s.gety()*s.getux() )/weight # --- applied field B_z(r=0,z) at z location of beam bz0 = getappliedfields(x=0.,y=0.,z=top.zbeam)[5] # --- Axial kinetic energy [eV], ekin_js, NR calcuation hl_ekin[top.jhist,js] = (0.5*s.mass*sum( (s.sw*s.w)*s.getvz()**2 )/weight)/jperev # s.mass*clight**2*(gammabeam - 1.)/jperev # --- rms x = <x*x>_js xsq_wsum = sum( (s.sw*s.w)*s.getx()**2 ) hl_xrms[top.jhist,js] = sqrt( xsq_wsum/weight ) # --- rms y = <y*y>_js ysq_wsum = sum( (s.sw*s.w)*s.gety()**2 ) hl_yrms[top.jhist,js] = sqrt( ysq_wsum/weight ) # --- rms r = <r*r>_js hl_rrms[top.jhist,js] = sqrt( avg_rsq ) # --- Simulation Particle Number hl_spnum[top.jhist,js] = s.getn() # --- Current, electrical, Ie_js [A] hl_ibeam_e[top.jhist,js] = s.charge*sum( (s.sw*s.w)*s.getvz() ) # slice code weight is particles/meter # --- Current, particle, Ip_js [A] # * Use way to calculate to remove neutralization factor # * Formula as given approx (paraxial) using appropriate weights hl_ibeam_p[top.jhist,js] = s.charge*s.sw*(s.vbeam0/vbeam)*sum( s.getvz() ) # --- line charge Lambda_js hl_lambda_p[top.jhist,js] = hl_ibeam_p[top.jhist,js]/vbeam hl_lambda_e[top.jhist,js] = hl_ibeam_e[top.jhist,js]/vbeam # --- Mechanical angular momentum: <x*y'>_js - <y*x'>_js hl_lz[top.jhist,js] = avg_xyp - avg_yxp # --- Canonical angular momentum <P_theta>_js # Notes: * Uses A_theta via getatheata() consistently with linear/nonlinear elements. #hl_ptheta[top.jhist,js] = avg_xpy - avg_ypx + sum( (s.sw*s.w)*s.charge*r*getatheta(r) )/weight # --- Normalized canonical angular momentum in emittance units. <P_theta>_js/(m_js*c) # * <P_theta>_j/(m_j*c) in envelope model scales as a normalized emittance # and should not vary with acceleration with linear forces. # * This employs the nonlinear definition of P_theta if the lattice is nonlinear ! #hl_pthn[top.jhist,js] = hl_ptheta[top.jhist,js]/(s.mass*clight) # --- Canonical angular momentum of species in emittance units #hl_pth[top.jhist,js] = hl_pthn[top.jhist,js]/(gammabeam*(vbeam/clight)) # --- Canonical angular momentum in linear applied field approx (all 3 versions above) # * These are redundant in linear field lattice # * Use _l for "linear" flag #hl_ptheta_l[top.jhist,js] = avg_xpy - avg_ypx + sum( (s.sw*s.w)*(s.charge*bz0/2.)*avg_rsq )/weight #hl_pthn_l[top.jhist,js] = hl_ptheta_l[top.jhist,js]/(s.mass*clight) #hl_pth_l[top.jhist,js] = hl_pthn_l[top.jhist,js]/(gammabeam*(vbeam/clight)) # --- rms x- and y-emittances: account for factor of 4 diff between Warp rms edge and rms measures hl_epsx[top.jhist,js] = top.hepsx[0,top.jhist,js]/4. hl_epsy[top.jhist,js] = top.hepsy[0,top.jhist,js]/4. 
# --- normalized rms x- and y-emittances: paraxial equivalent version hl_epsxn[top.jhist,js] = (gammabeam*(vbeam/clight))*hl_epsx[top.jhist,js] hl_epsyn[top.jhist,js] = (gammabeam*(vbeam/clight))*hl_epsy[top.jhist,js] # --- rms radial thermal emittance eps_r_js as derived in envelope model: # * Warp accumulation used to extract has a factor of 2 diference from rms envelope model # due to use of an "edge" measure. Note: this is different than the factor of 4 in epsx etc. hl_epsr[top.jhist,js] = top.hepsr[0,top.jhist,js]/2. # --- rms normalized radial thermal emittance epsn_r_js as derived in envelope model hl_epsrn[top.jhist,js] = (gammabeam*(vbeam/clight))*hl_epsr[top.jhist,js] # --- rms total phase volume emittance including radial thermal and canonical angular momentum # contributions based on envelope model intrpretation of total phase-space area. #hl_epspv[top.jhist,js] = sqrt( (hl_epsr[top.jhist,js])**2 + (hl_pth[top.jhist,js])**2 ) # --- rms normalized total phase volume emittance #hl_epspvn[top.jhist,js] = sqrt( (hl_epsrn[top.jhist,js])**2 + (hl_pthn[top.jhist,js])**2 ) # --- ion temperature calculated from emittance [eV] hl_temp[top.jhist,js] = hl_ekin[top.jhist,js]*hl_epsr[top.jhist,js]**2/dvnz(hl_rrms[top.jhist,js]**2) # --- Perveance, NR formula for species # Note: * Define bare ... not accounting for neutralization fractions. # Factor (s.charge/echarge) = Q accounts for charge state with particle line-charge to # get bare (unneutralized) electrical line charge. # * This is Q_js NOT the matrix perveance Q_j,s in the envelope model notes. # * Envelope model Q_js can be obtained from Q_j and line charges lambda_j: no need to save hl_Qperv[top.jhist,js] = s.charge*(s.charge/echarge)*hl_lambda_p[top.jhist,js]/(2.*pi*eps0*s.mass*vbeam**2) # --- Ion rho electron neutralization factor [1] = No space-charge, [0] full space-charge hl_neutf[top.jhist,js] = rho_neut_f(top.zbeam,ii) # --- Rotation wavenumber hl_krot[top.jhist,js] = hl_lz[top.jhist,js]/dvnz(avg_rsq) # --- Larmor Rotation angle: integrate from previous step if top.jhist == 0: hl_lang[0,js] = 0. # initial condition of zero angle else: hl_lang[top.jhist,js] = hl_lang[top.jhist-1,js] + 0.5*hl_dz*(hl_krot[top.jhist-1,js]+hl_krot[top.jhist,js]) # --- total (all species) accumulations weightt_work = weightt_work + weight xrmst_work = xrmst_work + xsq_wsum yrmst_work = yrmst_work + ysq_wsum rrmst_work = rrmst_work + rsq_wsum # --- total number of simulation particles hl_spnumt[top.jhist] = float(sum(hl_spnum[top.jhist,:])) # --- total currents hl_ibeam_pt[top.jhist] = sum(hl_ibeam_p[top.jhist,:]) hl_ibeam_et[top.jhist] = sum(hl_ibeam_e[top.jhist,:]) # --- total species rms measures hl_xrmst[top.jhist] = sqrt( xrmst_work/weightt_work ) hl_yrmst[top.jhist] = sqrt( yrmst_work/weightt_work ) hl_rrmst[top.jhist] = sqrt( rrmst_work/weightt_work ) # Particle Phase-Space Diagnostic Functions # * Make specified plots at location of simulation where diag_part() is called. 
### Make phase space projections of individual species using the syntax "ppxxp(js=js)" instead of "s.ppxxp" ### The latter had trouble with the argument ' slope="auto" ' def diag_part(plt_xy=False,plt_xxp=False,plt_yyp=False,plt_xpyp=False, plt_trace=False, plt_denxy=False, plt_denr=False): print "Making particle diagnostic plots" # try: z_label = diag_part_z_names[top.it] except: z_label = "" # # --- x-y projection if plt_xy: # --- All Species # Caution: js=-1 with density plot will just overlay species contour plots #ppxy(js=-1,lframe=true,chopped=chop_fraction,color='density',ncolor=25, # titles=false,yscale=1./mm,xscale=1./mm) ppxy(js=-1,lframe=true,chopped=chop_fraction,titles=false,yscale=1./mm,xscale=1./mm) ptitles("x-y Phase Space: All Species, z = %5.2f m"%(top.zbeam), "x [mm]","y [mm]",z_label) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] co = s.color lab+= ii + "("+co+"), " js = s.js ppxy(js=js,lframe=true,chopped=chop_fraction,titles=false,yscale=1./mm,xscale=1./mm,color=co) ptitles("x-y Phase Space: "+lab+" z = %5.2f m"%(top.zbeam),"x [mm]","y [mm]",z_label) fma() # --- x-x' projection if plt_xxp: # --- All Species # Caution: js = -1 with density plot will overlay species contour plots #ppxxp(js = -1,lframe=true,chopped=chop_fraction,slope='auto',color='density',ncolor=25, # titles=false,yscale=1./mr,xscale=1./mm) ppxxp(js = -1,lframe=true,chopped=chop_fraction,slope='auto',titles=false,yscale=1./mr,xscale=1./mm) ptitles("x-x' Phase Space: All Species, z = %5.2f m"%(top.zbeam),"x [mm]","x' [mrad]",z_label) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] co = s.color lab+= ii + "("+co+"), " js = s.js ppxxp(js=js,lframe=true,chopped=chop_fraction,slope='auto',titles=false,yscale=1./mr,xscale=1./mm,color=co) ptitles("x-x' Phase Space: "+lab+" z = %5.2f m"%(top.zbeam),"x [mm]","x' [mrad]",z_label) fma() # --- y-y' projection if plt_yyp: # --- All Species # Caution: js=-1 with denisty plot will overlay species contour plots #ppyyp(js=-1,lframe=true,chopped=chop_fraction,slope='auto',color='density',ncolor=25, # titles=false,yscale=1./mr,xscale=1./mm) ppyyp(js=-1,lframe=true,chopped=chop_fraction,slope='auto',color='density',ncolor=25, titles=false,yscale=1./mr,xscale=1./mm) ptitles("y-y' Phase Space: All Species, z = %5.2f m"%(top.zbeam), "y [mm]","y' [mrad]",z_label) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] co = s.color lab+= ii + "("+co+"), " js = s.js ppyyp(js=js,lframe=true,chopped=chop_fraction,slope='auto',titles=false,yscale=1./mr,xscale=1./mm,color=co) ptitles("y-y' Phase Space: "+lab+" z = %5.2f m"%(top.zbeam),"y [mm]","y' [mrad]",z_label) fma() # --- x'-y' projection if plt_xpyp: # --- All Species # Caution: js=-1 with density plot will overlay species countours #ppxpyp(js=-1,lframe=true,chopped=chop_fraction,slope='auto',color='density',ncolor=25, # titles=false,yscale=1./mr,xscale=1./mr) ppxpyp(js=-1,lframe=true,chopped=chop_fraction,slope='auto',titles=false,yscale=1./mr,xscale=1./mr) ptitles("x'-y' Phase Space: All Species, z = %5.2f m"%(top.zbeam),"x' [mrad]","y' [mrad]",z_label) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] co = s.color lab+= ii + "("+co+"), " js = s.js ppxpyp(js=js,lframe=true,chopped=chop_fraction,slope='auto',titles=false,yscale=1./mr,xscale=1./mm,color=co) ptitles("x'-y' Phase Space: "+lab+" z = %5.2f m"%(top.zbeam),"x' [mrad]","y' [mrad]",z_label) fma() # --- x-y, x-x', y-y', x'-y' projections, 4 to a page (trace-space) if plt_trace: # --- All Species 
pptrace(lframe=true,chopped=chop_fraction,slope='auto',color='density',ncolor=25) fma() # --- charge density on x and y axes if plt_denxy: rho_sc = 1. ix_cen = sum(where(w3d.xmesh < 0.,1,0)) iy_cen = sum(where(w3d.ymesh < 0.,1,0)) # --- All Species rho_x = getrho(iy=iy_cen) rho_y = getrho(ix=ix_cen) # plg(rho_x/rho_sc,w3d.xmesh/mm) if w3d.l4symtry: plg(rho_x/rho_sc,-w3d.xmesh/mm) plg(rho_y/rho_sc,w3d.ymesh/mm,color="red") if w3d.l4symtry or w3d.l2symtry: plg(rho_y/rho_sc,-w3d.ymesh/mm,color="red") ptitles("Charge Density: All Species, on x[b], y[r] Axes: z = %5.2f m"%(top.zbeam), "x,y [mm]","Density [arb units]",z_label) fma() # --- Target Species: species.get_density() returns density for ii in sp_target: s = sp[ii] co = s.color den = s.get_density()/cm**3 plg(den[:,iy_cen],w3d.xmesh/mm) if w3d.l4symtry: plg(den[:,iy_cen],-w3d.xmesh/mm) plg(den[ix_cen,:],w3d.ymesh/mm,color="red") if w3d.l4symtry or w3d.l2symtry: plg(den[ix_cen,:],-w3d.ymesh/mm,color="red") ptitles("Density: "+ii+" on x[b], y[r] Axes: z = %5.2f m"%(top.zbeam), "x,y [mm]","Density [#/cm^3]",z_label) fma() # --- charge density on radial mesh if plt_denr: # --- radial mesh reflecting x-y grid structure to illustrate simulation noise nr = nint(sqrt(w3d.nx/(2.*sym_x)*w3d.ny/(2.*sym_y))) rmax = sqrt(w3d.xmmax*w3d.ymmax) dr = rmax/nr rmesh = linspace(0.,rmax,num=nr+1) # sp_list = sp_target #+ ["All"] ns = len(sp_list) # --- density as a function or r on mesh array den = zeros(nr+1) # weightr = zeros(nr+1) count = zeros(nr+1) # --- for all species on mesh for ii in sp.keys(): s = sp[ii] # np = s.getn() rp = s.getr() wp = s.getweights() # deposgrid1d(1,np,rp,wp,nr,weightr,count,0.,rmax) # den[1:nr+1] = weightr[1:nr+1]/(2.*pi*dr*rmesh[1:nr+1]) den[0] = den[1] # set origin by next grid up to remove distraction # plg(den/cm**3, rmesh/mm) # pos axis plg(den/cm**3,-rmesh/mm) # neg axis ptitles("Radial Number Density: All Species, z = %5.2f m"%(top.zbeam),"radius r [mm]","rho [particles/cm**3]",z_label) ir = min(nr,sum(where(den>0,1,0))) # index farthest radial extent of rho in radial mesh assuming no halo rmmax = max(1.2*rmesh[ir],0.01) # set curoff to contain radial density rmmax = cm*nint(rmmax/cm + 0.5) # round up to nearest cm to contain plot denmax = 1.2*maxnd(den) limits(-rmmax/mm,rmmax/mm,0.,denmax/cm**3) fma() # --- for all species (common log scale) for ii in sp.keys(): s = sp[ii] co = s.color # np = s.getn() rp = s.getr() wp = s.getweights() # weightr = zeros(nr+1) # reset for clean accumulation/count with itask = 1 count = zeros(nr+1) deposgrid1d(1,np,rp,wp,nr,weightr,count,0.,rmax) # den[1:nr+1] = weightr[1:nr+1]/(2.*pi*dr*rmesh[1:nr+1]) den[0] = den[1] # set origin by next grid up to remove distraction (origin location high noise) # plg(den/cm**3, rmesh/mm,color=co) plg(den/cm**3,-rmesh/mm,color=co) # ptitles("Radial Number Density: All species, z = %5.2f m"%(top.zbeam),"radius r [mm]","rho [particles/cm**3]",z_label) limits(-rmmax/mm,rmmax/mm,1.e-4*denmax/cm**3,denmax/cm**3) logxy(0,1) # specify log scale on y-axis fma() # --- for target species on mesh for ii in sp_target: s = sp[ii] co = s.color lab = ii + "("+co+"), " # np = s.getn() rp = s.getr() wp = s.getweights() # weightr = zeros(nr+1) # reset for clean accumulation/count with itask = 1 count = zeros(nr+1) deposgrid1d(1,np,rp,wp,nr,weightr,count,0.,rmax) # den[1:nr+1] = weightr[1:nr+1]/(2.*pi*dr*rmesh[1:nr+1]) den[0] = den[1] # set origin by next grid up to remove distraction # plg(den/cm**3, rmesh/mm,color=co) plg(den/cm**3,-rmesh/mm,color=co) ptitles("Radial 
Number Density: "+lab+" z = %5.2f m"%(top.zbeam),"radius r [mm]","rho [particles/cm**3]",z_label) ir = sum(where(den>0,1,0)) # index farthest radial extent of rho in radial mesh assuming no halo rmmax = max(1.2*rmesh[ir],0.01) # set curoff to contain radial density rmmax = cm*nint(rmmax/cm + 0.5) # round up to nearest cm to contain plot denmax = 1.2*maxnd(den) limits(-rmmax/mm,rmmax/mm,0.,denmax/cm**3) fma() # Field Diagnostic Functions # * Make specified plots at location of simulation where diag_field() is called. def diag_field(plt_pa=False,plt_pc=False,plt_pc_xy=False): print "Making field diagnostic plots" # try: z_label = diag_field_z_names[top.it] except: z_label = "" # --- self-field electrostatic potential if plt_pc: pfxy(cond=true,titles=false,yscale=1./mm,xscale=1./mm,iz = 0) ptitles("Self-Field Potential: z = %5.2f"%(top.zbeam), "x [mm]","y [mm]",z_label) fma() # --- self-field electrostatic potential and particles together if plt_pc_xy: # --- All particle species included pfxy(cond=true,titles=false,yscale=1./mm,xscale=1./mm) # Caution: js=-1 with density plot will superimpose species contours #ppxy(js=-1,lframe=true,chopped=chop_fraction,color='density',ncolor=25, # titles=false,yscale=1./mm,xscale=1./mm) ppxy(js=-1,lframe=true,chopped=chop_fraction,titles=false,yscale=1./mm,xscale=1./mm) ptitles("Self-Field Potential: z = %5.2f"%(top.zbeam), "x [mm]","y [mm]",z_label) fma() # --- Target particle species lab = "" pfxy(cond=true,titles=false,yscale=1./mm,xscale=1./mm) for ii in sp_target: s = sp[ii] co = s.color lab+= ii + "("+co+"), " s.ppxy(lframe=true,chopped=chop_fraction,titles=false,yscale=1./mm,xscale=1./mm) s.ppxy(lframe=true,chopped=chop_fraction,titles=false,yscale=1./mm,xscale=1./mm) ptitles("Self-Field Potential: + "+lab+" Particles, z = %5.2f"%(top.zbeam),"x [mm]","y [mm]",z_label) fma() # --- Electrostatic potential on principal axes if plt_pa: diag_plt_phi_ax(label="Beam Potential along y,x = 0 [b,r] at z = %5.2f"%(top.zbeam)) fma() # xrms = max(top.xrms[0,sp['U33'].js],top.xrms[0,sp['U34'].js]) diag_plt_phi_ax(label="Beam Potential along y,x = 0 [b,r] at z = %5.2f"%(top.zbeam),xmax=2.*xrms) fma() # History diagnostics. # * Makes specified history plots from begining of simulation at point called. 
# * Many additional history diagnostics can be added by looking for # relevant moments accumulated in the Warp (see the variable group # "Hist" in top.v for an extensive list of variables that can be # used) and using gist commands to make relevant plots def diag_hist( plt_ekin = False, plt_spnum = False, plt_curr_p = False, plt_curr_e = False, plt_lam_p = False, plt_lam_e = False, plt_lz = False, plt_pth = False, plt_pthn = False, plt_krot = False, plt_lang = False, plt_cen = False, plt_envrms = False, plt_envmax = False, plt_envrmsp = False, plt_emit = False, plt_emitn = False, plt_emitg = False, plt_emitng = False, plt_emitr = False, plt_emitnr = False, plt_emitpv = False, plt_emitpvn = False, plt_temp = False, plt_Qperv = False, plt_neutf = False): print "Making history diagnostic plots" # # --- kinetic energy if plt_ekin: # --- All Species Combined, MeV #hpekin(titles=false,yscale=1.,lhzbeam=true) #ptitles("History: All Species Kinetic Energy","z [m]","MeV", ) #fma() # --- All Species, in keV/u for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color A = s.mass/amu plg(hl_ekin[0:top.jhist+1,js]/(A*kV),hl_zbeam[0:top.jhist+1],color=co) #hpekin(js=js,color=co,titles=false,yscale=1./A,lhzbeam=true) ptitles("History: Kinetic Energy","z [m]","KeV/u", ) fma() # --- Operating species, in keV/u for ii in sort(sp_Operate.keys()): s = sp[ii] js = s.js co = s.color A = s.mass/amu #hpekin(js=js,color=co,titles=false,yscale=1./A,lhzbeam=true) plg(hl_ekin[0:top.jhist+1,js]/(A*kV),hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Operating Species Kinetic Energy","z [m]","KeV/u", ) fma() # --- Support species, in keV/u for ii in sort(sp_Support.keys()): s = sp[ii] js = s.js co = s.color A = s.mass/amu plg(hl_ekin[0:top.jhist+1,js]/(A*kV),hl_zbeam[0:top.jhist+1],color=co) #hpekin(js=js,color=co,titles=false,yscale=1./A,lhzbeam=true) # Was getting wrong answer !! 
ptitles("History: Support Species Kinetic Energy","z [m]","KeV/u", ) fma() # --- By Target Species, in kV/Q # Plot by KV/Q so you can see total potential gain falling through # full bias to check system tuning zi = top.hzbeam[0] zf = top.hzbeam[top.jhist] ekin_t = Bias/kV lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color Q = s.charge_state lab+= ii + "("+co+"), " plg(hl_ekin[0:top.jhist+1,js]/(Q*kV),hl_zbeam[0:top.jhist+1],color=co) #hpekin(js=js,color=co,titles=false,yscale=1./Q,lhzbeam=true) plg(array([ekin_t,ekin_t]),array([zi,zf]),type="dash") ptitles("History: "+lab+"Kinetic Energy","z [m]","KeV/Q", ) limits(zi,zf,0.,1.2*ekin_t) fma() # --- simulation particle number (to check for lost particles) # Comment: tried using hppnum() but was unclear what was being plotted if plt_spnum: # --- All Species Combined plg(hl_spnumt[0:top.jhist+1],hl_zbeam[0:top.jhist+1]) ptitles("History: Live Sim Particle Number (all species)", "z [m]","Particle Number (simulation)", ) fma() # --- All Species Individually for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_spnum[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Live Sim Particle Number (by species)","z [m]","Particle Number (simulation)", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_spnum[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Live Sim Particle Number","z [m]","Particle Number (simulation)", ) fma() # --- current (particle) if plt_curr_p: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_ibeam_p[0:top.jhist+1,js]*1.e6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Particle Current (approx)", "z [m]","Current (microA)", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_ibeam_p[0:top.jhist+1,js]*1.e6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Particle Current (approx)","z [m]","Current (microA)", ) fma() # --- Total plg(hl_ibeam_pt[0:top.jhist+1]*1.e3,hl_zbeam[0:top.jhist+1]) ptitles("History: Total Particle Current (approx)","z [m]","Current (mA)", ) fma() # --- current (electrical) if plt_curr_e: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_ibeam_e[0:top.jhist+1,js]*1.e6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Electrical Current", "z [m]","Current (microA)", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_ibeam_e[0:top.jhist+1,js]*1.e6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Electrical Current","z [m]","Current (microA)", ) fma() # --- Total plg(hl_ibeam_et[0:top.jhist+1]*1.e3,hl_zbeam[0:top.jhist+1]) ptitles("History: Total Electrical Current","z [m]","Current (mA)", ) fma() # --- line charge (particle) if plt_lam_p: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_lambda_p[0:top.jhist+1,js]*10**9,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Particle Line Charge", "z [m]","Line Charge (nC/m)", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_lambda_p[0:top.jhist+1,js]*10**9,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Particle Line Charge","z [m]","Line Charge (nC/m)", ) fma() # --- line charge (electrical) if plt_lam_e: # 
--- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_lambda_e[0:top.jhist+1,js]*10**9,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Electrical Line Charge", "z [m]","Line Charge (nC/m)", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_lambda_e[0:top.jhist+1,js]*10**9,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Electrical Line Charge","z [m]","Line Charge (nC/m)", ) fma() # --- lz mechanical angular momentum if plt_lz: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_lz[0:top.jhist+1,js]*10**6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Mechanical Angular Mom", "z [m]","<xy'>-<yx'> [mm-mrad]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_lz[0:top.jhist+1,js]*10**6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Mechanical Angular Mom","z [m]","<xy'>-<yx'> [mm-mrad]", ) fma() # --- canonical angular momentum <P_theta>_j/(gamma_j*beta_j*m_j*c) in mm-mrad units if plt_pth: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_pth[0:top.jhist+1,js]*10**6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Canonical Angular Mom <Ptheta>/(gamma*beta*m*c)", "z [m]", "Canonical Ang Mom [mm-mrad]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_pth[0:top.jhist+1,js]*10**6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Canonical Angular Mom <Ptheta>/(gamma*beta*m*c)","z [m]", "Canonical Ang Mom [mm-mrad]", ) fma() # --- canonical angular momentum (normalized) <P_theta>_j/(m_j*c) in mm-mrad units if plt_pthn: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_pthn[0:top.jhist+1,js]*10**6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Norm Canonical Angular Mom <Ptheta>/(m*c)", "z [m]", "Canonical Ang Mom [mm-mrad]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_pthn[0:top.jhist+1,js]*10**6,hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Norm Canonical Angular Mom <Ptheta>/(m*c)","z [m]", "Canonical Ang Mom [mm-mrad]", ) fma() # --- effective rotation wavenumber if plt_krot: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_krot[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Effective Rot Wavenumber", "z [m]","krot [rad/m]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_krot[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Effective Rot Wavenumber","z [m]","krot [rad/m]", ) fma() # --- Larmor rotation angle if plt_lang: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg((180./pi)*hl_lang[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Larmor Rot Angle", "z [m]","Rotation [deg]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg((180./pi)*hl_lang[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Larmor Rot Angle","z [m]","Rotation [deg]", ) fma() # --- centroid if plt_cen: # All 
Species Combined, x- and y-plane hpxbar(titles=false,yscale=1./mm,lhzbeam=true) hpybar(titles=false,yscale=1./mm,lhzbeam=true,color="red") ptitles("History: All Species x-,y-Centroid: x[b], y[r]","z [m]","<x>, <y> Centroids [mm]", ) fma() # --- By Target Species, x-plane hpxbar(titles=false,yscale=1./(sqrt(2.)*mm),lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpxbar(js=js,color=co,titles=false,yscale=1./(sqrt(2.)*mm),lhzbeam=true) ptitles("History: "+lab+"x-Centroid","z [m]","<x> [mm]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpxbar(js=js,color=co,titles=false,yscale=1./(sqrt(2.)*mm),lhzbeam=true) ptitles("History: "+lab+"x-Centroid","z [m]","<x> [mm]", ) fma() # --- By Target Species, y-plane hpybar(titles=false,yscale=1./(sqrt(2.)*mm),lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpybar(js=js,color=co,titles=false,yscale=1./(sqrt(2.)*mm),lhzbeam=true) ptitles("History: "+lab+"y-Centroid","z [m]","<y> [mm]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpybar(js=js,color=co,titles=false,yscale=1./(sqrt(2.)*mm),lhzbeam=true) ptitles("History: "+lab+"y-Centroid","z [m]","<y> [mm]", ) fma() # --- rms envelope width if plt_envrms: # --- All Species Combined, x- and y-plane hpenvx(titles=false,yscale=1./(2.*mm),lhzbeam=true) hpenvy(titles=false,yscale=1./(2.*mm),lhzbeam=true,color="red") ptitles("History: All Species RMS Envelope: x[b], y[r]","z [m]","RMS Width [mm]", ) fma() # --- Target Species, x-plane hpenvx(titles=false,yscale=1./(2.*mm),lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpenvx(js=js,color=co,titles=false,yscale=1./(2.*mm),lhzbeam=true) ptitles("History: "+lab+"RMS x-Envelope","z [m]","RMS Width [mm]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpenvx(js=js,color=co,titles=false,yscale=1./(2.*mm),lhzbeam=true) ptitles("History: "+lab+"RMS x-Envelope","z [m]","RMS Width [mm]", ) fma() # --- Target Species, y-plane hpenvy(titles=false,yscale=1./(2.*mm),lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpenvy(js=js,color=co,titles=false,yscale=1./(2.*mm),lhzbeam=true) ptitles("History: "+lab+"RMS y-Envelope","z [m]","RMS Width [mm]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpenvy(js=js,color=co,titles=false,yscale=1./(2.*mm),lhzbeam=true) ptitles("History: "+lab+"RMS y-Envelope","z [m]","RMS Width [mm]", ) fma() # --- max particle envelopes if plt_envmax: # --- x-plane, All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(top.hxmaxp[0:top.jhist+1,js]/mm,top.hzbeam[0:top.jhist+1],color=co) ptitles("History: Species max particle x", "z [m]","Max x [mm]", ) fma() # --- x-plane, Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(top.hxmaxp[0:top.jhist+1,js]/mm,top.hzbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" max particle x","z [m]","Max x [mm]", ) fma() # --- y-plane, All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(top.hymaxp[0:top.jhist+1,js]/mm,top.hzbeam[0:top.jhist+1],color=co) ptitles("History: Species max particle y", "z [m]","Max y [mm]", ) fma() # --- y-plane, Target Species lab = "" for ii in 
sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(top.hymaxp[0:top.jhist+1,js]/mm,top.hzbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" max particle y","z [m]","Max y [mm]", ) fma() # --- rms envelope angle if plt_envrmsp: # --- Target Species, x-plane lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(top.hxxpbar[0,0:top.jhist+1,js]/(top.hxrms[0,0:top.jhist+1,js]*mr),top.hzbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+"RMS x-Envelope Angle","z [m]","RMS Angle [mr]", ) fma() # --- Target Species, y-plane lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(top.hyypbar[0,0:top.jhist+1,js]/(top.hyrms[0,0:top.jhist+1,js]*mr),top.hzbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+"RMS y-Envelope Angle","z [m]","RMS Angle [mr]", ) fma() # --- emittance, unnormalized if plt_emit: # --- All Species Combined, x- and y-plane: Factor 4 in scale to account for Warp edge measure hpepsx(titles=false,yscale=1./(4.*mm*mr),lhzbeam=true) hpepsy(titles=false,yscale=1./(4.*mm*mr),lhzbeam=true,color="red") ptitles("History: All Species RMS x-, y-Emittance: x[b],y[r]","z [m]","Emittance [mm-mr]", ) fma() # --- Target Species, x-plane: Factor 4 in scale to account for Warp edge measure hpepsx(titles=false,yscale=1./(4.*mm*mr),lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsx(js=js,color=co,titles=false,yscale=1./(4.*mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS x-Emittance","z [m]","Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsx(js=js,color=co,titles=false,yscale=1./(mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS x-Emittance","z [m]","Emittance [mm-mr]", ) fma() # --- Target Species, y-plane hpepsy(titles=false,yscale=1./(4.*mm*mr),lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsy(js=js,color=co,titles=false,yscale=1./(4.*mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS y-Emittance","z [m]","Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsy(js=js,color=co,titles=false,yscale=1./(mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS y-Emittance","z [m]","Emittance [mm-mr]", ) fma() # --- emittance, normalized if plt_emitn: # --- All Species Combined, x- and y-plane # ** warning norm emittance scaled mm-mrad by default in Warp ** hpepsnx(titles=false,yscale=1./4.,lhzbeam=true) hpepsny(titles=false,yscale=1./4.,lhzbeam=true,color="red") ptitles("History: All Species Norm RMS x-, y-Emittance: x[b],y[r]","z [m]","Norm Emittance [mm-mr]", ) fma() # --- By Target Species, x-plane hpepsnx(titles=false,yscale=1./4.,lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsnx(js=js,color=co,titles=false,yscale=1./4.,lhzbeam=true) ptitles("History: "+lab+"Norm RMS x-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsnx(js=js,color=co,titles=false,yscale=1./4.,lhzbeam=true) ptitles("History: "+lab+"Norm RMS x-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # --- By Target Species, y-plane hpepsny(titles=false,yscale=1./4.,lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsny(js=js,color=co,titles=false,yscale=1./4.,lhzbeam=true) 
ptitles("History: "+lab+"Norm RMS y-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsny(js=js,color=co,titles=false,yscale=1./4.,lhzbeam=true) ptitles("History: "+lab+"Norm RMS y-Emittance","z [m]","Emittance [mm-mr]", ) fma() # --- emittance, generalized unnormalized if plt_emitg: # --- All Species Combined, g- and h-plane hpepsg(titles=false,yscale=1./(mm*mr),lhzbeam=true) hpepsh(titles=false,yscale=1./(mm*mr),lhzbeam=true,color="red") ptitles("History: All Species RMS g-, h-Emittance: g[b],h[r]","z [m]","Emittance [mm-mr]", ) fma() # --- By Target Species, g-plane hpepsg(titles=false,yscale=1./(mm*mr),lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsg(js=js,color=co,titles=false,yscale=1./(mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS g-Emittance","z [m]","Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsg(js=js,color=co,titles=false,yscale=1./(mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS g-Emittance","z [m]","Emittance [mm-mr]", ) fma() # --- By Target Species, h-plane hpepsh(titles=false,yscale=1./(mm*mr),lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsh(js=js,color=co,titles=false,yscale=1./(mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS h-Emittance","z [m]","Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsh(js=js,color=co,titles=false,yscale=1./(mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS h-Emittance","z [m]","Emittance [mm-mr]", ) fma() # --- emittance, generalized normalized # ** scaled mm-mrad by defualt in Warp ** if plt_emitng: # --- All Species Combined, g- and h-plane hpepsng(titles=false,yscale=1.,lhzbeam=true) hpepsnh(titles=false,yscale=1.,lhzbeam=true,color="red") ptitles("History: All Species RMS Norm g-, h-Emittance: g[b],h[r]","z [m]","Norm Emittance [mm-mr]", ) fma() # --- By Target Species, g-plane hpepsng(titles=false,yscale=1.,lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsng(js=js,color=co,titles=false,yscale=1.,lhzbeam=true) ptitles("History: "+lab+"RMS Norm g-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsng(js=js,color=co,titles=false,yscale=1.,lhzbeam=true) ptitles("History: "+lab+"RMS Norm g-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # --- By Target Species, h-plane hpepsnh(titles=false,yscale=1.,lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsnh(js=js,color=co,titles=false,yscale=1.,lhzbeam=true) ptitles("History: "+lab+"RMS Norm h-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsnh(js=js,color=co,titles=false,yscale=1.,lhzbeam=true) ptitles("History: "+lab+"RMS Norm h-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # --- emittance, generalized radial unnormalized if plt_emitr: # --- All Species Combined hpepsr(titles=false,yscale=1./(2.*mm*mr),lhzbeam=true) ptitles("History: All Species RMS r-Emittance","z [m]","Emittance [mm-mr]", ) fma() # --- By Target Species hpepsr(titles=false,yscale=1./(2.*mm*mr),lhzbeam=true) lab = "" for ii in sp_target: s = 
sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsr(js=js,color=co,titles=false,yscale=1./(2.*mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS r-Emittance","z [m]","Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsr(js=js,color=co,titles=false,yscale=1./(2.*mm*mr),lhzbeam=true) ptitles("History: "+lab+"RMS r-Emittance","z [m]","Emittance [mm-mr]", ) fma() # --- emittance, generalized radial normalized ** warning norm emittance scaled mm-mrad by default ** if plt_emitnr: # --- All Species Combined hpepsnr(titles=false,yscale=1./2.,lhzbeam=true) ptitles("History: All Species Norm RMS r-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # --- By Target Species hpepsnr(titles=false,yscale=1./2.,lhzbeam=true) lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsnr(js=js,color=co,titles=false,yscale=1./2.,lhzbeam=true) ptitles("History: "+lab+"RMS Norm r-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " hpepsnr(js=js,color=co,titles=false,yscale=1./2.,lhzbeam=true) ptitles("History: "+lab+"RMS Norm r-Emittance","z [m]","Norm Emittance [mm-mr]", ) fma() # --- emittance, total phase volume, unnormalized if plt_emitpv: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_epspv[0:top.jhist+1,js]/(mm*mr),hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Total Phase Volume Emittance", "z [m]","Emittance [mm-mrad]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_epspv[0:top.jhist+1,js]/(mm*mr),hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Total Phase Volume Emittance","z [m]","Emittance [mm-mrad]", ) fma() # --- emittance, total phase volume, normalized if plt_emitpvn: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_epspvn[0:top.jhist+1,js]/(mm*mr),hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Total Phase Volume Norm Emittance", "z [m]","Norm Emittance [mm-mrad]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_epspvn[0:top.jhist+1,js]/(mm*mr),hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Total Phase Volume Norm Emittance","z [m]","Norm Emittance [mm-mrad]", ) fma() # --- Effective ion temperature calculated from radial thermal emittance if plt_temp: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_temp[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Transverse Thermal Temperature", "z [m]","Temp [eV]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_temp[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Species Transverse Thermal Temperature","z [m]","Temp [eV]", ) fma() # --- Perveance if plt_Qperv: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_Qperv[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Bare Perveance Q", "z [m]","Perveance [1]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_Qperv[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: 
"+lab+" Species Bare Perveance Q","z [m]","Perveance [1]", ) fma() # --- Neutralization Factor if plt_neutf: # --- All Species Combined for ii in sort(sp.keys()): s = sp[ii] js = s.js co = s.color plg(hl_neutf[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: Species Electron Neutralization Fractions", "z [m]","Fraction [1]", ) fma() # --- Target Species lab = "" for ii in sp_target: s = sp[ii] js = s.js co = s.color lab+= ii + "("+co+"), " plg(hl_neutf[0:top.jhist+1,js],hl_zbeam[0:top.jhist+1],color=co) ptitles("History: "+lab+" Electron Neutralization Factors","z [m]","Fraction [1]", ) fma() # -- Install diagnostics at appropriate intervals after steps # Add options to generate plots desired # -- Install diagnostics at appropriate intervals after steps # Add options to generate plots desired # Function to call diagnostics at a timestep in step control lists def diag_calls(): if top.it in diag_part_step: diag_part(plt_xy=true,plt_xxp=true,plt_yyp=false,plt_xpyp=true, plt_trace=false,plt_denxy=true,plt_denr=true) if top.it in diag_field_step: diag_field(plt_pc=true,plt_pc_xy=true,plt_pa=true) if top.it in diag_hist_step: diag_hist(plt_ekin=true,plt_spnum=true,plt_curr_e=true,plt_curr_p=true,plt_lam_p=true,plt_lam_e=true, plt_lz=true,plt_pth=false,plt_pthn=false,plt_krot=true,plt_lang=true, plt_cen=true,plt_envrms=true,plt_envmax=true,plt_envrmsp=true, plt_emit=true,plt_emitn=true,plt_emitg=true,plt_emitng=true,plt_emitr=true,plt_emitnr=true, plt_emitpv=false,plt_emitpvn=false,plt_temp=true,plt_Qperv=true,plt_neutf=true)
[ "wong@intranet.nscl.msu.edu" ]
wong@intranet.nscl.msu.edu
040ee9c07207435445daa4f38dbab1889c3a18e0
27dd0c926da56d679159423cccc666a23067bedd
/mysite/urls.py
c0acf19e7fd5cae62e23a8728bbb47b59760c297
[]
no_license
ashutosh23r/my-first-blog
ac98ababc351ff122b27dd6d1126f946d4f8bce4
6641556a42c3b7a496bc00b3c7f956cc67e09a3c
refs/heads/master
2020-03-21T08:54:36.881975
2018-06-23T06:26:01
2018-06-23T06:26:01
138,373,036
0
0
null
null
null
null
UTF-8
Python
false
false
838
py
"""mysite URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from django.conf.urls import include urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'', include('blog.urls')), ]
[ "ashutosh23r@gmail.com" ]
ashutosh23r@gmail.com
57d665ccf751648900ac6a8db303fbee5f5019ce
f3d79f0ea8972a9296e7f6315ae6f632754beb61
/geo_google.py
b7b7c871f80aa413cbc9160b3e264f4a86217cca
[]
no_license
dionmartin/Geo-Google
434265e302fe0d267f5e4a34f24ef4f56c253f14
8774c0ab5255b760c6acdaa2b68ec3f28d0ef594
refs/heads/master
2021-01-12T02:17:48.926433
2017-01-10T03:53:25
2017-01-10T03:53:25
78,495,426
0
0
null
null
null
null
UTF-8
Python
false
false
1,142
py
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from itertools import groupby
from datetime import datetime, timedelta

from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.tools import float_is_zero, float_compare, DEFAULT_SERVER_DATETIME_FORMAT
from odoo.tools.misc import formatLang
import odoo.addons.decimal_precision as dp

from geopy.geocoders import Nominatim


class history_detail(models.Model):
    _name = "history.detail"
    _description = "History Detail"

    latitute = fields.Float('Latitute', digits=(16, 5))
    longitude = fields.Float('Longitude', digits=(16, 5))
    address = fields.Char('Address')
    city = fields.Char('City')
    state = fields.Char('State')
    country = fields.Char('Country')

    @api.multi
    def check(self):
        geolocator = Nominatim()
        lat = self.latitute
        longi = self.longitude
        location = geolocator.reverse((lat, longi))
        self.address = location.raw['display_name']
        self.city = location.raw['address']['city']
        self.state = location.raw['address']['state_district']
        self.country = location.raw['address']['country']
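The record above wraps geopy's Nominatim reverse geocoder inside an Odoo model. A standalone sketch of the same lookup, with an arbitrary user_agent string and example coordinates (both assumptions, not part of the original file), might look like:

# Standalone reverse-geocoding sketch using the same geopy calls as check() above.
# 'geo_google_demo' and the coordinates are illustrative assumptions.
from geopy.geocoders import Nominatim

geolocator = Nominatim(user_agent="geo_google_demo")
location = geolocator.reverse((48.8584, 2.2945))
print(location.raw['display_name'])           # full formatted address
print(location.raw['address'].get('city'))    # some keys may be missing for a given point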
[ "d.m.hamonangan@gmail.com" ]
d.m.hamonangan@gmail.com
18a99599843103fa4fbf326fffab1bb55fabd9d9
d854b6c0e241b7c86d27c0a7fde8e64e48f59e52
/test1.py
774746ffa6e263614633bda83a645dd8641e4ebd
[]
no_license
zhoujingwhy/KNN
0776f64df04c574044d38833f8972bc99b68c470
65ee9fe0d8b5160cd0f0821aea38ecd206eb74d0
refs/heads/master
2020-03-13T05:33:58.541114
2018-05-25T10:48:03
2018-05-25T10:48:03
130,986,734
0
0
null
null
null
null
UTF-8
Python
false
false
2,178
py
import numpy as np
import operator

"""
Function: create the data set
Returns:
    group - the data set
    labels - the class labels
"""
def createDataSet():
    group = np.array([[1,101],[5,89],[108,5],[115,8]])
    labels = ['爱情片','爱情片','动作片','动作片']  # 'romance film', 'romance film', 'action film', 'action film'
    return group, labels

"""
Function: kNN classifier
Parameters:
    inX - the data to classify (test sample)
    dataSet - the training data (training set)
    labels - the class labels
    k - kNN parameter: the number of nearest neighbours to use
Returns:
    sortedClassCount[0][0] - the predicted class
"""
def classify0(inX, dataSet, labels, k):
    # numpy shape[0] gives the number of rows of dataSet
    dataSetSize = dataSet.shape[0]
    # tile inX once along columns and dataSetSize times along rows, then subtract dataSet
    diffMat = np.tile(inX, (dataSetSize, 1)) - dataSet
    # square the per-feature differences
    sqDiffMat = diffMat**2
    # sum() adds all elements; sum(0) sums down columns, sum(1) sums across rows
    sqDistances = sqDiffMat.sum(axis=1)
    # take the square root to obtain the Euclidean distances
    distances = sqDistances**0.5
    # indices that sort distances in ascending order
    sortedDistIndices = distances.argsort()
    # dictionary counting how often each class appears among the k nearest points
    classCount = {}
    for i in range(k):
        # class label of the i-th nearest point
        voteIlable = labels[sortedDistIndices[i]]
        # dict.get(key, default=None) returns the value for key, or the default if absent
        # count the occurrences of each class
        classCount[voteIlable] = classCount.get(voteIlable, 0) + 1
    # Python 3 uses items() in place of Python 2's iteritems()
    # key=operator.itemgetter(1) sorts by the dictionary values
    # (itemgetter(0) would sort by the keys); reverse=True gives descending order
    sortedClassCount = sorted(classCount.items(), key=operator.itemgetter(1), reverse=True)
    # return the most frequent class, i.e. the predicted class
    return sortedClassCount[0][0]

if __name__ == '__main__':
    # create the data set
    group, labels = createDataSet()
    test = [101, 20]
    # kNN classification
    test_class = classify0(test, group, labels, 3)
    # print the classification result
    print(test_class)
[ "zhoujingwhy@163.com" ]
zhoujingwhy@163.com
3d3f170c41e7b1ec6d690824f5e9b125aad81b97
8a1b88722fb5a79f837ed29f72e67c349a5adaa0
/GeneticAlgorithms/Trainer.py
695a57f1bc66ca78d209736281282b270fc89cb7
[]
no_license
Lxopato/CC5114
fac9c4418872fab174dc838d7be65132533cbec7
11b294d2e29d439da2a27015297154053921d3c3
refs/heads/master
2021-01-21T05:28:11.142100
2017-12-04T00:13:08
2017-12-04T00:13:08
101,921,650
0
0
null
null
null
null
UTF-8
Python
false
false
1,762
py
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense
from keras.utils.np_utils import to_categorical
from keras.callbacks import EarlyStopping

early_stopper = EarlyStopping(patience=5)


def get_mnist():
    nb_classes = 10
    batch_size = 64
    input_shape = (784,)

    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    x_train = x_train.reshape(60000, 784)
    x_test = x_test.reshape(10000, 784)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_train /= 255
    x_test /= 255

    y_train = to_categorical(y_train, nb_classes)
    y_test = to_categorical(y_test, nb_classes)

    return nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test


def get_model(network, nb_classes, input_shape):
    nb_layers = network['nb_layers']
    nb_neurons = network['nb_neurons']
    activation = network['activation']
    optimizer = network['optimizer']

    model = Sequential()
    for i in range(nb_layers):
        if i == 0:
            model.add(Dense(nb_neurons, activation=activation, input_shape=input_shape))
        else:
            model.add(Dense(nb_neurons, activation=activation))
    model.add(Dense(nb_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    return model


def train_and_score(network):
    nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test = get_mnist()
    model = get_model(network, nb_classes, input_shape)
    model.fit(x_train, y_train,
              batch_size=batch_size,
              epochs=10000,
              verbose=0,
              validation_data=(x_test, y_test),
              callbacks=[early_stopper])
    score = model.evaluate(x_test, y_test, verbose=0)
    return score[1]
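train_and_score() expects a network-description dict with the four keys read inside get_model(); a minimal usage sketch with illustrative hyperparameter values (assumptions, not taken from the original repo) could be:

# Hypothetical hyperparameters; only the four keys below are actually read by get_model().
network = {
    'nb_layers': 2,
    'nb_neurons': 64,
    'activation': 'relu',
    'optimizer': 'adam',
}
accuracy = train_and_score(network)   # returns score[1], the MNIST test accuracy
print(accuracy)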
[ "lpbustoscarrasco@gmail.com" ]
lpbustoscarrasco@gmail.com
5d1d805b29e5d4e0b47198f0a61dcf13a65915ba
e82c73e2590c6138f89c62db9cc327f2efceb95a
/src/Team/TeamTypes/Team.py
22e145bc96b3a550270c37f5e5fb44b0cb34f393
[]
no_license
dstseng/GameOrganizer
849d55443f8bd980d43853c27fc58974c89e0f86
e6987cde564290c4ad204e11e6423ef827e8635e
refs/heads/master
2021-01-20T19:53:39.641319
2016-05-31T01:39:49
2016-05-31T01:39:49
60,049,502
0
0
null
null
null
null
UTF-8
Python
false
false
437
py
__author__="alfaflight" __date__ ="$Apr 9, 2016 10:32:15 PM$" class Team: def __init__(self, List_Registration_teammates): self.__Int_numOfWins = 0 self.__List_Registration_teammates = List_Registration_teammates def addWin(self): self.__Int_numOfWins += 1 def getNumOfWins(self): return self.__Int_numOfWins def getTeammates(self): return List_Registration_teammates
[ "dstseng@gmail.com" ]
dstseng@gmail.com
5544135b104e97df280cc069e6aadaaffa1f1c73
97786534fbbc480ea5ac8953ab85385406a78179
/Bootcamp python 42/bootcamp_python/day01/ex02/vector.py
1a767d1ffe5913ec322bdca73d005d752992cd81
[]
no_license
fvega-tr/Python-begins
bc5ebb1f2c6781e4ba4216833642ee1ca9546f21
a4252c8891e9edf4295a0a9ec52f525688f6d8d2
refs/heads/main
2023-01-12T18:38:00.564067
2020-10-20T23:02:16
2020-10-20T23:02:16
305,835,991
0
0
null
null
null
null
UTF-8
Python
false
false
2,435
py
import sys

class Vector():
    def __init__(self, values):
        if isinstance(values, int):
            self.values = []
            for i in range(values):
                self.values.append(i)
        elif isinstance(values, tuple):
            self.values = []
            for i in range(values[0], values[1]):
                self.values.append(float(i))
        else:
            self.values = values
        if isinstance(values, int) == False:
            self.len = len(values)

    def __add__(self, n):
        res = []
        if isinstance(n, int) == False:
            for i in range(self.len):
                res.append(self.values[i] + n[i])
        else:
            res = [i + n for i in self.values]
        return Vector(res)

    def __radd__(self, n):
        res = []
        if isinstance(n, int) == False:
            for i in range(self.len):
                res.append(self.values[i] + n[i])
        else:
            res = [i + n for i in self.values]
        return Vector(res)

    def __sub__(self, n):
        res = []
        if isinstance(n, int) == False:
            for i in range(self.len):
                res.append(self.values[i] - n[i])
        else:
            res = [i - n for i in self.values]
        return Vector(res)

    def __rsub__(self, n):
        res = []
        if isinstance(n, int) == False:
            for i in range(self.len):
                res.append(self.values[i] - n[i])
        else:
            res = [i - n for i in self.values]
        return Vector(res)

    def __truediv__(self, n):
        if (n == 0):
            sys.exit("Can't divide by 0")
        res = []
        if isinstance(n, int) == False:
            for i in range(self.len):
                res.append(self.values[i] / n[i])
        else:
            res = [i / n for i in self.values]
        return Vector(res)

    def __rtruediv__(self, n):
        if (n == 0):
            sys.exit("Can't divide by 0")
        res = []
        if isinstance(n, int) == False:
            for i in range(self.len):
                res.append(self.values[i] / n[i])
        else:
            res = [i / n for i in self.values]
        return Vector(res)

    def __mul__(self, n):
        res = []
        if isinstance(n, int) == False:
            for i in range(self.len):
                res.append(self.values[i] * n[i])
        else:
            res = [i * n for i in self.values]
        return Vector(res)

    def __rmul__(self, n):
        res = []
        if isinstance(n, int) == False:
            for i in range(self.len):
                res.append(self.values[i] * n[i])
        else:
            res = [i * n for i in self.values]
        return Vector(res)

    def __str__(self):
        text = "Vector " + str(self.values)
        return (text)

    def __repr__(self):
        return "%s(%r)" % (self.__class__, self.__dict__)
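A brief usage sketch of the Vector class above; the values are arbitrary illustrations:

# Illustrative usage only; the numbers are arbitrary.
v = Vector([1.0, 2.0, 3.0])      # from an explicit list
print(v + 2)                     # scalar add      -> Vector [3.0, 4.0, 5.0]
print(v * 2)                     # scalar multiply -> Vector [2.0, 4.0, 6.0]
print(v - [0.5, 0.5, 0.5])       # element-wise with a plain list of the same length
print(Vector((2, 5)))            # from tuple bounds: Vector [2.0, 3.0, 4.0]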
[ "noreply@github.com" ]
fvega-tr.noreply@github.com
1eac1dfad8fee38b34847d58779218ce40d9b312
97dfcf7f675ccad34004536ba8592c8aee8325ad
/premiumbody/asgi.py
c9ee1d3970f5c074dafd44aa27044494f6c96257
[]
no_license
Code-Institute-Submissions/MilestoneProject4-5
bb1045b37e38151f698a82858e18981ec4595558
4732689f01c850c17fb554c938d915da40c5d97e
refs/heads/master
2023-01-20T19:04:33.865082
2020-12-02T03:34:14
2020-12-02T03:34:14
null
0
0
null
null
null
null
UTF-8
Python
false
false
399
py
""" ASGI config for premiumbody project. It exposes the ASGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/ """ import os from django.core.asgi import get_asgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'premiumbody.settings') application = get_asgi_application()
[ "mendesf@hotmail.com" ]
mendesf@hotmail.com
8d192f51b6018615be9691fcdda1b9d3e669bf1d
e60a342f322273d3db5f4ab66f0e1ffffe39de29
/parts/zodiac/pyramid/tests/test_security.py
7d0ab393b6121ff075581d422024548009af502c
[]
no_license
Xoting/GAExotZodiac
6b1b1f5356a4a4732da4c122db0f60b3f08ff6c1
f60b2b77b47f6181752a98399f6724b1cb47ddaf
refs/heads/master
2021-01-15T21:45:20.494358
2014-01-13T15:29:22
2014-01-13T15:29:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
81
py
/home/alex/myenv/zodiac/eggs/pyramid-1.4-py2.7.egg/pyramid/tests/test_security.py
[ "alex.palacioslopez@gmail.com" ]
alex.palacioslopez@gmail.com
2392dccc80dacd9deaedbc341bc0121e8881e64d
855805c1c246a2d05d789d83da3458062f94e23d
/Lab/taller 1/PrimesLessThan.py
fa4622d19d06a962d54ff8cd5e9fded1a9d9ba4b
[]
no_license
Juanp-BF/JuanPMC
47e32abcf83af5bd5fb6fe3b72e4d9d0f1834237
9ab65db4729555c5e243de7ea149f142220b5323
refs/heads/master
2021-08-23T07:01:20.006366
2017-12-04T01:15:46
2017-12-04T01:15:46
107,540,965
0
0
null
null
null
null
UTF-8
Python
false
false
260
py
## Script 2 of the laboratory workshop
import IsPrime as pri

n = int(input("n = "))

def PrimesLessThan(n):
    primos = []
    j = 1
    while(j < n):
        if(pri.IsPrime(j) == 1):
            primos.append(j)
            j = j+1
        else:
            j = j+1
    return primos

print (PrimesLessThan(n))
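The script imports an IsPrime module that is not part of this record; a minimal sketch consistent with the call pri.IsPrime(j) == 1 (assumed to return 1 for primes and 0 otherwise) might be:

# Hypothetical IsPrime.py, assumed interface only: returns 1 if n is prime, else 0.
def IsPrime(n):
    if n < 2:
        return 0
    d = 2
    while d * d <= n:
        if n % d == 0:
            return 0
        d = d + 1
    return 1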
[ "jp.barrero10@unaindes.edu.co" ]
jp.barrero10@unaindes.edu.co
a664e5a4d0fb25c64e03f767dde78f345b5a7f67
65f2846b1ad9deb0cc3c76c38e8ecaedc21d804e
/peek.py
6ff1c48b701b02de1bbc7ea81540b08e48386b60
[]
no_license
tpyle/pyfunctions
95debb8af87198e4f3a0720d7d066f9513b3299e
dcde8dd4cc1485a617eb294e52755aefa2a874eb
refs/heads/master
2020-03-24T05:25:13.916013
2019-06-08T20:23:33
2019-06-08T20:23:33
142,487,455
1
1
null
2019-06-08T20:23:34
2018-07-26T19:52:33
Python
UTF-8
Python
false
false
148
py
# Reads the next line of a file without 'moving' the cursor
def peek(f):
    pos = f.tell()
    line = f.readline()
    f.seek(pos)
    return line
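A short usage sketch of peek(); the file name and contents are illustrative assumptions:

# Illustrative only; 'example.txt' is a hypothetical file.
with open('example.txt', 'w') as f:
    f.write('first line\nsecond line\n')

with open('example.txt') as f:
    print(peek(f))        # 'first line\n' -- the cursor has not moved
    print(f.readline())   # 'first line\n' again, now actually consumed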
[ "thomasp162@gmail.com" ]
thomasp162@gmail.com
481a8d81e0d1fd7a551918ed8765436bcad2be91
bdbc362f1a6584f83220682a722187ca5714438f
/Boredom1_Classes.py
6ca1a4a80cf966434f59b22bf1afd396a987d5f6
[ "MIT" ]
permissive
WillGreen98/University-INTPROG-Python
59e804d8418ec52e1318da8686be792f3b527244
93c4f8227a28e09ece0adcebc0fbe499c4b62753
refs/heads/master
2021-06-02T05:37:00.355704
2018-09-30T20:01:44
2018-09-30T20:01:44
108,696,997
0
1
MIT
2021-04-29T19:18:30
2017-10-29T01:42:38
Python
UTF-8
Python
false
false
2,082
py
import time

subjects = []
isAllowed2Die = True


class Animal:
    isAllowed2Die = True
    isPet = bool

    def __init__(self, genome, classes, bio_def, c_type):
        self.genome = genome
        self.a_class = classes
        self.bio_def = bio_def
        self.type = c_type


class Dog(Animal):
    isPet = True

    def __init__(self, name, breed):
        super().__init__(self, "Canis", "Carnivore", "Dog")
        self.name = name
        self.breed = breed

    def bork(self, d):
        if d == "quiet":
            print("Bork Bork Bork... My name is: {0}".format(self.name))
        elif d == "loud":
            print("BORK BORK BORK... MY NAME IS: {0}".format(self.name))
        elif d == "sassy":
            print("Bork Bork Boooork... My name is: {0}".format(self.name))
        else:
            print("Bork")

    def sit(self, duration):
        t = time.process_time()
        print("I am now sitting, I have been sitting for: {0}".format(time.process_time() - duration))

    def getTheFuckAwayFromMyPizza(self):
        return "DRIBBLES ON FLOOR"


class Person:
    def __init__(self, f_Name, nickName, subject, isAwesome):
        self.fName = f_Name
        self.sName = nickName
        self.subject = subject
        subjects.append(self.subject)
        self._isAwesome = bool(isAwesome)

    def killMyself(self):
        if self._isAwesome:
            ToBkilledOrNotToBKilledThatIsTheQuestion = " is awesome, they are not allowed to be killed."
        else:
            ToBkilledOrNotToBKilledThatIsTheQuestion = " is now dead."
        killed = "{0}{1}".format(self.fName, ToBkilledOrNotToBKilledThatIsTheQuestion)
        return killed


Kewal = Person("Kewal", "Bitch-Boi", ["Maths"], True)
Will = Person("Will", "The Bald Bean", ["Comp Sci"], False)

Crumble = Dog("Crumble", "Wire-Haired Sausage")
Loki = Dog("Loki", "Samoyed")
Rollie = Dog("Rollie", "Sausage")
Thor = Dog("Thor", "Samoyed")


def main():
    print(subjects, "\n")
    print(Kewal.killMyself())
    print(Will.killMyself())


if __name__ == '__main__':
    main()
[ "will.green98@hotmail.com" ]
will.green98@hotmail.com
208a1844a81ead0571afc60c1414be53b9b0f78c
05352c29e844705f02d65526343eea9b486f8bd7
/src/python/pants/backend/python/rules/run_setup_py_test.py
001faa56b7b0e7fd0e62305736c4abe5951844de
[ "Apache-2.0" ]
permissive
DoN-SultaN/pants
af2557de1178faaf73eed0a5a32e8f6fd34d2169
5cb5379003a0674c51f9a53f582cf690eddfaf45
refs/heads/master
2022-10-15T04:18:54.759839
2020-06-13T10:04:21
2020-06-13T10:04:21
272,089,524
1
0
Apache-2.0
2020-06-13T21:36:50
2020-06-13T21:36:49
null
UTF-8
Python
false
false
27,011
py
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

import json
import textwrap
from typing import Iterable, Type

import pytest

from pants.backend.python.python_artifact import PythonArtifact
from pants.backend.python.rules.run_setup_py import (
    AmbiguousOwnerError,
    AncestorInitPyFiles,
    DependencyOwner,
    ExportedTarget,
    ExportedTargetRequirements,
    InvalidEntryPoint,
    InvalidSetupPyArgs,
    NoOwnerError,
    OwnedDependencies,
    OwnedDependency,
    SetupPyChroot,
    SetupPyChrootRequest,
    SetupPySources,
    SetupPySourcesRequest,
    generate_chroot,
    get_ancestor_init_py,
    get_exporting_owner,
    get_owned_dependencies,
    get_requirements,
    get_sources,
    validate_args,
)
from pants.backend.python.target_types import PythonBinary, PythonLibrary, PythonRequirementLibrary
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.core.target_types import Resources
from pants.core.util_rules.determine_source_files import rules as determine_source_files_rules
from pants.core.util_rules.strip_source_roots import rules as strip_source_roots_rules
from pants.engine.addresses import Address
from pants.engine.fs import Snapshot
from pants.engine.internals.scheduler import ExecutionError
from pants.engine.rules import RootRule
from pants.engine.selectors import Params
from pants.engine.target import Target, Targets, WrappedTarget
from pants.python.python_requirement import PythonRequirement
from pants.source.source_root import SourceRootConfig
from pants.testutil.option.util import create_options_bootstrapper
from pants.testutil.subsystem.util import init_subsystem
from pants.testutil.test_base import TestBase

_namespace_decl = "__import__('pkg_resources').declare_namespace(__name__)"


class TestSetupPyBase(TestBase):
    @classmethod
    def alias_groups(cls) -> BuildFileAliases:
        return BuildFileAliases(
            objects={"python_requirement": PythonRequirement, "setup_py": PythonArtifact}
        )

    @classmethod
    def target_types(cls):
        return [PythonBinary, PythonLibrary, PythonRequirementLibrary, Resources]

    def tgt(self, addr: str) -> Target:
        return self.request_single_product(WrappedTarget, Params(Address.parse(addr))).target


def init_source_root():
    init_subsystem(SourceRootConfig, options={"source": {"root_patterns": ["src/python"]}})


class TestGenerateChroot(TestSetupPyBase):
    @classmethod
    def rules(cls):
        return super().rules() + [
            generate_chroot,
            get_sources,
            get_requirements,
            get_ancestor_init_py,
            get_owned_dependencies,
            get_exporting_owner,
            RootRule(SetupPyChrootRequest),
            *determine_source_files_rules(),
            *strip_source_roots_rules(),
        ]

    def assert_chroot(self, expected_files, expected_setup_kwargs, addr):
        chroot = self.request_single_product(
            SetupPyChroot,
            Params(
                SetupPyChrootRequest(ExportedTarget(self.tgt(addr)), py2=False),
                create_options_bootstrapper(args=["--source-root-patterns=src/python"]),
            ),
        )
        snapshot = self.request_single_product(Snapshot, Params(chroot.digest))
        assert sorted(expected_files) == sorted(snapshot.files)
        kwargs = json.loads(chroot.setup_keywords_json)
        assert expected_setup_kwargs == kwargs

    def assert_error(self, addr: str, exc_cls: Type[Exception]):
        with pytest.raises(ExecutionError) as excinfo:
            self.request_single_product(
                SetupPyChroot,
                Params(
                    SetupPyChrootRequest(ExportedTarget(self.tgt(addr)), py2=False),
                    create_options_bootstrapper(args=["--source-root-patterns=src/python"]),
                ),
            )
        ex = excinfo.value
        assert len(ex.wrapped_exceptions) == 1
        assert type(ex.wrapped_exceptions[0]) == exc_cls

    def test_generate_chroot(self) -> None:
        init_source_root()
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            "python_library(provides=setup_py(name='baz', version='1.1.1'))",
        )
        self.create_file("src/python/foo/bar/baz/baz.py", "")
        self.create_file(
            "src/python/foo/qux/BUILD",
            textwrap.dedent(
                """
                python_library()

                python_binary(name="bin", entry_point="foo.qux.bin")
                """
            ),
        )
        self.create_file("src/python/foo/qux/__init__.py", "")
        self.create_file("src/python/foo/qux/qux.py", "")
        self.create_file("src/python/foo/resources/BUILD", 'resources(sources=["js/code.js"])')
        self.create_file("src/python/foo/resources/js/code.js", "")
        self.create_file(
            "src/python/foo/BUILD",
            textwrap.dedent(
                """
                python_library(
                    dependencies=[
                        'src/python/foo/bar/baz',
                        'src/python/foo/qux',
                        'src/python/foo/resources',
                    ],
                    provides=setup_py(
                        name='foo', version='1.2.3'
                    ).with_binaries(
                        foo_main='src/python/foo/qux:bin'
                    )
                )
                """
            ),
        )
        self.create_file("src/python/foo/__init__.py", _namespace_decl)
        self.create_file("src/python/foo/foo.py", "")

        self.assert_chroot(
            [
                "src/foo/qux/__init__.py",
                "src/foo/qux/qux.py",
                "src/foo/resources/js/code.js",
                "src/foo/__init__.py",
                "src/foo/foo.py",
                "setup.py",
                "MANIFEST.in",
            ],
            {
                "name": "foo",
                "version": "1.2.3",
                "package_dir": {"": "src"},
                "packages": ["foo", "foo.qux"],
                "namespace_packages": ["foo"],
                "package_data": {"foo": ["resources/js/code.js"]},
                "install_requires": ["baz==1.1.1"],
                "entry_points": {"console_scripts": ["foo_main=foo.qux.bin"]},
            },
            "src/python/foo",
        )

    def test_invalid_binary(self) -> None:
        init_source_root()
        self.create_file(
            "src/python/invalid_binary/BUILD",
            textwrap.dedent(
                """
                python_library(name='not_a_binary', sources=[])

                python_binary(name='no_entrypoint')

                python_library(
                    name='invalid_bin1',
                    sources=[],
                    provides=setup_py(
                        name='invalid_bin1', version='1.1.1'
                    ).with_binaries(foo=':not_a_binary')
                )

                python_library(
                    name='invalid_bin2',
                    sources=[],
                    provides=setup_py(
                        name='invalid_bin2', version='1.1.1'
                    ).with_binaries(foo=':no_entrypoint')
                )
                """
            ),
        )

        self.assert_error("src/python/invalid_binary:invalid_bin1", InvalidEntryPoint)
        self.assert_error("src/python/invalid_binary:invalid_bin2", InvalidEntryPoint)


class TestGetSources(TestSetupPyBase):
    @classmethod
    def rules(cls):
        return super().rules() + [
            get_sources,
            get_ancestor_init_py,
            RootRule(SetupPySourcesRequest),
            RootRule(SourceRootConfig),
            *determine_source_files_rules(),
            *strip_source_roots_rules(),
        ]

    def assert_sources(
        self,
        expected_files,
        expected_packages,
        expected_namespace_packages,
        expected_package_data,
        addrs,
    ):
        srcs = self.request_single_product(
            SetupPySources,
            Params(
                SetupPySourcesRequest(Targets([self.tgt(addr) for addr in addrs]), py2=False),
                SourceRootConfig.global_instance(),
            ),
        )
        chroot_snapshot = self.request_single_product(Snapshot, Params(srcs.digest))

        assert sorted(expected_files) == sorted(chroot_snapshot.files)
        assert sorted(expected_packages) == sorted(srcs.packages)
        assert sorted(expected_namespace_packages) == sorted(srcs.namespace_packages)
        assert expected_package_data == dict(srcs.package_data)

    def test_get_sources(self) -> None:
        init_source_root()
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            textwrap.dedent(
                """
                python_library(name='baz1', sources=['baz1.py'])
                python_library(name='baz2', sources=['baz2.py'])
                """
            ),
        )
        self.create_file("src/python/foo/bar/baz/baz1.py", "")
        self.create_file("src/python/foo/bar/baz/baz2.py", "")
        self.create_file("src/python/foo/bar/__init__.py", _namespace_decl)
        self.create_file("src/python/foo/qux/BUILD", "python_library()")
        self.create_file("src/python/foo/qux/__init__.py", "")
        self.create_file("src/python/foo/qux/qux.py", "")
        self.create_file("src/python/foo/resources/BUILD", 'resources(sources=["js/code.js"])')
        self.create_file("src/python/foo/resources/js/code.js", "")
        self.create_file("src/python/foo/__init__.py", "")

        self.assert_sources(
            expected_files=["foo/bar/baz/baz1.py", "foo/bar/__init__.py", "foo/__init__.py"],
            expected_packages=["foo", "foo.bar", "foo.bar.baz"],
            expected_namespace_packages=["foo.bar"],
            expected_package_data={},
            addrs=["src/python/foo/bar/baz:baz1"],
        )

        self.assert_sources(
            expected_files=["foo/bar/baz/baz2.py", "foo/bar/__init__.py", "foo/__init__.py"],
            expected_packages=["foo", "foo.bar", "foo.bar.baz"],
            expected_namespace_packages=["foo.bar"],
            expected_package_data={},
            addrs=["src/python/foo/bar/baz:baz2"],
        )

        self.assert_sources(
            expected_files=["foo/qux/qux.py", "foo/qux/__init__.py", "foo/__init__.py"],
            expected_packages=["foo", "foo.qux"],
            expected_namespace_packages=[],
            expected_package_data={},
            addrs=["src/python/foo/qux"],
        )

        self.assert_sources(
            expected_files=[
                "foo/bar/baz/baz1.py",
                "foo/bar/__init__.py",
                "foo/qux/qux.py",
                "foo/qux/__init__.py",
                "foo/__init__.py",
                "foo/resources/js/code.js",
            ],
            expected_packages=["foo", "foo.bar", "foo.bar.baz", "foo.qux"],
            expected_namespace_packages=["foo.bar"],
            expected_package_data={"foo": ("resources/js/code.js",)},
            addrs=["src/python/foo/bar/baz:baz1", "src/python/foo/qux", "src/python/foo/resources"],
        )

        self.assert_sources(
            expected_files=[
                "foo/bar/baz/baz1.py",
                "foo/bar/baz/baz2.py",
                "foo/bar/__init__.py",
                "foo/qux/qux.py",
                "foo/qux/__init__.py",
                "foo/__init__.py",
                "foo/resources/js/code.js",
            ],
            expected_packages=["foo", "foo.bar", "foo.bar.baz", "foo.qux"],
            expected_namespace_packages=["foo.bar"],
            expected_package_data={"foo": ("resources/js/code.js",)},
            addrs=[
                "src/python/foo/bar/baz:baz1",
                "src/python/foo/bar/baz:baz2",
                "src/python/foo/qux",
                "src/python/foo/resources",
            ],
        )


class TestGetRequirements(TestSetupPyBase):
    @classmethod
    def rules(cls):
        return super().rules() + [
            get_requirements,
            get_owned_dependencies,
            get_exporting_owner,
            RootRule(DependencyOwner),
        ]

    def assert_requirements(self, expected_req_strs, addr):
        reqs = self.request_single_product(
            ExportedTargetRequirements,
            Params(DependencyOwner(ExportedTarget(self.tgt(addr))), create_options_bootstrapper()),
        )
        assert sorted(expected_req_strs) == list(reqs)

    def test_get_requirements(self) -> None:
        self.create_file(
            "3rdparty/BUILD",
            textwrap.dedent(
                """
                python_requirement_library(
                    name='ext1',
                    requirements=[python_requirement('ext1==1.22.333')],
                )
                python_requirement_library(
                    name='ext2',
                    requirements=[python_requirement('ext2==4.5.6')],
                )
                python_requirement_library(
                    name='ext3',
                    requirements=[python_requirement('ext3==0.0.1')],
                )
                """
            ),
        )
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            "python_library(dependencies=['3rdparty:ext1'], sources=[])",
        )
        self.create_file(
            "src/python/foo/bar/qux/BUILD",
            "python_library(dependencies=['3rdparty:ext2', 'src/python/foo/bar/baz'], sources=[])",
        )
        self.create_file(
            "src/python/foo/bar/BUILD",
            textwrap.dedent(
                """
                python_library(
                    sources=[],
                    dependencies=['src/python/foo/bar/baz', 'src/python/foo/bar/qux'],
                    provides=setup_py(name='bar', version='9.8.7'),
                )
                """
            ),
        )
        self.create_file(
            "src/python/foo/corge/BUILD",
            textwrap.dedent(
                """
                python_library(
                    sources=[],
                    dependencies=['3rdparty:ext3', 'src/python/foo/bar'],
                    provides=setup_py(name='corge', version='2.2.2'),
                )
                """
            ),
        )

        self.assert_requirements(["ext1==1.22.333", "ext2==4.5.6"], "src/python/foo/bar")
        self.assert_requirements(["ext3==0.0.1", "bar==9.8.7"], "src/python/foo/corge")


class TestGetAncestorInitPy(TestSetupPyBase):
    @classmethod
    def rules(cls):
        return super().rules() + [
            get_ancestor_init_py,
            RootRule(Targets),
            RootRule(SourceRootConfig),
            *determine_source_files_rules(),
        ]

    def assert_ancestor_init_py(
        self, expected_init_pys: Iterable[str], addrs: Iterable[str]
    ) -> None:
        ancestor_init_py_files = self.request_single_product(
            AncestorInitPyFiles,
            Params(
                Targets([self.tgt(addr) for addr in addrs]),
                SourceRootConfig.global_instance(),
            ),
        )
        snapshots = [
            self.request_single_product(Snapshot, Params(digest))
            for digest in ancestor_init_py_files.digests
        ]
        init_py_files_found = set([file for snapshot in snapshots for file in snapshot.files])
        # NB: Doesn't include the root __init__.py or the missing src/python/foo/bar/__init__.py.
        assert sorted(expected_init_pys) == sorted(init_py_files_found)

    def test_get_ancestor_init_py(self) -> None:
        init_source_root()
        # NB: src/python/foo/bar/baz/qux/__init__.py is a target's source.
        self.create_file("src/python/foo/bar/baz/qux/BUILD", "python_library()")
        self.create_file("src/python/foo/bar/baz/qux/qux.py", "")
        self.create_file("src/python/foo/bar/baz/qux/__init__.py", "")
        self.create_file("src/python/foo/bar/baz/__init__.py", "")
        # NB: No src/python/foo/bar/__init__.py.
        # NB: src/python/foo/corge/__init__.py is not any target's source.
        self.create_file("src/python/foo/corge/BUILD", 'python_library(sources=["corge.py"])')
        self.create_file("src/python/foo/corge/corge.py", "")
        self.create_file("src/python/foo/corge/__init__.py", "")
        self.create_file("src/python/foo/__init__.py", "")
        self.create_file("src/python/__init__.py", "")
        self.create_file("src/python/foo/resources/BUILD", 'resources(sources=["style.css"])')
        self.create_file("src/python/foo/resources/style.css", "")
        # NB: A stray __init__.py in a resources-only dir.
        self.create_file("src/python/foo/resources/__init__.py", "")

        # NB: None of these should include the root src/python/__init__.py, the missing
        # src/python/foo/bar/__init__.py, or the stray src/python/foo/resources/__init__.py.
        self.assert_ancestor_init_py(
            ["foo/bar/baz/qux/__init__.py", "foo/bar/baz/__init__.py", "foo/__init__.py"],
            ["src/python/foo/bar/baz/qux"],
        )
        self.assert_ancestor_init_py([], ["src/python/foo/resources"])
        self.assert_ancestor_init_py(
            ["foo/corge/__init__.py", "foo/__init__.py"],
            ["src/python/foo/corge", "src/python/foo/resources"],
        )
        self.assert_ancestor_init_py(
            [
                "foo/bar/baz/qux/__init__.py",
                "foo/bar/baz/__init__.py",
                "foo/corge/__init__.py",
                "foo/__init__.py",
            ],
            ["src/python/foo/bar/baz/qux", "src/python/foo/corge"],
        )


class TestGetOwnedDependencies(TestSetupPyBase):
    @classmethod
    def rules(cls):
        return super().rules() + [
            get_owned_dependencies,
            get_exporting_owner,
            RootRule(DependencyOwner),
        ]

    def assert_owned(self, owned: Iterable[str], exported: str):
        assert sorted(owned) == sorted(
            od.target.address.reference()
            for od in self.request_single_product(
                OwnedDependencies,
                Params(
                    DependencyOwner(ExportedTarget(self.tgt(exported))),
                    create_options_bootstrapper(),
                ),
            )
        )

    def test_owned_dependencies(self) -> None:
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            textwrap.dedent(
                """
                python_library(name='baz1', sources=[])
                python_library(name='baz2', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/foo/bar/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='bar1',
                    sources=[],
                    dependencies=['src/python/foo/bar/baz:baz1'],
                    provides=setup_py(name='bar1', version='1.1.1'),
                )
                python_library(
                    name='bar2',
                    sources=[],
                    dependencies=[':bar-resources', 'src/python/foo/bar/baz:baz2'],
                )
                resources(name='bar-resources', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/foo/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='foo',
                    sources=[],
                    dependencies=['src/python/foo/bar:bar1', 'src/python/foo/bar:bar2'],
                    provides=setup_py(name='foo', version='3.4.5'),
                )
                """
            ),
        )

        self.assert_owned(
            ["src/python/foo/bar:bar1", "src/python/foo/bar/baz:baz1"], "src/python/foo/bar:bar1"
        )
        self.assert_owned(
            [
                "src/python/foo",
                "src/python/foo/bar:bar2",
                "src/python/foo/bar:bar-resources",
                "src/python/foo/bar/baz:baz2",
            ],
            "src/python/foo",
        )


class TestGetExportingOwner(TestSetupPyBase):
    @classmethod
    def rules(cls):
        return super().rules() + [
            get_exporting_owner,
            RootRule(OwnedDependency),
        ]

    def assert_is_owner(self, owner: str, owned: str):
        assert (
            owner
            == self.request_single_product(
                ExportedTarget,
                Params(OwnedDependency(self.tgt(owned)), create_options_bootstrapper()),
            ).target.address.reference()
        )

    def assert_error(self, owned: str, exc_cls: Type[Exception]):
        with pytest.raises(ExecutionError) as excinfo:
            self.request_single_product(
                ExportedTarget,
                Params(OwnedDependency(self.tgt(owned)), create_options_bootstrapper()),
            )
        ex = excinfo.value
        assert len(ex.wrapped_exceptions) == 1
        assert type(ex.wrapped_exceptions[0]) == exc_cls

    def assert_no_owner(self, owned: str):
        self.assert_error(owned, NoOwnerError)

    def assert_ambiguous_owner(self, owned: str):
        self.assert_error(owned, AmbiguousOwnerError)

    def test_get_owner_simple(self) -> None:
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            textwrap.dedent(
                """
                python_library(name='baz1', sources=[])
                python_library(name='baz2', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/foo/bar/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='bar1',
                    sources=[],
                    dependencies=['src/python/foo/bar/baz:baz1'],
                    provides=setup_py(name='bar1', version='1.1.1'),
                )
                python_library(
                    name='bar2',
                    sources=[],
                    dependencies=[':bar-resources', 'src/python/foo/bar/baz:baz2'],
                )
                resources(name='bar-resources', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/foo/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='foo1',
                    sources=[],
                    dependencies=['src/python/foo/bar/baz:baz2'],
                    provides=setup_py(name='foo1', version='0.1.2'),
                )
                python_library(name='foo2', sources=[])
                python_library(
                    name='foo3',
                    sources=[],
                    dependencies=['src/python/foo/bar:bar2'],
                    provides=setup_py(name='foo3', version='3.4.5'),
                )
                """
            ),
        )

        self.assert_is_owner("src/python/foo/bar:bar1", "src/python/foo/bar:bar1")
        self.assert_is_owner("src/python/foo/bar:bar1", "src/python/foo/bar/baz:baz1")

        self.assert_is_owner("src/python/foo:foo1", "src/python/foo:foo1")

        self.assert_is_owner("src/python/foo:foo3", "src/python/foo:foo3")
        self.assert_is_owner("src/python/foo:foo3", "src/python/foo/bar:bar2")
        self.assert_is_owner("src/python/foo:foo3", "src/python/foo/bar:bar-resources")

        self.assert_no_owner("src/python/foo:foo2")
        self.assert_ambiguous_owner("src/python/foo/bar/baz:baz2")

    def test_get_owner_siblings(self) -> None:
        self.create_file(
            "src/python/siblings/BUILD",
            textwrap.dedent(
                """
                python_library(name='sibling1', sources=[])
                python_library(
                    name='sibling2',
                    sources=[],
                    dependencies=['src/python/siblings:sibling1'],
                    provides=setup_py(name='siblings', version='2.2.2'),
                )
                """
            ),
        )

        self.assert_is_owner("src/python/siblings:sibling2", "src/python/siblings:sibling1")
        self.assert_is_owner("src/python/siblings:sibling2", "src/python/siblings:sibling2")

    def test_get_owner_not_an_ancestor(self) -> None:
        self.create_file(
            "src/python/notanancestor/aaa/BUILD",
            textwrap.dedent(
                """
                python_library(name='aaa', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/notanancestor/bbb/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='bbb',
                    sources=[],
                    dependencies=['src/python/notanancestor/aaa'],
                    provides=setup_py(name='bbb', version='11.22.33'),
                )
                """
            ),
        )

        self.assert_no_owner("src/python/notanancestor/aaa")
        self.assert_is_owner("src/python/notanancestor/bbb", "src/python/notanancestor/bbb")

    def test_get_owner_multiple_ancestor_generations(self) -> None:
        self.create_file(
            "src/python/aaa/bbb/ccc/BUILD",
            textwrap.dedent(
                """
                python_library(name='ccc', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/aaa/bbb/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='bbb',
                    sources=[],
                    dependencies=['src/python/aaa/bbb/ccc'],
                    provides=setup_py(name='bbb', version='1.1.1'),
                )
                """
            ),
        )
        self.create_file(
            "src/python/aaa/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='aaa',
                    sources=[],
                    dependencies=['src/python/aaa/bbb/ccc'],
                    provides=setup_py(name='aaa', version='2.2.2'),
                )
                """
            ),
        )

        self.assert_is_owner("src/python/aaa/bbb", "src/python/aaa/bbb/ccc")
        self.assert_is_owner("src/python/aaa/bbb", "src/python/aaa/bbb")
        self.assert_is_owner("src/python/aaa", "src/python/aaa")


def test_validate_args() -> None:
    with pytest.raises(InvalidSetupPyArgs):
        validate_args(("bdist_wheel", "upload"))
    with pytest.raises(InvalidSetupPyArgs):
        validate_args(("sdist", "-d", "new_distdir/"))
    with pytest.raises(InvalidSetupPyArgs):
        validate_args(("--dist-dir", "new_distdir/", "sdist"))

    validate_args(("sdist",))
    validate_args(("bdist_wheel", "--foo"))
[ "noreply@github.com" ]
DoN-SultaN.noreply@github.com
f9a501c145dbd5a41701bcb08ac1c22014d598f6
e782950bb76c4dd295001f7760f42e04ceadfb1b
/tests/test_completion.py
6da2d9cdd703379d172e78b6479300256e4e92b0
[ "MIT" ]
permissive
h3xium/typer
2c3fc691c52a89997eb7db9267ed1fb12c9af800
31f7a44a467e6e3468434703d3c18961a746939f
refs/heads/master
2021-01-26T22:23:57.520688
2020-02-15T12:39:47
2020-02-15T12:39:47
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,456
py
import os
import subprocess
import sys
from pathlib import Path

import typer
from typer.testing import CliRunner

from first_steps import tutorial001 as mod

runner = CliRunner()

app = typer.Typer()
app.command()(mod.main)


def test_show_completion():
    result = subprocess.run(
        [
            "bash",
            "-c",
            f"{sys.executable} -m coverage run {mod.__file__} --show-completion",
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding="utf-8",
        env={**os.environ, "SHELL": "/bin/bash"},
    )
    assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in result.stdout


def test_install_completion():
    bash_completion_path: Path = Path.home() / ".bash_completion"
    text = ""
    if bash_completion_path.is_file():
        text = bash_completion_path.read_text()
    result = subprocess.run(
        [
            "bash",
            "-c",
            f"{sys.executable} -m coverage run {mod.__file__} --install-completion",
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding="utf-8",
        env={**os.environ, "SHELL": "/bin/bash"},
    )
    new_text = bash_completion_path.read_text()
    bash_completion_path.write_text(text)
    assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in new_text
    assert "completion installed in" in result.stdout
    assert "Completion will take effect once you restart the terminal." in result.stdout
[ "tiangolo@gmail.com" ]
tiangolo@gmail.com