content
stringlengths 5
1.05M
|
|---|
import time
import numpy as np
import matplotlib
import torch as t
import visdom
matplotlib.use('Agg')
from matplotlib import pyplot as plot
# Abbreviated PASCAL VOC class names (20 classes). The position in this tuple
# is the integer class label; vis_bbox appends a trailing 'bg' entry so that
# label index -1 maps to background.
VOC_BBOX_LABEL_NAMES = (
    'fly',
    'bike',
    'bird',
    'boat',
    'pin',
    'bus',
    'c',
    'cat',
    'chair',
    'cow',
    'table',
    'dog',
    'horse',
    'moto',
    'p',
    'plant',
    'shep',
    'sofa',
    'train',
    'tv',
)
def vis_image(img, ax=None):
    """Draw a CHW image onto a matplotlib axes.

    Creates a fresh figure/axes when ``ax`` is None; returns the axes so
    further artists (e.g. bounding boxes) can be layered on top.
    """
    if ax is None:
        figure = plot.figure()
        ax = figure.add_subplot(1, 1, 1)
    # imshow wants HWC, the pipeline carries CHW.
    hwc = img.transpose((1, 2, 0))
    ax.imshow(hwc.astype(np.uint8))
    return ax
def vis_bbox(img, bbox, label=None, score=None, ax=None):
    """Draw bounding boxes, with optional label/score captions, on an image.

    Boxes are (y_min, x_min, y_max, x_max); label entries index into
    VOC_BBOX_LABEL_NAMES plus a trailing 'bg' (so -1 means background).
    Returns the matplotlib axes used for drawing.
    """
    label_names = list(VOC_BBOX_LABEL_NAMES) + ['bg']
    if label is not None and len(label) != len(bbox):
        raise ValueError('The length of label must be same as that of bbox')
    if score is not None and len(score) != len(bbox):
        raise ValueError('The length of score must be same as that of bbox')

    ax = vis_image(img, ax=ax)
    if len(bbox) == 0:
        # Nothing to draw on top of the image.
        return ax

    for idx, box in enumerate(bbox):
        y_min, x_min = box[0], box[1]
        box_height = box[2] - box[0]
        box_width = box[3] - box[1]
        ax.add_patch(plot.Rectangle(
            (x_min, y_min), box_width, box_height,
            fill=False, edgecolor='red', linewidth=2))

        caption = []
        if label is not None and label_names is not None:
            lb = label[idx]
            if not (-1 <= lb < len(label_names)):
                raise ValueError('No corresponding name is given')
            caption.append(label_names[lb])
        if score is not None:
            caption.append('{:.2f}'.format(score[idx]))
        if caption:
            ax.text(x_min, y_min, ': '.join(caption), style='italic',
                    bbox={'facecolor': 'white', 'alpha': 0.5, 'pad': 0})
    return ax
def fig2data(fig):
    """Render a matplotlib figure into an (H, W, 4) RGBA uint8 array.

    The Agg canvas yields ARGB bytes; rolling the channel axis by 3
    converts ARGB -> RGBA.
    """
    fig.canvas.draw()
    w, h = fig.canvas.get_width_height()
    # np.frombuffer replaces np.fromstring, which is deprecated for binary data.
    buf = np.frombuffer(fig.canvas.tostring_argb(), dtype=np.uint8)
    # Canvas bytes are row-major with height first. (The original assigned
    # shape (w, h, 4) and then reshaped to (h, w, 4), which only produced the
    # right result because reshape works on the flat buffer.)
    buf = buf.reshape(h, w, 4)
    return np.roll(buf, 3, axis=2)
def fig4vis(fig):
    """Convert a drawn axes into a CHW float image in [0, 1] for visdom."""
    # The argument is an axes (as returned by vis_bbox); fetch its figure.
    figure = fig.get_figure()
    rgba = fig2data(figure).astype(np.int32)
    plot.close()
    # Drop alpha, go HWC -> CHW, scale to [0, 1].
    rgb = rgba[:, :, :3]
    return rgb.transpose((2, 0, 1)) / 255.
def visdom_bbox(*args, **kwargs):
    """Render bounding boxes onto an image and return a visdom-ready CHW array."""
    return fig4vis(vis_bbox(*args, **kwargs))
class Visualizer(object):
    """Thin convenience wrapper around a ``visdom.Visdom`` client.

    Keeps a per-plot x counter (``self.index``) so scalar series can be
    appended one point at a time, and accumulates log lines into a single
    text pane. Attributes not defined here are delegated to the underlying
    visdom client via ``__getattr__``.
    """

    def __init__(self, env='default', **kwargs):
        self.vis = visdom.Visdom(env=env, use_incoming_socket=False, **kwargs)
        self._vis_kw = kwargs  # saved so the connection can be rebuilt later
        self.index = {}        # plot name -> next x coordinate
        self.log_text = ''

    def reinit(self, env='default', **kwargs):
        """Re-create the visdom connection (e.g. to switch environments)."""
        self.vis = visdom.Visdom(env=env, **kwargs)
        return self

    def plot_many(self, d):
        """Plot a dict of scalars, one window per key; None values are skipped."""
        for k, v in d.items():
            if v is not None:
                self.plot(k, v)

    def img_many(self, d):
        """Show a dict of images, one window per key."""
        for k, v in d.items():
            self.img(k, v)

    def plot(self, name, y, **kwargs):
        """Append scalar ``y`` to the line plot named ``name``."""
        x = self.index.get(name, 0)
        # First point creates the window; later points append to it.
        self.vis.line(Y=np.array([y]), X=np.array([x]), win=name,
                      opts=dict(title=name),
                      update=None if x == 0 else 'append', **kwargs)
        self.index[name] = x + 1

    def img(self, name, img_, **kwargs):
        """Show an image (or image batch) in window ``name``."""
        self.vis.images(t.Tensor(img_).cpu().numpy(), win=name, opts=dict(title=name), **kwargs)

    def log(self, info, win='log_text'):
        """Append a timestamped line to the accumulated log pane."""
        self.log_text += ('[{time}] {info} <br>'.format(time=time.strftime('%m%d_%H%M%S'), info=info))
        self.vis.text(self.log_text, win)

    def __getattr__(self, name):
        # Fall through to the raw visdom client for anything not wrapped here.
        return getattr(self.vis, name)

    def state_dict(self):
        """Return a serializable snapshot of the visualizer state."""
        return {'index': self.index, 'vis_kw': self._vis_kw, 'log_text': self.log_text, 'env': self.vis.env}

    def load_state_dict(self, d):
        """Restore from a ``state_dict()`` snapshot.

        Bug fix: the original read ``self.d.get('vis_kw')`` (AttributeError —
        ``d`` is the parameter) and would unpack ``None`` when the key was
        missing; fall back to the constructor kwargs instead.
        """
        self.vis = visdom.Visdom(env=d.get('env', self.vis.env), **(d.get('vis_kw', self._vis_kw)))
        self.log_text = d.get('log_text', '')
        self.index = d.get('index', dict())
        return self
|
from datetime import date
import numpy as np
import pytest
from pandas import (
Categorical,
CategoricalDtype,
CategoricalIndex,
Index,
IntervalIndex,
)
import pandas._testing as tm
class TestAstype:
    """Casting behaviour of CategoricalIndex.astype."""

    def test_astype(self):
        cat_idx = CategoricalIndex(list("aabbca"), categories=list("cab"), ordered=False)

        converted = cat_idx.astype(object)
        tm.assert_index_equal(converted, Index(np.array(cat_idx)))
        # this IS equal, but not the same class
        assert converted.equals(cat_idx)
        assert isinstance(converted, Index)
        assert not isinstance(converted, CategoricalIndex)

        # interval
        intervals = IntervalIndex.from_arrays(left=[-0.001, 2.0], right=[2, 4], closed="right")
        cat_idx = CategoricalIndex(
            Categorical.from_codes([0, 1, -1], categories=intervals, ordered=True)
        )

        converted = cat_idx.astype("interval")
        expected = intervals.take([0, 1, -1], allow_fill=True, fill_value=np.nan)
        tm.assert_index_equal(converted, expected)

        converted = IntervalIndex(converted.values)
        tm.assert_index_equal(converted, expected)

    @pytest.mark.parametrize("name", [None, "foo"])
    @pytest.mark.parametrize("dtype_ordered", [True, False])
    @pytest.mark.parametrize("index_ordered", [True, False])
    def test_astype_category(self, name, dtype_ordered, index_ordered):
        # GH#18630
        index = CategoricalIndex(
            list("aabbca"), categories=list("cab"), ordered=index_ordered
        )
        if name:
            index = index.rename(name)

        # standard categories
        target_dtype = CategoricalDtype(ordered=dtype_ordered)
        expected = CategoricalIndex(
            index.tolist(),
            name=name,
            categories=index.categories,
            ordered=dtype_ordered,
        )
        tm.assert_index_equal(index.astype(target_dtype), expected)

        # non-standard categories
        target_dtype = CategoricalDtype(index.unique().tolist()[:-1], dtype_ordered)
        expected = CategoricalIndex(index.tolist(), name=name, dtype=target_dtype)
        tm.assert_index_equal(index.astype(target_dtype), expected)

        if dtype_ordered is False:
            # dtype='category' can't specify ordered, so only test once
            tm.assert_index_equal(index.astype("category"), index)

    def test_categorical_date_roundtrip(self):
        # astype to categorical and back should preserve date objects
        today = date.today()
        obj = Index([today, today])
        assert obj.dtype == object

        roundtripped = obj.astype("category").astype(object)
        assert roundtripped.dtype == object
        assert type(roundtripped[0]) is date
|
# Render a quick NeRF preview of the 'fern' LLFF scene from a trained checkpoint.
import os, sys
# os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
# Pin the process to the second GPU; must happen before TensorFlow is imported.
os.environ['CUDA_VISIBLE_DEVICES'] = '1'
import tensorflow as tf
tf.compat.v1.enable_eager_execution()
sys.path.append(r'/home/luca/Desktop/NERFPosit/Inference')
import numpy as np
import imageio
import json
import random
import time
import pprint
from tensorflow import keras
from tensorflow.keras import layers
import matplotlib.pyplot as plt

import run_nerf
from load_llff import load_llff_data
from load_deepvoxels import load_dv_data
from load_blender import load_blender_data

# Experiment bookkeeping: logs/<expname>/config.txt stores the training flags.
basedir = './logs'
expname = 'fern_example'
config = os.path.join(basedir, expname, 'config.txt')
print('Args:')
print(open(config, 'r').read())

parser = run_nerf.config_parser()
# NOTE(review): argparse-style parsers normally expect a list of tokens;
# confirm run_nerf.config_parser() accepts a single string here (a .split()
# may be needed).
args = parser.parse_args('--config {} --ft_path {}'.format(config, os.path.join(basedir, expname, 'model_200000.npy')))
print('loaded args')

# Load the LLFF (forward-facing) dataset; poses also carry the intrinsics.
images, poses, bds, render_poses, i_test = load_llff_data(args.datadir, args.factor,
                                                          recenter=True, bd_factor=.75,
                                                          spherify=args.spherify)
# The last column of the first pose stores (H, W, focal).
H, W, focal = poses[0,:3,-1].astype(np.float32)
H = int(H)
W = int(W)
hwf = [H, W, focal]
images = images.astype(np.float32)
poses = poses.astype(np.float32)

# Depth bounds: with NDC rays (the LLFF default) depth is warped into [0, 1];
# otherwise use the measured scene bounds with a small margin.
if args.no_ndc:
    near = tf.reduce_min(bds) * .9
    far = tf.reduce_max(bds) * 1.
else:
    near = 0.
    far = 1.

# Create nerf model
_, render_kwargs_test, start, grad_vars, models = run_nerf.create_nerf(args)
print(models['model'].input)
model = models['model']
print(model.summary())

#extractor = keras.Model(inputs=model.inputs,
#    outputs=model.layers[1].output)
#embed_fn, input_ch = run_nerf.get_embedder(10,1)
#embed_fn1, input_ch = run_nerf.get_embedder(4,1)
#a = embed_fn(tf.constant([[0.5,0.5,0.5]]))
#b = embed_fn1(tf.constant([[0.5,0.5,0.5]]))
#c = tf.concat([a,b],1)
#print(c.shape)
#print(extractor.predict(c))
#exit(0)
#features = extractor()

bds_dict = {
    'near' : tf.cast(near, tf.float32),
    'far' : tf.cast(far, tf.float32),
}
render_kwargs_test.update(bds_dict)

print('Render kwargs:')
pprint.pprint(render_kwargs_test)

# Fast preview settings: render at 1/4 resolution and skip the fine network.
down = 4
render_kwargs_fast = {k : render_kwargs_test[k] for k in render_kwargs_test}
render_kwargs_fast['N_importance'] = 0

c2w = np.eye(4)[:3,:4].astype(np.float32) # identity pose matrix
test = run_nerf.render(H//down, W//down, focal/down, c2w=c2w, **render_kwargs_fast)
img = np.clip(test[0],0,1)
plt.imshow(img)
plt.show()
|
from App import login_manager
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
    """flask-login callback: rebuild the session's User from its stored id."""
    # No datastore lookup — a User here is fully described by its id alone.
    return User(user_id)
class User(UserMixin):
    """Minimal flask-login user: carries only an id, with no persistence."""

    def __init__(self, id):
        # id is what flask-login serializes into the session (via UserMixin.get_id).
        self.id = id
|
import rospy
from yaw_controller import YawController
from pid import PID
from lowpass import LowPassFilter
# Density of gasoline — presumably kg per US gallon, for converting fuel volume
# to mass; confirm units against how fuel_capacity is used by callers.
GAS_DENSITY = 2.858
# 1 mile-per-hour expressed in metres per second.
ONE_MPH = 0.44704
class Controller(object):
    """Drive-by-wire controller producing throttle, brake and steering commands."""

    def __init__(self, vehicle_mass, fuel_capacity, brake_deadband, decel_limit,
                 accel_limit, wheel_radius, wheel_base, steer_ratio, max_lat_accel, max_steer_angle):
        self.yaw_controller = YawController(wheel_base, steer_ratio, 0.1, max_lat_accel, max_steer_angle)
        self.vehicle_mass = vehicle_mass
        self.decel_limit = decel_limit
        self.accel_limit = accel_limit
        self.wheel_radius = wheel_radius

        # Velocity-tracking PID; output is clamped to [decel_limit, accel_limit].
        kp, ki, kd = 0.3, 0.1, 0.
        self.throttle_controller = PID(kp, ki, kd, self.decel_limit, self.accel_limit)

        # Low-pass filter for the noisy measured velocity.
        # Cut-off frequency = 1/(2*pi*tau); sample time ts = 0.02 s.
        tau, ts = 0.5, 0.02
        self.vel_lpf = LowPassFilter(tau, ts)

        self.last_time = rospy.get_time()

    def control(self, current_vel, dbw_enabled, target_vel, target_angel):
        """Return (throttle, brake, steering) for the current control cycle."""
        # With drive-by-wire disengaged, reset the integrator and command nothing
        # so the PID does not wind up while a human is driving.
        if not dbw_enabled:
            self.throttle_controller.reset()
            return 0., 0., 0.

        # Smooth the measured velocity before using it.
        current_vel = self.vel_lpf.filt(current_vel)

        now = rospy.get_time()
        sample_time = now - self.last_time
        self.last_time = now

        vel_error = target_vel - current_vel
        throttle = self.throttle_controller.step(vel_error, sample_time)
        brake = 0.

        if target_vel == 0. and current_vel < 0.1:
            # Standstill: hold the car in place (e.g. stopped at a light).
            throttle = 0.
            brake = 700  # N*m holding torque
        elif throttle < 0.1 and vel_error < 0.:
            # Decelerating: convert the desired deceleration into brake torque.
            throttle = 0.
            decel = max(vel_error, self.decel_limit)
            brake = abs(decel) * self.vehicle_mass * self.wheel_radius  # Torque N*m

        # Steering: positive turns left, negative turns right.
        steering = self.yaw_controller.get_steering(target_vel, target_angel, current_vel)
        return throttle, brake, steering
|
# Bug fixes: the module is watchdog.observers and the class is Observer
# (the original `from watchdog.observer import observer` fails to import),
# the method is schedule() (was misspelled `schdule`), and the Python
# boolean literal is True (was lowercase `true`).
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
# pip install watchdog for these packages
import os
import json
import time


class MyHandler(FileSystemEventHandler):
    """On any modification event, move every file out of the tracked folder."""

    def on_modified(self, event):
        # The event itself is ignored; the whole folder is swept each time.
        for filename in os.listdir(folder_to_track):
            src = folder_to_track + "/" + filename
            new_destination = folder_destination + "/" + filename
            os.rename(src, new_destination)


# NOTE(review): these paths have no leading '/' — on macOS they are likely
# meant to be '/Users/dylan/...'; confirm before deploying.
folder_to_track = "Users/dylan/Downloads"
folder_destination = "Users/dylan/Documents"

event_handler = MyHandler()
observer = Observer()
observer.schedule(event_handler, folder_to_track, recursive=True)
observer.start()
try:
    # Keep the main thread alive while the observer thread watches the folder.
    while True:
        time.sleep(10)
except KeyboardInterrupt:
    observer.stop()
observer.join()
|
"""Forms for Assets and related entities."""
from django import forms
from django.forms import Textarea, TextInput, Select, Form
from editorial.models import (
ImageAsset,
DocumentAsset,
AudioAsset,
VideoAsset,
SimpleImage,
SimpleDocument,
SimpleAudio,
SimpleVideo,
)
##############################################################################################
# Convenience API:
#
# To reduce repetition in widgets (all get class=form-control, etc), these functions simplify
# the API for widgets in forms.
#
# This is experimental for now (Joel); if useful, this should be our pattern.
#
# This stuff should be moved out of this file into editorial/widgets.py, and this thinking
# should be used in the other forms/*.py files.
def _TextInput(placeholder=None):
    """Return a TextInput widget styled with the 'form-control' CSS class."""
    widget_attrs = {'class': 'form-control'}
    # Truthy check (not `is not None`) preserved: an empty placeholder is omitted.
    if placeholder:
        widget_attrs['placeholder'] = placeholder
    return TextInput(attrs=widget_attrs)
def _Textarea(placeholder=None, rows=None):
    """Return a Textarea widget styled with the 'form-control' CSS class."""
    widget_attrs = {'class': 'form-control'}
    # Truthy checks preserved: empty/zero values are omitted entirely.
    if placeholder:
        widget_attrs['placeholder'] = placeholder
    if rows:
        widget_attrs['rows'] = rows
    return Textarea(attrs=widget_attrs)
def _Select():
    """Return a Select widget styled with the 'form-control' CSS class."""
    select_attrs = {'class': 'form-control'}
    return Select(attrs=select_attrs)
##############################################################################################
# Asset Forms: adding assets to a facet
class ImageAssetForm(forms.ModelForm):
    """Upload image to a facet.

    Exposes the image file plus descriptive metadata; widgets are
    Bootstrap-styled via the _TextInput/_Textarea/_Select helpers above.
    """

    class Meta:
        model = ImageAsset
        fields = [
            'title',
            'description',
            'attribution',
            'photo',
            'asset_type',
            'keywords',
        ]
        # 'photo' keeps the model field's default file widget.
        widgets = {
            'title': _TextInput('Asset Title'),
            'description': _Textarea('Description', rows=3),
            'attribution': _Textarea('Attribution', rows=3),
            'asset_type': _Select(),
            'keywords': _Textarea('Keywords', rows=2),
        }
class DocumentAssetForm(forms.ModelForm):
    """Upload document to a facet.

    Exposes the document file plus descriptive metadata; widgets are
    Bootstrap-styled via the _TextInput/_Textarea/_Select helpers above.
    """

    class Meta:
        model = DocumentAsset
        fields = [
            'title',
            'description',
            'attribution',
            'document',
            'asset_type',
            'keywords',
        ]
        # 'document' keeps the model field's default file widget.
        widgets = {
            'title': _TextInput('Asset Title'),
            'description': _Textarea('Description', rows=3),
            'attribution': _Textarea('Attribution', rows=3),
            'asset_type': _Select(),
            'keywords': _Textarea('Keywords', rows=2),
        }
class AudioAssetForm(forms.ModelForm):
    """Upload audio to a facet.

    Accepts either an uploaded audio file or an external link, plus
    descriptive metadata; widgets are Bootstrap-styled via the helpers above.
    """

    class Meta:
        model = AudioAsset
        fields = [
            'title',
            'description',
            'attribution',
            'audio',
            'link',
            'asset_type',
            'keywords',
        ]
        # 'audio' keeps the model field's default file widget.
        widgets = {
            'title': _TextInput('Asset Title'),
            'description': _Textarea('Description', rows=3),
            'attribution': _Textarea('Attribution', rows=3),
            'link': _TextInput('Link'),
            'asset_type': _Select(),
            'keywords': _Textarea('Keywords', rows=2),
        }
class VideoAssetForm(forms.ModelForm):
    """Upload video to a facet.

    Accepts either an uploaded video file or an external link, plus
    descriptive metadata; widgets are Bootstrap-styled via the helpers above.
    """

    class Meta:
        model = VideoAsset
        fields = [
            'title',
            'description',
            'attribution',
            'video',
            'link',
            'asset_type',
            'keywords',
        ]
        # 'video' keeps the model field's default file widget.
        widgets = {
            'title': _TextInput('Asset Title'),
            'description': _Textarea('Description', rows=3),
            'attribution': _Textarea('Attribution', rows=3),
            'link': _TextInput('Link'),
            'asset_type': _Select(),
            'keywords': _Textarea('Keywords', rows=2),
        }
##############################################################################################
# Associating Forms: associating existing library assets to a facet.
class LibraryImageAssociateForm(Form):
    """Form for adding existing library images to a facet."""

    def __init__(self, *args, **kwargs):
        """Add field with vocabulary set to organization's assets."""
        organization = kwargs.pop("organization")
        super(LibraryImageAssociateForm, self).__init__(*args, **kwargs)
        # Limit the choices to images owned by this organization.
        image_choices = organization.imageasset_set.all()
        self.fields['images'] = forms.ModelMultipleChoiceField(
            queryset=image_choices, required=False)
class LibraryDocumentAssociateForm(Form):
    """Form for adding existing library documents to a facet."""

    def __init__(self, *args, **kwargs):
        """Add field with vocabulary set to organization's assets."""
        organization = kwargs.pop("organization")
        super(LibraryDocumentAssociateForm, self).__init__(*args, **kwargs)
        # Limit the choices to documents owned by this organization.
        document_choices = organization.documentasset_set.all()
        self.fields['documents'] = forms.ModelMultipleChoiceField(
            queryset=document_choices, required=False)
class LibraryAudioAssociateForm(Form):
    """Form for adding existing library audio to a facet."""

    def __init__(self, *args, **kwargs):
        """Add field with vocabulary set to organization's assets."""
        organization = kwargs.pop("organization")
        super(LibraryAudioAssociateForm, self).__init__(*args, **kwargs)
        # Limit the choices to audio assets owned by this organization.
        audio_choices = organization.audioasset_set.all()
        self.fields['audio'] = forms.ModelMultipleChoiceField(
            queryset=audio_choices, required=False)
class LibraryVideoAssociateForm(Form):
    """Form for adding existing library video to a facet."""

    def __init__(self, *args, **kwargs):
        """Add field with vocabulary set to organization's assets."""
        organization = kwargs.pop("organization")
        super(LibraryVideoAssociateForm, self).__init__(*args, **kwargs)
        # Limit the choices to video assets owned by this organization.
        video_choices = organization.videoasset_set.all()
        self.fields['video'] = forms.ModelMultipleChoiceField(
            queryset=video_choices, required=False)
##############################################################################################
# Simple Asset Forms
class SimpleImageForm(forms.ModelForm):
    """Upload a simple image.

    A lighter-weight counterpart to ImageAssetForm: no attribution,
    asset type or keywords — just the file and a title/description.
    """

    class Meta:
        model = SimpleImage
        fields = [
            'title',
            'description',
            'photo',
        ]
        # 'photo' keeps the model field's default file widget.
        widgets = {
            'title': _TextInput('Title'),
            'description': _Textarea('Description', rows=3),
        }
class SimpleDocumentForm(forms.ModelForm):
    """Upload a simple document.

    A lighter-weight counterpart to DocumentAssetForm: no attribution,
    asset type or keywords — just the file and a title/description.
    """

    class Meta:
        model = SimpleDocument
        fields = [
            'title',
            'description',
            'document',
        ]
        # 'document' keeps the model field's default file widget.
        widgets = {
            'title': _TextInput('Title'),
            'description': _Textarea('Description', rows=3),
        }
class SimpleAudioForm(forms.ModelForm):
    """Upload a simple audio file.

    A lighter-weight counterpart to AudioAssetForm: accepts a file or an
    external link plus a title/description.
    """

    class Meta:
        model = SimpleAudio
        fields = [
            'title',
            'description',
            'audio',
            'link',
        ]
        # 'audio' keeps the model field's default file widget.
        widgets = {
            'title': _TextInput('Title'),
            'description': _Textarea('Description', rows=3),
            'link': _TextInput('Link'),
        }
class SimpleVideoForm(forms.ModelForm):
    """Add a simple video.

    Link-only: unlike VideoAssetForm there is no file upload field.
    """

    class Meta:
        model = SimpleVideo
        fields = [
            'title',
            'description',
            'link',
        ]
        widgets = {
            'title': _TextInput('Title'),
            'description': _Textarea('Description', rows=3),
            'link': _TextInput('Link'),
        }
class SimpleImageLibraryAssociateForm(Form):
    """Form for adding existing simple images to an Organization, Network,
    Project, Series, Task or Event."""

    def __init__(self, *args, **kwargs):
        """Add field with vocabulary set to organization's assets."""
        organization = kwargs.pop("organization")
        super(SimpleImageLibraryAssociateForm, self).__init__(*args, **kwargs)
        # Limit the choices to simple images owned by this organization.
        image_choices = organization.simpleimage_set.all()
        self.fields['simpleimages'] = forms.ModelMultipleChoiceField(
            queryset=image_choices, required=False)
class SimpleDocumentLibraryAssociateForm(Form):
    """Form for adding existing simple documents to an Organization, Network,
    Project, Series, Task or Event."""

    def __init__(self, *args, **kwargs):
        """Add field with vocabulary set to organization's assets."""
        organization = kwargs.pop("organization")
        super(SimpleDocumentLibraryAssociateForm, self).__init__(*args, **kwargs)
        # Limit the choices to simple documents owned by this organization.
        document_choices = organization.simpledocument_set.all()
        self.fields['simpledocuments'] = forms.ModelMultipleChoiceField(
            queryset=document_choices, required=False)
class SimpleAudioLibraryAssociateForm(Form):
    """Form for adding existing simple audio files to an Organization, Network,
    Project, Series, Task or Event."""

    def __init__(self, *args, **kwargs):
        """Add field with vocabulary set to organization's assets."""
        organization = kwargs.pop("organization")
        super(SimpleAudioLibraryAssociateForm, self).__init__(*args, **kwargs)
        # Limit the choices to simple audio owned by this organization.
        audio_choices = organization.simpleaudio_set.all()
        self.fields['simpleaudio'] = forms.ModelMultipleChoiceField(
            queryset=audio_choices, required=False)
class SimpleVideoLibraryAssociateForm(Form):
    """Form for adding existing simple video to an Organization, Network,
    Project, Series, Task or Event."""

    def __init__(self, *args, **kwargs):
        """Add field with vocabulary set to organization's assets."""
        organization = kwargs.pop("organization")
        super(SimpleVideoLibraryAssociateForm, self).__init__(*args, **kwargs)
        # Limit the choices to simple video owned by this organization.
        video_choices = organization.simplevideo_set.all()
        self.fields['simplevideo'] = forms.ModelMultipleChoiceField(
            queryset=video_choices, required=False)
|
# Generated from SMTLIBv2.g4 by ANTLR 4.9.2
# encoding: utf-8
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\u008d")
buf.write("\u05a0\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\tL\4M\t")
buf.write("M\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\tU\4V\t")
buf.write("V\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4")
buf.write("_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4")
buf.write("h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4")
buf.write("q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4y\ty\4")
buf.write("z\tz\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\5\3\5\5\5\u00ff")
buf.write("\n\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\5\t\u0109\n\t\3\n")
buf.write("\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20")
buf.write("\3\20\3\20\5\20\u011a\n\20\3\21\3\21\3\21\3\21\3\21\3")
buf.write("\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\5\21\u012a")
buf.write("\n\21\3\22\3\22\3\22\3\22\3\22\7\22\u0131\n\22\f\22\16")
buf.write("\22\u0134\13\22\3\22\5\22\u0137\n\22\3\23\3\23\5\23\u013b")
buf.write("\n\23\3\24\3\24\3\24\3\24\3\24\6\24\u0142\n\24\r\24\16")
buf.write("\24\u0143\3\24\3\24\5\24\u0148\n\24\3\25\3\25\3\25\3\25")
buf.write("\7\25\u014e\n\25\f\25\16\25\u0151\13\25\3\25\5\25\u0154")
buf.write("\n\25\3\26\3\26\3\26\3\26\5\26\u015a\n\26\3\27\3\27\3")
buf.write("\27\3\27\6\27\u0160\n\27\r\27\16\27\u0161\3\27\3\27\5")
buf.write("\27\u0166\n\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\5\30")
buf.write("\u016f\n\30\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3")
buf.write("\32\3\32\3\33\3\33\3\33\3\33\6\33\u017f\n\33\r\33\16\33")
buf.write("\u0180\3\33\3\33\5\33\u0185\n\33\3\34\3\34\3\34\3\34\3")
buf.write("\34\3\35\3\35\3\35\3\35\3\35\6\35\u0191\n\35\r\35\16\35")
buf.write("\u0192\3\35\3\35\3\35\3\35\3\35\3\35\3\35\6\35\u019c\n")
buf.write("\35\r\35\16\35\u019d\3\35\3\35\3\35\3\35\3\35\3\35\3\35")
buf.write("\6\35\u01a7\n\35\r\35\16\35\u01a8\3\35\3\35\3\35\3\35")
buf.write("\3\35\3\35\3\35\3\35\6\35\u01b3\n\35\r\35\16\35\u01b4")
buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\6\35\u01bf\n")
buf.write("\35\r\35\16\35\u01c0\3\35\3\35\3\35\3\35\3\35\3\35\3\35")
buf.write("\3\35\3\35\6\35\u01cc\n\35\r\35\16\35\u01cd\3\35\3\35")
buf.write("\3\35\3\35\3\35\3\35\3\35\6\35\u01d7\n\35\r\35\16\35\u01d8")
buf.write("\3\35\3\35\5\35\u01dd\n\35\3\36\3\36\3\36\3\36\7\36\u01e3")
buf.write("\n\36\f\36\16\36\u01e6\13\36\3\36\3\36\3\37\3\37\3 \3")
buf.write(" \3 \3 \7 \u01f0\n \f \16 \u01f3\13 \3 \3 \3 \3 \3 \3")
buf.write(" \7 \u01fb\n \f \16 \u01fe\13 \3 \3 \3 \3 \3 \6 \u0205")
buf.write("\n \r \16 \u0206\3 \7 \u020a\n \f \16 \u020d\13 \3 \3")
buf.write(" \5 \u0211\n \3!\3!\3!\3!\3!\6!\u0218\n!\r!\16!\u0219")
buf.write("\3!\3!\3!\3!\6!\u0220\n!\r!\16!\u0221\3!\7!\u0225\n!\f")
buf.write("!\16!\u0228\13!\3!\3!\3!\5!\u022d\n!\3\"\3\"\3\"\6\"\u0232")
buf.write("\n\"\r\"\16\"\u0233\3\"\3\"\3\"\3\"\3\"\6\"\u023b\n\"")
buf.write("\r\"\16\"\u023c\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3")
buf.write("\"\3\"\3\"\3\"\5\"\u024c\n\"\3#\3#\3#\3#\6#\u0252\n#\r")
buf.write("#\16#\u0253\3#\3#\3$\3$\3$\6$\u025b\n$\r$\16$\u025c\3")
buf.write("$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\5$\u026a\n$\3%\3%\3%\3")
buf.write("%\6%\u0270\n%\r%\16%\u0271\3%\3%\3&\3&\3&\3&\3&\3\'\3")
buf.write("\'\3\'\3\'\3\'\3(\3(\3(\7(\u0283\n(\f(\16(\u0286\13(\3")
buf.write("(\3(\3)\3)\6)\u028c\n)\r)\16)\u028d\3)\3)\3)\3)\3)\3)")
buf.write("\6)\u0296\n)\r)\16)\u0297\3)\3)\3)\6)\u029d\n)\r)\16)")
buf.write("\u029e\3)\3)\3)\5)\u02a4\n)\3*\3*\3*\3*\7*\u02aa\n*\f")
buf.write("*\16*\u02ad\13*\3*\3*\3*\3*\3+\3+\3+\7+\u02b6\n+\f+\16")
buf.write("+\u02b9\13+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\5,\u02c5\n,")
buf.write("\3-\7-\u02c8\n-\f-\16-\u02cb\13-\3.\3.\3/\3/\3\60\3\60")
buf.write("\3\61\3\61\3\62\3\62\3\63\3\63\3\64\3\64\3\65\3\65\3\66")
buf.write("\3\66\3\67\3\67\38\38\39\39\3:\3:\3;\3;\3<\3<\3=\3=\3")
buf.write(">\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3C\3D\3D\3E\3E\3F\3F\3")
buf.write("G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3M\3M\3N\3N\3O\3O\3")
buf.write("P\3P\3Q\3Q\3R\3R\3S\3S\3T\3T\3U\3U\3V\3V\3W\3W\3X\3X\3")
buf.write("Y\3Y\3Z\3Z\3[\3[\3\\\3\\\3]\3]\3^\3^\3_\3_\3`\3`\3a\3")
buf.write("a\3b\3b\3c\3c\3c\3c\3c\3c\5c\u033d\nc\3c\3c\3c\3c\3c\6")
buf.write("c\u0344\nc\rc\16c\u0345\3c\3c\3c\3c\3c\6c\u034d\nc\rc")
buf.write("\16c\u034e\3c\3c\3c\3c\3c\6c\u0356\nc\rc\16c\u0357\3c")
buf.write("\3c\3c\3c\3c\3c\6c\u0360\nc\rc\16c\u0361\3c\3c\3c\3c\3")
buf.write("c\3c\6c\u036a\nc\rc\16c\u036b\3c\3c\3c\3c\3c\6c\u0373")
buf.write("\nc\rc\16c\u0374\3c\3c\3c\3c\3c\6c\u037c\nc\rc\16c\u037d")
buf.write("\3c\3c\3c\3c\3c\3c\3c\3c\3c\3c\3c\3c\3c\6c\u038d\nc\r")
buf.write("c\16c\u038e\3c\3c\5c\u0393\nc\3d\3d\3d\3d\3d\3d\3d\3d")
buf.write("\3d\7d\u039e\nd\fd\16d\u03a1\13d\3d\3d\3d\3d\3d\7d\u03a8")
buf.write("\nd\fd\16d\u03ab\13d\3d\3d\3d\3d\3d\3d\7d\u03b3\nd\fd")
buf.write("\16d\u03b6\13d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d")
buf.write("\3d\3d\3d\3d\7d\u03c9\nd\fd\16d\u03cc\13d\3d\3d\3d\3d")
buf.write("\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\6d\u03e6\nd\rd\16d\u03e7\3d\3d\3d\6d\u03ed\nd")
buf.write("\rd\16d\u03ee\3d\3d\3d\3d\3d\3d\3d\6d\u03f8\nd\rd\16d")
buf.write("\u03f9\3d\3d\3d\3d\3d\6d\u0401\nd\rd\16d\u0402\3d\3d\3")
buf.write("d\3d\3d\3d\3d\3d\7d\u040d\nd\fd\16d\u0410\13d\3d\3d\3")
buf.write("d\3d\3d\3d\3d\3d\5d\u041a\nd\3d\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\6d\u0439\nd\rd\16d\u043a\3d\3d\3d\6d\u0440\nd")
buf.write("\rd\16d\u0441\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\7")
buf.write("d\u0451\nd\fd\16d\u0454\13d\3d\3d\3d\3d\3d\3d\3d\3d\6")
buf.write("d\u045e\nd\rd\16d\u045f\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d")
buf.write("\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\6d\u049b\nd\rd\16d")
buf.write("\u049c\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\3d\3d\3d\3d\5d\u04d9\nd\3e\3e\3f\3f\3f\3f\3f\3")
buf.write("f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3f\3")
buf.write("f\3f\3f\3f\3f\3f\5f\u04fa\nf\3g\3g\3g\3g\3g\3g\3g\3g\5")
buf.write("g\u0504\ng\3h\3h\3i\3i\3i\5i\u050b\ni\3j\3j\3j\3j\3j\3")
buf.write("j\3j\3j\3j\3j\3j\3j\3j\3j\6j\u051b\nj\rj\16j\u051c\3j")
buf.write("\3j\3j\6j\u0522\nj\rj\16j\u0523\3j\3j\3j\5j\u0529\nj\3")
buf.write("k\3k\3k\3k\3k\3k\3k\3k\3k\3k\3k\3k\3k\5k\u0538\nk\3l\3")
buf.write("l\3l\3l\3l\3m\3m\3m\3m\3m\3n\3n\3o\3o\3p\3p\7p\u054a\n")
buf.write("p\fp\16p\u054d\13p\3p\3p\3q\3q\7q\u0553\nq\fq\16q\u0556")
buf.write("\13q\3q\3q\3r\3r\6r\u055c\nr\rr\16r\u055d\3r\3r\3s\3s")
buf.write("\7s\u0564\ns\fs\16s\u0567\13s\3s\3s\3t\3t\3u\3u\3v\3v")
buf.write("\7v\u0571\nv\fv\16v\u0574\13v\3v\3v\3w\3w\7w\u057a\nw")
buf.write("\fw\16w\u057d\13w\3w\3w\3x\3x\6x\u0583\nx\rx\16x\u0584")
buf.write("\3x\3x\3y\3y\3y\3y\3y\3y\3y\3y\3y\3y\3y\5y\u0594\ny\3")
buf.write("z\3z\3z\3z\3z\3z\3z\3z\5z\u059e\nz\3z\2\2{\2\4\6\b\n\f")
buf.write("\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@")
buf.write("BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086")
buf.write("\u0088\u008a\u008c\u008e\u0090\u0092\u0094\u0096\u0098")
buf.write("\u009a\u009c\u009e\u00a0\u00a2\u00a4\u00a6\u00a8\u00aa")
buf.write("\u00ac\u00ae\u00b0\u00b2\u00b4\u00b6\u00b8\u00ba\u00bc")
buf.write("\u00be\u00c0\u00c2\u00c4\u00c6\u00c8\u00ca\u00cc\u00ce")
buf.write("\u00d0\u00d2\u00d4\u00d6\u00d8\u00da\u00dc\u00de\u00e0")
buf.write("\u00e2\u00e4\u00e6\u00e8\u00ea\u00ec\u00ee\u00f0\u00f2")
buf.write("\2\t\3\2R]\3\2\13\34\3\2c\u008b\4\2VV\\\\\4\2\21\21\31")
buf.write("\31\4\2\17\17\22\22\5\2\26\26\32\32\34\34\2\u05fc\2\u00f4")
buf.write("\3\2\2\2\4\u00f7\3\2\2\2\6\u00fa\3\2\2\2\b\u00fe\3\2\2")
buf.write("\2\n\u0100\3\2\2\2\f\u0102\3\2\2\2\16\u0104\3\2\2\2\20")
buf.write("\u0108\3\2\2\2\22\u010a\3\2\2\2\24\u010c\3\2\2\2\26\u010e")
buf.write("\3\2\2\2\30\u0110\3\2\2\2\32\u0112\3\2\2\2\34\u0114\3")
buf.write("\2\2\2\36\u0119\3\2\2\2 \u0129\3\2\2\2\"\u0136\3\2\2\2")
buf.write("$\u013a\3\2\2\2&\u0147\3\2\2\2(\u0153\3\2\2\2*\u0159\3")
buf.write("\2\2\2,\u0165\3\2\2\2.\u016e\3\2\2\2\60\u0170\3\2\2\2")
buf.write("\62\u0175\3\2\2\2\64\u0184\3\2\2\2\66\u0186\3\2\2\28\u01dc")
buf.write("\3\2\2\2:\u01de\3\2\2\2<\u01e9\3\2\2\2>\u0210\3\2\2\2")
buf.write("@\u022c\3\2\2\2B\u024b\3\2\2\2D\u024d\3\2\2\2F\u0269\3")
buf.write("\2\2\2H\u026b\3\2\2\2J\u0275\3\2\2\2L\u027a\3\2\2\2N\u027f")
buf.write("\3\2\2\2P\u02a3\3\2\2\2R\u02a5\3\2\2\2T\u02b2\3\2\2\2")
buf.write("V\u02c4\3\2\2\2X\u02c9\3\2\2\2Z\u02cc\3\2\2\2\\\u02ce")
buf.write("\3\2\2\2^\u02d0\3\2\2\2`\u02d2\3\2\2\2b\u02d4\3\2\2\2")
buf.write("d\u02d6\3\2\2\2f\u02d8\3\2\2\2h\u02da\3\2\2\2j\u02dc\3")
buf.write("\2\2\2l\u02de\3\2\2\2n\u02e0\3\2\2\2p\u02e2\3\2\2\2r\u02e4")
buf.write("\3\2\2\2t\u02e6\3\2\2\2v\u02e8\3\2\2\2x\u02ea\3\2\2\2")
buf.write("z\u02ec\3\2\2\2|\u02ee\3\2\2\2~\u02f0\3\2\2\2\u0080\u02f2")
buf.write("\3\2\2\2\u0082\u02f4\3\2\2\2\u0084\u02f6\3\2\2\2\u0086")
buf.write("\u02f8\3\2\2\2\u0088\u02fa\3\2\2\2\u008a\u02fc\3\2\2\2")
buf.write("\u008c\u02fe\3\2\2\2\u008e\u0300\3\2\2\2\u0090\u0302\3")
buf.write("\2\2\2\u0092\u0304\3\2\2\2\u0094\u0306\3\2\2\2\u0096\u0308")
buf.write("\3\2\2\2\u0098\u030a\3\2\2\2\u009a\u030c\3\2\2\2\u009c")
buf.write("\u030e\3\2\2\2\u009e\u0310\3\2\2\2\u00a0\u0312\3\2\2\2")
buf.write("\u00a2\u0314\3\2\2\2\u00a4\u0316\3\2\2\2\u00a6\u0318\3")
buf.write("\2\2\2\u00a8\u031a\3\2\2\2\u00aa\u031c\3\2\2\2\u00ac\u031e")
buf.write("\3\2\2\2\u00ae\u0320\3\2\2\2\u00b0\u0322\3\2\2\2\u00b2")
buf.write("\u0324\3\2\2\2\u00b4\u0326\3\2\2\2\u00b6\u0328\3\2\2\2")
buf.write("\u00b8\u032a\3\2\2\2\u00ba\u032c\3\2\2\2\u00bc\u032e\3")
buf.write("\2\2\2\u00be\u0330\3\2\2\2\u00c0\u0332\3\2\2\2\u00c2\u0334")
buf.write("\3\2\2\2\u00c4\u0392\3\2\2\2\u00c6\u04d8\3\2\2\2\u00c8")
buf.write("\u04da\3\2\2\2\u00ca\u04f9\3\2\2\2\u00cc\u0503\3\2\2\2")
buf.write("\u00ce\u0505\3\2\2\2\u00d0\u050a\3\2\2\2\u00d2\u0528\3")
buf.write("\2\2\2\u00d4\u0537\3\2\2\2\u00d6\u0539\3\2\2\2\u00d8\u053e")
buf.write("\3\2\2\2\u00da\u0543\3\2\2\2\u00dc\u0545\3\2\2\2\u00de")
buf.write("\u0547\3\2\2\2\u00e0\u0550\3\2\2\2\u00e2\u0559\3\2\2\2")
buf.write("\u00e4\u0561\3\2\2\2\u00e6\u056a\3\2\2\2\u00e8\u056c\3")
buf.write("\2\2\2\u00ea\u056e\3\2\2\2\u00ec\u0577\3\2\2\2\u00ee\u0580")
buf.write("\3\2\2\2\u00f0\u0593\3\2\2\2\u00f2\u059d\3\2\2\2\u00f4")
buf.write("\u00f5\5X-\2\u00f5\u00f6\7\2\2\3\u00f6\3\3\2\2\2\u00f7")
buf.write("\u00f8\5\u00f2z\2\u00f8\u00f9\7\2\2\3\u00f9\5\3\2\2\2")
buf.write("\u00fa\u00fb\t\2\2\2\u00fb\7\3\2\2\2\u00fc\u00ff\5\f\7")
buf.write("\2\u00fd\u00ff\7\u008c\2\2\u00fe\u00fc\3\2\2\2\u00fe\u00fd")
buf.write("\3\2\2\2\u00ff\t\3\2\2\2\u0100\u0101\7\t\2\2\u0101\13")
buf.write("\3\2\2\2\u0102\u0103\t\3\2\2\u0103\r\3\2\2\2\u0104\u0105")
buf.write("\t\4\2\2\u0105\17\3\2\2\2\u0106\u0109\5\b\5\2\u0107\u0109")
buf.write("\5\n\6\2\u0108\u0106\3\2\2\2\u0108\u0107\3\2\2\2\u0109")
buf.write("\21\3\2\2\2\u010a\u010b\7^\2\2\u010b\23\3\2\2\2\u010c")
buf.write("\u010d\7a\2\2\u010d\25\3\2\2\2\u010e\u010f\7`\2\2\u010f")
buf.write("\27\3\2\2\2\u0110\u0111\7_\2\2\u0111\31\3\2\2\2\u0112")
buf.write("\u0113\7\b\2\2\u0113\33\3\2\2\2\u0114\u0115\7\n\2\2\u0115")
buf.write("\35\3\2\2\2\u0116\u011a\5\16\b\2\u0117\u0118\7b\2\2\u0118")
buf.write("\u011a\5\b\5\2\u0119\u0116\3\2\2\2\u0119\u0117\3\2\2\2")
buf.write("\u011a\37\3\2\2\2\u011b\u012a\5\22\n\2\u011c\u012a\5\24")
buf.write("\13\2\u011d\u012a\5\26\f\2\u011e\u012a\5\30\r\2\u011f")
buf.write("\u012a\5\32\16\2\u0120\u012a\5\u00c8e\2\u0121\u012a\5")
buf.write("\34\17\2\u0122\u0123\7\5\2\2\u0123\u0124\7S\2\2\u0124")
buf.write("\u0125\7\3\2\2\u0125\u0126\5\22\n\2\u0126\u0127\5\22\n")
buf.write("\2\u0127\u0128\7\6\2\2\u0128\u012a\3\2\2\2\u0129\u011b")
buf.write("\3\2\2\2\u0129\u011c\3\2\2\2\u0129\u011d\3\2\2\2\u0129")
buf.write("\u011e\3\2\2\2\u0129\u011f\3\2\2\2\u0129\u0120\3\2\2\2")
buf.write("\u0129\u0121\3\2\2\2\u0129\u0122\3\2\2\2\u012a!\3\2\2")
buf.write("\2\u012b\u0137\5 \21\2\u012c\u0137\5\20\t\2\u012d\u0137")
buf.write("\5\36\20\2\u012e\u0132\7\5\2\2\u012f\u0131\5\"\22\2\u0130")
buf.write("\u012f\3\2\2\2\u0131\u0134\3\2\2\2\u0132\u0130\3\2\2\2")
buf.write("\u0132\u0133\3\2\2\2\u0133\u0135\3\2\2\2\u0134\u0132\3")
buf.write("\2\2\2\u0135\u0137\7\6\2\2\u0136\u012b\3\2\2\2\u0136\u012c")
buf.write("\3\2\2\2\u0136\u012d\3\2\2\2\u0136\u012e\3\2\2\2\u0137")
buf.write("#\3\2\2\2\u0138\u013b\5\22\n\2\u0139\u013b\5\20\t\2\u013a")
buf.write("\u0138\3\2\2\2\u013a\u0139\3\2\2\2\u013b%\3\2\2\2\u013c")
buf.write("\u0148\5\20\t\2\u013d\u013e\7\5\2\2\u013e\u013f\7S\2\2")
buf.write("\u013f\u0141\5\20\t\2\u0140\u0142\5$\23\2\u0141\u0140")
buf.write("\3\2\2\2\u0142\u0143\3\2\2\2\u0143\u0141\3\2\2\2\u0143")
buf.write("\u0144\3\2\2\2\u0144\u0145\3\2\2\2\u0145\u0146\7\6\2\2")
buf.write("\u0146\u0148\3\2\2\2\u0147\u013c\3\2\2\2\u0147\u013d\3")
buf.write("\2\2\2\u0148\'\3\2\2\2\u0149\u0154\5 \21\2\u014a\u0154")
buf.write("\5\20\t\2\u014b\u014f\7\5\2\2\u014c\u014e\5\"\22\2\u014d")
buf.write("\u014c\3\2\2\2\u014e\u0151\3\2\2\2\u014f\u014d\3\2\2\2")
buf.write("\u014f\u0150\3\2\2\2\u0150\u0152\3\2\2\2\u0151\u014f\3")
buf.write("\2\2\2\u0152\u0154\7\6\2\2\u0153\u0149\3\2\2\2\u0153\u014a")
buf.write("\3\2\2\2\u0153\u014b\3\2\2\2\u0154)\3\2\2\2\u0155\u015a")
buf.write("\5\36\20\2\u0156\u0157\5\36\20\2\u0157\u0158\5(\25\2\u0158")
buf.write("\u015a\3\2\2\2\u0159\u0155\3\2\2\2\u0159\u0156\3\2\2\2")
buf.write("\u015a+\3\2\2\2\u015b\u0166\5&\24\2\u015c\u015d\7\5\2")
buf.write("\2\u015d\u015f\5&\24\2\u015e\u0160\5,\27\2\u015f\u015e")
buf.write("\3\2\2\2\u0160\u0161\3\2\2\2\u0161\u015f\3\2\2\2\u0161")
buf.write("\u0162\3\2\2\2\u0162\u0163\3\2\2\2\u0163\u0164\7\6\2\2")
buf.write("\u0164\u0166\3\2\2\2\u0165\u015b\3\2\2\2\u0165\u015c\3")
buf.write("\2\2\2\u0166-\3\2\2\2\u0167\u016f\5&\24\2\u0168\u0169")
buf.write("\7\5\2\2\u0169\u016a\7T\2\2\u016a\u016b\5&\24\2\u016b")
buf.write("\u016c\5,\27\2\u016c\u016d\7\6\2\2\u016d\u016f\3\2\2\2")
buf.write("\u016e\u0167\3\2\2\2\u016e\u0168\3\2\2\2\u016f/\3\2\2")
buf.write("\2\u0170\u0171\7\5\2\2\u0171\u0172\5\20\t\2\u0172\u0173")
buf.write("\58\35\2\u0173\u0174\7\6\2\2\u0174\61\3\2\2\2\u0175\u0176")
buf.write("\7\5\2\2\u0176\u0177\5\20\t\2\u0177\u0178\5,\27\2\u0178")
buf.write("\u0179\7\6\2\2\u0179\63\3\2\2\2\u017a\u0185\5\20\t\2\u017b")
buf.write("\u017c\7\5\2\2\u017c\u017e\5\20\t\2\u017d\u017f\5\20\t")
buf.write("\2\u017e\u017d\3\2\2\2\u017f\u0180\3\2\2\2\u0180\u017e")
buf.write("\3\2\2\2\u0180\u0181\3\2\2\2\u0181\u0182\3\2\2\2\u0182")
buf.write("\u0183\7\6\2\2\u0183\u0185\3\2\2\2\u0184\u017a\3\2\2\2")
buf.write("\u0184\u017b\3\2\2\2\u0185\65\3\2\2\2\u0186\u0187\7\5")
buf.write("\2\2\u0187\u0188\5\64\33\2\u0188\u0189\58\35\2\u0189\u018a")
buf.write("\7\6\2\2\u018a\67\3\2\2\2\u018b\u01dd\5 \21\2\u018c\u01dd")
buf.write("\5.\30\2\u018d\u018e\7\5\2\2\u018e\u0190\5.\30\2\u018f")
buf.write("\u0191\58\35\2\u0190\u018f\3\2\2\2\u0191\u0192\3\2\2\2")
buf.write("\u0192\u0190\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0194\3")
buf.write("\2\2\2\u0194\u0195\7\6\2\2\u0195\u01dd\3\2\2\2\u0196\u0197")
buf.write("\7\5\2\2\u0197\u0198\7\5\2\2\u0198\u0199\7S\2\2\u0199")
buf.write("\u019b\5.\30\2\u019a\u019c\58\35\2\u019b\u019a\3\2\2\2")
buf.write("\u019c\u019d\3\2\2\2\u019d\u019b\3\2\2\2\u019d\u019e\3")
buf.write("\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a0\7\6\2\2\u01a0\u01a1")
buf.write("\7\6\2\2\u01a1\u01dd\3\2\2\2\u01a2\u01a3\7\5\2\2\u01a3")
buf.write("\u01a4\7Z\2\2\u01a4\u01a6\7\5\2\2\u01a5\u01a7\5\60\31")
buf.write("\2\u01a6\u01a5\3\2\2\2\u01a7\u01a8\3\2\2\2\u01a8\u01a6")
buf.write("\3\2\2\2\u01a8\u01a9\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa")
buf.write("\u01ab\7\6\2\2\u01ab\u01ac\58\35\2\u01ac\u01ad\7\6\2\2")
buf.write("\u01ad\u01dd\3\2\2\2\u01ae\u01af\7\5\2\2\u01af\u01b0\7")
buf.write("Y\2\2\u01b0\u01b2\7\5\2\2\u01b1\u01b3\5\62\32\2\u01b2")
buf.write("\u01b1\3\2\2\2\u01b3\u01b4\3\2\2\2\u01b4\u01b2\3\2\2\2")
buf.write("\u01b4\u01b5\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b7\7")
buf.write("\6\2\2\u01b7\u01b8\58\35\2\u01b8\u01b9\7\6\2\2\u01b9\u01dd")
buf.write("\3\2\2\2\u01ba\u01bb\7\5\2\2\u01bb\u01bc\7W\2\2\u01bc")
buf.write("\u01be\7\5\2\2\u01bd\u01bf\5\62\32\2\u01be\u01bd\3\2\2")
buf.write("\2\u01bf\u01c0\3\2\2\2\u01c0\u01be\3\2\2\2\u01c0\u01c1")
buf.write("\3\2\2\2\u01c1\u01c2\3\2\2\2\u01c2\u01c3\7\6\2\2\u01c3")
buf.write("\u01c4\58\35\2\u01c4\u01c5\7\6\2\2\u01c5\u01dd\3\2\2\2")
buf.write("\u01c6\u01c7\7\5\2\2\u01c7\u01c8\7[\2\2\u01c8\u01c9\5")
buf.write("8\35\2\u01c9\u01cb\7\5\2\2\u01ca\u01cc\5\66\34\2\u01cb")
buf.write("\u01ca\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd\u01cb\3\2\2\2")
buf.write("\u01cd\u01ce\3\2\2\2\u01ce\u01cf\3\2\2\2\u01cf\u01d0\7")
buf.write("\6\2\2\u01d0\u01d1\7\6\2\2\u01d1\u01dd\3\2\2\2\u01d2\u01d3")
buf.write("\7\5\2\2\u01d3\u01d4\7R\2\2\u01d4\u01d6\58\35\2\u01d5")
buf.write("\u01d7\5*\26\2\u01d6\u01d5\3\2\2\2\u01d7\u01d8\3\2\2\2")
buf.write("\u01d8\u01d6\3\2\2\2\u01d8\u01d9\3\2\2\2\u01d9\u01da\3")
buf.write("\2\2\2\u01da\u01db\7\6\2\2\u01db\u01dd\3\2\2\2\u01dc\u018b")
buf.write("\3\2\2\2\u01dc\u018c\3\2\2\2\u01dc\u018d\3\2\2\2\u01dc")
buf.write("\u0196\3\2\2\2\u01dc\u01a2\3\2\2\2\u01dc\u01ae\3\2\2\2")
buf.write("\u01dc\u01ba\3\2\2\2\u01dc\u01c6\3\2\2\2\u01dc\u01d2\3")
buf.write("\2\2\2\u01dd9\3\2\2\2\u01de\u01df\7\5\2\2\u01df\u01e0")
buf.write("\5&\24\2\u01e0\u01e4\5\22\n\2\u01e1\u01e3\5*\26\2\u01e2")
buf.write("\u01e1\3\2\2\2\u01e3\u01e6\3\2\2\2\u01e4\u01e2\3\2\2\2")
buf.write("\u01e4\u01e5\3\2\2\2\u01e5\u01e7\3\2\2\2\u01e6\u01e4\3")
buf.write("\2\2\2\u01e7\u01e8\7\6\2\2\u01e8;\3\2\2\2\u01e9\u01ea")
buf.write("\t\5\2\2\u01ea=\3\2\2\2\u01eb\u01ec\7\5\2\2\u01ec\u01ed")
buf.write("\5 \21\2\u01ed\u01f1\5,\27\2\u01ee\u01f0\5*\26\2\u01ef")
buf.write("\u01ee\3\2\2\2\u01f0\u01f3\3\2\2\2\u01f1\u01ef\3\2\2\2")
buf.write("\u01f1\u01f2\3\2\2\2\u01f2\u01f4\3\2\2\2\u01f3\u01f1\3")
buf.write("\2\2\2\u01f4\u01f5\7\6\2\2\u01f5\u0211\3\2\2\2\u01f6\u01f7")
buf.write("\7\5\2\2\u01f7\u01f8\5<\37\2\u01f8\u01fc\5,\27\2\u01f9")
buf.write("\u01fb\5*\26\2\u01fa\u01f9\3\2\2\2\u01fb\u01fe\3\2\2\2")
buf.write("\u01fc\u01fa\3\2\2\2\u01fc\u01fd\3\2\2\2\u01fd\u01ff\3")
buf.write("\2\2\2\u01fe\u01fc\3\2\2\2\u01ff\u0200\7\6\2\2\u0200\u0211")
buf.write("\3\2\2\2\u0201\u0202\7\5\2\2\u0202\u0204\5&\24\2\u0203")
buf.write("\u0205\5,\27\2\u0204\u0203\3\2\2\2\u0205\u0206\3\2\2\2")
buf.write("\u0206\u0204\3\2\2\2\u0206\u0207\3\2\2\2\u0207\u020b\3")
buf.write("\2\2\2\u0208\u020a\5*\26\2\u0209\u0208\3\2\2\2\u020a\u020d")
buf.write("\3\2\2\2\u020b\u0209\3\2\2\2\u020b\u020c\3\2\2\2\u020c")
buf.write("\u020e\3\2\2\2\u020d\u020b\3\2\2\2\u020e\u020f\7\6\2\2")
buf.write("\u020f\u0211\3\2\2\2\u0210\u01eb\3\2\2\2\u0210\u01f6\3")
buf.write("\2\2\2\u0210\u0201\3\2\2\2\u0211?\3\2\2\2\u0212\u022d")
buf.write("\5> \2\u0213\u0214\7\5\2\2\u0214\u0215\7]\2\2\u0215\u0217")
buf.write("\7\5\2\2\u0216\u0218\5\20\t\2\u0217\u0216\3\2\2\2\u0218")
buf.write("\u0219\3\2\2\2\u0219\u0217\3\2\2\2\u0219\u021a\3\2\2\2")
buf.write("\u021a\u021b\3\2\2\2\u021b\u021c\7\6\2\2\u021c\u021d\7")
buf.write("\5\2\2\u021d\u021f\5&\24\2\u021e\u0220\5,\27\2\u021f\u021e")
buf.write("\3\2\2\2\u0220\u0221\3\2\2\2\u0221\u021f\3\2\2\2\u0221")
buf.write("\u0222\3\2\2\2\u0222\u0226\3\2\2\2\u0223\u0225\5*\26\2")
buf.write("\u0224\u0223\3\2\2\2\u0225\u0228\3\2\2\2\u0226\u0224\3")
buf.write("\2\2\2\u0226\u0227\3\2\2\2\u0227\u0229\3\2\2\2\u0228\u0226")
buf.write("\3\2\2\2\u0229\u022a\7\6\2\2\u022a\u022b\7\6\2\2\u022b")
buf.write("\u022d\3\2\2\2\u022c\u0212\3\2\2\2\u022c\u0213\3\2\2\2")
buf.write("\u022dA\3\2\2\2\u022e\u022f\7\u0084\2\2\u022f\u0231\7")
buf.write("\5\2\2\u0230\u0232\5:\36\2\u0231\u0230\3\2\2\2\u0232\u0233")
buf.write("\3\2\2\2\u0233\u0231\3\2\2\2\u0233\u0234\3\2\2\2\u0234")
buf.write("\u0235\3\2\2\2\u0235\u0236\7\6\2\2\u0236\u024c\3\2\2\2")
buf.write("\u0237\u0238\7l\2\2\u0238\u023a\7\5\2\2\u0239\u023b\5")
buf.write("@!\2\u023a\u0239\3\2\2\2\u023b\u023c\3\2\2\2\u023c\u023a")
buf.write("\3\2\2\2\u023c\u023d\3\2\2\2\u023d\u023e\3\2\2\2\u023e")
buf.write("\u023f\7\6\2\2\u023f\u024c\3\2\2\2\u0240\u0241\7\u0085")
buf.write("\2\2\u0241\u024c\5\32\16\2\u0242\u0243\7m\2\2\u0243\u024c")
buf.write("\5\32\16\2\u0244\u0245\7h\2\2\u0245\u024c\5\32\16\2\u0246")
buf.write("\u0247\7\u0089\2\2\u0247\u024c\5\32\16\2\u0248\u0249\7")
buf.write("u\2\2\u0249\u024c\5\32\16\2\u024a\u024c\5*\26\2\u024b")
buf.write("\u022e\3\2\2\2\u024b\u0237\3\2\2\2\u024b\u0240\3\2\2\2")
buf.write("\u024b\u0242\3\2\2\2\u024b\u0244\3\2\2\2\u024b\u0246\3")
buf.write("\2\2\2\u024b\u0248\3\2\2\2\u024b\u024a\3\2\2\2\u024cC")
buf.write("\3\2\2\2\u024d\u024e\7\5\2\2\u024e\u024f\7\30\2\2\u024f")
buf.write("\u0251\5\20\t\2\u0250\u0252\5B\"\2\u0251\u0250\3\2\2\2")
buf.write("\u0252\u0253\3\2\2\2\u0253\u0251\3\2\2\2\u0253\u0254\3")
buf.write("\2\2\2\u0254\u0255\3\2\2\2\u0255\u0256\7\6\2\2\u0256E")
buf.write("\3\2\2\2\u0257\u0258\7\u0088\2\2\u0258\u025a\7\5\2\2\u0259")
buf.write("\u025b\5\20\t\2\u025a\u0259\3\2\2\2\u025b\u025c\3\2\2")
buf.write("\2\u025c\u025a\3\2\2\2\u025c\u025d\3\2\2\2\u025d\u025e")
buf.write("\3\2\2\2\u025e\u025f\7\6\2\2\u025f\u026a\3\2\2\2\u0260")
buf.write("\u0261\7p\2\2\u0261\u026a\5\32\16\2\u0262\u0263\7k\2\2")
buf.write("\u0263\u026a\5\32\16\2\u0264\u0265\7\u0089\2\2\u0265\u026a")
buf.write("\5\32\16\2\u0266\u0267\7u\2\2\u0267\u026a\5\32\16\2\u0268")
buf.write("\u026a\5*\26\2\u0269\u0257\3\2\2\2\u0269\u0260\3\2\2\2")
buf.write("\u0269\u0262\3\2\2\2\u0269\u0264\3\2\2\2\u0269\u0266\3")
buf.write("\2\2\2\u0269\u0268\3\2\2\2\u026aG\3\2\2\2\u026b\u026c")
buf.write("\7\5\2\2\u026c\u026d\7\24\2\2\u026d\u026f\5\20\t\2\u026e")
buf.write("\u0270\5F$\2\u026f\u026e\3\2\2\2\u0270\u0271\3\2\2\2\u0271")
buf.write("\u026f\3\2\2\2\u0271\u0272\3\2\2\2\u0272\u0273\3\2\2\2")
buf.write("\u0273\u0274\7\6\2\2\u0274I\3\2\2\2\u0275\u0276\7\5\2")
buf.write("\2\u0276\u0277\5\20\t\2\u0277\u0278\5\22\n\2\u0278\u0279")
buf.write("\7\6\2\2\u0279K\3\2\2\2\u027a\u027b\7\5\2\2\u027b\u027c")
buf.write("\5\20\t\2\u027c\u027d\5,\27\2\u027d\u027e\7\6\2\2\u027e")
buf.write("M\3\2\2\2\u027f\u0280\7\5\2\2\u0280\u0284\5\20\t\2\u0281")
buf.write("\u0283\5L\'\2\u0282\u0281\3\2\2\2\u0283\u0286\3\2\2\2")
buf.write("\u0284\u0282\3\2\2\2\u0284\u0285\3\2\2\2\u0285\u0287\3")
buf.write("\2\2\2\u0286\u0284\3\2\2\2\u0287\u0288\7\6\2\2\u0288O")
buf.write("\3\2\2\2\u0289\u028b\7\5\2\2\u028a\u028c\5N(\2\u028b\u028a")
buf.write("\3\2\2\2\u028c\u028d\3\2\2\2\u028d\u028b\3\2\2\2\u028d")
buf.write("\u028e\3\2\2\2\u028e\u028f\3\2\2\2\u028f\u0290\7\6\2\2")
buf.write("\u0290\u02a4\3\2\2\2\u0291\u0292\7\5\2\2\u0292\u0293\7")
buf.write("]\2\2\u0293\u0295\7\5\2\2\u0294\u0296\5\20\t\2\u0295\u0294")
buf.write("\3\2\2\2\u0296\u0297\3\2\2\2\u0297\u0295\3\2\2\2\u0297")
buf.write("\u0298\3\2\2\2\u0298\u0299\3\2\2\2\u0299\u029a\7\6\2\2")
buf.write("\u029a\u029c\7\5\2\2\u029b\u029d\5N(\2\u029c\u029b\3\2")
buf.write("\2\2\u029d\u029e\3\2\2\2\u029e\u029c\3\2\2\2\u029e\u029f")
buf.write("\3\2\2\2\u029f\u02a0\3\2\2\2\u02a0\u02a1\7\6\2\2\u02a1")
buf.write("\u02a2\7\6\2\2\u02a2\u02a4\3\2\2\2\u02a3\u0289\3\2\2\2")
buf.write("\u02a3\u0291\3\2\2\2\u02a4Q\3\2\2\2\u02a5\u02a6\7\5\2")
buf.write("\2\u02a6\u02a7\5\20\t\2\u02a7\u02ab\7\5\2\2\u02a8\u02aa")
buf.write("\5\62\32\2\u02a9\u02a8\3\2\2\2\u02aa\u02ad\3\2\2\2\u02ab")
buf.write("\u02a9\3\2\2\2\u02ab\u02ac\3\2\2\2\u02ac\u02ae\3\2\2\2")
buf.write("\u02ad\u02ab\3\2\2\2\u02ae\u02af\7\6\2\2\u02af\u02b0\5")
buf.write(",\27\2\u02b0\u02b1\7\6\2\2\u02b1S\3\2\2\2\u02b2\u02b3")
buf.write("\5\20\t\2\u02b3\u02b7\7\5\2\2\u02b4\u02b6\5\62\32\2\u02b5")
buf.write("\u02b4\3\2\2\2\u02b6\u02b9\3\2\2\2\u02b7\u02b5\3\2\2\2")
buf.write("\u02b7\u02b8\3\2\2\2\u02b8\u02ba\3\2\2\2\u02b9\u02b7\3")
buf.write("\2\2\2\u02ba\u02bb\7\6\2\2\u02bb\u02bc\5,\27\2\u02bc\u02bd")
buf.write("\58\35\2\u02bdU\3\2\2\2\u02be\u02c5\5\20\t\2\u02bf\u02c0")
buf.write("\7\5\2\2\u02c0\u02c1\7\13\2\2\u02c1\u02c2\5\20\t\2\u02c2")
buf.write("\u02c3\7\6\2\2\u02c3\u02c5\3\2\2\2\u02c4\u02be\3\2\2\2")
buf.write("\u02c4\u02bf\3\2\2\2\u02c5W\3\2\2\2\u02c6\u02c8\5\u00c6")
buf.write("d\2\u02c7\u02c6\3\2\2\2\u02c8\u02cb\3\2\2\2\u02c9\u02c7")
buf.write("\3\2\2\2\u02c9\u02ca\3\2\2\2\u02caY\3\2\2\2\u02cb\u02c9")
buf.write("\3\2\2\2\u02cc\u02cd\7\35\2\2\u02cd[\3\2\2\2\u02ce\u02cf")
buf.write("\7\36\2\2\u02cf]\3\2\2\2\u02d0\u02d1\7\37\2\2\u02d1_\3")
buf.write("\2\2\2\u02d2\u02d3\7 \2\2\u02d3a\3\2\2\2\u02d4\u02d5\7")
buf.write("!\2\2\u02d5c\3\2\2\2\u02d6\u02d7\7\"\2\2\u02d7e\3\2\2")
buf.write("\2\u02d8\u02d9\7$\2\2\u02d9g\3\2\2\2\u02da\u02db\7%\2")
buf.write("\2\u02dbi\3\2\2\2\u02dc\u02dd\7&\2\2\u02ddk\3\2\2\2\u02de")
buf.write("\u02df\7\'\2\2\u02dfm\3\2\2\2\u02e0\u02e1\7(\2\2\u02e1")
buf.write("o\3\2\2\2\u02e2\u02e3\7)\2\2\u02e3q\3\2\2\2\u02e4\u02e5")
buf.write("\7*\2\2\u02e5s\3\2\2\2\u02e6\u02e7\7+\2\2\u02e7u\3\2\2")
buf.write("\2\u02e8\u02e9\7,\2\2\u02e9w\3\2\2\2\u02ea\u02eb\7-\2")
buf.write("\2\u02eby\3\2\2\2\u02ec\u02ed\7.\2\2\u02ed{\3\2\2\2\u02ee")
buf.write("\u02ef\7/\2\2\u02ef}\3\2\2\2\u02f0\u02f1\7\60\2\2\u02f1")
buf.write("\177\3\2\2\2\u02f2\u02f3\7\61\2\2\u02f3\u0081\3\2\2\2")
buf.write("\u02f4\u02f5\7\62\2\2\u02f5\u0083\3\2\2\2\u02f6\u02f7")
buf.write("\7\63\2\2\u02f7\u0085\3\2\2\2\u02f8\u02f9\7\64\2\2\u02f9")
buf.write("\u0087\3\2\2\2\u02fa\u02fb\7\65\2\2\u02fb\u0089\3\2\2")
buf.write("\2\u02fc\u02fd\7\66\2\2\u02fd\u008b\3\2\2\2\u02fe\u02ff")
buf.write("\7\67\2\2\u02ff\u008d\3\2\2\2\u0300\u0301\78\2\2\u0301")
buf.write("\u008f\3\2\2\2\u0302\u0303\79\2\2\u0303\u0091\3\2\2\2")
buf.write("\u0304\u0305\7:\2\2\u0305\u0093\3\2\2\2\u0306\u0307\7")
buf.write(";\2\2\u0307\u0095\3\2\2\2\u0308\u0309\7<\2\2\u0309\u0097")
buf.write("\3\2\2\2\u030a\u030b\7=\2\2\u030b\u0099\3\2\2\2\u030c")
buf.write("\u030d\7?\2\2\u030d\u009b\3\2\2\2\u030e\u030f\7@\2\2\u030f")
buf.write("\u009d\3\2\2\2\u0310\u0311\7#\2\2\u0311\u009f\3\2\2\2")
buf.write("\u0312\u0313\7A\2\2\u0313\u00a1\3\2\2\2\u0314\u0315\7")
buf.write("B\2\2\u0315\u00a3\3\2\2\2\u0316\u0317\7C\2\2\u0317\u00a5")
buf.write("\3\2\2\2\u0318\u0319\7>\2\2\u0319\u00a7\3\2\2\2\u031a")
buf.write("\u031b\7D\2\2\u031b\u00a9\3\2\2\2\u031c\u031d\7E\2\2\u031d")
buf.write("\u00ab\3\2\2\2\u031e\u031f\7F\2\2\u031f\u00ad\3\2\2\2")
buf.write("\u0320\u0321\7G\2\2\u0321\u00af\3\2\2\2\u0322\u0323\7")
buf.write("H\2\2\u0323\u00b1\3\2\2\2\u0324\u0325\7I\2\2\u0325\u00b3")
buf.write("\3\2\2\2\u0326\u0327\7J\2\2\u0327\u00b5\3\2\2\2\u0328")
buf.write("\u0329\7K\2\2\u0329\u00b7\3\2\2\2\u032a\u032b\7L\2\2\u032b")
buf.write("\u00b9\3\2\2\2\u032c\u032d\7M\2\2\u032d\u00bb\3\2\2\2")
buf.write("\u032e\u032f\7N\2\2\u032f\u00bd\3\2\2\2\u0330\u0331\7")
buf.write("O\2\2\u0331\u00bf\3\2\2\2\u0332\u0333\7P\2\2\u0333\u00c1")
buf.write("\3\2\2\2\u0334\u0335\7Q\2\2\u0335\u00c3\3\2\2\2\u0336")
buf.write("\u0393\5&\24\2\u0337\u0393\7\37\2\2\u0338\u0339\7\5\2")
buf.write("\2\u0339\u033a\7R\2\2\u033a\u033c\5\u00c4c\2\u033b\u033d")
buf.write("\5*\26\2\u033c\u033b\3\2\2\2\u033c\u033d\3\2\2\2\u033d")
buf.write("\u033e\3\2\2\2\u033e\u033f\7\6\2\2\u033f\u0393\3\2\2\2")
buf.write("\u0340\u0341\7\5\2\2\u0341\u0343\5\u00b4[\2\u0342\u0344")
buf.write("\5\u00c4c\2\u0343\u0342\3\2\2\2\u0344\u0345\3\2\2\2\u0345")
buf.write("\u0343\3\2\2\2\u0345\u0346\3\2\2\2\u0346\u0347\3\2\2\2")
buf.write("\u0347\u0348\7\6\2\2\u0348\u0393\3\2\2\2\u0349\u034a\7")
buf.write("\5\2\2\u034a\u034c\5\u00b6\\\2\u034b\u034d\5\u00c4c\2")
buf.write("\u034c\u034b\3\2\2\2\u034d\u034e\3\2\2\2\u034e\u034c\3")
buf.write("\2\2\2\u034e\u034f\3\2\2\2\u034f\u0350\3\2\2\2\u0350\u0351")
buf.write("\7\6\2\2\u0351\u0393\3\2\2\2\u0352\u0353\7\5\2\2\u0353")
buf.write("\u0355\5\u00b8]\2\u0354\u0356\5\u00c4c\2\u0355\u0354\3")
buf.write("\2\2\2\u0356\u0357\3\2\2\2\u0357\u0355\3\2\2\2\u0357\u0358")
buf.write("\3\2\2\2\u0358\u0359\3\2\2\2\u0359\u035a\5\u00c4c\2\u035a")
buf.write("\u035b\7\6\2\2\u035b\u0393\3\2\2\2\u035c\u035d\7\5\2\2")
buf.write("\u035d\u035f\5\u00ba^\2\u035e\u0360\5\u00c4c\2\u035f\u035e")
buf.write("\3\2\2\2\u0360\u0361\3\2\2\2\u0361\u035f\3\2\2\2\u0361")
buf.write("\u0362\3\2\2\2\u0362\u0363\3\2\2\2\u0363\u0364\5\u00c4")
buf.write("c\2\u0364\u0365\7\6\2\2\u0365\u0393\3\2\2\2\u0366\u0367")
buf.write("\7\5\2\2\u0367\u0369\5\u00bc_\2\u0368\u036a\5\u00c4c\2")
buf.write("\u0369\u0368\3\2\2\2\u036a\u036b\3\2\2\2\u036b\u0369\3")
buf.write("\2\2\2\u036b\u036c\3\2\2\2\u036c\u036d\3\2\2\2\u036d\u036e")
buf.write("\7\6\2\2\u036e\u0393\3\2\2\2\u036f\u0370\7\5\2\2\u0370")
buf.write("\u0372\5\u00be`\2\u0371\u0373\5\u00c4c\2\u0372\u0371\3")
buf.write("\2\2\2\u0373\u0374\3\2\2\2\u0374\u0372\3\2\2\2\u0374\u0375")
buf.write("\3\2\2\2\u0375\u0376\3\2\2\2\u0376\u0377\7\6\2\2\u0377")
buf.write("\u0393\3\2\2\2\u0378\u0379\7\5\2\2\u0379\u037b\5\u00c0")
buf.write("a\2\u037a\u037c\5\u00c4c\2\u037b\u037a\3\2\2\2\u037c\u037d")
buf.write("\3\2\2\2\u037d\u037b\3\2\2\2\u037d\u037e\3\2\2\2\u037e")
buf.write("\u037f\3\2\2\2\u037f\u0380\5\24\13\2\u0380\u0381\7\6\2")
buf.write("\2\u0381\u0393\3\2\2\2\u0382\u0383\7\5\2\2\u0383\u0384")
buf.write("\5\u00c2b\2\u0384\u0385\5\u00c4c\2\u0385\u0386\5*\26\2")
buf.write("\u0386\u0387\7\6\2\2\u0387\u0393\3\2\2\2\u0388\u0389\7")
buf.write("\5\2\2\u0389\u038c\5\u0086D\2\u038a\u038d\5\32\16\2\u038b")
buf.write("\u038d\5\20\t\2\u038c\u038a\3\2\2\2\u038c\u038b\3\2\2")
buf.write("\2\u038d\u038e\3\2\2\2\u038e\u038c\3\2\2\2\u038e\u038f")
buf.write("\3\2\2\2\u038f\u0390\3\2\2\2\u0390\u0391\7\6\2\2\u0391")
buf.write("\u0393\3\2\2\2\u0392\u0336\3\2\2\2\u0392\u0337\3\2\2\2")
buf.write("\u0392\u0338\3\2\2\2\u0392\u0340\3\2\2\2\u0392\u0349\3")
buf.write("\2\2\2\u0392\u0352\3\2\2\2\u0392\u035c\3\2\2\2\u0392\u0366")
buf.write("\3\2\2\2\u0392\u036f\3\2\2\2\u0392\u0378\3\2\2\2\u0392")
buf.write("\u0382\3\2\2\2\u0392\u0388\3\2\2\2\u0393\u00c5\3\2\2\2")
buf.write("\u0394\u0395\7\5\2\2\u0395\u0396\5Z.\2\u0396\u0397\58")
buf.write("\35\2\u0397\u0398\7\6\2\2\u0398\u04d9\3\2\2\2\u0399\u039a")
buf.write("\7\5\2\2\u039a\u039b\5\\/\2\u039b\u039f\58\35\2\u039c")
buf.write("\u039e\5*\26\2\u039d\u039c\3\2\2\2\u039e\u03a1\3\2\2\2")
buf.write("\u039f\u039d\3\2\2\2\u039f\u03a0\3\2\2\2\u03a0\u03a2\3")
buf.write("\2\2\2\u03a1\u039f\3\2\2\2\u03a2\u03a3\7\6\2\2\u03a3\u04d9")
buf.write("\3\2\2\2\u03a4\u03a5\7\5\2\2\u03a5\u03a9\5`\61\2\u03a6")
buf.write("\u03a8\58\35\2\u03a7\u03a6\3\2\2\2\u03a8\u03ab\3\2\2\2")
buf.write("\u03a9\u03a7\3\2\2\2\u03a9\u03aa\3\2\2\2\u03aa\u03ac\3")
buf.write("\2\2\2\u03ab\u03a9\3\2\2\2\u03ac\u03ad\7\6\2\2\u03ad\u04d9")
buf.write("\3\2\2\2\u03ae\u03af\7\5\2\2\u03af\u03b0\5b\62\2\u03b0")
buf.write("\u03b4\7\5\2\2\u03b1\u03b3\58\35\2\u03b2\u03b1\3\2\2\2")
buf.write("\u03b3\u03b6\3\2\2\2\u03b4\u03b2\3\2\2\2\u03b4\u03b5\3")
buf.write("\2\2\2\u03b5\u03b7\3\2\2\2\u03b6\u03b4\3\2\2\2\u03b7\u03b8")
buf.write("\7\6\2\2\u03b8\u03b9\7\6\2\2\u03b9\u04d9\3\2\2\2\u03ba")
buf.write("\u03bb\7\5\2\2\u03bb\u03bc\5f\64\2\u03bc\u03bd\58\35\2")
buf.write("\u03bd\u03be\7\6\2\2\u03be\u04d9\3\2\2\2\u03bf\u03c0\7")
buf.write("\5\2\2\u03c0\u03c1\5h\65\2\u03c1\u03c2\58\35\2\u03c2\u03c3")
buf.write("\7\6\2\2\u03c3\u04d9\3\2\2\2\u03c4\u03c5\7\5\2\2\u03c5")
buf.write("\u03c6\5^\60\2\u03c6\u03ca\58\35\2\u03c7\u03c9\5*\26\2")
buf.write("\u03c8\u03c7\3\2\2\2\u03c9\u03cc\3\2\2\2\u03ca\u03c8\3")
buf.write("\2\2\2\u03ca\u03cb\3\2\2\2\u03cb\u03cd\3\2\2\2\u03cc\u03ca")
buf.write("\3\2\2\2\u03cd\u03ce\7\6\2\2\u03ce\u04d9\3\2\2\2\u03cf")
buf.write("\u03d0\7\5\2\2\u03d0\u03d1\5j\66\2\u03d1\u03d2\5\20\t")
buf.write("\2\u03d2\u03d3\5,\27\2\u03d3\u03d4\7\6\2\2\u03d4\u04d9")
buf.write("\3\2\2\2\u03d5\u03d6\7\5\2\2\u03d6\u03d7\5l\67\2\u03d7")
buf.write("\u03d8\5\20\t\2\u03d8\u03d9\5P)\2\u03d9\u03da\7\6\2\2")
buf.write("\u03da\u04d9\3\2\2\2\u03db\u03dc\7\5\2\2\u03dc\u03dd\5")
buf.write("n8\2\u03dd\u03de\5\20\t\2\u03de\u03df\5P)\2\u03df\u03e0")
buf.write("\7\6\2\2\u03e0\u04d9\3\2\2\2\u03e1\u03e2\7\5\2\2\u03e2")
buf.write("\u03e3\5p9\2\u03e3\u03e5\7\5\2\2\u03e4\u03e6\5J&\2\u03e5")
buf.write("\u03e4\3\2\2\2\u03e6\u03e7\3\2\2\2\u03e7\u03e5\3\2\2\2")
buf.write("\u03e7\u03e8\3\2\2\2\u03e8\u03e9\3\2\2\2\u03e9\u03ea\7")
buf.write("\6\2\2\u03ea\u03ec\7\5\2\2\u03eb\u03ed\5P)\2\u03ec\u03eb")
buf.write("\3\2\2\2\u03ed\u03ee\3\2\2\2\u03ee\u03ec\3\2\2\2\u03ee")
buf.write("\u03ef\3\2\2\2\u03ef\u03f0\3\2\2\2\u03f0\u03f1\7\6\2\2")
buf.write("\u03f1\u03f2\7\6\2\2\u03f2\u04d9\3\2\2\2\u03f3\u03f4\7")
buf.write("\5\2\2\u03f4\u03f5\5r:\2\u03f5\u03f7\7\5\2\2\u03f6\u03f8")
buf.write("\5J&\2\u03f7\u03f6\3\2\2\2\u03f8\u03f9\3\2\2\2\u03f9\u03f7")
buf.write("\3\2\2\2\u03f9\u03fa\3\2\2\2\u03fa\u03fb\3\2\2\2\u03fb")
buf.write("\u03fc\7\6\2\2\u03fc\u03fd\5P)\2\u03fd\u03fe\7\6\2\2\u03fe")
buf.write("\u0400\7\5\2\2\u03ff\u0401\5P)\2\u0400\u03ff\3\2\2\2\u0401")
buf.write("\u0402\3\2\2\2\u0402\u0400\3\2\2\2\u0402\u0403\3\2\2\2")
buf.write("\u0403\u0404\3\2\2\2\u0404\u0405\7\6\2\2\u0405\u0406\7")
buf.write("\6\2\2\u0406\u04d9\3\2\2\2\u0407\u0408\7\5\2\2\u0408\u0409")
buf.write("\5t;\2\u0409\u040a\5\20\t\2\u040a\u040e\7\5\2\2\u040b")
buf.write("\u040d\5,\27\2\u040c\u040b\3\2\2\2\u040d\u0410\3\2\2\2")
buf.write("\u040e\u040c\3\2\2\2\u040e\u040f\3\2\2\2\u040f\u0411\3")
buf.write("\2\2\2\u0410\u040e\3\2\2\2\u0411\u0412\7\6\2\2\u0412\u0413")
buf.write("\5,\27\2\u0413\u0414\7\6\2\2\u0414\u04d9\3\2\2\2\u0415")
buf.write("\u0416\7\5\2\2\u0416\u0417\5v<\2\u0417\u0419\5\20\t\2")
buf.write("\u0418\u041a\5\22\n\2\u0419\u0418\3\2\2\2\u0419\u041a")
buf.write("\3\2\2\2\u041a\u041b\3\2\2\2\u041b\u041c\7\6\2\2\u041c")
buf.write("\u04d9\3\2\2\2\u041d\u041e\7\5\2\2\u041e\u041f\5x=\2\u041f")
buf.write("\u0420\5\20\t\2\u0420\u0421\58\35\2\u0421\u0422\7\6\2")
buf.write("\2\u0422\u04d9\3\2\2\2\u0423\u0424\7\5\2\2\u0424\u0425")
buf.write("\5z>\2\u0425\u0426\5T+\2\u0426\u0427\7\6\2\2\u0427\u04d9")
buf.write("\3\2\2\2\u0428\u0429\7\5\2\2\u0429\u042a\5|?\2\u042a\u042b")
buf.write("\5\20\t\2\u042b\u042c\5,\27\2\u042c\u042d\58\35\2\u042d")
buf.write("\u042e\7\6\2\2\u042e\u04d9\3\2\2\2\u042f\u0430\7\5\2\2")
buf.write("\u0430\u0431\5~@\2\u0431\u0432\5T+\2\u0432\u0433\7\6\2")
buf.write("\2\u0433\u04d9\3\2\2\2\u0434\u0435\7\5\2\2\u0435\u0436")
buf.write("\5\u0080A\2\u0436\u0438\7\5\2\2\u0437\u0439\5R*\2\u0438")
buf.write("\u0437\3\2\2\2\u0439\u043a\3\2\2\2\u043a\u0438\3\2\2\2")
buf.write("\u043a\u043b\3\2\2\2\u043b\u043c\3\2\2\2\u043c\u043d\7")
buf.write("\6\2\2\u043d\u043f\7\5\2\2\u043e\u0440\58\35\2\u043f\u043e")
buf.write("\3\2\2\2\u0440\u0441\3\2\2\2\u0441\u043f\3\2\2\2\u0441")
buf.write("\u0442\3\2\2\2\u0442\u0443\3\2\2\2\u0443\u0444\7\6\2\2")
buf.write("\u0444\u0445\7\6\2\2\u0445\u04d9\3\2\2\2\u0446\u0447\7")
buf.write("\5\2\2\u0447\u0448\5\u0084C\2\u0448\u0449\58\35\2\u0449")
buf.write("\u044a\7\6\2\2\u044a\u04d9\3\2\2\2\u044b\u044c\7\5\2\2")
buf.write("\u044c\u044d\5\u0082B\2\u044d\u044e\5\20\t\2\u044e\u0452")
buf.write("\7\5\2\2\u044f\u0451\5\20\t\2\u0450\u044f\3\2\2\2\u0451")
buf.write("\u0454\3\2\2\2\u0452\u0450\3\2\2\2\u0452\u0453\3\2\2\2")
buf.write("\u0453\u0455\3\2\2\2\u0454\u0452\3\2\2\2\u0455\u0456\7")
buf.write("\6\2\2\u0456\u0457\5,\27\2\u0457\u0458\7\6\2\2\u0458\u04d9")
buf.write("\3\2\2\2\u0459\u045a\7\5\2\2\u045a\u045d\5\u0086D\2\u045b")
buf.write("\u045e\5\32\16\2\u045c\u045e\5\20\t\2\u045d\u045b\3\2")
buf.write("\2\2\u045d\u045c\3\2\2\2\u045e\u045f\3\2\2\2\u045f\u045d")
buf.write("\3\2\2\2\u045f\u0460\3\2\2\2\u0460\u0461\3\2\2\2\u0461")
buf.write("\u0462\7\6\2\2\u0462\u04d9\3\2\2\2\u0463\u0464\7\5\2\2")
buf.write("\u0464\u0465\5\u0088E\2\u0465\u0466\58\35\2\u0466\u0467")
buf.write("\7\6\2\2\u0467\u04d9\3\2\2\2\u0468\u0469\7\5\2\2\u0469")
buf.write("\u046a\5\u008aF\2\u046a\u046b\7\6\2\2\u046b\u04d9\3\2")
buf.write("\2\2\u046c\u046d\7\5\2\2\u046d\u046e\5\u008cG\2\u046e")
buf.write("\u046f\7\6\2\2\u046f\u04d9\3\2\2\2\u0470\u0471\7\5\2\2")
buf.write("\u0471\u0472\5\u008eH\2\u0472\u0473\7\6\2\2\u0473\u04d9")
buf.write("\3\2\2\2\u0474\u0475\7\5\2\2\u0475\u0476\5\u0090I\2\u0476")
buf.write("\u0477\7\6\2\2\u0477\u04d9\3\2\2\2\u0478\u0479\7\5\2\2")
buf.write("\u0479\u047a\5\u0092J\2\u047a\u047b\5\u00ccg\2\u047b\u047c")
buf.write("\7\6\2\2\u047c\u04d9\3\2\2\2\u047d\u047e\7\5\2\2\u047e")
buf.write("\u047f\5\u0094K\2\u047f\u0480\7\6\2\2\u0480\u04d9\3\2")
buf.write("\2\2\u0481\u0482\7\5\2\2\u0482\u0483\5\u0096L\2\u0483")
buf.write("\u0484\7\6\2\2\u0484\u04d9\3\2\2\2\u0485\u0486\7\5\2\2")
buf.write("\u0486\u0487\5\u0098M\2\u0487\u0488\5\36\20\2\u0488\u0489")
buf.write("\7\6\2\2\u0489\u04d9\3\2\2\2\u048a\u048b\7\5\2\2\u048b")
buf.write("\u048c\5\u009aN\2\u048c\u048d\7\6\2\2\u048d\u04d9\3\2")
buf.write("\2\2\u048e\u048f\7\5\2\2\u048f\u0490\5\u009cO\2\u0490")
buf.write("\u0491\7\6\2\2\u0491\u04d9\3\2\2\2\u0492\u0493\7\5\2\2")
buf.write("\u0493\u0494\5\u00a0Q\2\u0494\u0495\7\6\2\2\u0495\u04d9")
buf.write("\3\2\2\2\u0496\u0497\7\5\2\2\u0497\u0498\5\u00a2R\2\u0498")
buf.write("\u049a\7\5\2\2\u0499\u049b\58\35\2\u049a\u0499\3\2\2\2")
buf.write("\u049b\u049c\3\2\2\2\u049c\u049a\3\2\2\2\u049c\u049d\3")
buf.write("\2\2\2\u049d\u049e\3\2\2\2\u049e\u049f\7\6\2\2\u049f\u04a0")
buf.write("\7\6\2\2\u04a0\u04d9\3\2\2\2\u04a1\u04a2\7\5\2\2\u04a2")
buf.write("\u04a3\5\u00a6T\2\u04a3\u04a4\58\35\2\u04a4\u04a5\7\6")
buf.write("\2\2\u04a5\u04d9\3\2\2\2\u04a6\u04a7\7\5\2\2\u04a7\u04a8")
buf.write("\5\u00a4S\2\u04a8\u04a9\5\22\n\2\u04a9\u04aa\7\6\2\2\u04aa")
buf.write("\u04d9\3\2\2\2\u04ab\u04ac\7\5\2\2\u04ac\u04ad\5\u00a4")
buf.write("S\2\u04ad\u04ae\7\6\2\2\u04ae\u04d9\3\2\2\2\u04af\u04b0")
buf.write("\7\5\2\2\u04b0\u04b1\5\u00a8U\2\u04b1\u04b2\5\22\n\2\u04b2")
buf.write("\u04b3\7\6\2\2\u04b3\u04d9\3\2\2\2\u04b4\u04b5\7\5\2\2")
buf.write("\u04b5\u04b6\5\u00a8U\2\u04b6\u04b7\7\6\2\2\u04b7\u04d9")
buf.write("\3\2\2\2\u04b8\u04b9\7\5\2\2\u04b9\u04ba\5\u00aaV\2\u04ba")
buf.write("\u04bb\7\6\2\2\u04bb\u04d9\3\2\2\2\u04bc\u04bd\7\5\2\2")
buf.write("\u04bd\u04be\5\u00acW\2\u04be\u04bf\7\6\2\2\u04bf\u04d9")
buf.write("\3\2\2\2\u04c0\u04c1\7\5\2\2\u04c1\u04c2\5\u00aeX\2\u04c2")
buf.write("\u04c3\5*\26\2\u04c3\u04c4\7\6\2\2\u04c4\u04d9\3\2\2\2")
buf.write("\u04c5\u04c6\7\5\2\2\u04c6\u04c7\5\u00b0Y\2\u04c7\u04c8")
buf.write("\5\20\t\2\u04c8\u04c9\7\6\2\2\u04c9\u04d9\3\2\2\2\u04ca")
buf.write("\u04cb\7\5\2\2\u04cb\u04cc\5\u00b2Z\2\u04cc\u04cd\5\u00ca")
buf.write("f\2\u04cd\u04ce\7\6\2\2\u04ce\u04d9\3\2\2\2\u04cf\u04d0")
buf.write("\7\5\2\2\u04d0\u04d1\5d\63\2\u04d1\u04d2\5\u00c4c\2\u04d2")
buf.write("\u04d3\7\6\2\2\u04d3\u04d9\3\2\2\2\u04d4\u04d5\7\5\2\2")
buf.write("\u04d5\u04d6\5\u009eP\2\u04d6\u04d7\7\6\2\2\u04d7\u04d9")
buf.write("\3\2\2\2\u04d8\u0394\3\2\2\2\u04d8\u0399\3\2\2\2\u04d8")
buf.write("\u03a4\3\2\2\2\u04d8\u03ae\3\2\2\2\u04d8\u03ba\3\2\2\2")
buf.write("\u04d8\u03bf\3\2\2\2\u04d8\u03c4\3\2\2\2\u04d8\u03cf\3")
buf.write("\2\2\2\u04d8\u03d5\3\2\2\2\u04d8\u03db\3\2\2\2\u04d8\u03e1")
buf.write("\3\2\2\2\u04d8\u03f3\3\2\2\2\u04d8\u0407\3\2\2\2\u04d8")
buf.write("\u0415\3\2\2\2\u04d8\u041d\3\2\2\2\u04d8\u0423\3\2\2\2")
buf.write("\u04d8\u0428\3\2\2\2\u04d8\u042f\3\2\2\2\u04d8\u0434\3")
buf.write("\2\2\2\u04d8\u0446\3\2\2\2\u04d8\u044b\3\2\2\2\u04d8\u0459")
buf.write("\3\2\2\2\u04d8\u0463\3\2\2\2\u04d8\u0468\3\2\2\2\u04d8")
buf.write("\u046c\3\2\2\2\u04d8\u0470\3\2\2\2\u04d8\u0474\3\2\2\2")
buf.write("\u04d8\u0478\3\2\2\2\u04d8\u047d\3\2\2\2\u04d8\u0481\3")
buf.write("\2\2\2\u04d8\u0485\3\2\2\2\u04d8\u048a\3\2\2\2\u04d8\u048e")
buf.write("\3\2\2\2\u04d8\u0492\3\2\2\2\u04d8\u0496\3\2\2\2\u04d8")
buf.write("\u04a1\3\2\2\2\u04d8\u04a6\3\2\2\2\u04d8\u04ab\3\2\2\2")
buf.write("\u04d8\u04af\3\2\2\2\u04d8\u04b4\3\2\2\2\u04d8\u04b8\3")
buf.write("\2\2\2\u04d8\u04bc\3\2\2\2\u04d8\u04c0\3\2\2\2\u04d8\u04c5")
buf.write("\3\2\2\2\u04d8\u04ca\3\2\2\2\u04d8\u04cf\3\2\2\2\u04d8")
buf.write("\u04d4\3\2\2\2\u04d9\u00c7\3\2\2\2\u04da\u04db\t\6\2\2")
buf.write("\u04db\u00c9\3\2\2\2\u04dc\u04dd\7i\2\2\u04dd\u04fa\5")
buf.write("\32\16\2\u04de\u04df\7n\2\2\u04df\u04fa\5\u00c8e\2\u04e0")
buf.write("\u04e1\7o\2\2\u04e1\u04fa\5\u00c8e\2\u04e2\u04e3\7w\2")
buf.write("\2\u04e3\u04fa\5\u00c8e\2\u04e4\u04e5\7x\2\2\u04e5\u04fa")
buf.write("\5\u00c8e\2\u04e6\u04e7\7y\2\2\u04e7\u04fa\5\u00c8e\2")
buf.write("\u04e8\u04e9\7z\2\2\u04e9\u04fa\5\u00c8e\2\u04ea\u04eb")
buf.write("\7{\2\2\u04eb\u04fa\5\u00c8e\2\u04ec\u04ed\7|\2\2\u04ed")
buf.write("\u04fa\5\u00c8e\2\u04ee\u04ef\7}\2\2\u04ef\u04fa\5\u00c8")
buf.write("e\2\u04f0\u04f1\7~\2\2\u04f1\u04fa\5\22\n\2\u04f2\u04f3")
buf.write("\7\u0080\2\2\u04f3\u04fa\5\32\16\2\u04f4\u04f5\7\u0081")
buf.write("\2\2\u04f5\u04fa\5\22\n\2\u04f6\u04f7\7\u008a\2\2\u04f7")
buf.write("\u04fa\5\22\n\2\u04f8\u04fa\5*\26\2\u04f9\u04dc\3\2\2")
buf.write("\2\u04f9\u04de\3\2\2\2\u04f9\u04e0\3\2\2\2\u04f9\u04e2")
buf.write("\3\2\2\2\u04f9\u04e4\3\2\2\2\u04f9\u04e6\3\2\2\2\u04f9")
buf.write("\u04e8\3\2\2\2\u04f9\u04ea\3\2\2\2\u04f9\u04ec\3\2\2\2")
buf.write("\u04f9\u04ee\3\2\2\2\u04f9\u04f0\3\2\2\2\u04f9\u04f2\3")
buf.write("\2\2\2\u04f9\u04f4\3\2\2\2\u04f9\u04f6\3\2\2\2\u04f9\u04f8")
buf.write("\3\2\2\2\u04fa\u00cb\3\2\2\2\u04fb\u0504\7c\2\2\u04fc")
buf.write("\u0504\7d\2\2\u04fd\u0504\7e\2\2\u04fe\u0504\7j\2\2\u04ff")
buf.write("\u0504\7t\2\2\u0500\u0504\7\177\2\2\u0501\u0504\7\u008b")
buf.write("\2\2\u0502\u0504\5\36\20\2\u0503\u04fb\3\2\2\2\u0503\u04fc")
buf.write("\3\2\2\2\u0503\u04fd\3\2\2\2\u0503\u04fe\3\2\2\2\u0503")
buf.write("\u04ff\3\2\2\2\u0503\u0500\3\2\2\2\u0503\u0501\3\2\2\2")
buf.write("\u0503\u0502\3\2\2\2\u0504\u00cd\3\2\2\2\u0505\u0506\t")
buf.write("\7\2\2\u0506\u00cf\3\2\2\2\u0507\u050b\7\25\2\2\u0508")
buf.write("\u050b\7\23\2\2\u0509\u050b\5\"\22\2\u050a\u0507\3\2\2")
buf.write("\2\u050a\u0508\3\2\2\2\u050a\u0509\3\2\2\2\u050b\u00d1")
buf.write("\3\2\2\2\u050c\u050d\7\5\2\2\u050d\u050e\7.\2\2\u050e")
buf.write("\u050f\5T+\2\u050f\u0510\7\6\2\2\u0510\u0529\3\2\2\2\u0511")
buf.write("\u0512\7\5\2\2\u0512\u0513\7\60\2\2\u0513\u0514\5T+\2")
buf.write("\u0514\u0515\7\6\2\2\u0515\u0529\3\2\2\2\u0516\u0517\7")
buf.write("\5\2\2\u0517\u0518\7\61\2\2\u0518\u051a\7\5\2\2\u0519")
buf.write("\u051b\5R*\2\u051a\u0519\3\2\2\2\u051b\u051c\3\2\2\2\u051c")
buf.write("\u051a\3\2\2\2\u051c\u051d\3\2\2\2\u051d\u051e\3\2\2\2")
buf.write("\u051e\u051f\7\6\2\2\u051f\u0521\7\5\2\2\u0520\u0522\5")
buf.write("8\35\2\u0521\u0520\3\2\2\2\u0522\u0523\3\2\2\2\u0523\u0521")
buf.write("\3\2\2\2\u0523\u0524\3\2\2\2\u0524\u0525\3\2\2\2\u0525")
buf.write("\u0526\7\6\2\2\u0526\u0527\7\6\2\2\u0527\u0529\3\2\2\2")
buf.write("\u0528\u050c\3\2\2\2\u0528\u0511\3\2\2\2\u0528\u0516\3")
buf.write("\2\2\2\u0529\u00d3\3\2\2\2\u052a\u052b\7d\2\2\u052b\u0538")
buf.write("\5\22\n\2\u052c\u052d\7e\2\2\u052d\u0538\5\32\16\2\u052e")
buf.write("\u052f\7j\2\2\u052f\u0538\5\u00ceh\2\u0530\u0531\7t\2")
buf.write("\2\u0531\u0538\5\32\16\2\u0532\u0533\7\177\2\2\u0533\u0538")
buf.write("\5\u00d0i\2\u0534\u0535\7\u008b\2\2\u0535\u0538\5\32\16")
buf.write("\2\u0536\u0538\5*\26\2\u0537\u052a\3\2\2\2\u0537\u052c")
buf.write("\3\2\2\2\u0537\u052e\3\2\2\2\u0537\u0530\3\2\2\2\u0537")
buf.write("\u0532\3\2\2\2\u0537\u0534\3\2\2\2\u0537\u0536\3\2\2\2")
buf.write("\u0538\u00d5\3\2\2\2\u0539\u053a\7\5\2\2\u053a\u053b\5")
buf.write("8\35\2\u053b\u053c\58\35\2\u053c\u053d\7\6\2\2\u053d\u00d7")
buf.write("\3\2\2\2\u053e\u053f\7\5\2\2\u053f\u0540\5\20\t\2\u0540")
buf.write("\u0541\5\u00c8e\2\u0541\u0542\7\6\2\2\u0542\u00d9\3\2")
buf.write("\2\2\u0543\u0544\t\b\2\2\u0544\u00db\3\2\2\2\u0545\u0546")
buf.write("\5\32\16\2\u0546\u00dd\3\2\2\2\u0547\u054b\7\5\2\2\u0548")
buf.write("\u054a\58\35\2\u0549\u0548\3\2\2\2\u054a\u054d\3\2\2\2")
buf.write("\u054b\u0549\3\2\2\2\u054b\u054c\3\2\2\2\u054c\u054e\3")
buf.write("\2\2\2\u054d\u054b\3\2\2\2\u054e\u054f\7\6\2\2\u054f\u00df")
buf.write("\3\2\2\2\u0550\u0554\7\5\2\2\u0551\u0553\5\u00d8m\2\u0552")
buf.write("\u0551\3\2\2\2\u0553\u0556\3\2\2\2\u0554\u0552\3\2\2\2")
buf.write("\u0554\u0555\3\2\2\2\u0555\u0557\3\2\2\2\u0556\u0554\3")
buf.write("\2\2\2\u0557\u0558\7\6\2\2\u0558\u00e1\3\2\2\2\u0559\u055b")
buf.write("\7\5\2\2\u055a\u055c\5\u00d4k\2\u055b\u055a\3\2\2\2\u055c")
buf.write("\u055d\3\2\2\2\u055d\u055b\3\2\2\2\u055d\u055e\3\2\2\2")
buf.write("\u055e\u055f\3\2\2\2\u055f\u0560\7\6\2\2\u0560\u00e3\3")
buf.write("\2\2\2\u0561\u0565\7\5\2\2\u0562\u0564\5\u00d2j\2\u0563")
buf.write("\u0562\3\2\2\2\u0564\u0567\3\2\2\2\u0565\u0563\3\2\2\2")
buf.write("\u0565\u0566\3\2\2\2\u0566\u0568\3\2\2\2\u0567\u0565\3")
buf.write("\2\2\2\u0568\u0569\7\6\2\2\u0569\u00e5\3\2\2\2\u056a\u056b")
buf.write("\5(\25\2\u056b\u00e7\3\2\2\2\u056c\u056d\5\"\22\2\u056d")
buf.write("\u00e9\3\2\2\2\u056e\u0572\7\5\2\2\u056f\u0571\5\20\t")
buf.write("\2\u0570\u056f\3\2\2\2\u0571\u0574\3\2\2\2\u0572\u0570")
buf.write("\3\2\2\2\u0572\u0573\3\2\2\2\u0573\u0575\3\2\2\2\u0574")
buf.write("\u0572\3\2\2\2\u0575\u0576\7\6\2\2\u0576\u00eb\3\2\2\2")
buf.write("\u0577\u057b\7\5\2\2\u0578\u057a\5\20\t\2\u0579\u0578")
buf.write("\3\2\2\2\u057a\u057d\3\2\2\2\u057b\u0579\3\2\2\2\u057b")
buf.write("\u057c\3\2\2\2\u057c\u057e\3\2\2\2\u057d\u057b\3\2\2\2")
buf.write("\u057e\u057f\7\6\2\2\u057f\u00ed\3\2\2\2\u0580\u0582\7")
buf.write("\5\2\2\u0581\u0583\5\u00d6l\2\u0582\u0581\3\2\2\2\u0583")
buf.write("\u0584\3\2\2\2\u0584\u0582\3\2\2\2\u0584\u0585\3\2\2\2")
buf.write("\u0585\u0586\3\2\2\2\u0586\u0587\7\6\2\2\u0587\u00ef\3")
buf.write("\2\2\2\u0588\u0594\5\u00dan\2\u0589\u0594\5\u00dco\2\u058a")
buf.write("\u0594\5\u00dep\2\u058b\u0594\5\u00e0q\2\u058c\u0594\5")
buf.write("\u00e2r\2\u058d\u0594\5\u00e4s\2\u058e\u0594\5\u00e6t")
buf.write("\2\u058f\u0594\5\u00e8u\2\u0590\u0594\5\u00eav\2\u0591")
buf.write("\u0594\5\u00ecw\2\u0592\u0594\5\u00eex\2\u0593\u0588\3")
buf.write("\2\2\2\u0593\u0589\3\2\2\2\u0593\u058a\3\2\2\2\u0593\u058b")
buf.write("\3\2\2\2\u0593\u058c\3\2\2\2\u0593\u058d\3\2\2\2\u0593")
buf.write("\u058e\3\2\2\2\u0593\u058f\3\2\2\2\u0593\u0590\3\2\2\2")
buf.write("\u0593\u0591\3\2\2\2\u0593\u0592\3\2\2\2\u0594\u00f1\3")
buf.write("\2\2\2\u0595\u059e\7\27\2\2\u0596\u059e\5\u00f0y\2\u0597")
buf.write("\u059e\7\33\2\2\u0598\u0599\7\5\2\2\u0599\u059a\7\20\2")
buf.write("\2\u059a\u059b\5\32\16\2\u059b\u059c\7\6\2\2\u059c\u059e")
buf.write("\3\2\2\2\u059d\u0595\3\2\2\2\u059d\u0596\3\2\2\2\u059d")
buf.write("\u0597\3\2\2\2\u059d\u0598\3\2\2\2\u059e\u00f3\3\2\2\2")
buf.write("a\u00fe\u0108\u0119\u0129\u0132\u0136\u013a\u0143\u0147")
buf.write("\u014f\u0153\u0159\u0161\u0165\u016e\u0180\u0184\u0192")
buf.write("\u019d\u01a8\u01b4\u01c0\u01cd\u01d8\u01dc\u01e4\u01f1")
buf.write("\u01fc\u0206\u020b\u0210\u0219\u0221\u0226\u022c\u0233")
buf.write("\u023c\u024b\u0253\u025c\u0269\u0271\u0284\u028d\u0297")
buf.write("\u029e\u02a3\u02ab\u02b7\u02c4\u02c9\u033c\u0345\u034e")
buf.write("\u0357\u0361\u036b\u0374\u037d\u038c\u038e\u0392\u039f")
buf.write("\u03a9\u03b4\u03ca\u03e7\u03ee\u03f9\u0402\u040e\u0419")
buf.write("\u043a\u0441\u0452\u045d\u045f\u049c\u04d8\u04f9\u0503")
buf.write("\u050a\u051c\u0523\u0528\u0537\u054b\u0554\u055d\u0565")
buf.write("\u0572\u057b\u0584\u0593\u059d")
return buf.getvalue()
class SMTLIBv2Parser ( Parser ):
grammarFileName = "SMTLIBv2.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "' bv'", "<INVALID>", "'('", "')'", "';'",
"<INVALID>", "<INVALID>", "<INVALID>", "'not'", "'Bool'",
"'Int'", "'Real'", "'continued-execution'", "'error'",
"'false'", "'immediate-exit'", "'incomplete'", "'logic'",
"'memout'", "'sat'", "'success'", "'theory'", "'true'",
"'unknown'", "'unsupported'", "'unsat'", "'assert'",
"'assert-soft'", "'simplify'", "'check-sat'", "'check-sat-assuming'",
"'check-sat-using'", "'labels'", "'minimize'", "'maximize'",
"'declare-const'", "'declare-datatype'", "'declare-codatatype'",
"'declare-datatypes'", "'declare-codatatypes'", "'declare-fun'",
"'declare-sort'", "'define'", "'define-fun'", "'define-const'",
"'define-fun-rec'", "'define-funs-rec'", "'define-sort'",
"'display'", "'echo'", "'eval'", "'exit'", "'get-objectives'",
"'get-assertions'", "'get-assignment'", "'get-info'",
"'get-model'", "'block-model'", "'get-option'", "'poly/factor'",
"'get-proof'", "'get-unsat-assumptions'", "'get-unsat-core'",
"'get-value'", "'pop'", "'push'", "'reset'", "'reset-assertions'",
"'set-info'", "'set-logic'", "'set-option'", "'then'",
"'and-then'", "'par-then'", "'or-else'", "'par-or-else'",
"'par-or'", "'try-for'", "'using-params'", "'!'", "'_'",
"'as'", "'BINARY'", "'DECIMAL'", "'exists'", "'HEXADECIMAL'",
"'forall'", "'let'", "'match'", "'NUMERAL'", "'par'",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"':'", "':all-statistics'", "':assertion-stack-levels'",
"':authors'", "':category'", "':chainable'", "':definition'",
"':diagnostic-output-channel'", "':error-behavior'",
"':extensions'", "':funs'", "':funs-description'",
"':global-declarations'", "':interactive-mode'", "':language'",
"':left-assoc'", "':license'", "':named'", "':name'",
"':notes'", "':pattern'", "':print-success'", "':produce-assertions'",
"':produce-assignments'", "':produce-models'", "':produce-proofs'",
"':produce-unsat-assumptions'", "':produce-unsat-cores'",
"':random-seed'", "':reason-unknown'", "':regular-output-channel'",
"':reproducible-resource-limit'", "':right-assoc'",
"':smt-lib-version'", "':sorts'", "':sorts-description'",
"':source'", "':status'", "':theories'", "':values'",
"':verbosity'", "':version'" ]
symbolicNames = [ "<INVALID>", "<INVALID>", "Comment", "ParOpen", "ParClose",
"Semicolon", "String", "QuotedSymbol", "RegConst",
"PS_Not", "PS_Bool", "PS_Int", "PS_Real", "PS_ContinuedExecution",
"PS_Error", "PS_False", "PS_ImmediateExit", "PS_Incomplete",
"PS_Logic", "PS_Memout", "PS_Sat", "PS_Success", "PS_Theory",
"PS_True", "PS_Unknown", "PS_Unsupported", "PS_Unsat",
"CMD_Assert", "CMD_AssertSoft", "Simplify", "CMD_CheckSat",
"CMD_CheckSatAssuming", "CMD_CheckSatUsing", "CMD_Labels",
"CMD_Minimize", "CMD_Maximize", "CMD_DeclareConst",
"CMD_DeclareDatatype", "CMD_DeclareCodatatype", "CMD_DeclareDatatypes",
"CMD_DeclareCodatatypes", "CMD_DeclareFun", "CMD_DeclareSort",
"CMD_Define", "CMD_DefineFun", "CMD_DefineConst",
"CMD_DefineFunRec", "CMD_DefineFunsRec", "CMD_DefineSort",
"CMD_Display", "CMD_Echo", "CMD_Eval", "CMD_Exit",
"CMD_GetObjectives", "CMD_GetAssertions", "CMD_GetAssignment",
"CMD_GetInfo", "CMD_GetModel", "CMD_BlockModel", "CMD_GetOption",
"CMD_PolyFactor", "CMD_GetProof", "CMD_GetUnsatAssumptions",
"CMD_GetUnsatCore", "CMD_GetValue", "CMD_Pop", "CMD_Push",
"CMD_Reset", "CMD_ResetAssertions", "CMD_SetInfo",
"CMD_SetLogic", "CMD_SetOption", "TAC_Then", "TAC_AndThen",
"TAC_ParThen", "TAC_OrElse", "TAC_ParOrElse", "TAC_ParOr",
"TAC_TryFor", "TAC_UsingParams", "GRW_Exclamation",
"GRW_Underscore", "GRW_As", "GRW_Binary", "GRW_Decimal",
"GRW_Exists", "GRW_Hexadecimal", "GRW_Forall", "GRW_Let",
"GRW_Match", "GRW_Numeral", "GRW_Par", "Numeral",
"Binary", "HexDecimal", "Decimal", "Colon", "PK_AllStatistics",
"PK_AssertionStackLevels", "PK_Authors", "PK_Category",
"PK_Chainable", "PK_Definition", "PK_DiagnosticOutputChannel",
"PK_ErrorBehaviour", "PK_Extension", "PK_Funs", "PK_FunsDescription",
"PK_GlobalDeclarations", "PK_InteractiveMode", "PK_Language",
"PK_LeftAssoc", "PK_License", "PK_Named", "PK_Name",
"PK_Notes", "PK_Pattern", "PK_PrintSuccess", "PK_ProduceAssertions",
"PK_ProduceAssignments", "PK_ProduceModels", "PK_ProduceProofs",
"PK_ProduceUnsatAssumptions", "PK_ProduceUnsatCores",
"PK_RandomSeed", "PK_ReasonUnknown", "PK_RegularOutputChannel",
"PK_ReproducibleResourceLimit", "PK_RightAssoc", "PK_SmtLibVersion",
"PK_Sorts", "PK_SortsDescription", "PK_Source", "PK_Status",
"PK_Theories", "PK_Values", "PK_Verbosity", "PK_Version",
"UndefinedSymbol", "WS" ]
RULE_start = 0
RULE_response = 1
RULE_generalReservedWord = 2
RULE_simpleSymbol = 3
RULE_quotedSymbol = 4
RULE_predefSymbol = 5
RULE_predefKeyword = 6
RULE_symbol = 7
RULE_numeral = 8
RULE_decimal = 9
RULE_hexadecimal = 10
RULE_binary = 11
RULE_string = 12
RULE_reg_const = 13
RULE_keyword = 14
RULE_spec_constant = 15
RULE_s_expr = 16
RULE_index = 17
RULE_identifier = 18
RULE_attribute_value = 19
RULE_attribute = 20
RULE_sort = 21
RULE_qual_identifier = 22
RULE_var_binding = 23
RULE_sorted_var = 24
RULE_pattern = 25
RULE_match_case = 26
RULE_term = 27
RULE_sort_symbol_decl = 28
RULE_meta_spec_constant = 29
RULE_fun_symbol_decl = 30
RULE_par_fun_symbol_decl = 31
RULE_theory_attribute = 32
RULE_theory_decl = 33
RULE_logic_attribue = 34
RULE_logic = 35
RULE_sort_dec = 36
RULE_selector_dec = 37
RULE_constructor_dec = 38
RULE_datatype_dec = 39
RULE_function_dec = 40
RULE_function_def = 41
RULE_prop_literal = 42
RULE_script = 43
RULE_cmd_assert = 44
RULE_cmd_assertSoft = 45
RULE_cmd_simplify = 46
RULE_cmd_checkSat = 47
RULE_cmd_checkSatAssuming = 48
RULE_cmd_checkSatUsing = 49
RULE_cmd_minimize = 50
RULE_cmd_maximize = 51
RULE_cmd_declareConst = 52
RULE_cmd_declareDatatype = 53
RULE_cmd_declareCodatatype = 54
RULE_cmd_declareDatatypes = 55
RULE_cmd_declareCodatatypes = 56
RULE_cmd_declareFun = 57
RULE_cmd_declareSort = 58
RULE_cmd_define = 59
RULE_cmd_defineFun = 60
RULE_cmd_defineConst = 61
RULE_cmd_defineFunRec = 62
RULE_cmd_defineFunsRec = 63
RULE_cmd_defineSort = 64
RULE_cmd_display = 65
RULE_cmd_echo = 66
RULE_cmd_eval = 67
RULE_cmd_exit = 68
RULE_cmd_GetObjectives = 69
RULE_cmd_getAssertions = 70
RULE_cmd_getAssignment = 71
RULE_cmd_getInfo = 72
RULE_cmd_getModel = 73
RULE_cmd_blockModel = 74
RULE_cmd_getOption = 75
RULE_cmd_getProof = 76
RULE_cmd_getUnsatAssumptions = 77
RULE_cmd_labels = 78
RULE_cmd_getUnsatCore = 79
RULE_cmd_getValue = 80
RULE_cmd_pop = 81
RULE_cmd_poly_factor = 82
RULE_cmd_push = 83
RULE_cmd_reset = 84
RULE_cmd_resetAssertions = 85
RULE_cmd_setInfo = 86
RULE_cmd_setLogic = 87
RULE_cmd_setOption = 88
RULE_tac_then = 89
RULE_tac_and_then = 90
RULE_par_then = 91
RULE_or_else = 92
RULE_par_or_else = 93
RULE_par_or = 94
RULE_tryFor = 95
RULE_usingParams = 96
RULE_tactical = 97
RULE_command = 98
RULE_b_value = 99
RULE_option = 100
RULE_info_flag = 101
RULE_error_behaviour = 102
RULE_reason_unknown = 103
RULE_model_response = 104
RULE_info_response = 105
RULE_valuation_pair = 106
RULE_t_valuation_pair = 107
RULE_check_sat_response = 108
RULE_echo_response = 109
RULE_get_assertions_response = 110
RULE_get_assignment_response = 111
RULE_get_info_response = 112
RULE_get_model_response = 113
RULE_get_option_response = 114
RULE_get_proof_response = 115
RULE_get_unsat_assump_response = 116
RULE_get_unsat_core_response = 117
RULE_get_value_response = 118
RULE_specific_success_response = 119
RULE_general_response = 120
ruleNames = [ "start", "response", "generalReservedWord", "simpleSymbol",
"quotedSymbol", "predefSymbol", "predefKeyword", "symbol",
"numeral", "decimal", "hexadecimal", "binary", "string",
"reg_const", "keyword", "spec_constant", "s_expr", "index",
"identifier", "attribute_value", "attribute", "sort",
"qual_identifier", "var_binding", "sorted_var", "pattern",
"match_case", "term", "sort_symbol_decl", "meta_spec_constant",
"fun_symbol_decl", "par_fun_symbol_decl", "theory_attribute",
"theory_decl", "logic_attribue", "logic", "sort_dec",
"selector_dec", "constructor_dec", "datatype_dec", "function_dec",
"function_def", "prop_literal", "script", "cmd_assert",
"cmd_assertSoft", "cmd_simplify", "cmd_checkSat", "cmd_checkSatAssuming",
"cmd_checkSatUsing", "cmd_minimize", "cmd_maximize",
"cmd_declareConst", "cmd_declareDatatype", "cmd_declareCodatatype",
"cmd_declareDatatypes", "cmd_declareCodatatypes", "cmd_declareFun",
"cmd_declareSort", "cmd_define", "cmd_defineFun", "cmd_defineConst",
"cmd_defineFunRec", "cmd_defineFunsRec", "cmd_defineSort",
"cmd_display", "cmd_echo", "cmd_eval", "cmd_exit", "cmd_GetObjectives",
"cmd_getAssertions", "cmd_getAssignment", "cmd_getInfo",
"cmd_getModel", "cmd_blockModel", "cmd_getOption", "cmd_getProof",
"cmd_getUnsatAssumptions", "cmd_labels", "cmd_getUnsatCore",
"cmd_getValue", "cmd_pop", "cmd_poly_factor", "cmd_push",
"cmd_reset", "cmd_resetAssertions", "cmd_setInfo", "cmd_setLogic",
"cmd_setOption", "tac_then", "tac_and_then", "par_then",
"or_else", "par_or_else", "par_or", "tryFor", "usingParams",
"tactical", "command", "b_value", "option", "info_flag",
"error_behaviour", "reason_unknown", "model_response",
"info_response", "valuation_pair", "t_valuation_pair",
"check_sat_response", "echo_response", "get_assertions_response",
"get_assignment_response", "get_info_response", "get_model_response",
"get_option_response", "get_proof_response", "get_unsat_assump_response",
"get_unsat_core_response", "get_value_response", "specific_success_response",
"general_response" ]
EOF = Token.EOF
T__0=1
Comment=2
ParOpen=3
ParClose=4
Semicolon=5
String=6
QuotedSymbol=7
RegConst=8
PS_Not=9
PS_Bool=10
PS_Int=11
PS_Real=12
PS_ContinuedExecution=13
PS_Error=14
PS_False=15
PS_ImmediateExit=16
PS_Incomplete=17
PS_Logic=18
PS_Memout=19
PS_Sat=20
PS_Success=21
PS_Theory=22
PS_True=23
PS_Unknown=24
PS_Unsupported=25
PS_Unsat=26
CMD_Assert=27
CMD_AssertSoft=28
Simplify=29
CMD_CheckSat=30
CMD_CheckSatAssuming=31
CMD_CheckSatUsing=32
CMD_Labels=33
CMD_Minimize=34
CMD_Maximize=35
CMD_DeclareConst=36
CMD_DeclareDatatype=37
CMD_DeclareCodatatype=38
CMD_DeclareDatatypes=39
CMD_DeclareCodatatypes=40
CMD_DeclareFun=41
CMD_DeclareSort=42
CMD_Define=43
CMD_DefineFun=44
CMD_DefineConst=45
CMD_DefineFunRec=46
CMD_DefineFunsRec=47
CMD_DefineSort=48
CMD_Display=49
CMD_Echo=50
CMD_Eval=51
CMD_Exit=52
CMD_GetObjectives=53
CMD_GetAssertions=54
CMD_GetAssignment=55
CMD_GetInfo=56
CMD_GetModel=57
CMD_BlockModel=58
CMD_GetOption=59
CMD_PolyFactor=60
CMD_GetProof=61
CMD_GetUnsatAssumptions=62
CMD_GetUnsatCore=63
CMD_GetValue=64
CMD_Pop=65
CMD_Push=66
CMD_Reset=67
CMD_ResetAssertions=68
CMD_SetInfo=69
CMD_SetLogic=70
CMD_SetOption=71
TAC_Then=72
TAC_AndThen=73
TAC_ParThen=74
TAC_OrElse=75
TAC_ParOrElse=76
TAC_ParOr=77
TAC_TryFor=78
TAC_UsingParams=79
GRW_Exclamation=80
GRW_Underscore=81
GRW_As=82
GRW_Binary=83
GRW_Decimal=84
GRW_Exists=85
GRW_Hexadecimal=86
GRW_Forall=87
GRW_Let=88
GRW_Match=89
GRW_Numeral=90
GRW_Par=91
Numeral=92
Binary=93
HexDecimal=94
Decimal=95
Colon=96
PK_AllStatistics=97
PK_AssertionStackLevels=98
PK_Authors=99
PK_Category=100
PK_Chainable=101
PK_Definition=102
PK_DiagnosticOutputChannel=103
PK_ErrorBehaviour=104
PK_Extension=105
PK_Funs=106
PK_FunsDescription=107
PK_GlobalDeclarations=108
PK_InteractiveMode=109
PK_Language=110
PK_LeftAssoc=111
PK_License=112
PK_Named=113
PK_Name=114
PK_Notes=115
PK_Pattern=116
PK_PrintSuccess=117
PK_ProduceAssertions=118
PK_ProduceAssignments=119
PK_ProduceModels=120
PK_ProduceProofs=121
PK_ProduceUnsatAssumptions=122
PK_ProduceUnsatCores=123
PK_RandomSeed=124
PK_ReasonUnknown=125
PK_RegularOutputChannel=126
PK_ReproducibleResourceLimit=127
PK_RightAssoc=128
PK_SmtLibVersion=129
PK_Sorts=130
PK_SortsDescription=131
PK_Source=132
PK_Status=133
PK_Theories=134
PK_Values=135
PK_Verbosity=136
PK_Version=137
UndefinedSymbol=138
WS=139
    def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
        """Create a parser reading tokens from *input*, writing diagnostics to *output*.

        The ATN, per-decision DFA cache and prediction-context cache are
        class-level attributes, so they are shared by all parser instances.
        """
        super().__init__(input, output)
        # Generated against ANTLR runtime 4.9.2; fails fast on a runtime mismatch.
        self.checkVersion("4.9.2")
        self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
        self._predicates = None
class StartContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def script(self):
return self.getTypedRuleContext(SMTLIBv2Parser.ScriptContext,0)
def EOF(self):
return self.getToken(SMTLIBv2Parser.EOF, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_start
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStart" ):
listener.enterStart(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStart" ):
listener.exitStart(self)
    def start(self):
        """Parse rule ``start``: ``script EOF``.

        Returns the populated StartContext. On a recognition error the
        exception is stored on the context and handled by the error
        strategy (report + recover) rather than propagating.
        """
        localctx = SMTLIBv2Parser.StartContext(self, self._ctx, self.state)
        self.enterRule(localctx, 0, self.RULE_start)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 242  # ATN state (from the serialized ATN)
            self.script()
            self.state = 243  # ATN state
            self.match(SMTLIBv2Parser.EOF)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ResponseContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def general_response(self):
return self.getTypedRuleContext(SMTLIBv2Parser.General_responseContext,0)
def EOF(self):
return self.getToken(SMTLIBv2Parser.EOF, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_response
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterResponse" ):
listener.enterResponse(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitResponse" ):
listener.exitResponse(self)
    def response(self):
        """Parse rule ``response``: ``general_response EOF``.

        Returns the populated ResponseContext; recognition errors are
        recorded on the context and handled by the error strategy.
        """
        localctx = SMTLIBv2Parser.ResponseContext(self, self._ctx, self.state)
        self.enterRule(localctx, 2, self.RULE_response)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 245  # ATN state
            self.general_response()
            self.state = 246  # ATN state
            self.match(SMTLIBv2Parser.EOF)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class GeneralReservedWordContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def GRW_Exclamation(self):
return self.getToken(SMTLIBv2Parser.GRW_Exclamation, 0)
def GRW_Underscore(self):
return self.getToken(SMTLIBv2Parser.GRW_Underscore, 0)
def GRW_As(self):
return self.getToken(SMTLIBv2Parser.GRW_As, 0)
def GRW_Binary(self):
return self.getToken(SMTLIBv2Parser.GRW_Binary, 0)
def GRW_Decimal(self):
return self.getToken(SMTLIBv2Parser.GRW_Decimal, 0)
def GRW_Exists(self):
return self.getToken(SMTLIBv2Parser.GRW_Exists, 0)
def GRW_Hexadecimal(self):
return self.getToken(SMTLIBv2Parser.GRW_Hexadecimal, 0)
def GRW_Forall(self):
return self.getToken(SMTLIBv2Parser.GRW_Forall, 0)
def GRW_Let(self):
return self.getToken(SMTLIBv2Parser.GRW_Let, 0)
def GRW_Match(self):
return self.getToken(SMTLIBv2Parser.GRW_Match, 0)
def GRW_Numeral(self):
return self.getToken(SMTLIBv2Parser.GRW_Numeral, 0)
def GRW_Par(self):
return self.getToken(SMTLIBv2Parser.GRW_Par, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_generalReservedWord
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterGeneralReservedWord" ):
listener.enterGeneralReservedWord(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitGeneralReservedWord" ):
listener.exitGeneralReservedWord(self)
    def generalReservedWord(self):
        """Parse rule ``generalReservedWord``: any single GRW_* reserved-word token.

        The membership test is a 64-bit bitset over token types offset by 80
        (the lowest GRW token type); a non-matching lookahead triggers
        single-token inline recovery.
        """
        localctx = SMTLIBv2Parser.GeneralReservedWordContext(self, self._ctx, self.state)
        self.enterRule(localctx, 4, self.RULE_generalReservedWord)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 248  # ATN state
            _la = self._input.LA(1)
            # Bitset test: is the lookahead one of the 12 GRW_* tokens?
            if not(((((_la - 80)) & ~0x3f) == 0 and ((1 << (_la - 80)) & ((1 << (SMTLIBv2Parser.GRW_Exclamation - 80)) | (1 << (SMTLIBv2Parser.GRW_Underscore - 80)) | (1 << (SMTLIBv2Parser.GRW_As - 80)) | (1 << (SMTLIBv2Parser.GRW_Binary - 80)) | (1 << (SMTLIBv2Parser.GRW_Decimal - 80)) | (1 << (SMTLIBv2Parser.GRW_Exists - 80)) | (1 << (SMTLIBv2Parser.GRW_Hexadecimal - 80)) | (1 << (SMTLIBv2Parser.GRW_Forall - 80)) | (1 << (SMTLIBv2Parser.GRW_Let - 80)) | (1 << (SMTLIBv2Parser.GRW_Match - 80)) | (1 << (SMTLIBv2Parser.GRW_Numeral - 80)) | (1 << (SMTLIBv2Parser.GRW_Par - 80)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class SimpleSymbolContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def predefSymbol(self):
return self.getTypedRuleContext(SMTLIBv2Parser.PredefSymbolContext,0)
def UndefinedSymbol(self):
return self.getToken(SMTLIBv2Parser.UndefinedSymbol, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_simpleSymbol
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSimpleSymbol" ):
listener.enterSimpleSymbol(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSimpleSymbol" ):
listener.exitSimpleSymbol(self)
    def simpleSymbol(self):
        """Parse rule ``simpleSymbol``: ``predefSymbol | UndefinedSymbol``.

        Dispatches on one token of lookahead: any PS_* token selects the
        predefSymbol alternative, UndefinedSymbol is matched directly, and
        anything else raises NoViableAltException (caught and recovered).
        """
        localctx = SMTLIBv2Parser.SimpleSymbolContext(self, self._ctx, self.state)
        self.enterRule(localctx, 6, self.RULE_simpleSymbol)
        try:
            self.state = 252  # decision ATN state
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [SMTLIBv2Parser.PS_Not, SMTLIBv2Parser.PS_Bool, SMTLIBv2Parser.PS_Int, SMTLIBv2Parser.PS_Real, SMTLIBv2Parser.PS_ContinuedExecution, SMTLIBv2Parser.PS_Error, SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_ImmediateExit, SMTLIBv2Parser.PS_Incomplete, SMTLIBv2Parser.PS_Logic, SMTLIBv2Parser.PS_Memout, SMTLIBv2Parser.PS_Sat, SMTLIBv2Parser.PS_Success, SMTLIBv2Parser.PS_Theory, SMTLIBv2Parser.PS_True, SMTLIBv2Parser.PS_Unknown, SMTLIBv2Parser.PS_Unsupported, SMTLIBv2Parser.PS_Unsat]:
                self.enterOuterAlt(localctx, 1)
                self.state = 250  # ATN state
                self.predefSymbol()
                pass
            elif token in [SMTLIBv2Parser.UndefinedSymbol]:
                self.enterOuterAlt(localctx, 2)
                self.state = 251  # ATN state
                self.match(SMTLIBv2Parser.UndefinedSymbol)
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class QuotedSymbolContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def QuotedSymbol(self):
return self.getToken(SMTLIBv2Parser.QuotedSymbol, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_quotedSymbol
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterQuotedSymbol" ):
listener.enterQuotedSymbol(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitQuotedSymbol" ):
listener.exitQuotedSymbol(self)
    def quotedSymbol(self):
        """Parse rule ``quotedSymbol``: match one QuotedSymbol token."""
        localctx = SMTLIBv2Parser.QuotedSymbolContext(self, self._ctx, self.state)
        self.enterRule(localctx, 8, self.RULE_quotedSymbol)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 254  # ATN state
            self.match(SMTLIBv2Parser.QuotedSymbol)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class PredefSymbolContext(ParserRuleContext):
        # Parse-tree node for rule `predefSymbol`: one of the 18 predefined
        # symbol tokens (PS_*). One generated token accessor per alternative.
        __slots__ = 'parser'
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def PS_Not(self):
            return self.getToken(SMTLIBv2Parser.PS_Not, 0)
        def PS_Bool(self):
            return self.getToken(SMTLIBv2Parser.PS_Bool, 0)
        def PS_Int(self):
            return self.getToken(SMTLIBv2Parser.PS_Int, 0)
        def PS_Real(self):
            return self.getToken(SMTLIBv2Parser.PS_Real, 0)
        def PS_ContinuedExecution(self):
            return self.getToken(SMTLIBv2Parser.PS_ContinuedExecution, 0)
        def PS_Error(self):
            return self.getToken(SMTLIBv2Parser.PS_Error, 0)
        def PS_False(self):
            return self.getToken(SMTLIBv2Parser.PS_False, 0)
        def PS_ImmediateExit(self):
            return self.getToken(SMTLIBv2Parser.PS_ImmediateExit, 0)
        def PS_Incomplete(self):
            return self.getToken(SMTLIBv2Parser.PS_Incomplete, 0)
        def PS_Logic(self):
            return self.getToken(SMTLIBv2Parser.PS_Logic, 0)
        def PS_Memout(self):
            return self.getToken(SMTLIBv2Parser.PS_Memout, 0)
        def PS_Sat(self):
            return self.getToken(SMTLIBv2Parser.PS_Sat, 0)
        def PS_Success(self):
            return self.getToken(SMTLIBv2Parser.PS_Success, 0)
        def PS_Theory(self):
            return self.getToken(SMTLIBv2Parser.PS_Theory, 0)
        def PS_True(self):
            return self.getToken(SMTLIBv2Parser.PS_True, 0)
        def PS_Unknown(self):
            return self.getToken(SMTLIBv2Parser.PS_Unknown, 0)
        def PS_Unsupported(self):
            return self.getToken(SMTLIBv2Parser.PS_Unsupported, 0)
        def PS_Unsat(self):
            return self.getToken(SMTLIBv2Parser.PS_Unsat, 0)
        def getRuleIndex(self):
            return SMTLIBv2Parser.RULE_predefSymbol
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterPredefSymbol" ):
                listener.enterPredefSymbol(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitPredefSymbol" ):
                listener.exitPredefSymbol(self)
    def predefSymbol(self):
        """Parse rule ``predefSymbol``: any single PS_* predefined-symbol token.

        Membership is tested with a 64-bit bitset over the raw token type
        (all PS_* types are < 64); a mismatch triggers inline recovery.
        """
        localctx = SMTLIBv2Parser.PredefSymbolContext(self, self._ctx, self.state)
        self.enterRule(localctx, 10, self.RULE_predefSymbol)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 256  # ATN state
            _la = self._input.LA(1)
            # Bitset test: is the lookahead one of the 18 PS_* tokens?
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0)):
            self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class PredefKeywordContext(ParserRuleContext):
        # Parse-tree node for rule `predefKeyword`: one of the 41 predefined
        # keyword tokens (PK_*). One generated token accessor per alternative.
        __slots__ = 'parser'
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def PK_AllStatistics(self):
            return self.getToken(SMTLIBv2Parser.PK_AllStatistics, 0)
        def PK_AssertionStackLevels(self):
            return self.getToken(SMTLIBv2Parser.PK_AssertionStackLevels, 0)
        def PK_Authors(self):
            return self.getToken(SMTLIBv2Parser.PK_Authors, 0)
        def PK_Category(self):
            return self.getToken(SMTLIBv2Parser.PK_Category, 0)
        def PK_Chainable(self):
            return self.getToken(SMTLIBv2Parser.PK_Chainable, 0)
        def PK_Definition(self):
            return self.getToken(SMTLIBv2Parser.PK_Definition, 0)
        def PK_DiagnosticOutputChannel(self):
            return self.getToken(SMTLIBv2Parser.PK_DiagnosticOutputChannel, 0)
        def PK_ErrorBehaviour(self):
            return self.getToken(SMTLIBv2Parser.PK_ErrorBehaviour, 0)
        def PK_Extension(self):
            return self.getToken(SMTLIBv2Parser.PK_Extension, 0)
        def PK_Funs(self):
            return self.getToken(SMTLIBv2Parser.PK_Funs, 0)
        def PK_FunsDescription(self):
            return self.getToken(SMTLIBv2Parser.PK_FunsDescription, 0)
        def PK_GlobalDeclarations(self):
            return self.getToken(SMTLIBv2Parser.PK_GlobalDeclarations, 0)
        def PK_InteractiveMode(self):
            return self.getToken(SMTLIBv2Parser.PK_InteractiveMode, 0)
        def PK_Language(self):
            return self.getToken(SMTLIBv2Parser.PK_Language, 0)
        def PK_LeftAssoc(self):
            return self.getToken(SMTLIBv2Parser.PK_LeftAssoc, 0)
        def PK_License(self):
            return self.getToken(SMTLIBv2Parser.PK_License, 0)
        def PK_Named(self):
            return self.getToken(SMTLIBv2Parser.PK_Named, 0)
        def PK_Name(self):
            return self.getToken(SMTLIBv2Parser.PK_Name, 0)
        def PK_Notes(self):
            return self.getToken(SMTLIBv2Parser.PK_Notes, 0)
        def PK_Pattern(self):
            return self.getToken(SMTLIBv2Parser.PK_Pattern, 0)
        def PK_PrintSuccess(self):
            return self.getToken(SMTLIBv2Parser.PK_PrintSuccess, 0)
        def PK_ProduceAssertions(self):
            return self.getToken(SMTLIBv2Parser.PK_ProduceAssertions, 0)
        def PK_ProduceAssignments(self):
            return self.getToken(SMTLIBv2Parser.PK_ProduceAssignments, 0)
        def PK_ProduceModels(self):
            return self.getToken(SMTLIBv2Parser.PK_ProduceModels, 0)
        def PK_ProduceProofs(self):
            return self.getToken(SMTLIBv2Parser.PK_ProduceProofs, 0)
        def PK_ProduceUnsatAssumptions(self):
            return self.getToken(SMTLIBv2Parser.PK_ProduceUnsatAssumptions, 0)
        def PK_ProduceUnsatCores(self):
            return self.getToken(SMTLIBv2Parser.PK_ProduceUnsatCores, 0)
        def PK_RandomSeed(self):
            return self.getToken(SMTLIBv2Parser.PK_RandomSeed, 0)
        def PK_ReasonUnknown(self):
            return self.getToken(SMTLIBv2Parser.PK_ReasonUnknown, 0)
        def PK_RegularOutputChannel(self):
            return self.getToken(SMTLIBv2Parser.PK_RegularOutputChannel, 0)
        def PK_ReproducibleResourceLimit(self):
            return self.getToken(SMTLIBv2Parser.PK_ReproducibleResourceLimit, 0)
        def PK_RightAssoc(self):
            return self.getToken(SMTLIBv2Parser.PK_RightAssoc, 0)
        def PK_SmtLibVersion(self):
            return self.getToken(SMTLIBv2Parser.PK_SmtLibVersion, 0)
        def PK_Sorts(self):
            return self.getToken(SMTLIBv2Parser.PK_Sorts, 0)
        def PK_SortsDescription(self):
            return self.getToken(SMTLIBv2Parser.PK_SortsDescription, 0)
        def PK_Source(self):
            return self.getToken(SMTLIBv2Parser.PK_Source, 0)
        def PK_Status(self):
            return self.getToken(SMTLIBv2Parser.PK_Status, 0)
        def PK_Theories(self):
            return self.getToken(SMTLIBv2Parser.PK_Theories, 0)
        def PK_Values(self):
            return self.getToken(SMTLIBv2Parser.PK_Values, 0)
        def PK_Verbosity(self):
            return self.getToken(SMTLIBv2Parser.PK_Verbosity, 0)
        def PK_Version(self):
            return self.getToken(SMTLIBv2Parser.PK_Version, 0)
        def getRuleIndex(self):
            return SMTLIBv2Parser.RULE_predefKeyword
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterPredefKeyword" ):
                listener.enterPredefKeyword(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitPredefKeyword" ):
                listener.exitPredefKeyword(self)
    def predefKeyword(self):
        """Parse rule ``predefKeyword``: any single PK_* predefined-keyword token.

        Membership is tested with a 64-bit bitset over token types offset by 97
        (the lowest PK token type); a mismatch triggers inline recovery.
        """
        localctx = SMTLIBv2Parser.PredefKeywordContext(self, self._ctx, self.state)
        self.enterRule(localctx, 12, self.RULE_predefKeyword)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 258  # ATN state
            _la = self._input.LA(1)
            # Bitset test: is the lookahead one of the 41 PK_* tokens?
            if not(((((_la - 97)) & ~0x3f) == 0 and ((1 << (_la - 97)) & ((1 << (SMTLIBv2Parser.PK_AllStatistics - 97)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 97)) | (1 << (SMTLIBv2Parser.PK_Authors - 97)) | (1 << (SMTLIBv2Parser.PK_Category - 97)) | (1 << (SMTLIBv2Parser.PK_Chainable - 97)) | (1 << (SMTLIBv2Parser.PK_Definition - 97)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 97)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 97)) | (1 << (SMTLIBv2Parser.PK_Extension - 97)) | (1 << (SMTLIBv2Parser.PK_Funs - 97)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 97)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 97)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 97)) | (1 << (SMTLIBv2Parser.PK_Language - 97)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 97)) | (1 << (SMTLIBv2Parser.PK_License - 97)) | (1 << (SMTLIBv2Parser.PK_Named - 97)) | (1 << (SMTLIBv2Parser.PK_Name - 97)) | (1 << (SMTLIBv2Parser.PK_Notes - 97)) | (1 << (SMTLIBv2Parser.PK_Pattern - 97)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 97)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 97)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 97)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 97)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 97)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 97)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 97)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 97)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 97)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 97)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 97)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 97)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 97)) | (1 << (SMTLIBv2Parser.PK_Sorts - 97)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 97)) | (1 << (SMTLIBv2Parser.PK_Source - 97)) | (1 << (SMTLIBv2Parser.PK_Status - 97)) | (1 << (SMTLIBv2Parser.PK_Theories - 97)) | (1 << (SMTLIBv2Parser.PK_Values - 97)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 97)) | (1 << (SMTLIBv2Parser.PK_Version - 97)))) != 0)):
            self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class SymbolContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def simpleSymbol(self):
return self.getTypedRuleContext(SMTLIBv2Parser.SimpleSymbolContext,0)
def quotedSymbol(self):
return self.getTypedRuleContext(SMTLIBv2Parser.QuotedSymbolContext,0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_symbol
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSymbol" ):
listener.enterSymbol(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSymbol" ):
listener.exitSymbol(self)
    def symbol(self):
        """Parse rule ``symbol``: ``simpleSymbol | quotedSymbol``.

        One-token lookahead dispatch: PS_* tokens and UndefinedSymbol select
        simpleSymbol, QuotedSymbol selects quotedSymbol; anything else raises
        NoViableAltException (caught and recovered).
        """
        localctx = SMTLIBv2Parser.SymbolContext(self, self._ctx, self.state)
        self.enterRule(localctx, 14, self.RULE_symbol)
        try:
            self.state = 262  # decision ATN state
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [SMTLIBv2Parser.PS_Not, SMTLIBv2Parser.PS_Bool, SMTLIBv2Parser.PS_Int, SMTLIBv2Parser.PS_Real, SMTLIBv2Parser.PS_ContinuedExecution, SMTLIBv2Parser.PS_Error, SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_ImmediateExit, SMTLIBv2Parser.PS_Incomplete, SMTLIBv2Parser.PS_Logic, SMTLIBv2Parser.PS_Memout, SMTLIBv2Parser.PS_Sat, SMTLIBv2Parser.PS_Success, SMTLIBv2Parser.PS_Theory, SMTLIBv2Parser.PS_True, SMTLIBv2Parser.PS_Unknown, SMTLIBv2Parser.PS_Unsupported, SMTLIBv2Parser.PS_Unsat, SMTLIBv2Parser.UndefinedSymbol]:
                self.enterOuterAlt(localctx, 1)
                self.state = 260  # ATN state
                self.simpleSymbol()
                pass
            elif token in [SMTLIBv2Parser.QuotedSymbol]:
                self.enterOuterAlt(localctx, 2)
                self.state = 261  # ATN state
                self.quotedSymbol()
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class NumeralContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Numeral(self):
return self.getToken(SMTLIBv2Parser.Numeral, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_numeral
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNumeral" ):
listener.enterNumeral(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNumeral" ):
listener.exitNumeral(self)
    def numeral(self):
        """Parse rule ``numeral``: match one Numeral token."""
        localctx = SMTLIBv2Parser.NumeralContext(self, self._ctx, self.state)
        self.enterRule(localctx, 16, self.RULE_numeral)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 264  # ATN state
            self.match(SMTLIBv2Parser.Numeral)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class DecimalContext(ParserRuleContext):
    """Parse-tree context for the `decimal` grammar rule (one Decimal token)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Decimal(self):
        # Accessor for the matched Decimal terminal.
        return self.getToken(SMTLIBv2Parser.Decimal, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_decimal

    def enterRule(self, listener:ParseTreeListener):
        try:
            handler = listener.enterDecimal
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitDecimal
        except AttributeError:
            return
        handler(self)
def decimal(self):
    """Parse rule `decimal`: match a single Decimal token."""
    localctx = SMTLIBv2Parser.DecimalContext(self, self._ctx, self.state)
    self.enterRule(localctx, 18, self.RULE_decimal)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 266
        self.match(SMTLIBv2Parser.Decimal)
    except RecognitionException as re:
        # Record the error on the context and let the handler recover.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class HexadecimalContext(ParserRuleContext):
    """Parse-tree context for the `hexadecimal` grammar rule (one HexDecimal token)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def HexDecimal(self):
        # Accessor for the matched HexDecimal terminal.
        return self.getToken(SMTLIBv2Parser.HexDecimal, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_hexadecimal

    def enterRule(self, listener:ParseTreeListener):
        try:
            handler = listener.enterHexadecimal
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitHexadecimal
        except AttributeError:
            return
        handler(self)
def hexadecimal(self):
    """Parse rule `hexadecimal`: match a single HexDecimal token."""
    localctx = SMTLIBv2Parser.HexadecimalContext(self, self._ctx, self.state)
    self.enterRule(localctx, 20, self.RULE_hexadecimal)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 268
        self.match(SMTLIBv2Parser.HexDecimal)
    except RecognitionException as re:
        # Record the error on the context and let the handler recover.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BinaryContext(ParserRuleContext):
    """Parse-tree context for the `binary` grammar rule (one Binary token)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Binary(self):
        # Accessor for the matched Binary terminal.
        return self.getToken(SMTLIBv2Parser.Binary, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_binary

    def enterRule(self, listener:ParseTreeListener):
        try:
            handler = listener.enterBinary
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitBinary
        except AttributeError:
            return
        handler(self)
def binary(self):
    """Parse rule `binary`: match a single Binary token."""
    localctx = SMTLIBv2Parser.BinaryContext(self, self._ctx, self.state)
    self.enterRule(localctx, 22, self.RULE_binary)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 270
        self.match(SMTLIBv2Parser.Binary)
    except RecognitionException as re:
        # Record the error on the context and let the handler recover.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StringContext(ParserRuleContext):
    """Parse-tree context for the `string` grammar rule (one String token)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def String(self):
        # Accessor for the matched String terminal.
        return self.getToken(SMTLIBv2Parser.String, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_string

    def enterRule(self, listener:ParseTreeListener):
        try:
            handler = listener.enterString
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitString
        except AttributeError:
            return
        handler(self)
def string(self):
    """Parse rule `string`: match a single String token."""
    localctx = SMTLIBv2Parser.StringContext(self, self._ctx, self.state)
    self.enterRule(localctx, 24, self.RULE_string)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 272
        self.match(SMTLIBv2Parser.String)
    except RecognitionException as re:
        # Record the error on the context and let the handler recover.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Reg_constContext(ParserRuleContext):
    """Parse-tree context for the `reg_const` grammar rule (one RegConst token)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def RegConst(self):
        # Accessor for the matched RegConst terminal.
        return self.getToken(SMTLIBv2Parser.RegConst, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_reg_const

    def enterRule(self, listener:ParseTreeListener):
        try:
            handler = listener.enterReg_const
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitReg_const
        except AttributeError:
            return
        handler(self)
def reg_const(self):
    """Parse rule `reg_const`: match a single RegConst token."""
    localctx = SMTLIBv2Parser.Reg_constContext(self, self._ctx, self.state)
    self.enterRule(localctx, 26, self.RULE_reg_const)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 274
        self.match(SMTLIBv2Parser.RegConst)
    except RecognitionException as re:
        # Record the error on the context and let the handler recover.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class KeywordContext(ParserRuleContext):
    """Parse-tree context for the `keyword` grammar rule.

    Holds either a predefKeyword child or a Colon token followed by a
    simpleSymbol child.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def predefKeyword(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.PredefKeywordContext, 0)

    def Colon(self):
        return self.getToken(SMTLIBv2Parser.Colon, 0)

    def simpleSymbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SimpleSymbolContext, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_keyword

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterKeyword
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitKeyword
        except AttributeError:
            return
        handler(self)
def keyword(self):
    """Parse rule `keyword`: predefKeyword | Colon simpleSymbol.

    The alternative is chosen from a single token of lookahead.
    """
    localctx = SMTLIBv2Parser.KeywordContext(self, self._ctx, self.state)
    self.enterRule(localctx, 28, self.RULE_keyword)
    try:
        self.state = 279
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Alt 1: any of the predefined :keyword tokens.
        if token in [SMTLIBv2Parser.PK_AllStatistics, SMTLIBv2Parser.PK_AssertionStackLevels, SMTLIBv2Parser.PK_Authors, SMTLIBv2Parser.PK_Category, SMTLIBv2Parser.PK_Chainable, SMTLIBv2Parser.PK_Definition, SMTLIBv2Parser.PK_DiagnosticOutputChannel, SMTLIBv2Parser.PK_ErrorBehaviour, SMTLIBv2Parser.PK_Extension, SMTLIBv2Parser.PK_Funs, SMTLIBv2Parser.PK_FunsDescription, SMTLIBv2Parser.PK_GlobalDeclarations, SMTLIBv2Parser.PK_InteractiveMode, SMTLIBv2Parser.PK_Language, SMTLIBv2Parser.PK_LeftAssoc, SMTLIBv2Parser.PK_License, SMTLIBv2Parser.PK_Named, SMTLIBv2Parser.PK_Name, SMTLIBv2Parser.PK_Notes, SMTLIBv2Parser.PK_Pattern, SMTLIBv2Parser.PK_PrintSuccess, SMTLIBv2Parser.PK_ProduceAssertions, SMTLIBv2Parser.PK_ProduceAssignments, SMTLIBv2Parser.PK_ProduceModels, SMTLIBv2Parser.PK_ProduceProofs, SMTLIBv2Parser.PK_ProduceUnsatAssumptions, SMTLIBv2Parser.PK_ProduceUnsatCores, SMTLIBv2Parser.PK_RandomSeed, SMTLIBv2Parser.PK_ReasonUnknown, SMTLIBv2Parser.PK_RegularOutputChannel, SMTLIBv2Parser.PK_ReproducibleResourceLimit, SMTLIBv2Parser.PK_RightAssoc, SMTLIBv2Parser.PK_SmtLibVersion, SMTLIBv2Parser.PK_Sorts, SMTLIBv2Parser.PK_SortsDescription, SMTLIBv2Parser.PK_Source, SMTLIBv2Parser.PK_Status, SMTLIBv2Parser.PK_Theories, SMTLIBv2Parser.PK_Values, SMTLIBv2Parser.PK_Verbosity, SMTLIBv2Parser.PK_Version]:
            self.enterOuterAlt(localctx, 1)
            self.state = 276
            self.predefKeyword()
            pass
        # Alt 2: a user keyword written as ':' simpleSymbol.
        elif token in [SMTLIBv2Parser.Colon]:
            self.enterOuterAlt(localctx, 2)
            self.state = 277
            self.match(SMTLIBv2Parser.Colon)
            self.state = 278
            self.simpleSymbol()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Spec_constantContext(ParserRuleContext):
    """Parse-tree context for the `spec_constant` grammar rule.

    Exactly one of the child accessors below is populated, depending on
    which alternative was matched.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def numeral(self, i:int=None):
        # i is None -> list of all numeral children; otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(SMTLIBv2Parser.NumeralContext, i)
        return self.getTypedRuleContexts(SMTLIBv2Parser.NumeralContext)

    def decimal(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.DecimalContext, 0)

    def hexadecimal(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.HexadecimalContext, 0)

    def binary(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.BinaryContext, 0)

    def string(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.StringContext, 0)

    def b_value(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.B_valueContext, 0)

    def reg_const(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Reg_constContext, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def GRW_Underscore(self):
        return self.getToken(SMTLIBv2Parser.GRW_Underscore, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_spec_constant

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterSpec_constant
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitSpec_constant
        except AttributeError:
            return
        handler(self)
def spec_constant(self):
    """Parse rule `spec_constant`.

    Alternatives: numeral | decimal | hexadecimal | binary | string |
    b_value | reg_const | '(' '_' <T__0> numeral numeral ')'.
    The alternative is chosen from a single token of lookahead.
    """
    localctx = SMTLIBv2Parser.Spec_constantContext(self, self._ctx, self.state)
    self.enterRule(localctx, 30, self.RULE_spec_constant)
    try:
        self.state = 295
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [SMTLIBv2Parser.Numeral]:
            self.enterOuterAlt(localctx, 1)
            self.state = 281
            self.numeral()
            pass
        elif token in [SMTLIBv2Parser.Decimal]:
            self.enterOuterAlt(localctx, 2)
            self.state = 282
            self.decimal()
            pass
        elif token in [SMTLIBv2Parser.HexDecimal]:
            self.enterOuterAlt(localctx, 3)
            self.state = 283
            self.hexadecimal()
            pass
        elif token in [SMTLIBv2Parser.Binary]:
            self.enterOuterAlt(localctx, 4)
            self.state = 284
            self.binary()
            pass
        elif token in [SMTLIBv2Parser.String]:
            self.enterOuterAlt(localctx, 5)
            self.state = 285
            self.string()
            pass
        elif token in [SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_True]:
            self.enterOuterAlt(localctx, 6)
            self.state = 286
            self.b_value()
            pass
        elif token in [SMTLIBv2Parser.RegConst]:
            self.enterOuterAlt(localctx, 7)
            self.state = 287
            self.reg_const()
            pass
        # Alt 8: parenthesised indexed constant '(' '_' ... numeral numeral ')'.
        elif token in [SMTLIBv2Parser.ParOpen]:
            self.enterOuterAlt(localctx, 8)
            self.state = 288
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 289
            self.match(SMTLIBv2Parser.GRW_Underscore)
            self.state = 290
            self.match(SMTLIBv2Parser.T__0)
            self.state = 291
            self.numeral()
            self.state = 292
            self.numeral()
            self.state = 293
            self.match(SMTLIBv2Parser.ParClose)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class S_exprContext(ParserRuleContext):
    """Parse-tree context for the `s_expr` grammar rule.

    An s-expression is a constant, symbol, keyword, or a parenthesised
    list of nested s-expressions.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def spec_constant(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Spec_constantContext, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def keyword(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.KeywordContext, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def s_expr(self, i:int=None):
        # i is None -> list of all nested s_expr children; otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(SMTLIBv2Parser.S_exprContext, i)
        return self.getTypedRuleContexts(SMTLIBv2Parser.S_exprContext)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_s_expr

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterS_expr
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitS_expr
        except AttributeError:
            return
        handler(self)
def s_expr(self):
    """Parse rule `s_expr` (recursive).

    Alternatives: spec_constant | symbol | keyword | '(' s_expr* ')'.
    Alternative selection uses adaptive (ATN) prediction rather than a
    single lookahead token.
    """
    localctx = SMTLIBv2Parser.S_exprContext(self, self._ctx, self.state)
    self.enterRule(localctx, 32, self.RULE_s_expr)
    self._la = 0 # Token type
    try:
        self.state = 308
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,5,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 297
            self.spec_constant()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 298
            self.symbol()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 299
            self.keyword()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 300
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 304
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Zero-or-more nested s-expressions: loop while the lookahead
            # token is in the FIRST set of s_expr (checked via token bitmasks).
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.Colon - 92)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 92)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 92)) | (1 << (SMTLIBv2Parser.PK_Authors - 92)) | (1 << (SMTLIBv2Parser.PK_Category - 92)) | (1 << (SMTLIBv2Parser.PK_Chainable - 92)) | (1 << (SMTLIBv2Parser.PK_Definition - 92)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 92)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 92)) | (1 << (SMTLIBv2Parser.PK_Extension - 92)) | (1 << (SMTLIBv2Parser.PK_Funs - 92)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 92)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 92)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 92)) | (1 << (SMTLIBv2Parser.PK_Language - 92)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 92)) | (1 << (SMTLIBv2Parser.PK_License - 92)) | (1 << (SMTLIBv2Parser.PK_Named - 92)) | (1 << (SMTLIBv2Parser.PK_Name - 92)) | (1 << (SMTLIBv2Parser.PK_Notes - 92)) | (1 << (SMTLIBv2Parser.PK_Pattern - 92)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 92)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 92)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 92)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 92)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 92)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 92)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 92)) | (1 << (SMTLIBv2Parser.PK_Sorts - 92)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 92)) | (1 << (SMTLIBv2Parser.PK_Source - 92)) | (1 << (SMTLIBv2Parser.PK_Status - 92)) | (1 << (SMTLIBv2Parser.PK_Theories - 92)) | (1 << (SMTLIBv2Parser.PK_Values - 92)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 92)) | (1 << (SMTLIBv2Parser.PK_Version - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0):
                self.state = 301
                self.s_expr()
                self.state = 306
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 307
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IndexContext(ParserRuleContext):
    """Parse-tree context for the `index` grammar rule (numeral or symbol)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def numeral(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.NumeralContext, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_index

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterIndex
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitIndex
        except AttributeError:
            return
        handler(self)
def index(self):
    """Parse rule `index`: numeral | symbol.

    The alternative is chosen from a single token of lookahead.
    """
    localctx = SMTLIBv2Parser.IndexContext(self, self._ctx, self.state)
    self.enterRule(localctx, 34, self.RULE_index)
    try:
        self.state = 312
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [SMTLIBv2Parser.Numeral]:
            self.enterOuterAlt(localctx, 1)
            self.state = 310
            self.numeral()
            pass
        # Alt 2: any token that can start a symbol.
        elif token in [SMTLIBv2Parser.QuotedSymbol, SMTLIBv2Parser.PS_Not, SMTLIBv2Parser.PS_Bool, SMTLIBv2Parser.PS_Int, SMTLIBv2Parser.PS_Real, SMTLIBv2Parser.PS_ContinuedExecution, SMTLIBv2Parser.PS_Error, SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_ImmediateExit, SMTLIBv2Parser.PS_Incomplete, SMTLIBv2Parser.PS_Logic, SMTLIBv2Parser.PS_Memout, SMTLIBv2Parser.PS_Sat, SMTLIBv2Parser.PS_Success, SMTLIBv2Parser.PS_Theory, SMTLIBv2Parser.PS_True, SMTLIBv2Parser.PS_Unknown, SMTLIBv2Parser.PS_Unsupported, SMTLIBv2Parser.PS_Unsat, SMTLIBv2Parser.UndefinedSymbol]:
            self.enterOuterAlt(localctx, 2)
            self.state = 311
            self.symbol()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IdentifierContext(ParserRuleContext):
    """Parse-tree context for the `identifier` grammar rule.

    Either a plain symbol, or an indexed identifier '(' '_' symbol index+ ')'.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def GRW_Underscore(self):
        return self.getToken(SMTLIBv2Parser.GRW_Underscore, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def index(self, i:int=None):
        # i is None -> list of all index children; otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(SMTLIBv2Parser.IndexContext, i)
        return self.getTypedRuleContexts(SMTLIBv2Parser.IndexContext)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_identifier

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterIdentifier
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitIdentifier
        except AttributeError:
            return
        handler(self)
def identifier(self):
    """Parse rule `identifier`: symbol | '(' '_' symbol index+ ')'.

    The alternative is chosen from a single token of lookahead.
    """
    localctx = SMTLIBv2Parser.IdentifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 36, self.RULE_identifier)
    self._la = 0 # Token type
    try:
        self.state = 325
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Alt 1: any token that can start a symbol.
        if token in [SMTLIBv2Parser.QuotedSymbol, SMTLIBv2Parser.PS_Not, SMTLIBv2Parser.PS_Bool, SMTLIBv2Parser.PS_Int, SMTLIBv2Parser.PS_Real, SMTLIBv2Parser.PS_ContinuedExecution, SMTLIBv2Parser.PS_Error, SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_ImmediateExit, SMTLIBv2Parser.PS_Incomplete, SMTLIBv2Parser.PS_Logic, SMTLIBv2Parser.PS_Memout, SMTLIBv2Parser.PS_Sat, SMTLIBv2Parser.PS_Success, SMTLIBv2Parser.PS_Theory, SMTLIBv2Parser.PS_True, SMTLIBv2Parser.PS_Unknown, SMTLIBv2Parser.PS_Unsupported, SMTLIBv2Parser.PS_Unsat, SMTLIBv2Parser.UndefinedSymbol]:
            self.enterOuterAlt(localctx, 1)
            self.state = 314
            self.symbol()
            pass
        # Alt 2: indexed identifier '(' '_' symbol index+ ')'.
        elif token in [SMTLIBv2Parser.ParOpen]:
            self.enterOuterAlt(localctx, 2)
            self.state = 315
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 316
            self.match(SMTLIBv2Parser.GRW_Underscore)
            self.state = 317
            self.symbol()
            self.state = 319
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One-or-more index children (do/while shape): loop until the
            # lookahead token leaves the FIRST set of `index`.
            while True:
                self.state = 318
                self.index()
                self.state = 321
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.Numeral or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 323
            self.match(SMTLIBv2Parser.ParClose)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Attribute_valueContext(ParserRuleContext):
    """Parse-tree context for the `attribute_value` grammar rule.

    A constant, a symbol, or a parenthesised list of s-expressions.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def spec_constant(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Spec_constantContext, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def s_expr(self, i:int=None):
        # i is None -> list of all s_expr children; otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(SMTLIBv2Parser.S_exprContext, i)
        return self.getTypedRuleContexts(SMTLIBv2Parser.S_exprContext)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_attribute_value

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterAttribute_value
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitAttribute_value
        except AttributeError:
            return
        handler(self)
def attribute_value(self):
    """Parse rule `attribute_value`: spec_constant | symbol | '(' s_expr* ')'.

    Alternative selection uses adaptive (ATN) prediction.
    """
    localctx = SMTLIBv2Parser.Attribute_valueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 38, self.RULE_attribute_value)
    self._la = 0 # Token type
    try:
        self.state = 337
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,10,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 327
            self.spec_constant()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 328
            self.symbol()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 329
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 333
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Zero-or-more s-expressions: loop while the lookahead token is
            # in the FIRST set of s_expr (checked via token bitmasks).
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.Colon - 92)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 92)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 92)) | (1 << (SMTLIBv2Parser.PK_Authors - 92)) | (1 << (SMTLIBv2Parser.PK_Category - 92)) | (1 << (SMTLIBv2Parser.PK_Chainable - 92)) | (1 << (SMTLIBv2Parser.PK_Definition - 92)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 92)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 92)) | (1 << (SMTLIBv2Parser.PK_Extension - 92)) | (1 << (SMTLIBv2Parser.PK_Funs - 92)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 92)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 92)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 92)) | (1 << (SMTLIBv2Parser.PK_Language - 92)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 92)) | (1 << (SMTLIBv2Parser.PK_License - 92)) | (1 << (SMTLIBv2Parser.PK_Named - 92)) | (1 << (SMTLIBv2Parser.PK_Name - 92)) | (1 << (SMTLIBv2Parser.PK_Notes - 92)) | (1 << (SMTLIBv2Parser.PK_Pattern - 92)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 92)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 92)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 92)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 92)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 92)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 92)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 92)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 92)) | (1 << (SMTLIBv2Parser.PK_Sorts - 92)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 92)) | (1 << (SMTLIBv2Parser.PK_Source - 92)) | (1 << (SMTLIBv2Parser.PK_Status - 92)) | (1 << (SMTLIBv2Parser.PK_Theories - 92)) | (1 << (SMTLIBv2Parser.PK_Values - 92)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 92)) | (1 << (SMTLIBv2Parser.PK_Version - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0):
                self.state = 330
                self.s_expr()
                self.state = 335
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 336
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AttributeContext(ParserRuleContext):
    """Parse-tree context for the `attribute` grammar rule.

    A keyword, optionally followed by an attribute_value child.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def keyword(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.KeywordContext, 0)

    def attribute_value(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Attribute_valueContext, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_attribute

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterAttribute
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitAttribute
        except AttributeError:
            return
        handler(self)
def attribute(self):
    """Parse rule `attribute`: keyword | keyword attribute_value.

    Alternative selection uses adaptive (ATN) prediction, since both
    alternatives start with `keyword`.
    """
    localctx = SMTLIBv2Parser.AttributeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 40, self.RULE_attribute)
    try:
        self.state = 343
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,11,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 339
            self.keyword()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 340
            self.keyword()
            self.state = 341
            self.attribute_value()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SortContext(ParserRuleContext):
    """Parse-tree context for the `sort` grammar rule.

    A plain identifier, or a parameterised sort '(' identifier sort+ ')'.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.IdentifierContext, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def sort(self, i:int=None):
        # i is None -> list of all nested sort children; otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(SMTLIBv2Parser.SortContext, i)
        return self.getTypedRuleContexts(SMTLIBv2Parser.SortContext)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_sort

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterSort
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitSort
        except AttributeError:
            return
        handler(self)
def sort(self):
    """Parse rule `sort` (recursive): identifier | '(' identifier sort+ ')'.

    Alternative selection uses adaptive (ATN) prediction.
    """
    localctx = SMTLIBv2Parser.SortContext(self, self._ctx, self.state)
    self.enterRule(localctx, 42, self.RULE_sort)
    self._la = 0 # Token type
    try:
        self.state = 355
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,13,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 345
            self.identifier()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 346
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 347
            self.identifier()
            self.state = 349
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One-or-more sort arguments (do/while shape): loop until the
            # lookahead token leaves the FIRST set of `sort`.
            while True:
                self.state = 348
                self.sort()
                self.state = 351
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 353
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Qual_identifierContext(ParserRuleContext):
    """Parse-tree context for the `qual_identifier` grammar rule.

    A plain identifier, or a sort-ascribed one: '(' 'as' identifier sort ')'.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.IdentifierContext, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def GRW_As(self):
        return self.getToken(SMTLIBv2Parser.GRW_As, 0)

    def sort(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SortContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_qual_identifier

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterQual_identifier
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitQual_identifier
        except AttributeError:
            return
        handler(self)
def qual_identifier(self):
    """Parse rule `qual_identifier`: identifier | '(' 'as' identifier sort ')'.

    Alternative selection uses adaptive (ATN) prediction.
    """
    localctx = SMTLIBv2Parser.Qual_identifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 44, self.RULE_qual_identifier)
    try:
        self.state = 364
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,14,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 357
            self.identifier()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 358
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 359
            self.match(SMTLIBv2Parser.GRW_As)
            self.state = 360
            self.identifier()
            self.state = 361
            self.sort()
            self.state = 362
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Var_bindingContext(ParserRuleContext):
    """Parse-tree context for the `var_binding` grammar rule: '(' symbol term ')'."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def term(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.TermContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_var_binding

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the hook (EAFP).
        try:
            handler = listener.enterVar_binding
        except AttributeError:
            return
        handler(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            handler = listener.exitVar_binding
        except AttributeError:
            return
        handler(self)
def var_binding(self):
    """Parse the var_binding rule: '(' symbol term ')'.

    Returns the populated Var_bindingContext.
    """
    localctx = SMTLIBv2Parser.Var_bindingContext(self, self._ctx, self.state)
    self.enterRule(localctx, 46, self.RULE_var_binding)
    try:
        # Single alternative: a parenthesized symbol/term pair.
        self.enterOuterAlt(localctx, 1)
        self.state = 366
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 367
        self.symbol()
        self.state = 368
        self.term()
        self.state = 369
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Record the error on the context, then report and resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Sorted_varContext(ParserRuleContext):
    """Parse-tree node for the sorted_var rule: '(' symbol sort ')'."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def sort(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SortContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_sorted_var

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterSorted_var", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitSorted_var", None)
        if handler is not None:
            handler(self)
def sorted_var(self):
    """Parse the sorted_var rule: '(' symbol sort ')'.

    Returns the populated Sorted_varContext.
    """
    localctx = SMTLIBv2Parser.Sorted_varContext(self, self._ctx, self.state)
    self.enterRule(localctx, 48, self.RULE_sorted_var)
    try:
        # Single alternative: a parenthesized symbol/sort pair.
        self.enterOuterAlt(localctx, 1)
        self.state = 371
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 372
        self.symbol()
        self.state = 373
        self.sort()
        self.state = 374
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Record the error on the context, then report and resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PatternContext(ParserRuleContext):
    """Parse-tree node for the pattern rule: a symbol, or '(' symbol symbol+ ')'."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def symbol(self, i:int=None):
        # No index: all symbol children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SymbolContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, i)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_pattern

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterPattern", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitPattern", None)
        if handler is not None:
            handler(self)
def pattern(self):
    """Parse the pattern rule.

    Alternatives (chosen on the next lookahead token):
      1. symbol
      2. '(' symbol symbol+ ')'
    """
    localctx = SMTLIBv2Parser.PatternContext(self, self._ctx, self.state)
    self.enterRule(localctx, 50, self.RULE_pattern)
    self._la = 0 # Token type
    try:
        self.state = 386
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [SMTLIBv2Parser.QuotedSymbol, SMTLIBv2Parser.PS_Not, SMTLIBv2Parser.PS_Bool, SMTLIBv2Parser.PS_Int, SMTLIBv2Parser.PS_Real, SMTLIBv2Parser.PS_ContinuedExecution, SMTLIBv2Parser.PS_Error, SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_ImmediateExit, SMTLIBv2Parser.PS_Incomplete, SMTLIBv2Parser.PS_Logic, SMTLIBv2Parser.PS_Memout, SMTLIBv2Parser.PS_Sat, SMTLIBv2Parser.PS_Success, SMTLIBv2Parser.PS_Theory, SMTLIBv2Parser.PS_True, SMTLIBv2Parser.PS_Unknown, SMTLIBv2Parser.PS_Unsupported, SMTLIBv2Parser.PS_Unsat, SMTLIBv2Parser.UndefinedSymbol]:
            # Alt 1: a bare symbol.
            self.enterOuterAlt(localctx, 1)
            self.state = 376
            self.symbol()
            pass
        elif token in [SMTLIBv2Parser.ParOpen]:
            # Alt 2: '(' symbol symbol+ ')'
            self.enterOuterAlt(localctx, 2)
            self.state = 377
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 378
            self.symbol()
            self.state = 380
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One-or-more loop over the trailing symbols; the serialized
            # follow-set bitmask below decides whether to continue.
            while True:
                self.state = 379
                self.symbol()
                self.state = 382
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 384
            self.match(SMTLIBv2Parser.ParClose)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Match_caseContext(ParserRuleContext):
    """Parse-tree node for the match_case rule: '(' pattern term ')'."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def pattern(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.PatternContext, 0)

    def term(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.TermContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_match_case

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterMatch_case", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitMatch_case", None)
        if handler is not None:
            handler(self)
def match_case(self):
    """Parse the match_case rule: '(' pattern term ')'.

    Returns the populated Match_caseContext.
    """
    localctx = SMTLIBv2Parser.Match_caseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 52, self.RULE_match_case)
    try:
        # Single alternative: a parenthesized pattern/term pair.
        self.enterOuterAlt(localctx, 1)
        self.state = 388
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 389
        self.pattern()
        self.state = 390
        self.term()
        self.state = 391
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Record the error on the context, then report and resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TermContext(ParserRuleContext):
    """Parse-tree node for the term rule.

    Covers all nine term forms parsed by `term()`: constants, qualified
    identifiers, applications, indexed applications, let / forall /
    exists / match constructs, and '!'-annotated terms.
    """

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def spec_constant(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Spec_constantContext, 0)

    def qual_identifier(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Qual_identifierContext, 0)

    def ParOpen(self, i:int=None):
        # No index: all '(' tokens; with an index: the i-th one.
        if i is None:
            return self.getTokens(SMTLIBv2Parser.ParOpen)
        return self.getToken(SMTLIBv2Parser.ParOpen, i)

    def ParClose(self, i:int=None):
        if i is None:
            return self.getTokens(SMTLIBv2Parser.ParClose)
        return self.getToken(SMTLIBv2Parser.ParClose, i)

    def term(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.TermContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.TermContext, i)

    def GRW_Underscore(self):
        return self.getToken(SMTLIBv2Parser.GRW_Underscore, 0)

    def GRW_Let(self):
        return self.getToken(SMTLIBv2Parser.GRW_Let, 0)

    def var_binding(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Var_bindingContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Var_bindingContext, i)

    def GRW_Forall(self):
        return self.getToken(SMTLIBv2Parser.GRW_Forall, 0)

    def sorted_var(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Sorted_varContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Sorted_varContext, i)

    def GRW_Exists(self):
        return self.getToken(SMTLIBv2Parser.GRW_Exists, 0)

    def GRW_Match(self):
        return self.getToken(SMTLIBv2Parser.GRW_Match, 0)

    def match_case(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Match_caseContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Match_caseContext, i)

    def GRW_Exclamation(self):
        return self.getToken(SMTLIBv2Parser.GRW_Exclamation, 0)

    def attribute(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.AttributeContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext, i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_term

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTerm", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTerm", None)
        if handler is not None:
            handler(self)
def term(self):
    """Parse the term rule.

    Alternatives (selected by adaptive prediction, decision 24):
      1. spec_constant
      2. qual_identifier
      3. '(' qual_identifier term+ ')'
      4. '(' '(' GRW_Underscore qual_identifier term+ ')' ')'
      5. '(' GRW_Let '(' var_binding+ ')' term ')'
      6. '(' GRW_Forall '(' sorted_var+ ')' term ')'
      7. '(' GRW_Exists '(' sorted_var+ ')' term ')'
      8. '(' GRW_Match term '(' match_case+ ')' ')'
      9. '(' GRW_Exclamation term attribute+ ')'

    Returns the populated TermContext. The `while True` loops are the
    generated one-or-more repetitions; each exits when the serialized
    follow-set bitmask no longer matches the lookahead token.
    """
    localctx = SMTLIBv2Parser.TermContext(self, self._ctx, self.state)
    self.enterRule(localctx, 54, self.RULE_term)
    self._la = 0 # Token type
    try:
        self.state = 474
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,24,self._ctx)
        if la_ == 1:
            # Alt 1: a specification constant.
            self.enterOuterAlt(localctx, 1)
            self.state = 393
            self.spec_constant()
            pass
        elif la_ == 2:
            # Alt 2: a qualified identifier.
            self.enterOuterAlt(localctx, 2)
            self.state = 394
            self.qual_identifier()
            pass
        elif la_ == 3:
            # Alt 3: function application '(' qual_identifier term+ ')'.
            self.enterOuterAlt(localctx, 3)
            self.state = 395
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 396
            self.qual_identifier()
            self.state = 398
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 397
                self.term()
                self.state = 400
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0)):
                    break
            self.state = 402
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 4:
            # Alt 4: indexed application '(' '(' GRW_Underscore qual_identifier term+ ')' ')'.
            self.enterOuterAlt(localctx, 4)
            self.state = 404
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 405
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 406
            self.match(SMTLIBv2Parser.GRW_Underscore)
            self.state = 407
            self.qual_identifier()
            self.state = 409
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 408
                self.term()
                self.state = 411
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0)):
                    break
            self.state = 413
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 414
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 5:
            # Alt 5: let binding '(' GRW_Let '(' var_binding+ ')' term ')'.
            self.enterOuterAlt(localctx, 5)
            self.state = 416
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 417
            self.match(SMTLIBv2Parser.GRW_Let)
            self.state = 418
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 420
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 419
                self.var_binding()
                self.state = 422
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break
            self.state = 424
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 425
            self.term()
            self.state = 426
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 6:
            # Alt 6: universal quantifier '(' GRW_Forall '(' sorted_var+ ')' term ')'.
            self.enterOuterAlt(localctx, 6)
            self.state = 428
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 429
            self.match(SMTLIBv2Parser.GRW_Forall)
            self.state = 430
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 432
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 431
                self.sorted_var()
                self.state = 434
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break
            self.state = 436
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 437
            self.term()
            self.state = 438
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 7:
            # Alt 7: existential quantifier '(' GRW_Exists '(' sorted_var+ ')' term ')'.
            self.enterOuterAlt(localctx, 7)
            self.state = 440
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 441
            self.match(SMTLIBv2Parser.GRW_Exists)
            self.state = 442
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 444
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 443
                self.sorted_var()
                self.state = 446
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break
            self.state = 448
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 449
            self.term()
            self.state = 450
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 8:
            # Alt 8: match expression '(' GRW_Match term '(' match_case+ ')' ')'.
            self.enterOuterAlt(localctx, 8)
            self.state = 452
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 453
            self.match(SMTLIBv2Parser.GRW_Match)
            self.state = 454
            self.term()
            self.state = 455
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 457
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 456
                self.match_case()
                self.state = 459
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break
            self.state = 461
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 462
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 9:
            # Alt 9: annotated term '(' GRW_Exclamation term attribute+ ')'.
            self.enterOuterAlt(localctx, 9)
            self.state = 464
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 465
            self.match(SMTLIBv2Parser.GRW_Exclamation)
            self.state = 466
            self.term()
            self.state = 468
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 467
                self.attribute()
                self.state = 470
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << 
                (SMTLIBv2Parser.PK_Version - 96)))) != 0)):
                    break
            self.state = 472
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Sort_symbol_declContext(ParserRuleContext):
    """Parse-tree node for the sort_symbol_decl rule: '(' identifier numeral attribute* ')'."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def identifier(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.IdentifierContext, 0)

    def numeral(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.NumeralContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def attribute(self, i:int=None):
        # No index: all attribute children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.AttributeContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext, i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_sort_symbol_decl

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterSort_symbol_decl", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitSort_symbol_decl", None)
        if handler is not None:
            handler(self)
def sort_symbol_decl(self):
    """Parse the sort_symbol_decl rule: '(' identifier numeral attribute* ')'.

    Returns the populated Sort_symbol_declContext.
    """
    localctx = SMTLIBv2Parser.Sort_symbol_declContext(self, self._ctx, self.state)
    self.enterRule(localctx, 56, self.RULE_sort_symbol_decl)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 476
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 477
        self.identifier()
        self.state = 478
        self.numeral()
        self.state = 482
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero-or-more attributes; the bitmask tests whether the lookahead
        # token is in the attribute first-set (Colon and the PK_* keywords).
        while ((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << 
        (SMTLIBv2Parser.PK_Version - 96)))) != 0):
            self.state = 479
            self.attribute()
            self.state = 484
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 485
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Meta_spec_constantContext(ParserRuleContext):
    """Parse-tree node for the meta_spec_constant rule: GRW_Numeral | GRW_Decimal."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def GRW_Numeral(self):
        return self.getToken(SMTLIBv2Parser.GRW_Numeral, 0)

    def GRW_Decimal(self):
        return self.getToken(SMTLIBv2Parser.GRW_Decimal, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_meta_spec_constant

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterMeta_spec_constant", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitMeta_spec_constant", None)
        if handler is not None:
            handler(self)
def meta_spec_constant(self):
    """Parse the meta_spec_constant rule: GRW_Numeral | GRW_Decimal.

    Returns the populated Meta_spec_constantContext.
    """
    localctx = SMTLIBv2Parser.Meta_spec_constantContext(self, self._ctx, self.state)
    self.enterRule(localctx, 58, self.RULE_meta_spec_constant)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 487
        _la = self._input.LA(1)
        # Token-set match: consume the token if it is one of the two
        # accepted keywords, otherwise attempt inline recovery.
        if not(_la==SMTLIBv2Parser.GRW_Decimal or _la==SMTLIBv2Parser.GRW_Numeral):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Fun_symbol_declContext(ParserRuleContext):
    """Parse-tree node for the fun_symbol_decl rule.

    Covers the three forms parsed by `fun_symbol_decl()`:
    '(' spec_constant sort attribute* ')',
    '(' meta_spec_constant sort attribute* ')', and
    '(' identifier sort+ attribute* ')'.
    """

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def spec_constant(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Spec_constantContext, 0)

    def sort(self, i:int=None):
        # No index: all sort children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SortContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.SortContext, i)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def attribute(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.AttributeContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext, i)

    def meta_spec_constant(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Meta_spec_constantContext, 0)

    def identifier(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.IdentifierContext, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_fun_symbol_decl

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterFun_symbol_decl", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitFun_symbol_decl", None)
        if handler is not None:
            handler(self)
def fun_symbol_decl(self):
    """Parse the fun_symbol_decl rule.

    Alternatives (selected by adaptive prediction, decision 30):
      1. '(' spec_constant sort attribute* ')'
      2. '(' meta_spec_constant sort attribute* ')'
      3. '(' identifier sort+ attribute* ')'

    Returns the populated Fun_symbol_declContext. The `while` bitmask
    loops consume optional trailing attributes (Colon / PK_* tokens).
    """
    localctx = SMTLIBv2Parser.Fun_symbol_declContext(self, self._ctx, self.state)
    self.enterRule(localctx, 60, self.RULE_fun_symbol_decl)
    self._la = 0 # Token type
    try:
        self.state = 526
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,30,self._ctx)
        if la_ == 1:
            # Alt 1: a specification constant, its sort, and attributes.
            self.enterOuterAlt(localctx, 1)
            self.state = 489
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 490
            self.spec_constant()
            self.state = 491
            self.sort()
            self.state = 495
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while ((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << 
            (SMTLIBv2Parser.PK_Version - 96)))) != 0):
                self.state = 492
                self.attribute()
                self.state = 497
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 498
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 2:
            # Alt 2: a meta specification constant, its sort, and attributes.
            self.enterOuterAlt(localctx, 2)
            self.state = 500
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 501
            self.meta_spec_constant()
            self.state = 502
            self.sort()
            self.state = 506
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while ((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << 
            (SMTLIBv2Parser.PK_Version - 96)))) != 0):
                self.state = 503
                self.attribute()
                self.state = 508
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 509
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 3:
            # Alt 3: an identifier, one or more sorts, and attributes.
            self.enterOuterAlt(localctx, 3)
            self.state = 511
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 512
            self.identifier()
            self.state = 514
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One-or-more sorts; continue while the lookahead is in the
            # sort first-set.
            while True:
                self.state = 513
                self.sort()
                self.state = 516
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 521
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while ((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << 
            (SMTLIBv2Parser.PK_Version - 96)))) != 0):
                self.state = 518
                self.attribute()
                self.state = 523
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 524
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Par_fun_symbol_declContext(ParserRuleContext):
    """Parse-tree node for the ``par_fun_symbol_decl`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def fun_symbol_decl(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Fun_symbol_declContext, 0)

    def ParOpen(self, i: int = None):
        # i is None -> every ParOpen token, otherwise the i-th one.
        return self.getTokens(SMTLIBv2Parser.ParOpen) if i is None else self.getToken(SMTLIBv2Parser.ParOpen, i)

    def GRW_Par(self):
        return self.getToken(SMTLIBv2Parser.GRW_Par, 0)

    def ParClose(self, i: int = None):
        return self.getTokens(SMTLIBv2Parser.ParClose) if i is None else self.getToken(SMTLIBv2Parser.ParClose, i)

    def identifier(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.IdentifierContext, 0)

    def symbol(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SymbolContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, i)

    def sort(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SortContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.SortContext, i)

    def attribute(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.AttributeContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext, i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_par_fun_symbol_decl

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterPar_fun_symbol_decl"):
            listener.enterPar_fun_symbol_decl(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitPar_fun_symbol_decl"):
            listener.exitPar_fun_symbol_decl(self)
def par_fun_symbol_decl(self):
    """Parse a ``par_fun_symbol_decl`` (entered as rule 62).

    Grammar (as evidenced by the two alternatives below):
        par_fun_symbol_decl
            : fun_symbol_decl
            | ParOpen GRW_Par ParOpen symbol+ ParClose
              ParOpen identifier sort+ attribute* ParClose ParClose
            ;

    NOTE(review): ANTLR-generated code — the ``self.state`` numbers index
    the serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Par_fun_symbol_declContext(self, self._ctx, self.state)
    self.enterRule(localctx, 62, self.RULE_par_fun_symbol_decl)
    self._la = 0 # Token type
    try:
        self.state = 554
        self._errHandler.sync(self)
        # Adaptive prediction (decision 34) picks between the two alternatives.
        la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
        if la_ == 1:
            # Alt 1: a plain (non-parametric) fun_symbol_decl.
            self.enterOuterAlt(localctx, 1)
            self.state = 528
            self.fun_symbol_decl()
            pass
        elif la_ == 2:
            # Alt 2: parametric form "( par ( symbol+ ) ( identifier sort+ attribute* ) )".
            self.enterOuterAlt(localctx, 2)
            self.state = 529
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 530
            self.match(SMTLIBv2Parser.GRW_Par)
            self.state = 531
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 533
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # symbol+ : loop while the lookahead can start a symbol.
            while True:
                self.state = 532
                self.symbol()
                self.state = 535
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 537
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 538
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 539
            self.identifier()
            self.state = 541
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # sort+ : loop while the lookahead can start a sort.
            while True:
                self.state = 540
                self.sort()
                self.state = 543
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 548
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # attribute* : loop while the lookahead (offset by 96) is an attribute keyword.
            while ((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << (SMTLIBv2Parser.PK_Version - 96)))) != 0):
                self.state = 545
                self.attribute()
                self.state = 550
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 551
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 552
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Theory_attributeContext(ParserRuleContext):
    """Parse-tree node for the ``theory_attribute`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PK_Sorts(self):
        return self.getToken(SMTLIBv2Parser.PK_Sorts, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def sort_symbol_decl(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Sort_symbol_declContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Sort_symbol_declContext, i)

    def PK_Funs(self):
        return self.getToken(SMTLIBv2Parser.PK_Funs, 0)

    def par_fun_symbol_decl(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Par_fun_symbol_declContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Par_fun_symbol_declContext, i)

    def PK_SortsDescription(self):
        return self.getToken(SMTLIBv2Parser.PK_SortsDescription, 0)

    def string(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.StringContext, 0)

    def PK_FunsDescription(self):
        return self.getToken(SMTLIBv2Parser.PK_FunsDescription, 0)

    def PK_Definition(self):
        return self.getToken(SMTLIBv2Parser.PK_Definition, 0)

    def PK_Values(self):
        return self.getToken(SMTLIBv2Parser.PK_Values, 0)

    def PK_Notes(self):
        return self.getToken(SMTLIBv2Parser.PK_Notes, 0)

    def attribute(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_theory_attribute

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTheory_attribute"):
            listener.enterTheory_attribute(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTheory_attribute"):
            listener.exitTheory_attribute(self)
def theory_attribute(self):
    """Parse a ``theory_attribute`` (entered as rule 64).

    Grammar (as evidenced by the eight alternatives below):
        theory_attribute
            : PK_Sorts ParOpen sort_symbol_decl+ ParClose
            | PK_Funs ParOpen par_fun_symbol_decl+ ParClose
            | PK_SortsDescription string
            | PK_FunsDescription string
            | PK_Definition string
            | PK_Values string
            | PK_Notes string
            | attribute
            ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Theory_attributeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 64, self.RULE_theory_attribute)
    self._la = 0 # Token type
    try:
        self.state = 585
        self._errHandler.sync(self)
        # Adaptive prediction (decision 37) picks one of the eight alternatives.
        la_ = self._interp.adaptivePredict(self._input,37,self._ctx)
        if la_ == 1:
            # ":sorts ( sort_symbol_decl+ )"
            self.enterOuterAlt(localctx, 1)
            self.state = 556
            self.match(SMTLIBv2Parser.PK_Sorts)
            self.state = 557
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 559
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 558
                self.sort_symbol_decl()
                self.state = 561
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Each sort_symbol_decl starts with "(" — loop while one follows.
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break
            self.state = 563
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 2:
            # ":funs ( par_fun_symbol_decl+ )"
            self.enterOuterAlt(localctx, 2)
            self.state = 565
            self.match(SMTLIBv2Parser.PK_Funs)
            self.state = 566
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 568
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 567
                self.par_fun_symbol_decl()
                self.state = 570
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break
            self.state = 572
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 574
            self.match(SMTLIBv2Parser.PK_SortsDescription)
            self.state = 575
            self.string()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 576
            self.match(SMTLIBv2Parser.PK_FunsDescription)
            self.state = 577
            self.string()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 578
            self.match(SMTLIBv2Parser.PK_Definition)
            self.state = 579
            self.string()
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 580
            self.match(SMTLIBv2Parser.PK_Values)
            self.state = 581
            self.string()
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 582
            self.match(SMTLIBv2Parser.PK_Notes)
            self.state = 583
            self.string()
            pass
        elif la_ == 8:
            # Fallback: any generic attribute.
            self.enterOuterAlt(localctx, 8)
            self.state = 584
            self.attribute()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Theory_declContext(ParserRuleContext):
    """Parse-tree node for the ``theory_decl`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def PS_Theory(self):
        return self.getToken(SMTLIBv2Parser.PS_Theory, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def theory_attribute(self, i: int = None):
        # i is None -> all theory_attribute children, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Theory_attributeContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Theory_attributeContext, i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_theory_decl

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterTheory_decl"):
            listener.enterTheory_decl(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTheory_decl"):
            listener.exitTheory_decl(self)
def theory_decl(self):
    """Parse a ``theory_decl`` (entered as rule 66).

    Grammar:
        theory_decl : ParOpen PS_Theory symbol theory_attribute+ ParClose ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Theory_declContext(self, self._ctx, self.state)
    self.enterRule(localctx, 66, self.RULE_theory_decl)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 587
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 588
        self.match(SMTLIBv2Parser.PS_Theory)
        self.state = 589
        self.symbol()
        self.state = 591
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # theory_attribute+ : loop while the lookahead (offset by 96) is an attribute keyword.
        while True:
            self.state = 590
            self.theory_attribute()
            self.state = 593
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not (((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << (SMTLIBv2Parser.PK_Version - 96)))) != 0)):
                break
        self.state = 595
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Logic_attribueContext(ParserRuleContext):
    """Parse-tree node for the ``logic_attribue`` rule (ANTLR-generated).

    The name's spelling ("attribue") comes from the grammar and is part of
    the generated API, so it is kept as-is.
    """

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PK_Theories(self):
        return self.getToken(SMTLIBv2Parser.PK_Theories, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def symbol(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SymbolContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, i)

    def PK_Language(self):
        return self.getToken(SMTLIBv2Parser.PK_Language, 0)

    def string(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.StringContext, 0)

    def PK_Extension(self):
        return self.getToken(SMTLIBv2Parser.PK_Extension, 0)

    def PK_Values(self):
        return self.getToken(SMTLIBv2Parser.PK_Values, 0)

    def PK_Notes(self):
        return self.getToken(SMTLIBv2Parser.PK_Notes, 0)

    def attribute(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_logic_attribue

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterLogic_attribue"):
            listener.enterLogic_attribue(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitLogic_attribue"):
            listener.exitLogic_attribue(self)
def logic_attribue(self):
    """Parse a ``logic_attribue`` (entered as rule 68).

    Grammar (as evidenced by the six alternatives below):
        logic_attribue
            : PK_Theories ParOpen symbol+ ParClose
            | PK_Language string
            | PK_Extension string
            | PK_Values string
            | PK_Notes string
            | attribute
            ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Logic_attribueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 68, self.RULE_logic_attribue)
    self._la = 0 # Token type
    try:
        self.state = 615
        self._errHandler.sync(self)
        # Adaptive prediction (decision 40) picks one of the six alternatives.
        la_ = self._interp.adaptivePredict(self._input,40,self._ctx)
        if la_ == 1:
            # ":theories ( symbol+ )"
            self.enterOuterAlt(localctx, 1)
            self.state = 597
            self.match(SMTLIBv2Parser.PK_Theories)
            self.state = 598
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 600
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # symbol+ : loop while the lookahead can start a symbol.
            while True:
                self.state = 599
                self.symbol()
                self.state = 602
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 604
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 606
            self.match(SMTLIBv2Parser.PK_Language)
            self.state = 607
            self.string()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 608
            self.match(SMTLIBv2Parser.PK_Extension)
            self.state = 609
            self.string()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 610
            self.match(SMTLIBv2Parser.PK_Values)
            self.state = 611
            self.string()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 612
            self.match(SMTLIBv2Parser.PK_Notes)
            self.state = 613
            self.string()
            pass
        elif la_ == 6:
            # Fallback: any generic attribute.
            self.enterOuterAlt(localctx, 6)
            self.state = 614
            self.attribute()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LogicContext(ParserRuleContext):
    """Parse-tree node for the ``logic`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def PS_Logic(self):
        return self.getToken(SMTLIBv2Parser.PS_Logic, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def logic_attribue(self, i: int = None):
        # i is None -> all logic_attribue children, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Logic_attribueContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Logic_attribueContext, i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_logic

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterLogic"):
            listener.enterLogic(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitLogic"):
            listener.exitLogic(self)
def logic(self):
    """Parse a ``logic`` declaration (entered as rule 70).

    Grammar:
        logic : ParOpen PS_Logic symbol logic_attribue+ ParClose ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.LogicContext(self, self._ctx, self.state)
    self.enterRule(localctx, 70, self.RULE_logic)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 617
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 618
        self.match(SMTLIBv2Parser.PS_Logic)
        self.state = 619
        self.symbol()
        self.state = 621
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # logic_attribue+ : loop while the lookahead (offset by 96) is an attribute keyword.
        while True:
            self.state = 620
            self.logic_attribue()
            self.state = 623
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not (((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << (SMTLIBv2Parser.PK_Version - 96)))) != 0)):
                break
        self.state = 625
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Sort_decContext(ParserRuleContext):
    """Parse-tree node for the ``sort_dec`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def numeral(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.NumeralContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_sort_dec

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterSort_dec"):
            listener.enterSort_dec(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitSort_dec"):
            listener.exitSort_dec(self)
def sort_dec(self):
    """Parse a ``sort_dec`` (entered as rule 72).

    Grammar:
        sort_dec : ParOpen symbol numeral ParClose ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Sort_decContext(self, self._ctx, self.state)
    self.enterRule(localctx, 72, self.RULE_sort_dec)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 627
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 628
        self.symbol()
        self.state = 629
        self.numeral()
        self.state = 630
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Selector_decContext(ParserRuleContext):
    """Parse-tree node for the ``selector_dec`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def sort(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SortContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_selector_dec

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterSelector_dec"):
            listener.enterSelector_dec(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitSelector_dec"):
            listener.exitSelector_dec(self)
def selector_dec(self):
    """Parse a ``selector_dec`` (entered as rule 74).

    Grammar:
        selector_dec : ParOpen symbol sort ParClose ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Selector_decContext(self, self._ctx, self.state)
    self.enterRule(localctx, 74, self.RULE_selector_dec)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 632
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 633
        self.symbol()
        self.state = 634
        self.sort()
        self.state = 635
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Constructor_decContext(ParserRuleContext):
    """Parse-tree node for the ``constructor_dec`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def selector_dec(self, i: int = None):
        # i is None -> all selector_dec children, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Selector_decContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Selector_decContext, i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_constructor_dec

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterConstructor_dec"):
            listener.enterConstructor_dec(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitConstructor_dec"):
            listener.exitConstructor_dec(self)
def constructor_dec(self):
    """Parse a ``constructor_dec`` (entered as rule 76).

    Grammar:
        constructor_dec : ParOpen symbol selector_dec* ParClose ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Constructor_decContext(self, self._ctx, self.state)
    self.enterRule(localctx, 76, self.RULE_constructor_dec)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 637
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 638
        self.symbol()
        self.state = 642
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # selector_dec* : each one starts with "(" — loop while one follows.
        while _la==SMTLIBv2Parser.ParOpen:
            self.state = 639
            self.selector_dec()
            self.state = 644
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 645
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Datatype_decContext(ParserRuleContext):
    """Parse-tree node for the ``datatype_dec`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self, i: int = None):
        # i is None -> every ParOpen token, otherwise the i-th one.
        return self.getTokens(SMTLIBv2Parser.ParOpen) if i is None else self.getToken(SMTLIBv2Parser.ParOpen, i)

    def ParClose(self, i: int = None):
        return self.getTokens(SMTLIBv2Parser.ParClose) if i is None else self.getToken(SMTLIBv2Parser.ParClose, i)

    def constructor_dec(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Constructor_decContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Constructor_decContext, i)

    def GRW_Par(self):
        return self.getToken(SMTLIBv2Parser.GRW_Par, 0)

    def symbol(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SymbolContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_datatype_dec

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterDatatype_dec"):
            listener.enterDatatype_dec(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitDatatype_dec"):
            listener.exitDatatype_dec(self)
def datatype_dec(self):
    """Parse a ``datatype_dec`` (entered as rule 78).

    Grammar (as evidenced by the two alternatives below):
        datatype_dec
            : ParOpen constructor_dec+ ParClose
            | ParOpen GRW_Par ParOpen symbol+ ParClose
              ParOpen constructor_dec+ ParClose ParClose
            ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Datatype_decContext(self, self._ctx, self.state)
    self.enterRule(localctx, 78, self.RULE_datatype_dec)
    self._la = 0 # Token type
    try:
        self.state = 673
        self._errHandler.sync(self)
        # Adaptive prediction (decision 46) picks between the two alternatives.
        la_ = self._interp.adaptivePredict(self._input,46,self._ctx)
        if la_ == 1:
            # Alt 1: non-parametric "( constructor_dec+ )".
            self.enterOuterAlt(localctx, 1)
            self.state = 647
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 649
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 648
                self.constructor_dec()
                self.state = 651
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Each constructor_dec starts with "(" — loop while one follows.
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break
            self.state = 653
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 2:
            # Alt 2: parametric "( par ( symbol+ ) ( constructor_dec+ ) )".
            self.enterOuterAlt(localctx, 2)
            self.state = 655
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 656
            self.match(SMTLIBv2Parser.GRW_Par)
            self.state = 657
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 659
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # symbol+ : loop while the lookahead can start a symbol.
            while True:
                self.state = 658
                self.symbol()
                self.state = 661
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 663
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 664
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 666
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 665
                self.constructor_dec()
                self.state = 668
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break
            self.state = 670
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 671
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Function_decContext(ParserRuleContext):
    """Parse-tree node for the ``function_dec`` rule (ANTLR-generated)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self, i: int = None):
        # i is None -> every ParOpen token, otherwise the i-th one.
        return self.getTokens(SMTLIBv2Parser.ParOpen) if i is None else self.getToken(SMTLIBv2Parser.ParOpen, i)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, 0)

    def ParClose(self, i: int = None):
        return self.getTokens(SMTLIBv2Parser.ParClose) if i is None else self.getToken(SMTLIBv2Parser.ParClose, i)

    def sort(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SortContext, 0)

    def sorted_var(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Sorted_varContext)
        return self.getTypedRuleContext(SMTLIBv2Parser.Sorted_varContext, i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_function_dec

    def enterRule(self, listener: ParseTreeListener):
        if hasattr(listener, "enterFunction_dec"):
            listener.enterFunction_dec(self)

    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitFunction_dec"):
            listener.exitFunction_dec(self)
def function_dec(self):
    """Parse a ``function_dec`` (entered as rule 80).

    Grammar:
        function_dec : ParOpen symbol ParOpen sorted_var* ParClose sort ParClose ;

    NOTE(review): ANTLR-generated code — ``self.state`` numbers index the
    serialized ATN and must not be altered by hand.
    """
    localctx = SMTLIBv2Parser.Function_decContext(self, self._ctx, self.state)
    self.enterRule(localctx, 80, self.RULE_function_dec)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 675
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 676
        self.symbol()
        self.state = 677
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 681
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # sorted_var* : each one starts with "(" — loop while one follows.
        while _la==SMTLIBv2Parser.ParOpen:
            self.state = 678
            self.sorted_var()
            self.state = 683
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 684
        self.match(SMTLIBv2Parser.ParClose)
        self.state = 685
        self.sort()
        self.state = 686
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Function_defContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def symbol(self):
return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext,0)
def ParOpen(self):
return self.getToken(SMTLIBv2Parser.ParOpen, 0)
def ParClose(self):
return self.getToken(SMTLIBv2Parser.ParClose, 0)
def sort(self):
return self.getTypedRuleContext(SMTLIBv2Parser.SortContext,0)
def term(self):
return self.getTypedRuleContext(SMTLIBv2Parser.TermContext,0)
def sorted_var(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(SMTLIBv2Parser.Sorted_varContext)
else:
return self.getTypedRuleContext(SMTLIBv2Parser.Sorted_varContext,i)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_function_def
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFunction_def" ):
listener.enterFunction_def(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFunction_def" ):
listener.exitFunction_def(self)
    def function_def(self):
        """Parse a ``function_def``: symbol '(' sorted_var* ')' sort term.

        Returns the populated Function_defContext; recognition errors are
        recorded on the context and handled by standard ANTLR recovery.
        """
        localctx = SMTLIBv2Parser.Function_defContext(self, self._ctx, self.state)
        self.enterRule(localctx, 82, self.RULE_function_def)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 688
            self.symbol()
            self.state = 689
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 693
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Zero or more sorted_var declarations, each starting with '('.
            while _la==SMTLIBv2Parser.ParOpen:
                self.state = 690
                self.sorted_var()
                self.state = 695
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 696
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 697
            self.sort()
            self.state = 698
            self.term()
        except RecognitionException as re:
            # Standard generated error handling: record, report, recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Prop_literalContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def symbol(self):
return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext,0)
def ParOpen(self):
return self.getToken(SMTLIBv2Parser.ParOpen, 0)
def PS_Not(self):
return self.getToken(SMTLIBv2Parser.PS_Not, 0)
def ParClose(self):
return self.getToken(SMTLIBv2Parser.ParClose, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_prop_literal
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterProp_literal" ):
listener.enterProp_literal(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitProp_literal" ):
listener.exitProp_literal(self)
    def prop_literal(self):
        """Parse a ``prop_literal``: either a bare symbol, or '(' 'not' symbol ')'.

        The alternative is chosen on one token of lookahead: any symbol-start
        token selects alt 1, ParOpen selects alt 2, anything else is an error.
        """
        localctx = SMTLIBv2Parser.Prop_literalContext(self, self._ctx, self.state)
        self.enterRule(localctx, 84, self.RULE_prop_literal)
        try:
            self.state = 706
            self._errHandler.sync(self)
            token = self._input.LA(1)
            # Alt 1: any token that can begin a symbol.
            if token in [SMTLIBv2Parser.QuotedSymbol, SMTLIBv2Parser.PS_Not, SMTLIBv2Parser.PS_Bool, SMTLIBv2Parser.PS_Int, SMTLIBv2Parser.PS_Real, SMTLIBv2Parser.PS_ContinuedExecution, SMTLIBv2Parser.PS_Error, SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_ImmediateExit, SMTLIBv2Parser.PS_Incomplete, SMTLIBv2Parser.PS_Logic, SMTLIBv2Parser.PS_Memout, SMTLIBv2Parser.PS_Sat, SMTLIBv2Parser.PS_Success, SMTLIBv2Parser.PS_Theory, SMTLIBv2Parser.PS_True, SMTLIBv2Parser.PS_Unknown, SMTLIBv2Parser.PS_Unsupported, SMTLIBv2Parser.PS_Unsat, SMTLIBv2Parser.UndefinedSymbol]:
                self.enterOuterAlt(localctx, 1)
                self.state = 700
                self.symbol()
                pass
            # Alt 2: negated literal '(' 'not' symbol ')'.
            elif token in [SMTLIBv2Parser.ParOpen]:
                self.enterOuterAlt(localctx, 2)
                self.state = 701
                self.match(SMTLIBv2Parser.ParOpen)
                self.state = 702
                self.match(SMTLIBv2Parser.PS_Not)
                self.state = 703
                self.symbol()
                self.state = 704
                self.match(SMTLIBv2Parser.ParClose)
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            # Standard generated error handling: record, report, recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ScriptContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def command(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(SMTLIBv2Parser.CommandContext)
else:
return self.getTypedRuleContext(SMTLIBv2Parser.CommandContext,i)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_script
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterScript" ):
listener.enterScript(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitScript" ):
listener.exitScript(self)
    def script(self):
        """Parse a ``script``: zero or more commands, each starting with '('."""
        localctx = SMTLIBv2Parser.ScriptContext(self, self._ctx, self.state)
        self.enterRule(localctx, 86, self.RULE_script)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 711
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Loop while the next token opens another command.
            while _la==SMTLIBv2Parser.ParOpen:
                self.state = 708
                self.command()
                self.state = 713
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            # Standard generated error handling: record, report, recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_assertContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_Assert(self):
return self.getToken(SMTLIBv2Parser.CMD_Assert, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_assert
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_assert" ):
listener.enterCmd_assert(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_assert" ):
listener.exitCmd_assert(self)
    def cmd_assert(self):
        """Parse ``cmd_assert``: matches the single CMD_Assert keyword token."""
        localctx = SMTLIBv2Parser.Cmd_assertContext(self, self._ctx, self.state)
        self.enterRule(localctx, 88, self.RULE_cmd_assert)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 714
            self.match(SMTLIBv2Parser.CMD_Assert)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_assertSoftContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_AssertSoft(self):
return self.getToken(SMTLIBv2Parser.CMD_AssertSoft, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_assertSoft
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_assertSoft" ):
listener.enterCmd_assertSoft(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_assertSoft" ):
listener.exitCmd_assertSoft(self)
    def cmd_assertSoft(self):
        """Parse ``cmd_assertSoft``: matches the single CMD_AssertSoft keyword token."""
        localctx = SMTLIBv2Parser.Cmd_assertSoftContext(self, self._ctx, self.state)
        self.enterRule(localctx, 90, self.RULE_cmd_assertSoft)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 716
            self.match(SMTLIBv2Parser.CMD_AssertSoft)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_simplifyContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Simplify(self):
return self.getToken(SMTLIBv2Parser.Simplify, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_simplify
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_simplify" ):
listener.enterCmd_simplify(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_simplify" ):
listener.exitCmd_simplify(self)
    def cmd_simplify(self):
        """Parse ``cmd_simplify``: matches the single Simplify keyword token."""
        localctx = SMTLIBv2Parser.Cmd_simplifyContext(self, self._ctx, self.state)
        self.enterRule(localctx, 92, self.RULE_cmd_simplify)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 718
            self.match(SMTLIBv2Parser.Simplify)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_checkSatContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_CheckSat(self):
return self.getToken(SMTLIBv2Parser.CMD_CheckSat, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_checkSat
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_checkSat" ):
listener.enterCmd_checkSat(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_checkSat" ):
listener.exitCmd_checkSat(self)
    def cmd_checkSat(self):
        """Parse ``cmd_checkSat``: matches the single CMD_CheckSat keyword token."""
        localctx = SMTLIBv2Parser.Cmd_checkSatContext(self, self._ctx, self.state)
        self.enterRule(localctx, 94, self.RULE_cmd_checkSat)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 720
            self.match(SMTLIBv2Parser.CMD_CheckSat)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_checkSatAssumingContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_CheckSatAssuming(self):
return self.getToken(SMTLIBv2Parser.CMD_CheckSatAssuming, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_checkSatAssuming
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_checkSatAssuming" ):
listener.enterCmd_checkSatAssuming(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_checkSatAssuming" ):
listener.exitCmd_checkSatAssuming(self)
    def cmd_checkSatAssuming(self):
        """Parse ``cmd_checkSatAssuming``: matches the single CMD_CheckSatAssuming keyword token."""
        localctx = SMTLIBv2Parser.Cmd_checkSatAssumingContext(self, self._ctx, self.state)
        self.enterRule(localctx, 96, self.RULE_cmd_checkSatAssuming)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 722
            self.match(SMTLIBv2Parser.CMD_CheckSatAssuming)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_checkSatUsingContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_CheckSatUsing(self):
return self.getToken(SMTLIBv2Parser.CMD_CheckSatUsing, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_checkSatUsing
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_checkSatUsing" ):
listener.enterCmd_checkSatUsing(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_checkSatUsing" ):
listener.exitCmd_checkSatUsing(self)
    def cmd_checkSatUsing(self):
        """Parse ``cmd_checkSatUsing``: matches the single CMD_CheckSatUsing keyword token."""
        localctx = SMTLIBv2Parser.Cmd_checkSatUsingContext(self, self._ctx, self.state)
        self.enterRule(localctx, 98, self.RULE_cmd_checkSatUsing)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 724
            self.match(SMTLIBv2Parser.CMD_CheckSatUsing)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_minimizeContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_Minimize(self):
return self.getToken(SMTLIBv2Parser.CMD_Minimize, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_minimize
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_minimize" ):
listener.enterCmd_minimize(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_minimize" ):
listener.exitCmd_minimize(self)
    def cmd_minimize(self):
        """Parse ``cmd_minimize``: matches the single CMD_Minimize keyword token."""
        localctx = SMTLIBv2Parser.Cmd_minimizeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 100, self.RULE_cmd_minimize)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 726
            self.match(SMTLIBv2Parser.CMD_Minimize)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_maximizeContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_Maximize(self):
return self.getToken(SMTLIBv2Parser.CMD_Maximize, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_maximize
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_maximize" ):
listener.enterCmd_maximize(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_maximize" ):
listener.exitCmd_maximize(self)
    def cmd_maximize(self):
        """Parse ``cmd_maximize``: matches the single CMD_Maximize keyword token."""
        localctx = SMTLIBv2Parser.Cmd_maximizeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 102, self.RULE_cmd_maximize)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 728
            self.match(SMTLIBv2Parser.CMD_Maximize)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_declareConstContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DeclareConst(self):
return self.getToken(SMTLIBv2Parser.CMD_DeclareConst, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_declareConst
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_declareConst" ):
listener.enterCmd_declareConst(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_declareConst" ):
listener.exitCmd_declareConst(self)
    def cmd_declareConst(self):
        """Parse ``cmd_declareConst``: matches the single CMD_DeclareConst keyword token."""
        localctx = SMTLIBv2Parser.Cmd_declareConstContext(self, self._ctx, self.state)
        self.enterRule(localctx, 104, self.RULE_cmd_declareConst)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 730
            self.match(SMTLIBv2Parser.CMD_DeclareConst)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_declareDatatypeContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DeclareDatatype(self):
return self.getToken(SMTLIBv2Parser.CMD_DeclareDatatype, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_declareDatatype
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_declareDatatype" ):
listener.enterCmd_declareDatatype(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_declareDatatype" ):
listener.exitCmd_declareDatatype(self)
    def cmd_declareDatatype(self):
        """Parse ``cmd_declareDatatype``: matches the single CMD_DeclareDatatype keyword token."""
        localctx = SMTLIBv2Parser.Cmd_declareDatatypeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 106, self.RULE_cmd_declareDatatype)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 732
            self.match(SMTLIBv2Parser.CMD_DeclareDatatype)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_declareCodatatypeContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DeclareCodatatype(self):
return self.getToken(SMTLIBv2Parser.CMD_DeclareCodatatype, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_declareCodatatype
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_declareCodatatype" ):
listener.enterCmd_declareCodatatype(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_declareCodatatype" ):
listener.exitCmd_declareCodatatype(self)
    def cmd_declareCodatatype(self):
        """Parse ``cmd_declareCodatatype``: matches the single CMD_DeclareCodatatype keyword token."""
        localctx = SMTLIBv2Parser.Cmd_declareCodatatypeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 108, self.RULE_cmd_declareCodatatype)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 734
            self.match(SMTLIBv2Parser.CMD_DeclareCodatatype)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_declareDatatypesContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DeclareDatatypes(self):
return self.getToken(SMTLIBv2Parser.CMD_DeclareDatatypes, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_declareDatatypes
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_declareDatatypes" ):
listener.enterCmd_declareDatatypes(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_declareDatatypes" ):
listener.exitCmd_declareDatatypes(self)
    def cmd_declareDatatypes(self):
        """Parse ``cmd_declareDatatypes``: matches the single CMD_DeclareDatatypes keyword token."""
        localctx = SMTLIBv2Parser.Cmd_declareDatatypesContext(self, self._ctx, self.state)
        self.enterRule(localctx, 110, self.RULE_cmd_declareDatatypes)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 736
            self.match(SMTLIBv2Parser.CMD_DeclareDatatypes)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_declareCodatatypesContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DeclareCodatatypes(self):
return self.getToken(SMTLIBv2Parser.CMD_DeclareCodatatypes, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_declareCodatatypes
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_declareCodatatypes" ):
listener.enterCmd_declareCodatatypes(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_declareCodatatypes" ):
listener.exitCmd_declareCodatatypes(self)
    def cmd_declareCodatatypes(self):
        """Parse ``cmd_declareCodatatypes``: matches the single CMD_DeclareCodatatypes keyword token."""
        localctx = SMTLIBv2Parser.Cmd_declareCodatatypesContext(self, self._ctx, self.state)
        self.enterRule(localctx, 112, self.RULE_cmd_declareCodatatypes)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 738
            self.match(SMTLIBv2Parser.CMD_DeclareCodatatypes)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_declareFunContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DeclareFun(self):
return self.getToken(SMTLIBv2Parser.CMD_DeclareFun, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_declareFun
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_declareFun" ):
listener.enterCmd_declareFun(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_declareFun" ):
listener.exitCmd_declareFun(self)
    def cmd_declareFun(self):
        """Parse ``cmd_declareFun``: matches the single CMD_DeclareFun keyword token."""
        localctx = SMTLIBv2Parser.Cmd_declareFunContext(self, self._ctx, self.state)
        self.enterRule(localctx, 114, self.RULE_cmd_declareFun)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 740
            self.match(SMTLIBv2Parser.CMD_DeclareFun)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_declareSortContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DeclareSort(self):
return self.getToken(SMTLIBv2Parser.CMD_DeclareSort, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_declareSort
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_declareSort" ):
listener.enterCmd_declareSort(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_declareSort" ):
listener.exitCmd_declareSort(self)
    def cmd_declareSort(self):
        """Parse ``cmd_declareSort``: matches the single CMD_DeclareSort keyword token."""
        localctx = SMTLIBv2Parser.Cmd_declareSortContext(self, self._ctx, self.state)
        self.enterRule(localctx, 116, self.RULE_cmd_declareSort)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 742
            self.match(SMTLIBv2Parser.CMD_DeclareSort)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_defineContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_Define(self):
return self.getToken(SMTLIBv2Parser.CMD_Define, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_define
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_define" ):
listener.enterCmd_define(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_define" ):
listener.exitCmd_define(self)
    def cmd_define(self):
        """Parse ``cmd_define``: matches the single CMD_Define keyword token."""
        localctx = SMTLIBv2Parser.Cmd_defineContext(self, self._ctx, self.state)
        self.enterRule(localctx, 118, self.RULE_cmd_define)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 744
            self.match(SMTLIBv2Parser.CMD_Define)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_defineFunContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DefineFun(self):
return self.getToken(SMTLIBv2Parser.CMD_DefineFun, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_defineFun
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_defineFun" ):
listener.enterCmd_defineFun(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_defineFun" ):
listener.exitCmd_defineFun(self)
    def cmd_defineFun(self):
        """Parse ``cmd_defineFun``: matches the single CMD_DefineFun keyword token."""
        localctx = SMTLIBv2Parser.Cmd_defineFunContext(self, self._ctx, self.state)
        self.enterRule(localctx, 120, self.RULE_cmd_defineFun)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 746
            self.match(SMTLIBv2Parser.CMD_DefineFun)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_defineConstContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DefineConst(self):
return self.getToken(SMTLIBv2Parser.CMD_DefineConst, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_defineConst
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_defineConst" ):
listener.enterCmd_defineConst(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_defineConst" ):
listener.exitCmd_defineConst(self)
    def cmd_defineConst(self):
        """Parse ``cmd_defineConst``: matches the single CMD_DefineConst keyword token."""
        localctx = SMTLIBv2Parser.Cmd_defineConstContext(self, self._ctx, self.state)
        self.enterRule(localctx, 122, self.RULE_cmd_defineConst)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 748
            self.match(SMTLIBv2Parser.CMD_DefineConst)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_defineFunRecContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DefineFunRec(self):
return self.getToken(SMTLIBv2Parser.CMD_DefineFunRec, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_defineFunRec
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_defineFunRec" ):
listener.enterCmd_defineFunRec(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_defineFunRec" ):
listener.exitCmd_defineFunRec(self)
    def cmd_defineFunRec(self):
        """Parse ``cmd_defineFunRec``: matches the single CMD_DefineFunRec keyword token."""
        localctx = SMTLIBv2Parser.Cmd_defineFunRecContext(self, self._ctx, self.state)
        self.enterRule(localctx, 124, self.RULE_cmd_defineFunRec)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 750
            self.match(SMTLIBv2Parser.CMD_DefineFunRec)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_defineFunsRecContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DefineFunsRec(self):
return self.getToken(SMTLIBv2Parser.CMD_DefineFunsRec, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_defineFunsRec
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_defineFunsRec" ):
listener.enterCmd_defineFunsRec(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_defineFunsRec" ):
listener.exitCmd_defineFunsRec(self)
    def cmd_defineFunsRec(self):
        """Parse ``cmd_defineFunsRec``: matches the single CMD_DefineFunsRec keyword token."""
        localctx = SMTLIBv2Parser.Cmd_defineFunsRecContext(self, self._ctx, self.state)
        self.enterRule(localctx, 126, self.RULE_cmd_defineFunsRec)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 752
            self.match(SMTLIBv2Parser.CMD_DefineFunsRec)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_defineSortContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_DefineSort(self):
return self.getToken(SMTLIBv2Parser.CMD_DefineSort, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_defineSort
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_defineSort" ):
listener.enterCmd_defineSort(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_defineSort" ):
listener.exitCmd_defineSort(self)
    def cmd_defineSort(self):
        """Parse ``cmd_defineSort``: matches the single CMD_DefineSort keyword token."""
        localctx = SMTLIBv2Parser.Cmd_defineSortContext(self, self._ctx, self.state)
        self.enterRule(localctx, 128, self.RULE_cmd_defineSort)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 754
            self.match(SMTLIBv2Parser.CMD_DefineSort)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_displayContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_Display(self):
return self.getToken(SMTLIBv2Parser.CMD_Display, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_display
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_display" ):
listener.enterCmd_display(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_display" ):
listener.exitCmd_display(self)
    def cmd_display(self):
        """Parse ``cmd_display``: matches the single CMD_Display keyword token."""
        localctx = SMTLIBv2Parser.Cmd_displayContext(self, self._ctx, self.state)
        self.enterRule(localctx, 130, self.RULE_cmd_display)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 756
            self.match(SMTLIBv2Parser.CMD_Display)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_echoContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_Echo(self):
return self.getToken(SMTLIBv2Parser.CMD_Echo, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_echo
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_echo" ):
listener.enterCmd_echo(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_echo" ):
listener.exitCmd_echo(self)
    def cmd_echo(self):
        """Parse ``cmd_echo``: matches the single CMD_Echo keyword token."""
        localctx = SMTLIBv2Parser.Cmd_echoContext(self, self._ctx, self.state)
        self.enterRule(localctx, 132, self.RULE_cmd_echo)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 758
            self.match(SMTLIBv2Parser.CMD_Echo)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_evalContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_Eval(self):
return self.getToken(SMTLIBv2Parser.CMD_Eval, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_eval
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_eval" ):
listener.enterCmd_eval(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_eval" ):
listener.exitCmd_eval(self)
    def cmd_eval(self):
        """Parse ``cmd_eval``: matches the single CMD_Eval keyword token."""
        localctx = SMTLIBv2Parser.Cmd_evalContext(self, self._ctx, self.state)
        self.enterRule(localctx, 134, self.RULE_cmd_eval)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 760
            self.match(SMTLIBv2Parser.CMD_Eval)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_exitContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def CMD_Exit(self):
return self.getToken(SMTLIBv2Parser.CMD_Exit, 0)
def getRuleIndex(self):
return SMTLIBv2Parser.RULE_cmd_exit
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCmd_exit" ):
listener.enterCmd_exit(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCmd_exit" ):
listener.exitCmd_exit(self)
    def cmd_exit(self):
        """Parse ``cmd_exit``: matches the single CMD_Exit keyword token."""
        localctx = SMTLIBv2Parser.Cmd_exitContext(self, self._ctx, self.state)
        self.enterRule(localctx, 136, self.RULE_cmd_exit)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 762
            self.match(SMTLIBv2Parser.CMD_Exit)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Cmd_GetObjectivesContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_GetObjectives`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetObjectives(self):
        # Accessor for the single CMD_GetObjectives terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetObjectives, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_GetObjectives
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_GetObjectives"):
            listener.enterCmd_GetObjectives(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_GetObjectives"):
            listener.exitCmd_GetObjectives(self)

def cmd_GetObjectives(self):
    """Parse rule ``cmd_GetObjectives``: matches a single CMD_GetObjectives token."""
    localctx = SMTLIBv2Parser.Cmd_GetObjectivesContext(self, self._ctx, self.state)
    self.enterRule(localctx, 138, self.RULE_cmd_GetObjectives)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 764  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetObjectives)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getAssertionsContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getAssertions`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetAssertions(self):
        # Accessor for the single CMD_GetAssertions terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetAssertions, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getAssertions
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getAssertions"):
            listener.enterCmd_getAssertions(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getAssertions"):
            listener.exitCmd_getAssertions(self)

def cmd_getAssertions(self):
    """Parse rule ``cmd_getAssertions``: matches a single CMD_GetAssertions token."""
    localctx = SMTLIBv2Parser.Cmd_getAssertionsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 140, self.RULE_cmd_getAssertions)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 766  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetAssertions)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getAssignmentContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getAssignment`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetAssignment(self):
        # Accessor for the single CMD_GetAssignment terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetAssignment, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getAssignment
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getAssignment"):
            listener.enterCmd_getAssignment(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getAssignment"):
            listener.exitCmd_getAssignment(self)

def cmd_getAssignment(self):
    """Parse rule ``cmd_getAssignment``: matches a single CMD_GetAssignment token."""
    localctx = SMTLIBv2Parser.Cmd_getAssignmentContext(self, self._ctx, self.state)
    self.enterRule(localctx, 142, self.RULE_cmd_getAssignment)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 768  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetAssignment)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getInfoContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getInfo`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetInfo(self):
        # Accessor for the single CMD_GetInfo terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetInfo, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getInfo
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getInfo"):
            listener.enterCmd_getInfo(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getInfo"):
            listener.exitCmd_getInfo(self)

def cmd_getInfo(self):
    """Parse rule ``cmd_getInfo``: matches a single CMD_GetInfo token."""
    localctx = SMTLIBv2Parser.Cmd_getInfoContext(self, self._ctx, self.state)
    self.enterRule(localctx, 144, self.RULE_cmd_getInfo)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 770  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetInfo)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getModelContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getModel`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetModel(self):
        # Accessor for the single CMD_GetModel terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetModel, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getModel
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getModel"):
            listener.enterCmd_getModel(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getModel"):
            listener.exitCmd_getModel(self)

def cmd_getModel(self):
    """Parse rule ``cmd_getModel``: matches a single CMD_GetModel token."""
    localctx = SMTLIBv2Parser.Cmd_getModelContext(self, self._ctx, self.state)
    self.enterRule(localctx, 146, self.RULE_cmd_getModel)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 772  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetModel)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_blockModelContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_blockModel`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_BlockModel(self):
        # Accessor for the single CMD_BlockModel terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_BlockModel, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_blockModel
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_blockModel"):
            listener.enterCmd_blockModel(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_blockModel"):
            listener.exitCmd_blockModel(self)

def cmd_blockModel(self):
    """Parse rule ``cmd_blockModel``: matches a single CMD_BlockModel token."""
    localctx = SMTLIBv2Parser.Cmd_blockModelContext(self, self._ctx, self.state)
    self.enterRule(localctx, 148, self.RULE_cmd_blockModel)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 774  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_BlockModel)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getOptionContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getOption`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetOption(self):
        # Accessor for the single CMD_GetOption terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetOption, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getOption
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getOption"):
            listener.enterCmd_getOption(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getOption"):
            listener.exitCmd_getOption(self)

def cmd_getOption(self):
    """Parse rule ``cmd_getOption``: matches a single CMD_GetOption token."""
    localctx = SMTLIBv2Parser.Cmd_getOptionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 150, self.RULE_cmd_getOption)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 776  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetOption)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getProofContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getProof`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetProof(self):
        # Accessor for the single CMD_GetProof terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetProof, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getProof
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getProof"):
            listener.enterCmd_getProof(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getProof"):
            listener.exitCmd_getProof(self)

def cmd_getProof(self):
    """Parse rule ``cmd_getProof``: matches a single CMD_GetProof token."""
    localctx = SMTLIBv2Parser.Cmd_getProofContext(self, self._ctx, self.state)
    self.enterRule(localctx, 152, self.RULE_cmd_getProof)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 778  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetProof)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getUnsatAssumptionsContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getUnsatAssumptions`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetUnsatAssumptions(self):
        # Accessor for the single CMD_GetUnsatAssumptions terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetUnsatAssumptions, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getUnsatAssumptions
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getUnsatAssumptions"):
            listener.enterCmd_getUnsatAssumptions(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getUnsatAssumptions"):
            listener.exitCmd_getUnsatAssumptions(self)

def cmd_getUnsatAssumptions(self):
    """Parse rule ``cmd_getUnsatAssumptions``: matches a single CMD_GetUnsatAssumptions token."""
    localctx = SMTLIBv2Parser.Cmd_getUnsatAssumptionsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 154, self.RULE_cmd_getUnsatAssumptions)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 780  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetUnsatAssumptions)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_labelsContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_labels`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_Labels(self):
        # Accessor for the single CMD_Labels terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_Labels, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_labels
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_labels"):
            listener.enterCmd_labels(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_labels"):
            listener.exitCmd_labels(self)

def cmd_labels(self):
    """Parse rule ``cmd_labels``: matches a single CMD_Labels token."""
    localctx = SMTLIBv2Parser.Cmd_labelsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 156, self.RULE_cmd_labels)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 782  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_Labels)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getUnsatCoreContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getUnsatCore`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetUnsatCore(self):
        # Accessor for the single CMD_GetUnsatCore terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetUnsatCore, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getUnsatCore
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getUnsatCore"):
            listener.enterCmd_getUnsatCore(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getUnsatCore"):
            listener.exitCmd_getUnsatCore(self)

def cmd_getUnsatCore(self):
    """Parse rule ``cmd_getUnsatCore``: matches a single CMD_GetUnsatCore token."""
    localctx = SMTLIBv2Parser.Cmd_getUnsatCoreContext(self, self._ctx, self.state)
    self.enterRule(localctx, 158, self.RULE_cmd_getUnsatCore)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 784  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetUnsatCore)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_getValueContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_getValue`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_GetValue(self):
        # Accessor for the single CMD_GetValue terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_GetValue, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_getValue
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_getValue"):
            listener.enterCmd_getValue(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_getValue"):
            listener.exitCmd_getValue(self)

def cmd_getValue(self):
    """Parse rule ``cmd_getValue``: matches a single CMD_GetValue token."""
    localctx = SMTLIBv2Parser.Cmd_getValueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 160, self.RULE_cmd_getValue)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 786  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_GetValue)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_popContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_pop`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_Pop(self):
        # Accessor for the single CMD_Pop terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_Pop, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_pop
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_pop"):
            listener.enterCmd_pop(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_pop"):
            listener.exitCmd_pop(self)

def cmd_pop(self):
    """Parse rule ``cmd_pop``: matches a single CMD_Pop token."""
    localctx = SMTLIBv2Parser.Cmd_popContext(self, self._ctx, self.state)
    self.enterRule(localctx, 162, self.RULE_cmd_pop)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 788  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_Pop)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_poly_factorContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_poly_factor`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_PolyFactor(self):
        # Accessor for the single CMD_PolyFactor terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_PolyFactor, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_poly_factor
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_poly_factor"):
            listener.enterCmd_poly_factor(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_poly_factor"):
            listener.exitCmd_poly_factor(self)

def cmd_poly_factor(self):
    """Parse rule ``cmd_poly_factor``: matches a single CMD_PolyFactor token."""
    localctx = SMTLIBv2Parser.Cmd_poly_factorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 164, self.RULE_cmd_poly_factor)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 790  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_PolyFactor)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_pushContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_push`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_Push(self):
        # Accessor for the single CMD_Push terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_Push, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_push
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_push"):
            listener.enterCmd_push(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_push"):
            listener.exitCmd_push(self)

def cmd_push(self):
    """Parse rule ``cmd_push``: matches a single CMD_Push token."""
    localctx = SMTLIBv2Parser.Cmd_pushContext(self, self._ctx, self.state)
    self.enterRule(localctx, 166, self.RULE_cmd_push)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 792  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_Push)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_resetContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_reset`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_Reset(self):
        # Accessor for the single CMD_Reset terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_Reset, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_reset
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_reset"):
            listener.enterCmd_reset(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_reset"):
            listener.exitCmd_reset(self)

def cmd_reset(self):
    """Parse rule ``cmd_reset``: matches a single CMD_Reset token."""
    localctx = SMTLIBv2Parser.Cmd_resetContext(self, self._ctx, self.state)
    self.enterRule(localctx, 168, self.RULE_cmd_reset)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 794  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_Reset)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_resetAssertionsContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_resetAssertions`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_ResetAssertions(self):
        # Accessor for the single CMD_ResetAssertions terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_ResetAssertions, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_resetAssertions
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_resetAssertions"):
            listener.enterCmd_resetAssertions(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_resetAssertions"):
            listener.exitCmd_resetAssertions(self)

def cmd_resetAssertions(self):
    """Parse rule ``cmd_resetAssertions``: matches a single CMD_ResetAssertions token."""
    localctx = SMTLIBv2Parser.Cmd_resetAssertionsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 170, self.RULE_cmd_resetAssertions)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 796  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_ResetAssertions)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_setInfoContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_setInfo`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_SetInfo(self):
        # Accessor for the single CMD_SetInfo terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_SetInfo, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_setInfo
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_setInfo"):
            listener.enterCmd_setInfo(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_setInfo"):
            listener.exitCmd_setInfo(self)

def cmd_setInfo(self):
    """Parse rule ``cmd_setInfo``: matches a single CMD_SetInfo token."""
    localctx = SMTLIBv2Parser.Cmd_setInfoContext(self, self._ctx, self.state)
    self.enterRule(localctx, 172, self.RULE_cmd_setInfo)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 798  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_SetInfo)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_setLogicContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_setLogic`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_SetLogic(self):
        # Accessor for the single CMD_SetLogic terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_SetLogic, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_setLogic
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_setLogic"):
            listener.enterCmd_setLogic(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_setLogic"):
            listener.exitCmd_setLogic(self)

def cmd_setLogic(self):
    """Parse rule ``cmd_setLogic``: matches a single CMD_SetLogic token."""
    localctx = SMTLIBv2Parser.Cmd_setLogicContext(self, self._ctx, self.state)
    self.enterRule(localctx, 174, self.RULE_cmd_setLogic)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 800  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_SetLogic)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cmd_setOptionContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``cmd_setOption`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def CMD_SetOption(self):
        # Accessor for the single CMD_SetOption terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.CMD_SetOption, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_cmd_setOption
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterCmd_setOption"):
            listener.enterCmd_setOption(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitCmd_setOption"):
            listener.exitCmd_setOption(self)

def cmd_setOption(self):
    """Parse rule ``cmd_setOption``: matches a single CMD_SetOption token."""
    localctx = SMTLIBv2Parser.Cmd_setOptionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 176, self.RULE_cmd_setOption)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 802  # generated ATN state number
        self.match(SMTLIBv2Parser.CMD_SetOption)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Tac_thenContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``tac_then`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def TAC_Then(self):
        # Accessor for the single TAC_Then terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.TAC_Then, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_tac_then
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterTac_then"):
            listener.enterTac_then(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTac_then"):
            listener.exitTac_then(self)

def tac_then(self):
    """Parse rule ``tac_then``: matches a single TAC_Then token."""
    localctx = SMTLIBv2Parser.Tac_thenContext(self, self._ctx, self.state)
    self.enterRule(localctx, 178, self.RULE_tac_then)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 804  # generated ATN state number
        self.match(SMTLIBv2Parser.TAC_Then)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Tac_and_thenContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``tac_and_then`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def TAC_AndThen(self):
        # Accessor for the single TAC_AndThen terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.TAC_AndThen, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_tac_and_then
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterTac_and_then"):
            listener.enterTac_and_then(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTac_and_then"):
            listener.exitTac_and_then(self)

def tac_and_then(self):
    """Parse rule ``tac_and_then``: matches a single TAC_AndThen token."""
    localctx = SMTLIBv2Parser.Tac_and_thenContext(self, self._ctx, self.state)
    self.enterRule(localctx, 180, self.RULE_tac_and_then)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 806  # generated ATN state number
        self.match(SMTLIBv2Parser.TAC_AndThen)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Par_thenContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``par_then`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def TAC_ParThen(self):
        # Accessor for the single TAC_ParThen terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.TAC_ParThen, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_par_then
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterPar_then"):
            listener.enterPar_then(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitPar_then"):
            listener.exitPar_then(self)

def par_then(self):
    """Parse rule ``par_then``: matches a single TAC_ParThen token."""
    localctx = SMTLIBv2Parser.Par_thenContext(self, self._ctx, self.state)
    self.enterRule(localctx, 182, self.RULE_par_then)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 808  # generated ATN state number
        self.match(SMTLIBv2Parser.TAC_ParThen)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Or_elseContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``or_else`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def TAC_OrElse(self):
        # Accessor for the single TAC_OrElse terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.TAC_OrElse, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_or_else
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterOr_else"):
            listener.enterOr_else(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitOr_else"):
            listener.exitOr_else(self)

def or_else(self):
    """Parse rule ``or_else``: matches a single TAC_OrElse token."""
    localctx = SMTLIBv2Parser.Or_elseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 184, self.RULE_or_else)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 810  # generated ATN state number
        self.match(SMTLIBv2Parser.TAC_OrElse)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Par_or_elseContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``par_or_else`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def TAC_ParOrElse(self):
        # Accessor for the single TAC_ParOrElse terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.TAC_ParOrElse, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_par_or_else
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterPar_or_else"):
            listener.enterPar_or_else(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitPar_or_else"):
            listener.exitPar_or_else(self)

def par_or_else(self):
    """Parse rule ``par_or_else``: matches a single TAC_ParOrElse token."""
    localctx = SMTLIBv2Parser.Par_or_elseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 186, self.RULE_par_or_else)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 812  # generated ATN state number
        self.match(SMTLIBv2Parser.TAC_ParOrElse)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Par_orContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``par_or`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def TAC_ParOr(self):
        # Accessor for the single TAC_ParOr terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.TAC_ParOr, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_par_or
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterPar_or"):
            listener.enterPar_or(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitPar_or"):
            listener.exitPar_or(self)

def par_or(self):
    """Parse rule ``par_or``: matches a single TAC_ParOr token."""
    localctx = SMTLIBv2Parser.Par_orContext(self, self._ctx, self.state)
    self.enterRule(localctx, 188, self.RULE_par_or)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 814  # generated ATN state number
        self.match(SMTLIBv2Parser.TAC_ParOr)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TryForContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``tryFor`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def TAC_TryFor(self):
        # Accessor for the single TAC_TryFor terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.TAC_TryFor, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_tryFor
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterTryFor"):
            listener.enterTryFor(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTryFor"):
            listener.exitTryFor(self)

def tryFor(self):
    """Parse rule ``tryFor``: matches a single TAC_TryFor token."""
    localctx = SMTLIBv2Parser.TryForContext(self, self._ctx, self.state)
    self.enterRule(localctx, 190, self.RULE_tryFor)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 816  # generated ATN state number
        self.match(SMTLIBv2Parser.TAC_TryFor)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UsingParamsContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``usingParams`` rule."""
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def TAC_UsingParams(self):
        # Accessor for the single TAC_UsingParams terminal matched by this rule.
        return self.getToken(SMTLIBv2Parser.TAC_UsingParams, 0)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_usingParams
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterUsingParams"):
            listener.enterUsingParams(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitUsingParams"):
            listener.exitUsingParams(self)

def usingParams(self):
    """Parse rule ``usingParams``: matches a single TAC_UsingParams token."""
    localctx = SMTLIBv2Parser.UsingParamsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 192, self.RULE_usingParams)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 818  # generated ATN state number
        self.match(SMTLIBv2Parser.TAC_UsingParams)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TacticalContext(ParserRuleContext):
    """ANTLR4-generated parse-tree context for the ``tactical`` rule.

    Exposes typed accessors for every terminal and sub-rule context that
    can appear in an alternative of ``tactical``; accessors return None
    (or an empty list for the indexed variants) when the corresponding
    child was not matched.
    """
    __slots__ = 'parser'
    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def identifier(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.IdentifierContext, 0)
    def Simplify(self):
        return self.getToken(SMTLIBv2Parser.Simplify, 0)
    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)
    def GRW_Exclamation(self):
        return self.getToken(SMTLIBv2Parser.GRW_Exclamation, 0)
    def tactical(self, i: int = None):
        # No index: all nested tactical sub-contexts; with index i: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.TacticalContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.TacticalContext, i)
    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)
    def attribute(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext, 0)
    def tac_then(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Tac_thenContext, 0)
    def tac_and_then(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Tac_and_thenContext, 0)
    def par_then(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Par_thenContext, 0)
    def or_else(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Or_elseContext, 0)
    def par_or_else(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Par_or_elseContext, 0)
    def par_or(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Par_orContext, 0)
    def tryFor(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.TryForContext, 0)
    def decimal(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.DecimalContext, 0)
    def usingParams(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.UsingParamsContext, 0)
    def cmd_echo(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_echoContext, 0)
    def string(self, i: int = None):
        # No index: all string sub-contexts; with index i: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.StringContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.StringContext, i)
    def symbol(self, i: int = None):
        # No index: all symbol sub-contexts; with index i: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SymbolContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext, i)
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_tactical
    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterTactical"):
            listener.enterTactical(self)
    def exitRule(self, listener: ParseTreeListener):
        if hasattr(listener, "exitTactical"):
            listener.exitTactical(self)
def tactical(self):
    """Parse one 'tactical' (solver tactic expression), rule index 194.

    The alternative is chosen by ATN adaptive prediction (decision 61).
    Judging from the sub-rule calls, the alternatives are:
      1  identifier
      2  'simplify'
      3  '(' '!' tactical attribute? ')'
      4  '(' tac_then tactical+ ')'
      5  '(' tac_and_then tactical+ ')'
      6  '(' par_then tactical+ tactical ')'
      7  '(' or_else tactical+ tactical ')'
      8  '(' par_or_else tactical+ ')'
      9  '(' par_or tactical+ ')'
      10 '(' tryFor tactical+ decimal ')'
      11 '(' usingParams tactical attribute ')'
      12 '(' cmd_echo (string | symbol)+ ')'

    Returns the populated TacticalContext.  On a RecognitionException the
    error is recorded on the context and standard ANTLR recovery runs.
    NOTE: ANTLR-style generated code -- the state numbers and bit-set tests
    mirror the grammar's ATN and must not be hand-edited.
    """
    localctx = SMTLIBv2Parser.TacticalContext(self, self._ctx, self.state)
    self.enterRule(localctx, 194, self.RULE_tactical)
    self._la = 0 # Token type
    try:
        self.state = 912
        self._errHandler.sync(self)
        # Decision 61: select which alternative of 'tactical' applies.
        la_ = self._interp.adaptivePredict(self._input,61,self._ctx)
        if la_ == 1:
            # Alt 1: a bare identifier naming a tactic.
            self.enterOuterAlt(localctx, 1)
            self.state = 820
            self.identifier()
            pass
        elif la_ == 2:
            # Alt 2: the 'simplify' keyword.
            self.enterOuterAlt(localctx, 2)
            self.state = 821
            self.match(SMTLIBv2Parser.Simplify)
            pass
        elif la_ == 3:
            # Alt 3: '(' '!' tactical attribute? ')'
            self.enterOuterAlt(localctx, 3)
            self.state = 822
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 823
            self.match(SMTLIBv2Parser.GRW_Exclamation)
            self.state = 824
            self.tactical()
            self.state = 826
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional attribute: present iff lookahead is in the attribute-keyword set.
            if ((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << (SMTLIBv2Parser.PK_Version - 96)))) != 0):
                self.state = 825
                self.attribute()
            self.state = 828
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 4:
            # Alt 4: '(' tac_then tactical+ ')' -- one-or-more loop on FIRST(tactical).
            self.enterOuterAlt(localctx, 4)
            self.state = 830
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 831
            self.tac_then()
            self.state = 833
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 832
                self.tactical()
                self.state = 835
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Continue while lookahead is in FIRST(tactical).
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat) | (1 << SMTLIBv2Parser.Simplify))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 837
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 5:
            # Alt 5: '(' tac_and_then tactical+ ')'
            self.enterOuterAlt(localctx, 5)
            self.state = 839
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 840
            self.tac_and_then()
            self.state = 842
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 841
                self.tactical()
                self.state = 844
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat) | (1 << SMTLIBv2Parser.Simplify))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 846
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 6:
            # Alt 6: '(' par_then tactical+ tactical ')' -- the repeated part is
            # delimited by adaptive prediction (decision 54), then one final tactical.
            self.enterOuterAlt(localctx, 6)
            self.state = 848
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 849
            self.par_then()
            self.state = 851
            self._errHandler.sync(self)
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 850
                    self.tactical()
                else:
                    raise NoViableAltException(self)
                self.state = 853
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,54,self._ctx)
            self.state = 855
            self.tactical()
            self.state = 856
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 7:
            # Alt 7: '(' or_else tactical+ tactical ')' (decision 55 bounds the loop).
            self.enterOuterAlt(localctx, 7)
            self.state = 858
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 859
            self.or_else()
            self.state = 861
            self._errHandler.sync(self)
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 860
                    self.tactical()
                else:
                    raise NoViableAltException(self)
                self.state = 863
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,55,self._ctx)
            self.state = 865
            self.tactical()
            self.state = 866
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 8:
            # Alt 8: '(' par_or_else tactical+ ')'
            self.enterOuterAlt(localctx, 8)
            self.state = 868
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 869
            self.par_or_else()
            self.state = 871
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 870
                self.tactical()
                self.state = 873
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat) | (1 << SMTLIBv2Parser.Simplify))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 875
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 9:
            # Alt 9: '(' par_or tactical+ ')'
            self.enterOuterAlt(localctx, 9)
            self.state = 877
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 878
            self.par_or()
            self.state = 880
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 879
                self.tactical()
                self.state = 882
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat) | (1 << SMTLIBv2Parser.Simplify))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 884
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 10:
            # Alt 10: '(' tryFor tactical+ decimal ')'
            self.enterOuterAlt(localctx, 10)
            self.state = 886
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 887
            self.tryFor()
            self.state = 889
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 888
                self.tactical()
                self.state = 891
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat) | (1 << SMTLIBv2Parser.Simplify))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 893
            self.decimal()
            self.state = 894
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 11:
            # Alt 11: '(' usingParams tactical attribute ')'
            self.enterOuterAlt(localctx, 11)
            self.state = 896
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 897
            self.usingParams()
            self.state = 898
            self.tactical()
            self.state = 899
            self.attribute()
            self.state = 900
            self.match(SMTLIBv2Parser.ParClose)
            pass
        elif la_ == 12:
            # Alt 12: '(' cmd_echo (string | symbol)+ ')'
            self.enterOuterAlt(localctx, 12)
            self.state = 902
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 903
            self.cmd_echo()
            self.state = 906
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 906
                self._errHandler.sync(self)
                token = self._input.LA(1)
                if token in [SMTLIBv2Parser.String]:
                    self.state = 904
                    self.string()
                    pass
                elif token in [SMTLIBv2Parser.QuotedSymbol, SMTLIBv2Parser.PS_Not, SMTLIBv2Parser.PS_Bool, SMTLIBv2Parser.PS_Int, SMTLIBv2Parser.PS_Real, SMTLIBv2Parser.PS_ContinuedExecution, SMTLIBv2Parser.PS_Error, SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_ImmediateExit, SMTLIBv2Parser.PS_Incomplete, SMTLIBv2Parser.PS_Logic, SMTLIBv2Parser.PS_Memout, SMTLIBv2Parser.PS_Sat, SMTLIBv2Parser.PS_Success, SMTLIBv2Parser.PS_Theory, SMTLIBv2Parser.PS_True, SMTLIBv2Parser.PS_Unknown, SMTLIBv2Parser.PS_Unsupported, SMTLIBv2Parser.PS_Unsat, SMTLIBv2Parser.UndefinedSymbol]:
                    self.state = 905
                    self.symbol()
                    pass
                else:
                    raise NoViableAltException(self)
                self.state = 908
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Continue while lookahead can still start a string or symbol.
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
                    break
            self.state = 910
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        # Standard generated-parser recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CommandContext(ParserRuleContext):
    """Parse-tree context for the 'command' rule (RULE_command).

    Exposes typed accessors for every token and sub-rule that may appear in
    some alternative of 'command' (assert/assert-soft, check-sat variants,
    declarations, definitions, get-*/set-* commands, push/pop/reset,
    tacticals, echo, labels, ...).  Accessors that take an optional index
    ``i`` return the list of all matching children when ``i`` is None and
    the ``i``-th matching child otherwise; single-child accessors return
    None when the child is absent.  ANTLR-style generated code.
    """
    __slots__ = 'parser'
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # --- terminal-token accessors -------------------------------------
    def ParOpen(self, i:int=None):
        if i is None:
            return self.getTokens(SMTLIBv2Parser.ParOpen)
        else:
            return self.getToken(SMTLIBv2Parser.ParOpen, i)
    def cmd_assert(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_assertContext,0)
    def term(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.TermContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.TermContext,i)
    def ParClose(self, i:int=None):
        if i is None:
            return self.getTokens(SMTLIBv2Parser.ParClose)
        else:
            return self.getToken(SMTLIBv2Parser.ParClose, i)
    # --- command-keyword and operand sub-rule accessors ----------------
    def cmd_assertSoft(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_assertSoftContext,0)
    def attribute(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.AttributeContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext,i)
    def cmd_checkSat(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_checkSatContext,0)
    def cmd_checkSatAssuming(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_checkSatAssumingContext,0)
    def cmd_minimize(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_minimizeContext,0)
    def cmd_maximize(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_maximizeContext,0)
    def cmd_simplify(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_simplifyContext,0)
    def cmd_declareConst(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_declareConstContext,0)
    def symbol(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SymbolContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext,i)
    def sort(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SortContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.SortContext,i)
    def cmd_declareDatatype(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_declareDatatypeContext,0)
    def datatype_dec(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Datatype_decContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.Datatype_decContext,i)
    def cmd_declareCodatatype(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_declareCodatatypeContext,0)
    def cmd_declareDatatypes(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_declareDatatypesContext,0)
    def sort_dec(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Sort_decContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.Sort_decContext,i)
    def cmd_declareCodatatypes(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_declareCodatatypesContext,0)
    def cmd_declareFun(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_declareFunContext,0)
    def cmd_declareSort(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_declareSortContext,0)
    def numeral(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.NumeralContext,0)
    def cmd_define(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_defineContext,0)
    def cmd_defineFun(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_defineFunContext,0)
    def function_def(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Function_defContext,0)
    def cmd_defineConst(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_defineConstContext,0)
    def cmd_defineFunRec(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_defineFunRecContext,0)
    def cmd_defineFunsRec(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_defineFunsRecContext,0)
    def function_dec(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Function_decContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.Function_decContext,i)
    def cmd_display(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_displayContext,0)
    def cmd_defineSort(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_defineSortContext,0)
    def cmd_echo(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_echoContext,0)
    def string(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.StringContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.StringContext,i)
    def cmd_eval(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_evalContext,0)
    def cmd_exit(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_exitContext,0)
    def cmd_GetObjectives(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_GetObjectivesContext,0)
    def cmd_getAssertions(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getAssertionsContext,0)
    def cmd_getAssignment(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getAssignmentContext,0)
    def cmd_getInfo(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getInfoContext,0)
    def info_flag(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Info_flagContext,0)
    def cmd_getModel(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getModelContext,0)
    def cmd_blockModel(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_blockModelContext,0)
    def cmd_getOption(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getOptionContext,0)
    def keyword(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.KeywordContext,0)
    def cmd_getProof(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getProofContext,0)
    def cmd_getUnsatAssumptions(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getUnsatAssumptionsContext,0)
    def cmd_getUnsatCore(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getUnsatCoreContext,0)
    def cmd_getValue(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_getValueContext,0)
    def cmd_poly_factor(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_poly_factorContext,0)
    def cmd_pop(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_popContext,0)
    def cmd_push(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_pushContext,0)
    def cmd_reset(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_resetContext,0)
    def cmd_resetAssertions(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_resetAssertionsContext,0)
    def cmd_setInfo(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_setInfoContext,0)
    def cmd_setLogic(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_setLogicContext,0)
    def cmd_setOption(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_setOptionContext,0)
    def option(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.OptionContext,0)
    def cmd_checkSatUsing(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_checkSatUsingContext,0)
    def tactical(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.TacticalContext,0)
    def cmd_labels(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Cmd_labelsContext,0)
    # --- rule identity and listener dispatch --------------------------
    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_command
    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the enter hook.
        if hasattr( listener, "enterCommand" ):
            listener.enterCommand(self)
    def exitRule(self, listener:ParseTreeListener):
        # Dispatch only when the listener implements the exit hook.
        if hasattr( listener, "exitCommand" ):
            listener.exitCommand(self)
def command(self):
localctx = SMTLIBv2Parser.CommandContext(self, self._ctx, self.state)
self.enterRule(localctx, 196, self.RULE_command)
self._la = 0 # Token type
try:
self.state = 1238
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,78,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 914
self.match(SMTLIBv2Parser.ParOpen)
self.state = 915
self.cmd_assert()
self.state = 916
self.term()
self.state = 917
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 919
self.match(SMTLIBv2Parser.ParOpen)
self.state = 920
self.cmd_assertSoft()
self.state = 921
self.term()
self.state = 925
self._errHandler.sync(self)
_la = self._input.LA(1)
while ((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << 
(SMTLIBv2Parser.PK_Version - 96)))) != 0):
self.state = 922
self.attribute()
self.state = 927
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 928
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 930
self.match(SMTLIBv2Parser.ParOpen)
self.state = 931
self.cmd_checkSat()
self.state = 935
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0):
self.state = 932
self.term()
self.state = 937
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 938
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 940
self.match(SMTLIBv2Parser.ParOpen)
self.state = 941
self.cmd_checkSatAssuming()
self.state = 942
self.match(SMTLIBv2Parser.ParOpen)
self.state = 946
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0):
self.state = 943
self.term()
self.state = 948
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 949
self.match(SMTLIBv2Parser.ParClose)
self.state = 950
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 5:
self.enterOuterAlt(localctx, 5)
self.state = 952
self.match(SMTLIBv2Parser.ParOpen)
self.state = 953
self.cmd_minimize()
self.state = 954
self.term()
self.state = 955
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 6:
self.enterOuterAlt(localctx, 6)
self.state = 957
self.match(SMTLIBv2Parser.ParOpen)
self.state = 958
self.cmd_maximize()
self.state = 959
self.term()
self.state = 960
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 7:
self.enterOuterAlt(localctx, 7)
self.state = 962
self.match(SMTLIBv2Parser.ParOpen)
self.state = 963
self.cmd_simplify()
self.state = 964
self.term()
self.state = 968
self._errHandler.sync(self)
_la = self._input.LA(1)
while ((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << 
(SMTLIBv2Parser.PK_Version - 96)))) != 0):
self.state = 965
self.attribute()
self.state = 970
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 971
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 8:
self.enterOuterAlt(localctx, 8)
self.state = 973
self.match(SMTLIBv2Parser.ParOpen)
self.state = 974
self.cmd_declareConst()
self.state = 975
self.symbol()
self.state = 976
self.sort()
self.state = 977
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 9:
self.enterOuterAlt(localctx, 9)
self.state = 979
self.match(SMTLIBv2Parser.ParOpen)
self.state = 980
self.cmd_declareDatatype()
self.state = 981
self.symbol()
self.state = 982
self.datatype_dec()
self.state = 983
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 10:
self.enterOuterAlt(localctx, 10)
self.state = 985
self.match(SMTLIBv2Parser.ParOpen)
self.state = 986
self.cmd_declareCodatatype()
self.state = 987
self.symbol()
self.state = 988
self.datatype_dec()
self.state = 989
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 11:
self.enterOuterAlt(localctx, 11)
self.state = 991
self.match(SMTLIBv2Parser.ParOpen)
self.state = 992
self.cmd_declareDatatypes()
self.state = 993
self.match(SMTLIBv2Parser.ParOpen)
self.state = 995
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 994
self.sort_dec()
self.state = 997
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==SMTLIBv2Parser.ParOpen):
break
self.state = 999
self.match(SMTLIBv2Parser.ParClose)
self.state = 1000
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1002
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 1001
self.datatype_dec()
self.state = 1004
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==SMTLIBv2Parser.ParOpen):
break
self.state = 1006
self.match(SMTLIBv2Parser.ParClose)
self.state = 1007
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 12:
self.enterOuterAlt(localctx, 12)
self.state = 1009
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1010
self.cmd_declareCodatatypes()
self.state = 1011
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1013
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 1012
self.sort_dec()
self.state = 1015
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==SMTLIBv2Parser.ParOpen):
break
self.state = 1017
self.match(SMTLIBv2Parser.ParClose)
self.state = 1018
self.datatype_dec()
self.state = 1019
self.match(SMTLIBv2Parser.ParClose)
self.state = 1020
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1022
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 1021
self.datatype_dec()
self.state = 1024
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==SMTLIBv2Parser.ParOpen):
break
self.state = 1026
self.match(SMTLIBv2Parser.ParClose)
self.state = 1027
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 13:
self.enterOuterAlt(localctx, 13)
self.state = 1029
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1030
self.cmd_declareFun()
self.state = 1031
self.symbol()
self.state = 1032
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1036
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol:
self.state = 1033
self.sort()
self.state = 1038
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 1039
self.match(SMTLIBv2Parser.ParClose)
self.state = 1040
self.sort()
self.state = 1041
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 14:
self.enterOuterAlt(localctx, 14)
self.state = 1043
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1044
self.cmd_declareSort()
self.state = 1045
self.symbol()
self.state = 1047
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==SMTLIBv2Parser.Numeral:
self.state = 1046
self.numeral()
self.state = 1049
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 15:
self.enterOuterAlt(localctx, 15)
self.state = 1051
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1052
self.cmd_define()
self.state = 1053
self.symbol()
self.state = 1054
self.term()
self.state = 1055
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 16:
self.enterOuterAlt(localctx, 16)
self.state = 1057
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1058
self.cmd_defineFun()
self.state = 1059
self.function_def()
self.state = 1060
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 17:
self.enterOuterAlt(localctx, 17)
self.state = 1062
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1063
self.cmd_defineConst()
self.state = 1064
self.symbol()
self.state = 1065
self.sort()
self.state = 1066
self.term()
self.state = 1067
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 18:
self.enterOuterAlt(localctx, 18)
self.state = 1069
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1070
self.cmd_defineFunRec()
self.state = 1071
self.function_def()
self.state = 1072
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 19:
self.enterOuterAlt(localctx, 19)
self.state = 1074
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1075
self.cmd_defineFunsRec()
self.state = 1076
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1078
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 1077
self.function_dec()
self.state = 1080
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==SMTLIBv2Parser.ParOpen):
break
self.state = 1082
self.match(SMTLIBv2Parser.ParClose)
self.state = 1083
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1085
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 1084
self.term()
self.state = 1087
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0)):
break
self.state = 1089
self.match(SMTLIBv2Parser.ParClose)
self.state = 1090
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 20:
self.enterOuterAlt(localctx, 20)
self.state = 1092
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1093
self.cmd_display()
self.state = 1094
self.term()
self.state = 1095
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 21:
self.enterOuterAlt(localctx, 21)
self.state = 1097
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1098
self.cmd_defineSort()
self.state = 1099
self.symbol()
self.state = 1100
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1104
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol:
self.state = 1101
self.symbol()
self.state = 1106
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 1107
self.match(SMTLIBv2Parser.ParClose)
self.state = 1108
self.sort()
self.state = 1109
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 22:
self.enterOuterAlt(localctx, 22)
self.state = 1111
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1112
self.cmd_echo()
self.state = 1115
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 1115
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [SMTLIBv2Parser.String]:
self.state = 1113
self.string()
pass
elif token in [SMTLIBv2Parser.QuotedSymbol, SMTLIBv2Parser.PS_Not, SMTLIBv2Parser.PS_Bool, SMTLIBv2Parser.PS_Int, SMTLIBv2Parser.PS_Real, SMTLIBv2Parser.PS_ContinuedExecution, SMTLIBv2Parser.PS_Error, SMTLIBv2Parser.PS_False, SMTLIBv2Parser.PS_ImmediateExit, SMTLIBv2Parser.PS_Incomplete, SMTLIBv2Parser.PS_Logic, SMTLIBv2Parser.PS_Memout, SMTLIBv2Parser.PS_Sat, SMTLIBv2Parser.PS_Success, SMTLIBv2Parser.PS_Theory, SMTLIBv2Parser.PS_True, SMTLIBv2Parser.PS_Unknown, SMTLIBv2Parser.PS_Unsupported, SMTLIBv2Parser.PS_Unsat, SMTLIBv2Parser.UndefinedSymbol]:
self.state = 1114
self.symbol()
pass
else:
raise NoViableAltException(self)
self.state = 1117
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol):
break
self.state = 1119
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 23:
self.enterOuterAlt(localctx, 23)
self.state = 1121
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1122
self.cmd_eval()
self.state = 1123
self.term()
self.state = 1124
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 24:
self.enterOuterAlt(localctx, 24)
self.state = 1126
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1127
self.cmd_exit()
self.state = 1128
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 25:
self.enterOuterAlt(localctx, 25)
self.state = 1130
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1131
self.cmd_GetObjectives()
self.state = 1132
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 26:
self.enterOuterAlt(localctx, 26)
self.state = 1134
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1135
self.cmd_getAssertions()
self.state = 1136
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 27:
self.enterOuterAlt(localctx, 27)
self.state = 1138
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1139
self.cmd_getAssignment()
self.state = 1140
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 28:
self.enterOuterAlt(localctx, 28)
self.state = 1142
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1143
self.cmd_getInfo()
self.state = 1144
self.info_flag()
self.state = 1145
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 29:
self.enterOuterAlt(localctx, 29)
self.state = 1147
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1148
self.cmd_getModel()
self.state = 1149
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 30:
self.enterOuterAlt(localctx, 30)
self.state = 1151
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1152
self.cmd_blockModel()
self.state = 1153
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 31:
self.enterOuterAlt(localctx, 31)
self.state = 1155
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1156
self.cmd_getOption()
self.state = 1157
self.keyword()
self.state = 1158
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 32:
self.enterOuterAlt(localctx, 32)
self.state = 1160
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1161
self.cmd_getProof()
self.state = 1162
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 33:
self.enterOuterAlt(localctx, 33)
self.state = 1164
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1165
self.cmd_getUnsatAssumptions()
self.state = 1166
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 34:
self.enterOuterAlt(localctx, 34)
self.state = 1168
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1169
self.cmd_getUnsatCore()
self.state = 1170
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 35:
self.enterOuterAlt(localctx, 35)
self.state = 1172
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1173
self.cmd_getValue()
self.state = 1174
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1176
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 1175
self.term()
self.state = 1178
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0)):
break
self.state = 1180
self.match(SMTLIBv2Parser.ParClose)
self.state = 1181
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 36:
self.enterOuterAlt(localctx, 36)
self.state = 1183
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1184
self.cmd_poly_factor()
self.state = 1185
self.term()
self.state = 1186
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 37:
self.enterOuterAlt(localctx, 37)
self.state = 1188
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1189
self.cmd_pop()
self.state = 1190
self.numeral()
self.state = 1191
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 38:
self.enterOuterAlt(localctx, 38)
self.state = 1193
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1194
self.cmd_pop()
self.state = 1195
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 39:
self.enterOuterAlt(localctx, 39)
self.state = 1197
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1198
self.cmd_push()
self.state = 1199
self.numeral()
self.state = 1200
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 40:
self.enterOuterAlt(localctx, 40)
self.state = 1202
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1203
self.cmd_push()
self.state = 1204
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 41:
self.enterOuterAlt(localctx, 41)
self.state = 1206
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1207
self.cmd_reset()
self.state = 1208
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 42:
self.enterOuterAlt(localctx, 42)
self.state = 1210
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1211
self.cmd_resetAssertions()
self.state = 1212
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 43:
self.enterOuterAlt(localctx, 43)
self.state = 1214
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1215
self.cmd_setInfo()
self.state = 1216
self.attribute()
self.state = 1217
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 44:
self.enterOuterAlt(localctx, 44)
self.state = 1219
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1220
self.cmd_setLogic()
self.state = 1221
self.symbol()
self.state = 1222
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 45:
self.enterOuterAlt(localctx, 45)
self.state = 1224
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1225
self.cmd_setOption()
self.state = 1226
self.option()
self.state = 1227
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 46:
self.enterOuterAlt(localctx, 46)
self.state = 1229
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1230
self.cmd_checkSatUsing()
self.state = 1231
self.tactical()
self.state = 1232
self.match(SMTLIBv2Parser.ParClose)
pass
elif la_ == 47:
self.enterOuterAlt(localctx, 47)
self.state = 1234
self.match(SMTLIBv2Parser.ParOpen)
self.state = 1235
self.cmd_labels()
self.state = 1236
self.match(SMTLIBv2Parser.ParClose)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class B_valueContext(ParserRuleContext):
    """Parse-tree context for the <b_value> rule: a boolean literal token,
    either PS_True or PS_False (ANTLR4-generated; do not edit by hand)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PS_True(self):
        # Accessor for the 'true' keyword token, if this alternative matched.
        return self.getToken(SMTLIBv2Parser.PS_True, 0)

    def PS_False(self):
        # Accessor for the 'false' keyword token, if this alternative matched.
        return self.getToken(SMTLIBv2Parser.PS_False, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_b_value

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener only when it implements the hook.
        if hasattr( listener, "enterB_value" ):
            listener.enterB_value(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitB_value" ):
            listener.exitB_value(self)
def b_value(self):
    """Parse the <b_value> rule: exactly one PS_True or PS_False token.

    Returns the populated B_valueContext. On a recognition error the
    standard ANTLR error strategy reports/recovers and the (possibly
    incomplete) context is still returned with its ``exception`` set.
    """
    localctx = SMTLIBv2Parser.B_valueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 198, self.RULE_b_value)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1240
        _la = self._input.LA(1)
        # Token-set match: accept either boolean keyword, otherwise
        # attempt single-token inline recovery.
        if not(_la==SMTLIBv2Parser.PS_False or _la==SMTLIBv2Parser.PS_True):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class OptionContext(ParserRuleContext):
    """Parse-tree context for the <option> rule: a solver option keyword
    followed by its value (string, b_value, or numeral), or a generic
    attribute (ANTLR4-generated; do not edit by hand)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- option-keyword token accessors (one per recognized option) ---

    def PK_DiagnosticOutputChannel(self):
        return self.getToken(SMTLIBv2Parser.PK_DiagnosticOutputChannel, 0)

    def string(self):
        # String-valued options (:diagnostic-output-channel, :regular-output-channel).
        return self.getTypedRuleContext(SMTLIBv2Parser.StringContext,0)

    def PK_GlobalDeclarations(self):
        return self.getToken(SMTLIBv2Parser.PK_GlobalDeclarations, 0)

    def b_value(self):
        # Boolean-valued options (:global-declarations, :print-success, ...).
        return self.getTypedRuleContext(SMTLIBv2Parser.B_valueContext,0)

    def PK_InteractiveMode(self):
        return self.getToken(SMTLIBv2Parser.PK_InteractiveMode, 0)

    def PK_PrintSuccess(self):
        return self.getToken(SMTLIBv2Parser.PK_PrintSuccess, 0)

    def PK_ProduceAssertions(self):
        return self.getToken(SMTLIBv2Parser.PK_ProduceAssertions, 0)

    def PK_ProduceAssignments(self):
        return self.getToken(SMTLIBv2Parser.PK_ProduceAssignments, 0)

    def PK_ProduceModels(self):
        return self.getToken(SMTLIBv2Parser.PK_ProduceModels, 0)

    def PK_ProduceProofs(self):
        return self.getToken(SMTLIBv2Parser.PK_ProduceProofs, 0)

    def PK_ProduceUnsatAssumptions(self):
        return self.getToken(SMTLIBv2Parser.PK_ProduceUnsatAssumptions, 0)

    def PK_ProduceUnsatCores(self):
        return self.getToken(SMTLIBv2Parser.PK_ProduceUnsatCores, 0)

    def PK_RandomSeed(self):
        return self.getToken(SMTLIBv2Parser.PK_RandomSeed, 0)

    def numeral(self):
        # Numeral-valued options (:random-seed, :reproducible-resource-limit, :verbosity).
        return self.getTypedRuleContext(SMTLIBv2Parser.NumeralContext,0)

    def PK_RegularOutputChannel(self):
        return self.getToken(SMTLIBv2Parser.PK_RegularOutputChannel, 0)

    def PK_ReproducibleResourceLimit(self):
        return self.getToken(SMTLIBv2Parser.PK_ReproducibleResourceLimit, 0)

    def PK_Verbosity(self):
        return self.getToken(SMTLIBv2Parser.PK_Verbosity, 0)

    def attribute(self):
        # Fallback alternative: any other attribute counts as an option.
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext,0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_option

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOption" ):
            listener.enterOption(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOption" ):
            listener.exitOption(self)
def option(self):
    """Parse the <option> rule.

    Alternatives 1-14 are specific option keywords each followed by a
    typed value (string / b_value / numeral); alternative 15 is the
    catch-all ``attribute``. The choice is made by adaptive LL(*)
    prediction over decision 79 of the ATN.
    """
    localctx = SMTLIBv2Parser.OptionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 200, self.RULE_option)
    try:
        self.state = 1271
        self._errHandler.sync(self)
        # Decision 79: select which option keyword (or fallback) follows.
        la_ = self._interp.adaptivePredict(self._input,79,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1242
            self.match(SMTLIBv2Parser.PK_DiagnosticOutputChannel)
            self.state = 1243
            self.string()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1244
            self.match(SMTLIBv2Parser.PK_GlobalDeclarations)
            self.state = 1245
            self.b_value()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1246
            self.match(SMTLIBv2Parser.PK_InteractiveMode)
            self.state = 1247
            self.b_value()
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1248
            self.match(SMTLIBv2Parser.PK_PrintSuccess)
            self.state = 1249
            self.b_value()
            pass

        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1250
            self.match(SMTLIBv2Parser.PK_ProduceAssertions)
            self.state = 1251
            self.b_value()
            pass

        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 1252
            self.match(SMTLIBv2Parser.PK_ProduceAssignments)
            self.state = 1253
            self.b_value()
            pass

        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 1254
            self.match(SMTLIBv2Parser.PK_ProduceModels)
            self.state = 1255
            self.b_value()
            pass

        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 1256
            self.match(SMTLIBv2Parser.PK_ProduceProofs)
            self.state = 1257
            self.b_value()
            pass

        elif la_ == 9:
            self.enterOuterAlt(localctx, 9)
            self.state = 1258
            self.match(SMTLIBv2Parser.PK_ProduceUnsatAssumptions)
            self.state = 1259
            self.b_value()
            pass

        elif la_ == 10:
            self.enterOuterAlt(localctx, 10)
            self.state = 1260
            self.match(SMTLIBv2Parser.PK_ProduceUnsatCores)
            self.state = 1261
            self.b_value()
            pass

        elif la_ == 11:
            self.enterOuterAlt(localctx, 11)
            self.state = 1262
            self.match(SMTLIBv2Parser.PK_RandomSeed)
            self.state = 1263
            self.numeral()
            pass

        elif la_ == 12:
            self.enterOuterAlt(localctx, 12)
            self.state = 1264
            self.match(SMTLIBv2Parser.PK_RegularOutputChannel)
            self.state = 1265
            self.string()
            pass

        elif la_ == 13:
            self.enterOuterAlt(localctx, 13)
            self.state = 1266
            self.match(SMTLIBv2Parser.PK_ReproducibleResourceLimit)
            self.state = 1267
            self.numeral()
            pass

        elif la_ == 14:
            self.enterOuterAlt(localctx, 14)
            self.state = 1268
            self.match(SMTLIBv2Parser.PK_Verbosity)
            self.state = 1269
            self.numeral()
            pass

        elif la_ == 15:
            # Fallback: any other attribute is accepted as an option.
            self.enterOuterAlt(localctx, 15)
            self.state = 1270
            self.attribute()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Info_flagContext(ParserRuleContext):
    """Parse-tree context for the <info_flag> rule: one of the well-known
    get-info keywords, or an arbitrary keyword as fallback
    (ANTLR4-generated; do not edit by hand)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PK_AllStatistics(self):
        return self.getToken(SMTLIBv2Parser.PK_AllStatistics, 0)

    def PK_AssertionStackLevels(self):
        return self.getToken(SMTLIBv2Parser.PK_AssertionStackLevels, 0)

    def PK_Authors(self):
        return self.getToken(SMTLIBv2Parser.PK_Authors, 0)

    def PK_ErrorBehaviour(self):
        return self.getToken(SMTLIBv2Parser.PK_ErrorBehaviour, 0)

    def PK_Name(self):
        return self.getToken(SMTLIBv2Parser.PK_Name, 0)

    def PK_ReasonUnknown(self):
        return self.getToken(SMTLIBv2Parser.PK_ReasonUnknown, 0)

    def PK_Version(self):
        return self.getToken(SMTLIBv2Parser.PK_Version, 0)

    def keyword(self):
        # Fallback alternative: any other keyword is a legal info flag.
        return self.getTypedRuleContext(SMTLIBv2Parser.KeywordContext,0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_info_flag

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInfo_flag" ):
            listener.enterInfo_flag(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInfo_flag" ):
            listener.exitInfo_flag(self)
def info_flag(self):
    """Parse the <info_flag> rule.

    Alternatives 1-7 match the specific get-info keywords; alternative 8
    accepts any other ``keyword``. Selected via adaptive prediction over
    decision 80 of the ATN.
    """
    localctx = SMTLIBv2Parser.Info_flagContext(self, self._ctx, self.state)
    self.enterRule(localctx, 202, self.RULE_info_flag)
    try:
        self.state = 1281
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,80,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1273
            self.match(SMTLIBv2Parser.PK_AllStatistics)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1274
            self.match(SMTLIBv2Parser.PK_AssertionStackLevels)
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1275
            self.match(SMTLIBv2Parser.PK_Authors)
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1276
            self.match(SMTLIBv2Parser.PK_ErrorBehaviour)
            pass

        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1277
            self.match(SMTLIBv2Parser.PK_Name)
            pass

        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 1278
            self.match(SMTLIBv2Parser.PK_ReasonUnknown)
            pass

        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 1279
            self.match(SMTLIBv2Parser.PK_Version)
            pass

        elif la_ == 8:
            # Fallback: any other keyword.
            self.enterOuterAlt(localctx, 8)
            self.state = 1280
            self.keyword()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Error_behaviourContext(ParserRuleContext):
    """Parse-tree context for <error_behaviour>: either the
    'immediate-exit' or 'continued-execution' symbol
    (ANTLR4-generated; do not edit by hand)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PS_ImmediateExit(self):
        return self.getToken(SMTLIBv2Parser.PS_ImmediateExit, 0)

    def PS_ContinuedExecution(self):
        return self.getToken(SMTLIBv2Parser.PS_ContinuedExecution, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_error_behaviour

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterError_behaviour" ):
            listener.enterError_behaviour(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitError_behaviour" ):
            listener.exitError_behaviour(self)
def error_behaviour(self):
    """Parse the <error_behaviour> rule: exactly one PS_ContinuedExecution
    or PS_ImmediateExit token, with inline recovery on mismatch."""
    localctx = SMTLIBv2Parser.Error_behaviourContext(self, self._ctx, self.state)
    self.enterRule(localctx, 204, self.RULE_error_behaviour)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1283
        _la = self._input.LA(1)
        # Token-set match over the two permitted symbols.
        if not(_la==SMTLIBv2Parser.PS_ContinuedExecution or _la==SMTLIBv2Parser.PS_ImmediateExit):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Reason_unknownContext(ParserRuleContext):
    """Parse-tree context for <reason_unknown>: 'memout', 'incomplete',
    or an arbitrary s-expression (ANTLR4-generated; do not edit by hand)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PS_Memout(self):
        return self.getToken(SMTLIBv2Parser.PS_Memout, 0)

    def PS_Incomplete(self):
        return self.getToken(SMTLIBv2Parser.PS_Incomplete, 0)

    def s_expr(self):
        # Fallback alternative: any s-expression may describe the reason.
        return self.getTypedRuleContext(SMTLIBv2Parser.S_exprContext,0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_reason_unknown

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterReason_unknown" ):
            listener.enterReason_unknown(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitReason_unknown" ):
            listener.exitReason_unknown(self)
def reason_unknown(self):
    """Parse the <reason_unknown> rule.

    Alt 1 = PS_Memout, alt 2 = PS_Incomplete, alt 3 = generic s_expr;
    selected via adaptive prediction over decision 81 of the ATN.
    """
    localctx = SMTLIBv2Parser.Reason_unknownContext(self, self._ctx, self.state)
    self.enterRule(localctx, 206, self.RULE_reason_unknown)
    try:
        self.state = 1288
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,81,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1285
            self.match(SMTLIBv2Parser.PS_Memout)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1286
            self.match(SMTLIBv2Parser.PS_Incomplete)
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1287
            self.s_expr()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Model_responseContext(ParserRuleContext):
    """Parse-tree context for <model_response>: one model entry — a
    define-fun, define-fun-rec, or define-funs-rec form
    (ANTLR4-generated; do not edit by hand)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self, i:int=None):
        # With i=None returns all '(' tokens; with an index, the i-th one.
        if i is None:
            return self.getTokens(SMTLIBv2Parser.ParOpen)
        else:
            return self.getToken(SMTLIBv2Parser.ParOpen, i)

    def CMD_DefineFun(self):
        return self.getToken(SMTLIBv2Parser.CMD_DefineFun, 0)

    def function_def(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Function_defContext,0)

    def ParClose(self, i:int=None):
        if i is None:
            return self.getTokens(SMTLIBv2Parser.ParClose)
        else:
            return self.getToken(SMTLIBv2Parser.ParClose, i)

    def CMD_DefineFunRec(self):
        return self.getToken(SMTLIBv2Parser.CMD_DefineFunRec, 0)

    def CMD_DefineFunsRec(self):
        return self.getToken(SMTLIBv2Parser.CMD_DefineFunsRec, 0)

    def function_dec(self, i:int=None):
        # define-funs-rec carries one function_dec per defined function.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Function_decContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.Function_decContext,i)

    def term(self, i:int=None):
        # define-funs-rec carries one body term per defined function.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.TermContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.TermContext,i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_model_response

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterModel_response" ):
            listener.enterModel_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitModel_response" ):
            listener.exitModel_response(self)
def model_response(self):
    """Parse the <model_response> rule.

    Alt 1: '(' define-fun function_def ')'
    Alt 2: '(' define-fun-rec function_def ')'
    Alt 3: '(' define-funs-rec '(' function_dec+ ')' '(' term+ ')' ')'
    Selected via adaptive prediction over decision 84; the two inner
    loops of alt 3 are (1+)-style do-while loops driven by token-set
    bitmask tests.
    """
    localctx = SMTLIBv2Parser.Model_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 208, self.RULE_model_response)
    self._la = 0 # Token type
    try:
        self.state = 1318
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,84,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1290
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 1291
            self.match(SMTLIBv2Parser.CMD_DefineFun)
            self.state = 1292
            self.function_def()
            self.state = 1293
            self.match(SMTLIBv2Parser.ParClose)
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1295
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 1296
            self.match(SMTLIBv2Parser.CMD_DefineFunRec)
            self.state = 1297
            self.function_def()
            self.state = 1298
            self.match(SMTLIBv2Parser.ParClose)
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1300
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 1301
            self.match(SMTLIBv2Parser.CMD_DefineFunsRec)
            self.state = 1302
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 1304
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One-or-more function_dec entries; each starts with '('.
            while True:
                self.state = 1303
                self.function_dec()
                self.state = 1306
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (_la==SMTLIBv2Parser.ParOpen):
                    break

            self.state = 1308
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 1309
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 1311
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One-or-more body terms; the bitmask is the FIRST set of <term>.
            while True:
                self.state = 1310
                self.term()
                self.state = 1313
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0)):
                    break

            self.state = 1315
            self.match(SMTLIBv2Parser.ParClose)
            self.state = 1316
            self.match(SMTLIBv2Parser.ParClose)
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Info_responseContext(ParserRuleContext):
    """Parse-tree context for <info_response>: a known info keyword with
    its typed value, or a generic attribute
    (ANTLR4-generated; do not edit by hand)."""

    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PK_AssertionStackLevels(self):
        return self.getToken(SMTLIBv2Parser.PK_AssertionStackLevels, 0)

    def numeral(self):
        # Value of :assertion-stack-levels.
        return self.getTypedRuleContext(SMTLIBv2Parser.NumeralContext,0)

    def PK_Authors(self):
        return self.getToken(SMTLIBv2Parser.PK_Authors, 0)

    def string(self):
        # Value of :authors, :name, or :version.
        return self.getTypedRuleContext(SMTLIBv2Parser.StringContext,0)

    def PK_ErrorBehaviour(self):
        return self.getToken(SMTLIBv2Parser.PK_ErrorBehaviour, 0)

    def error_behaviour(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Error_behaviourContext,0)

    def PK_Name(self):
        return self.getToken(SMTLIBv2Parser.PK_Name, 0)

    def PK_ReasonUnknown(self):
        return self.getToken(SMTLIBv2Parser.PK_ReasonUnknown, 0)

    def reason_unknown(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Reason_unknownContext,0)

    def PK_Version(self):
        return self.getToken(SMTLIBv2Parser.PK_Version, 0)

    def attribute(self):
        # Fallback alternative: any other attribute.
        return self.getTypedRuleContext(SMTLIBv2Parser.AttributeContext,0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_info_response

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInfo_response" ):
            listener.enterInfo_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInfo_response" ):
            listener.exitInfo_response(self)
def info_response(self):
    """Parse the <info_response> rule.

    Alternatives 1-6 match the known info keywords with their typed
    values; alternative 7 accepts a generic attribute. Selected via
    adaptive prediction over decision 85 of the ATN.
    """
    localctx = SMTLIBv2Parser.Info_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 210, self.RULE_info_response)
    try:
        self.state = 1333
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,85,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1320
            self.match(SMTLIBv2Parser.PK_AssertionStackLevels)
            self.state = 1321
            self.numeral()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1322
            self.match(SMTLIBv2Parser.PK_Authors)
            self.state = 1323
            self.string()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1324
            self.match(SMTLIBv2Parser.PK_ErrorBehaviour)
            self.state = 1325
            self.error_behaviour()
            pass

        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1326
            self.match(SMTLIBv2Parser.PK_Name)
            self.state = 1327
            self.string()
            pass

        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1328
            self.match(SMTLIBv2Parser.PK_ReasonUnknown)
            self.state = 1329
            self.reason_unknown()
            pass

        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 1330
            self.match(SMTLIBv2Parser.PK_Version)
            self.state = 1331
            self.string()
            pass

        elif la_ == 7:
            # Fallback: any other attribute.
            self.enterOuterAlt(localctx, 7)
            self.state = 1332
            self.attribute()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Valuation_pairContext(ParserRuleContext):
    """Generated parse-tree node for the `valuation_pair` rule.

    The methods below are accessors for the node's child tokens and
    sub-rule contexts.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def term(self, i:int=None):
        # i is None -> all child term contexts; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.TermContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.TermContext,i)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_valuation_pair

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterValuation_pair" ):
            listener.enterValuation_pair(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitValuation_pair" ):
            listener.exitValuation_pair(self)
def valuation_pair(self):
    """ANTLR-generated parse method for `valuation_pair`: '(' term term ')'."""
    localctx = SMTLIBv2Parser.Valuation_pairContext(self, self._ctx, self.state)
    self.enterRule(localctx, 212, self.RULE_valuation_pair)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1335
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1336
        self.term()
        self.state = 1337
        self.term()
        self.state = 1338
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class T_valuation_pairContext(ParserRuleContext):
    """Generated parse-tree node for the `t_valuation_pair` rule.

    Accessors below expose the '(' symbol b_value ')' children.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def symbol(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext,0)

    def b_value(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.B_valueContext,0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_t_valuation_pair

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterT_valuation_pair" ):
            listener.enterT_valuation_pair(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitT_valuation_pair" ):
            listener.exitT_valuation_pair(self)
def t_valuation_pair(self):
    """ANTLR-generated parse method for `t_valuation_pair`: '(' symbol b_value ')'."""
    localctx = SMTLIBv2Parser.T_valuation_pairContext(self, self._ctx, self.state)
    self.enterRule(localctx, 214, self.RULE_t_valuation_pair)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1340
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1341
        self.symbol()
        self.state = 1342
        self.b_value()
        self.state = 1343
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Check_sat_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `check_sat_response` rule
    (one of the tokens sat / unsat / unknown)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PS_Sat(self):
        return self.getToken(SMTLIBv2Parser.PS_Sat, 0)

    def PS_Unsat(self):
        return self.getToken(SMTLIBv2Parser.PS_Unsat, 0)

    def PS_Unknown(self):
        return self.getToken(SMTLIBv2Parser.PS_Unknown, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_check_sat_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterCheck_sat_response" ):
            listener.enterCheck_sat_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCheck_sat_response" ):
            listener.exitCheck_sat_response(self)
def check_sat_response(self):
    """ANTLR-generated parse method for `check_sat_response`.

    Accepts exactly one of the PS_Sat / PS_Unknown / PS_Unsat tokens via a
    bit-set membership test on the lookahead token type.
    """
    localctx = SMTLIBv2Parser.Check_sat_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 216, self.RULE_check_sat_response)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1345
        _la = self._input.LA(1)
        # Generated token-set check: is the lookahead sat/unknown/unsat?
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Echo_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `echo_response` rule (a string)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def string(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.StringContext,0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_echo_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterEcho_response" ):
            listener.enterEcho_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitEcho_response" ):
            listener.exitEcho_response(self)
def echo_response(self):
    """ANTLR-generated parse method for `echo_response`: a single string."""
    localctx = SMTLIBv2Parser.Echo_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 218, self.RULE_echo_response)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1347
        self.string()
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_assertions_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_assertions_response` rule:
    a parenthesised list of terms."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def term(self, i:int=None):
        # i is None -> all child term contexts; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.TermContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.TermContext,i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_assertions_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_assertions_response" ):
            listener.enterGet_assertions_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_assertions_response" ):
            listener.exitGet_assertions_response(self)
def get_assertions_response(self):
    """ANTLR-generated parse method for `get_assertions_response`: '(' term* ')'.

    The while-condition is a generated bit-set test over the token types
    that can start a `term`; the loop therefore consumes zero or more terms.
    """
    localctx = SMTLIBv2Parser.Get_assertions_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 220, self.RULE_get_assertions_response)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1349
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1353
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero-or-more terms: loop while the lookahead can start a term.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.ParOpen) | (1 << SMTLIBv2Parser.String) | (1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.RegConst) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or ((((_la - 92)) & ~0x3f) == 0 and ((1 << (_la - 92)) & ((1 << (SMTLIBv2Parser.Numeral - 92)) | (1 << (SMTLIBv2Parser.Binary - 92)) | (1 << (SMTLIBv2Parser.HexDecimal - 92)) | (1 << (SMTLIBv2Parser.Decimal - 92)) | (1 << (SMTLIBv2Parser.UndefinedSymbol - 92)))) != 0):
            self.state = 1350
            self.term()
            self.state = 1355
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 1356
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_assignment_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_assignment_response` rule:
    a parenthesised list of t_valuation_pair entries."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def t_valuation_pair(self, i:int=None):
        # i is None -> all child pair contexts; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.T_valuation_pairContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.T_valuation_pairContext,i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_assignment_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_assignment_response" ):
            listener.enterGet_assignment_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_assignment_response" ):
            listener.exitGet_assignment_response(self)
def get_assignment_response(self):
    """ANTLR-generated parse method for `get_assignment_response`:
    '(' t_valuation_pair* ')'."""
    localctx = SMTLIBv2Parser.Get_assignment_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 222, self.RULE_get_assignment_response)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1358
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1362
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more pairs; each one starts with '('.
        while _la==SMTLIBv2Parser.ParOpen:
            self.state = 1359
            self.t_valuation_pair()
            self.state = 1364
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 1365
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_info_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_info_response` rule:
    a parenthesised list of info_response entries."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def info_response(self, i:int=None):
        # i is None -> all child info_response contexts; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Info_responseContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.Info_responseContext,i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_info_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_info_response" ):
            listener.enterGet_info_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_info_response" ):
            listener.exitGet_info_response(self)
def get_info_response(self):
    """ANTLR-generated parse method for `get_info_response`:
    '(' info_response+ ')'.

    The do/while-style loop (`while True` with a conditional break) enforces
    one-or-more occurrences; the break condition is a generated bit-set test
    over the keyword tokens that can start an `info_response`.
    """
    localctx = SMTLIBv2Parser.Get_info_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 224, self.RULE_get_info_response)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1367
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1369
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while True:
            self.state = 1368
            self.info_response()
            self.state = 1371
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Stop once the lookahead can no longer start an info_response.
            if not (((((_la - 96)) & ~0x3f) == 0 and ((1 << (_la - 96)) & ((1 << (SMTLIBv2Parser.Colon - 96)) | (1 << (SMTLIBv2Parser.PK_AllStatistics - 96)) | (1 << (SMTLIBv2Parser.PK_AssertionStackLevels - 96)) | (1 << (SMTLIBv2Parser.PK_Authors - 96)) | (1 << (SMTLIBv2Parser.PK_Category - 96)) | (1 << (SMTLIBv2Parser.PK_Chainable - 96)) | (1 << (SMTLIBv2Parser.PK_Definition - 96)) | (1 << (SMTLIBv2Parser.PK_DiagnosticOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ErrorBehaviour - 96)) | (1 << (SMTLIBv2Parser.PK_Extension - 96)) | (1 << (SMTLIBv2Parser.PK_Funs - 96)) | (1 << (SMTLIBv2Parser.PK_FunsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_GlobalDeclarations - 96)) | (1 << (SMTLIBv2Parser.PK_InteractiveMode - 96)) | (1 << (SMTLIBv2Parser.PK_Language - 96)) | (1 << (SMTLIBv2Parser.PK_LeftAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_License - 96)) | (1 << (SMTLIBv2Parser.PK_Named - 96)) | (1 << (SMTLIBv2Parser.PK_Name - 96)) | (1 << (SMTLIBv2Parser.PK_Notes - 96)) | (1 << (SMTLIBv2Parser.PK_Pattern - 96)) | (1 << (SMTLIBv2Parser.PK_PrintSuccess - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssertions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceAssignments - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceModels - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceProofs - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatAssumptions - 96)) | (1 << (SMTLIBv2Parser.PK_ProduceUnsatCores - 96)) | (1 << (SMTLIBv2Parser.PK_RandomSeed - 96)) | (1 << (SMTLIBv2Parser.PK_ReasonUnknown - 96)) | (1 << (SMTLIBv2Parser.PK_RegularOutputChannel - 96)) | (1 << (SMTLIBv2Parser.PK_ReproducibleResourceLimit - 96)) | (1 << (SMTLIBv2Parser.PK_RightAssoc - 96)) | (1 << (SMTLIBv2Parser.PK_SmtLibVersion - 96)) | (1 << (SMTLIBv2Parser.PK_Sorts - 96)) | (1 << (SMTLIBv2Parser.PK_SortsDescription - 96)) | (1 << (SMTLIBv2Parser.PK_Source - 96)) | (1 << (SMTLIBv2Parser.PK_Status - 96)) | (1 << (SMTLIBv2Parser.PK_Theories - 96)) | (1 << (SMTLIBv2Parser.PK_Values - 96)) | (1 << (SMTLIBv2Parser.PK_Verbosity - 96)) | (1 << (SMTLIBv2Parser.PK_Version - 96)))) != 0)):
                break
        self.state = 1373
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_model_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_model_response` rule:
    a parenthesised list of model_response entries."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def model_response(self, i:int=None):
        # i is None -> all child model_response contexts; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Model_responseContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.Model_responseContext,i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_model_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_model_response" ):
            listener.enterGet_model_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_model_response" ):
            listener.exitGet_model_response(self)
def get_model_response(self):
    """ANTLR-generated parse method for `get_model_response`:
    '(' model_response* ')'."""
    localctx = SMTLIBv2Parser.Get_model_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 226, self.RULE_get_model_response)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1375
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1379
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more model responses; each one starts with '('.
        while _la==SMTLIBv2Parser.ParOpen:
            self.state = 1376
            self.model_response()
            self.state = 1381
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 1382
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_option_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_option_response` rule
    (an attribute_value)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def attribute_value(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Attribute_valueContext,0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_option_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_option_response" ):
            listener.enterGet_option_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_option_response" ):
            listener.exitGet_option_response(self)
def get_option_response(self):
    """ANTLR-generated parse method for `get_option_response`: an attribute_value."""
    localctx = SMTLIBv2Parser.Get_option_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 228, self.RULE_get_option_response)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1384
        self.attribute_value()
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_proof_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_proof_response` rule
    (an s-expression)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def s_expr(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.S_exprContext,0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_proof_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_proof_response" ):
            listener.enterGet_proof_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_proof_response" ):
            listener.exitGet_proof_response(self)
def get_proof_response(self):
    """ANTLR-generated parse method for `get_proof_response`: an s_expr."""
    localctx = SMTLIBv2Parser.Get_proof_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 230, self.RULE_get_proof_response)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1386
        self.s_expr()
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_unsat_assump_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_unsat_assump_response` rule:
    a parenthesised list of symbols."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def symbol(self, i:int=None):
        # i is None -> all child symbol contexts; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SymbolContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext,i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_unsat_assump_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_unsat_assump_response" ):
            listener.enterGet_unsat_assump_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_unsat_assump_response" ):
            listener.exitGet_unsat_assump_response(self)
def get_unsat_assump_response(self):
    """ANTLR-generated parse method for `get_unsat_assump_response`:
    '(' symbol* ')'.

    The while-condition is a generated bit-set test over the token types
    that can start a `symbol`.
    """
    localctx = SMTLIBv2Parser.Get_unsat_assump_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 232, self.RULE_get_unsat_assump_response)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1388
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1392
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero-or-more symbols: loop while the lookahead can start a symbol.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol:
            self.state = 1389
            self.symbol()
            self.state = 1394
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 1395
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_unsat_core_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_unsat_core_response` rule:
    a parenthesised list of symbols."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def symbol(self, i:int=None):
        # i is None -> all child symbol contexts; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.SymbolContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.SymbolContext,i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_unsat_core_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_unsat_core_response" ):
            listener.enterGet_unsat_core_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_unsat_core_response" ):
            listener.exitGet_unsat_core_response(self)
def get_unsat_core_response(self):
    """ANTLR-generated parse method for `get_unsat_core_response`:
    '(' symbol* ')'.

    The while-condition is a generated bit-set test over the token types
    that can start a `symbol`.
    """
    localctx = SMTLIBv2Parser.Get_unsat_core_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 234, self.RULE_get_unsat_core_response)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1397
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1401
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero-or-more symbols: loop while the lookahead can start a symbol.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SMTLIBv2Parser.QuotedSymbol) | (1 << SMTLIBv2Parser.PS_Not) | (1 << SMTLIBv2Parser.PS_Bool) | (1 << SMTLIBv2Parser.PS_Int) | (1 << SMTLIBv2Parser.PS_Real) | (1 << SMTLIBv2Parser.PS_ContinuedExecution) | (1 << SMTLIBv2Parser.PS_Error) | (1 << SMTLIBv2Parser.PS_False) | (1 << SMTLIBv2Parser.PS_ImmediateExit) | (1 << SMTLIBv2Parser.PS_Incomplete) | (1 << SMTLIBv2Parser.PS_Logic) | (1 << SMTLIBv2Parser.PS_Memout) | (1 << SMTLIBv2Parser.PS_Sat) | (1 << SMTLIBv2Parser.PS_Success) | (1 << SMTLIBv2Parser.PS_Theory) | (1 << SMTLIBv2Parser.PS_True) | (1 << SMTLIBv2Parser.PS_Unknown) | (1 << SMTLIBv2Parser.PS_Unsupported) | (1 << SMTLIBv2Parser.PS_Unsat))) != 0) or _la==SMTLIBv2Parser.UndefinedSymbol:
            self.state = 1398
            self.symbol()
            self.state = 1403
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 1404
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Get_value_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `get_value_response` rule:
    a parenthesised list of valuation_pair entries."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def valuation_pair(self, i:int=None):
        # i is None -> all child pair contexts; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SMTLIBv2Parser.Valuation_pairContext)
        else:
            return self.getTypedRuleContext(SMTLIBv2Parser.Valuation_pairContext,i)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_get_value_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGet_value_response" ):
            listener.enterGet_value_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGet_value_response" ):
            listener.exitGet_value_response(self)
def get_value_response(self):
    """ANTLR-generated parse method for `get_value_response`:
    '(' valuation_pair+ ')'.

    The do/while-style loop (`while True` with a conditional break) enforces
    one-or-more pairs; each pair starts with '('.
    """
    localctx = SMTLIBv2Parser.Get_value_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 236, self.RULE_get_value_response)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1406
        self.match(SMTLIBv2Parser.ParOpen)
        self.state = 1408
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while True:
            self.state = 1407
            self.valuation_pair()
            self.state = 1410
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Stop once the next token is not the '(' of another pair.
            if not (_la==SMTLIBv2Parser.ParOpen):
                break
        self.state = 1412
        self.match(SMTLIBv2Parser.ParClose)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Specific_success_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `specific_success_response` rule.

    Exactly one of the eleven sub-rule accessors below returns a non-None
    context, depending on which alternative was parsed.
    """
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def check_sat_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Check_sat_responseContext,0)

    def echo_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Echo_responseContext,0)

    def get_assertions_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_assertions_responseContext,0)

    def get_assignment_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_assignment_responseContext,0)

    def get_info_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_info_responseContext,0)

    def get_model_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_model_responseContext,0)

    def get_option_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_option_responseContext,0)

    def get_proof_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_proof_responseContext,0)

    def get_unsat_assump_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_unsat_assump_responseContext,0)

    def get_unsat_core_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_unsat_core_responseContext,0)

    def get_value_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Get_value_responseContext,0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_specific_success_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterSpecific_success_response" ):
            listener.enterSpecific_success_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitSpecific_success_response" ):
            listener.exitSpecific_success_response(self)
def specific_success_response(self):
    """ANTLR-generated parse method for `specific_success_response`.

    Chooses among eleven response sub-rules via adaptive LL(*) prediction.
    """
    localctx = SMTLIBv2Parser.Specific_success_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 238, self.RULE_specific_success_response)
    try:
        self.state = 1425
        self._errHandler.sync(self)
        # Let the ATN interpreter pick the matching alternative.
        la_ = self._interp.adaptivePredict(self._input,93,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1414
            self.check_sat_response()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1415
            self.echo_response()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1416
            self.get_assertions_response()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1417
            self.get_assignment_response()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1418
            self.get_info_response()
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 1419
            self.get_model_response()
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 1420
            self.get_option_response()
            pass
        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 1421
            self.get_proof_response()
            pass
        elif la_ == 9:
            self.enterOuterAlt(localctx, 9)
            self.state = 1422
            self.get_unsat_assump_response()
            pass
        elif la_ == 10:
            self.enterOuterAlt(localctx, 10)
            self.state = 1423
            self.get_unsat_core_response()
            pass
        elif la_ == 11:
            self.enterOuterAlt(localctx, 11)
            self.state = 1424
            self.get_value_response()
            pass
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class General_responseContext(ParserRuleContext):
    """Generated parse-tree node for the `general_response` rule
    (success, a specific success response, unsupported, or an error form)."""
    __slots__ = 'parser'

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PS_Success(self):
        return self.getToken(SMTLIBv2Parser.PS_Success, 0)

    def specific_success_response(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.Specific_success_responseContext,0)

    def PS_Unsupported(self):
        return self.getToken(SMTLIBv2Parser.PS_Unsupported, 0)

    def ParOpen(self):
        return self.getToken(SMTLIBv2Parser.ParOpen, 0)

    def PS_Error(self):
        return self.getToken(SMTLIBv2Parser.PS_Error, 0)

    def string(self):
        return self.getTypedRuleContext(SMTLIBv2Parser.StringContext,0)

    def ParClose(self):
        return self.getToken(SMTLIBv2Parser.ParClose, 0)

    def getRuleIndex(self):
        return SMTLIBv2Parser.RULE_general_response

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterGeneral_response" ):
            listener.enterGeneral_response(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGeneral_response" ):
            listener.exitGeneral_response(self)
def general_response(self):
    """ANTLR-generated parse method for `general_response`.

    Alternatives: 'success' | specific_success_response | 'unsupported'
    | '(' 'error' string ')'.
    """
    localctx = SMTLIBv2Parser.General_responseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 240, self.RULE_general_response)
    try:
        self.state = 1435
        self._errHandler.sync(self)
        # Let the ATN interpreter pick the matching alternative.
        la_ = self._interp.adaptivePredict(self._input,94,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1427
            self.match(SMTLIBv2Parser.PS_Success)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1428
            self.specific_success_response()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1429
            self.match(SMTLIBv2Parser.PS_Unsupported)
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1430
            self.match(SMTLIBv2Parser.ParOpen)
            self.state = 1431
            self.match(SMTLIBv2Parser.PS_Error)
            self.state = 1432
            self.string()
            self.state = 1433
            self.match(SMTLIBv2Parser.ParClose)
            pass
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronise.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
|
# Random story generator: prints one sentence assembled from random fragments.
import random

# Fragment pools; one entry is drawn from each.
when = ['A long time ago', 'Yesterday', 'Before you were born', 'In the future', 'Before Thanos arrived']
who = ['Shazam', 'Iron Man', 'Batman', 'Superman', 'Captain America']
where = ['Arkham Asylum', 'Gotham City', 'Stark Tower', 'Bat Cave', 'Avengers HQ']
# Fixed typo: 'fpr' -> 'for'.
why = ['to eat a lot of cakes', 'to fight for justice', 'to steal ice cream', 'to dance']

# f-string is clearer and less error-prone than chained '+' concatenation.
print(f"{random.choice(when)}, {random.choice(who)} went to {random.choice(where)} {random.choice(why)}")
|
from django.contrib import messages
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import PasswordChangeForm
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from booking_portal.models import Announcement
@login_required
def change_password(request):
    """Let the logged-in user change their password.

    On a valid POST the new password is saved, the session hash is refreshed
    so the user stays logged in, and we redirect back to this view. On an
    invalid POST the bound form (with its errors) is re-rendered.
    """
    if request.method != 'POST':
        # Initial visit: show an unbound form.
        return render(request, 'accounts/change_password.html', {
            'form': PasswordChangeForm(request.user)
        })

    form = PasswordChangeForm(request.user, request.POST)
    if form.is_valid():
        user = form.save()
        # Refresh the session auth hash so changing the password
        # does not log the user out.
        update_session_auth_hash(request, user)
        messages.success(request, 'Your password was successfully updated!')
        return redirect('change_password')

    messages.error(request, 'Please correct the error below.')
    return render(request, 'accounts/change_password.html', {
        'form': form
    })
def about_us(request):
    """Render the static "about us" page."""
    template_name = 'about_us.html'
    return render(request, template_name)
def announcements(request):
    """Render all announcements, newest first."""
    newest_first = Announcement.objects.all().order_by('-date')
    return render(
        request,
        'announcements.html',
        context={'announcements': newest_first},
    )
|
"""
The file config.py contains all major configuration parameters for running the
DPCTGAN model.
"""
import os
from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.objects.conversion.log import converter as log_converter
import logger
# Parameters for the checkpoints. These settings control whether pre-trained
# models are used (if existing) or if they are trained again.
RETRAIN_CTGAN = False
RETRAIN_LSTM = False
OVERRIDE_EXISTING_RESULTS = True
CHECKPOINTS_ROOT = "checkpoints"

# Global parameters used in all parts of the model (LSTM and CTGAN)
BATCH_SIZE = 20

# CTGAN
EPOCHS_CTGAN = 100
ENABLED_DP_CTGAN = True  # DP = differential privacy
EPSILON_CTGAN = 1.0  # DP privacy budget (epsilon)
SAMPLING_BATCH_SIZE = 2 # Set to None to use global BATCH_SIZE
SAMPLING_MATCH_ACTIVITIES_MAX_TRIES = 10000

# LSTM — the DP settings deliberately mirror the CTGAN ones above.
EPOCHS_DPLSTM = 100
ENABLED_DP_LSTM = ENABLED_DP_CTGAN
EPSILON_LSTM_DP = EPSILON_CTGAN

# Dataset — exactly one entry is active; [0] selects the single
# uncommented path.
DATASET = (
    # 'datasets/ETM_Configuration2.xes',
    # 'datasets/financial_log.xes',
    # 'datasets/Sepsis Cases - Event Log.xes',
    'datasets/CoSeLoG.xes',
)[0]

# Logging — "{datetime}" placeholders are presumably filled in by the
# logger module; TODO confirm.
LOGGING_FOLDER = "campaign/ID10"
LOG_FILE = os.path.join(LOGGING_FOLDER, "{datetime}_test.py.logs")
SUMMARY_LOG_FILE = os.path.join(LOGGING_FOLDER, "{datetime}_summary.log")

# =============== Functions =================
__dataset_df = None  # lazy cache for get_dataset_df(); populated on first call
def get_dataset_df():
    """Return a copy of the dataframe holding the original dataset.

    The XES log is imported and converted to a dataframe only on the first
    call; every later call reuses the cached dataframe and just hands back
    a fresh copy of it.
    """
    global __dataset_df
    if __dataset_df is None:
        # First call: import the XES event log and convert it once.
        event_log = xes_importer.apply(DATASET)
        __dataset_df = log_converter.apply(
            event_log, variant=log_converter.Variants.TO_DATA_FRAME)
    return __dataset_df.copy()
def get_dataset_basename(dataset=None):
    """ Get the name of the dataset used, without path information and
    without file type extension.

    Args:
        dataset: Optional dataset path; defaults to the configured DATASET.

    Returns:
        The file's basename with its final extension stripped.
    """
    path = DATASET if dataset is None else dataset
    # Bug fix: the previous `.split(".")[0]` truncated basenames containing
    # dots (e.g. "a.b.xes" -> "a"); splitext removes only the extension.
    return os.path.splitext(os.path.basename(path))[0]
def log_parameter_summary(main_logfile=True, summary=False):
    """Write the current configuration values to the log destinations.

    Args:
        main_logfile: Forwarded to logger.log; presumably selects the main
            log file — TODO confirm against the logger module.
        summary: Forwarded to logger.log; presumably selects the summary
            log file — TODO confirm against the logger module.
    """
    logger.log("Main parameter summary:", main_logfile, summary)
    logger.sep(main_logfile=main_logfile, summary=summary)
    # Override existing checkpoints?
    logger.log("RETRAIN_CTGAN : " + str(RETRAIN_CTGAN), main_logfile, summary)
    logger.log("RETRAIN_LSTM : " + str(RETRAIN_LSTM), main_logfile, summary)
    logger.log("OVERRIDE_EXISTING_RESULTS : " + str(OVERRIDE_EXISTING_RESULTS), main_logfile, summary)
    # Global parameters
    logger.log("BATCH_SIZE : " + str(BATCH_SIZE), main_logfile, summary)
    # CTGAN
    logger.log("EPOCHS_CTGAN : " + str(EPOCHS_CTGAN), main_logfile, summary)
    logger.log("ENABLED_DP_CTGAN : " + str(ENABLED_DP_CTGAN), main_logfile, summary)
    # Bug fix: RETRAIN_CTGAN was logged a second time here; it is already
    # reported once in the checkpoint section above.
    logger.log("EPSILON_CTGAN : " + str(EPSILON_CTGAN), main_logfile, summary)
    logger.log("SAMPLING_BATCH_SIZE : " + str(SAMPLING_BATCH_SIZE), main_logfile, summary)
    logger.log("SAMPLING_MATCH_ACTIVITIES_MAX_TRIES : " + str(SAMPLING_MATCH_ACTIVITIES_MAX_TRIES), main_logfile, summary)
    # LSTM
    logger.log("EPOCHS_DPLSTM : " + str(EPOCHS_DPLSTM), main_logfile, summary)
    logger.log("ENABLED_DP_LSTM : " + str(ENABLED_DP_LSTM), main_logfile, summary)
    logger.log("EPSILON_LSTM_DP : " + str(EPSILON_LSTM_DP), main_logfile, summary)
    # Dataset
    logger.log("DATASET : " + str(DATASET), main_logfile, summary)
    logger.sep(main_logfile=main_logfile, summary=summary)
|
from .quantum_art import QuantumArt
|
import serial

# Program a DIY serial-controlled keypad: each 4-byte packet is
# (0xb1, command, button-index, value).
# Bug fix: pyserial's write() on Python 3 requires bytes-like data; the
# original passed str literals, which raises TypeError. The inline byte
# triples in the comments were also corrected to match the payloads.
ser = serial.Serial('/dev/tty.usbmodem7071', 115200, timeout=1)
ser.write(b"\xb1\x81\x01A")     # set key-press 1st button = 'A'    177,129,1,'A'
ser.write(b"\xb1\x81\x02a")     # set key-press 2nd button = 'a'    177,129,2,'a'
ser.write(b"\xb1\x82\x011")     # set key-release 1st button = '1'  177,130,1,'1'
ser.write(b"\xb1\x82\x02\x00")  # set key-release 2nd button = none 177,130,2,0
ser.write(b"\xb1\x81\x00\x7b")  # set de-bounce time to 123ms       177,129,0,123
ser.close()
|
from .bearer import TaccApisBearer
__all__ = ['TaccApisBearerRefresh']
class TaccApisBearerRefresh(TaccApisBearer):
    """Base class for Tapis API commands that require both an access token
    and a refresh token.
    """

    def add_common_parser_arguments(self, parser):
        """Add the inherited common arguments plus a --refresh-token option.

        Args:
            parser: argparse-style parser to extend.

        Returns:
            The extended parser.
        """
        # Bug fix: the original called super(TaccApisBearer, self), which
        # starts MRO lookup *above* the direct base class and therefore
        # skipped TaccApisBearer's own add_common_parser_arguments (where the
        # access-token argument is presumably added — confirm against base).
        parser = super(TaccApisBearerRefresh,
                       self).add_common_parser_arguments(parser)
        parser.add_argument(
            '-z',
            '--refresh-token',
            dest='refresh_token',
            type=str,
            help="{0} {1}".format(self.constants.PLATFORM,
                                  self.constants.REFRESH_TOKEN))
        return parser
|
import sys
import os
from PyQt5 import QtWidgets
from printwindow import Ui_Dialog as w_rin
from mainwindow import Ui_Dialog as w_main
from extendwindow import Ui_Dialog as w_ext
# Button/label captions for each UI screen. Each entry is
# [screen title, button 1, button 2, button 3, button 4]; the strings are
# shown to the user and must stay in Korean.
gui_textlist = [
    ["프로그램 재시작 필요", "-", "-", "-", "-"],
    ["메인 화면", "음성인식 프린트\n시작", "녹음된 음성파일을 이용하여\n프린트", "문서 파일을 이용하여\n프린트", "뉴스기사\n출력하기"],
    ["프린트", "새로 기록", "음성 재안내", "기존파일에 이어서 기록", "뒤로가기"],
    ["녹음파일로 기록", "새로 기록", "음성 재안내", "기존파일에 이어서 기록", "뒤로가기"],
    ["문서파일의 내용을 기록", "문서 선택", "음성 재안내", "기존파일에 이어서 기록", "뒤로가기"],
    ["--기타기능--", "--기타기능--", "--기타기능--", "--기타기능--", "뒤로가기"],
    ["음성프린트", "입력 시작", "음성 재안내", "정정 및 수정", "입력 종료"],
    ["입력중", "이 내용으로 기록", "음성 재안내", "재입력", "입력 종료"],
    ["입력중", "이 음성 파일로 기록 시작", "음성 재안내", "", "입력 종료"],
    ["파일선택", "위", "파일선택", "아래", "뒤로가기"]
]
# NOTE(review): this controller class looks unfinished; see the inline
# review notes below before relying on it.
class mode(object):
    # UI mode controller: owns the current mode name and the window object.
    def __init__(self, mode, dis):
        self.mode = ''
        app = QtWidgets.QApplication(sys.argv)
        self.window = dis(30)
        # NOTE(review): sys.exit(app.exec_()) blocks until the Qt event loop
        # terminates and then raises SystemExit — everything below this line
        # in __init__ is unreachable. Confirm whether this call should be
        # moved to main() instead.
        sys.exit(app.exec_())
        # NOTE(review): these dict values CALL set_mode/refresh_ui eagerly at
        # construction time and store the return values; they were probably
        # meant to store callables (e.g. lambdas) to invoke on demand.
        self.main_mode_list = {
            'main' : self.window.set_mode('main'),
            'print_main' : self.window.refresh_ui(gui_textlist[2]),
            'record_main' : self.window.refresh_ui(gui_textlist[3]),
            'doc_main' : self.window.refresh_ui(gui_textlist[4]),
            'news_main' : self.window.refresh_ui(gui_textlist[5]),
            'error' : self.window.refresh_ui(gui_textlist[0])
        }
        self.print_mode_list = {
            'print_first' : self.window.refresh_ui(gui_textlist[5]),
            'print_ready' : self.window.refresh_ui(gui_textlist[5]),
            'is_right' : self.window.refresh_ui(gui_textlist[5]),
            'next_line' : self.window.refresh_ui(gui_textlist[5]),
            'print_end' : self.window.refresh_ui(gui_textlist[5]),
            'error' : self.window.refresh_ui(gui_textlist[0])
        }
        self.extend_mode_list = {
            'sel_flie' : self.window.refresh_ui(gui_textlist[5]),
            'is_right' : self.window.refresh_ui(gui_textlist[5]),
            'error' : self.window.refresh_ui(gui_textlist[0])
        }
        print('선언완료')
        self.set_mode(mode)
        self.set_display(dis)
    def set_mode(self, mode):
        # Switch to `mode` if it differs from the current one.
        print('----------',mode, self.mode)
        if self.mode != mode:
            for mode_list in [self.main_mode_list, self.print_mode_list, self.extend_mode_list]:
                if mode in mode_list:
                    # NOTE(review): returning here means `self.mode = mode`
                    # below never runs for a known mode, so the current mode
                    # is never updated — confirm intended control flow.
                    return mode_list.get(mode, mode_list['error'])
            self.mode = mode
    def set_display(self, dis):
        # Replace the window object (the display backend).
        self.window = dis
def main():
    """Entry point: construct the UI controller in the main mode."""
    # The constructor drives the whole UI itself; the previous unused local
    # binding (`ui = ...`) was dropped.
    mode('main', w_main)


if __name__ == "__main__":
    main()
|
import os
import time
from typing import Dict
import configargparse
import logging
from pathlib import Path
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime
from collection_helper import (get_inventory, write_output_to_file, custom_logger, RetryingNetConnect,
CollectionStatus, AnsibleOsToNetmikoOs, get_show_commands, parse_genie)
def get_show_data(device_session: dict, device_name: str, output_path: str, cmd_dict: dict, logger) -> Dict:
    """
    Show command collector for all operating systems.

    Connects to the device, runs every command listed in cmd_dict (grouped
    by command group, with group-specific timeouts), writes each successful
    command's output to a file, and returns a status dict with keys:
    name, status (CollectionStatus), failed_commands, message.
    """
    start_time = time.time()
    logger.info(f"Trying to connect to {device_name} at {start_time}")
    status = {
        "name": device_name,
        "status": CollectionStatus.FAIL,
        "failed_commands": [],
        "message": "",
    }
    partial_collection = False
    # todo: figure out how to get the logger name from the logger object that
    # is passed in. Current setup just uses the device name, so this works.
    try:
        net_connect = RetryingNetConnect(device_name, device_session, device_name)
    except Exception as e:
        status['message'] = f"Connection failed. Exception {str(e)}"
        status['failed_commands'].append("All")
        return status
    logger.info(f"Running show commands for {device_name} at {time.time()}")
    for cmd_group in cmd_dict.keys():
        cmd_timer = 240  # general command timeout: 4 minutes
        if cmd_group == "bgp_v4":
            # todo: if you need per-neighbor RIB collection, write an OS specific function modeled after get_nxos_data
            # todo: if VRF specific BGP data collection requires vrf name in command, write an OS specific function
            # the generic show_data function will just grab BGP neighbors, summary, and RIBs for default and
            # named VRF
            cmd_timer = 1200  # BGP command timeout: 20 minutes
            cmd_list = []
            for scope, scope_cmds in cmd_dict['bgp_v4'].items():
                if scope not in ["global", "vrf"]:
                    logger.error(f"Unknown {scope} with commands {scope_cmds} under bgp_v4 command dict")
                    continue
                for subscope, cmds in scope_cmds.items():
                    if subscope == "neighbor_ribs":
                        # Per-neighbor RIBs need OS-specific neighbor discovery.
                        logger.error(f"BGP neighbor RIB collection not supported on {device_name}")
                        continue
                    else:
                        cmd_list.extend(cmds)
        # handle global and vrf specific IPv4 route commands
        elif cmd_group == "routes_v4":
            cmd_timer = 1200  # RIB command timeout: 20 minutes
            cmd_list = []
            for scope, cmds in cmd_dict['routes_v4'].items():
                cmd_list.extend(cmds)
        else:
            cmd_list = cmd_dict.get(cmd_group)
        for cmd in cmd_list:
            logger.info(f"Running {cmd} on {device_name}")
            try:
                output = net_connect.run_command(cmd, cmd_timer)
                logger.debug(f"Command output: {output}")
            except Exception as e:
                status['message'] = f"{cmd} was last command to fail. Exception {str(e)}"
                status['failed_commands'].append(cmd)
                logger.error(f"{cmd} failed")
            else:
                write_output_to_file(device_name, output_path, cmd, output)
                partial_collection = True
    # Bug fix: the original contained a second, duplicated
    # status/close/return epilogue; the first copy called net_connect.close()
    # without an exception guard and made the guarded copy unreachable.
    # Only the guarded epilogue (matching get_xr_data) is kept.
    end_time = time.time()
    logger.info(f"Completed operational data collection for {device_name} in {end_time - start_time:.2f} seconds")
    if len(status['failed_commands']) == 0:
        status['status'] = CollectionStatus.PASS
        status['message'] = "Collection successful"
    elif partial_collection:
        status['status'] = CollectionStatus.PARTIAL
        status['message'] = "Collection partially successful"
    try:
        net_connect.close()
    except Exception as e:
        logger.exception(f"Exception when closing netmiko connection: {str(e)}")
    return status
def get_nxos_data(device_session: dict, device_name: str, output_path: str, cmd_dict: dict, logger) -> Dict:
    """
    Show data collection for Cisco NXOS devices.

    Like get_show_data, but first discovers per-VRF BGP neighbors (via
    "show bgp vrf all all summary" parsed with Cisco Genie) so that
    per-neighbor RIB command templates ("_neigh_", "_vrf_") can be expanded.
    Returns a status dict with keys: name, status, failed_commands, message.
    """
    device_os = "nxos"  # used for cisco genie parsers
    start_time = time.time()
    logger.info(f"Trying to connect to {device_name} at {start_time}")
    status = {
        "name": device_name,
        "status": CollectionStatus.FAIL,
        "failed_commands": [],
        "message": "",
    }
    partial_collection = False
    # todo: figure out how to get the logger name from the logger object that
    # is passed in. Current setup just uses the device name, so this works.
    try:
        net_connect = RetryingNetConnect(device_name, device_session, device_name)
    except Exception as e:
        status['message'] = f"Connection failed. Exception {str(e)}"
        status['failed_commands'].append("All")
        return status
    logger.info(f"Running show commands for {device_name} at {time.time()}")
    for cmd_group in cmd_dict.keys():
        cmd_timer = 240  # general command timeout: 4 minutes
        if cmd_group == "bgp_v4":
            # need to get the list of BGP neighbors per VRF in order to collect per neighbor RIBs
            # The mechanism for this is to run the command "show bgp vrf all all summary" and
            # use Cisco Genie parser to extract the list of { vrf, bgp_neighbor } maps
            #
            # rather than rely on this command being in the command list, if there are any commands
            # under "bgp_v4", we will run this command.
            #
            cmd = "show bgp vrf all all summary"
            cmd_timer = 300  # BGP neighbor discovery timeout: 5 minutes
            bgp_neighbors = {}
            cmd_list = []
            logger.info(f"Running {cmd} on {device_name}")
            try:
                output = net_connect.run_command(cmd, cmd_timer)
                logger.debug(f"Command output: {output}")
            except Exception as e:
                status['message'] = f"{cmd} was last command to fail. Exception {str(e)}"
                status['failed_commands'].append(cmd)
                logger.error(f"{cmd} failed")
            else:
                write_output_to_file(device_name, output_path, cmd, output)
                logger.info(f"Attempting to parse output of {cmd} on {device_name}")
                parsed_output = parse_genie(device_name, output, cmd, device_os, logger)
                logger.debug(f"Parsed Command output: {parsed_output}")
                if parsed_output is not None:
                    bgp_neighbors = parsed_output
                    partial_collection = True
            cmd_timer = 1200  # BGP RIB command timeout: 20 minutes
            for scope, scope_cmds in cmd_dict['bgp_v4'].items():
                if scope == "global":
                    for subscope, cmds in scope_cmds.items():
                        if subscope == "neighbor_ribs":
                            if len(bgp_neighbors) == 0:
                                logger.info(f"No bgp neighbors found for {device_name}")
                                continue
                            for vrf, vrf_details in bgp_neighbors['vrf'].items():
                                if vrf == 'default':
                                    for bgp_neighbor in vrf_details['neighbor'].keys():
                                        if ":" not in bgp_neighbor:  # skip IPv6 peers
                                            for cmd in cmds:
                                                _cmd = cmd.replace("_neigh_", bgp_neighbor)
                                                cmd_list.append(_cmd)
                        else:
                            cmd_list.extend(cmds)
                elif scope == "vrf":
                    for subscope, cmds in scope_cmds.items():
                        if subscope == "neighbor_ribs":
                            if len(bgp_neighbors) == 0:
                                logger.info(f"No bgp neighbors found for {device_name}")
                                continue
                            for vrf, vrf_details in bgp_neighbors['vrf'].items():
                                # ignore default VRF since it is already taken care of
                                # ignore management VRF - mgmt and management are common names for it
                                if vrf.lower() not in ['default', 'mgmt', 'management']:
                                    for bgp_neighbor in vrf_details['neighbor'].keys():
                                        if ":" not in bgp_neighbor:  # skip IPv6 peers
                                            for cmd in cmds:
                                                _cmd = cmd.replace("_neigh_", bgp_neighbor)
                                                _cmd = _cmd.replace("_vrf_", vrf)
                                                cmd_list.append(_cmd)
                        else:
                            cmd_list.extend(cmds)
                else:
                    logger.error(f"Unknown {scope} with commands {scope_cmds} under bgp_v4 command dict")
        # handle global and vrf specific IPv4 route commands
        elif cmd_group == "routes_v4":
            cmd_timer = 1200  # RIB command timeout: 20 minutes
            cmd_list = []
            for scope, cmds in cmd_dict['routes_v4'].items():
                cmd_list.extend(cmds)
        else:
            cmd_list = cmd_dict.get(cmd_group)
        for cmd in cmd_list:
            logger.info(f"Running {cmd} on {device_name}")
            try:
                output = net_connect.run_command(cmd, cmd_timer)
                logger.debug(f"Command output: {output}")
            except Exception as e:
                status['message'] = f"{cmd} was last command to fail. Exception {str(e)}"
                status['failed_commands'].append(cmd)
                logger.error(f"{cmd} failed")
            else:
                write_output_to_file(device_name, output_path, cmd, output)
                partial_collection = True
    # Bug fix: the original contained a second, duplicated
    # status/close/return epilogue; the first copy called net_connect.close()
    # without an exception guard and made the guarded copy unreachable.
    # Only the guarded epilogue (matching get_xr_data) is kept.
    end_time = time.time()
    logger.info(f"Completed operational data collection for {device_name} in {end_time - start_time:.2f} seconds")
    if len(status['failed_commands']) == 0:
        status['status'] = CollectionStatus.PASS
        status['message'] = "Collection successful"
    elif partial_collection:
        status['status'] = CollectionStatus.PARTIAL
        status['message'] = "Collection partially successful"
    try:
        net_connect.close()
    except Exception as e:
        logger.exception(f"Exception when closing netmiko connection: {str(e)}")
    return status
def get_xr_data(device_session: dict, device_name: str, output_path: str, cmd_dict: dict, logger) -> Dict:
    """
    Show data collector for Cisco IOS-XR devices.

    Like the generic collector, but first discovers BGP neighbors for the
    default VRF ("show bgp all all neighbors") and non-default VRFs
    ("show bgp vrf all neighbors"), parsed with Cisco Genie, so that
    per-neighbor RIB command templates ("_neigh_", "_vrf_") can be expanded.
    Returns a status dict with keys: name, status, failed_commands, message.
    """
    device_os = "iosxr" # used for cisco genie parsers
    start_time = time.time()
    logger.info(f"Trying to connect to {device_name} at {start_time}")
    status = {
        "name": device_name,
        "status": CollectionStatus.FAIL,
        "failed_commands": [],
        "message": "",
    }
    partial_collection = False
    # todo: figure out to get logger name from the logger object that is passed in.
    # current setup just uses the device name for the logger name, so this works
    try:
        net_connect = RetryingNetConnect(device_name, device_session, device_name)
    except Exception as e:
        status['message'] = f"Connection failed. Exception {str(e)}"
        status['failed_commands'].append("All")
        logger.error(f"Connection failed")
        return status
    logger.info(f"Running show commands for {device_name} at {time.time()}")
    for cmd_group in cmd_dict.keys():
        cmd_timer = 240 # set the general command timeout to 4 minutes
        if cmd_group == "bgp_v4":
            # need to get the list of BGP neighbors per VRF in order to collect per neighbor RIBs
            # need to run a command for default VRF "show bgp all all neighbors" and one for non-default
            # VRFs "show bgp vrf all neighbors" and then parse the output using Cisco genie parser
            #
            # rather than rely on these commands being in the command list, if there are any commands
            # under "bgp_v4", we will run this command.
            #
            # get BGP neighbors for default VRF
            cmd_timer = 300 # set BGP neighbor command timeout to 5 minutes
            global_bgp_neighbors = {}
            vrf_bgp_neighbors = {}
            cmd_list = []
            # get BGP neighbors for default VRF
            cmd = "show bgp all all neighbors"
            logger.info(f"Running {cmd} on {device_name}")
            try:
                output = net_connect.run_command(cmd, cmd_timer)
                logger.debug(f"Command output: {output}")
            except Exception as e:
                status['message'] = f"{cmd} was last command to fail. Exception {str(e)}"
                status['failed_commands'].append(cmd)
                logger.error(f"{cmd} failed")
            else:
                write_output_to_file(device_name, output_path, cmd, output)
                logger.info(f"Attempting to parse output of {cmd} on {device_name}")
                parsed_output = parse_genie(device_name, output, cmd, device_os, logger)
                logger.debug(f"Parsed Command output: {parsed_output}")
                if parsed_output is not None:
                    global_bgp_neighbors = parsed_output
                    partial_collection = True
            # get BGP neighbors for non-default VRFs
            cmd = "show bgp vrf all neighbors"
            logger.info(f"Running {cmd} on {device_name}")
            try:
                output = net_connect.run_command(cmd, cmd_timer)
                logger.debug(f"Command output: {output}")
            except Exception as e:
                status['message'] = f"{cmd} was last command to fail. Exception {str(e)}"
                status['failed_commands'].append(cmd)
                logger.error(f"{cmd} failed")
            else:
                write_output_to_file(device_name, output_path, cmd, output)
                logger.info(f"Attempting to parse output of {cmd} on {device_name}")
                parsed_output = parse_genie(device_name, output, cmd, device_os, logger)
                logger.debug(f"Parsed Command output: {parsed_output}")
                if parsed_output is not None:
                    vrf_bgp_neighbors = parsed_output
                    partial_collection = True
            # Expand the per-neighbor RIB command templates using the
            # discovered neighbors; non-template commands pass through as-is.
            cmd_timer = 1200 # set the BGP RIB command timeout to 20 minutes
            for scope, scope_cmds in cmd_dict['bgp_v4'].items():
                if scope == "global":
                    for subscope, cmds in scope_cmds.items():
                        if subscope == "neighbor_ribs":
                            if len(global_bgp_neighbors) == 0:
                                logger.info(f"No bgp neighbors found for default VRF on {device_name}")
                                continue
                            for vrf, vrf_details in global_bgp_neighbors['instance']['all']['vrf'].items():
                                for bgp_neighbor in vrf_details['neighbor'].keys():
                                    if ":" not in bgp_neighbor: # skip ipv6 peers
                                        for cmd in cmds:
                                            _cmd = cmd.replace("_neigh_", bgp_neighbor)
                                            cmd_list.append(_cmd)
                        else:
                            cmd_list.extend(cmds)
                elif scope == "vrf":
                    for subscope, cmds in scope_cmds.items():
                        if subscope == "neighbor_ribs":
                            if len(vrf_bgp_neighbors) == 0:
                                logger.info(f"No bgp neighbors found for non default VRFs on {device_name}")
                                continue
                            for vrf, vrf_details in vrf_bgp_neighbors['instance']['all']['vrf'].items():
                                # ignore default VRF since it is already taken care of
                                # ignore management VRF - mgmt and management are common names for it
                                if vrf.lower() in ["mgmt", "management", "default"]:
                                    continue
                                for bgp_neighbor in vrf_details['neighbor'].keys():
                                    if ":" not in bgp_neighbor: # skip ipv6 peers
                                        for cmd in cmds:
                                            _cmd = cmd.replace("_neigh_", bgp_neighbor)
                                            _cmd = _cmd.replace("_vrf_", vrf)
                                            cmd_list.append(_cmd)
                        else:
                            cmd_list.extend(cmds)
                else:
                    logger.error(f"Unknown {scope} with commands {scope_cmds} under bgp_v4 command dict")
        # handle global and vrf specific IPv4 route commands
        elif cmd_group == "routes_v4":
            cmd_timer = 1200 # set the RIB command timeout to 20 minutes
            cmd_list = []
            for scope, cmds in cmd_dict['routes_v4'].items():
                cmd_list.extend(cmds)
        else:
            cmd_list = cmd_dict.get(cmd_group)
        # Run every command accumulated for this command group.
        for cmd in cmd_list:
            logger.info(f"Running {cmd} from {cmd_group} on {device_name}")
            try:
                output = net_connect.run_command(cmd, cmd_timer)
                logger.debug(f"Command output: {output}")
            except Exception as e:
                status['message'] = f"{cmd} was last command to fail. Exception {str(e)}"
                status['failed_commands'].append(cmd)
                logger.error(f"{cmd} failed")
            else:
                write_output_to_file(device_name, output_path, cmd, output)
                partial_collection = True
    # Summarize the run: PASS if nothing failed, PARTIAL if at least one
    # command succeeded, otherwise the initial FAIL stands.
    end_time = time.time()
    logger.info(f"Completed operational data collection for {device_name} in {end_time-start_time:.2f} seconds")
    if len(status['failed_commands']) == 0:
        status['status'] = CollectionStatus.PASS
        status['message'] = "Collection successful"
    elif partial_collection:
        status['status'] = CollectionStatus.PARTIAL
        status['message'] = "Collection partially successful"
    try:
        net_connect.close()
    except Exception as e:
        logger.exception(f"Exception when closing netmiko connection: {str(e)}")
        pass
    return status
# Maps a netmiko device_type to the collector that knows how to gather show
# data for it; OSes without a specialized collector use the generic one.
OS_SHOW_COLLECTOR_FUNCTION = {
    "a10": get_show_data,
    "arista_eos": get_show_data,
    "checkpoint_gaia": get_show_data,
    "cisco_asa": get_show_data,
    "cisco_ios": get_show_data,
    "cisco_nxos": get_nxos_data,
    "cisco_xr": get_xr_data,
    "juniper_junos": get_show_data
}
def main(inventory: Dict, max_threads: int, username: str, password: str, snapshot_name: str,
         collection_directory: str, commands_file: str, log_level: int) -> None:
    """Fan out show-data collection over all inventory devices via a thread pool.

    For every host in every inventory group, picks the OS-specific collector,
    builds a netmiko session dict, submits the collection task, then polls
    until all tasks finish and prints a summary of failed devices.
    """
    pool = ThreadPoolExecutor(max_threads)
    future_list = []
    task_info_list = []
    start_time = time.time()
    print(f"### Starting operational data collection: {time.strftime('%Y-%m-%d %H:%M %Z', time.localtime(start_time))}")
    commands = None
    # NOTE(review): if commands_file is None, `commands` stays None and
    # commands.get(grp, None) below raises AttributeError — confirm that
    # a commands file is effectively mandatory upstream.
    if commands_file is not None:
        commands = get_show_commands(commands_file)
    for grp, grp_data in inventory.items():
        # Translate the Ansible OS name into a netmiko device type.
        device_os = AnsibleOsToNetmikoOs.get(grp_data['vars'].get('ansible_network_os'), None)
        if device_os is None:
            # todo: setup global logger to log this message to, for now print will get it into the bash script logs
            print(f"Unsupported Ansible OS {grp_data['vars'].get('ansible_network_os')}, skipping...")
            continue
        op_func = OS_SHOW_COLLECTOR_FUNCTION.get(device_os)
        if op_func is None:
            print(f"No collection function for {device_os}, skipping...")
            continue
        cmd_dict = commands.get(grp, None)
        if cmd_dict is None:
            print(f"No command dictionary for devices in {grp}, skipping...")
            continue
        for device_name, device_vars in grp_data.get('hosts').items():
            # Per-device log file under the snapshot's log directory.
            log_file = f"{collection_directory}/logs/{snapshot_name}/{device_name}/show_data_collector.log"
            os.makedirs(os.path.dirname(log_file), exist_ok=True)
            logger = custom_logger(device_name, log_file, log_level)
            logger.info(f"Starting collection for {device_name}")
            logger.info(f"Device vars are {device_vars}")
            # by default use the device name specified in inventory
            _host = device_name
            # override it with the IP address if specified in the inventory
            if device_vars is not None and device_vars.get("ansible_host", None) is not None:
                _host = device_vars.get("ansible_host")
                logger.info(f"Using IP {_host} to connect to {device_name}")
            # create device_session for netmiko connection handler
            device_session = {
                "device_type": device_os,
                "host": _host,
                "username": username,
                "password": password,
                "session_log": f"{collection_directory}/logs/{snapshot_name}/{device_name}/netmiko_session.log",
                "fast_cli": False
            }
            output_path = f"{collection_directory}/{snapshot_name}/show/"
            # before sending the task save some information about each task, so you can get insight
            # into which devices are taking too long to complete
            task_info_list.append((device_name, op_func, device_session['device_type'], device_session['host']))
            future = pool.submit(op_func, device_session=device_session, device_name=device_name,
                                 output_path=output_path, cmd_dict=cmd_dict, logger=logger)
            future_list.append(future)
    # Poll every 10s until all tasks are done; once fewer than 10 remain,
    # print the laggards so stuck devices are visible in the script logs.
    while True:
        time.sleep(10)
        running_tasks = []
        for f, task_info in zip(future_list, task_info_list):
            if not f.done():
                running_tasks.append(task_info)
        if len(running_tasks) == 0:
            break  # all tasks finished
        elif len(running_tasks) < 10:
            # now there's only less than 10 tasks running, some might be stuck, log things about the running task
            for task_info in running_tasks:
                print(f"{task_info}")
    # and then the rest is the same, except you don't need as_completed
    failed_devices = [future.result()['name'] for future in future_list if
                      future.result()['status'] != CollectionStatus.PASS]
    # # TODO: revisit exception handling
    # failed_devices = [future.result()['name'] for future in as_completed(future_list) if
    #                   future.result()['status'] != CollectionStatus.PASS]
    end_time = time.time()
    if len(failed_devices) != 0:
        print(f"### Operational data collection failed for {len(failed_devices)} devices: {failed_devices}")
    print(f"### Completed operational data collection: {time.strftime('%Y-%m-%d %H:%M %Z', time.localtime(end_time))}")
    print(f"### Total operational data collection time: {end_time - start_time} seconds")
if __name__ == "__main__":
    # Parse CLI/env arguments, validate paths, then hand off to main().
    parser = configargparse.ArgParser()
    parser.add_argument("--inventory", help="Absolute path to inventory file to use", required=True)
    parser.add_argument("--username", help="Username to access devices", required=True, env_var="BF_COLLECTOR_USER")
    parser.add_argument("--password", help="Password to access devices", required=True, env_var="BF_COLLECTOR_PASSWORD")
    parser.add_argument("--max-threads", help="Max threads for parallel collection. Default = 10, Maximum is 100",
                        type=int, default=10)
    parser.add_argument("--collection-dir", help="Directory for data collection", required=True)
    parser.add_argument("--snapshot-name", help="Name for the snapshot directory",
                        default=datetime.now().strftime("%Y%m%d_%H:%M:%S"))
    parser.add_argument("--command-file", help="YAML file with list of commands per OS", default=None)
    parser.add_argument("--log-level", help="Log level", default="warn")
    args = parser.parse_args()
    # NOTE(review): logging._nameToLevel is a private logging attribute;
    # consider a public alternative (e.g. getattr(logging, name)).
    log_level = logging._nameToLevel.get(args.log_level.upper())
    if not log_level:
        raise Exception("Invalid log level: {}".format(args.log_level))
    # check if inventory file exists
    if not Path(args.inventory).exists():
        raise Exception(f"{args.inventory} does not exist")
    inventory = get_inventory(args.inventory)
    # the collection directory must already exist; it is not created here
    if not Path(args.collection_dir).exists():
        raise Exception(f"{args.collection_dir} does not exist. Please create the directory and re-run the script")
    main(inventory, args.max_threads, args.username, args.password, args.snapshot_name, args.collection_dir,
         args.command_file, log_level)
|
from python_framework import FrameworkModel
# Entity-name constants used when referring to the framework's models.
MESSAGE = 'Message'
SESSION = 'Session'
CONTACT = 'Contact'
# Shared model object obtained from the framework at import time.
MODEL = FrameworkModel.getModel()
|
from .main import aggregate_s3_logs_main
# Package entry point: run the aggregation as soon as this module executes
# (e.g. via `python -m <package>`).
aggregate_s3_logs_main()
|
from datetime import datetime
import time
import json
def log(message, when=None):
    """ Print a message together with a timestamp.

    Args:
        message: Message to be printed.
        when: datetime of when the message occured.
            Defaults to present time
    """
    if when is None:
        when = datetime.now()
    print('%s: %s' % (message, when))
def decode(data, default=None):
    """ Parse JSON data from a string.

    Args:
        data: JSON data to decode.
        default: Value to return if decoding fails.
            Defaults to an empty dictionary
    """
    # Build the fallback per call so callers never share one dict instance.
    fallback = {} if default is None else default
    try:
        return json.loads(data)
    except ValueError:
        return fallback
if __name__ == '__main__':
    # Demo: timestamped logging one second apart.
    log('Hi')
    time.sleep(1)
    log('Hi again!')
    # Demo: each failed decode hands back an independent default dict,
    # so mutating one result does not leak into the other.
    foo = decode('bad data')
    foo['stuff'] = 5
    bar = decode('also bad')
    bar['meep'] = 1
    print('Foo:', foo)
    print('Bar:', bar)
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.utils import (
optendersresource, apply_patch
)
from openprocurement.api.utils import (
context_unpack, get_now, json_view
)
from openprocurement.tender.openeu.views.bid import (
TenderBidResource as BaseResourceEU
)
from openprocurement.tender.competitivedialogue.constants import (
CD_EU_TYPE, CD_UA_TYPE
)
from openprocurement.tender.core.validation import (
validate_patch_bid_data,
validate_update_deleted_bid,
validate_bid_operation_period,
validate_bid_operation_not_in_tendering,
validate_bid_status_update_not_to_pending
)
from openprocurement.tender.competitivedialogue.validation import validate_bid_status_update_not_to_pending_or_draft
# NOTE(review): this decorator wraps the function once; the resource classes
# below wrap it AGAIN with json_view (and a slightly different validator set,
# including validate_patch_bid_data which is absent here) — confirm the
# double wrapping and the validator mismatch are intentional.
@json_view(validators=(validate_bid_operation_not_in_tendering, validate_bid_operation_period, validate_update_deleted_bid, validate_bid_status_update_not_to_pending_or_draft))
def patch_bid_first_stage(self):
    """Update of proposal
    Example request to change bid proposal:
    .. sourcecode:: http
        PATCH /tenders/4879d3f8ee2443169b5fbbc9f89fa607/bids/71b6c23ed8944d688e92a31ec8c3f61a HTTP/1.1
        Host: example.com
        Accept: application/json
        {
            "data": {
                "value": {
                    "amount": 600
                }
            }
        }
    And here is the response to be expected:
    .. sourcecode:: http
        HTTP/1.0 200 OK
        Content-Type: application/json
        {
            "data": {
                "value": {
                    "amount": 600,
                    "currency": "UAH",
                    "valueAddedTaxIncluded": true
                }
            }
        }
    """
    # Mark the tender as not modified before applying the patch.
    self.request.validated['tender'].modified = False
    if apply_patch(self.request, src=self.request.context.serialize()):
        self.LOGGER.info('Updated tender bid {}'.format(self.request.context.id),
                         extra=context_unpack(self.request, {'MESSAGE_ID': 'tender_bid_patch'}))
        return {'data': self.request.context.serialize("view")}
@optendersresource(name='{}:Tender Bids'.format(CD_EU_TYPE),
                   collection_path='/tenders/{tender_id}/bids',
                   path='/tenders/{tender_id}/bids/{bid_id}',
                   procurementMethodType=CD_EU_TYPE,
                   description="Competitive Dialogue EU bids")
class CompetitiveDialogueEUBidResource(BaseResourceEU):
    """ Tender EU bids """
    # Override the inherited PATCH view with the first-stage handler,
    # wired with the competitive-dialogue validator set.
    patch = json_view(content_type="application/json",
                      permission='edit_bid',
                      validators=(validate_patch_bid_data, validate_bid_operation_not_in_tendering, validate_bid_operation_period,
                                  validate_update_deleted_bid, validate_bid_status_update_not_to_pending))(patch_bid_first_stage)
@optendersresource(name='{}:Tender Bids'.format(CD_UA_TYPE),
                   collection_path='/tenders/{tender_id}/bids',
                   path='/tenders/{tender_id}/bids/{bid_id}',
                   procurementMethodType=CD_UA_TYPE,
                   description="Competitive Dialogue UA bids")
class CompetitiveDialogueUABidResource(BaseResourceEU):
    """ Tender UA bids """
    # Override the inherited PATCH view with the first-stage handler,
    # wired with the competitive-dialogue validator set.
    patch = json_view(content_type="application/json",
                      permission='edit_bid',
                      validators=(validate_patch_bid_data,validate_bid_operation_not_in_tendering, validate_bid_operation_period,
                                  validate_update_deleted_bid, validate_bid_status_update_not_to_pending))(patch_bid_first_stage)
|
#!/usr/bin/env python
#
# Simple script showing how to read a mitmproxy dump file
#
### UPD: this feature is now avaiable in mitmproxy: https://github.com/mitmproxy/mitmproxy/pull/619
from libmproxy import flow
import json, sys
# Read a mitmproxy dump and print one curl command per recorded request.
# NOTE(review): this targets the Python 2-era libmproxy API; string
# concatenation with request.content assumes str bodies, and the curl
# arguments are not shell-escaped — quotes inside bodies/headers would
# break the generated command. Confirm before reuse.
with open("mitmproxy_dump.txt", "rb") as logfile:
    freader = flow.FlowReader(logfile)
    try:
        for f in freader.stream():
            request = f.request
            print(request)
            # Rebuild the request as a curl invocation: method, body, headers.
            curl = 'curl -X ' + request.method + ' -d \'' + request.content + '\' ' + ' '.join(['-H ' + '"' + header[0] + ': ' + header[1] + '"' for header in request.headers])
            # Assumes every captured request was HTTPS — TODO confirm.
            curl += " https://" + request.host + request.path
            print(curl)
            print("--")
    except flow.FlowReadError as v:
        print("Flow file corrupted. Stopped loading.")
|
from styx_msgs.msg import TrafficLight
import tensorflow as tf
import numpy as np
import cv2
import os
# Path to the frozen Faster R-CNN Inception V2 inference graph (.pb).
FASTER_RCNN_INCEPTION_V2_MODEL = 'light_classification/model_site_04/faster_rcnn_inception_v2_traffic_lights_test_site.pb'
class TLClassifier(object):
    """Traffic-light classifier backed by a frozen Faster R-CNN graph."""

    def __init__(self):
        self.model_file = FASTER_RCNN_INCEPTION_V2_MODEL
        self.detection_graph = self.load_graph(self.model_file)
        # Session with incremental GPU memory allocation.
        session_config = tf.ConfigProto()
        session_config.gpu_options.allow_growth = True
        self.session = tf.Session(graph=self.detection_graph, config=session_config)
        # Handles to the graph's input and output tensors.
        graph = self.detection_graph
        self.image_tensor = graph.get_tensor_by_name('image_tensor:0')
        self.detection_boxes = graph.get_tensor_by_name('detection_boxes:0')
        self.detection_scores = graph.get_tensor_by_name('detection_scores:0')
        self.detection_classes = graph.get_tensor_by_name('detection_classes:0')
        # Detected class ids 1..4 map onto these states / labels.
        self.states = [TrafficLight.RED, TrafficLight.YELLOW, TrafficLight.GREEN, TrafficLight.UNKNOWN]
        self.color_text = ['RED', 'YELLOW', 'GREEN', 'OFF']

    def get_classification(self, image):
        """Run detection on a single image and return its TrafficLight state."""
        # The graph expects a batch dimension.
        batched = np.expand_dims(image, axis=0)
        fetches = [self.detection_boxes, self.detection_scores, self.detection_classes]
        boxes, scores, classes = self.session.run(fetches, feed_dict={self.image_tensor: batched})
        # Drop the batch dimension and map the top detection's class id
        # (1-based) onto the state list.
        classes = np.squeeze(classes)
        index = int(classes[0]) - 1
        return self.states[index]

    def load_graph(self, graph_file):
        """Load a frozen inference graph from *graph_file* and return it."""
        graph = tf.Graph()
        with graph.as_default():
            graph_def = tf.GraphDef()
            with tf.gfile.GFile(graph_file, 'rb') as fid:
                graph_def.ParseFromString(fid.read())
            tf.import_graph_def(graph_def, name='')
        return graph
|
import collections
import sys
import json
from tqdm import tqdm
import numpy as np
import utils.load_info_for_model
def recommendations_to_hits(recommended, correct, track_to_album, album_to_artist):
    """Score a recommendation list against the ground truth.

    For each recommended track, emit 1/0 for a track-level hit and an
    artist-level hit. Ground-truth occurrences are treated as a multiset:
    each correct track/artist can only be "consumed" as many times as it
    appears in *correct*.

    :param recommended: sequence of recommended track ids.
    :param correct: sequence of ground-truth track ids.
    :param track_to_album: numpy array mapping track id -> album id.
    :param album_to_artist: numpy array mapping album id -> artist id.
    :return: (track_hits, artist_hits) — two parallel lists of 0/1 ints.
    """
    n_tracks = track_to_album.size
    n_artists = np.max(album_to_artist) + 1
    artist_recommended = album_to_artist[track_to_album[recommended]]
    artist_correct = album_to_artist[track_to_album[correct]]

    def _remaining(ids, size):
        # Multiset of ground-truth occurrences, padded out to `size` slots.
        remaining = np.zeros(size)
        observed = np.bincount(ids)
        remaining[:observed.size] = observed
        return remaining

    track_remaining = _remaining(correct, n_tracks)
    artist_remaining = _remaining(artist_correct, n_artists)

    track_hits = []
    artist_hits = []
    for track_id, artist_id in zip(recommended, artist_recommended):
        track_hit = track_remaining[track_id] > 0
        if track_hit:
            track_remaining[track_id] -= 1
        artist_hit = artist_remaining[artist_id] > 0
        if artist_hit:
            artist_remaining[artist_id] -= 1
        track_hits.append(int(track_hit))
        artist_hits.append(int(artist_hit))
    return track_hits, artist_hits
if __name__ == '__main__':
    # CLI: <playlists.jsonl> <recommendations.jsonl> <track_to_album.json>
    #      <album_to_artist.json> <output.jsonl>
    playlists = utils.load_info_for_model.load_lines_json(sys.argv[1])
    print("TEST READ")
    recommendations_filename = sys.argv[2]
    track_to_album = sys.argv[3]
    album_to_artist = sys.argv[4]
    output_filename = sys.argv[5]
    # Rebind the path arguments to the loaded id-mapping arrays.
    track_to_album = np.array(utils.load_info_for_model.load_json(track_to_album))
    album_to_artist = np.array(utils.load_info_for_model.load_json(album_to_artist))
    # pid -> held-out (deleted) tracks for that playlist.
    test_dict = dict()
    for playlist in playlists:
        test_dict[playlist["pid"]] = playlist["deleted_track"]
    # Stream recommendations in and hit vectors out, one JSON object per line.
    with open(recommendations_filename) as recommendations_file, open(output_filename, "w") as output_file:
        for i, playlist in enumerate(tqdm(recommendations_file)):
            playlist = json.loads(playlist)
            result = dict()
            result["pid"] = playlist["pid"]
            result["num_holdouts"] = playlist["num_holdouts"]
            track_vector, artist_vector = recommendations_to_hits(playlist["recommended"], test_dict[playlist["pid"]],
                                                                  track_to_album, album_to_artist)
            result["vector_artist"] = artist_vector
            result["vector_track"] = track_vector
            output_file.write(json.dumps(result) + "\n")
            # Flush periodically so partial results survive an interruption.
            if i % 20 == 0:
                output_file.flush()
|
import pytest
def test_get_costumes_nonexistent(parser):
    """A project dict with no costume data yields False."""
    assert parser.get_costumes(dict()) == False
def test_get_costumes_empty(parser, empty_sb3):
    """An empty project still carries exactly one (default) costume."""
    costumes = parser.get_costumes(empty_sb3)
    assert type(costumes) == list
    assert len(costumes) == 1
def test_get_costumes_full(parser, full_sb3):
    """A populated project lists all costume names in order."""
    costumes = parser.get_costumes(full_sb3)
    assert type(costumes) == list
    assert len(costumes) == 4
    assert costumes == ["backdrop1", "Bedroom 1", "cat1", "cat2"]
|
import click
from .main import main
@click.command()
@click.option('--n-reps', '-r', nargs=1, default=10, type=int,
              help='Number of repetitions per benchmark')
# BUG FIX: with multiple=True the default must be a sequence of values.
# A bare string default is iterated by click character-by-character, so
# default='1k' produced n_users == ('1', 'k') instead of ('1k',).
@click.option('--n-users', '-n', multiple=True, default=('1k',),
              help='Number of users, e.g. 1K')
@click.option('--logging', '-l', multiple=True, default=('logged', 'unlogged',),
              type=click.Choice(['logged', 'unlogged']),
              help="Logging on `user_stats` table")
def cli(n_reps, n_users, logging):
    """Run the benchmark suite over the given reps/users/logging matrix."""
    main(n_reps, n_users, logging)
|
import numpy as np
from sklearn.metrics import confusion_matrix, roc_curve, auc
import plotly.graph_objs as go
import plotly.figure_factory as ff
from plotly.offline import iplot
from palantiri.BasePlotHandlers import PlotHandler
class ClassifierPlotHandler(PlotHandler):
    """ Handles all the plots related of the chosen classifier. """

    def __init__(self, dataset, trained_classifier, **params):
        """
        Initialization function
        :param dataset: the dataset in a dict format with the following keys:
            'data' - numpy array with all the data points.
            'target' - the label of the corresponding data point.
            'target_names' - (optional) the label names.
        :param trained_classifier: sklearn classifier (trained / fitted).
            In order to plot the ROC plot - the classifier should have the predict_proba ability.
        :param params: other params
        """
        self._dataset = dataset
        self._trained_classifier = trained_classifier
        self._n_classes = len(set(dataset['target']))
        # BUG FIX: the dataset is a dict, so key presence must be checked with
        # `in`. hasattr() never matched on a plain dict, which made the code
        # always fall back to the generic 'Class i' names. `in` also works for
        # sklearn Bunch objects (dict subclasses).
        if 'target_names' in self._dataset:
            self.class_names = self._dataset['target_names']
        else:
            self.class_names = ['Class {0}'.format(i) for i in range(self.n_classes)]
        # Score of the predicted target store (None when the classifier cannot
        # produce class probabilities).
        if hasattr(self._trained_classifier, 'predict_proba'):
            self._predicted_target_score = self._trained_classifier.predict_proba(self._dataset['data'])
        else:
            self._predicted_target_score = None
        self._confusion_matrix = None
        self.confusion_matrix_colorscale = 'Viridis'
        self.prediction_figure = None
        self.roc_figure = None
        self.confusion_matrix_figure = None
        super(ClassifierPlotHandler, self).__init__(**params)

    @classmethod
    def from_pandas_dataframe(cls, dataframe, trained_classifier, **params):
        """
        Constructing the handler from a pandas dataframe.
        :param dataframe: the dataframe form which the handler is constructed.
            The 'target' column should be included in the dataframe.
        :param trained_classifier: sklearn classifier (trained / fitted).
        :param params: other params.
        :return: returns the classifier plot handler object.
        """
        assert 'target' in dataframe.columns.values, 'target values not in dataframe'
        dataset = dict()
        dataset['data'] = dataframe.drop('target', axis=1).values
        dataset['target'] = dataframe['target'].values
        dataset['feature_names'] = dataframe.drop('target', axis=1).columns.values
        return cls(dataset, trained_classifier, **params)

    @property
    def trained_classifier(self):
        """
        The trained classifier.
        :return: The classifier in the sklearn format.
        """
        return self._trained_classifier

    @property
    def dataset(self):
        """
        The dataset
        :return: The dataset as a dictionary
        """
        return self._dataset

    @dataset.setter
    def dataset(self, dataset):
        """
        The dataset setter.
        :param dataset: the new dataset
        """
        self._dataset = dataset

    @property
    def predicted_target_score(self):
        """
        The predicted score - available if classifier has the predict_proba functionality.
        :return: The predicted score, or None.
        """
        return self._predicted_target_score

    @property
    def confusion_matrix(self):
        """
        The confusion matrix.
        :return: The confusion matrix as a numpy array (None until built).
        """
        return self._confusion_matrix

    @property
    def n_classes(self):
        """
        The number of classes.
        :return: An int representing the number of classes.
        """
        return self._n_classes

    def build_confusion_matrix(self, normalize=False):
        """
        Building the confusion matrix.
        :param normalize: if True, each row is normalized to sum to 1.
        """
        prediction = self.trained_classifier.predict(self._dataset['data'])
        self._confusion_matrix = confusion_matrix(self._dataset['target'], prediction)
        if normalize:
            self._confusion_matrix = \
                self._confusion_matrix.astype('float') / self._confusion_matrix.sum(axis=1)[:, np.newaxis]

    def build_confusion_matrix_figure(self, figure_layout):
        """
        Builds the confusion matrix figure in confusion_matrix_figure.
        :param figure_layout: figure layout - plot.ly layout object.
        """
        # BUG FIX: `not array` raises "truth value of an array is ambiguous"
        # for a non-empty numpy array, so rebuilding the figure after the
        # matrix existed crashed. Test against None instead.
        if self._confusion_matrix is None:
            self.build_confusion_matrix()
        # Flip rows so the heatmap reads top-to-bottom like a table.
        cm = np.flipud(self._confusion_matrix)
        x = list(self.class_names)
        y = list(reversed(self.class_names))
        self.confusion_matrix_figure = ff.create_annotated_heatmap(z=cm, x=x, y=y,
                                                                   colorscale=self.confusion_matrix_colorscale)
        self.confusion_matrix_figure['layout'].update(figure_layout)

    def plot_confusion_matrix(self, figure_layout=None):
        """
        Plotting the confusion matrix figure with plot.ly's iplot function.
        :param figure_layout: figure layout - plot.ly layout object.
        """
        if not figure_layout:
            figure_layout = go.Layout(
                xaxis={'title': 'Confusion Matrix <br /><br />Predicted Value'},
                yaxis={'title': 'True Value'})
        if self.confusion_matrix_figure is None:
            self.build_confusion_matrix_figure(figure_layout)
        else:
            self.confusion_matrix_figure['layout'].update(figure_layout)
        iplot(self.confusion_matrix_figure)

    def build_roc_figure(self, figure_layout=None):
        """
        Building the ROC curve figure of the classifier.
        :param figure_layout: figure layout - plot.ly layout object.
        :raises ValueError: if the classifier has no predict_proba scores.
        """
        # Fail with a clear message instead of an obscure TypeError on
        # `None[:, 1]` when the classifier cannot produce probabilities.
        if self.predicted_target_score is None:
            raise ValueError('ROC curve requires a classifier with predict_proba.')
        if figure_layout is None:
            figure_layout = go.Layout()
        data = list()
        if self.n_classes < 3:
            # Binary case: a single curve from the positive-class scores.
            fpr, tpr, _ = roc_curve(self.dataset['target'], self.predicted_target_score[:, 1])
            # Area under curve.
            roc_auc = auc(fpr, tpr)
            data.append(go.Scatter(x=fpr,
                                   y=tpr,
                                   hoverinfo='y',
                                   mode='lines',
                                   line=dict(color='darkorange'),
                                   name='ROC curve (area = %0.2f)' % roc_auc))
        else:
            # Multi-class: one one-vs-rest curve per class.
            fpr = dict()
            tpr = dict()
            roc_auc = dict()
            for i in range(self.n_classes):
                fpr[i], tpr[i], _ = roc_curve((self.dataset['target'] == i).astype(float),
                                              self.predicted_target_score[:, i])
                roc_auc[i] = auc(fpr[i], tpr[i])
                data.append(go.Scatter(x=fpr[i],
                                       y=tpr[i],
                                       hoverinfo='y',
                                       mode='lines',
                                       name='ROC curve of class {0} (area = {1:0.2f})'.format(
                                           self.class_names[i], roc_auc[i])))
        # Chance-level diagonal.
        data.append(go.Scatter(x=[0, 1], y=[0, 1],
                               mode='lines',
                               hoverinfo='skip',
                               line=dict(color='navy', dash='dash'),
                               showlegend=False))
        self.roc_figure = go.Figure(data=data, layout=figure_layout)

    def plot_roc(self, figure_layout=None):
        """
        Plotting the ROC curve figure with plot.ly's iplot function.
        :param figure_layout: figure layout - plot.ly Layout object.
        """
        if not figure_layout:
            figure_layout = go.Layout(title=dict(text='ROC Curve', x=0.5),
                                      xaxis=dict(title='False Positive Rate'),
                                      yaxis=dict(title='True Positive Rate'))
        if self.roc_figure is None:
            self.build_roc_figure(figure_layout=figure_layout)
        else:
            self.roc_figure['layout'].update(figure_layout)
        iplot(self.roc_figure)

    def build_prediction_figure(self, figure_layout):
        """
        Building the classifier prediction figure.
        Implemented by the dimension-specific subclasses.
        :param figure_layout: figure layout - plot.ly Layout object.
        """
        pass

    def plot_prediction(self, figure_layout=None):
        """
        Plotting the prediction figure with plot.ly's iplot function.
        :param figure_layout: figure layout - plot.ly Layout object.
        """
        if not figure_layout:
            figure_layout = go.Layout(title=dict(text='Classifier Prediction', x=0.5))
        if self.prediction_figure is None:
            self.build_prediction_figure(figure_layout=figure_layout)
        else:
            self.prediction_figure['layout'].update(figure_layout)
        iplot(self.prediction_figure)

    def save_prediction_figure(self, file_name):
        """
        Saving the prediction figure as an html file.
        :param file_name: the html file name.
        """
        self.save_figure(self.prediction_figure, file_name)

    def save_roc_figure(self, file_name):
        """
        Saving the ROC curve figure as an html file.
        :param file_name: the html file name.
        """
        self.save_figure(self.roc_figure, file_name)

    def save_confusion_matrix_figure(self, file_name):
        """
        Saving the confusion matrix figure as an html file.
        :param file_name: the html file name.
        """
        self.save_figure(self.confusion_matrix_figure, file_name)
class TwoDimensionalClassifierPlotHandler(ClassifierPlotHandler):
    """ Handles all the plots related of the chosen classifier on 2D. """

    def __init__(self, dataset, trained_classifier, **params):
        """
        The initialization function of the 2D classifier plot handler.
        :param dataset: dataset dict; only the first two feature columns are kept.
        :param trained_classifier: sklearn classifier (trained / fitted).
        :param params: other params.
        """
        dataset['data'] = dataset['data'][:, :2]
        super(TwoDimensionalClassifierPlotHandler, self).__init__(dataset, trained_classifier, **params)

    def build_prediction_figure(self, figure_layout=None, step_size=0.01):
        """
        Building the classifier prediction figure (decision surface + data scatter).
        :param figure_layout: figure layout - plot.ly Layout object (fresh one if None).
        :param step_size: Plot resolution.
        """
        # BUG FIX: the default used to be a single module-level go.Layout()
        # instance; it was mutated below ('xaxis'/'yaxis' updates), so state
        # leaked between calls. Create a fresh layout per call instead.
        if figure_layout is None:
            figure_layout = go.Layout()
        data = list()
        # Mesh covering the data range with a 1-unit margin on each side.
        x_min, x_max = self.dataset['data'][:, 0].min() - 1, self.dataset['data'][:, 0].max() + 1
        y_min, y_max = self.dataset['data'][:, 1].min() - 1, self.dataset['data'][:, 1].max() + 1
        x = np.arange(x_min, x_max, step_size)
        y = np.arange(y_min, y_max, step_size)
        x_mesh, y_mesh = np.meshgrid(x, y)
        # Predict over the mesh to draw the decision regions.
        z = self.trained_classifier.predict(np.column_stack((x_mesh.ravel(), y_mesh.ravel())))
        z = z.reshape(x_mesh.shape)
        data.append(go.Contour(x=x, y=y, z=z,
                               showscale=False,
                               hoverinfo='skip',
                               colorscale='Viridis'))
        data.append(go.Scatter(x=self.dataset['data'][:, 0],
                               y=self.dataset['data'][:, 1],
                               text=[self.class_names[i] for i in self.dataset['target']],
                               hoverinfo='text',
                               mode='markers',
                               marker=dict(color=self.dataset['target'],
                                           showscale=False,
                                           colorscale='Reds',
                                           line=dict(color='black', width=1))))
        if 'feature_names' in self.dataset.keys():
            figure_layout['xaxis'].update({'title': self.dataset['feature_names'][0]})
            figure_layout['yaxis'].update({'title': self.dataset['feature_names'][1]})
        self.prediction_figure = go.Figure(data=data, layout=figure_layout)
class ThreeDimensionalClassifierPlotHandler(ClassifierPlotHandler):
    """ Handles all the plots related of the chosen classifier on 3D. """

    def __init__(self, dataset, trained_classifier, **params):
        """
        The initialization function of the 3D classifier plot handler.
        :param dataset: dataset dict; only the first three feature columns are kept.
        :param trained_classifier: sklearn classifier (trained / fitted).
        :param params: other params.
        """
        dataset['data'] = dataset['data'][:, :3]
        super(ThreeDimensionalClassifierPlotHandler, self).__init__(dataset, trained_classifier, **params)

    def build_prediction_figure(self, figure_layout=None):
        """
        Building the classifier prediction figure (3D scatter of predicted labels).
        :param figure_layout: figure layout - plot.ly Layout object (fresh one if None).
        """
        # BUG FIX: a shared module-level go.Layout() default instance was
        # mutated via figure_layout['scene'].update(...), leaking state
        # between calls. Create a fresh layout per call instead.
        if figure_layout is None:
            figure_layout = go.Layout()
        labels = self.trained_classifier.predict(self.dataset['data'])
        data = list()
        for label in set(labels):
            # Points the classifier assigned to this label.
            data_points = self.dataset['data'][np.in1d(labels, np.asarray(label))]
            data.append(go.Scatter3d(x=data_points[:, 0],
                                     y=data_points[:, 1],
                                     z=data_points[:, 2],
                                     text=self.class_names[label],
                                     hoverinfo='text',
                                     showlegend=True,
                                     name=self.class_names[label],
                                     mode='markers',
                                     marker=dict(
                                         line=dict(color='black', width=1))))
        if 'feature_names' in self.dataset.keys():
            figure_layout['scene'].update(
                dict(xaxis={'title': self.dataset['feature_names'][0]},
                     yaxis={'title': self.dataset['feature_names'][1]},
                     zaxis={'title': self.dataset['feature_names'][2]}))
        self.prediction_figure = go.Figure(data=data, layout=figure_layout)
|
import json
import logging
import logging.handlers
import os
from contextlib import redirect_stdout
from http import HTTPStatus
from io import StringIO
from pathlib import Path
from typing import List
from victoria import config
from victoria.script import victoria
PROJECT_PATH = Path(os.path.abspath(os.path.dirname(__file__)))
CONFIG_PATH = PROJECT_PATH / "victoria.yaml"
def setup_logger():
    """Configure root logging at DEBUG level and return the 'handler' logger.

    See: https://docs.aws.amazon.com/lambda/latest/dg/python-logging.html
    """
    # initialize log settings using the base class
    logging.basicConfig(level=logging.DEBUG)
    return logging.getLogger('handler')
def run_victoria(args: List[str]) -> None:
    """
    Loads local config file and invokes Victoria module through
    CLI capabilities with supplied arguments `args` and config.
    Args:
        args: List[str] arguments passed to Victoria.
    Returns:
        None directly; Victoria's CLI is expected to terminate via
        sys.exit(...), so callers should catch SystemExit (see handler()).
    """
    # Config is re-loaded on every invocation from the bundled YAML file.
    cfg = config.load(CONFIG_PATH)
    victoria.cli.main(prog_name="victoria", args=args, obj=cfg)
def read_output_val(f: StringIO) -> str:
    """
    Retrieves the content of StringIO object without trailing whitespace.
    Args:
        f: StringIO object
    Returns:
        String content of `f` StringIO object with trailing whitespace removed.
    """
    contents = f.getvalue()
    return contents.rstrip()
def handler(event, context):
    """
    Lambda function handler.
    Executes Victoria and returns an appropriate HTTP response dict.
    Reads Victoria's standard output stream for gathering text output of execution.
    It assumes that Victoria performs sys.exit(...) upon execution, which results in SystemExit exception
    with the code 0 designating success of operation and others standing for errors.
    It returns BAD_REQUEST when this assumption is not met.
    It returns INTERNAL_SERVER_ERROR status code when Victoria responded with the code other than 0
    or when an exception other than SystemExit has been thrown during execution.
    It provides a "message" field in HTTP response for failures of execution, which is built from
    standard output stream or an exception details, if any.
    Args:
        event: Lambda-handler event data.
        context: Lambda-handler runtime information (context object).
    Returns:
        A dict with status code and body or error details.
    """
    app_logger = setup_logger()
    # The request body must be JSON with an 'args' list (checked below).
    try:
        body = json.loads(event['body'])
    except (KeyError, ValueError) as e:
        app_logger.error(msg="Invalid request.")
        return {
            'statusCode': HTTPStatus.BAD_REQUEST,
            'error': 'Invalid request',
            'message': str(e)
        }
    # Capture everything Victoria prints so it can be returned to the caller.
    f = StringIO()
    try:
        with redirect_stdout(f):
            run_victoria(body['args'])
    except SystemExit as e:
        # Expected path: Victoria's CLI terminates via sys.exit(code).
        output = read_output_val(f)
        if e.code == 0:
            app_logger.info("Victoria completed successfully.")
            return {'statusCode': HTTPStatus.OK, 'body': json.dumps(output)}
        else:
            app_logger.error(msg='Command failed: non-zero status code %s.' %
                             e.code)
            return {
                'statusCode': HTTPStatus.INTERNAL_SERVER_ERROR,
                'error': 'Command failed: non-zero status code',
                'message': output
            }
    except Exception as e:
        # Boundary catch-all: surface any unexpected failure as a 500.
        app_logger.error(msg="Uncaught Victoria exception detected.")
        return {
            'statusCode': HTTPStatus.INTERNAL_SERVER_ERROR,
            'error': 'Command failed: uncaught Victoria exception detected',
            'message': str(e)
        }
    # For completeness: in case Victoria didn't exit with sys.exit(...) and didn't throw any exceptions
    app_logger.error(
        "Victoria didn't exit with sys.exit(...) and didn't throw any exceptions."
    )
    return {
        'statusCode': HTTPStatus.BAD_REQUEST,
        'error': 'Invalid request',
        'message': ''
    }
|
"""Main library."""
from typing import Optional
# Import module
import jpype
# Enable Java imports
import jpype.imports
# Pull in types
from jpype.types import *
import importlib
class JavaLib:
    """General library documentation.

    Robot Framework dynamic-API bridge that proxies keywords to a Java
    library loaded through JPype.
    """
    ROBOT_LIBRARY_SCOPE = "GLOBAL"

    def __init__(
            self,
            library: str,
            classpath: Optional[str] = None):
        """
        :param library: importable name of the Java library class.
        :param classpath: optional colon-separated classpath entries.
        """
        if jpype.isJVMStarted():
            print("JVM running")
        else:
            # BUG FIX: classpath defaults to None, and None.split(":") raised
            # AttributeError. Start the JVM with an empty classpath instead.
            jpype.startJVM(classpath=classpath.split(":") if classpath else [])
        # NOTE(review): relies on jpype.imports resolving `library` to a
        # callable Java class via import_module — confirm with the jpype docs.
        JavaLibrary = importlib.import_module(library)
        self.javaLibrary = JavaLibrary()

    def get_keyword_names(self):
        """Return the Java library's keyword names as Python strings."""
        keywords = []
        # AnnotationLibrary return Java's ArrayList with Java's Strings, converting to Python
        for keyword in self.javaLibrary.getKeywordNames():
            keywords.append(str(keyword))
        return keywords

    def run_keyword(self, keyword: str, args, kwargs):
        """Execute *keyword* on the Java side with converted arguments."""
        import java
        return self.javaLibrary.runKeyword(JString(keyword), java.util.ArrayList(args), java.util.HashMap(kwargs))

    def get_keyword_documentation(self, keyword: str):
        """Return the keyword's documentation, or '' when unavailable."""
        try:
            # AnnotationLibrary returns java.lang.String
            documentation = str(self.javaLibrary.getKeywordDocumentation(keyword))
        except Exception:
            # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
            documentation = ""
        return documentation
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__Author__ ='eamon'
'Modules Built-In'
# Demo script walking through the datetime standard-library module.
from datetime import datetime
# Current local time.
now = datetime.now()
print(now)
print(type(now))
# Constructing a specific datetime and converting to a POSIX timestamp.
dt=datetime(2015,10,5,20,1,20)
print(dt)
print(dt.timestamp())
# Converting a timestamp back to local and UTC datetimes.
t=1444046480.0
print(datetime.fromtimestamp(t))
print(datetime.utcfromtimestamp(t))
# Parsing and formatting with strptime / strftime.
cday=datetime.strptime('2015-10-05 20:07:59','%Y-%m-%d %H:%M:%S')
print(cday)
now=datetime.now()
print(now.strftime('%a,%b,%d %H:%M'))
# Datetime arithmetic with timedelta.
from datetime import timedelta
now = datetime.now()
print(now)
# datetime.datetime(2015,10,05,20,12,58,10054)
print(now+timedelta(hours=10))
# Attaching a fixed UTC+8 timezone to a naive datetime.
from datetime import timezone
tz_utc_8 = timezone(timedelta(hours=8))
now= datetime.now()
print(now)
dt=now.replace(tzinfo=tz_utc_8)
print(dt)
print('------------------------')
# Converting an aware UTC datetime into UTC+8 and then UTC+9.
utc_dt=datetime.utcnow().replace(tzinfo=timezone.utc)
print(utc_dt)
bjtm=utc_dt.astimezone(timezone(timedelta(hours=8)))
print(bjtm)
tokyo_tm=bjtm.astimezone(timezone(timedelta(hours=9)))
print('------------------------')
print(tokyo_tm)
import re
def to_timestamp(dt_str, tz_str):
    """Convert a local datetime string plus a UTC-offset string to a POSIX timestamp.

    :param dt_str: datetime in '%Y-%m-%d %H:%M:%S' format.
    :param tz_str: offset like 'UTC+7:00' or 'UTC-05:30'.
    :return: float POSIX timestamp, or None when tz_str is not a valid offset.
    """
    # Raw string avoids invalid-escape warnings; minutes are now captured too
    # (they were previously matched but silently ignored, so e.g. 'UTC+5:30'
    # produced a wrong result).
    tm_fmt = re.match(r'^UTC([+-])(\d{1,2}):(\d{2})$', tz_str)
    if tm_fmt is None:
        return None
    sign = -1 if tm_fmt.group(1) == '-' else 1
    offset = timedelta(hours=int(tm_fmt.group(2)), minutes=int(tm_fmt.group(3)))
    cur_datetime = datetime.strptime(dt_str, '%Y-%m-%d %H:%M:%S')
    return cur_datetime.replace(tzinfo=timezone(sign * offset)).timestamp()

t1 = to_timestamp('2015-6-1 08:10:30', 'UTC+7:00')
assert t1 == 1433121030.0, t1
print('Pass')
|
from .cli import integrate_alembic_cli
from .config import alembic_config_from_solo
__all__ = ['alembic_config_from_solo', 'integrate_alembic_cli']
|
"""pytims - algorithmic libs"""
__version__ = "0.1.0"
__author__ = "Natu Lauchande <nlauchande at google mail>"
__all__ = []
|
from time import sleep
from config.locators import Locators
from infra.web_driver_extensions import WebDriverExtensions
from selenium.webdriver.common.by import By
class NewProjectPage:
    """
    This class represents the New Project page object model
    Attributes:
        driver (WebDriver): WebDriver instance
        driver_extended (WebDriver): WebDriverExtensions instance
    """
    def __init__(self, driver):
        self.driver = driver
        self.driver_extended = WebDriverExtensions(driver)

    def set_project_name(self, project_name):
        """Type *project_name* into the project-name input once it is visible."""
        self.driver_extended.get_visible_element((By.ID, Locators.input_project_name_id)).send_keys(project_name)

    def click_advanced_settings(self):
        """Expand the 'Advanced settings' section of the form."""
        self.driver_extended.get_element((By.XPATH, Locators.btn_advanced_settings_xpath)).click()

    def set_project_description(self, project_desc):
        """Type *project_desc* into the description editor."""
        self.driver_extended.get_element((By.XPATH, Locators.editor_project_desc_xpath)).send_keys(project_desc)
        # HACK: fixed wait for the rich-text editor to settle after typing;
        # presumably an explicit wait on an editor state would be more robust — TODO confirm.
        sleep(1)

    def set_status(self, status):
        """
        Opens the project status drop-down menu element and selects a status from the list.
        Args:
            status (str): given project status, status list: On track, At risk, Off track
        """
        # Opening project status drop-down menu
        self.driver_extended.get_element((By.XPATH, Locators.dd_project_status_xpath)).click()
        # Selecting a status from the list
        self.driver_extended.get_visible_element((By.XPATH, Locators.dd_project_selected_status_xpath.format(status))).click()

    def save_new_project(self):
        """Click the save button once it becomes enabled."""
        self.driver_extended.get_enabled_element((By.XPATH, Locators.btn_save_project_xpath)).click()

    def get_project_identifier(self):
        """
        Extracts project identifier from current url
        Returns:
            str: project identifier
        """
        # NOTE(review): assumes the identifier is always the 5th path segment
        # of the URL — verify against the application's routing.
        return self.driver.current_url.split("/")[4]
|
import seagrass
import sys
import unittest
import warnings
from collections import Counter, defaultdict
from seagrass import auto, get_current_event
from seagrass.hooks import CounterHook
from test.utils import SeagrassTestCaseMixin, req_python_version
# Define an audited example at import time under a throwaway global auditor,
# so that @seagrass.audit(auto) can derive the event name for say_hello.
with seagrass.create_global_auditor() as _:
    class ExampleClass:
        # Test class used to check how functions are auto-named by Seagrass
        @staticmethod
        @seagrass.audit(auto)
        def say_hello(name: str) -> str:
            return f"Hello, {name}!"
class EventsTestCase(SeagrassTestCaseMixin, unittest.TestCase):
    """Tests for events created by Seagrass."""

    def test_wrap_class_property(self):
        """Audited functions can be installed as a property's getter/setter/deleter."""
        # Override a class property to call a hook whenever it's accessed
        class Foo:
            def __init__(self):
                self.x = 0

            def add_one(self):
                return self.x + 1

        hook = CounterHook()

        # NOTE: inside these helpers `self.__x` is name-mangled against the
        # enclosing *test class*, not Foo; all three accessors mangle the same
        # way, so the property still round-trips consistently.
        @self.auditor.audit("test.foo.get_x", hooks=[hook])
        def get_x(self):
            return self.__x

        @self.auditor.audit("test.foo.set_x", hooks=[hook])
        def set_x(self, val):
            self.__x = val

        @self.auditor.audit("test.foo.del_x", hooks=[hook])
        def del_x(self):
            del self.__x

        setattr(Foo, "x", property(fget=get_x, fset=set_x, fdel=del_x))

        with self.auditor.start_auditing():
            f = Foo()
            f.x = 1
            y = f.x  # noqa: F841
            f.x += 2
            del f.x

        # We call get_x twice (once for y = f.x, another for f.x += 2)
        # We call set_x three times (once during Foo.__init__, once during f.x = 1, and
        # once during f.x += 2)
        # We call del_x once (when we call del f.x)
        self.assertEqual(hook.event_counter["test.foo.get_x"], 2)
        self.assertEqual(hook.event_counter["test.foo.set_x"], 3)
        self.assertEqual(hook.event_counter["test.foo.del_x"], 1)

        # Now override the add_one function belonging to Foo
        current_add_one = Foo.add_one

        @self.auditor.audit("test.foo.add_one", hooks=[hook])
        def add_one(self, *args, **kwargs):
            return current_add_one(self, *args, **kwargs)

        setattr(Foo, "add_one", add_one)

        with self.auditor.start_auditing():
            f = Foo()
            result = f.add_one()

        self.assertEqual(result, 1)
        self.assertEqual(hook.event_counter["test.foo.add_one"], 1)

    def test_toggle_event(self):
        """Disabling an event suppresses its hooks; re-enabling restores them."""
        hook = CounterHook()

        @self.auditor.audit("test.foo", hooks=[hook])
        def foo():
            return

        @self.auditor.audit("test.bar", hooks=[hook])
        def bar():
            return foo()

        with self.auditor.start_auditing():
            bar()
        self.assertEqual(hook.event_counter["test.foo"], 1)
        self.assertEqual(hook.event_counter["test.bar"], 1)

        # After disabling an event, its event hooks should no longer be called
        self.auditor.toggle_event("test.foo", False)
        bar()
        self.assertEqual(hook.event_counter["test.foo"], 1)
        self.assertEqual(hook.event_counter["test.bar"], 2)

        # Now we re-enable the event so that hooks get called again
        self.auditor.toggle_event("test.foo", True)
        bar()
        self.assertEqual(hook.event_counter["test.foo"], 2)
        self.assertEqual(hook.event_counter["test.bar"], 3)

    @req_python_version(min=(3, 8))
    def test_wrap_function_and_create_sys_audit_event(self):
        """sys.audit events fire only inside an auditing context, with the right names/args."""
        # We should be able to set up sys.audit events when we wrap functions
        @self.auditor.audit("test.foo", raise_runtime_events=True)
        def foo(x, y, z=None):
            return x + y + (0 if z is None else z)

        @self.auditor.audit("test.bar", raise_runtime_events=False)
        def bar(x, y, z=None):
            return x + y + (0 if z is None else z)

        @self.auditor.audit(
            "test.baz",
            raise_runtime_events=True,
            prehook_audit_event_name="baz_prehook",
            posthook_audit_event_name="baz_posthook",
        )
        def baz(x, y, z=None):
            return x + y + (0 if z is None else z)

        events_counter = Counter()
        args_dict = defaultdict(list)

        def audit_hook(event: str, *args):
            # Audit hooks must never raise; warn instead so test output shows it.
            try:
                if event.startswith("prehook:") or event.startswith("posthook:"):
                    events_counter[event] += 1
                    args_dict[event].append(args)
                elif event in ("baz_prehook", "baz_posthook"):
                    events_counter[event] += 1
                    args_dict[event].append(args)
            except Exception as ex:
                warnings.warn(f"Exception raised in audit_hook: ex={ex}")

        # NOTE: audit hooks cannot be removed once added (sys.addaudithook);
        # this hook stays installed for the rest of the process.
        sys.addaudithook(audit_hook)

        test_args = [(-3, 4), (5, 8), (0, 0)]
        test_kwargs = [{}, {}, {"z": 1}]

        def run_fns(args_list, kwargs_list):
            for (args, kwargs) in zip(args_list, kwargs_list):
                for fn in (foo, bar, baz):
                    fn(*args, **kwargs)

        # The following call to run_fns shouldn't raise any audit events since
        # it isn't performed in an auditing context.
        run_fns(test_args, test_kwargs)
        self.assertEqual(set(events_counter), set())
        self.assertEqual(set(args_dict), set())

        # Now some audit events should be raised:
        with self.auditor.start_auditing():
            run_fns(test_args, test_kwargs)

        # test.bar has raise_runtime_events=False, so only foo and baz appear.
        expected_prehooks = ["prehook:test.foo", "baz_prehook"]
        expected_posthooks = ["posthook:test.foo", "baz_posthook"]
        self.assertEqual(
            set(events_counter), set(expected_prehooks + expected_posthooks)
        )
        self.assertEqual(set(events_counter), set(args_dict))

        for event in expected_prehooks:
            self.assertEqual(events_counter[event], len(test_args))
            args = [args[0][0] for args in args_dict[event]]
            kwargs = [args[0][1] for args in args_dict[event]]
            self.assertEqual(args, test_args)
            self.assertEqual(kwargs, test_kwargs)

        # If we try running our functions outside of an auditing context again,
        # we should once again find that no system events are raised.
        events_counter.clear()
        args_dict.clear()
        run_fns(test_args, test_kwargs)
        self.assertEqual(set(events_counter), set())
        self.assertEqual(set(args_dict), set())

    @req_python_version(max=(3, 8))
    def test_get_error_with_runtime_events_for_python_before_38(self):
        """For Python versions before 3.8, sys.audit and sys.addaudithook do not exist, so
        an exception should be raised if raise_runtime_events=True."""
        with self.assertRaises(NotImplementedError):
            self.auditor.create_event("my_test_event", raise_runtime_events=True)

    def test_auto_name_event(self):
        """auto-named events use the function's fully qualified module path."""
        from pathlib import Path

        auhome = self.auditor.audit(auto, Path.home)
        self.assertEqual(auhome.__event_name__, "pathlib.Path.home")

        # Check the name of the audited function from the ExampleClass class at
        # the top of the file.
        self.assertEqual(
            ExampleClass.say_hello.__event_name__, f"{__name__}.ExampleClass.say_hello"
        )

    def test_get_current_event(self):
        """get_current_event tracks the innermost active event and restores it on return."""
        @self.auditor.audit("test.foo")
        def foo():
            self.assertEqual(get_current_event(), "test.foo")

        @self.auditor.audit("test.bar")
        def bar():
            self.assertEqual(get_current_event(), "test.bar")
            foo()
            # After the nested event finishes, the outer event is current again.
            self.assertEqual(get_current_event(), "test.bar")

        with self.auditor.start_auditing():
            foo()
            bar()

        # We should be able to specify a default value for get_current_event(). If no default is
        # specified and an event isn't being executed, an exception should be thrown.
        self.assertEqual(get_current_event(None), None)
        with self.assertRaises(LookupError):
            get_current_event()
# Allow running this test module directly (outside a pytest/unittest runner).
if __name__ == "__main__":
    unittest.main()
|
import hubspot.cms.audit_logs as api_client
from ...discovery_base import DiscoveryBase
class Discovery(DiscoveryBase):
    # Discovery entry point for the HubSpot CMS audit-logs API.
    @property
    def audit_logs_api(self) -> api_client.AuditLogsApi:
        """Return an AuditLogsApi client configured via DiscoveryBase."""
        return self._configure_api_client(api_client, "AuditLogsApi")
|
"""
Tests for api ride endpoint
"""
import json
def test_get_request(test_client):
    """
    Test that get request works correctly
    """
    response = test_client.get('/api/v1/rides')
    rides = json.loads(response.data)['rides']
    # Expected seed data for the two fixture rides, compared field by field.
    expected = [
        {'origin': 'Mombasa', 'destination': 'Nairobi',
         'travel_date': '23th June 2018', 'time': '10:00 am',
         'price': 500, 'requests': []},
        {'origin': 'Kisumu', 'destination': 'Lodwar',
         'travel_date': '25th June 2018', 'time': '12:00 am',
         'price': 400, 'requests': []},
    ]
    for ride, fields in zip(rides, expected):
        for key, value in fields.items():
            assert ride[key] == value
    assert response.status_code == 200
def test_get_single_ride(test_client):
    """
    Test request returns correct ride with specified ID
    """
    response = test_client.get('/api/v1/rides/2')
    assert response.status_code == 200
    ride = json.loads(response.data)['ride']
    expected = {'origin': 'Kisumu', 'destination': 'Lodwar',
                'travel_date': '25th June 2018', 'time': '12:00 am',
                'price': 400, 'requests': []}
    for key, value in expected.items():
        assert ride[key] == value
def test_unavailable_ride(test_client):
    """
    Test request returns a 404 error if ride is not present
    """
    missing = test_client.get('/api/v1/rides/4')
    assert missing.status_code == 404
def test_create_ride(test_client):
    """
    Test A new ride is created with the post method
    """
    my_data = {'origin':'Londiani', 'destination': 'Brooke',
               'travel_date': '30th August 2018',
               'time': '03:00 pm', 'price' : '200'}
    response = test_client.post('/api/v1/rides', data=json.dumps(my_data),
                                content_type='application/json')
    assert response.status_code == 201
    ride = json.loads(response.data)['ride']
    # The created ride echoes back the posted fields plus an empty requests list.
    for key, value in my_data.items():
        assert ride[key] == value
    assert ride['requests'] == []
|
from easytello import tello
import threading
import time
import socket
from tkinter import *
import tkinter as tk
from PIL import Image,ImageTk
import tkinter.messagebox
import cv2
import PIL.Image
from gaze_tracking import GazeTracking
# Main window setup (window title string is user-facing; left untranslated).
root = tk.Tk()
root.title('湧泉相報系統')
root.geometry('1024x600')
root.configure(bg="#FBE5D6")
global button_function_train,button_history,show_top_bar,show_title,show_eye,show_drown,show_train_title,gaze,socket,tello_address
# Tello drone command endpoint (default AP-mode address).
tello_ip = '192.168.10.1'
tello_port = 8889
tello_address = (tello_ip, tello_port)
# NOTE(review): this rebinds the name `socket` from the module to a UDP socket
# instance; later code in this file depends on the instance, so it cannot be
# renamed safely here, but the shadowing is fragile.
socket = socket.socket (socket.AF_INET, socket.SOCK_DGRAM)
# Enter SDK mode, then start the video stream (fire-and-forget UDP commands).
socket.sendto ('command'.encode (' utf-8 '), tello_address)
socket.sendto ('streamon'.encode (' utf-8 '), tello_address)
gaze = GazeTracking()
###############################################################################################
# Home screen assets. NOTE: Image.ANTIALIAS is deprecated (removed in Pillow 10);
# kept as-is to match the Pillow version this project runs against.
# Top bar
img_start_1 = Image.open("./image/top_bar.png")
img_start_2 = img_start_1.resize((1024,100),Image.ANTIALIAS)
img_start_3 = ImageTk.PhotoImage(img_start_2)
# Home page title
img_title_1 = Image.open("./image/title.png")
img_title_2 = img_title_1.resize((450,90),Image.ANTIALIAS)
img_title_3 = ImageTk.PhotoImage(img_title_2)
# Eye icon
img_eye_1 = Image.open("./image/eye_icon.png")
img_eye_2 = img_eye_1.resize((150,90),Image.ANTIALIAS)
img_eye_3 = ImageTk.PhotoImage(img_eye_2)
# Drone icon
img_drown_1 = Image.open("./image/drown_icon.png")
img_drown_2 = img_drown_1.resize((150,90),Image.ANTIALIAS)
img_drown_3 = ImageTk.PhotoImage(img_drown_2)
# Training button
train_btn_1 = Image.open("./image/train_btn.png")
train_btn_2 = train_btn_1.resize((300,90),Image.ANTIALIAS)
train_btn_3 = ImageTk.PhotoImage(train_btn_2)
# History button
history_btn_1 = Image.open("./image/history.png")
history_btn_2 = history_btn_1.resize((300,90),Image.ANTIALIAS)
history_btn_3 = ImageTk.PhotoImage(history_btn_2)
################################################################################################
# Training mode assets
train_title_image = Image.open("./image/train_title.png").resize((450,90),Image.ANTIALIAS)
train_title = ImageTk.PhotoImage(train_title_image)
# Back button icon
return_icon_1 = Image.open("./image/return_icon.png")
return_icon_2 = return_icon_1.resize((90,90),Image.ANTIALIAS)
return_icon = ImageTk.PhotoImage(return_icon_2)
# Home button icon
# BUG FIX: this icon was loaded and converted twice in a row; the duplicate
# (identical) load has been removed.
home_icon_1 = Image.open("./image/home_icon.png")
home_icon_2 = home_icon_1.resize((90,90),Image.ANTIALIAS)
home_icon = ImageTk.PhotoImage(home_icon_2)
# Confirm button icon
confirm_icon_1 = Image.open("./image/confirm_icon.png")
confirm_icon_2 = confirm_icon_1.resize((300,90),Image.ANTIALIAS)
confirm_icon = ImageTk.PhotoImage(confirm_icon_2)
###############################################################################################
# Report-judgement sidebar
report_judgment_area_1 = Image.open("./image/report_judgment_area.png")
report_judgment_area_2 = report_judgment_area_1.resize((300,495),Image.ANTIALIAS)
report_judgment_area = ImageTk.PhotoImage(report_judgment_area_2)
# Start-report button
report_start_icon_1 = Image.open("./image/report_start_icon.png")
report_start_icon_2 = report_start_icon_1.resize((100,60),Image.ANTIALIAS)
report_start_icon = ImageTk.PhotoImage(report_start_icon_2)
# Finish-report button
report_finish_icon_1 = Image.open("./image/report_finish_icon.png")
report_finish_icon_2 = report_finish_icon_1.resize((100,60),Image.ANTIALIAS)
report_finish_icon = ImageTk.PhotoImage(report_finish_icon_2)
#報告名稱的區域
report_name_area_1 = Image.open("./image/report_name_area.png")
report_name_area_2 = report_name_area_1.resize((210,70),Image.ANTIALIAS)
report_name_area = ImageTk.PhotoImage(report_name_area_2)
#顯示判斷區域
judge_area_1 = Image.open("./image/judge_area.png")
judge_area_2 = judge_area_1.resize((170,70),Image.ANTIALIAS)
judge_area = ImageTk.PhotoImage(judge_area_2)
###############################################################################################
#顯示肢體判斷區域
body_score_1 = Image.open("./image/body_score.png")
body_score_2 = body_score_1.resize((420,385),Image.ANTIALIAS)
body_score = ImageTk.PhotoImage(body_score_2)
#顯示眼神判斷區域
eye_score_1 = Image.open("./image/eye_score.png")
eye_score_2 = eye_score_1.resize((420,385),Image.ANTIALIAS)
eye_score = ImageTk.PhotoImage(eye_score_2)
###############################################################################################
#顯示歷史標題
history_icon_1 = Image.open("./image/history_icon.png")
history_icon_2 = history_icon_1.resize((450,90),Image.ANTIALIAS)
history_icon = ImageTk.PhotoImage(history_icon_2)
#顯示歷史報告按鈕
report_history_btn_1 = Image.open("./image/report_history_btn.png")
report_history_btn_2 = report_history_btn_1.resize((400,140),Image.ANTIALIAS)
report_history_btn = ImageTk.PhotoImage(report_history_btn_2)
###############################################################################################
show_top_bar = tk.Label(root, image=img_start_3)
show_title = tk.Label(root, image=img_title_3, bg="#FFD966")
show_eye = tk.Label(root, image=img_eye_3, bg="#FFD966")
show_drown = tk.Label(root, image=img_drown_3, bg="#FFD966")
global g
g=0
panel_for_trt = tk.Label(root,height=500,width=720,bg="#000000") # initialize image panel2
def drone():
    """Fly a short square pattern: take off, four forward-and-turn legs, land."""
    craft = tello.Tello()
    craft.takeoff()
    for _ in range(4):
        craft.forward(10)
        craft.cw(90)
    craft.land()
def open_cam():
    """Open the default USB webcam into the module-global `cap`.

    Exits the process when the camera cannot be opened.
    """
    global cap
    cap = cv2.VideoCapture(0)
    # cv2.VideoCapture always returns an object, so the original
    # `cap is None` check could never fire; isOpened() is the real success
    # test.  The original also referenced `sys` without importing it, so the
    # failure path would have raised NameError instead of exiting cleanly.
    if not cap.isOpened():
        import sys
        print("Camera Open Error")
        sys.exit(0)
def drone_cam():
    """Open the Tello's UDP video stream into the module-global `capture`."""
    global capture
    print ("Start streaming")
    # The original URL 'udp:/0.0.0.0:11111' (single slash) is malformed;
    # FFmpeg's UDP protocol expects 'udp://host:port'.
    capture = cv2.VideoCapture ('udp://0.0.0.0:11111',cv2.CAP_FFMPEG)
    capture.open('udp://0.0.0.0:11111')
def drone_stream():
    """Read one frame from the drone video stream, annotate it with
    gaze-tracking info, show it in the shared Tk panel, and reschedule
    itself via root.after so it polls on the Tk event loop."""
    global panel_for_trt,imgtk,gaze,socket,tello_address,capture
    ret, frame =capture.read()
    # NOTE(review): `ret` is never checked; a dropped/empty frame would make
    # gaze.refresh raise -- TODO confirm upstream guarantees.
    gaze.refresh(frame)
    frame = gaze.annotated_frame()
    # Describe the current gaze direction.
    text = ""
    if gaze.is_blinking():
        text = "Blinking"
    elif gaze.is_right():
        text = "Looking right"
    elif gaze.is_left():
        text = "Looking left"
    elif gaze.is_center():
        text = "Looking center"
    cv2.putText(frame, text, (90, 60), cv2.FONT_HERSHEY_DUPLEX, 1.6, (147, 58, 31), 2)
    # Overlay pupil coordinates (None when a pupil is not detected).
    left_pupil = gaze.pupil_left_coords()
    right_pupil = gaze.pupil_right_coords()
    cv2.putText(frame, "Left pupil: " + str(left_pupil), (90, 130), cv2.FONT_HERSHEY_DUPLEX, 0.9, (147, 58, 31), 1)
    cv2.putText(frame, "Right pupil: " + str(right_pupil), (90, 165), cv2.FONT_HERSHEY_DUPLEX, 0.9, (147, 58, 31), 1)
    img = cv2.resize(frame, dsize=(720, 500), interpolation=cv2.INTER_AREA)
    cv2image = cv2.cvtColor(img, cv2.COLOR_BGR2RGBA)  # convert BGR to RGBA for Tk
    current_image = PIL.Image.fromarray(cv2image)  # ndarray -> PIL Image
    imgtk = PIL.ImageTk.PhotoImage(image=current_image)
    # Keep a reference on the widget so Tk does not garbage-collect the photo.
    panel_for_trt.imgtk = imgtk
    panel_for_trt.config(image=imgtk)
    root.after(1, drone_stream)
def vid_stream():
    """Read one webcam frame, annotate it with gaze-tracking info, show it in
    the shared Tk panel, and reschedule itself via root.after."""
    global panel_for_trt,imgtk,g,gaze,cap
    success,img = cap.read()
    # NOTE(review): `success` is never checked; a failed read would make
    # gaze.refresh raise -- TODO confirm camera reliability assumptions.
    # We send this frame to GazeTracking to analyze it
    gaze.refresh(img)
    img = gaze.annotated_frame()
    # Describe the current gaze direction.
    text = ""
    if gaze.is_blinking():
        text = "Blinking"
    elif gaze.is_right():
        text = "Looking right"
    elif gaze.is_left():
        text = "Looking left"
    elif gaze.is_center():
        text = "Looking center"
    cv2.putText(img, text, (90, 60), cv2.FONT_HERSHEY_DUPLEX, 1.6, (147, 58, 31), 2)
    # Overlay pupil coordinates (None when a pupil is not detected).
    left_pupil = gaze.pupil_left_coords()
    right_pupil = gaze.pupil_right_coords()
    cv2.putText(img, "Left pupil: " + str(left_pupil), (90, 130), cv2.FONT_HERSHEY_DUPLEX, 0.9, (147, 58, 31), 1)
    cv2.putText(img, "Right pupil: " + str(right_pupil), (90, 165), cv2.FONT_HERSHEY_DUPLEX, 0.9, (147, 58, 31), 1)
    img = cv2.resize(img, dsize=(720, 500), interpolation=cv2.INTER_AREA)
    cv2image = cv2.cvtColor(img, cv2.COLOR_BGR2RGBA)  # convert BGR to RGBA for Tk
    current_image = PIL.Image.fromarray(cv2image)  # ndarray -> PIL Image
    imgtk = PIL.ImageTk.PhotoImage(image=current_image)
    # Keep a reference on the widget so Tk does not garbage-collect the photo.
    panel_for_trt.imgtk = imgtk
    panel_for_trt.config(image=imgtk)
    root.after(1, vid_stream)
def del_cap():
    """Release the USB webcam and drop the last Tk frame reference."""
    global cap, imgtk
    cap.release()
    imgtk = ""
def del_drone_cap():
    """Release the drone video stream and drop the last Tk frame reference."""
    global capture, imgtk
    capture.release()
    imgtk = ""
def del_main():
    """Hide every widget belonging to the landing page (the top bar stays)."""
    global button_function_train, button_history, show_top_bar, show_title, show_eye, show_drown
    for widget in (button_function_train, button_history, show_title,
                   show_eye, show_drown):
        widget.place_forget()
def do_usb_cam():
    """Run the webcam preview loop on a worker thread."""
    threading.Thread(target=vid_stream).start()
def do_drone():
    """Start the drone video loop and the autonomous flight on worker threads."""
    for job in (drone_stream, drone):
        threading.Thread(target=job).start()
def choose_delcam(radio_val):
    """Release whichever video source matches the selection (1=webcam, 2=drone)."""
    releasers = {1: del_cap, 2: del_drone_cap}
    release = releasers.get(radio_val)
    if release is not None:
        release()
def choose_cam(radio_val):
    """Open and start streaming the selected source; warn when none selected."""
    starters = {1: (open_cam, vid_stream), 2: (drone_cam, drone_stream)}
    if radio_val in starters:
        opener, streamer = starters[radio_val]
        opener()
        streamer()
    else:
        tkinter.messagebox.showinfo('警告','請選擇畫面呈現影像')
def del_train_init_page():
    """Hide every widget belonging to the topic-entry page."""
    global show_train_title, button_return_icon, button_home_icon, button_confirm_icon, text, content
    # The original split the tuple of place_forget() calls across two lines
    # with the second one dedented to module level, which would have executed
    # content.place_forget() at import time (NameError).  All six widgets are
    # now hidden inside the function body.
    for widget in (show_train_title, button_return_icon, button_home_icon,
                   button_confirm_icon, text, content):
        widget.place_forget()
def del_train_start_page():
    """Hide the report-in-progress page and drop its counter StringVars."""
    global show_report_judgment_area, button_return_icon, button_home_icon, panel_for_trt, show_train_title, report_start_icon_btn, report_finish_icon_btn, show_report_name_area
    global show_judge_area_fun1, show_judge_area_fun2, show_title, fun1_text, fun2_text, drone_sel, cam_sel, radio_text
    for widget in (show_report_judgment_area, button_return_icon,
                   button_home_icon, panel_for_trt, show_train_title,
                   report_start_icon_btn, report_finish_icon_btn,
                   show_report_name_area, show_judge_area_fun1,
                   show_judge_area_fun2, show_title, drone_sel, cam_sel,
                   radio_text):
        widget.place_forget()
    # Drop the counters so the next session starts fresh.
    del fun1_text, fun2_text
def del_train_finish_page():
    """Hide every widget belonging to the results page."""
    global button_return_icon, button_home_icon, show_train_title, show_eye_score, show_body_score
    for widget in (button_return_icon, button_home_icon, show_train_title,
                   show_eye_score, show_body_score):
        widget.place_forget()
def del_history_init_page():
    """Hide every widget belonging to the history page."""
    global show_history_icon, button_return_icon, button_home_icon, report_history_btn1, report_history_btn2, report_history_btn3, report_history_btn4
    # The original split the tuple of place_forget() calls across two lines
    # with the second one dedented to module level, which would have executed
    # report_history_btn2..4.place_forget() at import time (NameError).  All
    # seven widgets are now hidden inside the function body.
    for widget in (show_history_icon, button_return_icon, button_home_icon,
                   report_history_btn1, report_history_btn2,
                   report_history_btn3, report_history_btn4):
        widget.place_forget()
#輸入主題頁面
def train_init_page():
    """Build the topic-entry page: title, back/home navigation, a text entry
    for the report topic, and a confirm button that starts the session."""
    global show_train_title,button_return_icon,button_home_icon,button_confirm_icon,text,content,theme_var
    show_train_title = tk.Label(root,bg="#FFD966", image = train_title)
    button_return_icon= tk.Button(root, image=return_icon,bg="#FFD966",command=lambda:[del_train_init_page(),main()], activebackground="#FFD966",bd=0)
    button_home_icon= tk.Button(root, image=home_icon,bg="#FFD966",command=lambda:[del_train_init_page(),main()], activebackground="#FFD966",bd=0)
    # Confirm hands the entered topic text to the report page.
    button_confirm_icon= tk.Button(root, image=confirm_icon,bg="#FBE5D6",command=lambda:[del_train_init_page(),train_start_page(content.get())], activebackground="#FBE5D6",bd=0)
    show_train_title.place(x=285,y=5)
    button_return_icon.place(x=20,y=5)
    button_home_icon.place(x=900,y=5)
    button_confirm_icon.place(x=360, y=344)
    # Topic input prompt and entry box
    text = tk.Label(root,font=("Calibri",36), text='請輸入報告主題',bg="#FBE5D6")
    text.place(x=345, y=180)
    theme_var = tk.StringVar()
    content = tk.Entry(root,textvariable=theme_var, bd=3,width=16,font=("Calibri",36))
    content.place(x=320, y=267)
#報告開始頁面
def train_start_page(theme_value):
    """Build the report-in-progress page for the given topic.

    Empty topics bounce back to the entry page with a warning.  Otherwise the
    page shows the live video panel, gaze/body counters, a video-source
    selector, and start/finish controls.
    """
    global theme_var,cap
    if theme_value == "":
        train_init_page()
        messagebox = tkinter.messagebox.showinfo('警告','請輸入報告主題')
    else:
        i = 0  # NOTE(review): unused
        global show_report_judgment_area,button_return_icon,button_home_icon,panel_for_trt,show_train_title,report_start_icon_btn,report_finish_icon_btn,show_report_name_area
        global show_judge_area_fun1,show_judge_area_fun2,show_title,show_fun1,show_fun2,fun1_text,fun2_text,drone_sel,cam_sel,radio_text,radio_val
        show_train_title = tk.Label(root,bg="#FFD966", image = train_title)
        show_report_judgment_area = tk.Label(root,bg="#FBE5D6", image = report_judgment_area)
        button_return_icon= tk.Button(root, image=return_icon,bg="#FFD966",command=lambda:[del_train_start_page(),main()], activebackground="#FFD966",bd=0)
        button_home_icon= tk.Button(root, image=home_icon,bg="#FFD966",command=lambda:[del_train_start_page(),main()], activebackground="#FFD966",bd=0)
        # NOTE(review): "start" always opens the drone stream regardless of the
        # radio selection below; only "finish" honours radio_val -- TODO confirm.
        report_start_icon_btn = tk.Button(root, image=report_start_icon, bg="#FFF2CC",command=lambda:[drone_cam(),do_drone()], activebackground="#FFF2CC",bd=0)
        report_finish_icon_btn = tk.Button(root, image=report_finish_icon, bg="#FFF2CC",command=lambda:[choose_delcam(radio_val.get()),del_train_start_page(),train_finish_page()], activebackground="#FFF2CC",bd=0)
        show_report_name_area = tk.Label(root,bg="#FFF2CC", image = report_name_area)
        show_judge_area_fun1 = tk.Label(root,bg="#FFF2CC", image = judge_area)
        show_judge_area_fun2 = tk.Label(root,bg="#FFF2CC", image = judge_area)
        show_title = tk.Label(show_report_name_area,bg="#F4B183",text=theme_value,font=("Calibri",26))
        # Live counters shown in the sidebar (eye drift / body sway counts).
        fun1_text = tk.StringVar()
        fun1_text.set("眼神偏移次數:0")
        fun2_text = tk.StringVar()
        fun2_text.set("身體晃動次數:0")
        show_fun1 = tk.Label(show_judge_area_fun1,bg="#FBE5D6",textvariable=fun1_text,font=("Calibri",14))
        show_fun2 = tk.Label(show_judge_area_fun2,bg="#FBE5D6",textvariable=fun2_text,font=("Calibri",14))
        show_train_title.place(x=285,y=5)
        button_return_icon.place(x=20,y=5)
        button_home_icon.place(x=900,y=5)
        show_report_judgment_area.place(x=0,y=102)
        show_report_name_area.place(x=45,y=140)
        show_judge_area_fun1.place(x=63,y=230)
        show_judge_area_fun2.place(x=63,y=330)
        report_start_icon_btn.place(x=30,y=520)
        report_finish_icon_btn.place(x=170,y=520)
        panel_for_trt.place(x=304,y=102)
        show_title.place(x=10,y=10)
        show_fun1.place(x=10,y=17)
        show_fun2.place(x=10,y=17)
        # Video-source selector (1 = webcam path, 2 = drone path).
        radio_val = IntVar()
        def ShowChoice():
            # NOTE(review): defined but never wired to any widget.
            print (radio_val.get())
        # NOTE(review): the radio variable names look swapped relative to their
        # labels (drone_sel shows the camera label) -- TODO confirm mapping.
        radio_text=tk.Label(root,
text="請選擇呈現影像",bg="#FFF2CC")
        drone_sel=Radiobutton(root,
text="攝影機影像",
padx = 20,
indicatoron=0,
variable=radio_val,
bg="#FFF2CC",
value=1)
        cam_sel=Radiobutton(root,
text="空拍機影像",
padx = 20,
indicatoron=0,
variable=radio_val,
bg="#FFF2CC",
value=2)
        radio_text.place(x=100,y=450)
        drone_sel.place(x=45,y=480)
        cam_sel.place(x=145,y=480)
        theme_var=""  # reset the topic for the next session
#報告結束頁面
def train_finish_page():
    """Build the results page: eye/body score panels plus return/home navigation."""
    global button_return_icon, button_home_icon, show_train_title, show_eye_score, show_body_score
    show_train_title = tk.Label(root, bg="#FFD966", image=train_title)
    button_return_icon = tk.Button(root, image=return_icon, bg="#FFD966",
                                   command=lambda: [del_train_finish_page(), main()],
                                   activebackground="#FFD966", bd=0)
    button_home_icon = tk.Button(root, image=home_icon, bg="#FFD966",
                                 command=lambda: [del_train_finish_page(), main()],
                                 activebackground="#FFD966", bd=0)
    show_body_score = tk.Label(root, bg="#FBE5D6", image=body_score)
    show_eye_score = tk.Label(root, bg="#FBE5D6", image=eye_score)
    placements = ((show_train_title, 285, 5), (button_return_icon, 20, 5),
                  (button_home_icon, 900, 5), (show_eye_score, 550, 160),
                  (show_body_score, 50, 160))
    for widget, x, y in placements:
        widget.place(x=x, y=y)
def history_init_page():
    """Build the history page: title, navigation, and four report slots."""
    global show_history_icon, button_return_icon, button_home_icon, report_history_btn1, report_history_btn2, report_history_btn3, report_history_btn4
    show_history_icon = tk.Label(root, bg="#FFD966", image=history_icon)
    button_return_icon = tk.Button(root, image=return_icon, bg="#FFD966",
                                   command=lambda: [del_history_init_page(), main()],
                                   activebackground="#FFD966", bd=0)
    button_home_icon = tk.Button(root, image=home_icon, bg="#FFD966",
                                 command=lambda: [del_history_init_page(), main()],
                                 activebackground="#FFD966", bd=0)

    def history_slot():
        # All four slots share the same image and go back to the landing page.
        return tk.Button(root, image=report_history_btn, bg="#FBE5D6",
                         command=lambda: [del_history_init_page(), main()],
                         activebackground="#FBE5D6", bd=0)

    report_history_btn1 = history_slot()
    report_history_btn2 = history_slot()
    report_history_btn3 = history_slot()
    report_history_btn4 = history_slot()
    for widget, x, y in ((show_history_icon, 285, 5), (button_return_icon, 20, 5),
                         (button_home_icon, 900, 5), (report_history_btn1, 70, 180),
                         (report_history_btn2, 70, 380), (report_history_btn3, 550, 180),
                         (report_history_btn4, 550, 380)):
        widget.place(x=x, y=y)
def main():
    """Show the landing page: top bar, title, icons, and the two feature buttons."""
    global button_function_train, button_history, show_top_bar, show_title, show_eye, show_drown
    button_function_train = tk.Button(root, image=train_btn_3, bg="#FBE5D6",
                                      command=lambda: [del_main(), train_init_page()],
                                      activebackground="#FBE5D6", bd=0)
    button_history = tk.Button(root, image=history_btn_3, bg="#FBE5D6",
                               command=lambda: [del_main(), history_init_page()],
                               activebackground="#FBE5D6", bd=0)
    show_title = tk.Label(root, bg="#FFD966", image=img_title_3)
    for widget, x, y in ((show_top_bar, -2, 0), (show_title, 285, 5),
                         (show_eye, 20, 5), (show_drown, 850, 3),
                         (button_function_train, 360, 222),
                         (button_history, 360, 377)):
        widget.place(x=x, y=y)
# Build the landing page, then hand control to the Tk event loop.
main()
root.mainloop()
|
from hover.core.representation.manifold import LayerwiseManifold
import numpy as np
def test_LayerwiseManifold(distance_preserving_array_sequence):
    """Procrustes disparities are ~0 for a distance-preserving layer sequence."""
    manifold = LayerwiseManifold(distance_preserving_array_sequence)
    manifold.unfold(method="umap")
    disparities = manifold.procrustes()[1]
    assert (np.array(disparities) < 1e-16).all()
|
"""Top-level package for Keats Crawler."""
__author__ = 'Arman Mann'
__email__ = 'arman.mann@kcl.ac.uk'
__version__ = '0.1.0'
|
import os
#Convertendo a lista de str em lista de números
def ConvertStrTOData(TXTData, delimiter=False):
    """Convert lines of exported text into numeric data.

    With no delimiter, each line (minus its trailing newline) is parsed as a
    single float and a flat list is returned.  With a delimiter string, each
    line is stripped of its first character and trailing "}\\n", wrapped in
    delimiters, and split on the delimiter; fields are converted to float when
    possible, otherwise kept as strings, yielding a list of rows.
    """
    lines = list(TXTData)
    if delimiter == False:  # noqa: E712 -- keep loose equality for parity with callers
        return [float(raw[0:-1]) for raw in lines]
    padded = [delimiter + ' ' + raw[1:-2] + delimiter for raw in lines]
    table = []
    for row in padded:
        # Positions of every delimiter character in the padded row.
        marks = [pos for pos, ch in enumerate(row) if ch == delimiter]
        fields = []
        for start, stop in zip(marks, marks[1:]):
            piece = row[start + 2:stop]
            try:
                fields.append(float(piece))
            except ValueError:
                fields.append(piece)
        table.append(fields)
    return table
#Convertendo a lista de str em lista de números
def ConvertStrTOData2(TXTData, delimiter, LinesToTake):
    """Like the delimiter branch of ConvertStrTOData, but only the first
    ``LinesToTake`` lines of ``TXTData`` are parsed.

    Each taken line is stripped of its first character and trailing "}\\n",
    wrapped in delimiters, and split on the delimiter; fields become floats
    when possible, otherwise strings.
    """
    raw_lines = list(TXTData)
    padded = [delimiter + ' ' + raw[1:-2] + delimiter
              for position, raw in enumerate(raw_lines)
              if position + 1 <= LinesToTake]
    table = []
    for row in padded:
        # Positions of every delimiter character in the padded row.
        marks = [pos for pos, ch in enumerate(row) if ch == delimiter]
        fields = []
        for start, stop in zip(marks, marks[1:]):
            piece = row[start + 2:stop]
            try:
                fields.append(float(piece))
            except ValueError:
                fields.append(piece)
        table.append(fields)
    return table
#Importando as matrizes das imagens extraidas pelo Mathematica e convertendo de string para listas
def ImportImgTXTToData(diretorio, fileRoot):
    """Read <diretorio><fileRoot>.txt (a Mathematica-exported image matrix)
    and return it as a one-element list containing the parsed matrix."""
    with open(diretorio + fileRoot + '.txt', 'r') as handle:
        parsed = ConvertStrTOData(handle, ',')  # string lines -> numeric rows
    return [parsed]
# Função para exportar em txt as matrizes das imagens
# A entrada eh uma lista
def ExportImgToData(diretorio, stackRootName, ImgData, stackNumber, SliceNumber):
    """Export an image matrix (iterable of rows) one ``str(row)`` per line to
    ``<diretorio><stackRootName>/TXTData/t<stackNumber>/Slice<SliceNumber>.txt``,
    creating the directory tree when needed.
    """
    target_dir = diretorio + stackRootName + '/TXTData/t' + str(stackNumber)
    # exist_ok avoids the check-then-create race of the original code.
    os.makedirs(target_dir, exist_ok=True)
    # `with` guarantees the file is closed even if a write fails (the original
    # leaked the handle on exception).
    with open(target_dir + '/Slice' + str(SliceNumber) + '.txt', 'w') as fileXport:
        for line in ImgData:
            fileXport.write(str(line) + '\n')
# Função normalizar
# A entrada é uma Image ou Stack no formato de lista
def NormalizeList(imgStackData):
    """Normalize a 2D image or a 3D stack (nested lists or numpy arrays) by
    dividing every pixel by the global maximum.

    Returns nested plain lists with the same structure as the input.
    """
    try:
        # Tuple indexing succeeds only on numpy arrays with >= 3 dimensions.
        imgStackData[0, 0, 0]
        is_stack = True
    except IndexError:
        # numpy array with fewer than 3 dims: treat as a single 2D image.
        is_stack = False
    except TypeError:
        # Plain nested lists reject tuple indices with TypeError, which the
        # original only-IndexError handler missed, so 3D *lists* crashed here.
        # Probe nesting depth instead: a 3D stack still holds sequences at the
        # second level.
        is_stack = isinstance(imgStackData[0][0], (list, tuple))
    if is_stack:
        MAX = max(i for k in imgStackData for j in k for i in j)
        return [[[i / MAX for i in j] for j in k] for k in imgStackData]
    MAX = max(i for j in imgStackData for i in j)
    return [[i / MAX for i in j] for j in imgStackData]
# Função para converter as imagens em matrizes de posicao
def getPositions3D(imgStack):
    """Convert a 3D image stack into a same-shaped structure of
    ``[[x, y, z], intensity]`` pairs with 1-based coordinates.

    The intensity is read from the stack flipped along the z and y axes, so
    coordinate (1, 1, 1) maps to the last slice's last row.
    """
    depth = len(imgStack)
    return [
        [
            [
                [[i + 1, j + 1, k + 1],
                 imgStack[depth - k - 1][len(imgStack[k]) - j - 1][i]]
                for i in range(len(imgStack[k][j]))
            ]
            for j in range(len(imgStack[k]))
        ]
        for k in range(depth)
    ]
|
from typing import List, Iterable
from math import ceil, floor, log
from src.util import range_incl
AgentId = int  # type alias: identifies a single agent
Time = int     # type alias: a discrete timestep
# TODO for (a), find max NF cost, for (b), find min NF cost
def generate_all_path_lenghts(sum_of_costs: int,
                              agents: int,
                              smaller_than: int = -1) -> Iterable[List[int]]:
    """
    Generate every non-increasing split of ``sum_of_costs`` over ``agents``
    agents, each part at least 1 and at most ``smaller_than``.
    :return Iterable of path-length lists of size ``agents``
    """
    if smaller_than == -1:
        smaller_than = sum_of_costs
    if agents == 1 and sum_of_costs >= 1:
        yield [sum_of_costs]
        return
    # The first part must be at least the average so parts stay non-increasing.
    lower = ceil(sum_of_costs / agents)
    for head in range(lower, sum_of_costs + 1):
        if not (1 <= head <= smaller_than):
            continue
        for tail in generate_all_path_lenghts(sum_of_costs - head,
                                              agents - 1, head):
            yield [head, *tail]
def compute_for_agents(path_length: int, agent_amount: int, minimize: bool,
                       t_max: int) -> (int, List[int]):
    """Search every path-length split of ``path_length`` over ``agent_amount``
    agents and return the minimal (or maximal) cost together with the split
    that attains it.  Returns (sentinel, []) when no split exists.
    """
    candidates = list(generate_all_path_lenghts(path_length, agent_amount))
    # Cost of a timestep before an agent has settled on its goal.
    full_step = agent_amount * t_max * (t_max - 1) / 2

    best_cost = 999_999_999 if minimize else 0
    best_split: List[int] = []
    for split in candidates:
        total = 0
        for agent_length in split:
            for t in range_incl(0, t_max - 1):
                if t >= agent_length - 1:
                    # Standing still on the goal earns the reduced cost.
                    total += t_max - t - 1
                else:
                    # Still en route (t <= agent_length - 2): full cost.
                    total += full_step
        improved = total < best_cost if minimize else total > best_cost
        if improved:
            best_cost = total
            best_split = split
    return best_cost, best_split
if __name__ == "__main__":
a_all = list(range(3, 50))
b_all = list(range(4, 52))
for a, b in zip(a_all, b_all):
amount_of_agents = list(range(2, a + 1))
for agent_amount in amount_of_agents:
t_max = b + 10
cost_a, agent_lengths_a = compute_for_agents(a,
agent_amount,
minimize=False,
t_max=t_max)
cost_b, agent_lengths_b = compute_for_agents(b,
agent_amount,
minimize=True,
t_max=t_max)
if len(agent_lengths_a) == 0 or len(agent_lengths_b) == 0:
continue
print(
f"{agent_amount} ({a}, {b}): Cost A {cost_a} [{agent_lengths_a}], Cost B {cost_b} [{agent_lengths_b}]"
)
assert cost_a < cost_b
print("\n\n")
|
"""
This module defines the constant presence test generator.
Tests whether a specific constant exists. No execution is required only building success or fail.
"""
import logging
from typing import Set
from lemonspotter.core.database import Database
from lemonspotter.core.test import Test, TestType, TestOutcome
from lemonspotter.core.testgenerator import TestGenerator
from lemonspotter.core.constant import Constant
from lemonspotter.core.variable import Variable
from lemonspotter.core.statement import (DeclarationAssignmentStatement,
FunctionStatement,
MainDefinitionStatement,
ReturnStatement)
class ConstantPresenceGenerator(TestGenerator):
    """
    Generates tests which check the existence, and capture the value, of
    constants in the database.
    """
    def generate(self) -> Set[Test]:
        """
        Generate a constant-presence Test for every constant that has not
        been presence-tested yet.
        """
        untested = (constant for constant in Database().get_constants()
                    if not constant.properties.get('presence_tested', False))
        return {self.generate_test(constant) for constant in untested}

    def generate_test(self, constant: Constant) -> Test:
        """
        Build a Test whose main() declares a variable initialized from the
        constant.  Printable constants are also executed so their value can
        be captured from the program output.
        """
        logging.info('generating constant presence test for %s', constant.name)

        source = self._generate_source_frame()
        block_main = MainDefinitionStatement()
        source.add_at_start(block_main)
        block_main.add_at_end(ReturnStatement('0'))

        variable = Variable(constant.type, f'variable_{constant.name}', constant.name)
        block_main.add_at_start(
            DeclarationAssignmentStatement(variable,
                                           'declare variable with constant name'))

        printable = constant.type.printable
        if printable:
            # Print the variable so the run can extract the constant's value.
            block_main.add_at_start(
                FunctionStatement.generate_print(variable,
                                                 'extract constant value'))
        test = Test(f'constant_presence_{constant.name}',
                    TestType.BUILD_AND_RUN if printable else TestType.BUILD_ONLY,
                    source)

        def build_fail():
            constant.properties['presence_tested'] = True
            constant.properties['present'] = False
            test.build_outcome = TestOutcome.FAILED

        def build_success():
            constant.properties['presence_tested'] = True
            constant.properties['present'] = True
            test.build_outcome = TestOutcome.SUCCESS

        def run_fail():
            test.run_outcome = TestOutcome.FAILED

        def run_success():
            # Capture the constant value printed by the generated program.
            if variable.type.printable and variable.value is None:
                raise RuntimeError('Variable is printable, but no value is set.')
            constant.properties['value'] = variable.value
            # Constants without a specification-defined value pass by merely
            # existing; defined ones must also validate against the spec.
            if not constant.defined or constant.validate():
                test.run_outcome = TestOutcome.SUCCESS
            else:
                test.run_outcome = TestOutcome.FAILED

        test.build_fail_function = build_fail
        test.build_success_function = build_success
        test.run_fail_function = run_fail
        test.run_success_function = run_success
        return test
|
# terrascript/data/hashicorp/azuread.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:12:52 UTC)
import terrascript
# Each class below maps 1:1 onto an AzureAD Terraform data source; all
# behaviour comes from the terrascript.Data base class, so the bodies are
# intentionally empty (generated code -- do not edit by hand).
class azuread_application(terrascript.Data):
    pass
class azuread_application_published_app_ids(terrascript.Data):
    pass
class azuread_application_template(terrascript.Data):
    pass
class azuread_client_config(terrascript.Data):
    pass
class azuread_domains(terrascript.Data):
    pass
class azuread_group(terrascript.Data):
    pass
class azuread_groups(terrascript.Data):
    pass
class azuread_service_principal(terrascript.Data):
    pass
class azuread_service_principals(terrascript.Data):
    pass
class azuread_user(terrascript.Data):
    pass
class azuread_users(terrascript.Data):
    pass
# Public API: every data-source class defined above.
__all__ = [
    "azuread_application",
    "azuread_application_published_app_ids",
    "azuread_application_template",
    "azuread_client_config",
    "azuread_domains",
    "azuread_group",
    "azuread_groups",
    "azuread_service_principal",
    "azuread_service_principals",
    "azuread_user",
    "azuread_users",
]
|
from fastapi import FastAPI
from msdss_base_api import API
from msdss_base_database import Database
from .routers import *
from .handlers import *
class DataAPI(API):
    """
    Class for creating Data APIs.
    Parameters
    ----------
    users_api : :class:`msdss_users_api:msdss_users_api.core.UsersAPI` or None
        Users API object to enable user authentication for data routes.
        If ``None``, user authentication will not be used for data routes.
    database : :class:`msdss_base_database:msdss_base_database.core.Database` or None
        A :class:`msdss_base_database:msdss_base_database.core.Database` object for managing data. If ``None``, a default database will be setup.
    data_router_settings : dict or None
        Additional arguments passed to :func:`msdss_data_api.routers.get_data_router` except ``database``.
        If ``None``, an empty settings dict is used. A caller-supplied dict is
        copied, never mutated.
    api : :class:`fastapi:fastapi.FastAPI` or None
        API object for creating routes. If ``None``, a default ``FastAPI`` app
        titled ``MSDSS Data API`` is created for this instance.
    *args, **kwargs
        Additional arguments passed to :class:`msdss_base_api:msdss_base_api.core.API`.
    Attributes
    ----------
    data_api_database : :class:`msdss_base_datbase:msdss_base_database.core.Database`
        Database object used for the data API.
    Author
    ------
    Richard Wen <rrwen.dev@gmail.com>
    Example
    -------
    Create Data API without users:
    .. jupyter-execute::
        from msdss_base_database import Database
        from msdss_data_api import DataAPI
        from msdss_users_api import UsersAPI
        # Create database object
        database = Database(
            driver='postgresql',
            user='msdss',
            password='msdss123',
            host='localhost',
            port='5432',
            database='msdss'
        )
        # Create a data api without users
        app = DataAPI(database=database)
    Create Data API with users:
    .. jupyter-execute::
        from msdss_base_database import Database
        from msdss_data_api import DataAPI
        from msdss_users_api import UsersAPI
        # Create database object
        database = Database(
            driver='postgresql',
            user='msdss',
            password='msdss123',
            host='localhost',
            port='5432',
            database='msdss'
        )
        # Create a data api with users
        # CHANGE SECRETS TO STRONG PHRASES
        users_api = UsersAPI(
            'cookie-secret',
            'jwt-secret',
            'reset-secret',
            'verification-secret',
            database=database
        )
        app = DataAPI(users_api, database=database)
        # Add users routes
        app.add_apps(users_api)
        # Run the app with app.start()
        # Try API at http://localhost:8000/docs
        # app.start()
    """
    def __init__(
        self,
        users_api=None,
        database=None,
        data_router_settings=None,
        api=None,
        *args, **kwargs):
        # Build per-instance defaults.  The previous ``data_router_settings={}``
        # default dict was mutated below, leaking ``database`` (and
        # ``users_api``) across every DataAPI created with the default, and the
        # previous module-level ``FastAPI(...)`` default was shared by all
        # instances, so each one registered routes on the same app.
        if api is None:
            api = FastAPI(
                title='MSDSS Data API',
                version='0.2.9'
            )
        # Copy caller-supplied settings so the caller's dict is never mutated.
        data_router_settings = dict(data_router_settings) if data_router_settings else {}
        super().__init__(api=api, *args, **kwargs)

        # (DataAPI_settings) Setup router params
        database = database if database else Database()
        data_router_settings['database'] = database

        # (DataAPI_users) Add users app if specified
        if users_api:
            data_router_settings['users_api'] = users_api

        # (DataAPI_router_data) Add data router
        data_router = get_data_router(**data_router_settings)
        self.add_router(data_router)

        # (DataAPI_attr) Set attributes
        self.data_api_database = database
|
from control4.nn.rnn import RNN
from control4.nn.nn import DenseLayer,NetworkFromFunc
from control4.misc.var_collection import VarCollection
from control4.config import floatX,setup_logging,print_theano_config
from control4.algs.alg_params import AlgParams,validate_and_filter_args
from control4.algs.save_load_utils import dump_dict_to_hdf,gen_output_h5_name
from control4.maths import symbolic
import numpy as np
import theano, theano.tensor as TT
import argparse
from tabulate import tabulate
from collections import defaultdict
import time,h5py
class Task(object):
    """Abstract sequence-learning task: records input/output widths and the
    loss type; subclasses implement generate() to produce batches."""
    def __init__(self, input_size, output_size, loss_type):
        self.input_size, self.output_size, self.loss_type = \
            input_size, output_size, loss_type
    def generate(self, batch_size, length):
        """Produce one batch of (inputs, targets); subclasses must override."""
        raise NotImplementedError
class Substitution(Task):
    """One-hot sequence task: every timestep carries a random symbol from an
    alphabet of ``ab_size``; one random timestep per batch element is flagged
    on an extra input channel, and the target is that timestep's symbol."""
    def __init__(self, seq_length, ab_size):
        self.seq_length = seq_length
        self.ab_size = ab_size
        # One extra input channel carries the "query" marker; cross-entropy loss.
        Task.__init__(self, ab_size + 1, ab_size, "ce")
    def generate(self, batch_size):
        """Return (Q_tna, A_na): inputs (seq_length, batch, ab_size+1) and
        one-hot targets (batch, ab_size)."""
        Q_tna = np.zeros((self.seq_length, batch_size, self.ab_size + 1), floatX)
        A_na = np.zeros((batch_size, self.ab_size), floatX)
        # `xrange` (Python 2 only) replaced by `range`; iteration is identical.
        for t in range(Q_tna.shape[0]):
            Q_tna[t][np.arange(batch_size), np.random.randint(0, self.ab_size, size=(batch_size,))] = 1
        # Pick the queried timestep per batch element and read its symbol.
        t_n = np.random.randint(0, self.seq_length, size=(batch_size,))
        n_n = np.arange(batch_size)
        _, aidx_n = np.nonzero(Q_tna[t_n, n_n])
        # The original assigned A_na[n_n, aidx_n] = 1 twice; once suffices.
        A_na[n_n, aidx_n] = 1
        # Flag the queried timestep on the marker channel.
        Q_tna[t_n, n_n, -1] = 1
        return Q_tna, A_na
class Addition(Task):
    """Adding problem: input is a (length, batch, 2) sequence where channel 0
    marks two positions and channel 1 holds uniform values; the target is the
    mean of the two marked values."""
    def __init__(self,length):
        # NOTE(review): nin/nout mirror Task.input_size/output_size and are not
        # read anywhere in this module -- confirm before removing.
        self.nin = 2
        self.nout = 1
        self.length=length
        Task.__init__(self,2,1,"se")
    def generate(self, batch_size):
        """Return (data, targets): data (l, batch, 2), targets (batch, 1)."""
        # Jitter the sequence length up to 10% above the base length.
        # NOTE(review): np.random.randint(0) raises ValueError, so this
        # requires length >= 10 -- confirm callers.
        l = np.random.randint(int(self.length*.1))+self.length
        # First marker in the first 10% of the sequence, second in [10%, 50%).
        p0 = np.random.randint(int(l*.1), size=(batch_size,))
        p1 = np.random.randint(int(l*.4), size=(batch_size,)) + int(l*.1)
        data = np.random.uniform(size=(l, batch_size, 2)).astype(floatX)
        # Channel 0 is the marker channel: zero everywhere except the two markers.
        data[:,:,0] = 0.
        data[p0, np.arange(batch_size), np.zeros((batch_size,),
            dtype='int32')] = 1.
        data[p1, np.arange(batch_size), np.zeros((batch_size,),
            dtype='int32')] = 1.
        # Target: mean of the channel-1 values at the two marked positions.
        targs = (data[p0, np.arange(batch_size),
                 np.ones((batch_size,), dtype='int32')] + \
                 data[p1, np.arange(batch_size),
                 np.ones((batch_size,), dtype='int32')])/2.
        return data, targs.reshape((-1,1)).astype(floatX)
class TempOrder(Task):
    """Temporal-order problem: two binary symbols appear at random early/late
    positions among distractor symbols; the one-hot target encodes the ordered
    pair as class v0 + 2*v1."""
    def __init__(self,length):
        self.length = length
        Task.__init__(self,6,4,"ce")
    def generate(self, batch_size):
        """Return (data, targ): data (l, batch, 6) one-hot, targ (batch, 4) one-hot."""
        l = self.length
        # First relevant symbol lands in [10%, 20%), second in [50%, 60%).
        p0 = np.random.randint(int(l*.1), size=(batch_size,)) + int(l*.1)
        v0 = np.random.randint(2, size=(batch_size,))
        p1 = np.random.randint(int(l*.1), size=(batch_size,)) + int(l*.5)
        v1 = np.random.randint(2, size=(batch_size,))
        # Class index encodes the ordered pair of binary values.
        targ_vals = v0 + v1*2
        # Distractors use symbol codes 2..5; the two relevant steps use 0/1.
        vals = np.random.randint(4, size=(l, batch_size))+2
        vals[p0, np.arange(batch_size)] = v0
        vals[p1, np.arange(batch_size)] = v1
        data = np.zeros((l, batch_size, 6), dtype=floatX)
        targ = np.zeros((batch_size, 4), dtype=floatX)
        # One-hot encode the symbol stream and the target class.
        data.reshape((l*batch_size, 6))[np.arange(l*batch_size),
                                        vals.flatten()] = 1.
        targ[np.arange(batch_size), targ_vals] = 1.
        return data, targ
class Multiplication(Task):
    # Same layout as Addition, but the target is the PRODUCT of the two
    # marked values instead of their mean.
    def __init__(self,length):
        self.length=length
        Task.__init__(self,2,1,"se")
    def generate(self, batchsize):
        # actual length is jittered up to 10% above self.length
        l = np.random.randint(int(self.length*.1))+self.length
        p0 = np.random.randint(int(l*.1), size=(batchsize,))
        p1 = np.random.randint(int(l*.4), size=(batchsize,)) + int(l*.1)
        data = np.random.uniform(size=(l, batchsize, 2)).astype(floatX)
        # channel 0 is the marker channel
        data[:,:,0] = 0.
        data[p0, np.arange(batchsize), np.zeros((batchsize,),
                                                dtype='int32')] = 1.
        data[p1, np.arange(batchsize), np.zeros((batchsize,),
                                                dtype='int32')] = 1.
        # target = product of the two marked values on channel 1
        targs = (data[p0, np.arange(batchsize),
                      np.ones((batchsize,), dtype='int32')] * \
                 data[p1, np.arange(batchsize),
                      np.ones((batchsize,), dtype='int32')])
        return data, targs.astype(floatX).reshape((-1,1))
def compute_losses(netout_nk, act_nk, loss_type):
    """Build symbolic (loss, error) theano expressions for the network output.

    "ce": softmax cross-entropy loss; error is the mean 0-1 classification error.
    "se": mean summed squared error, used as both loss and error.
    """
    if loss_type == "ce":
        probs_nk = TT.nnet.softmax(netout_nk)
        sloss = TT.nnet.categorical_crossentropy(probs_nk, act_nk).mean()
        serr = TT.neq(netout_nk.argmax(axis=1), act_nk.argmax(axis=1)).mean()
        return (sloss, serr)
    if loss_type == "se":
        sloss = TT.square(netout_nk - act_nk).sum(axis=1).mean()
        return (sloss, sloss)
    raise NotImplementedError
class RNNParams(AlgParams):
    # Hyper-parameters for the recurrent net and its optimizer; exposed as CLI
    # flags via AlgParams.add_to_parser (class attributes are the defaults).
    mem_size = 20  # size of the recurrent memory/hidden state
    cell_type = "gru" # tanh, lstm
    batch_size=50
    opt_method="adaptive_rmsprop" # or sgd
    opt_iters=20
    init_reset_off = 0  # if nonzero, main() shifts the reset-gate bias down by 1
    hessian_subsample= 20  # stride for subsampling data when forming curvature products
    truncate_gradient= -1.0  # <= 0 disables BPTT truncation (see usage in main)
class TaskParams(AlgParams):
    # Which synthetic task to run and its dataset dimensions.
    task = str  # type marker: the task name is supplied on the command line
    n_examples = 5000  # total examples generated (75/25 train/test split)
    seq_length = 100
    ab_size = 5  # alphabet size (substitution task only)
def main():
setup_logging()
print_theano_config()
np.set_printoptions(precision=3)
parser = argparse.ArgumentParser(formatter_class=lambda prog : argparse.ArgumentDefaultsHelpFormatter(prog,max_help_position=50))
param_list = [RNNParams,TaskParams]
for param in param_list: param.add_to_parser(parser)
parser.add_argument("--seed",type=int,default=0)
parser.add_argument("--outfile",type=str,default="")
parser.add_argument("--metadata",type=str,default="")
args = parser.parse_args()
validate_and_filter_args(param_list, args)
params = args.__dict__
mem_size = params["mem_size"]
n_total = params["n_examples"]
batch_size = params["batch_size"]
seq_length = params["seq_length"]
n_train = int(n_total*.75)
n_test = n_total-n_train
np.random.seed(params["seed"])
task_name = params["task"]
if task_name == "substitution":
task = Substitution(seq_length,params["ab_size"])
elif task_name == "addition":
task = Addition(seq_length)
elif task_name == "temp_order":
task = TempOrder(seq_length)
elif task_name == "multiplication":
task = Multiplication(seq_length)
else:
raise NotImplementedError("Unrecognized task %s"%task_name)
Q_tna, A_nk = task.generate(n_total)
Q_tna = theano.shared(Q_tna)
A_nk = theano.shared(A_nk)
sQ_tna = TT.tensor3("Q")
sQ_tna.tag.test_value=Q_tna
sA_nk = TT.matrix("A")
sA_nk.tag.test_value=sA_nk
sN = sA_nk.shape[0] #pylint: disable=E1101
rnn = RNN(mem_size,task.input_size,cell_type=params["cell_type"],truncate_gradient=params["truncate_gradient"]>0 and params["truncate_gradient"])
final_layer = DenseLayer([mem_size],task.output_size,"none",src_names=["mem"],targ_name="output")
net = NetworkFromFunc([rnn,final_layer],lambda X,Minit: final_layer(rnn(X,Minit)))
if args.init_reset_off:
init_br = rnn.cell.br.get_value(borrow=True)
init_br -= 1
sinitm_nm = TT.zeros([sN,mem_size],floatX)
netout_nk = net(sQ_tna,sinitm_nm)
sloss,serr = compute_losses(netout_nk,sA_nk,task.loss_type)
loss_names = ["loss","err"] # loss is smooth function we're optimizing, err is the measure we care about, which might be non-smooth (e.g. 0-1 error)
optvars = VarCollection(net.opt_vars())
optvars.disp()
sgradloss = symbolic.flatten(TT.grad(sloss, optvars.vars()))
# logp =
th = TT.vector('th')
dth = TT.vector("dth")
sstart = TT.lscalar("start")
sstop = TT.lscalar("stop")
th.tag.test_value = optvars.var_values_flat()
var_shapes = optvars.var_shapes()
replace_dict = {v:thslice for (v,thslice) in zip(optvars.vars(),symbolic.unflatten(th,var_shapes, optvars.vars()))}
givens = {sQ_tna:Q_tna[:,sstart:sstop], sA_nk:A_nk[sstart:sstop]}
subsamp = params["hessian_subsample"]
given_subsamp = {sQ_tna:Q_tna[:,sstart:sstop:subsamp], sA_nk:A_nk[sstart:sstop:subsamp]}
dparams = symbolic.unflatten(dth, optvars.var_shapes(), optvars.vars())
from control4.core.cpd import FactoredCategoricalDistribution
cpd = FactoredCategoricalDistribution([task.output_size])
sfvp = symbolic.flatten(TT.Lop(netout_nk, optvars.vars(), cpd.fvp(netout_nk,TT.Rop(netout_nk, optvars.vars(), dparams))))
flosses = theano.function([th,sstart,sstop],theano.clone(TT.stack(sloss,serr),replace=replace_dict),givens=givens,allow_input_downcast=True)
fgradloss = theano.function([th,sstart,sstop],theano.clone(sgradloss,replace=replace_dict),givens=givens,allow_input_downcast=True)
# meanfvp = Averager(lambda (th,p,sli): sum_count_reducer(tuple(f_fvp(th,p,path.prevm_tg,path.prevj_tb,path.o_tf)) for path in get_local_paths(sli)))
fmetric = theano.function([th,dth,sstart,sstop],theano.clone(sfvp,replace=replace_dict),givens=givens,allow_input_downcast=True)
th0 = optvars.var_values_flat()
diags = defaultdict(list)
tstart = time.time()
def diagnostics_update(th):
test_losses = flosses(th,n_train,n_total)
train_losses = flosses(th,0,n_test)
for (loss,name) in zip(test_losses,loss_names):
diags["test_"+name].append(loss)
for (loss,name) in zip(train_losses,loss_names):
diags["train_"+name].append(loss)
diags["time"].append(time.time()-tstart)
print tabulate([(name,ts[-1]) for (name,ts) in sorted(diags.items())])
if params["opt_method"] in ("adaptive_sgd","adaptive_rmsprop"):
from control4.optim.adaptive_descent import adaptive_descent
for state in adaptive_descent(
lambda th,(start,stop) : flosses(th,start,stop)[0],
lambda th,(start,stop) : fgradloss(th,start,stop),
th0,
[(start,start+batch_size) for start in xrange(0,n_train-(n_train%batch_size),batch_size)],
(0,n_train),
initial_stepsize=0.1,
max_iter=params["opt_iters"],
method=params["opt_method"][len("adaptive_"):]):
diagnostics_update(state.x)
elif params["opt_method"] == "lbfgs":
from control4.optim.lbfgs import lbfgs
for th in lbfgs(
lambda th: flosses(th,0,n_train)[0],
lambda th: fgradloss(th,0,n_train),
th0,
maxiter = params["opt_iters"],
):
diagnostics_update(th)
elif params["opt_method"] == "cg":
from control4.optim.cg_optimize import cg_optimize
th = th0
for iteration in xrange(params["opt_iters"]):
th = cg_optimize(th,
floss=lambda th: flosses(th,0,n_train)[0],
fgradloss=lambda th: fgradloss(th,0,n_train),
metric_length=0.1,
substeps=1,
damping=1e-3,
fmetric=lambda th,dth: fmetric(th,dth,0,n_train)
)
diagnostics_update(th)
else:
raise NotImplementedError("invalid opt method: %s"%params["opt_method"])
fname = args.outfile or gen_output_h5_name()
print "saving to",fname
hdf = h5py.File(fname,"w")
dump_dict_to_hdf(hdf, "params", args.__dict__)
hdf.create_group("diagnostics")
for (diagname, val) in diags.items():
hdf["diagnostics"][diagname] = val
if __name__ == "__main__":
main()
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .auto_login import *
from .basic_auth import *
from .bookmark import *
from .get_app import *
from .get_metadata_saml import *
from .get_saml import *
from .group_assignment import *
from .o_auth import *
from .o_auth_redirect_uri import *
from .saml import *
from .secure_password_store import *
from .swa import *
from .three_field import *
from .user import *
from .user_base_schema import *
from .user_schema import *
from ._inputs import *
from . import outputs
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
from devtools_testutils import (
test_proxy,
add_remove_header_sanitizer,
add_general_regex_sanitizer,
add_oauth_response_sanitizer,
)
@pytest.fixture(scope="session", autouse=True)
def add_sanitizers(test_proxy):
    """Session-wide, auto-applied recording sanitizers for the test proxy:
    removes the subscription-key header, masks the service host name, and
    sanitizes OAuth responses."""
    add_remove_header_sanitizer(headers="Ocp-Apim-Subscription-Key")
    # masks the account name appearing between "//" and ".cognitiveservices.azure.com"
    add_general_regex_sanitizer(
        value="fakeendpoint",
        regex="(?<=\\/\\/)[a-z-]+(?=\\.cognitiveservices\\.azure\\.com)"
    )
    add_oauth_response_sanitizer()
|
from __future__ import unicode_literals
from frappe import _
def get_data():
    """Return the desk/module configuration: a list of card definitions, each
    with a translated label and the doctype shortcuts ("items") it shows."""
    return [
        {
            "label": _("Main Flow"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Quotation",
                    "description": _("Quotes to Leads or Customers."),
                },
                {
                    "type": "doctype",
                    "name": "Sales Order",
                    "description": _("Confirmed orders from Customers."),
                },
                {
                    "type": "doctype",
                    "name": "Job Order CT",
                    "label": _("Job Order"),
                    "description":_("Job Order"),
                },
                {
                    "type": "doctype",
                    "name": "Daily Press",
                    "label": _("Daily Press"),
                    "description":_("Daily Press"),
                },
                {
                    "type": "doctype",
                    "name": "Stock Entry",
                }
            ]
        },
        {
            "label": _("Support Flow"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Item",
                    "description":_("Item"),
                },
                {
                    "type": "doctype",
                    "name": "Serial No",
                    "description":_("Serial No"),
                }
            ]
        },
        {
            "label": _("Settings"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Company",
                    "description":_("Company"),
                },
                {
                    "type": "doctype",
                    "name": "Manufacturing Settings",
                    "description": _("Global settings for all manufacturing processes."),
                }
            ]
        }
    ]
|
"""
Enforces a data cutoff date for PPI data.
Original Issue: DC-1445
Intent is to enforce the data cutoff date for PPI data in all CDM tables excluding the person table by sandboxing and
removing any records that persist after the data cutoff date.
"""
# Python imports
import logging
from datetime import datetime
# Project imports
from cdr_cleaner.cleaning_rules.base_cleaning_rule import BaseCleaningRule
from common import JINJA_ENV, AOU_REQUIRED
from constants import bq_utils as bq_consts
from utils import pipeline_logging
from utils.bq import validate_bq_date_string
from resources import fields_for
import constants.cdr_cleaner.clean_cdr as cdr_consts
LOGGER = logging.getLogger(__name__)
SANDBOX_QUERY = JINJA_ENV.from_string("""
CREATE OR REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{intermediary_table}}` AS (
SELECT * FROM `{{project_id}}.{{dataset_id}}.{{cdm_table}}`
WHERE
(GREATEST({{date_fields}}) > DATE("{{cutoff_date}}"))
{% if datetime_fields != '' %}
AND (GREATEST({{datetime_fields}}) > TIMESTAMP("{{cutoff_date}}"))
{% endif %}
)
""")
DATE_CUTOFF_QUERY = JINJA_ENV.from_string("""
SELECT * FROM `{{project_id}}.{{dataset_id}}.{{cdm_table}}` cdm
EXCEPT DISTINCT
SELECT * FROM `{{project_id}}.{{sandbox_id}}.{{intermediary_table}}`
""")
class EhrSubmissionDataCutoff(BaseCleaningRule):
    """
    All rows of data in the RDR ETL with dates after the cutoff date should be sandboxed and dropped
    """
    def __init__(self,
                 project_id,
                 dataset_id,
                 sandbox_dataset_id,
                 cutoff_date=None):
        """
        Initialize the class with proper information.
        Set the issue numbers, description and affected datasets. As other tickets may affect
        this SQL, append them to the list of Jira Issues.
        DO NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS!
        :params: cutoff_date: the last date that should be included in the
            dataset (YYYY-MM-DD); defaults to today's date when missing/invalid
        """
        try:
            # set to provided date string if the date string is valid
            self.cutoff_date = validate_bq_date_string(cutoff_date)
        except (TypeError, ValueError):
            # otherwise, default to using today's date as the date string
            self.cutoff_date = str(datetime.now().date())
        desc = (f'All rows of data in the RDR ETL with dates after '
                f'{self.cutoff_date} will be sandboxed and dropped.')
        super().__init__(issue_numbers=['DC1445'],
                         description=desc,
                         affected_datasets=[cdr_consts.UNIONED],
                         project_id=project_id,
                         dataset_id=dataset_id,
                         sandbox_dataset_id=sandbox_dataset_id)
    def get_affected_tables(self):
        """
        This method gets all the tables that are affected by this cleaning rule which are all the CDM tables
        except for the person table. The birth date field in the person table will be cleaned in another
        cleaning rule where all participants under the age of 18 will be dropped. Ignoring this table will
        optimize this cleaning rule's runtime.
        :return: list of affected tables
        """
        tables = []
        for table in AOU_REQUIRED:
            # skips the person table
            if table == 'person':
                continue
            # appends all CDM tables except for the person table
            else:
                tables.append(table)
        return tables
    def get_query_specs(self, *args, **keyword_args):
        """
        Return a list of dictionary query specifications.
        :return: A list of dictionaries. Each dictionary contains a single query
            and a specification for how to execute that query. The specifications
            are optional but the query is required.
            Sandbox queries are returned first so offending rows are preserved
            before the delete-style rewrite runs.
        """
        queries_list = []
        sandbox_queries_list = []
        for table in self.get_affected_tables():
            # gets all fields from the affected table
            fields = fields_for(table)
            date_fields = []
            datetime_fields = []
            for field in fields:
                # appends only the date columns to the date_fields list
                # (NULLs are coerced to 1900-01-01 so GREATEST never returns NULL)
                if field['type'] in ['date']:
                    date_fields.append(
                        f'COALESCE({field["name"]}, DATE("1900-01-01"))')
                # appends only the datetime columns to the datetime_fields list
                if field['type'] in ['timestamp']:
                    datetime_fields.append(
                        f'COALESCE({field["name"]}, TIMESTAMP("1900-01-01"))')
            # will render the queries only if a CDM table contains a date or datetime field
            # will ignore the CDM tables that do not have a date or datetime field
            if date_fields or datetime_fields:
                sandbox_query = {
                    cdr_consts.QUERY:
                        SANDBOX_QUERY.render(
                            project_id=self.project_id,
                            sandbox_id=self.sandbox_dataset_id,
                            intermediary_table=self.sandbox_table_for(table),
                            dataset_id=self.dataset_id,
                            cdm_table=table,
                            date_fields=(", ".join(date_fields)),
                            datetime_fields=(", ".join(datetime_fields)),
                            cutoff_date=self.cutoff_date),
                }
                sandbox_queries_list.append(sandbox_query)
                date_cutoff_query = {
                    cdr_consts.QUERY:
                        DATE_CUTOFF_QUERY.render(
                            project_id=self.project_id,
                            dataset_id=self.dataset_id,
                            cdm_table=table,
                            sandbox_id=self.sandbox_dataset_id,
                            intermediary_table=self.sandbox_table_for(table)),
                    cdr_consts.DESTINATION_TABLE:
                        table,
                    cdr_consts.DESTINATION_DATASET:
                        self.dataset_id,
                    cdr_consts.DISPOSITION:
                        bq_consts.WRITE_TRUNCATE
                }
                queries_list.append(date_cutoff_query)
        return sandbox_queries_list + queries_list
    def setup_rule(self, client):
        """
        Function to run any data upload options before executing a query.
        """
        pass
    def setup_validation(self, client):
        """
        Run required steps for validation setup
        """
        raise NotImplementedError("Please fix me.")
    def validate_rule(self, client):
        """
        Validates the cleaning rule which deletes or updates the data from the tables
        """
        raise NotImplementedError("Please fix me.")
    def get_sandbox_tablenames(self):
        # NOTE(review): iterates self.affected_tables, which is never assigned
        # in this class -- presumably populated by BaseCleaningRule.__init__;
        # confirm it matches get_affected_tables().
        sandbox_tables = []
        for table in self.affected_tables:
            sandbox_tables.append(self.sandbox_table_for(table))
        return sandbox_tables
def validate_date_string(date_string):
    """
    Validates that the date string is a valid date in the YYYY-MM-DD format.
    Returns the string unchanged when valid; otherwise the underlying
    strptime call raises ValueError (bad date or format) or TypeError
    (non-string input).
    :param date_string: The string to validate
    :return: a valid date string
    :raises: ValueError or TypeError
    """
    # parsing is done purely for validation; the parsed value is discarded
    datetime.strptime(date_string, '%Y-%m-%d')
    return date_string
if __name__ == '__main__':
    import cdr_cleaner.args_parser as parser
    import cdr_cleaner.clean_cdr_engine as clean_engine
    # Extend the shared cleaner CLI with the required --cutoff_date flag.
    ext_parser = parser.get_argument_parser()
    ext_parser.add_argument(
        '-c',
        '--cutoff_date',
        dest='cutoff_date',
        action='store',
        help=
        ('Cutoff date for data based on <table_name>_date and <table_name>_datetime fields. '
         'Should be in the form YYYY-MM-DD.'),
        required=True,
        type=validate_bq_date_string,
    )
    ARGS = ext_parser.parse_args()
    pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True)
    if ARGS.list_queries:
        # Dry run: only log the queries that would be executed.
        clean_engine.add_console_logging()
        query_list = clean_engine.get_query_list(ARGS.project_id,
                                                 ARGS.dataset_id,
                                                 ARGS.sandbox_dataset_id,
                                                 [(EhrSubmissionDataCutoff,)],
                                                 cutoff_date=ARGS.cutoff_date)
        for query in query_list:
            LOGGER.info(query)
    else:
        clean_engine.add_console_logging(ARGS.console_log)
        # NOTE(review): cutoff_date is passed both positionally and as a
        # keyword here -- confirm against clean_dataset's signature.
        clean_engine.clean_dataset(ARGS.project_id,
                                   ARGS.dataset_id,
                                   ARGS.sandbox_dataset_id,
                                   ARGS.cutoff_date,
                                   [(EhrSubmissionDataCutoff,)],
                                   cutoff_date=ARGS.cutoff_date)
|
import math
def sumacomplejos(a, b):
    """Component-wise sum of two complex numbers given as [real, imag] pairs."""
    return [x + y for x, y in zip(a, b)]
def restacomplejos(a, b):
    """Component-wise difference a - b of two [real, imag] complex numbers."""
    return [x - y for x, y in zip(a, b)]
def multiplicacioncomplejos(a, b):
    """Product of two complex numbers in [real, imag] form:
    (a0 + a1*i) * (b0 + b1*i)."""
    real = a[0] * b[0] - a[1] * b[1]
    imag = a[0] * b[1] + a[1] * b[0]
    return [real, imag]
def conjugadocomplejo(a):
    """Complex conjugate of [real, imag]: the imaginary part is negated."""
    real, imag = a
    return [real, imag * (-1)]
def divisioncomplejos(a, b):
    """Divide a by b (both [real, imag]) by multiplying numerator and
    denominator by the conjugate of b; the denominator becomes real."""
    conj_b = conjugadocomplejo(b)
    numerador = multiplicacioncomplejos(a, conj_b)
    denominador = multiplicacioncomplejos(b, conj_b)[0]
    return [numerador[0] / denominador, numerador[1] / denominador]
def modulocomplejo(a):
    """Modulus |a| of a complex number [real, imag]."""
    suma_cuadrados = a[0] ** 2 + a[1] ** 2
    return suma_cuadrados ** 0.5
def faseComplejo(a):
    """Phase (argument) of [real, imag], in radians, via atan2."""
    real, imag = a
    return math.atan2(imag, real)
def conversionComplejoPolar(a):
    """Convert [real, imag] to polar form [modulus, phase]."""
    return [modulocomplejo(a), faseComplejo(a)]
def conversionPolarComplejo(a):
    """Convert polar [modulus, phase] back to Cartesian [real, imag]."""
    modulo, fase = a
    return [modulo * math.cos(fase), modulo * math.sin(fase)]
def adicionVectores(a, b):
    """Entry-wise sum of two column vectors of complex numbers ([[c], [c], ...]).

    Prints a message and returns None when the lengths differ (behaviour kept
    from the original implementation).
    """
    if len(a) != len(b):
        print("Los vectores no pueden ser operados, no son de igual dimension")
    else:
        return [[sumacomplejos(u[0], v[0])] for u, v in zip(a, b)]
def restaVectores(a, b):
    """Entry-wise difference a - b of two column vectors of complex numbers.

    Prints a message and returns None when the lengths differ (behaviour kept
    from the original implementation).
    """
    if len(a) != len(b):
        print("Los vectores no pueden ser operados, no son de igual dimension")
    else:
        return [[restacomplejos(u[0], v[0])] for u, v in zip(a, b)]
def inversoAditivoVectores(a):
    """Additive inverse of a column vector: every entry multiplied by -1."""
    return [[multiplicacioncomplejos(fila[0], [-1, 0])] for fila in a]
def multiplicacionEscalarVectores(a, b):
    """Scale a column vector of complex numbers by the complex scalar b."""
    filas = len(a)
    resultado = [[None for _ in range(len(a[0]))] for _ in range(filas)]
    for idx in range(filas):
        resultado[idx][0] = multiplicacioncomplejos(a[idx][0], b)
    return resultado
def adicionMatrices(a, b):
    """Entry-wise sum of two complex matrices of equal dimensions.

    Prints an error message and returns None when the dimensions differ.
    """
    f = len(a)
    c = len(a[0])
    if len(a) != len(b) or len(a[0]) != len(b[0]):
        # BUGFIX: the message previously said the matrices CAN be operated on
        print("Las matrices no pueden ser operadas, no son de igual dimension")
    else:
        rta = [[None for column in range(c)] for row in range(f)]
        for i in range(f):
            for j in range(c):
                rta[i][j] = sumacomplejos(a[i][j], b[i][j])
        return rta
def inversoAditivoMatrices(a):
    """Negate every entry of matrix a IN PLACE and return the same matrix."""
    columnas = len(a[0])
    for fila in a:
        for j in range(columnas):
            fila[j] = multiplicacioncomplejos(fila[j], [-1, 0])
    return a
def multiplicacionEscalarMatrices(a, b):
    """Scale every entry of matrix a by the complex scalar b; returns a new matrix."""
    return [[multiplicacioncomplejos(entrada, b) for entrada in fila] for fila in a]
def transpuestaMatrizVector(a):
    """Transpose of a matrix (or column vector): result[j][i] == a[i][j]."""
    filas = len(a)
    columnas = len(a[0])
    return [[a[i][j] for i in range(filas)] for j in range(columnas)]
def conjugadoMatrizVector(a):
    """Entry-wise complex conjugate of a matrix or column vector.

    (The original branched on single-column inputs; for rectangular input the
    general entry-wise loop is equivalent.)
    """
    return [[conjugadocomplejo(entrada) for entrada in fila] for fila in a]
def adjuntaMatrizVector(a):
    """Adjoint (conjugate transpose / dagger) of a matrix or column vector."""
    copia = a[:]
    return transpuestaMatrizVector(conjugadoMatrizVector(copia))
def productoMatriz(a, b):
    """Matrix product of complex matrices a (fA x cA) and b (fB x cB).

    Prints an error message and returns None when the inner dimensions
    (cA and fB) do not match.
    """
    fA = len(a)
    fB = len(b)
    cA = len(a[0])
    cB = len(b[0])
    if cA != fB:
        # BUGFIX: the message previously said the matrices CAN be operated on
        print("Las matrices no pueden ser operadas, no son compatibles")
    else:
        rta = [[[0, 0] for columna in range(cB)] for fila in range(fA)]
        for i in range(fA):
            for j in range(cB):
                for k in range(fB):
                    rta[i][j] = sumacomplejos(rta[i][j], multiplicacioncomplejos(a[i][k], b[k][j]))
        return rta
def accionMatrizSobreVector(a,b):
    # Action of matrix a on column vector b: simply the matrix product a*b.
    return productoMatriz(a,b)
def productoInternoVector(a, b):
    """Inner product of column vectors as the 1x1 matrix (a_dagger * b)."""
    return productoMatriz(adjuntaMatrizVector(a), b)
def normaVector(a):
    """Euclidean norm of a matrix/vector whose entries are [re, im] pairs:
    sqrt of the sum of squared real and imaginary parts."""
    total = 0
    for fila in a:
        for entrada in fila:
            total += entrada[0] ** 2 + entrada[1] ** 2
    return math.sqrt(total)
def distanciaVector(a, b):
    """Distance between two column vectors: the norm of their difference."""
    return normaVector(restaVectores(a, b))
def matrizUnitaria(a):
    """True when a is unitary: a * a_dagger equals the identity, with every
    entry rounded to the nearest integer before comparing."""
    producto = productoMatriz(a, adjuntaMatrizVector(a))
    filas = len(a)
    columnas = len(a[0])
    identidad = [[[1, 0] if x == y else [0, 0] for y in range(columnas)] for x in range(filas)]
    for i in range(filas):
        for j in range(columnas):
            producto[i][j] = [round(producto[i][j][0]), round(producto[i][j][1])]
    return producto == identidad
def matrizHermitiana(a):
    """True when a equals its own conjugate transpose (Hermitian matrix)."""
    return adjuntaMatrizVector(a) == a
def productoTensorialMatrizVector(a,b):
    # Kronecker (tensor) product of matrices a and b: the result is a
    # (len(a)*len(b)) x (len(a[0])*len(b[0])) matrix assembled from the
    # scaled blocks a[h][k] * b.
    f = len(a)*len(b)
    c = len(a[0])*len(b[0])
    nuevaMatriz = [[None for column in range(c)] for row in range(f)]
    m = []
    # Bookkeeping for walking the pre-computed blocks: pos = current block,
    # (f, c) = position inside that block (f and c are reused after sizing
    # nuevaMatriz above), col/cont = counters that detect block boundaries.
    pos = 0
    col = 0
    f = 0
    c = 0
    cont = 1
    # Pre-compute every scaled block a[h][k] * b in row-major order.
    for h in range(len(a)):
        for k in range(len(a[0])):
            m.append(multiplicacionEscalarMatrices(b,a[h][k]))
    for i in range(len(nuevaMatriz)):
        for j in range(len(nuevaMatriz[0])):
            nuevaMatriz[i][j] = m[pos][f][c]
            col += 1
            c += 1
            if col==len(b[0]):
                # right edge of the current block: advance to the next block
                # in this block-row
                pos += 1
                c = 0
                col = 0
        # NOTE(review): cont is compared against len(b[0]) (columns of b)
        # where the row count len(b) may be intended -- verify with a
        # non-square b before relying on this for rectangular inputs.
        if cont==len(b[0]):
            f = 0
            cont = 1
        else:
            cont += 1
        # rewind to the first block of this block-row and step one row down
        pos -= len(a[0])
        f += 1
    return nuevaMatriz
def conjugada_vector(v):
    """Conjugate every [re, im] entry of a flat complex vector."""
    return [conjugadocomplejo(componente) for componente in v]
def producto_interno(v_one, v_two):
    """Inner product of two flat complex vectors; v_one is conjugated and
    each term is v_two[j] * conj(v_one[j]), accumulated as [re, im]."""
    acumulado = [0, 0]
    conjugado = conjugada_vector(v_one)
    for k in range(len(v_two)):
        acumulado = sumacomplejos(acumulado, multiplicacioncomplejos(v_two[k], conjugado[k]))
    return acumulado
def norma_vector(v):
    """Norm of a flat complex vector: square root of the real part of <v, v>."""
    return producto_interno(v, v)[0] ** 0.5
def sumacompvector(v1):
    """Fold a flat list of [re, im] pairs into their total complex sum.

    A single-element list is returned as its element; an empty list raises
    IndexError (same as the original).
    """
    if len(v1) < 2:
        return v1[0]
    acumulado = sumacomplejos(v1[0], v1[1])
    for componente in v1[2:]:
        acumulado = sumacomplejos(acumulado, componente)
    return acumulado
def accionmatrizvector(m1, v1):
    """Apply matrix m1 (rows of [re, im] pairs) to the flat vector v1.

    NOTE(review): both loops range over len(m1), so m1 is assumed square --
    confirm with callers.
    """
    productos = []
    for fila in m1:
        productos.append([multiplicacioncomplejos(fila[j], v1[j]) for j in range(len(m1))])
    return [sumacompvector(parciales) for parciales in productos]
|
# -*- coding: utf-8 -*-
__all__ = ["resize_or_set"]
import numpy as np
def resize_or_set(outputs, n, shape, dtype=np.float64):
    """Ensure outputs[n][0] holds an array of the given shape/dtype and return it.

    An empty slot gets a freshly allocated (uninitialised) array; an existing
    array is resized (np.resize semantics: repeated/truncated data) and made
    contiguous with the requested dtype.
    """
    current = outputs[n][0]
    if current is None:
        outputs[n][0] = np.empty(shape, dtype=dtype)
    else:
        outputs[n][0] = np.ascontiguousarray(
            np.resize(current, shape), dtype=dtype
        )
    return outputs[n][0]
|
import socket
import sys
import time
import argparse
# CLI: required listening IP and PORT.
parser = argparse.ArgumentParser()
parser.add_argument("IP")
parser.add_argument("PORT", type=int)
args = parser.parse_args()
# Single-connection TCP server: receives two messages (expected to be
# timestamps), echoes each back, then sends a computed time difference.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = (args.IP, args.PORT)
data = [0, 0]  # the two received messages (bytes)
myTime = 0  # local receive time of the first message
i = 0
print("starting up on {} port {}".format(*server_address))
sock.bind(server_address)
sock.listen(1)
print("waiting for a connection...")
connection, client_address = sock.accept()
try:
    print("connection from", client_address)
    while i<=1:
        data[i] = (connection.recv(120000))
        if i==0:
            # stamp the arrival of the first message only
            myTime = time.time()
        print("received {!r}".format(data[i].decode()))
        if data[i]:
            print("sending data back to the client...")
            connection.send(data[i])
            i+=1
        else:
            # empty recv means the client closed the connection early
            break
    # NOTE(review): this subtracts the SUM of both client-sent values from the
    # local receive time of the first message -- verify against the client's
    # protocol that this is the intended clock comparison.
    difference = myTime - (float(data[0].decode()) + float(data[1].decode()))
    print (difference, " seconds - this is difference of times")
    connection.send(str(difference).encode())
finally:
    connection.close()
|
# Generated by Django 2.2.4 on 2019-09-14 01:31
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: removes the ``image`` field from the
    ``jobopening`` model in the ``job`` app."""
    dependencies = [
        ('job', '0014_auto_20190913_2133'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='jobopening',
            name='image',
        ),
    ]
|
# Foo.
|
## Counter used to name the next extracted face image (faces/<counter>.png)
counter = 0
## If this is set to true, existing images in /faces will be overwritten, otherwise
## the existing image names will be skipped
overwrite_images = False
## Set all videos you want to extract faces from
## These videos should be located in the ./srcvideos directory
input_file_names = [ "movie.mov" ]
import os
## If you want to extract the faces from all videos uncomment the following lines
# def is_video_file(file):
# video_endings = [".mov", ".mp4"]
# for ending in video_endings:
# if file.endswith(ending):
# return True
# return False
# input_file_names = list(filter(is_video_file, os.listdir("./srcvideos")))
import face_recognition
import cv2
import numpy as np
import math
from PIL import Image
# Number of faces skipped because they were too small or cv2 failed to write.
errorcount = 0
def euclidean_distance(a, b):
    """Return the Euclidean distance between the 2-D points a and b ((x, y) pairs).

    Uses math.hypot, which is more accurate and overflow-safe than the manual
    sqrt-of-sum-of-squares form.
    """
    return math.hypot(b[0] - a[0], b[1] - a[1])
def find_eye_center(face, landmark):
    """Midpoint (as an int tuple) of the two corner points stored under
    face[landmark] by the 'small' face_recognition landmark model."""
    punto_a, punto_b = face[landmark][0], face[landmark][1]
    suma_x = punto_a[0] + punto_b[0]
    suma_y = punto_a[1] + punto_b[1]
    return ((int)(suma_x / 2), (int)(suma_y / 2))
# Main pipeline: for each source video, detect eye landmarks frame by frame,
# rotate/scale the frame so the eyes are level and a fixed distance apart,
# then crop a 100x100 face image and save it under ./faces.
for i in input_file_names:
    # Open the input movie file
    input_movie = cv2.VideoCapture("srcvideos/" + i)
    length = int(input_movie.get(cv2.CAP_PROP_FRAME_COUNT))
    # Initialize some variables
    frame_number = 0
    current_path = os.getcwd()
    while True:
        # Grab a single frame of video
        ret, frame = input_movie.read()
        frame_number += 1
        # Quit when the input video file ends
        if not ret:
            break
        # Find all the faces in the current frame of video
        face_landmarks = face_recognition.face_landmarks(frame, model="small")
        # Align and save the face
        for face in face_landmarks:
            right_eye = find_eye_center(face, "right_eye")
            left_eye = find_eye_center(face, "left_eye")
            # Check which direction needs to be rotated and set a third point to get a triangle (with a 90° angle)
            if left_eye[1] > right_eye[1]:
                point_3rd = (right_eye[0], left_eye[1])
                direction = -1 #rotate same direction to clock
            else:
                point_3rd = (left_eye[0], right_eye[1])
                direction = 1 #rotate inverse direction of clock
            # Calculate the amount by which the image needs to be rotated
            # (law of cosines on the eye/eye/right-angle triangle)
            a = euclidean_distance(left_eye, point_3rd)
            b = euclidean_distance(right_eye, left_eye)
            c = euclidean_distance(right_eye, point_3rd)
            cos_a = (b*b + c*c - a*a)/(2*b*c)
            angle = np.arccos(cos_a)
            angle = (angle * 180) / math.pi
            if direction == -1:
                angle = 90 - angle
            # Scale percent, b is the distance between the eyes. This ensures a constant distance between the eyes
            # and therefore ensure that the face is always aligned.
            percent = 1 / (b / 40)
            # Calculate the amount by which the image needs to be scaled
            width = int(frame.shape[1] * percent)
            height = int(frame.shape[0] * percent)
            dim = (width, height)
            # Create a copy of the image and resize it
            new_img = cv2.resize(frame, dim)
            # Scale the eye coordinates
            new_left_eye = tuple(i*percent for i in left_eye)
            new_right_eye = tuple(i*percent for i in right_eye)
            # Calculate the center between the two eyes (after being scaled)
            center_x=(int)((new_right_eye[0] + new_left_eye[0]) / 2)
            center_y=(int)((new_right_eye[1] + new_left_eye[1]) / 2)
            # Rotate the image (PIL rotates about the given center)
            new_img = Image.fromarray(new_img)
            new_img = np.array(new_img.rotate(direction * angle, center=(center_x, center_y)))
            # Crop the image to 100x100 pixels.
            new_img = new_img[center_y-30:center_y+70, center_x-50:center_x+50]
            # If the image does not have 100x100 pixels, skip it
            # This usually happens when the face is too close to an edge of the screen
            if(len(new_img) < 100 or len(new_img[0]) < 100):
                print("File {} at frame {}: Image too small. Skipping...".format(i, frame_number))
                errorcount += 1
            else:
                try:
                    file_path = "{}/faces/{:0>6d}.png".format(current_path, counter)
                    # Check if the file already exists; this can be used to fill deleted files
                    # Comment out the entire loop to just overwrite images
                    while os.path.isfile(file_path) and not overwrite_images:
                        counter += 1
                        file_path = "{}/faces/{:0>6d}.png".format(current_path, counter)
                    cv2.imwrite(file_path, new_img)
                    print("Wrote image {:0>6d}.png".format(counter))
                    counter += 1
                except cv2.error:
                    print("An error occured in file {} at frame {}. Skipping...".format(i, frame_number))
                    errorcount += 1
        # Write the resulting image to the output video file
        print("Processed Frame {} / {}".format(frame_number, length))
    input_movie.release()
    cv2.destroyAllWindows()
print("Last written Image: {:0>6d}.png".format(counter-1))
print("Images Skipped due to errors: {}".format(errorcount))
# All done!
|
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-
class python_cli_args(object):
  'Argparse sub-command definitions for the "python" command group.'

  def __init__(self):
    pass

  @staticmethod
  def _add_verbose_arg(p):
    'Attach the shared -v/--verbose flag to a sub-parser.'
    p.add_argument('-v', '--verbose', action = 'store_true',
                   default = False, help = 'Verbose output')

  def python_add_args(self, subparser):
    'Register all python sub-commands on the given argparse subparsers object.'
    # python_version
    p = subparser.add_parser('ver', help = 'Print the python sys.version.')
    # python_path
    p = subparser.add_parser('path', help = 'Print the python sys.path.')
    # python_installed
    p = subparser.add_parser('installed', help = 'Return the full versions for all installed pythons.')
    self._add_verbose_arg(p)
    # python_install / python_uninstall / python_reinstall all share the same
    # shape (full_version positional + verbose flag); build them in one loop
    # to avoid the previous copy-pasted triplication.
    for command in ( 'install', 'uninstall', 'reinstall' ):
      p = subparser.add_parser(command, help = '%s python.' % command.capitalize())
      p.add_argument('full_version', action = 'store', help = 'The full version of python to %s' % command)
      self._add_verbose_arg(p)
    # python_available
    p = subparser.add_parser('available', help = 'List python versions available to install.')
    p.add_argument('-n', '--num', action = 'store', type = int, default = 3,
                   help = 'Number of versions to show for each major python version [ 3 ]')
    self._add_verbose_arg(p)

  def _command_python(self, command, *args, **kargs):
    'Dispatch a parsed python sub-command to python_cli_command.'
    from .python_cli_command import python_cli_command
    return python_cli_command.handle_command(command, **kargs)
|
import numpy as np
import cv2
# Open the default webcam.
video = cv2.VideoCapture(0)
# Phase 1: show the live feed until Enter (key code 13) is pressed; then let
# the user draw an ROI around the target object and derive an HSV colour range
# from the median colour inside that ROI.
while(video.isOpened()):
    _,frame = video.read()
    cv2.imshow("Image",frame)
    if cv2.waitKey(10)==13:
        bbox = cv2.selectROI(frame)
        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
        obj_img = hsv[bbox[1]:bbox[1]+bbox[3], bbox[0]:bbox[0]+bbox[2]]
        h,s,v = np.median(obj_img[:,:,0]), np.median(obj_img[:,:,1]), np.median(obj_img[:,:,2])
        # NOTE(review): h-5 / h+5 are not wrapped around OpenCV's 0-179 hue
        # circle, so hues near the red boundary may be missed -- confirm.
        lower = np.array([h-5, max(0,s-50), max(0,v-50)])
        upper = np.array([h+5, min(s+50,255), min(v+50,255)])
        break
# Phase 2: threshold each frame to that colour range, outline the largest
# matching contour in red, and stop when 'x' is pressed.
while(video.isOpened()):
    _, frame = video.read()
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    masked = cv2.inRange(hsv,lower,upper)
    blur = cv2.medianBlur(masked,5)
    blob_mask = cv2.bitwise_and(frame, frame, mask=blur)
    cv2.imshow("blob_mask", blob_mask)
    contours, _ = cv2.findContours(blur, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # pick the contour with the largest area
    idx, current_max, counter = 0,0,0
    for n in contours:
        area = cv2.contourArea(n)
        if area > current_max:
            current_max = area
            idx = counter
        counter += 1
    cv2.drawContours(frame, contours, idx, (0,0,255), 2)
    cv2.imshow("output", frame)
    if cv2.waitKey(10)==ord('x'):
        cv2.destroyAllWindows()
        video.release()
        break
|
from cloudaux.aws.glacier import describe_vault, get_vault_access_policy, list_tags_for_vault
from cloudaux.decorators import modify_output
from cloudaux.orchestration.aws.arn import ARN
from flagpole import FlagRegistry, Flags
from six import string_types
registry = FlagRegistry()
FLAGS = Flags('BASE', 'POLICY', 'TAGS')
@registry.register(flag=FLAGS.BASE)
def _get_base(vault_obj, **conn):
    """Ensure the vault dict carries every base field, fetching the full
    description from AWS when any field is missing; stamps the schema
    version and mirrors 'VaultARN' into 'Arn'."""
    required = ('VaultARN', 'VaultName', 'CreationDate', 'NumberOfArchives', 'SizeInBytes')
    missing = [field for field in required if field not in vault_obj]
    if missing:
        vault_obj = describe_vault(vault_name=vault_obj['VaultName'], **conn)
    vault_obj['_version'] = 1
    # some consumers expect the item to contain 'Arn' rather than 'VaultARN'
    vault_obj['Arn'] = vault_obj['VaultARN']
    return vault_obj
@registry.register(flag=FLAGS.POLICY, key='Policy')
def _get_vault_access_policy(vault_obj, **conn):
    """Fetch the vault's access policy document from AWS."""
    vault_name = vault_obj['VaultName']
    return get_vault_access_policy(vault_name=vault_name, **conn)
@registry.register(flag=FLAGS.TAGS, key='Tags')
def _list_tags_for_vault(vault_obj, **conn):
    """Fetch the tags attached to the vault from AWS."""
    vault_name = vault_obj['VaultName']
    return list_tags_for_vault(vault_name=vault_name, **conn)
@modify_output
def get_vault(vault_obj, flags=FLAGS.ALL, **conn):
    """
    Orchestrates calls to build a Glacier Vault in the following format:
    {
         "VaultARN": ...,
         "VaultName": ...,
         "CreationDate" ...,
         "LastInventoryDate" ...,
         "NumberOfArchives" ...,
         "SizeInBytes" ...,
         "Policy" ...,
         "Tags" ...
    }
    Args:
        vault_obj: name, ARN, or dict of Glacier Vault
        flags: Flags describing which sections should be included in the return value. Default ALL
    Returns:
        dictionary describing the requested Vault
    """
    if isinstance(vault_obj, string_types):
        # Accept either a bare vault name or a full ARN; ARN parse errors
        # mean the string was a plain name.
        parsed = ARN(vault_obj)
        name = vault_obj if parsed.error else parsed.parsed_name
        vault_obj = {'VaultName': name}
    return registry.build_out(flags, vault_obj, **conn)
|
import subprocess
import argparse
import datetime
import time
import os
class OSBuild(object):
    """Build a bootable "zeta" ISO image.

    Pipeline: pack src/root into a gzipped cpio initramfs, master the ISO
    with mkisofs, remove the intermediate initramfs, point the
    ``current-test.iso`` softlink at the new image, and stamp the version
    into README/boot.msg/motd.
    """

    def __init__(self, build_version):
        # Version string supplied on the command line (e.g. "1.20").
        self._build_version = build_version
        self._datetime = datetime.datetime.now()
        self._pubdate = datetime.datetime.now().strftime("%m-%d-%Y")
        # Timestamp making each ISO filename unique.
        self._dt = self._datetime.strftime('%m%d%Y-%H%M%S')
        self._build_path = os.getcwd()
        self._iso_path = "../releases/testing/zeta-{}.iso".format(self._dt)
        self._sl0_path = "zeta-{}.iso".format(self._dt)
        self._sl1_path = "current-test.iso"
        self._iso_details = "zeta"
        self._initramfs_name = "zeta.gz"

    def _build_initramfs(self):
        """Pack src/root into a gzipped cpio initramfs inside the ISO tree."""
        os.chdir(self._build_path + "/src/root")
        # Create minimal folder structure the kernel expects at boot.
        for ele in ["dev", "proc", "sys"]:
            if not os.path.exists(ele):
                os.mkdir(ele)
        initramfs = self._build_path + "/src/iso/boot/" + self._initramfs_name
        # Compress initramfs from root filesystem.
        cmd = "find | cpio -o -H newc | gzip -2 > {}".format(initramfs)
        subprocess.call(cmd, shell=True)
        # Further compress initramfs (insane)
        cmd = "advdef -z4 {} 2> /dev/null".format(initramfs)
        subprocess.call(cmd, shell=True)
        os.chdir(self._build_path)

    def _build_iso(self):
        """Master the bootable ISO with mkisofs and make it group-writable."""
        os.chdir(self._build_path + "/src")
        cmd = ("mkisofs -l -J -R -V {} -no-emul-boot -boot-load-size 4 \
            -input-charset utf-8 \
            -boot-info-table -b {} -c {} -o {} {}"
               .format(self._iso_details,
                       "boot/isolinux/isolinux.bin",
                       "boot/isolinux/boot.cat",
                       self._iso_path, "./iso"))
        subprocess.call(cmd, shell=True)
        # BUG FIX: the mode must be octal. The old call passed decimal 775
        # (== 0o1407), which set the sticky bit and nonsense permission bits
        # instead of rwxrwxr-x.
        os.chmod(self._iso_path, 0o775)
        os.chdir(self._build_path)

    def _softlink(self):
        """Point releases/testing/current-test.iso at the freshly built image."""
        os.chdir(self._build_path + "/releases/testing")
        # Delete softlink if present; a missing link is fine, anything else
        # (e.g. permissions) should not be silently swallowed.
        try:
            os.remove(self._sl1_path)
        except OSError:
            pass
        # Create softlink
        cmd = "ln -s {} {} ".format(self._sl0_path, self._sl1_path)
        subprocess.call(cmd, shell=True)
        os.chdir(self._build_path)

    def _cleanup(self):
        """Remove the intermediate initramfs left inside the ISO tree."""
        os.remove(self._build_path + "/src/iso/boot/" + self._initramfs_name)

    def _stamp(self):
        """Rewrite the 'Current Build' line in README.md, boot.msg and motd."""
        some_files = ["README.md", "./src/iso/boot/isolinux/boot.msg",
                      "./src/root/etc/motd"]
        for path in some_files:
            with open(path, "r") as fh:
                lines = fh.read().split("\n")
            # Remove the original before rewriting (breaks hard/soft links
            # rather than writing through them), as the old code did.
            os.remove(path)
            for ix, ele in enumerate(lines):
                if "Current Build" in ele:
                    lines[ix] = (" .... . Current Build: v{} [{}]"
                                 .format(self._build_version, self._pubdate))
            with open(path, "w") as fh:
                fh.write("\n".join(lines))

    def run(self):
        """Execute the full build pipeline in order."""
        self._build_initramfs()
        self._build_iso()
        self._cleanup()
        self._softlink()
        self._stamp()
def main():
    """Parse the required -b/--build argument and run a full OS build."""
    parser = argparse.ArgumentParser()
    required = parser.add_argument_group('Required arguments')
    required.add_argument('-b', '--build', action='store',
                          help='Build version. Ex: 1.20',
                          required=True)
    build_args = parser.parse_args()
    builder = OSBuild(build_args.build)
    builder.run()


if __name__ == "__main__":
    main()
|
# NOTE(review): auto-generated IronPython/.NET interop stub (Revit API, per
# the property docs below). Method bodies are placeholders; `IDisposable`
# and the dual-base class statement only resolve under the .NET runtime.
class AnalysisDisplayVectorSettings(object, IDisposable):
    """
    Contains vector settings for analysis display style element.
    AnalysisDisplayVectorSettings()
    AnalysisDisplayVectorSettings(other: AnalysisDisplayVectorSettings)
    """
    def Dispose(self):
        """ Dispose(self: AnalysisDisplayVectorSettings) """
        pass
    def IsEqual(self, other):
        """
        IsEqual(self: AnalysisDisplayVectorSettings,other: AnalysisDisplayVectorSettings) -> bool
        Compares two vector settings objects.
        other: Vector settings object to compare with.
        Returns: True if objects are equal,false otherwise.
        """
        pass
    def ReleaseUnmanagedResources(self, *args):
        """ ReleaseUnmanagedResources(self: AnalysisDisplayVectorSettings,disposing: bool) """
        pass
    def __enter__(self, *args):
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args):
        """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
        pass
    def __init__(self, *args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    @staticmethod
    def __new__(self, other=None):
        """
        __new__(cls: type)
        __new__(cls: type,other: AnalysisDisplayVectorSettings)
        """
        pass
    def __repr__(self, *args):
        """ __repr__(self: object) -> str """
        pass
    # The property placeholders below mirror the .NET getters/setters; the
    # trailing string literals are the generator's documentation convention.
    ArrowheadScale = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Type of arrow head scaling.
    Get: ArrowheadScale(self: AnalysisDisplayVectorSettings) -> AnalysisDisplayStyleVectorArrowheadScale
    Set: ArrowheadScale(self: AnalysisDisplayVectorSettings)=value
    """
    ArrowLineWeight = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Line weight assigned to arrow lines for vectors.
    Get: ArrowLineWeight(self: AnalysisDisplayVectorSettings) -> int
    Set: ArrowLineWeight(self: AnalysisDisplayVectorSettings)=value
    """
    IsValidObject = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Specifies whether the .NET object represents a valid Revit entity.
    Get: IsValidObject(self: AnalysisDisplayVectorSettings) -> bool
    """
    Rounding = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """Increment to which numeric values of analysis results are rounded in vectors.
    Get: Rounding(self: AnalysisDisplayVectorSettings) -> float
    Set: Rounding(self: AnalysisDisplayVectorSettings)=value
    """
    TextTypeId = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Element id of text associated with the settings.
    Get: TextTypeId(self: AnalysisDisplayVectorSettings) -> ElementId
    Set: TextTypeId(self: AnalysisDisplayVectorSettings)=value
    """
    VectorOrientation = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Vector orientation.
    Get: VectorOrientation(self: AnalysisDisplayVectorSettings) -> AnalysisDisplayStyleVectorOrientation
    Set: VectorOrientation(self: AnalysisDisplayVectorSettings)=value
    """
    VectorPosition = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Vector position.
    Get: VectorPosition(self: AnalysisDisplayVectorSettings) -> AnalysisDisplayStyleVectorPosition
    Set: VectorPosition(self: AnalysisDisplayVectorSettings)=value
    """
    VectorTextType = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Type of vector text visualization.
    Get: VectorTextType(self: AnalysisDisplayVectorSettings) -> AnalysisDisplayStyleVectorTextType
    Set: VectorTextType(self: AnalysisDisplayVectorSettings)=value
    """
|
# Generated by Django 2.2.13 on 2021-03-01 19:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add composite (resource, end_time) and (resource, start_time) indexes
    to Contest. Auto-generated; the hash-suffixed index names must not change."""

    dependencies = [
        ('clist', '0060_auto_20210227_1943'),
    ]

    operations = [
        migrations.AddIndex(
            model_name='contest',
            index=models.Index(fields=['resource', 'end_time'], name='clist_conte_resourc_3c9264_idx'),
        ),
        migrations.AddIndex(
            model_name='contest',
            index=models.Index(fields=['resource', 'start_time'], name='clist_conte_resourc_4d9b15_idx'),
        ),
    ]
|
#!/usr/bin/python
"""Generate architecture-specific copies of public xen headers.

Reads one or more xen public headers, suffixes the defines/unions/structs
listed in structs.py with the architecture name, rewrites native integer
types to fixed-width equivalents, and writes a single guarded header.

Usage: mkheader.py <arch> <outfile> <infile>...
"""
import re
import sys

from structs import unions, structs, defines

# command line arguments
arch = sys.argv[1]
outfile = sys.argv[2]
infiles = sys.argv[3:]

###########################################################################
# configuration #2: architecture information

inttypes = {}
header = {}
footer = {}

# x86_32
inttypes["x86_32"] = {
    "unsigned long": "uint32_t",
    "long": "uint32_t",
    "xen_pfn_t": "uint32_t",
}
header["x86_32"] = """
#define __i386___X86_32 1
#pragma pack(4)
"""
footer["x86_32"] = """
#pragma pack()
"""

# x86_64
inttypes["x86_64"] = {
    "unsigned long": "__align8__ uint64_t",
    "long": "__align8__ uint64_t",
    "xen_pfn_t": "__align8__ uint64_t",
}
header["x86_64"] = """
#if defined(__GNUC__) && !defined(__STRICT_ANSI__)
# define __DECL_REG(name) union { uint64_t r ## name, e ## name; }
# define __align8__ __attribute__((aligned (8)))
#else
# define __DECL_REG(name) uint64_t r ## name
# define __align8__ FIXME
#endif
#define __x86_64___X86_64 1
"""

# ia64
inttypes["ia64"] = {
    "unsigned long": "__align8__ uint64_t",
    "long": "__align8__ uint64_t",
    "xen_pfn_t": "__align8__ uint64_t",
    "long double": "__align16__ ldouble_t",
}
header["ia64"] = """
#define __align8__ __attribute__((aligned (8)))
#define __align16__ __attribute__((aligned (16)))
typedef unsigned char ldouble_t[16];
"""

###########################################################################
# main

source = ""  # renamed from `input` to stop shadowing the builtin
output = ""
fileid = re.sub("[-./]", "_", "__FOREIGN_%s__" % outfile.upper())

# read input header files
for name in infiles:
    with open(name, "r") as f:
        source += f.read()

# add header
output += """
/*
 * public xen defines and struct for %s
 * generated by %s -- DO NOT EDIT
 */
#ifndef %s
#define %s 1
""" % (arch, sys.argv[0], fileid, fileid)
if arch in header:
    output += header[arch]
    output += "\n"

# add defines to output, renamed with an architecture suffix
for line in re.findall(r"#define[^\n]+", source):
    for define in defines:
        regex = r"#define\s+%s\b" % define
        if re.search(regex, line) is None:
            continue
        # Upper-case macros get an upper-case arch suffix.
        if define.upper()[0] == define[0]:
            replace = define + "_" + arch.upper()
        else:
            replace = define + "_" + arch
        output += re.sub(r"\b%s\b" % define, replace, line) + "\n"
output += "\n"

# delete defines, comments, empty lines
source = re.sub(r"#define[^\n]+\n", "", source)
source = re.compile(r"/\*(.*?)\*/", re.S).sub("", source)
source = re.compile(r"\n\s*\n", re.S).sub("\n", source)

# add unions to output
for union in unions:
    regex = r"union\s+%s\s*\{(.*?)\n\};" % union
    match = re.search(regex, source, re.S)
    if match is None:
        output += "#define %s_has_no_%s 1\n" % (arch, union)
    else:
        output += "union %s_%s {%s\n};\n" % (union, arch, match.group(1))
    output += "\n"

# add structs (and their typedefs) to output
for struct in structs:
    regex = r"struct\s+%s\s*\{(.*?)\n\};" % struct
    match = re.search(regex, source, re.S)
    if match is None:
        output += "#define %s_has_no_%s 1\n" % (arch, struct)
    else:
        output += "struct %s_%s {%s\n};\n" % (struct, arch, match.group(1))
        output += "typedef struct %s_%s %s_%s_t;\n" % (struct, arch, struct, arch)
    output += "\n"

# add footer
if arch in footer:
    output += footer[arch]
    output += "\n"
output += "#endif /* %s */\n" % fileid

# replace: defines
for define in defines:
    if define.upper()[0] == define[0]:
        replace = define + "_" + arch.upper()
    else:
        replace = define + "_" + arch
    output = re.sub(r"\b%s\b" % define, replace, output)

# replace: unions
for union in unions:
    output = re.sub(r"\b(union\s+%s)\b" % union, r"\1_%s" % arch, output)

# replace: structs + struct typedefs
for struct in structs:
    output = re.sub(r"\b(struct\s+%s)\b" % struct, r"\1_%s" % arch, output)
    output = re.sub(r"\b(%s)_t\b" % struct, r"\1_%s_t" % arch, output)

# replace: integer types, longest first so "unsigned long" wins over "long".
# BUG FIX: the old code called .sort(cmp) on dict.keys(), which is Python 2
# only; sorted(..., key=len, reverse=True) is the equivalent ordering.
for inttype in sorted(inttypes[arch], key=len, reverse=True):
    output = re.sub(r"\b%s\b" % inttype, inttypes[arch][inttype], output)

# write results
# BUG FIX: the old code ended with a bare "f.close" (missing parentheses),
# which never closed the file; a context manager guarantees the flush/close.
with open(outfile, "w") as f:
    f.write(output)
|
#!/usr/bin/env python
"""
Example of a message box window.
"""
from quo.shortcuts import message
def main():
    """Build the example message dialog and block until it is dismissed."""
    dialog = message(
        title="Example dialog window",
        text="Do you want to continue?\nPress ENTER to quit.",
    )
    dialog.run()


if __name__ == "__main__":
    main()
|
# Generated by Django 2.2.3 on 2020-03-20 00:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relabel the three Site spacing fields with '(Meters)' verbose names.
    Auto-generated by Django; do not edit field definitions by hand."""

    dependencies = [
        ('skeleton', '0009_auto_20200320_1344'),
    ]

    operations = [
        migrations.AlterField(
            model_name='site',
            name='emitter_spacing',
            field=models.FloatField(blank=True, null=True, verbose_name='Emitter Spacing (Meters)'),
        ),
        migrations.AlterField(
            model_name='site',
            name='plant_spacing',
            field=models.FloatField(blank=True, null=True, verbose_name='Plant Spacing (Meters)'),
        ),
        migrations.AlterField(
            model_name='site',
            name='row_spacing',
            field=models.FloatField(blank=True, null=True, verbose_name='Row Spacing (Meters)'),
        ),
    ]
|
from setuptools import setup, find_packages

# Long-form PyPI description, assembled from several concatenated strings.
long_description = 'RIDDLE (Race and ethnicity Imputation from Disease history with Deep LEarning) is an open-source deep learning (DL) framework for estimating/imputing race and ethnicity information in anonymized electronic medical records (EMRs). It utilizes Keras, a modular DL library, and DeepLIFT, an algorithm by Shrikumar et al. (2016) for learning important features in deep neural networks. ' + \
    'Please see the PLOS Computational Biology paper (https://doi.org/10.1371/journal.pcbi.1006106) for information on the research project results and design. \n' + \
    'The riddle Python 2 library makes it easy to perform categorical imputations using a variety of DL architectures -- not just for EMR datasets. Furthermore, compared to alternative methods (e.g., scikit-learn/Python, Amelia II/R), RIDDLE is more efficient due to its parallelized backend (TensorFlow under Keras). ' + \
    'RIDDLE uses Keras to specify, train, and build the underlying DL models. It was debugged using Keras with a TensorFlow backend. The default architecture is a deep multilayer perceptron (deep MLP) that takes "one-hot-encoded" features. However, you can specify any DL architecture (e.g., LSTM, CNN) by writing your own model_module files! '

setup(
    name='RIDDLE',
    version='2.0.1',
    description='Race and ethnicity Imputation from Disease history with Deep LEarning',
    long_description=long_description,
    author='Ji-Sung Kim',
    author_email='hello (at) jisungkim.com',
    url='https://riddle.ai',
    license='Apache 2.0',
    download_url='https://github.com/jisungk/riddle/archive/master.tar.gz',
    packages=find_packages(exclude=['tests*']),
    # BUG FIX: the distribution name is 'scikit-learn'; the old 'sklearn'
    # PyPI name is a deprecated dummy package that now fails to install.
    install_requires=['keras', 'tensorflow', 'scikit-learn', 'xgboost', 'numpy',
                      'scipy', 'matplotlib', 'h5py'],
    # BUG FIX: corrected the 'computational bioloigy' typo in the keywords.
    keywords=['deep learning', 'machine learning', 'neural networks',
              'imputation', 'emr', 'epidemiology', 'biomedicine', 'biology',
              'computational biology', 'bioinformatics']
)
|
import json
import re
import subprocess
def split_indent(txt, i=0):
    """Split *txt* on newlines that are followed by exactly *i* whitespace
    characters and then a non-whitespace character; drop empty chunks."""
    pattern = r'\n\s{{{}}}(?!\s|\n)'.format(i)
    chunks = re.split(pattern, txt.strip('\n'))
    return [chunk.strip('\n') for chunk in chunks if chunk]
def matchmany(out, patterns):
    """Search every line of *out* against every pattern and merge all named
    groups into one dict (later matches overwrite earlier ones)."""
    found = {}
    for line in out.splitlines():
        for pattern in patterns:
            hit = re.search(pattern, line)
            if hit:
                found.update(hit.groupdict())
    return found
def execmatch(cmd, patterns, error_msgs=None):
    '''Run a command and match all the patterns against its output.

    Arguments:
        cmd (str or list): command to execute; a string is split on spaces.
        patterns (list(str)): list of regex patterns to match against.
        error_msgs (str or tuple, optional): substrings that, when present in
            the output (case-insensitive), mean the tool reported an error.

    Returns:
        dict containing the group name (as passed in pattern element)
        and value as the matched value; {} when the command fails, is
        missing, or reports a known error message.
    '''
    try:
        cmdargs = cmd.split(' ') if isinstance(cmd, str) else cmd
        out = subprocess.run(cmdargs, check=True, capture_output=True).stdout.decode('utf-8')
        if any(msg.lower() in out.lower() for msg in as_tuple(error_msgs)):
            return {}
        return matchmany(out, patterns)
    except (subprocess.CalledProcessError, FileNotFoundError):
        # BUG FIX: a missing executable raises FileNotFoundError, which used
        # to escape to the caller while a *failing* command returned {};
        # treat both failure modes the same way.
        return {}
def as_tuple(x):
    """Coerce *x* to a tuple: tuples pass through, falsy values become the
    empty tuple, anything else is wrapped in a 1-tuple."""
    if isinstance(x, tuple):
        return x
    return (x,) if x else ()
def maybe_cast(data, type_, *keys):
    """Return a copy of *data* where each of *keys* that is present has its
    value converted with *type_*; absent keys are ignored."""
    converted = {key: type_(data[key]) for key in keys if key in data}
    return dict(data, **converted)
class attrdict(dict):
    '''Simple attribute dict: items are readable as attributes, and nested
    plain dicts are wrapped as attrdicts on access.'''
    def __getitem__(self, k):
        # Wrap nested dict values so chained attribute access keeps working.
        return self.__convert(super().__getitem__(k))
    def __str__(self):
        return json.dumps(
            self, sort_keys=True, indent=4)
    def __getattr__(self, k):
        # Only called for names not found as real attributes.
        if k not in self:
            raise AttributeError(k)
        return self[k]
    def get(self, k, default=None):
        # BUG FIX: the old signature forwarded **kw to dict.get(), but
        # dict.get() accepts no keyword arguments, so any supplied default
        # raised TypeError; accept a default like the builtin does.
        return self.__convert(super().get(k, default))
    def __convert(self, v):
        # Internal helper: wrap dicts, pass everything else through.
        return attrdict(v) if isinstance(v, dict) else v
|
from my_generators.fibonacci import fibonacci
def get_result():
    """Return the 1-based index of the first Fibonacci number with at least
    1000 decimal digits."""
    for position, value in enumerate(fibonacci(), start=1):
        if len(str(value)) >= 1000:
            return position
|
import cross3d
from PyQt4.QtCore import QObject
class AbstractUndoContext(QObject):
    """No-op undo context.

    Usable both as a ``with`` context manager and through the
    openUndo/closeUndo class methods; every operation here does nothing,
    leaving concrete behavior to DCC-specific implementations.
    """
    def __init__(self, name):
        super(AbstractUndoContext, self).__init__()
        # Label for this undo step (unused by the abstract implementation).
        self.name = name
    def __enter__(self):
        # Nothing to set up.
        return None
    def __exit__(self, exc_type, exc_value, traceback):
        # Returning False means exceptions are never suppressed.
        return False
    @classmethod
    def openUndo(cls, name):
        # No undo stack to open in the abstract implementation.
        return None
    @classmethod
    def closeUndo(cls):
        return False
# register the symbol; ifNotFound=True presumably keeps any previously
# registered DCC-specific UndoContext in place — confirm against cross3d.
cross3d.registerSymbol('UndoContext', AbstractUndoContext, ifNotFound=True)
|
#!/usr/bin/env python
"""
.. module:: lheChecks
:synopsis: Check LHE file format.
.. moduleauthor:: Ursula Laa <ursula.laa@lpsc.in2p3.fr>
"""
from __future__ import print_function
from smodels.tools.ioObjects import LheStatus
def main(args):
    """Check the LHE file named by ``args.filename`` and print its status."""
    print(LheStatus(args.filename).status)
|
from pymatgen import Structure, Lattice

# Rock-salt NaCl: cubic cell, a = 5.6402 (lattice units as given).
a = 5.6402
lattice = Lattice.from_parameters(a, a, a, 90.0, 90.0, 90.0)
print(lattice)
structure = Structure.from_spacegroup(sg='Fm-3m', lattice=lattice,
                                      species=['Na', 'Cl'],
                                      coords=[[0, 0, 0], [0.5, 0, 0]])
print(structure)

from vasppy.rdf import RadialDistributionFunction

# BUG FIX: the original used "site.species_string is 'Na'". `is` tests
# object identity, not equality, so the result silently depends on CPython
# string interning (and emits a SyntaxWarning on Python 3.8+); use ==.
indices_Na = [i for i, site in enumerate(structure) if site.species_string == 'Na']
indices_Cl = [i for i, site in enumerate(structure) if site.species_string == 'Cl']
print(indices_Na)
print(indices_Cl)

# Partial RDFs from explicit site-index lists.
rdf_nana = RadialDistributionFunction(structures=[structure],
                                      indices_i=indices_Na)
rdf_clcl = RadialDistributionFunction(structures=[structure],
                                      indices_i=indices_Cl)
rdf_nacl = RadialDistributionFunction(structures=[structure],
                                      indices_i=indices_Na,
                                      indices_j=indices_Cl)

import matplotlib.pyplot as plt
plt.plot(rdf_nana.r, rdf_nana.rdf, 'k', label='Na-Na')
plt.plot(rdf_clcl.r, rdf_clcl.rdf, 'b:', label='Cl-Cl')
plt.plot(rdf_nacl.r, rdf_nacl.rdf, 'g--', label='Na-Cl')
plt.legend(loc='best', fontsize=20)
plt.xticks(fontsize=20)
plt.yticks(fontsize=20)
plt.show()

# Same data with Gaussian smearing applied to the RDFs.
plt.plot(rdf_nana.r, rdf_nana.smeared_rdf(), 'k', label='Na-Na')
plt.plot(rdf_clcl.r, rdf_clcl.smeared_rdf(sigma=0.05), 'b:', label='Cl-Cl')
plt.plot(rdf_nacl.r, rdf_nacl.smeared_rdf(sigma=0.05), 'g--', label='Na-Cl')
plt.legend(loc='best', fontsize=20)
plt.xticks(fontsize=20)
plt.yticks(fontsize=20)
plt.show()

# Equivalent construction straight from species strings.
rdf_nana = RadialDistributionFunction.from_species_strings(structures=[structure],
                                                           species_i='Na')
rdf_clcl = RadialDistributionFunction.from_species_strings(structures=[structure],
                                                           species_i='Cl')
rdf_nacl = RadialDistributionFunction.from_species_strings(structures=[structure],
                                                           species_i='Na',
                                                           species_j='Cl')
plt.plot(rdf_nana.r, rdf_nana.smeared_rdf(), 'k', label='Na-Na')
plt.plot(rdf_clcl.r, rdf_clcl.smeared_rdf(sigma=0.07), 'b:', label='Cl-Cl')
plt.plot(rdf_nacl.r, rdf_nacl.smeared_rdf(sigma=0.07), 'g--', label='Na-Cl')
plt.legend(loc='best', fontsize=20)
plt.xticks(fontsize=20)
plt.yticks(fontsize=20)
plt.show()

# RDFs averaged over an 800 K molecular-dynamics trajectory.
from pymatgen.io.vasp import Xdatcar
xd = Xdatcar('NaCl_800K_MD_XDATCAR')
rdf_nana_800K = RadialDistributionFunction.from_species_strings(
    structures=xd.structures,
    species_i='Na',
    r_max=20
)
rdf_clcl_800K = RadialDistributionFunction.from_species_strings(
    structures=xd.structures,
    species_i='Cl',
    r_max=20
)
rdf_nacl_800K = RadialDistributionFunction.from_species_strings(
    structures=xd.structures,
    species_i='Na',
    species_j='Cl',
    r_max=20
)
plt.plot(rdf_nana_800K.r, rdf_nana_800K.rdf, 'k', label='Na-Na')
plt.plot(rdf_clcl_800K.r, rdf_clcl_800K.rdf, 'b:', label='Cl-Cl')
plt.plot(rdf_nacl_800K.r, rdf_nacl_800K.rdf, 'g--', label='Na-Cl')
plt.legend(loc='best', fontsize=20)
plt.xticks(fontsize=20)
plt.yticks(fontsize=20)
plt.xlim(0, 20)
plt.show()

# Weighted multi-structure RDF (e.g. Monte-Carlo configuration weights).
struct_1 = struct_2 = struct_3 = structure
rdf_nacl_mc = RadialDistributionFunction(structures=[struct_1, struct_2, struct_3],
                                         indices_i=indices_Na, indices_j=indices_Cl,
                                         weights=[34, 27, 146])
|
# getting user info:
# 3 numbers: age (int), height (float) and weight (float);
# two strings name and nationality.
# NOTE: input() already returns str, so the redundant str() wrappers were removed.
name = input()
age = int(input())
height = float(input())
weight = float(input())
nationality = input()
# Echo the values back; numeric fields formatted to two decimals.
print(name)
print(age, "anos")
print("%.2f" % height, "de altura")
print("%.2f" % weight,"quilos")
print(nationality)
|
#
# PySNMP MIB module NAGIOS-ROOT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NAGIOS-ROOT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:07:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE(review): generated pysmi module. `mibBuilder` is not defined here —
# this file is normally exec'd by pysnmp's MibBuilder, which injects it into
# the namespace; it is not importable standalone.
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
TimeTicks, iso, Counter32, Gauge32, ObjectIdentity, MibIdentifier, Unsigned32, NotificationType, Integer32, Counter64, enterprises, IpAddress, Bits, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "iso", "Counter32", "Gauge32", "ObjectIdentity", "MibIdentifier", "Unsigned32", "NotificationType", "Integer32", "Counter64", "enterprises", "IpAddress", "Bits", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# Root OID for the Nagios enterprise subtree (1.3.6.1.4.1.20006).
nagios = ModuleIdentity((1, 3, 6, 1, 4, 1, 20006))
nagios.setRevisions(('2005-03-09 00:00', '2005-01-20 00:00',))
if mibBuilder.loadTexts: nagios.setLastUpdated('200503090000Z')
if mibBuilder.loadTexts: nagios.setOrganization('Nagios')
# Textual conventions mapping Nagios notification/state codes to names.
class NotifyType(TextualConvention, Integer32):
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))
    namedValues = NamedValues(("problem", 0), ("recovery", 1), ("acknowledgement", 2), ("flappingstart", 3), ("flappingstop", 4))
class HostStateID(TextualConvention, Integer32):
    status = 'current'
    # Note: value 2 is deliberately absent; 3 means "unreachable".
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 3))
    namedValues = NamedValues(("up", 0), ("down", 1), ("unreachable", 3))
class HostStateType(TextualConvention, Integer32):
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
    namedValues = NamedValues(("hard", 0), ("soft", 1))
class ServiceStateID(TextualConvention, Integer32):
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))
    namedValues = NamedValues(("ok", 0), ("warning", 1), ("critical", 2), ("unknown", 3))
mibBuilder.exportSymbols("NAGIOS-ROOT-MIB", PYSNMP_MODULE_ID=nagios, HostStateID=HostStateID, ServiceStateID=ServiceStateID, nagios=nagios, HostStateType=HostStateType, NotifyType=NotifyType)
|
from PySide import QtGui
from PySide import QtCore
class difftoolbar(QtGui.QToolBar):
sigCake = QtCore.Signal()
sigRemesh = QtCore.Signal()
    def __init__(self):
        """Create every toolbar action with its icon/tooltip and populate the bar."""
        super(difftoolbar, self).__init__()
        # --- calibration -------------------------------------------------
        self.actionCenterFind = QtGui.QAction(self)
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap("xicam/gui/icons_27.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionCenterFind.setIcon(icon1)
        self.actionCenterFind.setObjectName("actionCenterFind")
        self.actionCenterFind.setToolTip('Auto-calibrate AgB')
        # --- masking tools (grouped into a drop-down menu further below) -
        self.actionPolyMask = QtGui.QAction(self)
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap("xicam/gui/icons_05.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionPolyMask.setIcon(icon2)
        self.actionPolyMask.setToolTip('Polygon mask')
        self.actionPolyMask.setText("")
        self.actionPolyMask.setObjectName("actionPolyMask")
        self.actionThresholdMask = QtGui.QAction(self)
        icont = QtGui.QIcon()
        icont.addPixmap(QtGui.QPixmap("xicam/gui/icons_03.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionThresholdMask.setIcon(icont)
        self.actionThresholdMask.setToolTip('Threshold mask')
        self.actionThresholdMask.setText("")
        self.actionThresholdMask.setObjectName("actionThresholdMask")
        self.actionThresholdMask.setVisible(False)
        # --- file/session actions (created but not added to the bar) -----
        self.actionOpen = QtGui.QAction(self)
        self.actionOpen.setObjectName("actionOpen")
        self.actionOpen.setToolTip('Open')
        self.actionSaveExperiment = QtGui.QAction(self)
        self.actionSaveExperiment.setObjectName("actionSaveExperiment")
        self.actionSaveExperiment.setToolTip('Save experiment')
        self.actionLoadExperiment = QtGui.QAction(self)
        self.actionLoadExperiment.setObjectName("actionLoadExperiment")
        self.actionLoadExperiment.setToolTip('Load experiment')
        self.actionClose = QtGui.QAction(self)
        self.actionClose.setObjectName("actionClose")
        self.actionClose.setToolTip('Close Xi-cam')
        # Parent action of the masking drop-down button.
        self.actionMasking = QtGui.QAction(self)
        self.actionMasking.setToolTip('Masking')
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap("xicam/gui/icons_03.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionMasking.setIcon(icon3)
        self.actionMasking.setObjectName("actionMasking")
        # --- display toggles --------------------------------------------
        self.actionLog_Intensity = QtGui.QAction(self)
        self.actionLog_Intensity.setCheckable(True)
        self.actionLog_Intensity.setToolTip('Log intensity')
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap("xicam/gui/icons_02.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionLog_Intensity.setIcon(icon4)
        self.actionLog_Intensity.setObjectName("actionLog_Intensity")
        # Log intensity display is on by default.
        self.actionLog_Intensity.setChecked(True)
        self.actionCake = QtGui.QAction(self)
        self.actionCake.setToolTip('Cake')
        self.actionCake.setCheckable(True)
        icon5 = QtGui.QIcon()
        icon5.addPixmap(QtGui.QPixmap("xicam/gui/icons_04.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionCake.setIcon(icon5)
        self.actionCake.setObjectName("actionCake")
        self.actionRemove_Cosmics = QtGui.QAction(self)
        self.actionRemove_Cosmics.setToolTip('Remove cosmics')
        icon6 = QtGui.QIcon()
        icon6.addPixmap(QtGui.QPixmap("xicam/gui/icons_06.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
        self.actionRemove_Cosmics.setIcon(icon6)
        self.actionRemove_Cosmics.setText("")
        self.actionRemove_Cosmics.setObjectName("actionRemove_Cosmics")
        self.actionMaskLoad = QtGui.QAction(self)
        self.actionMaskLoad.setToolTip('Load mask')
        icon7 = QtGui.QIcon()
        icon7.addPixmap(QtGui.QPixmap("xicam/gui/icons_08.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
        self.actionMaskLoad.setIcon(icon7)
        self.actionMaskLoad.setText("")
        self.actionMaskLoad.setObjectName("actionMaskLoad")
        self.actionRadial_Symmetry = QtGui.QAction(self)
        self.actionRadial_Symmetry.setToolTip('Radial symmetry mask-filling')
        self.actionRadial_Symmetry.setCheckable(True)
        icon15 = QtGui.QIcon()
        icon15.addPixmap(QtGui.QPixmap("xicam/gui/icons_18.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionRadial_Symmetry.setIcon(icon15)
        self.actionRadial_Symmetry.setObjectName("actionRadial_Symmetry")
        self.actionMirror_Symmetry = QtGui.QAction(self)
        self.actionMirror_Symmetry.setToolTip('Mirror symmetry mask-filling')
        self.actionMirror_Symmetry.setCheckable(True)
        icon16 = QtGui.QIcon()
        icon16.addPixmap(QtGui.QPixmap("xicam/gui/icons_17.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionMirror_Symmetry.setIcon(icon16)
        self.actionMirror_Symmetry.setObjectName("actionMirror_Symmetry")
        self.actionShow_Mask = QtGui.QAction(self)
        self.actionShow_Mask.setToolTip('Show mask')
        self.actionShow_Mask.setCheckable(True)
        # Two-state icon: separate pixmaps for the Off and On states.
        icon17 = QtGui.QIcon()
        icon17.addPixmap(QtGui.QPixmap("xicam/gui/icons_20.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        icon17.addPixmap(QtGui.QPixmap("xicam/gui/icons_19.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
        self.actionShow_Mask.setIcon(icon17)
        self.actionShow_Mask.setObjectName("actionShow_Mask")
        # --- region-of-interest tools ------------------------------------
        self.actionPolygon_Cut = QtGui.QAction(self)
        self.actionPolygon_Cut.setToolTip('Polygon region-of-interest')
        self.actionPolygon_Cut.setCheckable(True)
        icon18 = QtGui.QIcon()
        icon18.addPixmap(QtGui.QPixmap("xicam/gui/icons_21.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionPolygon_Cut.setIcon(icon18)
        self.actionPolygon_Cut.setObjectName("actionPolygon_Cut")
        self.actionVertical_Cut = QtGui.QAction(self)
        self.actionVertical_Cut.setToolTip('Vertical region-of-interest')
        icon19 = QtGui.QIcon()
        icon19.addPixmap(QtGui.QPixmap("xicam/gui/icons_22.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionVertical_Cut.setIcon(icon19)
        self.actionVertical_Cut.setObjectName("actionVertical_Cut")
        self.actionHorizontal_Cut = QtGui.QAction(self)
        self.actionHorizontal_Cut.setToolTip('Horizontal region-of-interest')
        icon20 = QtGui.QIcon()
        icon20.addPixmap(QtGui.QPixmap("xicam/gui/icons_23.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionHorizontal_Cut.setIcon(icon20)
        self.actionHorizontal_Cut.setObjectName("actionHorizontal_Cut")
        self.actionLine_Cut = QtGui.QAction(self)
        self.actionLine_Cut.setToolTip('Line region-of-interest')
        icon21 = QtGui.QIcon()
        icon21.addPixmap(QtGui.QPixmap("xicam/gui/icons_24.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionLine_Cut.setIcon(icon21)
        self.actionLine_Cut.setObjectName("actionLine_Cut")
        self.actionRemeshing = QtGui.QAction(self)
        self.actionRemeshing.setToolTip('GIXS Ewald-sphere correction')
        self.actionRemeshing.setCheckable(True)
        icon23 = QtGui.QIcon()
        icon23.addPixmap(QtGui.QPixmap("xicam/gui/icons_25.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionRemeshing.setIcon(icon23)
        self.actionRemeshing.setObjectName("actionRemeshing")
        self.actionRefine_Center = QtGui.QAction(self)
        self.actionRefine_Center.setToolTip('Refine calibration')
        icon24 = QtGui.QIcon()
        icon24.addPixmap(QtGui.QPixmap("xicam/gui/icons_28.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionRefine_Center.setIcon(icon24)
        self.actionRefine_Center.setObjectName("actionRefine_Center")
        self.actionCalibrate_AgB = QtGui.QAction(self)
        self.actionCalibrate_AgB.setToolTip('Auto-calibrate AgB')
        icon25 = QtGui.QIcon()
        icon25.addPixmap(QtGui.QPixmap("xicam/gui/icons_29.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionCalibrate_AgB.setIcon(icon25)
        self.actionCalibrate_AgB.setObjectName("actionCalibrate_AgB")
        self.actionArc = QtGui.QAction(self)
        self.actionArc.setToolTip('Arc region-of-interest')
        icon26 = QtGui.QIcon()
        icon26.addPixmap(QtGui.QPixmap("xicam/gui/icons_32.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionArc.setIcon(icon26)
        self.actionArc.setObjectName("actionArc")
        # --- processing/export actions (hidden until enabled elsewhere) --
        self.actionProcess = QtGui.QAction(self)
        self.actionProcess.setToolTip('Process')
        icon27 = QtGui.QIcon()
        icon27.addPixmap(QtGui.QPixmap("xicam/gui/icons_34.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        icon27.addPixmap(QtGui.QPixmap("xicam/gui/icons_33.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
        self.actionProcess.setIcon(icon27)
        self.actionProcess.setObjectName("actionProcess")
        self.actionProcess.setCheckable(True)
        self.actionProcess.setVisible(False)
        self.actionVideo = QtGui.QAction(self)
        self.actionVideo.setToolTip('Export Video')
        icon28 = QtGui.QIcon()
        icon28.addPixmap(QtGui.QPixmap("xicam/gui/icons_31.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionVideo.setIcon(icon28)
        self.actionVideo.setObjectName("actionVideo")
        self.actionVideo.setVisible(False)
        # self.actionSpaceGroup = QtGui.QAction(self)
        # icon29 = QtGui.QIcon()
        # icon29.addPixmap(QtGui.QPixmap("xicam/gui/icons_35.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        # self.actionSpaceGroup.setIcon(icon29)
        # self.actionSpaceGroup.setObjectName("actionSpaceGroup")
        # self.actionSpaceGroup.setCheckable(True)
        # self.actionSpaceGroup.setVisible(False)
        self.actionCapture = QtGui.QAction(self)
        self.actionCapture.setToolTip('Capture region-of-interest')
        icon30 = QtGui.QIcon()
        icon30.addPixmap(QtGui.QPixmap("xicam/gui/icons_36.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionCapture.setIcon(icon30)
        self.actionCapture.setObjectName("actionCapture")
        self.actionCapture.setVisible(False)
        # self.actionROI = QtGui.QAction(self)
        #icon25 = QtGui.QIcon()
        #icon25.addPixmap(QtGui.QPixmap("xicam/gui/icons_29.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        #self.actionROI.setIcon(icon25)
        #self.actionROI.setObjectName("actionROI")
        # Drop-down menu of masking tools attached to the Masking button.
        menu = QtGui.QMenu()
        # menu.addAction(self.actionShow_Mask)
        menu.addAction(self.actionPolyMask)
        menu.addAction(self.actionRemove_Cosmics)
        menu.addAction(self.actionThresholdMask)
        menu.addAction(self.actionMaskLoad)
        toolbuttonMasking = QtGui.QToolButton()
        toolbuttonMasking.setDefaultAction(self.actionMasking)
        toolbuttonMasking.setMenu(menu)
        toolbuttonMasking.setPopupMode(QtGui.QToolButton.InstantPopup)
        toolbuttonMaskingAction = QtGui.QWidgetAction(self)
        toolbuttonMaskingAction.setDefaultWidget(toolbuttonMasking)
        # Populate the toolbar in display order.
        self.setIconSize(QtCore.QSize(32, 32))
        self.addAction(self.actionProcess)
        self.addAction(self.actionVideo)
        self.addAction(self.actionCalibrate_AgB)
        # self.addAction(self.actionCenterFind) # Hide old buttons
        #self.addAction(self.actionRefine_Center)
        self.addAction(self.actionShow_Mask)
        self.addAction(toolbuttonMaskingAction)
        self.addAction(self.actionCake)
        self.addAction(self.actionRemeshing)
        self.addAction(self.actionArc)
        self.addAction(self.actionLine_Cut)
        self.addAction(self.actionVertical_Cut)
        self.addAction(self.actionHorizontal_Cut)
        self.addAction(self.actionLog_Intensity)
        self.addAction(self.actionRadial_Symmetry)
        self.addAction(self.actionMirror_Symmetry)
        # self.addAction(self.actionSpaceGroup)
        self.addAction(self.actionCapture)
def connecttriggers(self, calibrate, centerfind, refine, showmask, cake, remesh, linecut, vertcut, horzcut, logint,
                    radialsym, mirrorsym, roi, arc, polymask, process=None, video=None,
                    capture=None, removecosmics=None, thresholdmask=None):
    """Wire the toolbar actions to the supplied slot callables.

    Mandatory slots are connected unconditionally.  Optional slots
    (process, video, capture, removecosmics, thresholdmask) are connected
    only when provided, and their actions are made visible at the same
    time.  Cake and remesh go through the local toggle handlers so the
    two modes stay mutually exclusive; the real work is driven by the
    sigCake/sigRemesh signals.  Note: 'roi' is currently unused (the ROI
    action is commented out).
    """
    # Always-on action/slot pairs, connected in one pass.
    wiring = [
        (self.actionCalibrate_AgB, calibrate),
        (self.actionCenterFind, centerfind),
        (self.actionRefine_Center, refine),
        (self.actionShow_Mask, showmask),
        (self.actionCake, self.caketoggle),        # toggle enforces cake/remesh exclusivity
        (self.actionRemeshing, self.remeshtoggle),
        (self.actionLine_Cut, linecut),
        (self.actionVertical_Cut, vertcut),
        (self.actionHorizontal_Cut, horzcut),
        (self.actionPolyMask, polymask),
        (self.actionLog_Intensity, logint),
        (self.actionRadial_Symmetry, radialsym),
        (self.actionMirror_Symmetry, mirrorsym),
        (self.actionArc, arc),
    ]
    for action, slot in wiring:
        action.triggered.connect(slot)
    # The toggle handlers re-emit through these signals once exclusivity is resolved.
    self.sigCake.connect(cake)
    self.sigRemesh.connect(remesh)
    # Optional features: reveal the action and hook it up only when a slot was given.
    optional = [
        (self.actionProcess, process),
        (self.actionVideo, video),
        (self.actionCapture, capture),
        (self.actionRemove_Cosmics, removecosmics),
        (self.actionThresholdMask, thresholdmask),
    ]
    for action, slot in optional:
        if slot is not None:
            action.setVisible(True)
            action.triggered.connect(slot)
def caketoggle(self):
    """Handle a click on the cake action.

    Cake and remesh are mutually exclusive display modes, so switching
    cake on clears the remesh toggle before re-emitting sigCake.
    """
    cake_on = self.actionCake.isChecked()
    if cake_on:
        self.actionRemeshing.setChecked(False)
    self.sigCake.emit()
def remeshtoggle(self):
    """Handle a click on the remesh action.

    Mirror of caketoggle: switching remesh on clears the cake toggle
    (the two modes are mutually exclusive) before re-emitting sigRemesh.
    """
    remesh_on = self.actionRemeshing.isChecked()
    if remesh_on:
        self.actionCake.setChecked(False)
    self.sigRemesh.emit()
|
# -*- coding: utf-8 -*-
# Packaging script for PieSeis.
#
# Fix: distutils.core.setup() silently ignores `install_requires`, so the
# declared lxml dependency was never enforced (distutils is also removed in
# Python 3.12).  Prefer setuptools, falling back to distutils only on very
# old environments where setuptools is unavailable.
try:
    from setuptools import setup
except ImportError:  # pragma: no cover - legacy environments only
    from distutils.core import setup

setup(
    name='PieSeis',
    version='0.1dev',
    author=u"Asbjørn Alexander Fellinghaug",
    author_email="asbjorn <dot> fellinghaug _dot_ com",
    packages=['pieseis'],
    license='GNU LGPL',
    long_description="""
    PieSeis is a Python library for reading and writing JavaSeis dataset.
    """,
    install_requires=[
        "lxml>=3.1.0"
    ]
)
|
def exact_change_recursive(amount, coins):
    """ Return the number of different ways a change of 'amount' can be
    given using denominations given in the list of 'coins'
    >>> exact_change_recursive(10,[50,20,10,5,2,1])
    11
    >>> exact_change_recursive(100,[100,50,20,10,5,2,1])
    4563
    """
    assert amount >= 0
    if amount == 0:
        # Paying nothing has exactly one solution: use no coins at all.
        return 1
    if not coins:
        # A positive amount remains but there are no denominations left.
        return 0
    denom, rest = coins[0], coins[1:]
    # Spend 0, denom, 2*denom, ... of the current denomination; each choice
    # leaves a remainder to be paid with the remaining denominations.
    return sum(exact_change_recursive(amount - spent, rest)
               for spent in range(0, amount + 1, denom))
def exact_change_recursive_print(amount, coins, current_change=""):
    """Like exact_change_recursive, but also print every solution found.

    Each printed line lists the coin count per denomination, e.g.
    '0x50ct,1x20ct,...'.  Returns the number of solutions.
    """
    assert amount >= 0
    if amount == 0:
        # A complete solution; strip the leading separator comma.
        print(current_change[1:])
        return 1
    if not coins:
        # Positive amount but no denominations left -> dead end.
        return 0
    denom, rest = coins[0], coins[1:]
    solutions = 0
    # Use 0, 1, ..., amount // denom coins of the current denomination.
    for n_coins in range(amount // denom + 1):
        described = current_change + ",{}x{}ct".format(n_coins, denom)
        solutions += exact_change_recursive_print(amount - n_coins * denom, rest, described)
    return solutions
def exact_change_dynamic(amount, coins):
    """
    counts[x] counts the number of ways an amount of x can be made in exact change out of a subset of coins
    given in the list of denominations 'coins'.
    Initially there are no possibilities, if no coins are allowed
    >>> exact_change_dynamic(20,[50,20,10,5,2,1])
    [1, 1, 2, 2, 3, 4, 5, 6, 7, 8, 11, 12, 15, 16, 19, 22, 25, 28, 31, 34, 41]
    >>> exact_change_dynamic(100,[100,50,20,10,5,2,1])[-10:]
    [3229, 3376, 3484, 3631, 3778, 3925, 4072, 4219, 4366, 4563]
    """
    # ways[0] = 1: paying nothing needs no coins; every other amount starts at 0.
    ways = [1] + [0] * amount
    # Admit one denomination at a time; after each pass ways[x] counts the
    # solutions drawn only from the denominations admitted so far.
    for denomination in coins:
        for x in range(denomination, amount + 1):
            # One extra coin of this denomination extends every way to pay
            # the remainder x - denomination.
            ways[x] += ways[x - denomination]
    return ways
# When executed as a script, run the doctests embedded in the docstrings above.
if __name__ == "__main__":
    import doctest
    doctest.testmod(verbose=True)
|
import sys
from data_access_layer.mongo_db.mongo_db_atlas import MongoDBOperation
from exception_layer.generic_exception.generic_exception import GenericException as DbOperationMongoDbException
from integration_layer.file_management.file_manager import FileManager
from logging_layer.logger.logger import AppLogger
from project_library_layer.initializer.initializer import Initializer
class DbOperationMongoDB:
    """Moves validated raw training files between cloud storage and MongoDB.

    Good CSV files are bulk-inserted into the training database's
    Good_Raw_Data collection; files that fail insertion are quarantined in
    the bad-file directory.  Progress is written through the application
    logger.  The two public insert methods previously duplicated ~50 lines
    each; they now share _insert_good_files().
    """

    def __init__(self, project_id, executed_by, execution_id, cloud_storage, socket_io=None):
        """Set up DB/file-manager handles and the good/bad raw-data paths for the project."""
        try:
            self.mongodb = MongoDBOperation()
            self.file_manager = FileManager(cloud_storage)
            self.initializer = Initializer()
            self.project_id = project_id
            self.logger_db_writer = AppLogger(project_id=project_id, executed_by=executed_by, execution_id=execution_id,
                                              socket_io=socket_io)
            self.good_file_path = self.initializer.get_training_good_raw_data_file_path(self.project_id)
            self.bad_file_path = self.initializer.get_training_bad_raw_data_file_path(self.project_id)
            self.logger_db_writer.log_database = self.initializer.get_training_database_name()
        except Exception as e:
            db_operation_mongo_db_exception = DbOperationMongoDbException(
                "Failed during instantiation of object in module [{0}] class [{1}] method [{2}]"
                .format(self.__module__, DbOperationMongoDB.__name__,
                        "__init__"))
            raise Exception(db_operation_mongo_db_exception.error_message_detail(str(e), sys)) from e

    def _insert_good_files(self, column_name, select_columns, caller_name):
        """Shared loader behind both public insert methods.

        Drops the existing Good_Raw_Data collection, then inserts every CSV
        found in the good-file directory.  A file that cannot be inserted is
        logged and moved to the bad-file directory; the batch continues.

        :param column_name: column names to assign to each dataframe.
        :param select_columns: when True, first restrict the dataframe to
            `column_name` (the zomato variant's extra step).
        :param caller_name: public method name, used only in error reporting.
        :return: True when there was nothing to load, None otherwise
            (preserves the original methods' return behavior).
        """
        try:
            self.logger_db_writer.log_collection_name = self.initializer.get_db_insert_log_collection_name()
            training_database_name = self.initializer.get_training_database_name()
            self.logger_db_writer.log(
                "Dropping existing collection if present in database {}".format(training_database_name))
            good_raw_data_collection_name = self.initializer.get_training_good_raw_data_collection_name(self.project_id)
            self.mongodb.drop_collection(training_database_name, good_raw_data_collection_name)
            self.logger_db_writer.log(
                "Starting loading of good files in database:training_database and collection: Good_Raw_Data")
            response = self.file_manager.list_files(self.good_file_path)
            if not response['status']:
                # Listing failed -> nothing to load; treated as success upstream.
                return True
            files = response.get('files_list')
            if files is None:
                return True
            self.logger_db_writer.log("{} files found in {} ".format(len(files), self.good_file_path))
            for file in files:
                try:
                    self.logger_db_writer.log("Insertion of file " + file + " started...")
                    response = self.file_manager.read_file_content(self.good_file_path, file)
                    if not response['status']:
                        continue
                    df = response.get('file_content')
                    if df is None:
                        continue
                    if select_columns:
                        # Keep only the expected columns before renaming them.
                        df = df[column_name]
                    df.columns = column_name
                    self.mongodb.insert_dataframe_into_collection(training_database_name,
                                                                  good_raw_data_collection_name,
                                                                  df)
                    self.logger_db_writer.log("File: {0} loaded successfully".format(file))
                except Exception as e:
                    # One bad file must not abort the batch: log, quarantine, continue.
                    self.logger_db_writer.log(str(e))
                    self.file_manager.move_file(self.good_file_path, self.bad_file_path, file, over_write=True)
                    self.logger_db_writer.log(
                        "File " + file + " was not loaded successfully hence moved to dir:" + self.bad_file_path)
        except Exception as e:
            db_operation_mongo_db_exception = DbOperationMongoDbException(
                "Failed in module [{0}] class [{1}] method [{2}]"
                .format(self.__module__, DbOperationMongoDB.__name__, caller_name))
            raise Exception(db_operation_mongo_db_exception.error_message_detail(str(e), sys)) from e

    def insert_into_table_good_data(self, column_name):
        """
        Description: Load all csv file into mongo db database "training_database" ,collection:"Good_Raw_Data"
        :return:
        """
        return self._insert_good_files(column_name, select_columns=False,
                                       caller_name=self.insert_into_table_good_data.__name__)

    def insert_into_table_good_data_zomato(self, column_name):
        """
        Description: Load all csv file into mongo db database "training_database" ,collection:"Good_Raw_Data",
        selecting only `column_name` columns from each file first (zomato variant).
        :return:
        """
        return self._insert_good_files(column_name, select_columns=True,
                                       caller_name=self.insert_into_table_good_data_zomato.__name__)

    def selecting_data_from_table_into_csv(self):
        """Export the Good_Raw_Data collection back into the training input CSV in cloud storage."""
        try:
            directory_name = self.initializer.get_training_file_from_db_path(self.project_id)
            # Directory name already contains the project name, hence only the file name is needed.
            file_name = self.initializer.get_training_input_file_name()
            database_name = self.initializer.get_training_database_name()
            collection_name = self.initializer.get_export_to_csv_log_collection_name()
            training_collection = self.initializer.get_training_good_raw_data_collection_name(self.project_id)
            self.logger_db_writer.log_collection_name = collection_name
            msg = "starting of loading of database:training_database,collection:Good_Raw_Data records into InputFile.csv"
            self.logger_db_writer.log(msg)
            df = self.mongodb.get_dataframe_of_collection(database_name, training_collection)
            msg = "Good_Raw_data has been loaded into pandas dataframe"
            self.logger_db_writer.log(msg)
            df.reset_index(drop=True, inplace=True)
            self.file_manager.write_file_content(directory_name, file_name, df, over_write=True)
            msg = "InputFile.csv created successfully in directory" + directory_name
            self.logger_db_writer.log(msg)
        except Exception as e:
            db_operation_mongo_db_exception = DbOperationMongoDbException(
                "Failed in module [{0}] class [{1}] method [{2}]"
                .format(self.__module__, DbOperationMongoDB.__name__,
                        self.selecting_data_from_table_into_csv.__name__))
            raise Exception(db_operation_mongo_db_exception.error_message_detail(str(e), sys)) from e
|
import persistent
import BTrees.OOBTree
import re
import tempfile
from subprocess import Popen, PIPE
from stf.common.out import *
from stf.core.dataset import __datasets__
from stf.core.connections import __group_of_group_of_connections__
from stf.core.models_constructors import __modelsconstructors__
from stf.core.notes import __notes__
###############################
###############################
###############################
class Model(persistent.Persistent):
    """
    The Model

    A model is the string of states generated from the flows of a single
    connection by a model constructor.  Instances are stored in ZODB, so
    several getters guard attribute access with try/except AttributeError:
    objects persisted by older code versions may lack newer attributes,
    and del_note() removes note_id entirely.
    """
    def __init__(self, id):
        self.id = id                    # model id == connection id (the 4-tuple)
        self.state = ''                 # concatenated state letters, one per flow
        self.note_id = False            # id of the attached note, False when none
        self.last_flow_time = ''
        self.constructor = ''           # models constructor used to build states
        self.label_id = ''
        self.label_name = ''

    def get_id(self):
        return self.id

    def add_last_flow_time(self, time):
        """ Used to compute during visualizations the time to wait """
        self.last_flow_time = time

    def get_last_flow_time(self):
        try:
            return self.last_flow_time
        except AttributeError:
            # Instance persisted before this attribute existed.
            return False

    def add_flow(self, flow):
        """ Get a flow and generate a state to store"""
        state = self.constructor.get_state(flow, self.get_id())
        if state:
            self.state += state
            return True
        else:
            return False

    def set_constructor(self, constructor):
        """ Set the constructor of the model"""
        self.constructor = constructor

    def get_constructor(self):
        return self.constructor

    def get_state(self):
        return self.state

    def set_note_id(self, note_id):
        self.note_id = note_id

    def get_note_id(self):
        try:
            return self.note_id
        except AttributeError:
            # Bug fix: del_note() removes the attribute with 'del self.note_id',
            # so a later access raises AttributeError, not KeyError (which was
            # caught before and let the error escape).  get_short_note()
            # already handles the same access this way.
            return False

    def edit_note(self, note_id):
        """ Edit a note """
        __notes__.edit_note(note_id)

    def add_note(self):
        """ Add a note to the model """
        note_id = __notes__.new_note()
        self.set_note_id(note_id)

    def del_note(self):
        """ Delete the note related with this model """
        try:
            # First delete the note
            note_id = self.note_id
            __notes__.del_note(note_id)
            # Then delete the reference to the note
            del self.note_id
        except AttributeError:
            # Note does not exist, but don't print anything because there are a lot of models to delete
            pass

    def get_short_note(self):
        """ Return a short text of the note """
        try:
            note_id = self.note_id
            return __notes__.get_short_note(note_id)
        except AttributeError:
            return ''

    def del_label_id(self, label_id):
        """ Del the label id"""
        if self.label_id == label_id:
            self.label_id = False
            self.label_name = ""

    def del_label_name(self, label_name):
        """ Del the label name"""
        try:
            if self.label_name == label_name:
                self.label_name = ""
                self.label_id = False
        except AttributeError:
            # No label name? ok.. carry on
            pass

    def warn_labels(self):
        """ Warn the user that a label referencing this model may now be orphaned. """
        labelid = self.get_label_id()
        if labelid:
            print_warning('The label {} should be deleted by hand if not used anymore.'.format(self.get_label_id()))

    def set_label_id(self, label_id):
        """ Set the label id"""
        self.label_id = label_id

    def get_label_id(self):
        try:
            return self.label_id
        except AttributeError:
            return False

    def set_label_name(self, name):
        """ Set the label name. We know that this is not ok and we should only store the label id, but we can not cross import modules, so this is the best way I know how to solve it"""
        self.label_name = name

    def get_label_name(self):
        """ Return the label name for this model"""
        try:
            return self.label_name
        except:
            return ''

    def get_flow_label(self, model_group_id):
        """ Returns the label in the first flow on the connections """
        # Horrible to get the model group id in a parameter... i know
        # Get the group of connections id
        group_of_connections_id = int(model_group_id.split('-')[0])
        group_of_connections = __group_of_group_of_connections__.get_group(group_of_connections_id)
        # Get the flow label. This is horrible and we should not do it, but we need to access the first connection in the list... so just access the dict directly...
        connection = group_of_connections.connections[self.get_id()]
        return connection.get_label()
###############################
###############################
###############################
class Group_of_Models(persistent.Persistent):
    """Holds every Model generated from one group of connections of a dataset
    with one model constructor.  Persisted in ZODB.

    The optional display/delete filter (self.filter) is parsed once by
    construct_filter() and evaluated per model by apply_filter().
    """
    def __init__(self, id):
        """ This class holds all the models for a dataset"""
        self.id = id
        self.models = BTrees.OOBTree.BTree()
        self.constructor_id = -1
        self.dataset_id = -1
        self.group_connection_id = -1

    def set_constructor_id(self, constructor_id):
        self.constructor_id = constructor_id

    def get_constructor_id(self):
        try:
            return self.constructor_id
        except AttributeError:
            # Instance persisted before this attribute existed.
            return 'Not Stored'

    def set_dataset_id(self, dataset_id):
        self.dataset_id = dataset_id

    def get_dataset_id(self):
        return self.dataset_id

    def set_group_connection_id(self, group_connection_id):
        """ Receives the id of the group of connections that this group of models is related to """
        self.group_connection_id = group_connection_id

    def get_group_connection_id(self):
        return self.group_connection_id

    def get_models(self):
        return self.models.values()

    def get_model(self, id):
        # Return the Model object, or False when the id is unknown.
        try:
            return self.models[id]
        except KeyError:
            return False

    def get_id(self):
        return self.id

    def generate_models(self):
        """ Generate all the individual models. We are related with only one dataset and connection group. """
        # Get the group of connections from the id
        group_of_connections = __group_of_group_of_connections__.get_group(self.get_group_connection_id())
        # For each connection
        for connection in group_of_connections.get_connections():
            # Create its model. Remember that the connection id and the model id is the 4-tuple
            model_id = connection.get_id()
            new_model = Model(model_id)
            # Set the constructor for this model. Each model has a specific way of constructing the states
            #new_model.set_constructor(__modelsconstructors__.get_default_constructor())
            constructor_id = self.get_constructor_id()
            new_model.set_constructor(__modelsconstructors__.get_constructor(constructor_id))
            for flow in connection.get_flows():
                # Try to add the flow
                if not new_model.add_flow(flow):
                    self.delete_model_by_id(new_model.get_id())
                    # The flows are not ordered. Delete the truckated models
                    # NOTE(review): __groupofgroupofmodels__ is defined elsewhere in
                    # this module (not visible here) — the whole group is dropped
                    # when a single flow cannot be modeled.
                    __groupofgroupofmodels__.delete_group_of_models(self.get_id())
                    return False
            self.models[model_id] = new_model

    def construct_filter(self, filter):
        """ Get the filter string and decode all the operations """
        # If the filter string is empty, delete the filter variable
        if not filter:
            try:
                del self.filter
            except:
                pass
            return True
        self.filter = []
        # Get the individual parts. We only support and's now.
        for part in filter:
            # Get the key
            try:
                key = re.split('<|>|=|\!=', part)[0]
                value = re.split('<|>|=|\!=', part)[1]
            except IndexError:
                # No < or > or = or != in the string. Just stop.
                break
            # Probe for each operator; the last match wins, and '!=' is
            # deliberately checked before plain '=' so it is not shadowed.
            try:
                part.index('<')
                operator = '<'
            except ValueError:
                pass
            try:
                part.index('>')
                operator = '>'
            except ValueError:
                pass
            # We should search for != before =
            try:
                part.index('!=')
                operator = '!='
            except ValueError:
                # Now we search for =
                try:
                    part.index('=')
                    operator = '='
                except ValueError:
                    pass
            self.filter.append((key, operator, value))

    def apply_filter(self, model):
        """ Use the stored filter to know what we should match"""
        # Every stored (key, operator, value) tuple must match (logical AND).
        responses = []
        try:
            self.filter
        except AttributeError:
            # If we don't have any filter string, just return true and show everything
            return True
        # Check each filter
        for filter in self.filter:
            key = filter[0]
            operator = filter[1]
            value = filter[2]
            if key == 'statelength':
                state = model.get_state()
                if operator == '<':
                    if len(state) < int(value):
                        responses.append(True)
                    else:
                        responses.append(False)
                elif operator == '>':
                    if len(state) > int(value):
                        responses.append(True)
                    else:
                        responses.append(False)
                elif operator == '=':
                    if len(state) == int(value):
                        responses.append(True)
                    else:
                        responses.append(False)
            elif key == 'name':
                # 'name' matches by substring, not equality.
                name = model.get_id()
                if operator == '=':
                    if value in name:
                        responses.append(True)
                    else:
                        responses.append(False)
                elif operator == '!=':
                    if value not in name:
                        responses.append(True)
                    else:
                        responses.append(False)
            elif key == 'labelname':
                # For filtering based on the label assigned to the model with stf (contrary to the flow label)
                labelname = model.get_label_name()
                if operator == '=':
                    if value in labelname:
                        responses.append(True)
                    else:
                        responses.append(False)
                elif operator == '!=':
                    if value not in labelname:
                        responses.append(True)
                    else:
                        responses.append(False)
            elif key == 'flowlabel':
                flowlabel = model.get_flow_label(self.get_id())
                if operator == '=':
                    if value in flowlabel:
                        responses.append(True)
                    else:
                        responses.append(False)
                elif operator == '!=':
                    if value not in flowlabel:
                        responses.append(True)
                    else:
                        responses.append(False)
            else:
                # Unknown filter key: reject the model outright.
                return False
        for response in responses:
            if not response:
                return False
        return True

    def list_models(self, filter, max_letters=0):
        """ Show the matching models through a pager (less); max_letters truncates the state column. """
        all_text=' Note | Label | Model Id | State |\n'
        # construct the filter
        self.construct_filter(filter)
        amount = 0
        for model in self.models.values():
            if self.apply_filter(model):
                if max_letters:
                    all_text += '[{:3}] | {:61} | {:50} | {}\n'.format(model.get_note_id() if model.get_note_id() else '', model.get_label_name() if model.get_label_name() else '', cyan(model.get_id()), model.get_state()[:max_letters])
                else:
                    all_text += '[{:3}] | {:61} | {:50} | {}\n'.format(model.get_note_id() if model.get_note_id() else '', model.get_label_name() if model.get_label_name() else '', cyan(model.get_id()), model.get_state())
                amount += 1
        all_text += 'Amount of models printed: {}'.format(amount)
        # Page the output through 'less' via a temp file.
        f = tempfile.NamedTemporaryFile()
        f.write(all_text)
        f.flush()
        p = Popen('less -R ' + f.name, shell=True, stdin=PIPE)
        p.communicate()
        sys.stdout = sys.__stdout__
        f.close()

    def export_models(self, filter):
        """ Export the models in this group that match the filter as ascii to a file"""
        # construct the filter
        self.construct_filter(filter)
        f = tempfile.NamedTemporaryFile(mode='w+b', delete=False)
        print 'Storing the models in filename {} using TAB as field separator.'.format(f.name)
        text = 'ModelId\tState\tLabelName\n'
        f.write(text)
        # NOTE(review): amount starts at 1, so the reported export count is
        # one higher than the number of models written — confirm intent.
        amount = 1
        for model in self.models.values():
            if self.apply_filter(model):
                text = '{}\t{}\t{}\n'.format(model.get_id(), model.get_state(),model.get_label_name())
                f.write(text)
                amount += 1
        f.close()
        print '{} models exported'.format(amount)

    def delete_model_by_id(self, model_id):
        """ Delete one model given a model id """
        try:
            # Before deleting the model, delete its relation in the constructor
            model = self.models[model_id]
            model.constructor.del_model(model_id)
            # Delete the notes in the model
            model.del_note()
            # Say that the labels should be deleted by hand
            model.warn_labels()
            # Now delete the model
            self.models.pop(model_id)
            return True
        except KeyError:
            print_error('That model does not exists.')
            return False

    def delete_model_by_filter(self, filter):
        """ Delete the models using the filter. Do not delete the related connections """
        # set the filter
        self.construct_filter(filter)
        amount = 0
        ids_to_delete = []
        for model in self.models.values():
            if self.apply_filter(model):
                ids_to_delete.append(model.get_id())
                amount += 1
        # We should delete the models AFTER finding them, if not, for some reason the following model after a match is missed.
        for id in ids_to_delete:
            self.delete_model_by_id(id)
        print_info('Amount of modules deleted: {}'.format(amount))
        # Add an auto note
        self.add_note_to_dataset('{} models deleted from the group id {} using the filter {}.'.format(amount, self.get_id(), filter))

    def count_models(self, filter=''):
        """ Print how many models match the given filter. """
        # set the filter
        self.construct_filter(filter)
        amount = 0
        for model in self.models.values():
            if self.apply_filter(model):
                amount += 1
        print_info('Amount of modules filtered: {}'.format(amount))

    def has_model(self, id):
        # BTree.has_key: membership test (Python 2 style).
        if self.models.has_key(id):
            return True
        else:
            return False

    def plot_histogram(self, filter):
        """ Plot the histogram of statelengths """
        # Construct the filter
        self.construct_filter(filter)
        """ Plot the histogram of length of states using an external tool """
        # Locate the external 'distribution' tool via an interactive bash lookup.
        dist_path,error = Popen('bash -i -c "type distribution"', shell=True, stderr=PIPE, stdin=PIPE, stdout=PIPE).communicate()
        if not error:
            distribution_path = dist_path.split()[0]
            all_text_state = ''
            for model in self.get_models():
                if self.apply_filter(model):
                    state_len = str(len(model.get_state()))
                    all_text_state += state_len + '\n'
            print 'Key=Length of state'
            Popen('echo \"' + all_text_state + '\" |distribution --height=900 | sort -nk1', shell=True).communicate()
        else:
            print_error('For ploting the histogram we use the tool https://github.com/philovivero/distribution. Please install it in the system to enable this command.')

    def list_notes(self, filter_string=''):
        """ List the notes in all the models """
        all_text='| Note Id | Model Id | Note(...) |\n'
        # construct the filter
        self.construct_filter(filter_string)
        amount = 0
        for model in self.get_models():
            if self.apply_filter(model) and model.get_short_note():
                note_id = model.get_note_id()
                if note_id:
                    all_text += '{} | {:40} | {}\n'.format(note_id, model.get_id(), model.get_short_note())
                    amount += 1
        all_text += 'Amount of models listed: {}'.format(amount)
        # Page the listing through 'less' via a temp file.
        f = tempfile.NamedTemporaryFile()
        f.write(all_text)
        f.flush()
        p = Popen('less -R ' + f.name, shell=True, stdin=PIPE)
        p.communicate()
        sys.stdout = sys.__stdout__
        f.close()

    def edit_note_in_model(self, model_id):
        """ Edit note in model """
        try:
            model = self.models[model_id]
            if model.get_note_id():
                note_id = model.get_note_id()
                model.edit_note(note_id)
            else:
                # No note yet: create one first, then open it for editing.
                print_info('Model {} does not have a note attached yet.'.format(model.get_id()))
                model.add_note()
                note_id = model.get_note_id()
                model.edit_note(note_id)
        except KeyError:
            print_error('That model does not exists.')

    def del_note_in_model(self, model_id):
        """ Delete the note in a model """
        try:
            model = self.models[model_id]
            model.del_note()
        except KeyError:
            print_error('That model does not exists.')

    def add_note_to_dataset(self, text_to_add):
        """ Add an auto note to the dataset where this group of model belongs """
        try:
            note_id = __datasets__.current.get_note_id()
        except AttributeError:
            # The dataset may be already deleted?
            return False
        if note_id:
            __notes__.add_auto_text_to_note(note_id, text_to_add)
        else:
            # There was no note yet. Create it and add the text.
            note_id = __notes__.new_note()
            __datasets__.current.set_note_id(note_id)
            __notes__.add_auto_text_to_note(note_id, text_to_add)
###############################
###############################
###############################
class Group_of_Group_of_Models(persistent.Persistent):
def __init__(self):
""" This class holds all the groups of models"""
self.group_of_models = BTrees.OOBTree.BTree()
def get_group(self, group_id):
""" Given the id of a group of models, return its object """
try:
return self.group_of_models[group_id]
except KeyError:
return False
def get_groups(self):
return self.group_of_models.values()
def get_groups_ids(self):
return self.group_of_models.keys()
def list_groups(self):
print_info('Groups of Models')
# If we selected a dataset, just print the one belonging to the dataset
if __datasets__.current:
rows = []
for group in self.group_of_models.values():
if group.get_dataset_id() == __datasets__.current.get_id():
rows.append([group.get_id(), group.get_constructor_id(), len(group.get_models()), __datasets__.current.get_id(), __datasets__.current.get_name() ])
print(table(header=['Group of Model Id', 'Constructor ID', 'Amount of Models', 'Dataset Id', 'Dataset Name'], rows=rows))
# Otherwise print them all
else:
rows = []
for group in self.group_of_models.values():
# Get the dataset based on the dataset id stored from this group
dataset = __datasets__.get_dataset(group.get_dataset_id())
rows.append([group.get_id(), group.get_constructor_id(), len(group.get_models()), dataset.get_id(), dataset.get_name() ])
print(table(header=['Group of Model Id', 'Constructor ID', 'Amount of Models', 'Dataset Id', 'Dataset Name'], rows=rows))
def delete_group_of_models(self, id):
"""Get the id of a group of models and delete it"""
try:
# Get the group
group = self.group_of_models[id]
except KeyError:
print_error('There is no such an id for a group of models.')
return False
# First delete all the the models in the group
ids_to_delete = []
for model in group.get_models():
model_id = model.get_id()
ids_to_delete.append(model_id)
# We should delete the models AFTER finding them, if not, for some reason the following model after a match is missed.
amount = 0
for modelid in ids_to_delete:
if group.delete_model_by_id(modelid):
amount += 1
print_info('Deleted {} models inside the group'.format(amount))
# Now delete the model
self.group_of_models.pop(id)
# Here we should put all the t1 and t2 of the models in zero somehow????
print_info('Deleted group of models with id {}'.format(id))
# Add an auto note
group.add_note_to_dataset('Deleted group of models id {}.'.format(id))
def delete_group_of_models_with_dataset_id(self, target_dataset_id):
"""Get the id of a dataset and delete all the models that were generated from it"""
for group in self.group_of_models.values():
dataset_id_of_group = group.get_dataset_id()
group_id = group.get_id()
if dataset_id_of_group == target_dataset_id:
# First delete all the the models in the group
group.delete_model_by_filter('statelength>0')
# Now delete the model
self.group_of_models.pop(group_id)
print_info('Deleted group of models with id {}'.format(group_id))
def generate_group_of_models(self, constructor_id):
    """Create the group of models for the current dataset using the given constructor.

    The group id is the dataset id plus the constructor id, because the
    same connections can be modeled by different constructors. If a group
    already exists for that id, the user is asked whether to recreate it.
    Returns False on any precondition failure or user refusal.
    """
    if not __datasets__.current:
        print_error('There is no dataset selected.')
        return False
    # Get the id for the current dataset
    dataset_id = __datasets__.current.get_id()
    # We need a group of connections for this dataset before we can model it.
    # BUGFIX: previously a falsy lookup printed an error but fell through,
    # leaving group_connection_id unbound and raising NameError later.
    group_connection = __group_of_group_of_connections__.get_group(dataset_id)
    if not group_connection:
        print_info('There were no connections for this dataset. Generate them first.')
        return False
    group_connection_id = group_connection.get_id()
    # The id of this group of models is the id of the dataset + the id of the
    # model constructor. Because we can have the same connnections modeled by
    # different constructors.
    group_of_models_id = str(dataset_id) + '-' + str(constructor_id)
    # If we already have a group of models, ask what to do
    try:
        group_of_models = self.group_of_models[group_of_models_id]
        print_warning('There is already a group of models for this dataset. Do you want to delete the current models and create a new one?')
        answer = raw_input('YES/NO?')
        if answer == 'YES':
            # First delete the old models
            self.delete_group_of_models(group_of_models_id)
        else:
            return False
    except KeyError:
        # First time. Not to repeat the code, we leave this empty and do a new try.
        pass
    # Do we have the group of models for this id?
    try:
        group_of_models = self.group_of_models[group_of_models_id]
    except KeyError:
        # First time: create the group of models
        group_of_models = Group_of_Models(group_of_models_id)
        # Set the group of connections they will be using
        group_of_models.set_group_connection_id(group_connection_id)
        # Set the dataset id for this group of models
        group_of_models.set_dataset_id(dataset_id)
        # Set the model constructor used for all the models
        group_of_models.set_constructor_id(constructor_id)
        # Store the group
        self.group_of_models[group_of_models_id] = group_of_models
        # Update the dataset to include this group of models
        __datasets__.current.add_group_of_models(group_of_models_id)
        # Generate the models
        group_of_models.generate_models()
def list_models_in_group(self, id, filter, max_letters=0):
    """Print the models of the group selected by id, honoring the filter."""
    try:
        self.group_of_models[id].list_models(filter, max_letters)
    except KeyError:
        print_error('Inexistant id of group of models.')
def export_models_in_group(self, id, filter):
    """Export the models of the group selected by id, honoring the filter."""
    try:
        self.group_of_models[id].export_models(filter)
    except KeyError:
        print_error('Inexistant id of group of models.')
def delete_a_model_from_the_group_by_id(self, group_of_models_id, model_id):
    """Delete one model (by its id) from the given group of models."""
    # Checking for a current dataset is not strictly necessary, but it is
    # a nice precaution.
    if not __datasets__.current:
        print_error('There is no dataset selected.')
        return
    try:
        group_of_models = self.group_of_models[group_of_models_id]
    except KeyError:
        print_error('No such group of models id available.')
        return False
    group_of_models.delete_model_by_id(model_id)
    # Leave an automatic note recording the deletion
    group_of_models.add_note_to_dataset('Model {} deleted from the group of models id {}.'.format(model_id, group_of_models.get_id()))
def delete_a_model_from_the_group_by_filter(self, group_of_models_id, filter=''):
    """Delete from the given group every model matching the filter."""
    # Checking for a current dataset is not strictly necessary, but it is
    # a nice precaution.
    if not __datasets__.current:
        print_error('There is no dataset selected.')
        return
    try:
        group_of_models = self.group_of_models[group_of_models_id]
    except KeyError:
        print_error('No such group of models id available.')
        return False
    group_of_models.delete_model_by_filter(filter)
def count_models_in_group(self, id, filter=''):
    """Print how many models in the group match the filter."""
    try:
        self.group_of_models[id].count_models(filter)
    except KeyError:
        print_error('No such group of models.')
def plot_histogram(self, group_of_models_id, filter=""):
    """Plot the histogram for the group selected by id, honoring the filter."""
    try:
        self.group_of_models[group_of_models_id].plot_histogram(filter)
    except KeyError:
        print_error('No such group of models.')
def edit_note(self, group_of_models_id, model_id):
    """Get a model id and edit its note."""
    if not __datasets__.current:
        print_error('There is no dataset selected.')
        return
    try:
        group_of_models = self.group_of_models[group_of_models_id]
    except KeyError:
        print_error('There is no model group with that id')
        return False
    try:
        group_of_models.edit_note_in_model(model_id)
    except KeyError:
        print_error('No such model id.')
def del_note(self, group_of_models_id, model_id):
    """Get a model id and delete its note."""
    if not __datasets__.current:
        print_error('There is no dataset selected.')
        return
    # Guard the group lookup like edit_note does; previously a bad group
    # id raised an uncaught KeyError here.
    try:
        group_of_models = self.group_of_models[group_of_models_id]
    except KeyError:
        print_error('There is no model group with that id')
        return False
    try:
        group_of_models.del_note_in_model(model_id)
    except KeyError:
        print_error('No such model id.')
def list_notes(self, group_of_models_id, filter=""):
    """List the notes in a group_of_models."""
    if not __datasets__.current:
        print_error('There is no dataset selected.')
        return
    # Guard the group lookup like edit_note does; previously a bad group
    # id raised an uncaught KeyError here.
    try:
        group_of_models = self.group_of_models[group_of_models_id]
    except KeyError:
        print_error('There is no model group with that id')
        return False
    group_of_models.list_notes(filter)
# Module-level singleton holding every group of models; the rest of the
# program accesses the groups through this shared name.
__groupofgroupofmodels__ = Group_of_Group_of_Models()
|
import base64
import bcrypt
import json
import mock
import testtools
from shakenfist import config
from shakenfist.external_api import app as external_api
from shakenfist import ipmanager
from shakenfist import net
class FakeResponse(object):
    """Minimal stand-in for a requests-style HTTP response object."""

    def __init__(self, status_code, text):
        # Mirror the two attributes the code under test reads directly.
        self.status_code = status_code
        self.text = text

    def json(self):
        """Decode the stored body text as JSON."""
        return json.loads(self.text)
class FakeScheduler(object):
    """Scheduler double that always places instances on the local node."""

    def place_instance(self, *args, **kwargs):
        # Ignore all placement hints and return this node's name.
        node_name = config.parsed.get('NODE_NAME')
        return node_name
class FakeInstance(object):
    """Instance double exposing only db_entry and unique_label()."""

    def __init__(self, namespace=None):
        self.db_entry = {'namespace': namespace}

    def unique_label(self):
        """Return the (type, uuid) tuple used to identify this instance."""
        uuid = self.db_entry['uuid']
        return ('instance', uuid)
def _encode_key(key):
    """Hash a plain-text key with bcrypt, matching how keys are stored."""
    raw = key.encode('utf-8')
    return bcrypt.hashpw(raw, bcrypt.gensalt())
def _clean_traceback(resp):
if 'traceback' in resp:
del resp['traceback']
return resp
class AuthTestCase(testtools.TestCase):
    """Tests for the /auth endpoints: token issuing and JWT header validation."""

    def setUp(self):
        super(AuthTestCase, self).setUp()
        # Run the flask app in test mode so requests go through the WSGI
        # test client instead of a real server.
        external_api.TESTING = True
        external_api.app.testing = True
        external_api.app.debug = False
        self.client = external_api.app.test_client()

    def test_post_auth_no_args(self):
        resp = self.client.post('/auth', data=json.dumps({}))
        self.assertEqual(400, resp.status_code)
        self.assertEqual(
            {
                'error': 'missing namespace in request',
                'status': 400
            },
            resp.get_json())

    def test_post_auth_no_key(self):
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana'}))
        self.assertEqual(400, resp.status_code)
        self.assertEqual(
            {
                'error': 'missing key in request',
                'status': 400
            },
            resp.get_json())

    def test_post_auth_bad_parameter(self):
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana', 'keyyy': 'pwd'}))
        self.assertEqual(400, resp.status_code)
        # The traceback in the error payload varies by environment, so it
        # is stripped before comparison.
        self.assertEqual(
            {
                'error': "post() got an unexpected keyword argument 'keyyy'",
                'status': 400
            },
            _clean_traceback(resp.get_json()))

    def test_post_auth_key_non_string(self):
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana', 'key': 1234}))
        self.assertEqual(400, resp.status_code)
        self.assertEqual(
            {
                'error': 'key is not a string',
                'status': 400
            },
            resp.get_json())

    @mock.patch('shakenfist.external_api.app.Auth._get_keys',
                return_value=(None, [_encode_key('cheese')]))
    def test_post_auth(self, mock_get_keys):
        # A key matching one of the namespace's stored hashes yields a token.
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana', 'key': 'cheese'}))
        self.assertEqual(200, resp.status_code)
        self.assertIn('access_token', resp.get_json())

    @mock.patch('shakenfist.external_api.app.Auth._get_keys',
                return_value=('cheese', [_encode_key('bacon')]))
    def test_post_auth_not_authorized(self, mock_get_keys):
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana', 'key': 'hamster'}))
        self.assertEqual(401, resp.status_code)
        self.assertEqual(
            {
                'error': 'unauthorized',
                'status': 401
            },
            resp.get_json())

    @mock.patch('shakenfist.etcd.get',
                return_value={
                    'service_key': 'cheese',
                    'keys': {
                        'key1': str(base64.b64encode(_encode_key('bacon')), 'utf-8'),
                        'key2': str(base64.b64encode(_encode_key('sausage')), 'utf-8')
                    }
                })
    def test_post_auth_service_key(self, mock_get):
        # The namespace's service key works as well as any named key.
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana', 'key': 'cheese'}))
        self.assertEqual(200, resp.status_code)
        self.assertIn('access_token', resp.get_json())

    def test_no_auth_header(self):
        resp = self.client.post('/auth/namespaces',
                                data=json.dumps({
                                    'namespace': 'foo'
                                }))
        self.assertEqual(401, resp.status_code)
        self.assertEqual(
            {
                'error': 'Missing Authorization Header',
                'status': 401
            },
            _clean_traceback(resp.get_json()))

    def test_auth_header_wrong(self):
        resp = self.client.post('/auth/namespaces',
                                headers={'Authorization': 'l33thacker'},
                                data=json.dumps({
                                    'namespace': 'foo'
                                }))
        self.assertEqual(
            {
                'error': "Bad Authorization header. Expected value 'Bearer <JWT>'",
                'status': 401
            },
            _clean_traceback(resp.get_json()))
        self.assertEqual(401, resp.status_code)

    def test_auth_header_bad_jwt(self):
        resp = self.client.post('/auth/namespaces',
                                headers={'Authorization': 'Bearer l33thacker'},
                                data=json.dumps({
                                    'namespace': 'foo'
                                }))
        self.assertEqual(
            {
                'error': 'invalid JWT in Authorization header',
                'status': 401
            },
            _clean_traceback(resp.get_json()))
        self.assertEqual(401, resp.status_code)
class ExternalApiTestCase(testtools.TestCase):
    """Base class for API tests: patches common dependencies and logs in.

    Subclasses get a flask test client (self.client) and a valid system
    namespace token (self.auth_header) ready to use.
    """

    def setUp(self):
        super(ExternalApiTestCase, self).setUp()
        # Events and scheduling touch etcd / cluster state, so stub them.
        self.add_event = mock.patch(
            'shakenfist.db.add_event')
        self.mock_add_event = self.add_event.start()
        self.addCleanup(self.add_event.stop)
        self.scheduler = mock.patch(
            'shakenfist.scheduler.Scheduler', FakeScheduler)
        self.mock_scheduler = self.scheduler.start()
        self.addCleanup(self.scheduler.stop)
        external_api.TESTING = True
        external_api.app.testing = True
        external_api.app.debug = False
        self.client = external_api.app.test_client()
        # Make a fake auth token
        self.get_keys = mock.patch(
            'shakenfist.external_api.app.Auth._get_keys',
            return_value=('foo', ['bar'])
        )
        self.mock_get_keys = self.get_keys.start()
        self.addCleanup(self.get_keys.stop)
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'system', 'key': 'foo'}))
        self.assertEqual(200, resp.status_code)
        self.auth_header = 'Bearer %s' % resp.get_json()['access_token']
class ExternalApiGeneralTestCase(ExternalApiTestCase):
    """Tests for namespace, metadata, instance lookup, and node endpoints.

    NOTE: mock.patch decorators are applied bottom-up, so the decorator
    closest to the def supplies the first mock argument after self. The
    parameter names in test_get_instance_metadata were previously swapped
    relative to that order; fixed below (the mocks are unused, so this
    was cosmetic but misleading).
    """

    def setUp(self):
        super(ExternalApiGeneralTestCase, self).setUp()

    def test_get_root(self):
        resp = self.client.get('/')
        self.assertEqual('Shaken Fist REST API service',
                         resp.get_data().decode('utf-8'))
        self.assertEqual(200, resp.status_code)
        self.assertEqual('text/plain; charset=utf-8', resp.content_type)

    def test_auth_add_key_missing_args(self):
        resp = self.client.post('/auth/namespaces',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({}))
        self.assertEqual(400, resp.status_code)
        self.assertEqual(
            {
                'error': 'no namespace specified',
                'status': 400
            },
            resp.get_json())

    @mock.patch('shakenfist.db.get_lock')
    @mock.patch('shakenfist.etcd.get', return_value=None)
    @mock.patch('shakenfist.etcd.put')
    def test_auth_add_key_missing_keyname(self, mock_put, mock_get, mock_lock):
        # A namespace may be created without a key at all.
        resp = self.client.post('/auth/namespaces',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'namespace': 'foo'
                                }))
        self.assertEqual(200, resp.status_code)
        self.assertEqual('foo', resp.get_json())

    @mock.patch('shakenfist.db.get_lock')
    @mock.patch('shakenfist.etcd.get', return_value=None)
    @mock.patch('shakenfist.etcd.put')
    def test_auth_add_key_missing_key(self, mock_put, mock_get, mock_lock):
        # A key_name without an accompanying key is an error.
        resp = self.client.post('/auth/namespaces',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'namespace': 'foo',
                                    'key_name': 'bernard'
                                }))
        self.assertEqual(400, resp.status_code)
        self.assertEqual(
            {
                'error': 'no key specified',
                'status': 400
            },
            resp.get_json())

    @mock.patch('shakenfist.db.get_lock')
    @mock.patch('shakenfist.etcd.get', return_value=None)
    def test_auth_add_key_illegal_keyname(self, mock_get, mock_lock):
        # 'service_key' is reserved and cannot be used as a key name.
        resp = self.client.post('/auth/namespaces',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'namespace': 'foo',
                                    'key_name': 'service_key',
                                    'key': 'cheese'
                                }))
        self.assertEqual(
            {
                'error': 'illegal key name',
                'status': 403
            },
            resp.get_json())
        self.assertEqual(403, resp.status_code)

    @mock.patch('shakenfist.db.get_lock')
    @mock.patch('shakenfist.etcd.get', return_value=None)
    @mock.patch('shakenfist.etcd.put')
    @mock.patch('bcrypt.hashpw', return_value='terminator'.encode('utf-8'))
    def test_auth_add_key_new_namespace(self, mock_hashpw, mock_put, mock_get, mock_lock):
        resp = self.client.post('/auth/namespaces',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'namespace': 'foo',
                                    'key_name': 'bernard',
                                    'key': 'cheese'
                                }))
        self.assertEqual(200, resp.status_code)
        self.assertEqual('foo', resp.get_json())
        # Stored key is the base64 of the (mocked) bcrypt hash.
        mock_put.assert_called_with(
            'namespace', None, 'foo',
            {'name': 'foo', 'keys': {'bernard': 'dGVybWluYXRvcg=='}})

    @mock.patch('shakenfist.etcd.get_all',
                return_value=[
                    {'name': 'aaa'}, {'name': 'bbb'}, {'name': 'ccc'}
                ])
    def test_get_namespaces(self, mock_get_all):
        resp = self.client.get('/auth/namespaces',
                               headers={'Authorization': self.auth_header})
        self.assertEqual(200, resp.status_code)
        self.assertEqual(['aaa', 'bbb', 'ccc'], resp.get_json())

    def test_delete_namespace_missing_args(self):
        # DELETE on the collection (no namespace in the path) is not routed.
        resp = self.client.delete('/auth/namespaces',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(405, resp.status_code)
        self.assertEqual(
            {
                'message': 'The method is not allowed for the requested URL.'
            },
            resp.get_json())

    def test_delete_namespace_system(self):
        resp = self.client.delete('/auth/namespaces/system',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(403, resp.status_code)
        self.assertEqual(
            {
                'error': 'you cannot delete the system namespace',
                'status': 403
            },
            resp.get_json())

    @mock.patch('shakenfist.db.get_instances',
                return_value=[{'uuid': '123', 'state': 'created'}])
    def test_delete_namespace_with_instances(self, mock_get_instances):
        resp = self.client.delete('/auth/namespaces/foo',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(400, resp.status_code)
        self.assertEqual(
            {
                'error': 'you cannot delete a namespace with instances',
                'status': 400
            },
            resp.get_json())

    @mock.patch('shakenfist.db.get_instances', return_value=[])
    @mock.patch('shakenfist.db.get_networks',
                return_value=[{'uuid': '123', 'state': 'created'}])
    def test_delete_namespace_with_networks(self, mock_get_networks, mock_get_instances):
        resp = self.client.delete('/auth/namespaces/foo',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(400, resp.status_code)
        self.assertEqual(
            {
                'error': 'you cannot delete a namespace with networks',
                'status': 400
            },
            resp.get_json())

    def test_delete_namespace_key_missing_args(self):
        # Trailing slash with no key name does not match any route.
        resp = self.client.delete('/auth/namespaces/system/',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(404, resp.status_code)
        self.assertEqual(None, resp.get_json())

    @mock.patch('shakenfist.db.get_lock')
    @mock.patch('shakenfist.etcd.get', return_value={'keys': {}})
    def test_delete_namespace_key_missing_key(self, mock_get, mock_lock):
        resp = self.client.delete('/auth/namespaces/system/keys/mykey',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(404, resp.status_code)
        self.assertEqual(
            {
                'error': 'key name not found in namespace',
                'status': 404
            },
            resp.get_json())

    @mock.patch('shakenfist.db.get_lock')
    @mock.patch('shakenfist.etcd.get', return_value={'keys': {'mykey': 'foo'}})
    @mock.patch('shakenfist.etcd.put')
    def test_delete_namespace_key(self, mock_put, mock_get, mock_lock):
        resp = self.client.delete('/auth/namespaces/system/keys/mykey',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(200, resp.status_code)
        mock_put.assert_called_with('namespace', None, 'system', {'keys': {}})

    @mock.patch('shakenfist.db.get_metadata', return_value={'a': 'a', 'b': 'b'})
    def test_get_namespace_metadata(self, mock_md_get):
        resp = self.client.get(
            '/auth/namespaces/foo/metadata', headers={'Authorization': self.auth_header})
        self.assertEqual({'a': 'a', 'b': 'b'}, resp.get_json())
        self.assertEqual(200, resp.status_code)
        self.assertEqual('application/json', resp.content_type)

    @mock.patch('shakenfist.db.get_metadata', return_value={})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_put_namespace_metadata(self, mock_get_lock, mock_md_put,
                                    mock_md_get):
        resp = self.client.put('/auth/namespaces/foo/metadata/foo',
                               headers={'Authorization': self.auth_header},
                               data=json.dumps({
                                   'key': 'foo',
                                   'value': 'bar'
                               }))
        self.assertEqual(None, resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_md_put.assert_called_with('namespace', 'foo', {'foo': 'bar'})

    @mock.patch('shakenfist.db.get_metadata', return_value={})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_post_namespace_metadata(self, mock_get_lock, mock_md_put,
                                     mock_md_get):
        resp = self.client.post('/auth/namespaces/foo/metadata',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'key': 'foo',
                                    'value': 'bar'
                                }))
        self.assertEqual(None, resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_md_put.assert_called_with('namespace', 'foo', {'foo': 'bar'})

    @mock.patch('shakenfist.db.get_metadata', return_value={'foo': 'bar', 'real': 'smart'})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_namespace_metadata(self, mock_get_lock, mock_md_put,
                                       mock_md_get):
        resp = self.client.delete('/auth/namespaces/foo/metadata/foo',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(None, resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_md_put.assert_called_with('namespace', 'foo', {'real': 'smart'})

    @mock.patch('shakenfist.db.get_metadata', return_value={})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_namespace_metadata_bad_key(self, mock_get_lock,
                                               mock_md_put, mock_md_get):
        resp = self.client.delete('/auth/namespaces/foo/metadata/wrong',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual({'error': 'key not found', 'status': 404},
                         resp.get_json())
        self.assertEqual(404, resp.status_code)

    @mock.patch('shakenfist.db.get_metadata', return_value={'foo': 'bar', 'real': 'smart'})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_namespace_metadata_no_keys(self, mock_get_lock,
                                               mock_md_put, mock_md_get):
        resp = self.client.delete('/auth/namespaces/foo/metadata/wrong',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual({'error': 'key not found', 'status': 404},
                         resp.get_json())
        self.assertEqual(404, resp.status_code)

    @mock.patch('shakenfist.db.get_instance',
                return_value={'uuid': '123',
                              'name': 'banana',
                              'namespace': 'foo'})
    def test_get_instance(self, mock_get_instance):
        resp = self.client.get(
            '/instances/foo', headers={'Authorization': self.auth_header})
        self.assertEqual({'uuid': '123', 'name': 'banana', 'namespace': 'foo'},
                         resp.get_json())
        self.assertEqual(200, resp.status_code)
        self.assertEqual('application/json', resp.content_type)

    @mock.patch('shakenfist.db.get_instance', return_value=None)
    def test_get_instance_not_found(self, mock_get_instance):
        resp = self.client.get(
            '/instances/foo', headers={'Authorization': self.auth_header})
        self.assertEqual({'error': 'instance not found', 'status': 404},
                         resp.get_json())
        self.assertEqual(404, resp.status_code)
        self.assertEqual('application/json', resp.content_type)

    @mock.patch('shakenfist.db.get_instance',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={'a': 'a', 'b': 'b'})
    def test_get_instance_metadata(self, mock_md_get, mock_get_instance):
        # FIX: the two mock parameter names were previously swapped --
        # decorators inject bottom-up, so get_metadata's mock comes first.
        resp = self.client.get(
            '/instances/foo/metadata', headers={'Authorization': self.auth_header})
        self.assertEqual({'a': 'a', 'b': 'b'}, resp.get_json())
        self.assertEqual('application/json', resp.content_type)
        self.assertEqual(200, resp.status_code)

    @mock.patch('shakenfist.db.get_instance',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_put_instance_metadata(self, mock_get_lock, mock_md_put,
                                   mock_md_get, mock_get_instance):
        resp = self.client.put('/instances/foo/metadata/foo',
                               headers={'Authorization': self.auth_header},
                               data=json.dumps({
                                   'key': 'foo',
                                   'value': 'bar'
                               }))
        self.assertEqual(None, resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_md_put.assert_called_with('instance', 'foo', {'foo': 'bar'})

    @mock.patch('shakenfist.db.get_instance',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_post_instance_metadata(self, mock_get_lock, mock_md_put,
                                    mock_md_get, mock_get_instance):
        resp = self.client.post('/instances/foo/metadata',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'key': 'foo',
                                    'value': 'bar'
                                }))
        self.assertEqual(None, resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_md_put.assert_called_with('instance', 'foo', {'foo': 'bar'})

    @mock.patch('shakenfist.db.get_network',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={'a': 'a', 'b': 'b'})
    def test_get_network_metadata(self, mock_md_get, mock_get_network):
        resp = self.client.get(
            '/networks/foo/metadata', headers={'Authorization': self.auth_header})
        self.assertEqual({'a': 'a', 'b': 'b'}, resp.get_json())
        self.assertEqual(200, resp.status_code)
        self.assertEqual('application/json', resp.content_type)

    @mock.patch('shakenfist.db.get_network',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_put_network_metadata(self, mock_get_lock, mock_md_put,
                                  mock_md_get, mock_get_network):
        resp = self.client.put('/networks/foo/metadata/foo',
                               headers={'Authorization': self.auth_header},
                               data=json.dumps({
                                   'key': 'foo',
                                   'value': 'bar'
                               }))
        self.assertEqual(None, resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_md_put.assert_called_with('network', 'foo', {'foo': 'bar'})

    @mock.patch('shakenfist.db.get_network',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_post_network_metadata(self, mock_get_lock, mock_md_put,
                                   mock_md_get, mock_get_network):
        resp = self.client.post('/networks/foo/metadata',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'key': 'foo',
                                    'value': 'bar'
                                }))
        self.assertEqual(None, resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_md_put.assert_called_with('network', 'foo', {'foo': 'bar'})

    @mock.patch('shakenfist.db.get_instance',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={'foo': 'bar', 'real': 'smart'})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_instance_metadata(self, mock_get_lock, mock_md_put,
                                      mock_md_get, mock_get_instance):
        resp = self.client.delete('/instances/foo/metadata/foo',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(None, resp.get_json())
        mock_md_put.assert_called_with('instance', 'foo', {'real': 'smart'})
        self.assertEqual(200, resp.status_code)

    @mock.patch('shakenfist.db.get_instance',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={'foo': 'bar', 'real': 'smart'})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_instance_metadata_bad_key(self, mock_get_lock,
                                              mock_md_put, mock_md_get,
                                              mock_get_instance):
        resp = self.client.delete('/instances/foo/metadata/wrong',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual({'error': 'key not found', 'status': 404},
                         resp.get_json())
        self.assertEqual(404, resp.status_code)

    @mock.patch('shakenfist.db.get_network',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={'foo': 'bar', 'real': 'smart'})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_network_metadata(self, mock_get_lock, mock_md_put,
                                     mock_md_get, mock_get_network):
        resp = self.client.delete('/networks/foo/metadata/foo',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual(None, resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_md_put.assert_called_with('network', 'foo', {'real': 'smart'})

    @mock.patch('shakenfist.db.get_network',
                return_value={'uuid': 'foo',
                              'name': 'banana',
                              'namespace': 'foo'})
    @mock.patch('shakenfist.db.get_metadata', return_value={'foo': 'bar', 'real': 'smart'})
    @mock.patch('shakenfist.db.persist_metadata')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_network_metadata_bad_key(self, mock_get_lock,
                                             mock_md_put, mock_md_get,
                                             mock_get_network):
        resp = self.client.delete('/networks/foo/metadata/wrong',
                                  headers={'Authorization': self.auth_header})
        self.assertEqual({'error': 'key not found', 'status': 404},
                         resp.get_json())
        self.assertEqual(404, resp.status_code)

    @mock.patch('shakenfist.db.get_nodes',
                return_value=[{
                    'fqdn': 'sf-1',
                    'ip': '192.168.72.240',
                    'lastseen': 1594952905.2100437,
                    'version': '0.0.1'
                },
                    {
                    'fqdn': 'sf-2',
                    'ip': '192.168.72.230',
                    'lastseen': 1594952904.8870885,
                    'version': '0.0.1'
                }])
    def test_get_node(self, mock_md_get):
        # The API renames 'fqdn' to 'name' in its response.
        resp = self.client.get('/nodes',
                               headers={'Authorization': self.auth_header})
        self.assertEqual([{
            'name': 'sf-1',
            'ip': '192.168.72.240',
            'lastseen': 1594952905.2100437,
            'version': '0.0.1'
        },
            {
            'name': 'sf-2',
            'ip': '192.168.72.230',
            'lastseen': 1594952904.8870885,
            'version': '0.0.1'
        }],
            resp.get_json())
        self.assertEqual(200, resp.status_code)
        self.assertEqual('application/json', resp.content_type)
class ExternalApiInstanceTestCase(ExternalApiTestCase):
    """Tests for the /instances endpoints: bulk delete and input validation."""

    def setUp(self):
        super(ExternalApiInstanceTestCase, self).setUp()

        def fake_virt_from_db(uuid):
            # Stand-in for virt.from_db that just echoes back the uuid.
            return {'uuid': uuid}

        self.virt_from_db = mock.patch('shakenfist.virt.from_db',
                                       fake_virt_from_db)
        self.mock_virt_from_db = self.virt_from_db.start()
        self.addCleanup(self.virt_from_db.stop)

        def fake_config_instance(key):
            # Only the config keys the instance code paths read.
            fc = {
                'API_ASYNC_WAIT': 1,
                'LOG_METHOD_TRACE': 1,
            }
            if key in fc:
                return fc[key]
            raise Exception('fake_config_instance() Unknown config key')

        self.config = mock.patch('shakenfist.config.parsed.get',
                                 fake_config_instance)
        self.mock_config = self.config.start()
        self.addCleanup(self.config.stop)

    @mock.patch('shakenfist.db.enqueue')
    @mock.patch('shakenfist.db.get_instances',
                return_value=[{
                    'namespace': 'system',
                    'node': 'sf-2',
                    'power_state': 'initial',
                    'state': 'created',
                    'uuid': '6a973b82-31b3-4780-93e4-04d99ae49f3f',
                },
                    {
                    'name': 'timma',
                    'namespace': 'system',
                    'node': 'sf-2',
                    'power_state': 'initial',
                    'state': 'created',
                    'uuid': '847b0327-9b17-4148-b4ed-be72b6722c17',
                }])
    @mock.patch('shakenfist.db.get_instance',
                return_value={
                    'state': 'deleted',
                },)
    @mock.patch('shakenfist.etcd.put')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_all_instances(self,
                                  mock_db_get_lock,
                                  mock_etcd_put,
                                  mock_get_instance,
                                  mock_get_instances,
                                  mock_enqueue):
        # Both instances are 'created', so both uuids should be returned.
        resp = self.client.delete('/instances',
                                  headers={'Authorization': self.auth_header},
                                  data=json.dumps({
                                      'confirm': True,
                                      'namespace': 'foo'
                                  }))
        self.assertEqual(['6a973b82-31b3-4780-93e4-04d99ae49f3f',
                          '847b0327-9b17-4148-b4ed-be72b6722c17'],
                         resp.get_json())
        self.assertEqual(200, resp.status_code)

    @mock.patch('shakenfist.db.enqueue')
    @mock.patch('shakenfist.db.get_instances',
                return_value=[{
                    'namespace': 'system',
                    'node': 'sf-2',
                    'power_state': 'initial',
                    'state': 'deleted',
                    'uuid': '6a973b82-31b3-4780-93e4-04d99ae49f3f',
                },
                    {
                    'name': 'timma',
                    'namespace': 'system',
                    'node': 'sf-2',
                    'power_state': 'initial',
                    'state': 'created',
                    'uuid': '847b0327-9b17-4148-b4ed-be72b6722c17',
                }])
    @mock.patch('shakenfist.db.get_instance',
                return_value={
                    'state': 'deleted',
                },)
    @mock.patch('shakenfist.etcd.put')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_all_instances_one_already_deleted(self,
                                                      mock_db_get_lock,
                                                      mock_etcd_put,
                                                      mock_get_instance,
                                                      mock_get_instances,
                                                      mock_enqueue):
        # The already-deleted instance must be skipped from the response.
        resp = self.client.delete('/instances',
                                  headers={'Authorization': self.auth_header},
                                  data=json.dumps({
                                      'confirm': True,
                                      'namespace': 'foo'
                                  }))
        self.assertEqual(['847b0327-9b17-4148-b4ed-be72b6722c17'],
                         resp.get_json())
        self.assertEqual(200, resp.status_code)

    def test_post_instance_no_disk(self):
        resp = self.client.post('/instances',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'name': 'test_instance',
                                    'cpus': 1,
                                    'memory': 1024,
                                    'network': [],
                                    'disk': None,
                                    'ssh_key': None,
                                    'user_data': None,
                                    'placed_on': None,
                                    'namespace': None,
                                    'instance_uuid': None
                                }))
        self.assertEqual(
            {'error': 'instance must specify at least one disk', 'status': 400},
            resp.get_json())
        self.assertEqual(400, resp.status_code)

    def test_post_instance_invalid_disk(self):
        # Disks must be JSON objects, not shorthand strings.
        resp = self.client.post('/instances',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'name': 'test_instance',
                                    'cpus': 1,
                                    'memory': 1024,
                                    'network': [],
                                    'disk': ['8@cirros'],
                                    'ssh_key': None,
                                    'user_data': None,
                                    'placed_on': None,
                                    'namespace': None,
                                    'instance_uuid': None
                                }))
        self.assertEqual(
            {'error': 'disk specification should contain JSON objects', 'status': 400},
            resp.get_json())
        self.assertEqual(400, resp.status_code)

    def test_post_instance_invalid_network(self):
        # Networks must be JSON objects, not bare uuid strings.
        resp = self.client.post('/instances',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'name': 'test_instance',
                                    'cpus': 1,
                                    'memory': 1024,
                                    'network': ['87c15186-5f73-4947-a9fb-2183c4951efc'],
                                    'disk': [{'size': 8,
                                              'base': 'cirros'}],
                                    'ssh_key': None,
                                    'user_data': None,
                                    'placed_on': None,
                                    'namespace': None,
                                    'instance_uuid': None
                                }))
        self.assertEqual(
            {'error': 'network specification should contain JSON objects', 'status': 400},
            resp.get_json())
        self.assertEqual(400, resp.status_code)

    def test_post_instance_invalid_network_uuid(self):
        # The network object key must be 'network_uuid', not 'uuid'.
        resp = self.client.post('/instances',
                                headers={'Authorization': self.auth_header},
                                data=json.dumps({
                                    'name': 'test_instance',
                                    'cpus': 1,
                                    'memory': 1024,
                                    'network': [
                                        {'uuid': '87c15186-5f73-4947-a9fb-2183c4951efc'}],
                                    'disk': [{'size': 8,
                                              'base': 'cirros'}],
                                    'ssh_key': None,
                                    'user_data': None,
                                    'placed_on': None,
                                    'namespace': None,
                                    'instance_uuid': None
                                }))
        self.assertEqual(
            {'error': 'network specification is missing network_uuid', 'status': 400},
            resp.get_json())
        self.assertEqual(400, resp.status_code)

    def test_post_instance_only_system_allocates_uuids(self):
        # Log in as a non-system namespace first.
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana', 'key': 'foo'}))
        self.assertEqual(200, resp.status_code)
        non_system_auth_header = 'Bearer %s' % resp.get_json()['access_token']
        resp = self.client.post('/instances',
                                headers={
                                    'Authorization': non_system_auth_header},
                                data=json.dumps({
                                    'name': 'test_instance',
                                    'cpus': 1,
                                    'memory': 1024,
                                    'network': [
                                        {'network_uuid': '87c15186-5f73-4947-a9fb-2183c4951efc'}],
                                    'disk': [{'size': 8,
                                              'base': 'cirros'}],
                                    'ssh_key': None,
                                    'user_data': None,
                                    'placed_on': None,
                                    'namespace': None,
                                    'instance_uuid': 'cbc58e78-d9ec-4cd5-b417-f715849126e1'
                                }))
        self.assertEqual(
            {'error': 'only system can specify an instance uuid', 'status': 401},
            resp.get_json())
        self.assertEqual(401, resp.status_code)

    def test_post_instance_only_system_specifies_namespaces(self):
        # Log in as a non-system namespace first.
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana', 'key': 'foo'}))
        self.assertEqual(200, resp.status_code)
        non_system_auth_header = 'Bearer %s' % resp.get_json()['access_token']
        resp = self.client.post('/instances',
                                headers={
                                    'Authorization': non_system_auth_header},
                                data=json.dumps({
                                    'name': 'test_instance',
                                    'cpus': 1,
                                    'memory': 1024,
                                    'network': [
                                        {'network_uuid': '87c15186-5f73-4947-a9fb-2183c4951efc'}],
                                    'disk': [{'size': 8,
                                              'base': 'cirros'}],
                                    'ssh_key': None,
                                    'user_data': None,
                                    'placed_on': None,
                                    'namespace': 'gerkin',
                                    'instance_uuid': None
                                }))
        self.assertEqual(
            {'error': 'only admins can create resources in a different namespace',
             'status': 401},
            resp.get_json())
        self.assertEqual(401, resp.status_code)

    @mock.patch('shakenfist.virt.from_db', return_value=FakeInstance(namespace='foo'))
    @mock.patch('shakenfist.db.add_event')
    def test_post_instance_fails_ownership(self, mock_event, mock_virt_from_db):
        # An instance owned by another namespace must appear as not found.
        resp = self.client.post(
            '/auth', data=json.dumps({'namespace': 'banana', 'key': 'foo'}))
        self.assertEqual(200, resp.status_code)
        non_system_auth_header = 'Bearer %s' % resp.get_json()['access_token']
        resp = self.client.post('/instances',
                                headers={
                                    'Authorization': non_system_auth_header},
                                data=json.dumps({
                                    'name': 'test_instance',
                                    'cpus': 1,
                                    'memory': 1024,
                                    'network': [
                                        {'network_uuid': '87c15186-5f73-4947-a9fb-2183c4951efc'}],
                                    'disk': [{'size': 8,
                                              'base': 'cirros'}],
                                    'ssh_key': None,
                                    'user_data': None,
                                    'placed_on': None,
                                    'namespace': None,
                                    'instance_uuid': None
                                }))
        self.assertEqual({'error': 'instance not found',
                          'status': 404}, resp.get_json())
        self.assertEqual(404, resp.status_code)
class ExternalApiNetworkTestCase(ExternalApiTestCase):
    """Tests for the /networks endpoints of the external API."""

    def setUp(self):
        super(ExternalApiNetworkTestCase, self).setUp()

        def fake_config_network(key):
            # Minimal config for these tests; this node doubles as the
            # network node (NODE_IP == NETWORK_NODE_IP).
            fc = {
                'NODE_NAME': 'seriously',
                'NODE_IP': '127.0.0.1',
                'NETWORK_NODE_IP': '127.0.0.1',
                'LOG_METHOD_TRACE': 1,
                'NODE_EGRESS_NIC': 'eth0'
            }
            if key in fc:
                return fc[key]
            raise Exception('fake_config_network() Unknown config key')

        self.config = mock.patch('shakenfist.config.parsed.get',
                                 fake_config_network)
        self.mock_config = self.config.start()
        # Without this cleanup, other test classes will have 'config.parsed.get'
        # mocked during parallel testing by stestr.
        self.addCleanup(self.config.stop)

    @mock.patch('shakenfist.db.get_networks',
                return_value=[{
                    'floating_gateway': '10.10.0.150',
                    'name': 'bob',
                    'state': 'created',
                    'uuid': '30f6da44-look-i-am-uuid',
                }])
    @mock.patch('shakenfist.db.get_network_interfaces', return_value=[])
    @mock.patch('shakenfist.db.get_ipmanager',
                return_value=ipmanager.NetBlock('10.0.0.0/24'))
    @mock.patch('shakenfist.net.Network.remove_dhcp')
    @mock.patch('shakenfist.net.Network.delete')
    @mock.patch('shakenfist.db.update_network_state')
    @mock.patch('shakenfist.etcd.put')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_all_networks(self,
                                 mock_db_get_lock,
                                 mock_etcd_put,
                                 mock_update_network_state,
                                 mock_delete,
                                 mock_remove_dhcp,
                                 mock_get_ipmanager,
                                 mock_db_get_network_interfaces,
                                 mock_db_get_networks):
        # DELETE /networks on a namespace with one 'created' network should
        # report that network's uuid as deleted.
        mock_network = mock.patch('shakenfist.net.from_db',
                                  return_value=net.Network({'uuid': 'foo'}))
        mock_network.start()
        self.addCleanup(mock_network.stop)
        resp = self.client.delete('/networks',
                                  headers={'Authorization': self.auth_header},
                                  data=json.dumps({
                                      'confirm': True,
                                      'namespace': 'foo'
                                  }))
        self.assertEqual(['30f6da44-look-i-am-uuid'],
                         resp.get_json())
        self.assertEqual(200, resp.status_code)
        mock_network.stop()

    @mock.patch('shakenfist.db.get_networks',
                return_value=[{
                    'floating_gateway': '10.10.0.150',
                    'name': 'bob',
                    'state': 'deleted',
                    'uuid': '30f6da44-look-i-am-uuid',
                }])
    @mock.patch('shakenfist.db.get_network_interfaces', return_value=[])
    @mock.patch('shakenfist.db.get_ipmanager',
                return_value=ipmanager.NetBlock('10.0.0.0/24'))
    @mock.patch('shakenfist.net.Network.remove_dhcp')
    @mock.patch('shakenfist.etcd.put')
    @mock.patch('shakenfist.db.get_lock')
    def test_delete_all_networks_none_to_delete(self,
                                                mock_db_get_lock,
                                                mock_etcd_put,
                                                mock_remove_dhcp,
                                                mock_get_ipmanager,
                                                mock_db_get_network_interfaces,
                                                mock_db_get_networks):
        # A network already in state 'deleted' must not be deleted again,
        # so the response lists no uuids.
        resp = self.client.delete('/networks',
                                  headers={'Authorization': self.auth_header},
                                  data=json.dumps({
                                      'confirm': True,
                                      'namespace': 'foo'
                                  }))
        self.assertEqual([], resp.get_json())
|
from .mklink import mergelink
from .mklink import mergelinks
from .mklink import mklink
from .mklink import mklinks
from .zipfile_ import unzip_file
|
import gc
import threading
from pickle import loads, dumps
from go import Stone, WHITE, BLACK
from time import time
import numpy as np
def masked_softmax(mask, x, temperature):
    """Softmax of 1-D array x over entries where mask is False.

    Entries with mask[i] == True receive probability 0.  Returns None (after
    printing a warning) when x is not 1-D.
    """
    if len(x.shape) != 1:
        print("softmax input must be 1-D numpy array")
        return
    # float64: numpy's multinomial converts pvals to float64 before summing,
    # and numerical rounding can otherwise push the sum past 1.0
    x = x.astype("float64")
    # indices kept for the softmax -- masked entries are excluded entirely
    keep = np.argwhere(~mask)
    kept = x[keep]
    # subtract the max before exp() so large logits cannot overflow to NaN
    scaled = (kept - np.max(kept)) / temperature
    exp_scaled = np.exp(scaled)
    probs = exp_scaled / np.sum(exp_scaled)
    out = np.zeros(x.shape)
    out[keep] = probs
    return out
class BatchInfo():
    """Carries one selected playout path while it awaits batched NN evaluation."""
    def __init__ (self, node_path, action_path, state):
        self.node_path = node_path      # nodes visited from root to leaf
        self.action_path = action_path  # actions taken along node_path
        self.state = state              # pickled board at the leaf
        self.value = None               # filled in after the batched evaluation
class MonteCarloNode():
    """One search-tree node holding per-action statistics for MCTS."""

    def __init__(self, state, policies, value, children_keys):
        self.state = state        # pickled board position
        self.policies = policies  # prior move probabilities from the model
        self.value = value        # model's value estimate for this position
        # Q and N are updated during the back-propagation phase.
        self.Q = dict.fromkeys(children_keys, 0)              # expected action value, this node's perspective
        self.N = dict.fromkeys(children_keys, 0)              # times each action was taken from here
        self.expanding = dict.fromkeys(children_keys, False)  # True while selected for expansion; cleared on back-prop
        self.visiting = dict.fromkeys(children_keys, 0)       # virtual loss: +1 on selection, -1 on back-prop
        self.children = dict.fromkeys(children_keys)          # action_id -> child node (None until expanded)
class ParallelMonteCarlo():
    """Batched Monte-Carlo tree search with virtual loss for parallel playouts.

    Leaves are selected in batches of `batch_size`, evaluated by the
    policy/value model in one forward pass, and the results back-propagated
    under `treelock`.
    """

    def __init__(self, model, batch_size, thread_max=0):
        """
        model: playmodel supplying policy (actor) and value (critic) predictions
        batch_size: playouts evaluated per network call; larger than 8 will
            not be too effective
        thread_max: if set, thread_num will equal the number of children of
            the root, approximately equal to degree
        """
        self.model = model
        self.size = model.size
        self.size_square = self.size ** 2
        self.action_size = self.size_square + 1  # every board point plus pass
        self.dirichlet_alpha = [10 / self.size_square] * self.action_size
        self.root = None
        self.playout_limit = 0  # reset every time search() is called
        self.batch_size = batch_size
        self.thread_max = thread_max
        self.treelock = threading.Lock()
        self.playout_count_lock = threading.Lock()
        print("Parallel Monte Carlo parameters: Batch size:", batch_size, "thread max:", thread_max)

    def clear_visit(self):
        """Drop the whole search tree and explicitly release memory."""
        self.root = None
        self.visited = {}  # NOTE(review): legacy attribute, not read anywhere visible
        gc.collect()

    def re_root(self, new_root_action):
        """Make the chosen child the new root; the rest of the tree is dropped."""
        if self.root.children[new_root_action] is None:
            # child never expanded: removing the root ref deletes the tree
            self.root = None
            return
        self.root = self.root.children[new_root_action]

    def search(self, root_board, prev_action, playout, temperature):
        """Run `playout` playouts from root_board and sample a move.

        Returns (x, y, mcts_policy); (0, -1, policy) signals resignation.
        """
        self.record_time = []
        self.playout_limit = playout
        self.playout_count = 0
        if self.root is None:
            # pickle round-trip deep-copies the board so tree state never
            # aliases the live game board
            self.root = self.add_node(loads(dumps(root_board)))
        else:
            # the kept subtree must describe the actual game position
            if root_board.grid_hash() != loads(self.root.state).grid_hash():
                # BUG FIX: `raise <str>` is a TypeError in Python 3
                raise RuntimeError(
                    "root_board.grid_hash() != self.root.state.hash. Doesn't know how to handle yet")
        self.prev_action = prev_action
        self.threaded_playout_loop()
        # BUG FIX: iterating/summing a dict yields its KEYS (action ids);
        # the visit counts live in the values.
        N_sum = sum(self.root.N.values())
        if N_sum == 0:
            # something is wrong: no playout ever reached back-propagation
            raise RuntimeError("No search in MCTS???")
        children_action = list(self.root.N.keys())
        children_values = np.array([v / N_sum for v in self.root.N.values()])
        # exp(100) is 1e43, so keep temperature > 0.01 to avoid overflow
        if temperature < 0.01:
            temperature = 0.01
        value_softmax = np.exp(children_values / temperature) / np.sum(np.exp(children_values / temperature))
        mcts_policy = np.zeros((self.action_size))
        mcts_policy[children_action] = value_softmax
        try:
            action = np.random.choice(self.action_size, p=mcts_policy)
        except Exception as e:
            print(temperature)
            print(children_values)
            print(value_softmax)
            print(mcts_policy)
            raise e
        # resign if the chosen action's expected value is below the threshold
        if self.root.Q[action] < self.model.resign_value:
            return 0, -1, mcts_policy
        self.re_root(action)
        return action % self.size, action // self.size, mcts_policy

    def threaded_playout_loop(self):
        """Run batched playouts until the playout budget is used up."""
        while self.playout_count < self.playout_limit:
            self.batch_playout()
            if len(self.root.children) <= 1:
                # one or zero legal moves: more search cannot change the pick
                break

    def playout(self):
        """Single (non-batched) playout: select, expand/evaluate, back-propagate."""
        node_path, action_path, is_terminal = self.select()
        if is_terminal:
            value = self.handle_terminal(node_path[-1])
        else:
            value = self.expand(node_path[-1], action_path[-1])
        # BUG FIX: was `if value:`, which skipped back-propagation for a
        # legitimate evaluation of exactly 0.0
        if value is not None:
            self.backpropagate(node_path, action_path, value)

    def batch_playout(self):
        """Select up to batch_size leaves and evaluate them in one network call."""
        batch_list = []
        for _ in range(self.batch_size):
            node_path, action_path, is_terminal = self.select(batching=True)
            if len(action_path) == 0:  # no path available to choose
                break
            if is_terminal:
                batchinfo = None
                value = self.handle_terminal(node_path[-1])
            else:
                value = None
                batchinfo = self.delayed_expand(node_path, action_path)
            if batchinfo is not None:  # legal expansion: defer to the batch
                batch_list.append(batchinfo)
            elif value is not None:  # terminal: back-propagate immediately
                self.backpropagate(node_path, action_path, value)
            # illegal action: do nothing
            # This increment SHOULD be a critical section, but a precise
            # playout count is not required -- the loop terminates regardless.
            self.playout_count += 1
        if len(batch_list) > 0:
            self.batch_add_node(batch_list)
            self.backpropagate_with_batch(batch_list)

    def select(self, batching=False):
        """Walk the tree by UCT (with virtual loss) to an unexpanded or terminal leaf.

        Returns (node_path, action_path, is_terminal); both paths are empty
        when no selectable child exists at the root.
        """
        curnode = self.root
        node_path = []
        action_path = []
        is_terminal = False
        self.treelock.acquire()
        while True:
            best_a = -1
            N_visiting_sum_sqrt = np.sqrt(sum(curnode.N.values()) + sum(curnode.visiting.values()))
            # AlphaGoZero-style UCT with virtual loss:
            #   UCT[a] = Q[a] + c * P[a] * sqrt(sum(N) + sum(O)) / (1 + N[a] + O[a])
            # where O[a] counts playouts through 'a' that have not finished
            # back-propagation yet; this discourages parallel playouts from
            # piling onto a single path.  c = 2 here.
            cur_filtered_uct_dict = {
                k: curnode.Q.get(k, 0) + 2 * curnode.policies[k] * N_visiting_sum_sqrt / (1 + curnode.N[k] + curnode.visiting[k])
                for k in curnode.children
                if not curnode.expanding[k] and (k == self.size_square or not curnode.children[k] in node_path)
            }
            if len(cur_filtered_uct_dict) > 0:
                best_a = max(cur_filtered_uct_dict, key=cur_filtered_uct_dict.get)
            if best_a == -1:
                # no valid child: in some sense this node is terminal
                break
            node_path.append(curnode)
            action_path.append(best_a)
            # two consecutive passes end the game
            prev_action = action_path[-2] if len(action_path) > 1 else self.prev_action
            if best_a == prev_action and best_a == self.size_square:
                is_terminal = True
                break
            if curnode.children[best_a] is None:
                # unexpanded: mark it so concurrent selections skip it
                curnode.expanding[best_a] = True
                break
            else:
                curnode.visiting[best_a] += 1  # apply virtual loss
                curnode = curnode.children[best_a]
        self.treelock.release()
        return node_path, action_path, is_terminal

    def handle_terminal(self, terminal_node):
        """Score a finished game; value is from the next-to-move player's view."""
        board = loads(terminal_node.state)
        winner, score_diff = board.score()
        value = 1.0 if board.next == winner else -1.0
        return value

    def delayed_expand(self, node_path, action_path):
        """Validate the leaf action and package it for batched evaluation.

        Returns a BatchInfo for a legal move, or None for an illegal one
        (the illegal child is pruned from its parent).
        """
        leaf_node = node_path[-1]
        leaf_action = action_path[-1]
        board = loads(leaf_node.state)
        islegal = True
        if leaf_action >= self.size_square:  # pass move
            board.pass_move()
        else:
            x = leaf_action % self.size
            y = leaf_action // self.size
            add_stone = Stone(board, (x, y))
            islegal = add_stone.islegal
        if islegal:
            # the child node is attached to the parent later, in batch_add_node
            return BatchInfo(node_path=node_path, action_path=action_path,
                             state=dumps(board))
        else:
            self.treelock.acquire()
            del leaf_node.children[leaf_action]
            del leaf_node.Q[leaf_action]
            del leaf_node.N[leaf_action]
            # re-dump the board so its illegal-move record is kept; this
            # reduces illegal children in endgame expansions
            leaf_node.state = dumps(board)
            self.treelock.release()
            # None: an illegal move must not be back-propagated
            return None

    def expand(self, leaf_node, leaf_action):
        """Non-batched expansion of one leaf; returns the value for back-prop."""
        board = loads(leaf_node.state)
        islegal = True
        if leaf_action >= self.size_square:  # pass move
            board.pass_move()
        else:
            x = leaf_action % self.size
            y = leaf_action // self.size
            add_stone = Stone(board, (x, y))
            islegal = add_stone.islegal
        if islegal:
            new_node = self.add_node(board)
            value = new_node.value
            self.treelock.acquire()
            leaf_node.children[leaf_action] = new_node
            self.treelock.release()
            # negate: the child's value is from the opponent's perspective
            return -value
        else:
            self.treelock.acquire()
            del leaf_node.children[leaf_action]
            del leaf_node.Q[leaf_action]
            del leaf_node.N[leaf_action]
            leaf_node.state = dumps(board)
            self.treelock.release()
            return

    def batch_add_node(self, batch_list):
        """Evaluate all batched leaves in one forward pass and attach new nodes."""
        batched_mask = np.empty((len(batch_list), self.action_size), dtype=bool)
        batched_board_grid = np.empty((len(batch_list), self.size, self.size, 2))
        for i, binfo in enumerate(batch_list):
            board = loads(binfo.state)
            batched_mask[i] = self.model.get_invalid_mask(board)
            if board.next == WHITE:
                # swap the two feature planes when WHITE is to move
                # (presumably so channel 0 is always the side to move)
                batched_board_grid[i] = board.grid[:, :, [1, 0]]
            else:
                batched_board_grid[i] = board.grid
        batched_logit = self.model.actor.predict_on_batch(batched_board_grid)   # (batch, action_size)
        batched_value = self.model.critic.predict_on_batch(batched_board_grid)  # (batch, 1)
        # Dirichlet noise for self-play exploration, as in AlphaGoZero
        batched_noise = np.random.dirichlet(alpha=self.dirichlet_alpha, size=(len(batch_list)))
        batched_noised_logit = 0.75 * batched_logit + 0.25 * batched_noise
        for i, binfo in enumerate(batch_list):
            binfo.value = batched_value[i][0]
            masked_intuitions = masked_softmax(batched_mask[i], batched_noised_logit[i], 1.0)
            children_actions = masked_intuitions.nonzero()[0]
            new_node = MonteCarloNode(state=binfo.state,
                                      policies=masked_intuitions,
                                      value=batched_value[i][0],
                                      children_keys=children_actions)
            # attach the new child to its parent under the tree lock
            self.treelock.acquire()
            binfo.node_path[-1].children[binfo.action_path[-1]] = new_node
            self.treelock.release()

    def add_node(self, board):
        """Create (but do not attach) a node for `board` via a single model query."""
        masked_intuitions = self.model.get_masked_intuitions(board, 1.0)
        value = self.model.get_value(board.next, board.grid)
        children_actions = masked_intuitions.nonzero()[0]
        return MonteCarloNode(state=dumps(board),
                              policies=masked_intuitions,
                              value=value,
                              children_keys=children_actions)

    def backpropagate(self, node_path, action_path, value):
        """Propagate `value` up the path, flipping sign at every ply."""
        self.treelock.acquire()
        for rev_i, a in reversed(list(enumerate(action_path))):
            curnode = node_path[rev_i]
            Qa = curnode.Q[a]
            Na = curnode.N[a]
            curnode.Q[a] = (value + Na * Qa) / (Na + 1)  # running mean
            curnode.N[a] += 1
            curnode.visiting[a] -= 1    # release virtual loss
            curnode.expanding[a] = False
            value = -value  # switch side
        self.treelock.release()

    def backpropagate_with_batch(self, batch_list):
        """Back-propagate every evaluated playout of a batch under one lock."""
        self.treelock.acquire()
        for binfo in batch_list:
            node_path, action_path, value = binfo.node_path, binfo.action_path, binfo.value
            for rev_i, a in reversed(list(enumerate(action_path))):
                curnode = node_path[rev_i]
                Qa = curnode.Q[a]
                Na = curnode.N[a]
                curnode.Q[a] = (value + Na * Qa) / (Na + 1)  # running mean
                curnode.N[a] += 1
                curnode.visiting[a] -= 1    # release virtual loss
                curnode.expanding[a] = False
                value = -value  # switch side
        self.treelock.release()
|
__copyright__ = "Copyright 2020 Profilence"
__license__ = "Apache License, Version 2.0"
def enum(**enums):
    """Build an ad-hoc enumeration: a class whose attributes are the given constants."""
    members = dict(enums)
    return type('Enum', (), members)
# Bitmask flags describing how a device was reset.
ResetType = enum(HARD_RESET=1,
                 SOFT_RESET=2,
                 REQUESTED_RESET=4,
                 MODEM_HIDDEN=8)
# Bitmask flags for monitored failure/event categories.
EventType = enum(WARNING=1,
                 NATIVE_CRASH=2,
                 STRICT_MODE_VIOLATION=4,
                 TERRIBLE_FAILURE=8,
                 LOW_MEMORY=16,
                 APP_NATIVE_CRASH=32,
                 APP_CRASH=64,
                 APP_NOT_RESPONDING=128,
                 WATCHDOG=256,
                 KERNEL_PANIC=16384,
                 CONSOLE_RAMOOPS=262144,
                 OTHER=2147483647)
# Priority levels for log lines (kernel levels followed by higher-level ones).
LogPriority = enum(NOT_AVAILABLE=0,
                   UNKNOWN=1,
                   KERNEL_EMERGENCY=2,
                   KERNEL_ALERT=3,
                   KERNEL_CRITICAL=4,
                   KERNEL_ERROR=5,
                   KERNEL_WARNING=6,
                   KERNEL_NOTICE=7,
                   KERNEL_INFO=8,
                   KERNEL_DEBUG=9,
                   DEFAULT=10,
                   VERBOSE=11,
                   DEBUG=12,
                   INFO=13,
                   WARNING=14,
                   ERROR=15,
                   FATAL=16,
                   SILENT=17)
# Bitmask flags naming the buffer a log line came from.
SourceBuffer = enum(MAIN=1,
                    RADIO=2,
                    EVENTS=4,
                    SYSTEM=8,
                    KERNEL=16,
                    MARKER=32)
# How series data is organised.
SeriesType = enum(SINGLE_SERIES=0,
                  STACKED_SERIES=1)
# Outcome of a ping request.
PingResponseType = enum(FAILED=0,
                        OK=1)
# Role of a test in a run.
TestType = enum(NORMAL=0,
                PRE_CONDITION=1,
                POST_CONDITION=2)
class TestRequestListenerBase(object):
    """ Base class for test request handling.

    Subclasses override the callbacks they care about; every default
    implementation is a no-op.
    """
    def on_error(self, e):
        """ Called if error occurs while listening test requests
        Parameters:
            e (Exception): The exception object
        """
        pass
    def on_completed(self):
        """ Called when the asynchronous sequence completes """
        pass
    def on_test_start_requested(self, request):
        """ Called by the service for starting a new test in the test node (this client)
        Parameters:
            request (TestStartRequest): Start request details
        """
        pass
    def on_test_stop_requested(self, request):
        """ Called by the service for stopping an ongoing test in the test node (this client)
        Parameters:
            request (TestStopRequest): Stop request details
        """
        pass
|
from urllib import request, parse, error
import json, time, re
import configparser, os
# Read bot/chat settings from the config files named by the environment.
config=configparser.ConfigParser(allow_no_value=True)
config.read(os.getenv('FRONTEND_CONFIG'))
config.read(os.getenv('BACKEND_CONFIG'))
# Matches a ":DD" two-digit fragment (e.g. "12:34") not embedded in a longer
# digit run; messages matching it are also routed to the 'dung' chat.
dungRegex = re.compile("(^|\D):\d{2}(\D|$)")
url = "https://api.telegram.org/bot" + config['bot']['token'] + "/sendMessage"
chatID=config['chat']['commonID']
dungChatID=config['chat']['dungID']
lasttime = 0      # timestamp of the newest message handled so far
messages = set()  # de-duplication set of message tails sharing lasttime
def sendMessage(chatid, textmessage):
    """POST textmessage to the Telegram chat chatid, retrying until accepted.

    Backs off 60 s on HTTP 429 (rate limited) and 1 s on any other HTTP
    error, then retries.  Sleeps 0.2 s after a successful send to stay
    under Telegram's rate limits.
    """
    data = parse.urlencode({'text': textmessage, 'chat_id': chatid, 'disable_web_page_preview': 'true'}).encode()
    while True:
        req = request.Request(url, data=data)  # attaching data makes the method "POST"
        try:
            request.urlopen(req)
        except error.HTTPError as httperror:
            if httperror.code == 429:
                # BUG FIX: previously this branch fell through to `break`
                # and silently dropped the message; now wait out the rate
                # limit and retry.
                print("too many requests")
                time.sleep(60)
            else:
                # HTTPError is only raised for non-success codes, so no
                # status check is needed here.
                print("common httperror")
                time.sleep(1)
            continue
        time.sleep(0.2)
        break
# Main loop: repeatedly re-open the message pipe and forward each new
# message to Telegram exactly once.
while True:
    with open(config['paths']['message_pipe'], "r", encoding="utf-8") as f:
        while True:
            # Each record is a decimal length line followed by a JSON payload
            # of exactly that many characters.
            try:
                numb = int(f.readline())
            except ValueError:
                # Blank/invalid length line: pipe exhausted, reopen it.
                break;
            message = tuple(json.loads(f.read(numb)))
            # message[0] is a timestamp; message[1]/message[2] are formatted
            # as "<sender>: <text>" below.  Forward only records newer than,
            # or not yet seen at, the newest timestamp handled.
            if message[0]>lasttime or message[0]==lasttime and message[1:] not in messages:
                if message[0]>lasttime:
                    # Strictly newer timestamp: the de-dup set for the
                    # previous timestamp is obsolete.
                    messages=set()
                    lasttime = message[0]
                messages.add(message[1:])
                textmessage = time.strftime("[%d.%m %H:%M] ", time.gmtime(message[0]))+message[1]+': '+message[2]
                sendMessage(chatID, textmessage)
                # Messages containing a ":DD" fragment also go to the 'dung' chat.
                if dungRegex.search(message[2]):
                    sendMessage(dungChatID, textmessage)
|
"""
This module is for calculating stats on a large corpus of text data.
"""
import os
import json
from chemdataextractor.doc import Paragraph
import sys
import random
#import pubchempy as pcp
print('Successful imports')
def find_nth(haystack, needle, n):
    """Return the index of the n-th occurrence of needle in haystack, or -1.

    For n <= 1 this is equivalent to haystack.find(needle).
    """
    idx = haystack.find(needle)
    for _ in range(n - 1):
        if idx < 0:
            break
        idx = haystack.find(needle, idx + len(needle))
    return idx
def clean_paper(paper):
    """
    This method takes a single paper and does all the rule-based text preprocessing.

    Parameters:
    ___________
    paper (str): The single paper to be preprocessed

    Returns:
    ________
    paper (str): The cleaned and preprocessed paper
    """
    # Cut off everything up to and including the first section marker found,
    # preferring highlights, then abstract, then the second 'introduction'.
    if paper.lower().count('highlights') != 0:
        h_index = paper.lower().find('highlights')
        paper = paper[h_index + len('highlights'):]
    elif paper.lower().count('abstract') != 0:
        a_index = paper.lower().find('abstract')
        paper = paper[a_index + len('abstract'):]
    elif paper.lower().count('introduction') != 0:
        i_index = find_nth(paper.lower(), 'introduction', 2)
        # BUG FIX: find_nth returns -1 when there is no second occurrence;
        # slicing from -1 + len('introduction') would cut from an arbitrary
        # position.  Only slice when a second occurrence exists.
        if i_index >= 0:
            paper = paper[i_index + len('introduction'):]
    # BUG FIX: rfind returns -1 when 'References' is absent, and paper[:-1]
    # silently dropped the last character.  Only trim when the marker exists.
    r_index = paper.rfind('References')
    if r_index >= 0:
        paper = paper[:r_index]
    return paper
def read_ptable():
    """
    This function reads the periodic table file 'periodic_table.txt' and sets up a
    dictionary of values for each element.

    Each line is expected to contain at least: <number> <Name> <Symbol>.

    Parameters:
        none

    Returns:
        dict: dictionary with lowercase element names and symbols as keys,
            integers = 0 as all values
    """
    ptable = {}
    with open('periodic_table.txt', 'r') as file:
        for line in file:
            tokens = line.split()
            if len(tokens) < 3:
                continue  # skip blank or malformed lines
            # BUG FIX: the original used tokens[1].lower / tokens[2].lower
            # (no call), storing bound-method objects as the dict keys
            # instead of lowercase strings.
            element = tokens[1].lower()
            symbol = tokens[2].lower()
            ptable[element] = 0
            ptable[symbol] = 0
    return ptable
def corpus_stats(corpus_path):
    """
    This function runs through an entire literature corpus and calculates many
    statistics about the corpus.

    Parameters:
        corpus_path (str, required): The path to the master corpus.

    Returns:
        dict: Dictionary with each key corresponding to an element name/symbol.
            NOTE(review): counts are never incremented in this function, so
            every value is returned as 0.
        dict: Dictionary of various other corpus stats (papers, abstracts,
            fulltexts, words).
    """
    ptable = read_ptable() # create dictionary of periodic table elements
    stats = {'papers':0, 'abstracts':0, 'fulltexts':0, 'words':0}
    # make sure we have consistent endings
    if not corpus_path.endswith('/'):
        corpus_path += '/'
    # get a list of all the journal directories and remove the README
    journals = os.listdir(corpus_path)
    journals.remove('README.txt')
    # iterate through every journal in corpus
    for journal_name in journals:
        # each journal directory holds one JSON file named after the journal
        journal_path = corpus_path + journal_name +'/'
        journal_json = journal_path + journal_name + '.json'
        print('On journal ', journal_name)
        # open the entire dictionary corresponding to a single jornal
        with open(journal_json) as json_file:
            journal_dict = json.load(json_file)
        # iterate through the journal years
        for year in journal_dict:
            year_dict = journal_dict[year]
            print(year)
            # iterate through every paper number in that year
            for paper in year_dict:
                stats['papers'] += 1 #increment paper stat
                paper_dict = year_dict[paper]
                # in the paper dict there are,
                # 'description', 'fulltext', 'doi', 'pii'...
                try:
                    # 'description' holds the abstract text (may be None)
                    abstract = paper_dict['description']
                    if abstract != None:
                        stats['abstracts'] += 1 #increment abs stat
                        stats['words'] += len(abstract.split())
                    else:
                        pass
                except KeyError:
                    print('Abstract key error')
                    pass
                try:
                    fulltext = paper_dict['fulltext']
                    if fulltext != None:
                        stats['fulltexts'] += 1 #increment ft stat
                        stats['words'] += len(fulltext.split())
                    else:
                        pass
                except KeyError:
                    print('fulltext key error')
                    pass
    return ptable, stats
def get_CDE_mols(corpus_path, years, ppy, output_path, mode='fulltext'):
    """
    This function grabs molecule names (via ChemDataExtractor) from a random
    sample of papers and appends them to the file at output_path, one name per
    line, with a '<<NEW_PAPER>>' marker before each paper's molecules.

    Parameters:
        corpus_path (str, required): Path to the corpus
        years (list, required): List of years to find mols for
        ppy (int, required): Papers per year. How many papers to get mols from per
            year
        output_path (str, required): path to place output data to be furher analyzed
        mode (str, optional): Either 'fulltext' or 'abstract' or 'both'.
            NOTE(review): currently unused -- only 'fulltext' is ever read.
    """
    paper_count = 0
    # make sure we have consistent endings
    if not corpus_path.endswith('/'):
        corpus_path += '/'
    # get a list of all the journal directories and remove the README
    journals = os.listdir(corpus_path)
    journals.remove('README.txt')
    # fixed seed so the journal order / paper sample is reproducible
    random.seed(42)
    random.shuffle(journals)
    # iterate through every journal in corpus
    for journal_name in journals:
        journal_path = corpus_path + journal_name +'/'
        journal_json = journal_path + journal_name + '.json'
        print('On journal ', journal_name)
        # open the entire dictionary corresponding to a single jornal
        with open(journal_json) as json_file:
            journal_dict = json.load(json_file)
        # iterate through the specified years in parameter
        for year in years:
            year_dict = journal_dict[year]
            print(year)
            try:
                # don't know if there will be enough papers in this year for this pub
                paper_idxs = random.sample(range(len(year_dict)), ppy)
            except:
                continue
            for num in paper_idxs:
                paper_count += 1
                print('On paper ', paper_count, ' of ', len(journals)*len(years)*ppy)
                # grab the paper from this year corresponding to the 'numth' paper
                paper_dict = year_dict[str(num)]
                # get the fulltext out
                try:
                    text = paper_dict['fulltext']
                except:
                    continue
                if type(text) != str:
                    continue
                # remove nonsense information
                text = clean_paper(text)
                para = Paragraph(text)
                mols = para.cems # find all molecules in the text
                # marker separates papers in the output file
                mols = ['<<NEW_PAPER>>'] + [mol.text for mol in mols]
                # append this paper's molecule names to the output file
                with open(output_path, 'a') as file:
                    for entry in mols:
                        file.write(entry + '\n')
                    file.write('\n')
def append_cde_mols(text, mol_list, ptable):
    """
    This function uses ChemDataExtractor to find all molecules in a chunk of
    text and appends their surface forms to mol_list in place.

    Parameters:
        text (str, required): The text to find molecules in
        mol_list (list, required): list the molecule names are appended to
        ptable (dict, required): unused here; kept for interface compatibility

    Returns:
        none (mol_list is mutated)
    """
    for mol in Paragraph(text).cems:
        mol_list.append(mol.text)
        print('appended ', mol)
def main(corpus_path, output_path):
    """
    Main method to be called.

    Parameters:
        corpus_path (str, required): path to the corpus root
        output_path (str, required): file the extracted molecule names go to

    Returns:
        none
    """
    # Sample 5 papers from 2019 per journal and extract their molecules.
    get_CDE_mols(corpus_path, years=['2019'], ppy=5,
                 output_path=output_path, mode='fulltext')
# Script entry: hard-coded cluster paths.
# NOTE(review): runs on import -- there is no `if __name__ == '__main__':` guard.
output_path = '/gscratch/pfaendtner/dacj/nlp/stats_pmmo/2015_5ppy_CDE_mols.txt'
corpus_path = '/gscratch/pfaendtner/dacj/nlp/fulltext_pOmOmOo'
main(corpus_path, output_path)
|
__author__ = 'silencedut'
|
from http import HTTPStatus
import pytest
import requests
from rotkehlchen.tests.utils.api import api_url_for, assert_error_response, assert_proper_response
from rotkehlchen.tests.utils.blockchain import assert_btc_balances_result
from rotkehlchen.tests.utils.factories import UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2
from rotkehlchen.tests.utils.rotkehlchen import setup_balances
def test_add_and_query_tags(
        rotkehlchen_api_server,
):
    """Test that adding and querying tags via the API works fine"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    tags_url = api_url_for(rotkehlchen_api_server, "tagsresource")

    def query_tags():
        # GET the tags resource and return the parsed result mapping
        get_response = requests.get(tags_url)
        assert_proper_response(get_response)
        return get_response.json()['result']

    assert query_tags() == {}, 'In the beginning we should have no tags'

    # Add one tag and see its response shows it was added
    tag1 = {
        'name': 'Public',
        'description': 'My public accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(tags_url, json=tag1)
    assert_proper_response(response)
    result = response.json()['result']
    assert len(result) == 1
    assert result['Public'] == tag1

    # Add a second tag and see its response shows it was added
    tag2 = {
        'name': 'private',
        'description': 'My private accounts',
        'background_color': '000000',
        'foreground_color': 'ffffff',
    }
    response = requests.put(tags_url, json=tag2)
    assert_proper_response(response)
    result = response.json()['result']
    assert len(result) == 2
    assert result['Public'] == tag1
    assert result['private'] == tag2

    # Try to add a different tag that matches tag1 case insensitive and see request fails
    tag3 = {
        'name': 'PuBlIc',
        'description': 'Some other tag',
        'background_color': 'f2f2f2',
        'foreground_color': '222222',
    }
    response = requests.put(tags_url, json=tag3)
    assert_error_response(
        response=response,
        contained_in_msg='Tag with name PuBlIc already exists.',
        status_code=HTTPStatus.CONFLICT,
    )

    # Query tags and see that both added tags are in the response
    result = query_tags()
    assert len(result) == 2
    assert result['Public'] == tag1
    assert result['private'] == tag2

    # And finally also check the DB to be certain
    db_response = rotki.data.db.get_tags()
    assert len(db_response) == 2
    assert db_response['Public'].serialize() == tag1
    assert db_response['private'].serialize() == tag2
def test_add_tag_without_description(
        rotkehlchen_api_server,
):
    """Test that adding a tag without a description works"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    tag1 = {
        'name': 'Public',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=tag1,
    )
    assert_proper_response(response)
    # The API fills in a null description; mirror that in the expected dict
    # before comparing.
    tag1['description'] = None
    data = response.json()
    assert len(data['result']) == 1
    assert data['result']['Public'] == tag1
    # Query tags and see that the added tag is there
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ),
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 1
    assert data['result']['Public'] == tag1
    # And finally also check the DB to be certain
    db_response = rotki.data.db.get_tags()
    assert len(db_response) == 1
    assert db_response['Public'].serialize() == tag1
@pytest.mark.parametrize('verb', ('PUT', 'PATCH'))
def test_add_edit_tag_errors(
        rotkehlchen_api_server,
        verb,
):
    """Test that errors in input data while adding/editing a tag are handled correctly"""
    def assert_tag_request_fails(tag, msg):
        # Fire `verb` at the tags endpoint and expect a 400 mentioning `msg`.
        response = requests.request(
            verb,
            api_url_for(
                rotkehlchen_api_server,
                "tagsresource",
            ), json=tag,
        )
        assert_error_response(
            response=response,
            contained_in_msg=msg,
            status_code=HTTPStatus.BAD_REQUEST,
        )

    # Name missing
    assert_tag_request_fails(
        {
            'description': 'My public accounts',
            'background_color': 'ffffff',
            'foreground_color': '000000',
        },
        'name": ["Missing data for required field',
    )
    # Invalid type for name
    assert_tag_request_fails(
        {
            'name': 456,
            'description': 'My public accounts',
            'background_color': 'ffffff',
            'foreground_color': '000000',
        },
        'name": ["Not a valid string',
    )
    # Invalid type for description
    assert_tag_request_fails(
        {
            'name': 'Public',
            'description': 54.2,
            'background_color': 'ffffff',
            'foreground_color': '000000',
        },
        'description": ["Not a valid string',
    )

    model_tag = {
        'name': 'Public',
        'description': 'My public accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    for field in ('background_color', 'foreground_color'):
        if verb == 'PUT':
            # Missing color -- colors are mandatory only on creation (PUT)
            tag = model_tag.copy()
            tag.pop(field)
            assert_tag_request_fails(
                tag,
                f'"{field}": ["Missing data for required field',
            )
        # Invalid color type
        tag = model_tag.copy()
        tag[field] = 55
        assert_tag_request_fails(
            tag,
            f'"{field}": ["Failed to deserialize color code from int',
        )
        # Wrong kind of string
        tag = model_tag.copy()
        tag[field] = 'went'
        assert_tag_request_fails(
            tag,
            f'"{field}": ["The given color code value \\"went\\" could '
            f'not be processed as a hex color value',
        )
        # Hex code but out of range
        tag = model_tag.copy()
        tag[field] = 'ffef01ff'
        assert_tag_request_fails(
            tag,
            f'"{field}": ["The given color code value \\"ffef01ff\\" is out '
            f'of range for a normal color field',
        )
        # Hex code but not enough digits
        tag = model_tag.copy()
        tag[field] = 'ff'
        assert_tag_request_fails(
            tag,
            f'"{field}": ["The given color code value \\"ff\\" does not '
            f'have 6 hexadecimal digits',
        )
def test_edit_tags(
        rotkehlchen_api_server,
):
    """Test that editing a tag via the REST API works fine"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Add two tags
    tag1 = {
        'name': 'Public',
        'description': 'My public accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=tag1,
    )
    assert_proper_response(response)
    tag2 = {
        'name': 'private',
        'description': 'My private accounts',
        'background_color': '000000',
        'foreground_color': 'ffffff',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=tag2,
    )
    assert_proper_response(response)
    # Now try to edit the second tag and change all its fields
    edit_tag_data = {
        'name': 'PrIvAtE',  # notice that name should match case insensitive
        'description': 'My super private accounts',
        'background_color': '010101',
        'foreground_color': 'fefefe',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=edit_tag_data,
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 2
    assert data['result']['Public'] == tag1
    # the response is expected to echo the canonical (stored) lowercase name
    tag2 = edit_tag_data
    tag2['name'] = 'private'
    assert data['result']['private'] == tag2
    # Now try to edit the second tag and change all but description
    edit_tag_data = {
        'name': 'private',
        'background_color': '020202',
        'foreground_color': 'fafafa',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=edit_tag_data,
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 2
    assert data['result']['Public'] == tag1
    # fields omitted from the edit payload must keep their previous value
    edit_tag_data['description'] = tag2['description']
    assert data['result']['private'] == edit_tag_data
    tag2 = data['result']['private']
    # Now try to edit the second tag and change only foreground_color
    edit_tag_data = {
        'name': 'private',
        'foreground_color': 'fbfbfb',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=edit_tag_data,
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 2
    assert data['result']['Public'] == tag1
    tag2['foreground_color'] = edit_tag_data['foreground_color']
    assert data['result']['private'] == tag2
    tag2 = data['result']['private']
    # Now try to edit the second tag and change only background_color
    edit_tag_data = {
        'name': 'private',
        'background_color': '000000',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=edit_tag_data,
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 2
    assert data['result']['Public'] == tag1
    tag2['background_color'] = edit_tag_data['background_color']
    assert data['result']['private'] == tag2
    # Now try to edit a tag without modifying any field and see it's an error
    edit_tag_data = {'name': 'private'}
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=edit_tag_data,
    )
    assert_error_response(
        response=response,
        contained_in_msg='No field was given to edit for tag "private"',
        status_code=HTTPStatus.BAD_REQUEST,
    )
    # Now try to edit a non-existing tag
    edit_tag_data = {
        'name': 'hello',
        'background_color': '000000',
    }
    response = requests.patch(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=edit_tag_data,
    )
    assert_error_response(
        response=response,
        contained_in_msg='Tried to edit tag with name "hello" which does not exist',
        status_code=HTTPStatus.CONFLICT,
    )
    # Query tags and see that both added/edited tags are in the response
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ),
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 2
    assert data['result']['Public'] == tag1
    assert data['result']['private'] == tag2
    # And finally also check the DB to be certain
    db_response = rotki.data.db.get_tags()
    assert len(db_response) == 2
    assert db_response['Public'].serialize() == tag1
    assert db_response['private'].serialize() == tag2
def test_delete_tags(
        rotkehlchen_api_server,
):
    """Test that deleting a tag via the REST API works fine"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Add two tags
    tag1 = {
        'name': 'Public',
        'description': 'My public accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=tag1,
    )
    assert_proper_response(response)
    tag2 = {
        'name': 'private',
        'description': 'My private accounts',
        'background_color': '000000',
        'foreground_color': 'ffffff',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=tag2,
    )
    assert_proper_response(response)
    # the PUT response contains the full updated mapping of all tags
    data = response.json()
    assert len(data['result']) == 2
    assert data['result']['Public'] == tag1
    assert data['result']['private'] == tag2
    # Query tags and see that both added tags are in the response
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ),
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 2
    assert data['result']['Public'] == tag1
    assert data['result']['private'] == tag2
    # Now delete the first tag
    delete_tag_data = {
        'name': 'pUbLiC',  # notice that name should match case insensitive
    }
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=delete_tag_data,
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 1
    # Now try to delete a non existing tag
    delete_tag_data = {'name': 'hello'}
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=delete_tag_data,
    )
    assert_error_response(
        response=response,
        contained_in_msg='Tried to delete tag with name "hello" which does not exist',
        status_code=HTTPStatus.CONFLICT,
    )
    # Query tags and see that the deleted tag is not in the response
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ),
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 1
    assert data['result']['private'] == tag2
    # And finally also check the DB to be certain
    db_response = rotki.data.db.get_tags()
    assert len(db_response) == 1
    assert db_response['private'].serialize() == tag2
def test_delete_tag_errors(
        rotkehlchen_api_server,
):
    """Test that errors in input data while deleting a tag are handled correctly"""
    # (payload, expected error fragment) pairs exercised against DELETE
    error_cases = [
        # Name missing
        ({}, 'name": ["Missing data for required field'),
        # Invalid type for name
        ({'name': 55.52}, 'name": ["Not a valid string'),
    ]
    for payload, expected_msg in error_cases:
        response = requests.delete(
            api_url_for(
                rotkehlchen_api_server,
                "tagsresource",
            ), json=payload,
        )
        assert_error_response(
            response=response,
            contained_in_msg=expected_msg,
            status_code=HTTPStatus.BAD_REQUEST,
        )
@pytest.mark.parametrize('number_of_eth_accounts', [0])
def test_delete_utilized_tag(rotkehlchen_api_server):
    """
    Test that deleting a tag that is already utilized by an account
    also removes it from the account"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Add two tags
    tag1 = {
        'name': 'public',
        'description': 'My public accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tagsresource',
        ), json=tag1,
    )
    assert_proper_response(response)
    tag2 = {
        'name': 'desktop',
        'description': 'Accounts that are stored in the desktop PC',
        'background_color': '000000',
        'foreground_color': 'ffffff',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tagsresource',
        ), json=tag2,
    )
    assert_proper_response(response)
    # Now add 2 accounts both of them using the above tags
    new_btc_accounts = [UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2]
    btc_balances = ['10000', '500500000']
    # setup_balances patches the BTC balance queries so no network calls happen
    setup = setup_balances(
        rotki,
        ethereum_accounts=None,
        btc_accounts=new_btc_accounts,
        eth_balances=None,
        token_balances=None,
        btc_balances=btc_balances,
    )
    accounts_data = [{
        "address": new_btc_accounts[0],
        "label": 'my btc miner',
        'tags': ['public', 'desktop'],
    }, {
        "address": new_btc_accounts[1],
        'label': 'other account',
        'tags': ['desktop'],
    }]
    with setup.bitcoin_patch:
        response = requests.put(api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='BTC',
        ), json={'accounts': accounts_data})
    assert_proper_response(response)
    assert_btc_balances_result(
        json_data=response.json(),
        btc_accounts=new_btc_accounts,
        btc_balances=btc_balances,
        also_eth=False,
    )
    # Now delete the tag used by both accounts
    delete_tag_data = {
        'name': 'desktop',
    }
    response = requests.delete(
        api_url_for(
            rotkehlchen_api_server,
            "tagsresource",
        ), json=delete_tag_data,
    )
    assert_proper_response(response)
    data = response.json()
    assert len(data['result']) == 1
    assert data['result']['public'] is not None
    # Now check the DB directly and see that tag mappings of the deleted tag are gone
    cursor = rotki.data.db.conn.cursor()
    query = cursor.execute('SELECT object_reference, tag_name FROM tag_mappings;').fetchall()
    # only the 'public' mapping of the first account should remain
    assert len(query) == 1
    assert query[0][0] == UNIT_BTC_ADDRESS1
    assert query[0][1] == 'public'
|
from typing import List, Dict, Callable, Any, Tuple
from pyri.webui_browser.plugins.variable_dialog import PyriWebUIBrowserVariableDialogInfo, PyriWebUIBrowserVariableDialogPluginFactory, PyriWebUIBrowserVariableDialogBase
from pyri.webui_browser import PyriWebUIBrowser
from .new_calibrate_intrinsic_dialog import show_new_camera_calibration_intrinsic_dialog
from .new_calibrate_extrinsic_dialog import show_new_camera_calibration_extrinsic_dialog
from .new_calibrate_robot_origin_dialog import show_new_robot_origin_calibration_dialog
from .new_image_template_dialog import show_new_image_template_dialog
from .new_image_roi_dialog import show_new_image_roi_dialog
# Dialog metadata for each vision-related variable kind, keyed by
# (Robot Raconteur type name, tag).
# NOTE(review): the second key element is written as ("some_tag"), which is a
# parenthesized *string*, not a 1-tuple — the declared key type
# Tuple[str, Tuple[str]] (see get_variable_dialog_infos) suggests ("some_tag",)
# was intended. Confirm how consumers look these keys up before changing them.
_variable_dialog_infos = {
    ("com.robotraconteur.imaging.camerainfo.CameraCalibration",("camera_calibration_intrinsic")): \
        PyriWebUIBrowserVariableDialogInfo(
            "vision_camera_calibration_intrinsic",
            "Camera Calibration Intrinsic",
            "com.robotraconteur.imaging.camerainfo.CameraCalibration",
            ["camera_calibration_intrinsic"],
            "Camera Intrinsic Calibration Parameters",
        ),
    ("com.robotraconteur.geometry.NamedPoseWithCovariance",("camera_calibration_extrinsic")): \
        PyriWebUIBrowserVariableDialogInfo(
            "vision_camera_calibration_extrinsic",
            "Camera Calibration Extrinsic",
            "com.robotraconteur.geometry.NamedPoseWithCovariance",
            ["camera_calibration_extrinsic"],
            "Camera Extrinsic Calibration Parameters"
        ),
    ("com.robotraconteur.geometry.NamedPoseWithCovariance",("robot_origin_pose_calibration")): \
        PyriWebUIBrowserVariableDialogInfo(
            "robot_origin_pose_calibration",
            "Robot Origin Pose Calibration",
            "com.robotraconteur.geometry.NamedPoseWithCovariance",
            ["robot_origin_pose_calibration"],
            "Robot Origin Pose Calibration"
        ),
    ("com.robotraconteur.image.CompressedImage",("image_template")): \
        PyriWebUIBrowserVariableDialogInfo(
            "image_template",
            "Image Template",
            "com.robotraconteur.image.CompressedImage",
            ["image_template"],
            "Template image for matching"
        ),
    ("com.robotraconteur.geometry.BoundingBox2D",("image_roi")): \
        PyriWebUIBrowserVariableDialogInfo(
            "image_roi",
            "Image ROI",
            "com.robotraconteur.geometry.BoundingBox2D",
            ["image_roi"],
            "Image Region of Interest (ROI)"
        )
}
class PyriVisionWebUIBrowserVariableDialogPluginFactory(PyriWebUIBrowserVariableDialogPluginFactory):
    """Factory exposing the vision-related 'new variable' dialogs to the WebUI."""

    # (variable type, required tag, dialog launcher) checked in declaration order.
    _new_dialog_dispatch = (
        ("com.robotraconteur.imaging.camerainfo.CameraCalibration",
         "camera_calibration_intrinsic", show_new_camera_calibration_intrinsic_dialog),
        ("com.robotraconteur.geometry.NamedPoseWithCovariance",
         "camera_calibration_extrinsic", show_new_camera_calibration_extrinsic_dialog),
        ("com.robotraconteur.geometry.NamedPoseWithCovariance",
         "robot_origin_pose_calibration", show_new_robot_origin_calibration_dialog),
        ("com.robotraconteur.image.CompressedImage",
         "image_template", show_new_image_template_dialog),
        ("com.robotraconteur.geometry.BoundingBox2D",
         "image_roi", show_new_image_roi_dialog),
    )

    def __init__(self):
        super().__init__()

    def get_plugin_name(self) -> str:
        """Return the well-known name of this WebUI plugin."""
        return "pyri-vision-browser"

    def get_variable_dialog_infos(self) -> Dict[Tuple[str,Tuple[str]],PyriWebUIBrowserVariableDialogInfo]:
        """Return the module-level table of supported variable dialogs."""
        return _variable_dialog_infos

    def show_variable_new_dialog(self, new_name: str, variable_type: str, variable_tags: str, core: "PyriWebUIBrowser") -> None:
        """Open the 'new variable' dialog matching the requested type and tags."""
        for dialog_type, dialog_tag, show_dialog in self._new_dialog_dispatch:
            if variable_type == dialog_type and dialog_tag in variable_tags:
                show_dialog(new_name, variable_type, variable_tags, core)
                return
        assert False, "Invalid new variable dialog type requested"

    def show_variable_edit_dialog(self, variable_name: str, variable_type: str, variable_tags: List[str], core: "PyriWebUIBrowser") -> None:
        """Editing existing variables is not supported by this plugin."""
        raise NotImplementedError()
def get_webui_browser_variable_dialog_factory():
    """Plugin entry point: return this plugin's variable-dialog factory instance."""
    return PyriVisionWebUIBrowserVariableDialogPluginFactory()
|
from typing import List
from uuid import UUID
from fastapi import APIRouter
from fastapi.param_functions import Depends
from sqlmodel import Session
from starlette.status import HTTP_201_CREATED
from src.core.controller import user
from src.core.helpers.database import make_session
from src.core.models import Context, CreateUser, QueryUser, User
from src.core.models.user import UpdateUser, UpdateUserPassword
from src.utils.dependencies import context_manager, get_current_user
router = APIRouter()


@router.get("/me", response_model=User, response_model_exclude={"password_hash": ...})
async def get_me(current_user: User = Depends(get_current_user)):
    """Return the currently authenticated user (password hash excluded)."""
    return current_user
@router.get("/", response_model=List[User], response_model_exclude={"password_hash": ...})
async def get(
query: QueryUser = Depends(),
session: Session = Depends(make_session),
context: Context = Depends(context_manager),
):
return user.get_all(session, query, context=context)
@router.get("/{user_id}", response_model=User, response_model_exclude={"password_hash": ...})
async def get_by_id(
user_id: UUID, session: Session = Depends(make_session), context: Context = Depends(context_manager)
):
return user.get_by_id(session, user_id, context=context)
@router.post("/", response_model=User, status_code=HTTP_201_CREATED, response_model_exclude={"password_hash": ...})
async def create(
schema: CreateUser, session: Session = Depends(make_session), context: Context = Depends(context_manager)
):
return user.create(session, schema, context=context)
@router.post("/{user_id}", response_model=User, response_model_exclude={"password_hash": ...})
async def update_by_id(
user_id: UUID,
data: UpdateUser,
session: Session = Depends(make_session),
context: Context = Depends(context_manager),
):
return user.update_by_id(session, user_id, data, context)
@router.post("/password/{user_id}")
async def update_password_by_id(
user_id: UUID,
data: UpdateUserPassword,
session: Session = Depends(make_session),
context: Context = Depends(context_manager),
):
return user.update_password(session, user_id, data, context)
|
# Copyright 2019-2020 ETH Zurich and the DaCe authors. All rights reserved.
""" State elimination transformations """
import networkx as nx
from dace import dtypes, registry, sdfg
from dace.sdfg import nodes
from dace.sdfg import utils as sdutil
from dace.transformation import transformation
from dace.config import Config
@registry.autoregister_params(strict=True)
class EndStateElimination(transformation.Transformation):
    """
    End-state elimination removes a redundant state that has one incoming edge
    and no contents.
    """

    # Pattern node: the candidate end state to be removed.
    _end_state = sdfg.SDFGState()

    @staticmethod
    def expressions():
        """Match any single state; suitability is decided in can_be_applied."""
        return [sdutil.node_path_graph(EndStateElimination._end_state)]

    @staticmethod
    def can_be_applied(graph, candidate, expr_index, sdfg, strict=False):
        """Applicable only to an empty sink state with one unconditional in-edge."""
        state = graph.nodes()[candidate[EndStateElimination._end_state]]
        out_edges = graph.out_edges(state)
        in_edges = graph.in_edges(state)
        # If this is an end state, there are no outgoing edges
        if len(out_edges) != 0:
            return False
        # We only match end states with one source and no conditions
        if len(in_edges) != 1:
            return False
        edge = in_edges[0]
        if not edge.data.is_unconditional():
            return False
        # Only empty states can be eliminated
        if state.number_of_nodes() > 0:
            return False
        return True

    @staticmethod
    def match_to_str(graph, candidate):
        """Label of the matched state, for transformation listings."""
        state = graph.nodes()[candidate[EndStateElimination._end_state]]
        return state.label

    def apply(self, sdfg):
        """Remove the matched end state (its incoming edge is removed with it).

        NOTE(review): the in-edge may still carry symbol assignments, which are
        discarded along with it — confirm this is intended for all callers.
        """
        state = sdfg.nodes()[self.subgraph[EndStateElimination._end_state]]
        sdfg.remove_node(state)
@registry.autoregister
class StateAssignElimination(transformation.Transformation):
    """
    State assign elimination removes all assignments into the final state
    and subsumes the assigned value into its contents.
    """

    # Pattern node: the candidate end state whose in-edge assignments are inlined.
    _end_state = sdfg.SDFGState()

    @staticmethod
    def expressions():
        """Match any single state; suitability is decided in can_be_applied."""
        return [sdutil.node_path_graph(StateAssignElimination._end_state)]

    @staticmethod
    def can_be_applied(graph, candidate, expr_index, sdfg, strict=False):
        """Applicable to a sink state whose single in-edge carries assignments."""
        state = graph.nodes()[candidate[StateAssignElimination._end_state]]
        out_edges = graph.out_edges(state)
        in_edges = graph.in_edges(state)
        # If this is an end state, there are no outgoing edges
        if len(out_edges) != 0:
            return False
        # We only match end states with one source and at least one assignment
        if len(in_edges) != 1:
            return False
        edge = in_edges[0]
        if len(edge.data.assignments) == 0:
            return False
        return True

    @staticmethod
    def match_to_str(graph, candidate):
        """Label of the matched state, for transformation listings."""
        state = graph.nodes()[candidate[StateAssignElimination._end_state]]
        return state.label

    def apply(self, sdfg):
        """Substitute each assigned symbol by its value inside the end state.

        NOTE(review): the edge's conditions/assignments themselves are left in
        place except for being cleared at the end; cross-referencing
        assignments (e.g. {m: n, n: m}) are undefined behavior per the comment
        below — confirm upstream guarantees none are present.
        """
        state = sdfg.nodes()[self.subgraph[StateAssignElimination._end_state]]
        edge = sdfg.in_edges(state)[0]
        # Since inter-state assignments that use an assigned value leads to
        # undefined behavior (e.g., {m: n, n: m}), we can replace each
        # assignment separately.
        for varname, assignment in edge.data.assignments.items():
            state.replace(varname, assignment)
        # Remove assignments from edge
        edge.data.assignments = {}
|
# Enter script code
import re

# Decide which key sequence to forward based on the active window's WM class.
winClass = window.get_active_class()
# Regexes as raw strings; the original character class [g|G] also matched a
# literal '|' — [gG] expresses the intended case variation exactly.
isGoogleChrome1 = re.search(r"google-chrome\.[gG]oogle-chrome", winClass)
isGoogleChrome2 = re.search(r"Google-chrome-stable\.Google-chrome-stable", winClass)
isTerminalWin = re.search(r"x+terminal.*", winClass)
# (an unused konsole\.konsole check was removed here)
if isTerminalWin or isGoogleChrome1 or isGoogleChrome2:
    # Tabbed apps: go to the previous tab.
    keyboard.send_keys("<ctrl>+<page_up>")
else:
    # Other windows: forward the original shortcut unchanged.
    keyboard.send_keys("<alt>+<super>+<left>")
|
# encoding:utf-8
import csv
import os
import time
import string
# Controller class: samples an app's network traffic over adb and stores it for CSV export
class Controller(object):
    """Samples the network traffic of a target Android app via adb.

    Collects one (timestamp, traffic-in-KB) row per sample and can persist the
    collected rows to ``traffic.csv``.
    """

    def __init__(self, count):
        # Number of sampling iterations to run.
        self.counter = count
        # Collected rows; the first row doubles as the CSV header.
        self.all_data = [("timestamp", "traffic")]

    def test_process(self):
        """Take a single traffic sample of the target process.

        Uses Windows `findstr`; on Mac/Linux use `grep` instead.
        """
        cmd = "adb shell ps | findstr org.chromium.webview_shell"
        result = os.popen(cmd)
        # The pid is the 6th whitespace-separated column of the matching line.
        pid = result.readlines()[0].split(" ")[5]
        # Per-process network counters.
        traffic = os.popen("adb shell cat /proc/" + pid + "/net/dev")
        # Default every counter to "0" so a missing interface cannot raise
        # UnboundLocalError in the sum below.
        receive = transmit = receive2 = transmit2 = "0"
        for line in traffic:
            # First network interface.
            if "eth0" in line:
                # Collapse whitespace to '#', then take the received (col 1)
                # and transmitted (col 9) byte counters.
                line = "#".join(line.split())
                receive = line.split("#")[1]
                transmit = line.split("#")[9]
            # Second network interface.
            elif "eth1" in line:
                line2 = "#".join(line.split())
                receive2 = line2.split("#")[1]
                transmit2 = line2.split("#")[9]
        # Total traffic; int() replaces the removed Python 2-only string.atoi.
        all_traffic = int(receive) + int(transmit) + int(receive2) + int(transmit2)
        # Convert bytes to KB.
        all_traffic = all_traffic / 1024
        current_time = self.get_current_time()
        self.all_data.append((current_time, all_traffic))

    def run(self):
        """Run the sampling loop, taking one sample every 5 seconds."""
        while self.counter > 0:
            self.test_process()
            self.counter = self.counter - 1
            time.sleep(5)

    @staticmethod
    def get_current_time():
        """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
        current_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        return current_time

    def save_data_to_csv(self):
        """Write all collected samples to traffic.csv.

        Python 3 fix: the removed file(..., 'wb') builtin is replaced by
        open() with newline='', as required by the csv module.
        """
        with open('traffic.csv', 'w', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerows(self.all_data)
if __name__ == '__main__':
    # Take 5 samples (one every 5 seconds), then persist them to traffic.csv.
    controller = Controller(5)
    controller.run()
    controller.save_data_to_csv()
|
"""obstacle_avoid_test controller."""
# You may need to import some classes of the controller module. Ex:
# from controller import Robot, LED, DistanceSensor
from controller import Supervisor
from odometry import Odometry
from data_collector import DataCollector
from predictor import Predictor
import matplotlib.pyplot as plt
import numpy as np
import math
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# hello = tf.constant("hello TensorFlow!")
# sess=tf.Session()
# print(sess.run(hello))
# --- Controller constants ---
MAX_SPEED = 6          # wheel motor speed cap
TIME_STEP = 8
WHEEL_RADIUS = 0.05
SAMPLING_PERIOD = 10
MAX_X = 2              # arena extent used to clamp the correction search (x)
MAX_Y = 1.5            # arena extent used to clamp the correction search (y)
ENCODER_UNIT = 159.23  # scale applied to wheel position readings — TODO confirm units
INIT_X = 0.0
INIT_Y = 0.0
INIT_ANGLE = 0
PRED_STEPS = 450       # loop iterations between correction (grid-search) runs

# Running corrections applied on top of raw odometry.
correction_x = 0
correction_y = 0
correction_theta = 0

# create the Robot instance.
robot = Supervisor()
robot_sup = robot.getFromDef("e-puck")
robot_trans = robot_sup.getField("translation")
compass = robot.getCompass("compass")
motorLeft = robot.getMotor("left wheel motor")
motorRight = robot.getMotor("right wheel motor")
positionLeft = robot.getPositionSensor("left wheel sensor")
positionRight = robot.getPositionSensor("right wheel sensor")
predictor = Predictor()

timestep = int(robot.getBasicTimeStep())

# Trajectory logs: ground truth (supervisor), raw odometry and corrections.
x = []
y = []
theta = []
distance_sensors_info = []
x_odometry = []
y_odometry = []
theta_odometry = []
sensorNames = ['ds0', 'ds1', 'ds2', 'ds3', 'ds4', 'ds5', 'ds6', 'ds7']
x_pred = []
y_pred = []
theta_pred = []

data_collector = DataCollector()
def init():
    """Enable the sensors and put both wheel motors in velocity-control mode."""
    compass.enable(timestep)
    # motorLeft.setPosition(0.5/WHEEL_RADIUS)
    # motorRight.setPosition(0.5/WHEEL_RADIUS)
    # An infinite position target switches the motors to velocity control.
    motorLeft.setPosition(float('inf'))
    motorRight.setPosition(float('inf'))
    positionRight.enable(timestep)
    positionLeft.enable(timestep)
def robot_to_xy(x, y):
    """Translate a robot-frame coordinate into the global arena frame."""
    offset_x, offset_y = 1, 0.75
    return x + offset_x, y + offset_y
def xy_to_robot(x, y):
    """Translate a global arena coordinate back into the robot frame."""
    offset_x, offset_y = 1, 0.75
    return x - offset_x, y - offset_y
def get_bearing_degrees():
    """Return the robot heading in degrees in [0, 360) from the compass."""
    north = compass.getValues()
    # NOTE(review): arctan2(north[0], north[2]) assumes this mapping between
    # the compass axes and the world frame — confirm against the world file.
    rad = np.arctan2(north[0], north[2])
    bearing = (rad) / np.pi * 180
    if bearing < 0.0:
        bearing += 360
    # Rotate/mirror so 0 degrees matches this controller's x/y convention.
    bearing = 360 - bearing - 90
    if bearing < 0.0:
        bearing += 360
    return bearing
def step():
    """Advance the simulation one timestep; returns False when it ends."""
    return (robot.step(timestep) != -1)
def save_supervisor_coordinates():
    """Append the ground-truth pose (from the supervisor) to x/y/theta."""
    # true robot position information
    trans_info = robot_trans.getSFVec3f()
    # world (z, x) components map to arena (x, y)
    x_coordinate, y_coordinate = robot_to_xy(trans_info[2], trans_info[0])
    x.append(x_coordinate)
    y.append(y_coordinate)
    theta.append((get_bearing_degrees()))
def save_odometry_coordinates(coordinate):
    """Append the odometry pose shifted into the global frame plus corrections."""
    # convert robot coordinates into global coordinate system
    x_odometry.append(1 + 2*INIT_X - coordinate.x + correction_x)
    y_odometry.append(0.75 + 2*INIT_Y - coordinate.y + correction_y)
    theta_odometry.append(convert_angle_to_xy_coordinates(coordinate.theta) + correction_theta)
def save_sensor_distances(distanceSensors):
    """Record one row of distance readings; out-of-range readings become None."""
    readings = []
    for sensor in distanceSensors:
        value = sensor.getValue()
        # A reading of exactly 10 means "no real measurement" -> store None.
        readings.append(None if value == 10 else value)
    distance_sensors_info.append(readings)
def get_sensor_distance():
    """Return the eight distance sensors, enabling each at the controller timestep."""
    # Read the sensors, like:
    distanceSensors = []
    for sensorName in sensorNames:
        sensor = robot.getDistanceSensor(sensorName)
        sensor.enable(timestep)
        distanceSensors.append(sensor)
    return distanceSensors
def calculate_velocity(distanceSensors):
    """Compute (left, right) wheel speeds, steering away from close obstacles.

    Gaussian noise is added to each reading before thresholding; sensors 0/1
    face right and 6/7 face left.
    """
    readings = [sensor.getValue() + np.random.normal(0, 0.1) for sensor in distanceSensors]
    obstacle_right = readings[0] < 0.15 or readings[1] < 0.15
    obstacle_left = readings[6] < 0.15 or readings[7] < 0.15

    # Cruise at half speed by default.
    left_speed = 0.5 * MAX_SPEED
    right_speed = 0.5 * MAX_SPEED

    # Turn away from whichever side reports an obstacle (left takes priority).
    if obstacle_left:
        left_speed += 0.7 * MAX_SPEED
        right_speed -= 0.7 * MAX_SPEED
    elif obstacle_right:
        left_speed -= 0.7 * MAX_SPEED
        right_speed += 0.7 * MAX_SPEED
    return left_speed, right_speed
def convert_angle_to_xy_coordinates(angle):
    """Convert a heading in radians to this controller's degree convention.

    The result is shifted by -180 degrees and, when negative, wrapped up by a
    single 360-degree turn (matching the original single-add behavior).
    """
    degrees = angle * 180 / np.pi - 180
    if degrees < 0.0:
        degrees += 360
    return degrees
def plot():
    """Plot the real, odometry and corrected trajectories and save as EPS."""
    # Enter here exit cleanup code.
    plt.ylim([0, 1.5])
    plt.xlim([0, 2])
    plt.xlabel("x")
    plt.ylabel("y")
    plt.plot(x, y, label="real")
    plt.plot(x_odometry, y_odometry, label="odometry")
    plt.plot(x_pred, y_pred, 's', label="correction", marker='o')
    plt.title("Robot position estimation")
    plt.legend()
    # assumes a results/ directory exists next to the controller — TODO confirm
    plt.savefig("results/position.eps", format='eps')
def correct_state(x, y, theta, sensors_data, delta = 10, omega = 3):
    """Grid-search the pose neighbourhood for the state best explaining the sensors.

    Candidates span +/- delta centimetres around (x, y) and +/- omega degrees
    around theta, each scored by predictor.predict. Returns the best
    [x, y, theta] candidate, or -1 when no candidate yields usable data.

    NOTE(review): theta candidates are clamped (not wrapped) at 0/360, so
    headings near the seam search a truncated range — confirm intended.
    """
    # corresponds to the E set
    errors = []
    # corresponds to the X set
    predictions = []
    xrange = [l/100 for l in range(max(0, int(x*100) - delta), min(MAX_X*100, int(x*100) + delta), 1)]
    yrange = [l/100 for l in range(max(0, int(y*100) - delta), min(int(MAX_Y*100), int(y*100) + delta), 1)]
    thetarange = [l for l in range(max(0, int(theta) - omega), min(360, int(theta) + omega), 1)]
    print("XRANGE------------------")
    print(x)
    print(xrange)
    print("YRANGE------------------")
    print(y)
    print(yrange)
    print("THETARANGE------------------")
    print("theta: ", theta)
    print(thetarange)
    for i in xrange:
        for j in yrange:
            for k in thetarange:
                # NOTE(review): errors are log-scored, so predictor.predict is
                # assumed to return a strictly positive error — confirm.
                error, bad_data = predictor.predict(i, j, k, sensors_data)
                if not bad_data:
                    predictions.append([i, j, k])
                    errors.append(math.log(error))
    if len(errors) > 0:
        # best candidate = smallest log-error
        ix = errors.index(min(errors))
        return predictions[ix]
    return -1
if __name__ == '__main__':
    init()
    step()
    # Seed odometry from the initial encoder readings.
    odometry = Odometry(ENCODER_UNIT * (positionLeft.getValue()),
                        ENCODER_UNIT * (positionRight.getValue()), INIT_X, INIT_Y, INIT_ANGLE)
    count = 0
    while(True):
        odometry_info = odometry.track_step(ENCODER_UNIT * (positionLeft.getValue()),
                                            ENCODER_UNIT * (positionRight.getValue()))
        if not step():
            # Simulation over: dump the collected data and plot trajectories.
            # NOTE(review): there is no break here, so after the simulation
            # ends this branch re-saves and re-plots every iteration —
            # confirm whether a break/exit was intended.
            # print('saving data')
            data_collector.collect(x_odometry, y_odometry, theta_odometry, x, y, theta, np.array(distance_sensors_info))
            plot()
        print('Compass: ', get_bearing_degrees(), 'Odometry:', convert_angle_to_xy_coordinates(odometry_info.theta))
        distanceSensors = get_sensor_distance()
        # collect data
        save_sensor_distances(distanceSensors)
        save_odometry_coordinates(odometry_info)
        save_supervisor_coordinates()
        # calculate new velocity
        left_speed, right_speed = calculate_velocity(distanceSensors)
        motorLeft.setVelocity(left_speed)
        motorRight.setVelocity(right_speed)
        # correction step each 100 steps
        if count % PRED_STEPS == 0:
            # NOTE(review): raw sensor objects are passed here while the data
            # log stores extracted values — confirm predictor.predict expects
            # sensor objects.
            pred = correct_state(x_odometry[-1], y_odometry[-1], theta_odometry[-1], distanceSensors)
            if pred != -1:
                # save correction
                x_pred.append(pred[0])
                y_pred.append(pred[1])
                theta_pred.append(pred[2])
                # calculate correction
                correction_x = correction_x + (x_pred[-1] - x_odometry[-1])
                correction_y = correction_y + (y_pred[-1] - y_odometry[-1])
                correction_theta = correction_theta + (theta_pred[-1] - theta_odometry[-1])
        count += 1
|
import unittest
from datetime import datetime
import sqlalchemy as sa
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy_mixins import TimestampsMixin
Base = declarative_base()


class BaseModel(Base, TimestampsMixin):
    """Model to use as base."""
    # Abstract: not mapped to a table itself; concrete models inherit columns.
    __abstract__ = True

    # Surrogate primary key shared by all concrete models.
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String)
class User(BaseModel):
    """Concrete example user model used as a fixture in the tests below."""
    __tablename__ = 'user'
class TestTimestamps(unittest.TestCase):
    """Test case for Timestamp mixin."""

    @classmethod
    def setUpClass(cls):
        # One in-memory SQLite engine shared by all tests in this class.
        cls.engine = create_engine('sqlite:///:memory:', echo=False)

    def setUp(self):
        """Create the schema and insert a single user fixture."""
        self.session = Session(self.engine)
        Base.metadata.create_all(self.engine)
        user_1 = User(name='User')
        self.session.add(user_1)
        self.session.commit()

    def tearDown(self):
        # Drop all tables so every test starts from a clean schema.
        Base.metadata.drop_all(self.engine)

    def test_timestamp_must_be_abstract(self):
        """Test whether TimestampsMixin is abstract."""
        self.assertTrue(hasattr(TimestampsMixin, '__abstract__'),
                        'TimestampsMixin must have attribute __abstract__')
        self.assertTrue(TimestampsMixin.__abstract__,
                        '__abstract__ must be True')

    def test_timestamp_has_datetime_columns(self):
        """Test whether TimestampsMixin has attrs created_at and updated_at."""
        user = self.session.query(User).first()
        self.assertTrue(hasattr(User, 'created_at'),
                        'Timestamp doesn\'t have created_at attribute.')
        self.assertEqual(datetime, type(user.created_at),
                         'created_at column should be datetime')
        self.assertTrue(hasattr(User, 'updated_at'),
                        'Timestamp doesn\'t have updated_at attribute.')
        self.assertEqual(datetime, type(user.updated_at),
                         'updated_at column should be datetime')

    def test_updated_at_column_must_change_value(self):
        """Test whether updated_at value is most recently after update."""
        user = self.session.query(User).first()
        dt_1 = user.updated_at
        user.name = 'New name'
        self.session.commit()
        dt_2 = user.updated_at
        # NOTE(review): assumes the two commits land on distinct clock ticks;
        # equal timestamps on a very fast run would fail — confirm the mixin
        # uses a sufficiently high-resolution default.
        self.assertLess(dt_1, dt_2, 'dt_1 should be older than dt_2')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
from setuptools import setup, find_packages
import re
import os
# Load __version__ from the package source without importing the package
# (avoids pulling in hermione's runtime dependencies at build time).
exec(open('hermione/_version.py').read())

with open('README.md', encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='hermione-ml',
    version=__version__,  # defined by the exec() above
    author='A3Data',
    author_email='hermione@a3data.com.br',
    url='https://github.com/A3Data/hermione',
    long_description=long_description,
    long_description_content_type='text/markdown',
    packages=find_packages(),
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'Topic :: Software Development'
    ],
    keywords='machine learning mlops devops artificial intelligence',
    license='Apache License 2.0',
    install_requires=[
        'Click'
    ],
    entry_points='''
        [console_scripts]
        hermione=hermione.cli:cli
    ''',
    python_requires='>=3.6'
)
|
"""
Quick tests against arbitrarily chosen LRP test vectors from AN12304
"""
import binascii
from Crypto.Protocol.SecretSharing import _Element
from lrp import LRP, nibbles, incr_counter
def test_incr_counter():
    """incr_counter performs a big-endian byte-string increment with wrap-around."""
    cases = [
        (b"\x00", b"\x01"),
        (b"\x01", b"\x02"),
        (b"\xFF", b"\x00"),
        (b"\x12\x11", b"\x12\x12"),
        (b"\xFF\xFF", b"\x00\x00"),
        (b"\x00\x00", b"\x00\x01"),
        (b"\x00\x01", b"\x00\x02"),
        (b"\xFF\xFF\xFF\xFF", b"\x00\x00\x00\x00"),
    ]
    for value, expected in cases:
        assert expected == incr_counter(value)
def test_vectors_generate_plaintexts():
    """AN12304 test vector for the LRP plaintext table."""
    key = binascii.unhexlify("567826B8DA8E768432A9548DBE4AA3A0")
    p = LRP.generate_plaintexts(key)
    assert p[0] == binascii.unhexlify("AC20D39F5341FE98DFCA21DA86BA7914")
    assert p[15] == binascii.unhexlify("71B444AF257A93215311D758DD333247")
def test_vectors_generate_updated_keys():
    """AN12304 test vector for the LRP updated-key table."""
    key = binascii.unhexlify("567826B8DA8E768432A9548DBE4AA3A0")
    uk = LRP.generate_updated_keys(key)
    assert uk[0] == binascii.unhexlify("163D14ED24ED935373568EC521E96CF4")
    assert uk[2] == binascii.unhexlify("FE30AB50467E61783BFE6B5E0560160E")
def test_nibbles():
    """nibbles yields the high then low 4-bit half of every byte, in order."""
    assert [1, 3, 5, 9] == list(nibbles(b"\x13\x59"))
    expected = [4, 0xB, 0, 7, 3, 0xB, 2, 4, 7, 0xC, 0xD, 4, 8, 0xF, 7, 0xE, 0, 0xA]
    assert expected == list(nibbles(b"\x4B\x07\x3B\x24\x7C\xD4\x8F\x7E\x0A"))
def test_eval_lrp():
    """AN12304 test vectors for LRP evaluation (final and non-final)."""
    # (key hex, updated-key index, input, final flag, expected output hex)
    cases = [
        ("567826B8DA8E768432A9548DBE4AA3A0", 2, b"\x13\x59", True,
         "1ba2c0c578996bc497dd181c6885a9dd"),
        ("88B95581002057A93E421EFE4076338B", 2, b"\x77\x29\x9D", True,
         "e9c04556a214ac3297b83e4bdf46f142"),
        ("9AFF3EF56FFEC3153B1CADB48B445409", 3, b"\x4B\x07\x3B\x24\x7C\xD4\x8F\x7E\x0A", False,
         "909415e5c8be77563050f2227e17c0e4"),
    ]
    for key_hex, slot, data, final, expected in cases:
        key = binascii.unhexlify(key_hex)
        p = LRP.generate_plaintexts(key)
        uk = LRP.generate_updated_keys(key)
        assert LRP.eval_lrp(p, uk[slot], data, final=final).hex() == expected
def test_lricb_enc():
    """LRICB encryption known-answer test (with padding enabled)."""
    key = binascii.unhexlify("E0C4935FF0C254CD2CEF8FDDC32460CF")
    pt = binascii.unhexlify("012D7F1653CAF6503C6AB0C1010E8CB0")
    lrp = LRP(key, 0, b"\xC3\x31\x5D\xBF", pad=True)
    ct = lrp.encrypt(pt)
    assert ct.hex().upper() == "FCBBACAA4F29182464F99DE41085266F480E863E487BAAF687B43ED1ECE0D623"
def test_lricb_dec():
    """LRICB decryption round-trips the ciphertext from test_lricb_enc's vectors."""
    key = binascii.unhexlify("E0C4935FF0C254CD2CEF8FDDC32460CF")
    ct = binascii.unhexlify("FCBBACAA4F29182464F99DE41085266F480E863E487BAAF687B43ED1ECE0D623")
    lrp = LRP(key, 0, b"\xC3\x31\x5D\xBF", pad=True)
    pt = lrp.decrypt(ct)
    assert pt.hex().upper() == "012D7F1653CAF6503C6AB0C1010E8CB0"
def test_cmac_subkeys():
    """CMAC subkey derivation: k0 evaluated over zeros, doubled twice in GF(2^128), must equal kx."""
    k = binascii.unhexlify("8195088CE6C393708EBBE6C7914ECB0B")
    kx = binascii.unhexlify("2D22571A33B2965A9B49FF4395A43046")
    k0 = LRP.eval_lrp(LRP.generate_plaintexts(k), LRP.generate_updated_keys(k)[0], b"\x00" * 16, True)
    # Multiplication by the field element 4 == two doublings (subkey K1 -> K2 step).
    assert (_Element(k0) * _Element(4)).encode().hex() == kx.hex()
def test_cmac():
    """LRP-CMAC known-answer tests for three different keys and message lengths."""
    k = binascii.unhexlify("8195088CE6C393708EBBE6C7914ECB0B")
    lrp = LRP(k, 0, b"\x00" * 16, True)
    assert lrp.cmac(binascii.unhexlify("BBD5B85772C7")).hex() \
        == "AD8595E0B49C5C0DB18E77355F5AAFF6".lower()
    k = binascii.unhexlify("E2F84A0B0AF40EFEB3EEA215A436605C")
    lrp = LRP(k, 0, b"\x00" * 16, True)
    assert lrp.cmac(binascii.unhexlify("8BF1DDA9FE445560A4F4EB9CE0")).hex() \
        == "D04382DF71BC293FEC4BB10BDB13805F".lower()
    k = binascii.unhexlify("5AA9F6C6DE5138113DF5D6B6C77D5D52")
    lrp = LRP(k, 0, b"\x00" * 16, True)
    assert lrp.cmac(binascii.unhexlify("A4434D740C2CB665FE5396959189383F")).hex() \
        == "8B43ADF767E46B692E8F24E837CB5EFC".lower()
|
### IMPORT
# il file di run da importare è nella cartella pgr/, padre di quella corrente
import sys
sys.path.append('../pgr')
import imghdr
import os
from flask import Flask, render_template, request, redirect, url_for, abort, \
send_from_directory
from werkzeug.utils import secure_filename
import zipfile
import json
from pgr import PathwayGenerator
from logger import CustomLogger
import logging
from logstash_formatter import LogstashFormatterV1
from flask_cors import CORS
### CONFIGURATION
app = Flask(__name__)
# Reject request bodies larger than 10MB (Flask returns 413, handled below).
app.config['MAX_CONTENT_LENGTH'] = 10 * 1024 * 1024 #10MB
app.config['UPLOAD_EXTENSIONS'] = ['.docx', '.doc', '.pdf', '.txt']
app.config['UPLOAD_PATH'] = 'api/uploads'
cors = CORS(app)
# Send application logs to log/flask.log in logstash JSON format.
os.makedirs('log/', exist_ok=True)
flasklog = open('log/flask.log', 'a+')
handler = logging.StreamHandler(stream=flasklog)
handler.setFormatter(LogstashFormatterV1())
logging.basicConfig(handlers=[handler], level=logging.INFO)
# Doccano client shared by all endpoints for project/document management.
from doccano_api_client import DoccanoClient
from config import doccano_client_params, pilots_legend
doccano_client = DoccanoClient(
    doccano_client_params['endpoint'],
    doccano_client_params['username'],
    doccano_client_params['password']
)
@app.errorhandler(413)
def too_large(e):
    """Handle 413: request body exceeded MAX_CONTENT_LENGTH."""
    return "File is too large", 413
@app.route('/')
def index():
    """Render the upload page, listing files already present in UPLOAD_PATH."""
    files = os.listdir(app.config['UPLOAD_PATH'])
    return render_template('index.html', files=files)
@app.route('/', methods=['POST'])
def upload_files():
    """Accept a file upload from the index form, validate its extension, save it
    and trigger pathway generation on the saved file."""
    uploaded_file = request.files['file']
    filename = secure_filename(uploaded_file.filename)
    if filename != '':
        file_ext = os.path.splitext(filename)[1]
        if file_ext not in app.config['UPLOAD_EXTENSIONS'] :
            return "Invalid file", 400
        uploaded_file.save(os.path.join(app.config['UPLOAD_PATH'], filename))
        # NOTE(review): `run` is never imported or defined in this module, so this
        # call raises NameError at runtime — confirm the intended module (the
        # newer endpoints use PathwayGenerator instead).
        result_pathway = run.run(os.path.join(app.config['UPLOAD_PATH'], filename), generate_pathway=True)
        print(result_pathway)
    return '', 204
@app.route('/uploads/<filename>')
def upload(filename):
    """Serve a previously uploaded file from UPLOAD_PATH."""
    return send_from_directory(app.config['UPLOAD_PATH'], filename)
def get_project_by_name(name):
    """Return the doccano project whose name equals *name*.

    Raises:
        Exception: when no project with that exact name exists.
    """
    project_list = doccano_client.get_project_list()
    # Return the first exact name match; no need to build the full list first.
    for prj in project_list:
        if prj['name'] == name:
            return prj
    raise Exception('The project {} does not exist!'.format(name))
def get_document(metadata, project_id):
    """Return the documents in *project_id* whose third ' - '-separated metadata
    field matches that of *metadata*.

    Returns an empty list when nothing matches.  (The original returned False;
    an empty list is falsy too, so callers doing ``if document:`` keep working,
    and the function now always returns the same type.)
    """
    document_list = doccano_client.get_document_list(
        project_id=project_id,
        url_parameters={'limit': [10000], 'offset': [0], 'q': ['']})
    # Hoist the loop-invariant split: the third token identifies the document.
    target = metadata.split(' - ')[2]
    matches = [doc for doc in document_list['results']
               if doc['meta'].split(' - ')[2] == target]
    if matches:
        app.config['logger'].log(
            {'message': 'The document {} already exists.'.format(metadata.split('-')[-1].strip())})
    return matches
def refactor_export_annotations(document_dict, project_id):
    """Convert one doccano document into the flat export format
    {'text', 'labels': [[start, end, label_text], ...], 'meta'}."""
    labels = []
    for annotation in document_dict['annotations']:
        # Resolve the numeric label id into its display text.
        label_text = doccano_client.get_label_detail(
            project_id=project_id, label_id=annotation['label'])['text']
        labels.append([annotation['start_offset'], annotation['end_offset'], label_text])
    return {
        'text': document_dict['text'],
        'labels': labels,
        'meta': document_dict['meta'],
    }
def refactor_export_generations(document_list):
    """Serialize doccano generation documents into a JSONL string.

    Each line is a JSON object with the document text, its annotation texts as
    labels, and its meta string with backslashes stripped.
    """
    json_lines = []
    for document in document_list:
        print(document['meta'])
        entry = {
            'text': document['text'],
            'labels': [annotation['text'] for annotation in document['annotations']],
            'meta': document['meta'].replace("\\", ""),
        }
        json_lines.append(json.dumps(entry, ensure_ascii=False))
    # One JSON object per line, each terminated by a newline.
    return ''.join(line + '\n' for line in json_lines)
def doccano_to_dict_format(annotation_list, document, project_id):
    """Convert doccano annotations into the internal NER dict:
    {'text': <full text>, 'entities': [{start_offset, end_offset, confidence, type, value}]}."""
    text = document['text']
    entities = []
    for annotation in annotation_list:
        label = doccano_client.get_label_detail(project_id, annotation['label'])
        start = annotation['start_offset']
        end = annotation['end_offset']
        entities.append({
            'start_offset': start,
            'end_offset': end,
            'confidence': 0.8,  # doccano stores no confidence; fixed default
            'type': label['text'],
            'value': text[start:end],
        })
    return {'text': text, 'entities': entities}
# curl -i -F data='{"pilot"="Malaga","service"="Asylum Request"}' -F 'file=@/home/rizzo/Workspace/pgr/documentation/es/Asylum_and_Employment_Procedimiento_plazas.pdf' http://localhost:5000/v0.1/annotate
@app.route('/v0.2/annotate', methods=['POST'])
def annotate():
    """Annotate an uploaded document.

    Expects a multipart form with 'file' and a 'data' JSON field containing at
    least 'pilot' and 'service' (optionally 'model').  If the document already
    exists in the pilot's doccano annotation project, its stored annotations
    are returned; otherwise the document is converted, annotated, uploaded to
    doccano and the annotation dict is returned.
    """
    uploaded_file = request.files['file']
    filename = secure_filename(uploaded_file.filename)
    if filename != '':
        file_ext = os.path.splitext(filename)[1]
        if file_ext not in app.config['UPLOAD_EXTENSIONS'] :
            return "Invalid file", 400
        app.config['logger'].log({'file': 'test'})
        data = json.loads(request.form['data'])
        file_path = os.path.join('documentation/' + data['pilot'] + '/', filename)
        uploaded_file.save(file_path)
        # Instantiate PathwayGeneration object
        if 'model' in data:
            pgr = PathwayGenerator(file_path=file_path, pilot=data['pilot'], service=data['service'], use_cuda=True, cuda_device=0, annotation_model=data['model'])
        else:
            pgr = PathwayGenerator(file_path=file_path, pilot=data['pilot'], service=data['service'], use_cuda=True, cuda_device=0)
        # Check for annotation project
        project = get_project_by_name('ER ' + pilots_legend[data['pilot']] + ' Annotated Documents')
        # Check if document already exists: if so, return annotations. Otherwise, create a new one
        document = get_document(pgr.annotation_metadata, project['id'])
        if document:
            return refactor_export_annotations(document[0], project['id'])
        converted_file = pgr.do_convert()
        ner_dict = pgr.do_annotate(pgr.to_list())
        doccano_dict, ner_path = pgr.export_annotation_to_doccano()
        # WARNING: current issue of file upload/download Response -> https://github.com/doccano/doccano-client/issues/13
        try:
            doccano_client.post_doc_upload(project_id=project['id'], file_format='json', file_name=ner_path)
        except json.decoder.JSONDecodeError:
            pass
        return doccano_dict
    return 'NOK', 400
# curl -i -F data='{"pilot"="Malaga","service"="Asylum Request"}' -F 'file=@/home/rizzo/Workspace/pgr/documentation/es/Asylum_and_Employment_Procedimiento_plazas.pdf' http://localhost:5000/v0.1/generate
@app.route('/v0.2/generate', methods=['POST'])
def generate():
    """Generate a pathway for an uploaded document.

    Expects the same multipart form as /v0.2/annotate.  Returns the stored
    pathway if one already exists in the pilot's 'Pathways' doccano project;
    otherwise (re)annotates the document, splits it into sections, generates a
    pathway step per section and uploads the result to doccano.
    """
    uploaded_file = request.files['file']
    filename = secure_filename(uploaded_file.filename)
    if filename != '':
        file_ext = os.path.splitext(filename)[1]
        if file_ext not in app.config['UPLOAD_EXTENSIONS'] :
            return "Invalid file", 400
        data = json.loads(request.form['data'])
        file_path = os.path.join('documentation/' + data['pilot'] + '/', filename)
        uploaded_file.save(file_path)
        # Instantiate PathwayGeneration object
        if 'model' in data:
            pgr = PathwayGenerator(file_path=file_path, pilot=data['pilot'], service=data['service'], use_cuda=True, cuda_device=0, annotation_model=data['model'], section_split_model='section_split/models/training_unfolding_structure-2020-12-22_11-07-07_distilroberta-base')
        else:
            pgr = PathwayGenerator(file_path=file_path, pilot=data['pilot'], service=data['service'], use_cuda=True, cuda_device=0, section_split_model='section_split/models/training_unfolding_structure-2020-12-22_11-07-07_distilroberta-base')
        # Check for projects
        generation_project = get_project_by_name('ER ' + pilots_legend[data['pilot']] + ' Pathways')
        annotation_project = get_project_by_name('ER ' + pilots_legend[data['pilot']] + ' Annotated Documents')
        # Check if document already exists: if so, return annotations. Otherwise, create a new one
        document_annotation = get_document(pgr.annotation_metadata, annotation_project['id'])
        document_generation = get_document(pgr.generation_metadata, generation_project['id'])
        if document_generation:
            # Sort stored steps by the number embedded in their text (e.g. 'Step12').
            document_generation = sorted(document_generation, key=lambda x: int("".join([i for i in x['text'] if i.isdigit()])))
            return refactor_export_generations(document_generation)
        # Check if document already exists: if so, return annotations. Otherwise, create a new one
        if document_annotation:
            annotations = doccano_client.get_annotation_list(annotation_project['id'], document_annotation[0]['id'])
            pgr.ner_dict = doccano_to_dict_format(annotations, document_annotation[0], annotation_project['id'])
        converted_file = pgr.do_convert()
        ner_dict = pgr.do_annotate(pgr.to_list())
        doccano_dict, ner_path = pgr.export_annotation_to_doccano()
        try:
            doccano_client.post_doc_upload(project_id=annotation_project['id'], file_format='json', file_name=ner_path)
        except json.decoder.JSONDecodeError:
            pass
        sections = pgr.do_split()
        full_ner_dict = {}
        count = 1
        for section in sections:
            pgr.annotation_model.reset_preprocesser()
            ner_dict = pgr.do_annotate(section)
            pathway = pgr.do_generate()
            label = 'Step'+str(count)
            full_ner_dict[label] = pathway
            count = count + 1
        pathway_dict, pathway_path = pgr.export_generation_to_doccano(full_ner_dict)
        try:
            # NOTE(review): presumably a safety cap — pathways with 50+ steps are
            # not uploaded to doccano (but are still returned). Confirm intent.
            if count < 50:
                doccano_client.post_doc_upload(project_id=generation_project['id'], file_format='json', file_name=pathway_path)
        except json.decoder.JSONDecodeError:
            pass
        return pathway_dict
    return 'NOK', 400
# curl -i -F data='{"pilot"="Malaga","service"="Asylum Request"}' -F 'file=@/home/rizzo/Workspace/pgr/documentation/es/Asylum_and_Employment_Procedimiento_plazas.pdf' http://localhost:5000/v0.2/segment
@app.route('/v0.2/segment', methods=['POST'])
def segment():
    """Split an uploaded document into sections and return them in doccano format.

    Expects the same multipart form as /v0.2/annotate ('file' + 'data' JSON).
    """
    uploaded_file = request.files['file']
    filename = secure_filename(uploaded_file.filename)
    if filename != '':
        file_ext = os.path.splitext(filename)[1]
        if file_ext not in app.config['UPLOAD_EXTENSIONS'] :
            return "Invalid file", 400
        data = json.loads(request.form['data'])
        file_path = os.path.join('documentation/' + data['pilot'] + '/', filename)
        uploaded_file.save(file_path)
        # Instantiate PathwayGeneration object
        if 'model' in data:
            pgr = PathwayGenerator(file_path=file_path, pilot=data['pilot'], service=data['service'], use_cuda=True, cuda_device=0, annotation_model=data['model'], section_split_model='section_split/models/training_unfolding_structure-2020-12-22_11-07-07_distilroberta-base')
        else:
            pgr = PathwayGenerator(file_path=file_path, pilot=data['pilot'], service=data['service'], use_cuda=True, cuda_device=0, section_split_model='section_split/models/training_unfolding_structure-2020-12-22_11-07-07_distilroberta-base')
        pgr.do_convert()
        document_sections = pgr.do_split()
        return pgr.sections_to_doccano(document_sections)
    return 'NOK', 400
# curl -X POST -F data='{"pilot":"Malaga","service":"Asylum Request"}' http://easyrights.linksfoundation.com/v0.3/generate
@app.route('/v0.3/generate', methods=['POST'])
def retrieve_pathways():
    """Return a pre-computed pathway JSON for a known (pilot, service) pair.

    Expects a 'data' form field with JSON containing 'pilot' and 'service'.
    Responds with the parsed pathway dict, or a 400 listing supported services.
    """
    data = json.loads(request.form['data'])
    # Normalize once instead of repeating strip().lower() per comparison.
    pilot = data['pilot'].strip().lower()
    service = data['service'].strip().lower()
    # Dispatch table replaces the original if-chain; behavior is unchanged.
    pathway_files = {
        ('malaga', 'asylum request'): 'api/pathways/asylum_request_malaga_pathway.json',
        ('malaga', 'work permission'): 'api/pathways/work_permission_malaga_pathway.json',
        ('birmingham', 'clean air zone'): 'api/pathways/caz_birmingham_pathway.json',
        ('birmingham', 'baes esol'): 'api/pathways/baes_esol_birmingham_pathway.json',
        ('palermo', 'registration at registry office'): 'api/pathways/registry_office_palermo_pathway.json',
        ('larissa', 'certification of nationality'): 'api/pathways/nationality_certification_larissa_pathway.json',
        ('larissa', 'birth certification'): 'api/pathways/birth_certificate_larissa_pathway.json',
    }
    path = pathway_files.get((pilot, service))
    if path is not None:
        # Use a context manager so the file handle is closed (original leaked it).
        with open(path, 'r') as fh:
            return json.loads(fh.read())
    return 'Service not available yet. Supported services: \n asylum request in Malaga, \n clean air zone in Birmingham, \n registration at registry office in Palermo, \n certification of residence in Larissa', 400
@app.route("/alive", methods=['GET'])
def alive ():
return "OK", 200
if __name__ == '__main__':
    app.config['logger'] = CustomLogger('log/pgr.log')
    # NOTE(review): debug=True exposes the Werkzeug debugger — disable in production.
    app.run(host='0.0.0.0', debug=True, port=5000)
|
from typing import Any, Tuple
import logging
from enum import Enum
import numpy as np
import torch
# AUTHORSHIP
__version__ = "0.0.0dev"
__author__ = "Mirko Polato"
__copyright__ = "Copyright 2021, gossipy"
__license__ = "MIT"
__maintainer__ = "Mirko Polato, PhD"
__email__ = "mak1788@gmail.com"
__status__ = "Development"
#
__all__ = ["node",
"simul",
"utils",
"data",
"model",
"set_seed",
"DuplicateFilter",
"CreateModelMode",
"AntiEntropyProtocol",
"MessageType",
"CacheKey",
"CacheItem"]
class DuplicateFilter(object):
    """Logging filter that lets each distinct message through only once."""

    def __init__(self):
        # Messages already seen; kept under the original attribute name.
        self.msgs = set()

    def filter(self, record):
        # Allow the record only on the first sighting of its message.
        first_time = record.msg not in self.msgs
        self.msgs.add(record.msg)
        return first_time
# Configure root logging once at import time; the package logger additionally
# suppresses repeated identical messages via DuplicateFilter.
logging.basicConfig(level=logging.INFO,
                    format="[%(asctime)s] %(message)s",
                    datefmt='%d%m%y-%H:%M:%S')
LOG = logging.getLogger("gossipy")
LOG.addFilter(DuplicateFilter())
def set_seed(seed=0) -> None:
    """Seed the numpy and torch global RNGs with *seed* for reproducibility."""
    for seeder in (np.random.seed, torch.manual_seed):
        seeder(seed)
class CreateModelMode(Enum):
    """Strategies a node can use to build its local model during gossip.

    NOTE(review): names suggest the ordering of local update vs. merge with the
    received model — confirm exact semantics against the node implementations.
    """
    UPDATE = 1
    MERGE_UPDATE = 2
    UPDATE_MERGE = 3
    PASS = 4
class AntiEntropyProtocol(Enum):
    """Anti-entropy (gossip) interaction modes.

    Fix: the original had trailing commas on PUSH and PULL, accidentally making
    their values the tuples (1,) and (2,) while PUSH_PULL was the int 3.  All
    members now carry consistent int values.
    """
    PUSH = 1
    PULL = 2
    PUSH_PULL = 3
class MessageType(Enum):
    """Types of messages exchanged between gossip nodes.

    Fix: the original had trailing commas on PUSH, PULL and REPLY, accidentally
    making their values the tuples (1,), (2,), (3,) while PUSH_PULL was the int
    4.  All members now carry consistent int values.
    """
    PUSH = 1
    PULL = 2
    REPLY = 3
    PUSH_PULL = 4
class EqualityMixin(object):
    """Mixin providing value equality: same class and identical __dict__."""

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other: Any) -> bool:
        return not self.__eq__(other)
class Sizeable():
    """Interface for objects that can report an abstract size (used for
    message/cache accounting)."""
    def get_size(self) -> int:
        # Subclasses must provide their own size metric.
        raise NotImplementedError()
class CacheKey(Sizeable):
    """Hashable key addressing an entry in ModelHandler's global cache."""
    def __init__(self, *args):
        # Store components as a tuple so the key is immutable and hashable.
        self.key = tuple(args)
    def get(self):
        """Return the underlying key tuple."""
        return self.key
    def get_size(self) -> int:
        """Size of the *cached value* this key points to, not of the key itself."""
        # Local import avoids a circular import with gossipy.model.handler.
        from gossipy.model.handler import ModelHandler
        val = ModelHandler._CACHE[self].value
        if isinstance(val, (float, int, bool)): return 1
        elif isinstance(val, Sizeable): return val.get_size()
        else:
            LOG.warning("Impossible to compute the size of %s. Set to 0." %val)
            return 0
    def __repr__(self):
        return str(self.key)
    def __hash__(self):
        return hash(self.key)
    def __eq__(self, other: Any) -> bool:
        if isinstance(other, CacheKey):
            return self.key == other.key
        return False
    def __ne__(self, other: Any):
        return not (self == other)
class CacheItem(Sizeable):
    """Reference-counted wrapper around a cached value."""

    def __init__(self, value: Any):
        self.value = value
        # A freshly cached item starts with a single reference.
        self.refs = 1

    def add_ref(self):
        """Register one more reference to this item."""
        self.refs += 1

    def del_ref(self):
        """Drop one reference and return the wrapped value."""
        self.refs -= 1
        return self.value

    def is_referenced(self):
        """True while at least one reference remains."""
        return self.refs > 0

    def get_size(self) -> int:
        """Abstract size of the wrapped value (1 per scalar, recursive for Sizeable)."""
        value = self.value
        if isinstance(value, (tuple, list)):
            total = 0
            for element in value:
                if element is None:
                    continue
                if isinstance(element, (float, int, bool)):
                    total += 1
                elif isinstance(element, Sizeable):
                    total += element.get_size()
                else:
                    LOG.warning("Impossible to compute the size of %s. Set to 0." %element)
            return max(total, 1)
        if isinstance(value, Sizeable):
            return value.get_size()
        if isinstance(value, (float, int, bool)):
            return 1
        LOG.warning("Impossible to compute the size of %s. Set to 0." %value)
        return 0
class Message(Sizeable):
    """A timestamped message exchanged between two gossip nodes."""
    def __init__(self,
                 timestamp: int,
                 sender: int,
                 receiver: int,
                 type: MessageType,
                 value: Tuple[Any, ...]):
        # timestamp: simulation time of sending; sender/receiver: node ids;
        # type: one of MessageType; value: payload tuple (None means an ACK).
        self.timestamp = timestamp
        self.sender = sender
        self.receiver = receiver
        self.type = type
        self.value = value
    def get_size(self) -> int:
        """Abstract size of the payload: 1 per scalar, recursive for Sizeable items.

        Raises:
            TypeError: when a payload element's size cannot be computed.
        """
        if self.value is None: return 1
        if isinstance(self.value, (tuple, list)):
            sz: int = 0
            for t in self.value:
                if t is None: continue
                if isinstance(t, (float, int, bool)): sz += 1
                elif isinstance(t, Sizeable): sz += t.get_size()
                else: raise TypeError("Cannot compute the size of the payload!")
            # An empty (or all-None) payload still counts as size 1.
            return max(sz, 1)
        elif isinstance(self.value, Sizeable):
            return self.value.get_size()
        elif isinstance(self.value, (float, int, bool)):
            return 1
        else:
            raise TypeError("Cannot compute the size of the payload!")
    def __repr__(self) -> str:
        s: str = "T%d [%d -> %d] {%s}: " %(self.timestamp,
                                           self.sender,
                                           self.receiver,
                                           self.type.name)
        s += "ACK" if self.value is None else str(self.value)
        return s
|
"""pytest-splinter package."""
__version__ = "3.3.0"
|
# Generated by Django 2.2.6 on 2019-10-17 18:52
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional, unique ``auto_generated_id`` CharField to RunRequest."""
    dependencies = [("zezere", "0001_initial")]
    operations = [
        migrations.AddField(
            model_name="runrequest",
            name="auto_generated_id",
            field=models.CharField(
                blank=True,
                max_length=80,
                null=True,
                unique=True,
                verbose_name="Auto generated ID",
            ),
        )
    ]
|
# Python 2.7 or Python 3.x
"""
Unittest of parse_lammps_log.parse_log
Adapted from/inspired by pymatgen:
https://github.com/materialsproject/pymatgen/blob/master/pymatgen/io/lammps/tests/test_output.py
Alta Fang, 2017
"""
import os
import unittest
import numpy as np
from parse_lammps_log.parse_log import LammpsLog
test_dir = os.path.join(os.path.dirname(__file__))
class TestParseLog(unittest.TestCase):
    """Regression tests for LammpsLog against pre-saved expected-data files
    in test_files/ (one per thermo_style: custom, multi, one)."""
    def test_log_custom(self):
        """
        Test parsing a LAMMPs log file with thermo_style custom.
        """
        # Create LammpsLog object from log text file
        log_file_custom = os.path.join(test_dir, "test_files", "log.lammps_custom")
        self.log_custom = LammpsLog(log_file=log_file_custom)
        # Check fields
        expected_fields = "step time pe ke etotal temp press c_msd[4]".split()
        self.assertEqual(sorted(expected_fields),
                         sorted(self.log_custom.thermo_data.keys()))
        # Code for making the expected data (dict values sorted by dict keys)
        #np.savetxt(os.path.join(test_dir, "log_custom_data.txt"),
        #           np.stack([v[1] for v
        #                     in sorted(self.log_custom.thermo_data.items())])[0])
        # Check data
        expected_data = np.loadtxt(os.path.join(test_dir, "test_files",
                                                "log_custom_data.txt"))
        np.testing.assert_allclose(expected_data, np.stack([v[1] for v
                                   in sorted(self.log_custom.thermo_data.items())])[0])
    def test_log_multi(self):
        """
        Test parsing a LAMMPs log file with thermo_style multi.
        """
        # Create LammpsLog object from log text file
        log_file_multi = os.path.join(test_dir, "test_files", "log.lammps_multi")
        self.log_multi = LammpsLog(log_file=log_file_multi)
        # Check fields
        expected_fields = "Step CPU TotEng KinEng Temp PotEng E_bond E_angle \
                           E_dihed E_impro E_vdwl E_coul E_long Press \
                           Volume".split()
        self.assertEqual(sorted(expected_fields),
                         sorted(self.log_multi.thermo_data.keys()))
        # Code for making the expected data (dict values sorted by dict keys)
        #np.savetxt(os.path.join(test_dir, "log_multi_data.txt"),
        #           np.stack([v[1] for v
        #                     in sorted(self.log_multi.thermo_data.items())])[0])
        # Check data
        expected_data = np.loadtxt(os.path.join(test_dir, "test_files",
                                                "log_multi_data.txt"))
        np.testing.assert_allclose(expected_data, np.stack([v[1] for v
                                   in sorted(self.log_multi.thermo_data.items())])[0])
    def test_log_one(self):
        """
        Test parsing a LAMMPs log file with thermo_style one.
        """
        # Create LammpsLog object from log text file
        log_file_one = os.path.join(test_dir, "test_files", "log.lammps_one")
        self.log_one = LammpsLog(log_file=log_file_one)
        # Code for making the expected data (dict values sorted by dict keys)
        #np.savetxt(os.path.join(test_dir, "log_one_data.txt"),
        #           np.stack([v[1] for v
        #                     in sorted(self.log_one.thermo_data.items())])[0])
        # Check data
        expected_data = np.loadtxt(os.path.join(test_dir, "test_files",
                                                "log_one_data.txt"))
        np.testing.assert_allclose(expected_data, np.stack([v[1] for v
                                   in sorted(self.log_one.thermo_data.items())])[0])
if __name__ == '__main__':
unittest.main()
|
from multiprocessing import Process
import os
import time
def info(title):
    """Print *title* plus module name, parent PID and own PID for this process."""
    print(title)
    details = (
        ('module name:', __name__),
        ('parent process:', os.getppid()),
        ('process id:', os.getpid()),
    )
    for label, value in details:
        print(label, value)
def foo(name):
    """Print process info, then emit a once-per-second counter heartbeat forever."""
    info('function f')
    tick = 0
    while True:
        print('Thread {}, count: {}'.format(name, tick))
        tick += 1
        time.sleep(1)
def boo(name):
    """Print process info, then emit a once-per-second counter heartbeat forever."""
    info('function b')
    tick = 0
    while True:
        print('Thread {}, count: {}'.format(name, tick))
        tick += 1
        time.sleep(1)
if __name__ == '__main__':
    info('main line')
    # Spawn two independent worker processes; each loops forever printing a counter.
    p = Process(target=foo, args=('bob', ))
    j = Process(target=boo, args=('sarah', ))
    j.start()
    p.start()
    # NOTE(review): both workers loop forever, so these joins never return —
    # the script must be interrupted manually.
    p.join()
    j.join()
|
# pip install selenium
# pip install webdriver-manager
import re
import urllib
import soup as soup
import time
from selenium import webdriver
import pandas
from webdriver_manager.chrome import ChromeDriverManager
driver = webdriver.Chrome(ChromeDriverManager().install())
driver.implicitly_wait(3) # 웹 자원 로드를 위해 3초 기다려줌
from selenium.webdriver.common.keys import Keys
from bs4 import BeautifulSoup
import time
# Image crawling
body = driver.find_element_by_tag_name('body')
# Click the area that lets you choose the sort order (by popularity / by date)
# driver.find_element_by_xpath('//paper-button[@class="dropdown-trigger style-scope yt-dropdown-menu"]').click()
# Click the "sort by popularity" category
# driver.find_element_by_xpath('//paper-listbox[@class="dropdown-content style-scope yt-dropdown-menu"]/a[1]').click()
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
# comments=soup.find_all('yt-formatted-string',attrs={'class':'style-scope ytd-comment-renderer'})
cmmt_box = soup.find_all(attrs={'id': 'wrap'})
# real=soup.find('video')
# real=real.get('src')
# print(real)
# //*[@id="container"]/div/div/div[3]/div[1]/table/tbody/tr[1]/td[2]/dl/dt/a/text()
# //*[@id="container"]/div/div/div[3]/div[1]/table/tbody/tr[1]/td[3]
# //*[@id="container"]/div/div/div[3]/div[1]/table/tbody/tr[2]/td[2]/dl/dt/a/text()
from collections import OrderedDict
import json
# One accumulator list per MapleStory job class, filled by the per-job
# scraping loops below; `data` collects the final ordered output.
data = OrderedDict()
nightlord = []
nightwalker = []
darkknight = []
demonslayer = []
demonavenger = []
dualblader = []
luminous = []
mercedes = []
mechanic = []
mihile = []
viper = []
battlemage = []
bowmaster = []
blaster = []
bishop = []
shadower = []
soulmaster = []
striker = []
marks = []
adele = []
aran = []
ark = []
arkmagefp = []
arkmagetc = []
evan = []
angelicbuster = []
wildhunter = []
windbreaker = []
shade = []
illium = []
xenon = []
zero = []
cadena = []
kaiser = []
cannonmaster = []
captain = []
kinesis = []
paladin = []
pathfinder = []
phantom = []
flamewizard = []
hoyoung = []
hero = []
def _collect_job_stats(url, records):
    """Scrape the three summary cards of one maple.gg job page into *records*.

    Cards 1-2 hold top-record info (record, time, player); card 3 holds job
    population stats.  The same six xpaths are read for every card — only the
    dict keys differ for card 3, exactly as in the original copy-pasted loops.
    """
    driver.get(url)
    base = '//*[@id="app"]/div[2]/div[4]/div[1]/div/div[{}]/section'
    for j in range(1, 4):
        section = base.format(j)
        title = driver.find_element_by_xpath(section + '/header').text
        top_recode = driver.find_element_by_xpath(section + '/div/h1').text
        top_time = driver.find_element_by_xpath(section + '/div/small').text
        name_level = driver.find_element_by_xpath(section + '/div/div[1]/span').text
        ranking = driver.find_element_by_xpath(section + '/div/div[2]/span').text
        date = driver.find_element_by_xpath(section + '/div/div[3]/span').text
        if j != 3:
            records.append({
                'title': title,
                'topRecode': top_recode,
                'topTime': top_time,
                'nameLevel': name_level,
                'ranking': ranking,
                'date': date,
            })
        else:
            records.append({
                'title': title,
                'ranking': top_recode,
                'jobCount': top_time,
                'people': name_level,
                'ratio': ranking,
                'date': date,
            })

# Same page order and identical per-page behavior as the original
# copy-pasted blocks, now expressed as one call per job.
_collect_job_stats('https://maple.gg/job/nightlord', nightlord)
_collect_job_stats('https://maple.gg/job/nightwalker', nightwalker)
_collect_job_stats('https://maple.gg/job/darkknight', darkknight)
_collect_job_stats('https://maple.gg/job/demonslayer', demonslayer)
_collect_job_stats('https://maple.gg/job/demonavenger', demonavenger)
_collect_job_stats('https://maple.gg/job/dualblader', dualblader)
_collect_job_stats('https://maple.gg/job/luminous', luminous)
_collect_job_stats('https://maple.gg/job/mercedes', mercedes)
driver.get('https://maple.gg/job/mechanic')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
mechanic.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
mechanic.append(top)
driver.get('https://maple.gg/job/mihile')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
mihile.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
mihile.append(top)
driver.get('https://maple.gg/job/viper')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
viper.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
viper.append(top)
driver.get('https://maple.gg/job/battlemage')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
battlemage.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
battlemage.append(top)
driver.get('https://maple.gg/job/bowmaster')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
bowmaster.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
bowmaster.append(top)
driver.get('https://maple.gg/job/blaster')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
blaster.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
blaster.append(top)
driver.get('https://maple.gg/job/bishop')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
bishop.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
bishop.append(top)
driver.get('https://maple.gg/job/shadower')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
shadower.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
shadower.append(top)
driver.get('https://maple.gg/job/soulmaster')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
soulmaster.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
soulmaster.append(top)
driver.get('https://maple.gg/job/striker')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
striker.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
striker.append(top)
driver.get('https://maple.gg/job/marks')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
marks.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
marks.append(top)
driver.get('https://maple.gg/job/adele')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
adele.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
adele.append(top)
driver.get('https://maple.gg/job/aran')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
aran.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
aran.append(top)
driver.get('https://maple.gg/job/ark')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
ark.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
ark.append(top)
driver.get('https://maple.gg/job/arkmagefp')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
arkmagefp.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
arkmagefp.append(top)
driver.get('https://maple.gg/job/arkmagetc')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
arkmagetc.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
arkmagetc.append(top)
driver.get('https://maple.gg/job/evan')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
evan.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
evan.append(top)
driver.get('https://maple.gg/job/angelicbuster')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
angelicbuster.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
angelicbuster.append(top)
driver.get('https://maple.gg/job/wildhunter')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
wildhunter.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
wildhunter.append(top)
driver.get('https://maple.gg/job/windbreaker')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
windbreaker.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
windbreaker.append(top)
driver.get('https://maple.gg/job/shade')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
shade.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
shade.append(top)
driver.get('https://maple.gg/job/illium')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
illium.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
illium.append(top)
driver.get('https://maple.gg/job/xenon')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
xenon.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
xenon.append(top)
driver.get('https://maple.gg/job/zero')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
zero.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
zero.append(top)
driver.get('https://maple.gg/job/cadena')
for j in range(1, 4):
top = {}
title = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/header').text
topRecode = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/h1').text
topTime = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/small').text
nameLevel = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[1]/span').text
ranking = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[2]/span').text
date = driver.find_element_by_xpath(
'//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section/div/div[3]/span').text
if j != 3:
top['title'] = title
top['topRecode'] = topRecode
top['topTime'] = topTime
top['nameLevel'] = nameLevel
top['ranking'] = ranking
top['date'] = date
cadena.append(top)
else:
top['title'] = title
top['ranking'] = topRecode
top['jobCount'] = topTime
top['people'] = nameLevel
top['ratio'] = ranking
top['date'] = date
cadena.append(top)
def _scrape_job_tops(drv, url, out):
    """Scrape the three summary cards of a maple.gg job page into *out*.

    Each job page shows three ``section`` cards. Cards 1 and 2 are record
    cards (stored under topRecode/topTime/nameLevel/ranking keys); card 3
    is the job-population card, whose values mean rank/count/people/ratio,
    so it is stored under different keys — this mirrors the original
    per-job copy-pasted blocks exactly.

    :param drv: Selenium WebDriver already logged in / configured upstream.
    :param url: job page URL to load.
    :param out: list that receives one dict per card (mutated in place).
    """
    drv.get(url)
    for j in range(1, 4):
        # Common XPath prefix for card number j; hoisted so each field
        # query only differs in its suffix.
        base = '//*[@id="app"]/div[2]/div[4]/div[1]/div/div[' + str(j) + ']/section'
        title = drv.find_element_by_xpath(base + '/header').text
        h1_text = drv.find_element_by_xpath(base + '/div/h1').text
        small_text = drv.find_element_by_xpath(base + '/div/small').text
        span1 = drv.find_element_by_xpath(base + '/div/div[1]/span').text
        span2 = drv.find_element_by_xpath(base + '/div/div[2]/span').text
        span3 = drv.find_element_by_xpath(base + '/div/div[3]/span').text
        if j != 3:
            # Record card: key names match the original dict insertion order.
            out.append({
                'title': title,
                'topRecode': h1_text,
                'topTime': small_text,
                'nameLevel': span1,
                'ranking': span2,
                'date': span3,
            })
        else:
            # Population card: same elements, different meaning/keys.
            out.append({
                'title': title,
                'ranking': h1_text,
                'jobCount': small_text,
                'people': span1,
                'ratio': span2,
                'date': span3,
            })


# Visit each remaining job page in the original order and fill the
# corresponding (already-defined) result list.
for _url, _out in (
    ('https://maple.gg/job/kaiser', kaiser),
    ('https://maple.gg/job/cannonmaster', cannonmaster),
    ('https://maple.gg/job/captain', captain),
    ('https://maple.gg/job/kinesis', kinesis),
    ('https://maple.gg/job/paladin', paladin),
    ('https://maple.gg/job/pathfinder', pathfinder),
    ('https://maple.gg/job/phantom', phantom),
    ('https://maple.gg/job/flamewizard', flamewizard),
    ('https://maple.gg/job/hoyoung', hoyoung),
    ('https://maple.gg/job/hero', hero),
):
    _scrape_job_tops(driver, _url, _out)
# Collect every per-job result list under its output key in one shot.
# NOTE(review): key spellings are kept exactly as the original wrote them
# ('mecanic' for mechanic, 'mikhail' for mihile, mixed camelCase) —
# presumably downstream JSON consumers depend on them; confirm before
# renaming.
data.update({
    'nightlord': nightlord,
    'nightWalker': nightwalker,
    'darkknight': darkknight,
    'demonslayer': demonslayer,
    'demonavenger': demonavenger,
    'dualblader': dualblader,
    'luminous': luminous,
    'mercedes': mercedes,
    'mecanic': mechanic,
    'mikhail': mihile,
    'viper': viper,
    'battleMage': battlemage,
    'bowmaster': bowmaster,
    'blaster': blaster,
    'bishop': bishop,
    'shadower': shadower,
    'soulMaster': soulmaster,
    'striker': striker,
    'marks': marks,
    'adele': adele,
    'aran': aran,
    'ark': ark,
    'arkmagefp': arkmagefp,
    'arkmagetc': arkmagetc,
    'evan': evan,
    'angelicbuster': angelicbuster,
    'wildHunter': wildhunter,
    'windBreaker': windbreaker,
    'shade': shade,
    'illium': illium,
    'xenon': xenon,
    'zero': zero,
    'cadena': cadena,
    'kaiser': kaiser,
    'cannonmaster': cannonmaster,
    'captain': captain,
    'kinesis': kinesis,
    'paladin': paladin,
    'pathfinder': pathfinder,
    'phantom': phantom,
    'flameWizard': flamewizard,
    'hoyoung': hoyoung,
    'hero': hero,
})
# Persist all scraped job data as tab-indented, non-ASCII-preserving JSON.
with open('Job.json', 'w', encoding="utf-8") as out_fp:
    out_fp.write(json.dumps(data, ensure_ascii=False, indent="\t"))
# --- Non-code residue below (dataset-page text accidentally appended);
# --- commented out so the script remains syntactically valid.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.